"""
|
|
API endpoints for retrieving activity reports.
|
|
|
|
This module provides endpoints for retrieving daily, weekly, and monthly reports,
|
|
as well as detailed user activity logs.
|
|
"""
|
|
import requests # Added import for requests
|
|
from datetime import datetime, timedelta # Removed timezone
|
|
from flask import Blueprint, current_app, jsonify, request
|
|
from sqlalchemy import text
|
|
from sqlalchemy.exc import SQLAlchemyError
|
|
from sqlalchemy.sql import func, and_
|
|
|
|
from app import db
|
|
from app.utils.formatting import (
|
|
format_report_data,
|
|
format_user_activity,
|
|
) # Added format_user_activity
|
|
from app.utils.queries import calculate_duration_sql
|
|
from app.models import UserRealWorkSummary, WorkEvent
|
|
|
|
# Create a blueprint for report-related API endpoints
|
|
reports_bp = Blueprint("reports", __name__, url_prefix="/api")
|
|
|
|
|
|
def fetch_duration_report(
    time_period, user_filter=None, start_date=None, end_date=None
):
    """
    Fetches duration report data from the database.

    Args:
        time_period (str): Time period to group by ('daily', 'weekly', or 'monthly')
        user_filter (str, optional): Username to filter results by.
        start_date (str, optional): Start date for filtering (YYYY-MM-DD).
        end_date (str, optional): End date for filtering (YYYY-MM-DD).

    Returns:
        list: List of report data rows
    """
    current_app.logger.debug(
        f"Fetching duration report. Period: {time_period}, User: {user_filter}, Start: {start_date}, End: {end_date}"
    )

    # Get SQL query and parameters from the refactored function
    sql_query, params = calculate_duration_sql(
        time_period,
        user_filter=user_filter,
        start_date_filter=start_date,
        end_date_filter=end_date,
    )

    # Debugging for database connection URI (can be made less verbose or conditional)
    db_uri = current_app.config["SQLALCHEMY_DATABASE_URI"]
    masked_uri = db_uri
    if "@" in db_uri:
        parts = db_uri.split("@")
        masked_uri = "****@" + parts[1]
    current_app.logger.info(f"Executing query using database: {masked_uri}")
    # current_app.logger.debug(f"Query: {sql_query}")  # Optional: log full query
    # current_app.logger.debug(f"Params: {params}")  # Optional: log params

    try:
        # Example diagnostic queries (can be removed or made conditional for production)
        # count_query = "SELECT COUNT(*) FROM work_events"
        # count_result = db.session.execute(text(count_query)).scalar()
        # current_app.logger.info(f"Total records in work_events table: {count_result}")

        # users_query = "SELECT DISTINCT \"user\" FROM work_events"
        # users_result = db.session.execute(text(users_query)).fetchall()
        # user_list = [row[0] for row in users_result]
        # current_app.logger.info(f"Distinct users in work_events table: {user_list}")

        results = db.session.execute(text(sql_query), params).mappings().all()
        current_app.logger.debug(
            f"Database query executed for {time_period} report. Found {len(results)} rows."
        )
        return results
    except SQLAlchemyError as e:
        current_app.logger.error(
            f"Error executing duration report query (period: {time_period}): {e}"
        )
        raise


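# Illustrative call of fetch_duration_report above (argument values are made up;
# the exact row shape depends on what calculate_duration_sql selects):
#   rows = fetch_duration_report(
#       "daily", user_filter="alice",
#       start_date="2024-05-06", end_date="2024-05-06",
#   )
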
def fetch_user_activity(username, start_date, end_date):
    """
    Fetches detailed user activity logs for a specific date range.

    Args:
        username (str): Username to fetch activity for
        start_date (str): Start date in YYYY-MM-DD format
        end_date (str): End date in YYYY-MM-DD format

    Returns:
        list: List of user activity rows
    """
    current_app.logger.debug(
        f"Fetching activity logs for user: {username}, from: {start_date}, to: {end_date}"
    )

    # SQL query to match working and stopped pairs and calculate durations
    sql_query = """
        WITH EventPairs AS (
            SELECT
                w1."user",
                DATE(w1.ts) AS work_date,
                w1.ts AS start_time,
                w2.ts AS end_time,
                EXTRACT(EPOCH FROM (w2.ts - w1.ts))/3600 AS session_duration_hours
            FROM
                work_events w1
            JOIN
                work_events w2 ON w1."user" = w2."user"
                AND w1.state = 'working'
                AND w2.state = 'stopped'
                AND w2.ts > w1.ts
                AND NOT EXISTS (
                    SELECT 1 FROM work_events w3
                    WHERE w3."user" = w1."user"
                    AND w3.ts > w1.ts AND w3.ts < w2.ts
                )
            WHERE
                w1."user" = :username
                AND DATE(w1.ts) BETWEEN :start_date AND :end_date
            ORDER BY
                w1.ts
        )
        SELECT * FROM EventPairs
    """

    try:
        params = {"username": username, "start_date": start_date, "end_date": end_date}
        results = db.session.execute(text(sql_query), params).mappings().all()
        current_app.logger.debug(
            f"User activity query executed. Found {len(results)} rows."
        )
        return results
    except SQLAlchemyError as e:
        current_app.logger.error(f"Error executing user activity query: {e}")
        raise


@reports_bp.route("/reports/daily", methods=["GET"])
|
|
def get_daily_report():
|
|
"""
|
|
Endpoint for retrieving daily report data.
|
|
All dates are processed in UTC.
|
|
|
|
Query Parameters:
|
|
user (str, optional): Filter results by username
|
|
date (str, optional): Specific date in YYYY-MM-DD format (UTC)
|
|
"""
|
|
current_app.logger.info("Daily report API requested.")
|
|
try:
|
|
user_filter = request.args.get("user")
|
|
selected_date_str = request.args.get(
|
|
"date", datetime.utcnow().strftime("%Y-%m-%d")
|
|
)
|
|
current_app.logger.info(
|
|
f"Fetching daily report data for date: {selected_date_str}, user: {user_filter or 'All'}"
|
|
)
|
|
|
|
# fetch_duration_report now handles date filtering via calculate_duration_sql
|
|
results = fetch_duration_report(
|
|
time_period="daily",
|
|
user_filter=user_filter,
|
|
start_date=selected_date_str, # Pass the selected date as both start and end
|
|
end_date=selected_date_str, # for precise daily filtering by the SQL query.
|
|
)
|
|
|
|
# The SQL query is now expected to return data only for the selected_date_str for 'daily' period.
|
|
# No more manual filtering needed here.
|
|
# filtered_results = []
|
|
# for row in results:
|
|
# row_date = row['period_start']
|
|
# if hasattr(row_date, 'isoformat'):
|
|
# row_date = row_date.isoformat()
|
|
# if row_date and row_date.startswith(selected_date_str):
|
|
# filtered_results.append(row)
|
|
|
|
# current_app.logger.info(f"Raw results usernames for date {selected_date_str}: {[r['user'] for r in results]}")
|
|
|
|
report = format_report_data(results, "daily") # Pass results directly
|
|
|
|
# current_app.logger.info(f"Formatted data usernames: {[r['user'] for r in report]}")
|
|
|
|
current_app.logger.info(
|
|
f"Successfully generated daily report for date: {selected_date_str}, user: {user_filter or 'All'}. Found {len(results)} records."
|
|
)
|
|
return jsonify({"success": True, "data": report})
|
|
except SQLAlchemyError as e:
|
|
current_app.logger.error(f"Database error generating daily report: {e}")
|
|
return jsonify({"success": False, "message": "Database error generating report"}), 500
|
|
except Exception as e:
|
|
current_app.logger.exception(f"Unexpected error generating daily report: {e}")
|
|
return jsonify({"success": False, "message": "Error generating report"}), 500
|
|
|
|
|
|
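# Illustrative request/response for the daily endpoint above (values are made
# up; the fields of each report row are determined by format_report_data):
#   GET /api/reports/daily?date=2024-05-06&user=alice
#   -> {"success": true, "data": [ ...one aggregated row per user for that day... ]}
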
@reports_bp.route("/reports/weekly", methods=["GET"])
|
|
def get_weekly_report():
|
|
"""
|
|
Endpoint for retrieving weekly report data.
|
|
All dates are processed in UTC.
|
|
|
|
Query Parameters:
|
|
user (str, optional): Filter results by username
|
|
day (str, optional): Specific day in YYYY-MM-DD format (UTC) for single day view from week
|
|
If not provided, defaults to the current week.
|
|
"""
|
|
current_app.logger.info("Weekly report API requested.")
|
|
try:
|
|
user_filter = request.args.get("user")
|
|
day_filter_str = request.args.get("day")
|
|
|
|
start_date_param = None
|
|
end_date_param = None
|
|
report_period_type = "weekly" # For format_report_data
|
|
|
|
if day_filter_str:
|
|
current_app.logger.info(
|
|
f"Filtering weekly report for specific day: {day_filter_str}, user: {user_filter or 'All'}"
|
|
)
|
|
# Fetch as a daily report for that specific day
|
|
start_date_param = day_filter_str
|
|
end_date_param = day_filter_str
|
|
report_period_type = "daily" # Format as daily if single day is requested
|
|
results = fetch_duration_report(
|
|
"daily",
|
|
user_filter,
|
|
start_date=start_date_param,
|
|
end_date=end_date_param,
|
|
)
|
|
else:
|
|
current_app.logger.info(
|
|
f"Fetching weekly report for current week, user: {user_filter or 'All'}"
|
|
)
|
|
# Calculate start and end of the current UTC week (Mon-Sun)
|
|
now_utc = datetime.utcnow()
|
|
current_week_start_utc = now_utc - timedelta(
|
|
days=now_utc.weekday()
|
|
) # Monday
|
|
current_week_end_utc = current_week_start_utc + timedelta(days=6) # Sunday
|
|
|
|
start_date_param = current_week_start_utc.strftime("%Y-%m-%d")
|
|
end_date_param = current_week_end_utc.strftime("%Y-%m-%d")
|
|
current_app.logger.info(
|
|
f"Current UTC week: {start_date_param} to {end_date_param}"
|
|
)
|
|
|
|
results = fetch_duration_report(
|
|
time_period="weekly", # The SQL will group by week_start using DATE_TRUNC('week',...)
|
|
user_filter=user_filter,
|
|
start_date=start_date_param, # Filters events within this week
|
|
end_date=end_date_param,
|
|
)
|
|
# The SQL query `calculate_duration_sql` for 'weekly' period_grouping does:
|
|
# GROUP BY "user", DATE_TRUNC('week', start_time)
|
|
# So, if date filters are applied correctly to the events, this should return
|
|
# one row per user for the week specified by start_date/end_date,
|
|
# with period_start being the Monday of that week.
|
|
# The previous complex Python aggregation for current week might no longer be needed
|
|
# if the SQL correctly aggregates for the given date range.
|
|
|
|
# current_app.logger.info(f"Raw results usernames: {[r['user'] for r in results]}")
|
|
report = format_report_data(results, report_period_type)
|
|
# current_app.logger.info(f"Formatted data usernames: {[r['user'] for r in report]}")
|
|
|
|
log_message_suffix = (
|
|
f"day {day_filter_str}"
|
|
if day_filter_str
|
|
else f"week {start_date_param}-{end_date_param}"
|
|
)
|
|
current_app.logger.info(
|
|
f"Successfully generated weekly report for {log_message_suffix}, user: {user_filter or 'All'}. Found {len(results)} records."
|
|
)
|
|
return jsonify({"success": True, "data": report})
|
|
except SQLAlchemyError as e:
|
|
current_app.logger.error(f"Database error generating weekly report: {e}")
|
|
return jsonify({"success": False, "message": "Database error generating report"}), 500
|
|
except Exception as e:
|
|
current_app.logger.exception(f"Unexpected error generating weekly report: {e}")
|
|
return jsonify({"success": False, "message": "Error generating report"}), 500
|
|
|
|
|
|
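# Worked example of the week-window calculation in get_weekly_report above
# (assumed date): if now_utc falls on Thursday 2024-05-09, weekday() == 3, so
# current_week_start_utc is 2024-05-06 (Monday) and current_week_end_utc is
# 2024-05-12 (Sunday).
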
@reports_bp.route("/reports/monthly", methods=["GET"])
|
|
def get_monthly_report():
|
|
"""
|
|
Endpoint for retrieving monthly report data.
|
|
All dates are processed in UTC. Defaults to the current month.
|
|
|
|
Query Parameters:
|
|
user (str, optional): Filter results by username
|
|
# month (str, optional): Specific month (e.g., YYYY-MM) - Future enhancement
|
|
"""
|
|
current_app.logger.info("Monthly report API requested.")
|
|
try:
|
|
user_filter = request.args.get("user")
|
|
current_app.logger.info(
|
|
f"Fetching monthly report data for current month, user: {user_filter or 'All'}"
|
|
)
|
|
|
|
# Calculate start and end of the current UTC month
|
|
now_utc = datetime.utcnow()
|
|
current_month_start_utc = now_utc.replace(
|
|
day=1, hour=0, minute=0, second=0, microsecond=0
|
|
)
|
|
|
|
# Find the first day of the next month, then subtract one day to get the end of the current month
|
|
if current_month_start_utc.month == 12:
|
|
next_month_start_utc = current_month_start_utc.replace(
|
|
year=current_month_start_utc.year + 1, month=1, day=1
|
|
)
|
|
else:
|
|
next_month_start_utc = current_month_start_utc.replace(
|
|
month=current_month_start_utc.month + 1, day=1
|
|
)
|
|
current_month_end_utc = next_month_start_utc - timedelta(days=1)
|
|
|
|
start_date_param = current_month_start_utc.strftime("%Y-%m-%d")
|
|
end_date_param = current_month_end_utc.strftime("%Y-%m-%d")
|
|
current_app.logger.info(
|
|
f"Current UTC month: {start_date_param} to {end_date_param}"
|
|
)
|
|
|
|
results = fetch_duration_report(
|
|
time_period="monthly", # SQL groups by DATE_TRUNC('month',...)
|
|
user_filter=user_filter,
|
|
start_date=start_date_param, # Filters events within this month
|
|
end_date=end_date_param,
|
|
)
|
|
# Similar to weekly, SQL query with date filters should provide data aggregated for the current month.
|
|
# Previous Python-based aggregation for current month is removed.
|
|
|
|
# current_app.logger.info(f"Raw results usernames: {[r['user'] for r in results]}")
|
|
report = format_report_data(results, "monthly")
|
|
# current_app.logger.info(f"Formatted data usernames: {[r['user'] for r in report]}")
|
|
|
|
current_app.logger.info(
|
|
f"Successfully generated monthly report for {start_date_param[:7]}, user: {user_filter or 'All'}. Found {len(results)} records."
|
|
)
|
|
return jsonify({"success": True, "data": report})
|
|
except SQLAlchemyError as e:
|
|
current_app.logger.error(f"Database error generating monthly report: {e}")
|
|
return jsonify({"success": False, "message": "Database error generating report"}), 500
|
|
except Exception as e:
|
|
current_app.logger.exception(f"Unexpected error generating monthly report: {e}")
|
|
return jsonify({"success": False, "message": "Error generating report"}), 500
|
|
|
|
|
|
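# Worked example of the month-window calculation in get_monthly_report above
# (assumed date): with now_utc in February 2024, current_month_start_utc is
# 2024-02-01, next_month_start_utc is 2024-03-01, and subtracting one day gives
# current_month_end_utc = 2024-02-29 (the leap day is handled automatically).
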
@reports_bp.route("/user-activity/<username>", methods=["GET"])
|
|
def get_user_activity(username):
|
|
"""
|
|
Gets detailed activity logs for a specific user.
|
|
All dates are processed in UTC.
|
|
|
|
Path Parameter:
|
|
username: Username to fetch activity for
|
|
|
|
Query Parameters:
|
|
start_date (str, optional): Start date in YYYY-MM-DD format (UTC)
|
|
end_date (str, optional): End date in YYYY-MM-DD format (UTC)
|
|
"""
|
|
current_app.logger.info(f"User activity logs requested for: {username}")
|
|
|
|
# Get date range from query parameters, default to current UTC day if not provided
|
|
start_date = request.args.get(
|
|
"start_date", datetime.utcnow().strftime("%Y-%m-%d")
|
|
) # Changed to utcnow
|
|
end_date = request.args.get(
|
|
"end_date", start_date
|
|
) # Default end_date to start_date if not provided
|
|
|
|
try:
|
|
current_app.logger.info(
|
|
f"Fetching activity logs for user: {username}, from: {start_date}, to: {end_date}"
|
|
)
|
|
results = fetch_user_activity(username, start_date, end_date)
|
|
activity_logs = format_user_activity(results)
|
|
current_app.logger.info(
|
|
f"Successfully retrieved {len(activity_logs)} activity records for user: {username}"
|
|
)
|
|
return jsonify(
|
|
{
|
|
"success": True,
|
|
"data": {
|
|
"username": username,
|
|
"start_date": start_date,
|
|
"end_date": end_date,
|
|
"activities": activity_logs,
|
|
},
|
|
}
|
|
)
|
|
except SQLAlchemyError as e:
|
|
current_app.logger.error(f"Database error retrieving user activity for {username}: {e}")
|
|
return jsonify({"success": False, "message": "Database error retrieving activity logs"}), 500
|
|
except Exception as e:
|
|
current_app.logger.exception(f"Unexpected error retrieving user activity for {username}: {e}")
|
|
return (
|
|
jsonify({"success": False, "message": "Error retrieving activity logs"}),
|
|
500,
|
|
)
|
|
|
|
|
|
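# Illustrative request for the user-activity endpoint above (username and dates
# are made up; the shape of each "activities" entry comes from format_user_activity):
#   GET /api/user-activity/alice?start_date=2024-05-06&end_date=2024-05-07
#   -> {"success": true,
#       "data": {"username": "alice", "start_date": "2024-05-06",
#                "end_date": "2024-05-07", "activities": [...]}}
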
@reports_bp.route("/user-states", methods=["GET"])
|
|
def get_user_states():
|
|
"""
|
|
Endpoint for retrieving the current state of all users.
|
|
|
|
Returns a JSON object with usernames as keys and state ('working' or 'not_working') as values.
|
|
A user is considered 'not_working' if:
|
|
1. Their last state was 'stopped', OR
|
|
2. Their last state was 'working' but they've been inactive for more than 10 minutes
|
|
"""
|
|
current_app.logger.info("User states API requested")
|
|
|
|
try:
|
|
# Automatically consider a user not working after 10 minutes of inactivity
|
|
auto_timeout_seconds = 10 * 60 # 10 minutes in seconds
|
|
# current_time = datetime.utcnow() # Not strictly needed here as SQL uses NOW()
|
|
|
|
# SQL query to get the most recent state for each user with auto-timeout logic
|
|
sql_query = f"""
|
|
WITH LatestEvents AS (
|
|
SELECT
|
|
e."user",
|
|
e.state,
|
|
e.ts,
|
|
ROW_NUMBER() OVER(PARTITION BY e."user" ORDER BY e.ts DESC) as rn,
|
|
-- Calculate the time difference between the last event and now
|
|
EXTRACT(EPOCH FROM (NOW() - e.ts)) as seconds_since_last_event
|
|
FROM
|
|
work_events e
|
|
)
|
|
SELECT
|
|
"user",
|
|
state,
|
|
ts,
|
|
CASE
|
|
-- Consider as not working if last state was stopped or if inactive for 10+ minutes
|
|
WHEN state = 'stopped' OR seconds_since_last_event > {auto_timeout_seconds} THEN 'not_working'
|
|
ELSE 'working'
|
|
END as current_state,
|
|
seconds_since_last_event
|
|
FROM
|
|
LatestEvents
|
|
WHERE
|
|
rn = 1
|
|
"""
|
|
|
|
results = db.session.execute(text(sql_query)).mappings().all()
|
|
|
|
# Convert to dictionary with username -> state
|
|
user_states = {}
|
|
for row in results:
|
|
# Use the calculated current_state from the SQL query
|
|
user_states[row["user"]] = row["current_state"]
|
|
|
|
# Log users with timeout-induced state changes for debugging
|
|
# And send POST request if user timed out
|
|
if (
|
|
row["state"] == "working"
|
|
and row["current_state"] == "not_working"
|
|
and row["seconds_since_last_event"] > auto_timeout_seconds
|
|
):
|
|
user_id = row["user"]
|
|
current_app.logger.debug(
|
|
f"User {user_id} timed out: last activity was {row['seconds_since_last_event']:.0f} seconds ago. Sending POST update."
|
|
)
|
|
|
|
post_url_path = "/api/user_status_update"
|
|
payload = {"user_id": user_id, "status": "not working"}
|
|
|
|
try:
|
|
# Construct absolute URL for the internal POST request.
|
|
# Using request.url_root is generally reliable within the same application.
|
|
# Ensure SERVER_NAME is configured in production if app is behind a proxy
|
|
# and request.url_root does not correctly reflect the public-facing scheme/host.
|
|
target_post_url = request.url_root.rstrip("/") + post_url_path
|
|
|
|
current_app.logger.info(
|
|
f"Sending POST to {target_post_url} with payload: {payload}"
|
|
)
|
|
response = requests.post(target_post_url, json=payload, timeout=10)
|
|
|
|
if response.ok: # Checks for 2xx status codes
|
|
current_app.logger.info(
|
|
f"Successfully updated status for user {user_id} via POST to {post_url_path}. Status: {response.status_code}"
|
|
)
|
|
else:
|
|
current_app.logger.error(
|
|
f"Failed to update status for user {user_id} via POST to {post_url_path}. Status: {response.status_code}, Response: {response.text}"
|
|
)
|
|
except requests.exceptions.RequestException as req_e:
|
|
current_app.logger.error(
|
|
f"Error sending POST request to {post_url_path} for user {user_id}: {req_e}"
|
|
)
|
|
current_app.logger.info(
|
|
f"Successfully retrieved states for {len(user_states)} users"
|
|
)
|
|
|
|
return jsonify({"success": True, "data": user_states})
|
|
except SQLAlchemyError as e:
|
|
current_app.logger.error(f"Database error retrieving user states: {e}")
|
|
return jsonify({"success": False, "message": "Database error retrieving user states"}), 500
|
|
except Exception as e:
|
|
current_app.logger.exception(f"Unexpected error retrieving user states: {e}")
|
|
return (
|
|
jsonify({"success": False, "message": "Error retrieving user states"}),
|
|
500,
|
|
)
|
|
|
|
|
|
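# Illustrative response for the /api/user-states endpoint above (usernames are
# made up; the state values come from the CASE expression in the query):
#   GET /api/user-states
#   -> {"success": true, "data": {"alice": "working", "bob": "not_working"}}
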
@reports_bp.route("/reports/real_work_hours", methods=["GET"])
|
|
def get_real_work_hours_report():
|
|
"""
|
|
Endpoint for retrieving aggregated "real work hours" (40-minute blocks).
|
|
Shows all users active in the period, with their real work hours if any.
|
|
|
|
Query Parameters:
|
|
username (str, optional): Filter results by username.
|
|
start_date (str, optional): Start date for filtering (YYYY-MM-DD).
|
|
end_date (str, optional): End date for filtering (YYYY-MM-DD).
|
|
If only start_date is provided, end_date defaults to start_date.
|
|
If no dates, this might return a very large dataset or default to a recent period.
|
|
For this implementation, dates are strongly recommended.
|
|
"""
|
|
current_app.logger.info("Real work hours report API requested.")
|
|
try:
|
|
username_filter = request.args.get("username")
|
|
start_date_str = request.args.get("start_date")
|
|
end_date_str = request.args.get("end_date")
|
|
|
|
# Base query: select distinct user/date combinations from work_events within the period
|
|
# This ensures all users with activity in the period are potentially listed.
|
|
base_query = db.session.query(
|
|
WorkEvent.user.label("username"),
|
|
func.date(WorkEvent.ts).label("work_date")
|
|
).distinct()
|
|
|
|
if username_filter:
|
|
base_query = base_query.filter(WorkEvent.user == username_filter)
|
|
current_app.logger.info(f"Filtering real work hours for user: {username_filter}")
|
|
|
|
if start_date_str:
|
|
try:
|
|
start_date_obj = datetime.strptime(start_date_str, "%Y-%m-%d").date()
|
|
base_query = base_query.filter(func.date(WorkEvent.ts) >= start_date_obj)
|
|
current_app.logger.info(f"Filtering events from date: {start_date_str}")
|
|
if not end_date_str:
|
|
end_date_str = start_date_str
|
|
except ValueError:
|
|
return jsonify({"success": False, "message": "Invalid start_date format. Use YYYY-MM-DD."}), 400
|
|
else: # Require start_date for now to limit query scope if no user filter
|
|
if not username_filter:
|
|
# Default to today if no dates and no specific user is provided
|
|
start_date_obj = datetime.utcnow().date()
|
|
end_date_obj = start_date_obj
|
|
base_query = base_query.filter(func.date(WorkEvent.ts) == start_date_obj)
|
|
current_app.logger.info(f"Defaulting to date: {start_date_obj.strftime('%Y-%m-%d')} as no dates/user provided.")
|
|
# else if user is provided, we might allow fetching all their summaries without date filter by removing this else
|
|
|
|
if end_date_str:
|
|
try:
|
|
end_date_obj = datetime.strptime(end_date_str, "%Y-%m-%d").date()
|
|
base_query = base_query.filter(func.date(WorkEvent.ts) <= end_date_obj)
|
|
current_app.logger.info(f"Filtering events up to date: {end_date_str}")
|
|
except ValueError:
|
|
return jsonify({"success": False, "message": "Invalid end_date format. Use YYYY-MM-DD."}), 400
|
|
elif start_date_str: # Only start_date was given, end_date defaulted above
|
|
end_date_obj = start_date_obj
|
|
base_query = base_query.filter(func.date(WorkEvent.ts) <= end_date_obj)
|
|
|
|
# Subquery for active user-dates in the period
|
|
active_user_dates_subq = base_query.subquery('active_user_dates')
|
|
|
|
# Main query: LEFT JOIN UserRealWorkSummary with the active user-dates
|
|
final_query = db.session.query(
|
|
active_user_dates_subq.c.username,
|
|
active_user_dates_subq.c.work_date,
|
|
func.coalesce(UserRealWorkSummary.real_hours_counted, 0).label("real_hours_counted"),
|
|
UserRealWorkSummary.id.label("summary_id"), # Include id if needed from summary
|
|
UserRealWorkSummary.last_processed_event_id
|
|
).outerjoin(
|
|
UserRealWorkSummary,
|
|
and_(
|
|
active_user_dates_subq.c.username == UserRealWorkSummary.username,
|
|
active_user_dates_subq.c.work_date == UserRealWorkSummary.work_date
|
|
)
|
|
).order_by(active_user_dates_subq.c.work_date.desc(), active_user_dates_subq.c.username)
|
|
|
|
results = final_query.all()
|
|
|
|
report_data = [
|
|
{
|
|
"username": r.username,
|
|
"work_date": r.work_date.isoformat() if r.work_date else None,
|
|
"real_hours_counted": r.real_hours_counted,
|
|
# "summary_id": r.summary_id, # Optional
|
|
# "last_processed_event_id": r.last_processed_event_id # Optional
|
|
}
|
|
for r in results
|
|
]
|
|
|
|
current_app.logger.info(
|
|
f"Successfully generated real work hours report. Found {len(report_data)} user-date entries."
|
|
)
|
|
return jsonify({"success": True, "data": report_data})
|
|
|
|
except SQLAlchemyError as e:
|
|
current_app.logger.error(f"Database error generating real work hours report: {e}", exc_info=True)
|
|
return jsonify({"success": False, "message": "Database error generating report"}), 500
|
|
except Exception as e:
|
|
current_app.logger.exception(f"Unexpected error generating real work hours report: {e}", exc_info=True)
|
|
return jsonify({"success": False, "message": "Error generating report"}), 500
|
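
# Illustrative request/response for the real_work_hours endpoint above (values
# are made up). Users active in the period but without a UserRealWorkSummary row
# appear with real_hours_counted = 0 because of the COALESCE in the outer join:
#   GET /api/reports/real_work_hours?start_date=2024-05-06&end_date=2024-05-07
#   -> {"success": true,
#       "data": [{"username": "alice", "work_date": "2024-05-06", "real_hours_counted": 4}, ...]}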