first commit
This commit is contained in: cf2d46edd4
MTN6 Equipment Manifest REV6(Conveyor List).csv (new file, 1738 lines)
File diff suppressed because it is too large
__pycache__/find_missing_units.cpython-313.pyc (new binary file)
Binary file not shown.
app.py (new file, 233 lines)
@@ -0,0 +1,233 @@
import os
import threading
import time
import json
from flask import Flask, render_template, jsonify, Response
import git
from find_missing_units import calculate_progress  # Import the refactored function

app = Flask(__name__)

# --- Configuration ---
REPO_URL = "http://192.168.5.191:3000/LCI/MTN6"
REPO_DIR = "./cloned_repo"  # Directory to clone the repo into
BRANCH = "main"
CSV_FILENAME = "MTN6 Equipment Manifest REV6(Conveyor List).csv"
VIEWS_DIR_RELATIVE = "MTN6_SCADA/com.inductiveautomation.perspective/views/Detailed-Views"
CHECK_INTERVAL_SECONDS = 60  # How often (seconds) to check the repo for new commits

# --- Global state ---
last_commit_hash = None
progress_data = {"overall": {"total_csv": 0, "missing": 0, "found_json": 0, "percentage": 0}, "panels": {}}
status_message = "Initializing..."
repo_lock = threading.Lock()  # Lock for accessing the repo and shared data
data_updated_event = threading.Event()  # Event to signal data updates


def get_repo_path():
    return os.path.abspath(REPO_DIR)


def get_csv_path():
    return os.path.abspath(CSV_FILENAME)


def get_views_dir_path():
    return os.path.join(get_repo_path(), VIEWS_DIR_RELATIVE)
def update_progress_data():
    global progress_data, status_message
    csv_path = get_csv_path()
    views_dir = get_views_dir_path()
    current_status = ""
    new_data_calculated = None

    if not os.path.exists(csv_path):
        current_status = f"Error: CSV file not found at {csv_path}"
    elif not os.path.exists(views_dir):
        current_status = f"Error: Views directory not found at {views_dir}"
    else:
        print(f"Running analysis: CSV='{csv_path}', Views='{views_dir}'")
        current_status = "Calculating progress..."
        try:
            new_data_calculated = calculate_progress(csv_path, views_dir)
            if new_data_calculated:
                current_status = f"Progress updated successfully at {time.strftime('%Y-%m-%d %H:%M:%S')}"
            else:
                current_status = "Error: Failed to calculate progress (script returned None)."
        except Exception as e:
            current_status = f"Error running analysis script: {e}"
            new_data_calculated = None  # Ensure no partial data update

    # Update global state ONLY AFTER calculation (inside lock if modifying multiple related vars).
    # In this case, that means status and, on success, progress_data.
    print(current_status)
    status_message = current_status  # Update status regardless of success/failure
    if new_data_calculated is not None:
        progress_data = new_data_calculated

    # Signal that an update attempt finished (even if it failed, the status changed)
    data_updated_event.set()
    data_updated_event.clear()  # Reset event for next update
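One thing to note on the signalling here: set() followed immediately by clear() only wakes threads that are already blocked in wait(); a stream generator that is between two waits can miss a pulse entirely. A more robust variant, sketched below with hypothetical names (not part of this commit), replaces the pulse with a counter guarded by a Condition:

import threading

update_cond = threading.Condition()
update_seq = 0  # incremented on every finished update attempt

def signal_update():
    global update_seq
    with update_cond:
        update_seq += 1
        update_cond.notify_all()  # wake anyone currently waiting; late arrivals see the new seq

def wait_for_update(last_seen):
    """Block until the counter moves past last_seen; return the new value."""
    with update_cond:
        update_cond.wait_for(lambda: update_seq > last_seen)
        return update_seq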
def check_and_update_repo():
    global last_commit_hash, status_message
    repo_path = get_repo_path()
    did_update = False  # Flag to track if files were actually updated
    initial_hash = last_commit_hash  # Store hash before check

    with repo_lock:
        try:
            repo_existed = os.path.exists(os.path.join(repo_path, ".git"))
            if not repo_existed:
                print(f"Cloning repository {REPO_URL} into {repo_path}...")
                status_message = f"Cloning repository {REPO_URL}..."
                git.Repo.clone_from(REPO_URL, repo_path, branch=BRANCH)
                repo = git.Repo(repo_path)
                last_commit_hash = repo.head.commit.hexsha
                print(f"Initial clone complete. Commit: {last_commit_hash}")
                did_update = True  # Cloned, so considered an update
            else:
                repo = git.Repo(repo_path)
                print("Fetching updates from remote...")
                current_local_commit = repo.head.commit.hexsha
                # Record the hash *before* fetch in case fetch fails but the commit was readable
                if last_commit_hash is None:
                    last_commit_hash = current_local_commit
                origin = repo.remotes.origin
                fetch_info = origin.fetch()

                # Check if the fetch actually brought new data for the target branch
                fetched_new_commits = any(info.flags & info.NEW_HEAD for info in fetch_info if info.name == f'origin/{BRANCH}')

                current_remote_commit = repo.commit(f'origin/{BRANCH}').hexsha

                print(f"Local commit: {current_local_commit}, Remote commit: {current_remote_commit}")

                if current_local_commit != current_remote_commit:
                    print("New commit detected! Pulling changes...")
                    status_message = "Pulling updates..."
                    try:
                        pull_info = origin.pull()
                        new_commit_hash = repo.head.commit.hexsha
                        print(f"Pull successful. New commit: {new_commit_hash}")
                        last_commit_hash = new_commit_hash
                        did_update = True  # Pulled, so considered an update
                    except git.GitCommandError as e:
                        status_message = f"Error pulling repository: {e}"
                        print(status_message)
                        # Revert the hash if the pull failed
                        last_commit_hash = current_local_commit
                else:
                    print("No new commits detected.")
                    # No code change, but update the status if needed
                    if not status_message.startswith("Error"):
                        status_message = f"Checked at {time.strftime('%Y-%m-%d %H:%M:%S')}. No changes."

            # Run the analysis IF the repo was updated (cloned or pulled)
            if did_update:
                update_progress_data()
            # If no git update, but the status changed (e.g., to "No changes"), signal SSE
            elif initial_hash == last_commit_hash:
                data_updated_event.set()  # Signal status change event
                data_updated_event.clear()

        except git.GitCommandError as e:
            status_message = f"Git command error: {e}"
            print(status_message)
            # Try to get the commit hash even if the check failed
            try:
                if os.path.exists(os.path.join(repo_path, ".git")):
                    repo = git.Repo(repo_path)
                    # Use the previous hash if available, else try to read the current one
                    if last_commit_hash is None:
                        last_commit_hash = repo.head.commit.hexsha
            except Exception:
                if last_commit_hash is None:
                    last_commit_hash = "Error reading commit"
            data_updated_event.set()  # Signal error status change
            data_updated_event.clear()
        except Exception as e:
            status_message = f"Error checking repository: {e}"
            print(status_message)
            if last_commit_hash is None:
                last_commit_hash = "Error checking repo"
            data_updated_event.set()  # Signal error status change
            data_updated_event.clear()

    # Return True if the analysis was run, False otherwise
    return did_update
def periodic_repo_check():
    """Runs the check_and_update_repo function periodically."""
    while True:
        print(f"\nStarting periodic repository check (Interval: {CHECK_INTERVAL_SECONDS}s)...")
        check_and_update_repo()
        print("Check finished. Sleeping...")
        time.sleep(CHECK_INTERVAL_SECONDS)
@app.route('/')
def index():
    return render_template('index.html')


@app.route('/stream')
def stream():
    def event_stream():
        last_sent_hash = None

        # Send initial state immediately on connection
        with repo_lock:
            current_hash = last_commit_hash
            current_status = status_message
            current_progress = progress_data

        initial_payload = json.dumps({
            "status": current_status,
            "progress": current_progress,
            "last_commit": current_hash
        })
        yield f"data: {initial_payload}\n\n"
        last_sent_hash = current_hash  # Record that we sent the initial state
        print(f"Sent initial state to new client (Hash: {last_sent_hash})")

        # Now wait for subsequent updates
        while True:
            data_updated_event.wait()

            with repo_lock:  # Re-acquire lock to check latest state
                current_hash = last_commit_hash
                current_status = status_message
                current_progress = progress_data

            # Only send if the commit hash has changed since the last send *to this client*
            if current_hash != last_sent_hash:
                print(f"Hash changed ({last_sent_hash} -> {current_hash}). Sending update to client.")
                data_payload = json.dumps({
                    "status": current_status,
                    "progress": current_progress,
                    "last_commit": current_hash
                })
                yield f"data: {data_payload}\n\n"
                last_sent_hash = current_hash
            else:
                # If only the status changed without a hash change, we could optionally send just the status.
                # For now, nothing is sent if the hash matches the last one sent to this client.
                print(f"Data updated event triggered, but hash {current_hash} unchanged for this client.")

    return Response(event_stream(), mimetype="text/event-stream")
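A quick way to watch this stream from another process, sketched with the third-party requests package (an assumption on my part; it is not in requirements.txt and not part of this app):

import requests  # third-party; install separately

with requests.get("http://localhost:5050/stream", stream=True) as resp:
    for line in resp.iter_lines(decode_unicode=True):
        if line and line.startswith("data: "):
            print(line[len("data: "):])  # one JSON payload per SSE message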
if __name__ == '__main__':
    # Ensure the repo directory exists
    if not os.path.exists(REPO_DIR):
        os.makedirs(REPO_DIR)

    # Perform initial check/clone and data load
    print("Performing initial repository check and data load...")
    # check_and_update_repo calls update_progress_data if needed
    initial_update_done = check_and_update_repo()
    # If the repo existed and was up-to-date, run the analysis explicitly
    if not initial_update_done:
        print("Repository present and up-to-date. Running initial analysis...")
        # No need for the lock here, as the background thread isn't running yet
        update_progress_data()

    # Start the background thread for periodic checks
    print("Starting background repository check thread...")
    repo_check_thread = threading.Thread(target=periodic_repo_check, daemon=True)
    repo_check_thread.start()

    # Run the Flask app
    print("Starting Flask server on port 5050...")
    app.run(host='0.0.0.0', port=5050, debug=False, threaded=True)  # threaded=True is needed for SSE
cloned_repo (new submodule)
@@ -0,0 +1 @@
Subproject commit c8aa36809970e0557f46ee80b7f7cf3735efb487
find_missing_units.py (new file, 243 lines)
@@ -0,0 +1,243 @@
import csv
import os
import json
import argparse
import re
from collections import defaultdict  # Use defaultdict for easier dictionary building


def normalize_string(s):
    """Convert string to lowercase and replace hyphens/whitespace with underscores."""
    if not isinstance(s, str):
        return ""
    return re.sub(r'[-\s]+', '_', s.lower())
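For example (hypothetical aliases, not taken from the manifest):

normalize_string("CB-101 East")   # -> 'cb_101_east'
normalize_string("Sorter  A-1")   # -> 'sorter_a_1'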
def get_control_panel_units(csv_filepath):
    """Reads Control Panel, Aliases, and equipment details from the CSV."""
    # Store a list of dictionaries for each panel: [{'alias': 'a1', 'eq': 't1', 'conv': 'c1'}, ...]
    panel_details_list = defaultdict(list)
    # Keep track of unique aliases found, for counting purposes
    unique_aliases = set()

    try:
        with open(csv_filepath, mode='r', encoding='utf-8-sig') as infile:
            reader = csv.reader(infile)
            header = next(reader)
            try:
                panel_index = header.index('Control Panel')
                alias_index = header.index('Alias')
                eq_type_index = header.index('Equipment Type')  # <-- New
                conv_type_index = header.index('Type of Conveyor')  # <-- New
            except ValueError as e:
                print(f"Error: Required column ('Control Panel', 'Alias', 'Equipment Type', or 'Type of Conveyor') not found in {csv_filepath}: {e}")
                return None, 0  # Return None for the map, 0 for the total count

            required_indices = [panel_index, alias_index, eq_type_index, conv_type_index]
            max_index = max(required_indices)

            for row in reader:
                if len(row) > max_index:
                    panel = row[panel_index].strip()
                    alias = row[alias_index].strip()
                    eq_type = row[eq_type_index].strip()  # <-- Read equipment type
                    conv_type = row[conv_type_index].strip()  # <-- Read conveyor type

                    if panel and alias:
                        # Store the details as a dictionary
                        panel_details_list[panel].append({
                            'alias': alias,
                            'equipment_type': eq_type,
                            'conveyor_type': conv_type
                        })
                        # Add the alias to the set for unique counting
                        unique_aliases.add(alias)

    except FileNotFoundError:
        print(f"Error: CSV file not found at {csv_filepath}")
        return None, 0
    except Exception as e:
        print(f"Error reading CSV file {csv_filepath}: {e}")
        return None, 0

    total_csv_aliases = len(unique_aliases)  # The total count is the size of the set

    if not panel_details_list:
        print(f"Warning: No valid Control Panel/Alias pairs found in {csv_filepath}")

    print(f"Loaded {total_csv_aliases} unique aliases (with details) across {len(panel_details_list)} control panels from {csv_filepath}")

    # Return the map of panels to lists of detail dicts, and the total unique alias count
    return panel_details_list, total_csv_aliases
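For reference, the reader above expects a CSV shaped roughly like this (column names are the real ones it looks up; the rows are made up):

Control Panel,Alias,Equipment Type,Type of Conveyor
CP-01,CB-101,Conveyor,Belt
CP-01,CB-102,Conveyor,Roller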
def extract_names_from_json(data, name_set):
    """Recursively extracts and normalizes 'name' properties from JSON data."""
    if isinstance(data, dict):
        if 'name' in data and isinstance(data['name'], str):
            normalized_name = normalize_string(data['name'])
            if normalized_name:
                name_set.add(normalized_name)

        for key, value in data.items():
            if key != 'parent':
                extract_names_from_json(value, name_set)
    elif isinstance(data, list):
        for item in data:
            extract_names_from_json(item, name_set)
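On a minimal made-up view fragment, the recursion collects both names:

names = set()
extract_names_from_json({"name": "CB-101", "children": [{"name": "Label 2"}]}, names)
# names == {'cb_101', 'label_2'}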
def collect_element_names(views_directory):
    """Scans all view.json files and collects all unique, normalized element names."""
    found_names = set()
    if not os.path.isdir(views_directory):
        print(f"Error: Views directory not found at {views_directory}")
        return None

    print(f"Scanning directory: {views_directory} for element names...")
    view_count = 0
    for root, dirs, files in os.walk(views_directory):
        for filename in files:
            if filename.lower() == 'view.json':
                filepath = os.path.join(root, filename)
                view_count += 1
                try:
                    with open(filepath, 'r', encoding='utf-8') as f:
                        view_data = json.load(f)
                        extract_names_from_json(view_data, found_names)
                except json.JSONDecodeError:
                    print(f"Warning: Could not decode JSON for file: {filepath}")
                except Exception as e:
                    print(f"Error processing file {filepath}: {e}")

    print(f"Scanned {view_count} view.json files.")
    return found_names
def calculate_progress(csv_filepath, views_directory):
    """Calculates the progress based on CSV aliases and view element names, including details."""
    panel_details_map, total_csv_aliases = get_control_panel_units(csv_filepath)
    if panel_details_map is None:
        return None
    if not panel_details_map:
        print("No control panel/alias data loaded from CSV.")
        return {"overall": {"total_csv": 0, "missing": 0, "found_json": 0, "percentage": 0, "missing_list": [], "found_list": []}, "panels": {}}

    found_normalized_names = collect_element_names(views_directory)
    if found_normalized_names is None:
        return None
    total_found_json_names = len(found_normalized_names)
    print(f"Found {total_found_json_names} unique normalized element names across all views.")

    progress_data = {
        "overall": {
            "total_csv": total_csv_aliases,
            "missing": 0,
            "found_json": total_found_json_names,
            "percentage": 0,
            "missing_list": [],  # Initialize overall lists
            "found_list": []
        },
        "panels": {}
    }
    total_missing_count = 0
    overall_missing_list_agg = []  # Temp lists for aggregation
    overall_found_list_agg = []

    for panel, details_list in panel_details_map.items():
        panel_total = len(details_list)
        panel_missing_count = 0
        panel_missing_details_list = []
        panel_found_details_list = []

        for item_details in details_list:
            alias = item_details['alias']
            normalized_alias = normalize_string(alias)
            if normalized_alias not in found_normalized_names:
                panel_missing_details_list.append(item_details)
                overall_missing_list_agg.append(item_details)  # Add to overall missing
                panel_missing_count += 1
            else:
                panel_found_details_list.append(item_details)
                overall_found_list_agg.append(item_details)  # Add to overall found

        sorted_missing_list = sorted(panel_missing_details_list, key=lambda x: x['alias'])
        sorted_found_list = sorted(panel_found_details_list, key=lambda x: x['alias'])

        progress_data["panels"][panel] = {
            "total": panel_total,
            "missing": panel_missing_count,
            "missing_list": sorted_missing_list,
            "found_list": sorted_found_list,
            "percentage": round(((panel_total - panel_missing_count) / panel_total * 100), 2) if panel_total > 0 else 0
        }
        total_missing_count += panel_missing_count

    # Finalize overall data
    progress_data["overall"]["missing"] = total_missing_count
    progress_data["overall"]["percentage"] = round(((total_csv_aliases - total_missing_count) / total_csv_aliases * 100), 2) if total_csv_aliases > 0 else 0
    # Sort and store the aggregated overall lists
    progress_data["overall"]["missing_list"] = sorted(overall_missing_list_agg, key=lambda x: x['alias'])
    progress_data["overall"]["found_list"] = sorted(overall_found_list_agg, key=lambda x: x['alias'])

    return progress_data
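For reference, the returned dictionary has this shape (all counts and names below are hypothetical):

{
    "overall": {"total_csv": 120, "missing": 30, "found_json": 450, "percentage": 75.0,
                "missing_list": [...], "found_list": [...]},
    "panels": {
        "CP-01": {"total": 12, "missing": 3, "percentage": 75.0,
                  "missing_list": [{"alias": "CB-101", "equipment_type": "Conveyor",
                                    "conveyor_type": "Belt"}],
                  "found_list": [...]}
    }
}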
def generate_report(progress_data, csv_filename):
    """Generates a list of strings forming the Markdown report."""
    report_lines = []
    report_lines.append("# Report: Missing Aliases by Control Panel")
    report_lines.append("\n---")

    if progress_data["overall"]["missing"] == 0 and progress_data["overall"]["total_csv"] > 0:
        report_lines.append("\nAll aliases from the CSV were found associated with their Control Panels in view element names (case/hyphen/underscore insensitive).")
    elif progress_data["overall"]["total_csv"] == 0:
        report_lines.append("\nNo aliases found in the CSV file.")
    else:
        report_lines.append(f"\nFound **{progress_data['overall']['missing']}** alias(es) from `{csv_filename}` NOT found in any view element name:")
        sorted_panels = sorted(progress_data["panels"].keys())
        for panel in sorted_panels:
            panel_data = progress_data["panels"][panel]
            if panel_data["missing"] > 0:
                report_lines.append(f"\n## {panel}")
                report_lines.append(f"Missing {panel_data['missing']} of {panel_data['total']} ({100 - panel_data['percentage']:.2f}% missing)")
                # Iterate through the list of missing detail dictionaries
                for item in panel_data["missing_list"]:
                    # Display the alias and the other details in the text report
                    report_lines.append(f"- {item['alias']} (Eq: {item['equipment_type']}, Conv: {item['conveyor_type']})")

    report_lines.append("\n---")
    report_lines.append("\nScan complete.")
    return report_lines
# Keep the command-line interface functionality
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Group missing aliases by Control Panel based on CSV and view element names.')
    parser.add_argument('csv_file', help='Path to the CSV file (e.g., "Manifest.csv")')
    parser.add_argument('views_dir', help='Path to the directory containing detailed view folders (e.g., "Detailed-Views")')
    parser.add_argument('-o', '--output', help='Optional path to save the report as a Markdown file (e.g., "report.md")')

    args = parser.parse_args()
    csv_filepath = args.csv_file
    views_directory = args.views_dir

    # Calculate progress data
    progress_results = calculate_progress(csv_filepath, views_directory)

    if progress_results is not None:
        # Generate the report content from the results
        report_lines = generate_report(progress_results, os.path.basename(csv_filepath))

        # Output the report
        if args.output:
            try:
                with open(args.output, 'w', encoding='utf-8') as outfile:
                    for line in report_lines:
                        outfile.write(line + '\n')
                print(f"\nReport successfully saved to: {args.output}")
            except Exception as e:
                print(f"\nError writing report file {args.output}: {e}")
                print("\n--- CONSOLE REPORT FALLBACK ---")
                for line in report_lines:
                    print(line)
        else:
            print("\n".join(report_lines))
    else:
        print("\nFailed to calculate progress. Check errors above.")
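A typical standalone invocation, using the same paths app.py is configured with:

python find_missing_units.py "MTN6 Equipment Manifest REV6(Conveyor List).csv" "cloned_repo/MTN6_SCADA/com.inductiveautomation.perspective/views/Detailed-Views" -o report.md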
requirements.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
Flask
GitPython
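These install with the usual pip install -r requirements.txt; Chart.js and Bootstrap are pulled from CDNs by the template, so they need no entry here.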
templates/index.html (new file, 368 lines)
@@ -0,0 +1,368 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Ignition SCADA Progress Monitor</title>
    <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
    <script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
    <style>
        body { padding: 20px; padding-bottom: 60px; /* Account for status bar */ }
        .progress-container, .chart-container {
            margin-bottom: 25px;
            text-align: center; /* Center chart labels */
        }
        .chart-label {
            font-weight: bold;
            margin-bottom: 5px;
            display: block;
        }
        .status-bar {
            position: fixed;
            bottom: 0;
            left: 0;
            width: 100%;
            background-color: #f8f9fa;
            border-top: 1px solid #dee2e6;
            padding: 5px 15px;
            font-size: 0.9em;
            z-index: 1000;
        }
        /* Style for the overall progress bar */
        .overall-progress-bar {
            height: 25px;
            font-size: 1rem;
        }
        .progress-bar-label {
            position: absolute;
            width: 100%;
            text-align: center;
            line-height: 25px; /* Match overall progress bar height */
            color: white; /* Or black, depending on bar color */
            mix-blend-mode: difference; /* Improve visibility */
            font-weight: bold;
        }
        /* Style for panel charts */
        .panel-chart-canvas {
            max-width: 150px; /* Control pie chart size */
            max-height: 150px;
            margin: 0 auto; /* Center the canvas */
            cursor: pointer; /* Indicate clickable */
        }
        #panels-progress {
            display: grid;
            grid-template-columns: repeat(auto-fill, minmax(200px, 1fr)); /* Responsive grid */
            gap: 20px;
        }
        .modal-body table { width: 100%; }
        .modal-body th, .modal-body td { padding: 5px 10px; border-bottom: 1px solid #eee; }
        .modal-body th { background-color: #f8f9fa; text-align: left; }
    </style>
</head>
<body>
    <div class="container">
        <h1 class="mb-4">SCADA Device Placement Progress</h1>

        <div id="overall-progress" class="chart-container">
            <span class="chart-label">Overall Progress</span>
            <canvas id="overall-chart-canvas" class="panel-chart-canvas" style="max-width: 200px; max-height: 200px;"></canvas>
            <div id="overall-text" style="font-weight: bold; margin-top: 10px;">0/0 (0%)</div>
        </div>

        <hr>

        <h2>Progress by Control Panel</h2>
        <div id="panels-progress">
            <!-- Charts will be loaded here -->
            <p>Loading panel data...</p>
        </div>
    </div>

    <!-- Status Bar -->
    <div class="status-bar">
        <span id="status-message">Initializing...</span> | Last Commit: <span id="last-commit">N/A</span>
    </div>

    <!-- Bootstrap Modal for Details -->
    <div class="modal fade" id="detailsModal" tabindex="-1" aria-labelledby="detailsModalLabel" aria-hidden="true">
        <div class="modal-dialog modal-lg">
            <div class="modal-content">
                <div class="modal-header">
                    <h5 class="modal-title" id="detailsModalLabel">Details for Panel: <span></span></h5>
                    <button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
                </div>
                <div class="modal-body">
                    <table class="table table-sm table-striped">
                        <thead>
                            <tr>
                                <th>Alias</th>
                                <th>Equipment Type</th>
                                <th>Type of Conveyor</th>
                            </tr>
                        </thead>
                        <tbody>
                            <!-- Missing/Found items will be populated here -->
                        </tbody>
                    </table>
                </div>
                <div class="modal-footer">
                    <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
                </div>
            </div>
        </div>
    </div>

    <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
    <script>
        let chartInstances = {};
        // Store the *entire* progress object now
        let progressDetailsData = {};
        let detailsModalInstance = null;

        // --- Chart Click Handler ---
        function handleChartClick(event, elements, chart) {
            if (elements.length > 0) {
                const clickedElementIndex = elements[0].index;
                // Check if the clicked chart is the overall chart
                const isOverallChart = chart.canvas.id === 'overall-chart-canvas';
                // Determine the panel name, or use a special key for the overall chart
                const identifier = isOverallChart ? '__overall__' : chart.canvas.id.replace('chart-', '');

                if (clickedElementIndex === 0) { // Clicked on the 'Found' segment
                    showDetailsModal(identifier, 'found');
                } else if (clickedElementIndex === 1) { // Clicked on the 'Missing' segment
                    showDetailsModal(identifier, 'missing');
                }
            }
        }
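        // Editor's note: the (event, elements, chart) onClick signature above matches
        // Chart.js v3+. The CDN URL in <head> pins no version, so a future major
        // Chart.js release could change this callback shape.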
        function updateUI(data) {
            console.log("Updating UI with data:", data);

            // Store the entire progress object
            progressDetailsData = data.progress;

            // Update status
            document.getElementById('status-message').textContent = data.status;
            document.getElementById('last-commit').textContent = data.last_commit || 'N/A';

            // Update the overall progress chart
            const overallData = progressDetailsData.overall; // Use stored data
            const overallPercentage = overallData.percentage;
            const overallFound = overallData.total_csv - overallData.missing;
            const overallMissing = overallData.missing;
            const overallTotal = overallData.total_csv;
            document.getElementById('overall-text').textContent = `${overallFound}/${overallTotal} (${overallPercentage}%)`;
            const overallChartData = { labels: ['Found', 'Missing'], datasets: [{ label: 'Overall Aliases', data: [overallFound, overallMissing], backgroundColor: ['rgb(25, 135, 84)', 'rgb(220, 53, 69)'], hoverOffset: 4 }] };

            const overallCanvas = document.getElementById('overall-chart-canvas');
            if (chartInstances['overall']) {
                chartInstances['overall'].data = overallChartData;
                chartInstances['overall'].update();
            } else if (overallCanvas) {
                const ctxOverall = overallCanvas.getContext('2d');
                chartInstances['overall'] = new Chart(ctxOverall, {
                    type: 'pie', data: overallChartData,
                    options: {
                        responsive: true,
                        maintainAspectRatio: false,
                        onClick: handleChartClick, // <-- Add click handler to overall chart
                        plugins: {
                            legend: { display: false },
                            tooltip: {
                                callbacks: {
                                    label: function(context) {
                                        let label = context.label || '';
                                        if (label) label += ': ';
                                        if (context.parsed !== null) label += context.parsed;
                                        if (overallTotal > 0) { label += ` (${((context.parsed / overallTotal) * 100).toFixed(1)}%)`; }
                                        return label;
                                    }
                                }
                            }
                        }
                    }
                });
            }

            // Update panel charts
            const panelsContainer = document.getElementById('panels-progress');
            const panelsData = progressDetailsData.panels; // Use stored data
            const sortedPanels = Object.keys(panelsData).sort();
            const currentPanels = new Set(sortedPanels);
            const existingPanelCharts = new Set(Object.keys(chartInstances).filter(k => k !== 'overall'));

            existingPanelCharts.forEach(panelName => {
                if (!currentPanels.has(panelName)) {
                    if (chartInstances[panelName]) { chartInstances[panelName].destroy(); delete chartInstances[panelName]; }
                    const chartElement = document.getElementById(`chart-container-${panelName}`);
                    if (chartElement) chartElement.remove();
                }
            });

            if (sortedPanels.length === 0) {
                panelsContainer.innerHTML = '<p>No panel data available yet.</p>';
            } else {
                sortedPanels.forEach(panelName => {
                    const panel = panelsData[panelName];
                    const found = panel.total - panel.missing;
                    const missing = panel.missing;
                    const total = panel.total;

                    let chartContainer = document.getElementById(`chart-container-${panelName}`);
                    let canvas = document.getElementById(`chart-${panelName}`);

                    if (!chartContainer) {
                        chartContainer = document.createElement('div');
                        chartContainer.id = `chart-container-${panelName}`;
                        chartContainer.className = 'chart-container';
                        const label = document.createElement('span');
                        label.className = 'chart-label'; label.textContent = panelName;
                        canvas = document.createElement('canvas');
                        canvas.id = `chart-${panelName}`;
                        canvas.className = 'panel-chart-canvas';
                        chartContainer.appendChild(label);
                        chartContainer.appendChild(canvas);
                        panelsContainer.appendChild(chartContainer);
                        const loadingMsg = panelsContainer.querySelector('p');
                        if (loadingMsg && loadingMsg.textContent.includes('Loading')) { loadingMsg.remove(); }
                    } else {
                        if (!canvas) {
                            canvas = document.createElement('canvas');
                            canvas.id = `chart-${panelName}`;
                            canvas.className = 'panel-chart-canvas';
                            chartContainer.appendChild(canvas);
                        }
                    }

                    const chartData = { labels: ['Found', 'Missing'], datasets: [{ label: 'Aliases', data: [found, missing], backgroundColor: ['rgb(25, 135, 84)', 'rgb(220, 53, 69)'], hoverOffset: 4 }] };

                    if (chartInstances[panelName]) {
                        chartInstances[panelName].data = chartData;
                        chartInstances[panelName].update();
                    } else if (canvas) {
                        const ctx = canvas.getContext('2d');
                        chartInstances[panelName] = new Chart(ctx, {
                            type: 'pie',
                            data: chartData,
                            options: {
                                responsive: true,
                                maintainAspectRatio: false,
                                onClick: handleChartClick,
                                plugins: {
                                    legend: { display: false },
                                    tooltip: {
                                        callbacks: {
                                            label: function(context) {
                                                let label = context.label || '';
                                                if (label) label += ': ';
                                                if (context.parsed !== null) label += context.parsed;
                                                if (total > 0) { label += ` (${((context.parsed / total) * 100).toFixed(1)}%)`; }
                                                return label;
                                            }
                                        }
                                    }
                                }
                            }
                        });
                    }
                });
                const loadingMsgCheck = panelsContainer.querySelector('p');
                if (loadingMsgCheck && loadingMsgCheck.textContent.includes('Loading')) {
                    loadingMsgCheck.remove();
                }
            }
        }
        // Updated function to handle overall chart clicks
        function showDetailsModal(identifier, listType) {
            let dataList = null;
            let panelName = identifier; // Default: the identifier is the panel name
            const listTypeName = (listType === 'found') ? 'Found' : 'Missing';

            if (identifier === '__overall__') {
                // Handle overall chart click
                const overallData = progressDetailsData.overall;
                dataList = (listType === 'found') ? overallData.found_list : overallData.missing_list;
                panelName = "Overall"; // Set display name for modal title
            } else {
                // Handle panel chart click
                const panelData = progressDetailsData.panels[identifier]; // Use identifier as panelName
                if (panelData) {
                    dataList = (listType === 'found') ? panelData.found_list : panelData.missing_list;
                } else {
                    console.error("Panel data not found for identifier:", identifier);
                    return; // Exit if panel data doesn't exist
                }
            }

            if (!dataList || dataList.length === 0) {
                console.log(`No ${listTypeName} items to show for:`, panelName);
                alert(`No ${listTypeName} items found for ${panelName}.`);
                return;
            }

            const modalTitleElement = document.getElementById('detailsModalLabel');
            const modalTitleSpan = modalTitleElement.querySelector('span');
            const modalTableBody = document.querySelector('#detailsModal .modal-body tbody');

            // Update the modal title
            modalTitleElement.childNodes[0].nodeValue = `${listTypeName} Items for ${panelName}: `;
            modalTitleSpan.textContent = ""; // Clear the span, since the main text carries the name
            // Or, if keeping the span: modalTitleSpan.textContent = panelName;

            modalTableBody.innerHTML = ''; // Clear previous entries

            dataList.forEach(item => {
                const row = document.createElement('tr');
                const aliasCell = document.createElement('td');
                aliasCell.textContent = item.alias;
                row.appendChild(aliasCell);
                const eqTypeCell = document.createElement('td');
                eqTypeCell.textContent = item.equipment_type || 'N/A';
                row.appendChild(eqTypeCell);
                const convTypeCell = document.createElement('td');
                convTypeCell.textContent = item.conveyor_type || 'N/A';
                row.appendChild(convTypeCell);
                modalTableBody.appendChild(row);
            });

            if (!detailsModalInstance) {
                detailsModalInstance = new bootstrap.Modal(document.getElementById('detailsModal'));
            }
            detailsModalInstance.show();
        }
        // --- Connect to SSE stream ---
        const eventSource = new EventSource("/stream");

        eventSource.onmessage = function(event) {
            console.log("SSE message received:", event.data);
            try {
                const data = JSON.parse(event.data);
                updateUI(data); // Call the UI update function with the new data
            } catch (error) {
                console.error("Error parsing SSE data:", error);
                document.getElementById('status-message').textContent = 'Error processing update from server.';
            }
        };

        eventSource.onerror = function(err) {
            console.error("EventSource failed:", err);
            document.getElementById('status-message').textContent = 'Connection to server lost. Retrying...';
        };

        // No separate initial fetch is needed: the /stream endpoint sends the current
        // state as its first SSE message, and a plain fetch() of the same endpoint
        // would only open a second, never-closed stream on the server.
    </script>
</body>
</html>