# Flask dashboard: monitors a Git repository of Ignition Perspective views and
# streams view-creation progress (CSV manifest vs. JSON views) to browsers via SSE.
import os
|
|
import threading
|
|
import time
|
|
import json
|
|
from flask import Flask, render_template, jsonify, Response
|
|
import git
|
|
from find_missing_units import calculate_progress # Import the refactored function
|
|
|
|
app = Flask(__name__)

# --- Configuration ---
REPO_URL = "http://192.168.5.191:3000/LCI/MTN6"  # Git remote to monitor
REPO_DIR = "./cloned_repo" # Directory to clone the repo into
BRANCH = "main"  # branch that is cloned and pulled
CSV_FILENAME = "MTN6 Equipment Manifest REV6(Conveyor List).csv"  # manifest fed to calculate_progress()
VIEWS_DIR_RELATIVE = "MTN6_SCADA/com.inductiveautomation.perspective/views/Detailed-Views"  # views folder inside the clone
CHECK_INTERVAL_SECONDS = 60 # Check repo more frequently now, maybe every minute?

# --- Global state ---
# Shared between the background checker thread and the /stream SSE generators.
# Last known HEAD commit hash; None until first read, and may hold an error
# string after a failed repository check (see check_and_update_repo).
last_commit_hash = None
# Latest result of calculate_progress(); seeded with an all-zero placeholder
# of the same shape so /stream always has something to send.
progress_data = { "overall": {"total_csv": 0, "missing": 0, "found_json": 0, "percentage": 0}, "panels": {} }
# Human-readable status line pushed to SSE clients.
status_message = "Initializing..."
repo_lock = threading.Lock() # Lock for accessing repo and shared data
data_updated_event = threading.Event() # Event to signal data updates (pulsed: set then immediately cleared)
def get_repo_path():
    """Return the absolute path of the local clone directory."""
    repo_dir = REPO_DIR
    return os.path.abspath(repo_dir)
def get_csv_path():
    """Return the absolute path of the manifest CSV (resolved from the CWD)."""
    manifest = CSV_FILENAME
    return os.path.abspath(manifest)
def get_views_dir_path():
    """Return the absolute path of the Detailed-Views folder inside the clone."""
    base = get_repo_path()
    return os.path.join(base, VIEWS_DIR_RELATIVE)
def update_progress_data():
    """Re-run the CSV-vs-views analysis and publish the result to module globals.

    Validates that the manifest CSV and the cloned views directory exist,
    calls calculate_progress(), then updates `status_message` (always) and
    `progress_data` (only on success). Finally pulses `data_updated_event`
    so connected /stream clients re-check the shared state.

    NOTE(review): this function does not take `repo_lock` itself; the
    background checker calls it while holding the lock, and the one startup
    call happens before the checker thread starts — verify no other callers
    are added without locking.
    """
    global progress_data, status_message
    csv_path = get_csv_path()
    views_dir = get_views_dir_path()
    current_status = ""
    new_data_calculated = None

    if not os.path.exists(csv_path):
        current_status = f"Error: CSV file not found at {csv_path}"
    elif not os.path.exists(views_dir):
        current_status = f"Error: Views directory not found at {views_dir}"
    else:
        print(f"Running analysis: CSV='{csv_path}', Views='{views_dir}'")
        current_status = "Calculating progress..."
        try:
            new_data_calculated = calculate_progress(csv_path, views_dir)
            if new_data_calculated:
                current_status = f"Progress updated successfully at {time.strftime('%Y-%m-%d %H:%M:%S')}"
            else:
                current_status = "Error: Failed to calculate progress (script returned None)."
        except Exception as e:
            current_status = f"Error running analysis script: {e}"
            new_data_calculated = None # Ensure no partial data update

    # Update global state ONLY AFTER calculation (inside lock if modifying multiple related vars)
    # In this case, modifying status and potentially progress_data
    print(current_status)
    status_message = current_status # Update status regardless of success/failure
    if new_data_calculated is not None:
        progress_data = new_data_calculated

    # Signal that an update attempt finished (even if it failed, status changed)
    # NOTE(review): set() immediately followed by clear() is a "pulse" — only
    # threads already blocked in wait() are woken; a thread that reaches
    # wait() just after clear() misses this update until the next pulse.
    data_updated_event.set()
    data_updated_event.clear() # Reset event for next update
def check_and_update_repo():
    """Clone or fetch/pull the monitored repository, re-running analysis on change.

    Holds `repo_lock` for the entire git interaction. On a fresh clone or a
    successful pull, update_progress_data() is invoked (which also pulses the
    SSE event); otherwise the status message is refreshed and
    `data_updated_event` is pulsed directly so clients see the new status.

    Returns:
        bool: True if the working tree changed (clone or pull) and the
        analysis was re-run, False otherwise.
    """
    global last_commit_hash, status_message
    repo_path = get_repo_path()
    did_update = False # Flag to track if files were actually updated
    initial_hash = last_commit_hash # Store hash before check

    with repo_lock:
        try:
            repo_existed = os.path.exists(os.path.join(repo_path, ".git"))
            if not repo_existed:
                # First run: clone the configured branch and record its HEAD.
                print(f"Cloning repository {REPO_URL} into {repo_path}...")
                status_message = f"Cloning repository {REPO_URL}..."
                git.Repo.clone_from(REPO_URL, repo_path, branch=BRANCH)
                repo = git.Repo(repo_path)
                last_commit_hash = repo.head.commit.hexsha
                print(f"Initial clone complete. Commit: {last_commit_hash}")
                did_update = True # Cloned, so considered an update
            else:
                repo = git.Repo(repo_path)
                print("Fetching updates from remote...")
                current_local_commit = repo.head.commit.hexsha
                # Update hash *before* fetch in case fetch fails but commit was readable
                if last_commit_hash is None:
                    last_commit_hash = current_local_commit
                origin = repo.remotes.origin
                # Fetch so origin/<BRANCH> reflects the remote; the returned
                # FetchInfo list was previously inspected but never used.
                origin.fetch()

                current_remote_commit = repo.commit(f'origin/{BRANCH}').hexsha
                print(f"Local commit: {current_local_commit}, Remote commit: {current_remote_commit}")

                if current_local_commit != current_remote_commit:
                    print("New commit detected! Pulling changes...")
                    status_message = "Pulling updates..."
                    try:
                        origin.pull()
                        new_commit_hash = repo.head.commit.hexsha
                        print(f"Pull successful. New commit: {new_commit_hash}")
                        last_commit_hash = new_commit_hash
                        did_update = True # Pulled, so considered an update
                    except git.GitCommandError as e:
                        status_message = f"Error pulling repository: {e}"
                        print(status_message)
                        # Revert hash if pull failed
                        last_commit_hash = current_local_commit
                else:
                    print("No new commits detected.")
                    # No code change, but update status if needed
                    if not status_message.startswith("Error"):
                        status_message = f"Checked at {time.strftime('%Y-%m-%d %H:%M:%S')}. No changes."

            # Run analysis IF the repo was updated (cloned or pulled)
            if did_update:
                update_progress_data()
            # If no git update, but status changed (e.g., to "No changes"), signal SSE
            elif initial_hash == last_commit_hash:
                data_updated_event.set() # Signal status change event
                data_updated_event.clear()

        except git.GitCommandError as e:
            status_message = f"Git command error: {e}"
            print(status_message)
            # Try to get commit hash even if failed
            try:
                if os.path.exists(os.path.join(repo_path, ".git")):
                    repo = git.Repo(repo_path)
                    # Use previous hash if available, else try to read current
                    if last_commit_hash is None:
                        last_commit_hash = repo.head.commit.hexsha
            except Exception:
                if last_commit_hash is None:
                    last_commit_hash = "Error reading commit"
            data_updated_event.set() # Signal error status change
            data_updated_event.clear()
        except Exception as e:
            status_message = f"Error checking repository: {e}"
            print(status_message)
            if last_commit_hash is None:
                last_commit_hash = "Error checking repo"
            data_updated_event.set() # Signal error status change
            data_updated_event.clear()
    # Return true if analysis was run, false otherwise
    return did_update
def periodic_repo_check():
    """Background polling loop; never returns.

    Calls check_and_update_repo() and then sleeps for CHECK_INTERVAL_SECONDS,
    forever. Intended to run in a daemon thread.
    """
    interval = CHECK_INTERVAL_SECONDS
    while True:
        print(f"\nStarting periodic repository check (Interval: {interval}s)...")
        check_and_update_repo()
        print(f"Check finished. Sleeping...")
        time.sleep(interval)
@app.route('/')
def index():
    """Render and return the dashboard page."""
    page = render_template('index.html')
    return page
@app.route('/stream')
def stream():
    """Server-Sent Events endpoint.

    Each client immediately receives a snapshot of status/progress/commit,
    then further updates whenever `data_updated_event` pulses AND the commit
    hash differs from what this particular client last received.
    """
    def event_stream():
        # Commit hash most recently pushed to THIS client; None until the
        # initial snapshot below has been sent.
        last_sent_hash = None

        # Send initial state immediately on connection
        with repo_lock:
            current_hash = last_commit_hash
            current_status = status_message
            current_progress = progress_data

        initial_payload = json.dumps({
            "status": current_status,
            "progress": current_progress,
            "last_commit": current_hash
        })
        yield f"data: {initial_payload}\n\n"
        last_sent_hash = current_hash # Record that we sent the initial state
        print(f"Sent initial state to new client (Hash: {last_sent_hash})")

        # Now wait for subsequent updates
        while True:
            # NOTE(review): relies on the producer's set()/clear() pulse; if
            # the event is cleared before this generator reaches wait(), that
            # update is missed until the next pulse fires.
            data_updated_event.wait()

            with repo_lock: # Re-acquire lock to check latest state
                current_hash = last_commit_hash
                current_status = status_message
                current_progress = progress_data

            # Only send if commit hash has changed since last send *to this client*
            if current_hash != last_sent_hash:
                print(f"Hash changed ({last_sent_hash} -> {current_hash}). Sending update to client.")
                data_payload = json.dumps({
                    "status": current_status,
                    "progress": current_progress,
                    "last_commit": current_hash
                })
                yield f"data: {data_payload}\n\n"
                last_sent_hash = current_hash
            else:
                # If only status changed without hash change, we could optionally send just status
                # For now, we don't send anything if hash is same as last sent to this client
                print(f"Data updated event triggered, but hash {current_hash} unchanged for this client.")

    return Response(event_stream(), mimetype="text/event-stream")
if __name__ == '__main__':
    # Ensure repo directory exists
    if not os.path.exists(REPO_DIR):
        os.makedirs(REPO_DIR)

    # Perform initial check/clone and data load
    print("Performing initial repository check and data load...")
    # Run check_and_update_repo which calls update_progress_data if needed
    initial_update_done = check_and_update_repo()
    # If repo existed and was up-to-date, run analysis explicitly
    if not initial_update_done:
        print("Repository present and up-to-date. Running initial analysis...")
        # No need for lock here as background thread isn't running yet
        update_progress_data()

    # Start the background thread for periodic checks
    print("Starting background repository check thread...")
    # daemon=True so the poller dies with the main process
    repo_check_thread = threading.Thread(target=periodic_repo_check, daemon=True)
    repo_check_thread.start()

    # Run the Flask app
    print("Starting Flask server on port 5050...")
    app.run(host='0.0.0.0', port=5050, debug=False, threaded=True) # Use threaded=True for SSE