286 lines
12 KiB
Python
286 lines
12 KiB
Python
from __future__ import annotations
|
|
|
|
import xml.etree.ElementTree as ET
|
|
from typing import List
|
|
|
|
import pandas as pd
|
|
|
|
from ..plugin_system import RoutinePlugin
|
|
from ..logging_config import get_logger
|
|
from ..utils.tag_utils import device_base_from_desca, epc_base_from_term, is_estop_desca
|
|
from ..utils.common import natural_sort_key
|
|
|
|
|
|
def create_zones_routine(routines: ET.Element, zones_df: pd.DataFrame, epc_df: pd.DataFrame) -> None:
    """Create R030_ZONES routine using zones.json and EPC-derived DCS outputs.

    For each zone entry (name like 01-01) we generate a rung:
    AND of DCS outputs (.O1) for all EPC bases within the zone's start/stop
    range (e.g., UL1_1..UL1_13 => UL1_3, UL1_4, UL1_9...).
    OTE(EStop_<ZONE>_OK) with zone name normalized to underscores.

    The master EStop_MCM_OK rung is tied directly to the MCM EPB DCS output
    bit (config-driven) rather than aggregating the per-zone OK bits.

    Args:
        routines: Parent ``<Routines>`` element; the new routine is appended here.
        zones_df: Zone table from zones.json. Expected columns: ``name``,
            ``interlock`` (string or list), and either a ``ranges`` list of
            ``{start, stop}`` dicts (new format) or flat ``start``/``stop``
            columns (old format).
        epc_df: EPC terminal table; columns ``TERM``, ``TAGNAME`` and ``DESCA``
            are consulted to determine which DCS controller tags actually exist
            (must match what the ESTOPS routine created).

    Returns:
        None. Mutates *routines* in place by appending a Routine subtree.
    """
    routine = ET.SubElement(routines, "Routine", Name="R030_ZONES", Type="RLL")
    rll = ET.SubElement(routine, "RLLContent")

    # Normalize a zone/interlock label to tag-safe form: "01-01" -> "01_01".
    def norm(s: str) -> str:
        return str(s).strip().replace('-', '_')

    rows = []
    for _, row in zones_df.iterrows():
        name = str(row.get('name', '')).strip()
        if not name:
            continue
        # Handle both single interlock (string) and multiple interlocks (array)
        interlock_raw = row.get('interlock', '')
        if isinstance(interlock_raw, list):
            interlocks = [str(il).strip() for il in interlock_raw if str(il).strip()]
        else:
            interlocks = [str(interlock_raw).strip()] if str(interlock_raw).strip() else []

        rows.append({
            'name': name,
            'name_norm': norm(name),
            'interlock': str(row.get('interlock', '')).strip(),  # Keep for backward compatibility
            'interlocks': interlocks,  # New multiple interlocks field
            'interlock_norm': norm(row.get('interlock', '')) if str(row.get('interlock', '')).strip() else '',
            'interlocks_norm': [norm(il) for il in interlocks]  # Normalized multiple interlocks
        })

    logger = get_logger(__name__)

    # Topological-ish order: repeatedly add items whose interlock already placed or empty
    ordered: List[dict] = []
    placed = set()
    remaining = rows.copy()
    # guard against cycles with max iterations
    for _ in range(len(rows) + 5):
        progressed = False
        next_remaining = []
        for r in remaining:
            # Check if all interlocks are satisfied (placed)
            interlocks_satisfied = True
            for interlock in r['interlocks']:
                interlock_norm = norm(interlock)
                if interlock and interlock_norm not in placed:
                    interlocks_satisfied = False
                    break

            if not r['interlocks'] or interlocks_satisfied:
                ordered.append(r)
                placed.add(r['name_norm'])
                progressed = True
            else:
                next_remaining.append(r)
        remaining = next_remaining
        if not remaining or not progressed:
            # append any unresolved to keep moving (cycle or dangling interlock)
            ordered.extend(remaining)
            break

    # Build available DCS controllers from EPC dataframe (matches what ESTOPS created)
    available_dcs: dict[str, set[str]] = {}
    try:
        si_epc_df = epc_df[epc_df["TERM"].str.startswith("SI", na=False)] if "TERM" in epc_df.columns else epc_df.iloc[0:0]
        # Ensure TAGNAME column exists before groupby
        if "TAGNAME" not in si_epc_df.columns:
            si_epc_df = si_epc_df.copy()
            si_epc_df["TAGNAME"] = ""
        epc_groups = si_epc_df.groupby("TAGNAME") if len(si_epc_df) > 0 else []
        for tagname, group in epc_groups:
            if group.empty:
                continue
            first_desca = str(group.iloc[0]["DESCA"])
            base_name = device_base_from_desca(first_desca)
            present: set[str] = set()
            # Skip ESTOP devices - do not create XIC logic for ESTOP DCS controllers in zones
            if is_estop_desca(first_desca):
                continue
            # Only gather EPC1/EPC2 per SI terminals present
            for _, row in group.iterrows():
                eb = epc_base_from_term(str(row.get("TERM", "")))
                if eb:
                    present.add(eb)
            if present:
                available_dcs[base_name] = present
                logger.debug(f"Found DCS controllers for {base_name}: {present}")
    except Exception as e:
        # If anything goes wrong, leave available_dcs empty (no rungs will be emitted)
        logger.warning(f"Failed to build available DCS controllers: {e}")
        available_dcs = {}

    logger.debug(f"Available DCS controllers: {available_dcs}")

    # Generate rungs using available EPC DCS tags only
    rung_num = 0
    # We no longer aggregate top-level zone OKs into EStop_MCM_OK; that is driven by MCM_EPB_DCS_CTRL.O1

    # Helper to parse token like UL1_13 into (prefix='UL1', num=13)
    def parse_ul(token: str) -> tuple[str, int] | None:
        try:
            parts = token.split('_')
            return parts[0], int(parts[1])
        except Exception:
            return None

    # Helper to process range data from zones
    def get_zone_candidates(zone_row) -> List[str]:
        """Extract all device candidates from a zone (supporting both old start/stop and new ranges format)."""
        candidates = []

        # Check if zone has ranges field (new format)
        ranges_data = zone_row.get('ranges')
        if ranges_data and isinstance(ranges_data, list) and len(ranges_data) > 0:
            # Process multiple ranges
            for range_item in ranges_data:
                start_tok = str(range_item.get('start', '')).strip()
                stop_tok = str(range_item.get('stop', '')).strip()

                # Skip empty ranges
                if not start_tok:
                    continue

                # Handle single device case (start == stop or only start provided)
                if not stop_tok or start_tok == stop_tok:
                    candidates.append(start_tok)
                    continue

                # Handle range case (start != stop)
                bounds_s = parse_ul(start_tok)
                bounds_e = parse_ul(stop_tok)
                if bounds_s and bounds_e and bounds_s[0] == bounds_e[0]:
                    prefix = bounds_s[0]
                    lo, hi = sorted([bounds_s[1], bounds_e[1]])
                    candidates.extend([f"{prefix}_{i}" for i in range(lo, hi + 1)])
                else:
                    # If parsing fails, add both tokens individually
                    candidates.append(start_tok)
                    if stop_tok != start_tok:
                        candidates.append(stop_tok)
        else:
            # Fallback to old start/stop format
            start_tok = str(zone_row.get('start', '')).strip()
            stop_tok = str(zone_row.get('stop', '')).strip()
            if start_tok and stop_tok:
                if start_tok == stop_tok:
                    # Single device
                    candidates.append(start_tok)
                else:
                    # Range
                    bounds_s = parse_ul(start_tok)
                    bounds_e = parse_ul(stop_tok)
                    if bounds_s and bounds_e and bounds_s[0] == bounds_e[0]:
                        prefix = bounds_s[0]
                        lo, hi = sorted([bounds_s[1], bounds_e[1]])
                        candidates.extend([f"{prefix}_{i}" for i in range(lo, hi + 1)])
                    else:
                        # If parsing fails, add both tokens
                        candidates.append(start_tok)
                        candidates.append(stop_tok)
            elif start_tok:
                # Only start token provided
                candidates.append(start_tok)

        # Remove duplicates while preserving order
        seen = set()
        unique_candidates = []
        for candidate in candidates:
            if candidate not in seen:
                seen.add(candidate)
                unique_candidates.append(candidate)

        return unique_candidates

    # For each zone, assemble the DCS XICs - sort zones naturally
    for r in sorted(ordered, key=lambda x: natural_sort_key(x['name'])):
        zone_name = r['name']
        if zone_name.upper().startswith('MCM'):
            # root marker; skip rung here, we'll compute master later
            continue

        # Match on the stripped name column: zone_name was stripped when rows
        # were built, so raw whitespace in the source cell must not break lookup.
        zone_row = zones_df[zones_df['name'].astype(str).str.strip() == zone_name].iloc[0]
        candidates = get_zone_candidates(zone_row)
        logger.debug(f"Zone {zone_name} candidates: {candidates}")

        # Build XIC chain for EPC1/EPC2 DCS outputs that actually exist per ESTOPS
        xic_parts: List[str] = []
        included_dcs: List[str] = []
        # Sort candidates naturally and then sort labels naturally within each base
        for base in sorted(candidates, key=natural_sort_key):
            dc_set = available_dcs.get(base, set())
            if dc_set:
                for label in sorted(dc_set, key=natural_sort_key):  # natural sort order
                    dcs_ref = f"{base}_{label}_DCS_CTRL.O1"
                    xic_parts.append(f"XIC({dcs_ref})")
                    included_dcs.append(dcs_ref)
            else:
                # If no DCS set found for this base, check if it should be included anyway
                # This handles cases where the device exists but wasn't properly categorized
                logger.debug(f"No DCS controllers found for base {base} in zone {zone_name}")

        ok_tag = f"EStop_{zone_name.replace('-', '_')}_OK"
        rung = ET.SubElement(rll, "Rung", Number=str(rung_num), Type="N")
        text = ET.SubElement(rung, "Text")

        # If zone has no estop1 or epc, just add the OTE
        if not xic_parts:
            text.text = f"OTE({ok_tag});"
        else:
            text.text = ''.join(xic_parts) + f"OTE({ok_tag});"
        rung_num += 1
        # f-string form for consistency with the other logger calls in this
        # function (the kwargs form raises TypeError on a stdlib logger).
        logger.debug(f"Zones: rung zone={zone_name} interlock={r['interlock']} dcs_list={included_dcs}")

    # Master EStop_MCM_OK is tied directly to the MCM EPB DCS output bit (config-driven)
    from ..config import get_config
    cfg = get_config()
    mcm_epb_o1 = getattr(cfg.routines, 'mcm_epb_tag', 'MCM_EPB_DCS_CTRL.O1')
    top_ok = getattr(cfg.routines, 'top_level_estop_ok_tag', 'EStop_MCM_OK')
    rung = ET.SubElement(rll, "Rung", Number=str(rung_num), Type="N")
    text = ET.SubElement(rung, "Text")
    text.text = f"XIC({mcm_epb_o1})OTE({top_ok});"
|
|
class ZonesRoutinePlugin(RoutinePlugin):
    """Plugin wrapper that emits the R030_ZONES routine from configured zones."""

    name = "zones"
    description = "Generates the R030_ZONES routine from configured zones"
    category = "safety"

    def can_generate(self) -> bool:
        """Report whether zone data is available (a default single MCM row counts)."""
        try:
            zones = self.context.data_loader.zones
        except Exception:
            return False
        return zones is not None

    def generate(self) -> bool:
        """Build the R030_ZONES routine into the shared routines element."""
        loader = self.context.data_loader
        create_zones_routine(
            self.context.routines_element,
            loader.zones,
            loader.epc,
        )
        return True
|
|
|
|
|