first commit
This commit is contained in:
commit
f251cc7e25
7
.cursor/rules/000-always-context.mdc
Normal file
7
.cursor/rules/000-always-context.mdc
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
---
|
||||||
|
description: "Core config and utilities"
|
||||||
|
globs:
|
||||||
|
- "project_config.md"
|
||||||
|
- "workflow_state.md"
|
||||||
|
alwaysApply: true
|
||||||
|
---
|
||||||
7
.env_linux
Normal file
7
.env_linux
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
# Linux Development Environment Configuration
|
||||||
|
FLASK_ENV=development
|
||||||
|
FLASK_DEBUG=1
|
||||||
|
PYTHONPATH=${PWD}:${PWD}/src
|
||||||
|
PLATFORM=linux
|
||||||
|
HYBRID_COMPILATION=true
|
||||||
|
WINDOWS_SHARE_PATH=/mnt/c/Users/ilia.gurielidze/plc_compilation
|
||||||
4
.gitignore
vendored
Normal file
4
.gitignore
vendored
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
**.ACD
|
||||||
|
**.SEM
|
||||||
|
**.L5X
|
||||||
|
**.WRK
|
||||||
BIN
DESC_IP_MERGED.xlsx
Normal file
BIN
DESC_IP_MERGED.xlsx
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
577
IO Tree Configuration Generator/controller_builder.py
Normal file
577
IO Tree Configuration Generator/controller_builder.py
Normal file
@ -0,0 +1,577 @@
|
|||||||
|
"""ControllerBuilder
|
||||||
|
====================
|
||||||
|
|
||||||
|
Responsible for building the static parts of a 1756-L83ES controller project:
|
||||||
|
• Creates the base controller boilerplate (Local slot, etc.)
|
||||||
|
• Injects fixed chassis modules (EN4TR, IB16, OB16E, IB16S)
|
||||||
|
• Provides a hook (`modules_section`) so external callers can append additional
|
||||||
|
module <Module> elements (e.g. Excel-driven families).
|
||||||
|
• Finalises the project (programs/tasks, auxiliary sections, export date) and
|
||||||
|
writes it to an L5X file while preserving CDATA blocks.
|
||||||
|
|
||||||
|
The class intentionally keeps **zero** knowledge about Excel or individual
|
||||||
|
module families beyond the fixed chassis modules; that logic stays in
|
||||||
|
`EnhancedMCMGenerator` (or higher-level orchestrators).
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
# Base controller & fixed slot models
|
||||||
|
from models.l83es_boilerplate_model import (
|
||||||
|
create_l83es_controller,
|
||||||
|
L83ESControllerGenerator,
|
||||||
|
)
|
||||||
|
from models.en4tr_boilerplate_model import (
|
||||||
|
create_en4tr_module,
|
||||||
|
EN4TRModuleGenerator,
|
||||||
|
)
|
||||||
|
from models.ib16_boilerplate_model import (
|
||||||
|
create_ib16_module,
|
||||||
|
IB16ModuleGenerator,
|
||||||
|
)
|
||||||
|
from models.ob16e_boilerplate_model import (
|
||||||
|
create_ob16e_module,
|
||||||
|
OB16EModuleGenerator,
|
||||||
|
)
|
||||||
|
from models.ib16s_boilerplate_model import (
|
||||||
|
create_ib16s_module,
|
||||||
|
IB16SModuleGenerator,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def validate_export_date(export_date_str: str) -> bool:
    """Validate that ExportDate is not in the future.

    Args:
        export_date_str: ExportDate string in Rockwell format (e.g., "Wed Jul 03 11:47:56 2024")

    Returns:
        True if valid (not in future), False if invalid
    """
    try:
        # Parse the ExportDate string
        parsed_date = datetime.strptime(export_date_str, "%a %b %d %H:%M:%S %Y")
        current_date = datetime.now()

        # Check if date is in the future.  A small tolerance is allowed so
        # that a timestamp produced moments before this check (or minor
        # clock jitter) is not rejected as a false positive.
        if parsed_date > current_date + timedelta(seconds=5):
            print(f"WARNING: Future ExportDate detected: {export_date_str}")
            print(f"  Current time: {current_date.strftime('%a %b %d %H:%M:%S %Y')}")
            return False

        return True
    except ValueError as e:
        # Raised by strptime when the string does not match the Rockwell format.
        print(f"WARNING: Invalid ExportDate format: {export_date_str} - {e}")
        return False
|
||||||
|
|
||||||
|
|
||||||
|
class ControllerBuilder:
    """Constructs the controller XML tree and offers an API for callers to
    attach extra modules before the project is saved.
    """

    def __init__(self, controller_name: str, skip_chassis_modules: bool = False):
        """Build the base controller and insert the fixed chassis modules.

        Args:
            controller_name: Name applied to the controller / target.
            skip_chassis_modules: When True, only Local + EN4TR are added so
                Excel-driven generation can supply the remaining slots.
        """
        self.controller_name = controller_name
        self.skip_chassis_modules = skip_chassis_modules

        # 1. Build base controller from boilerplate
        controller_cfg = create_l83es_controller(controller_name)
        gen = L83ESControllerGenerator(controller_cfg)
        gen.load_boilerplate()
        gen.apply_updates()

        self.tree: ET.ElementTree = gen.tree
        self.root: ET.Element = gen.root

        # 2. Apply controller-level attributes
        self._configure_controller_settings()

        # 3. Insert fixed chassis/IO modules
        self._add_fixed_modules()

    # ---------------------------------------------------------------------
    # Public helpers
    # ---------------------------------------------------------------------

    def get_modules_section(self) -> ET.Element:
        """Expose the <Modules> container so external code can append modules."""
        controller = self.root.find(".//Controller[@Use='Target']")
        if controller is None:
            raise ValueError("Controller element not found – this should never happen")
        modules = controller.find("Modules")
        if modules is None:
            raise ValueError("<Modules> section missing – builder initialisation failed")
        return modules

    def finalise_and_save(self, filename: str):
        """Complete remaining sections and write the finished L5X file."""
        # Add logical program/task scaffolding only once at the end so that any
        # Excel-added AOIs etc. (if present) appear *before* Programs, matching
        # the reference file ordering.
        self._add_programs_and_tasks()
        self._configure_additional_elements()

        # Update export date – Rockwell format: "Wed Jul 03 11:47:56 2024"
        export_date = datetime.now().strftime("%a %b %d %H:%M:%S %Y")

        # Validate the timestamp before setting it
        if validate_export_date(export_date):
            self.root.set("ExportDate", export_date)
        else:
            print(f"ERROR: Refusing to set invalid ExportDate: {export_date}")
            raise ValueError(f"Invalid ExportDate generated: {export_date}")

        # Indent & persist
        self._save_project(self.tree, filename)

    def add_generated_tags(self, routines_generator_dir: str, zones_dict=None):
        """Add the generated tags from the Routines Generator into this controller.

        Args:
            routines_generator_dir: Path to the Routines Generator directory containing generated files
            zones_dict: Optional zones configuration to use instead of DEFAULT_ZONES
        """
        controller = self.root.find(".//Controller[@Use='Target']")
        if controller is None:
            print("ERROR: Controller element not found")
            return

        # Find or create Tags section
        tags_section = controller.find("Tags")
        if tags_section is None:
            tags_section = ET.SubElement(controller, "Tags")

        try:
            # Import the tag writer to get tag XML elements.
            import sys
            # FIX: only extend sys.path once — repeated calls previously
            # appended duplicate entries for every invocation.
            generator_src = f"{routines_generator_dir}/src"
            if generator_src not in sys.path:
                sys.path.append(generator_src)
            from writers.xml_tag_writer import create_limited_tag_xml_elements
            from data_loader import DataLoader

            # Create DataLoader for DESC_IP extraction
            desc_ip_file = f"{routines_generator_dir}/DESC_IP_MERGED.xlsx"
            data_loader = DataLoader(desc_ip_file, zones_dict=zones_dict)

            # Generate tag XML elements
            print(" Generating tags from DESC_IP data...")
            tag_elements = create_limited_tag_xml_elements(desc_ip_file, data_loader)

            # Add each tag element to the Tags section
            tags_added = 0
            for tag_element in tag_elements:
                tags_section.append(tag_element)
                tags_added += 1

            print(f" Successfully embedded {tags_added} tags into complete project")

        except Exception as e:
            # Deliberate best-effort: a tags failure must not abort the build.
            print(f"WARNING: Failed to embed tags: {e}")
            # Continue without tags - the project will still have hardware modules

    def add_generated_programs(self, routines_generator_dir: str):
        """Add the generated programs from the Routines Generator into this controller.

        Args:
            routines_generator_dir: Path to the Routines Generator directory containing generated L5X files
        """
        controller = self.root.find(".//Controller[@Use='Target']")
        if controller is None:
            print("ERROR: Controller element not found")
            return

        # Find or create Programs section
        programs = controller.find("Programs")
        if programs is None:
            programs = ET.SubElement(controller, "Programs")

        # Load and embed SafetyProgram
        safety_l5x_path = f"{routines_generator_dir}/SafetyProgram_Generated.L5X"
        if os.path.exists(safety_l5x_path):
            print(f" Loading SafetyProgram from {safety_l5x_path}")
            safety_tree = ET.parse(safety_l5x_path)
            safety_program = safety_tree.find(".//Program[@Name='SafetyProgram']")
            if safety_program is not None:
                # Extract SafetyTagMap and move it to SafetyInfo before embedding the program
                safety_tag_map = safety_program.find("SafetyTagMap")
                if safety_tag_map is not None:
                    # Find SafetyInfo element in controller
                    safety_info = controller.find("SafetyInfo")
                    if safety_info is not None:
                        # Remove SafetyTagMap from program and add to SafetyInfo
                        safety_program.remove(safety_tag_map)
                        safety_info.append(safety_tag_map)
                        print(" Moved SafetyTagMap from program to SafetyInfo")
                    else:
                        print(" WARNING: SafetyInfo element not found in controller")

                programs.append(safety_program)
                # FIX: guard against a missing <Routines> child — len(None)
                # previously raised TypeError.
                safety_routines = safety_program.find('Routines')
                routine_count = len(safety_routines) if safety_routines is not None else 0
                print(f" Successfully embedded SafetyProgram with {routine_count} routines")
            else:
                print(" WARNING: SafetyProgram element not found in L5X file")
        else:
            print(f" WARNING: SafetyProgram not found at {safety_l5x_path}")

        # Load and embed MainProgram
        main_l5x_path = f"{routines_generator_dir}/MainProgram_Generated.L5X"
        if os.path.exists(main_l5x_path):
            print(f" Loading MainProgram from {main_l5x_path}")
            main_tree = ET.parse(main_l5x_path)
            main_program = main_tree.find(".//Program[@Name='MainProgram']")
            if main_program is not None:
                programs.append(main_program)
                # FIX: same missing-<Routines> guard as for SafetyProgram.
                main_routines = main_program.find('Routines')
                routine_count = len(main_routines) if main_routines is not None else 0
                print(f" Successfully embedded MainProgram with {routine_count} routines")
            else:
                print(" WARNING: MainProgram element not found in L5X file")
        else:
            print(f" WARNING: MainProgram not found at {main_l5x_path}")

        if len(list(programs)) > 0:
            print(f" Successfully embedded {len(list(programs))} programs into complete project")
        else:
            print(" WARNING: No programs were embedded - complete project will have hardware only")

    def _embed_program_from_l5x(self, programs_section: ET.Element, l5x_file_path: str):
        """Extract the Program element from an L5X file and embed it into the controller.

        Args:
            programs_section: The <Programs> element in the controller
            l5x_file_path: Path to the L5X file containing the program to embed
        """
        try:
            # Parse the L5X file
            tree = ET.parse(l5x_file_path)
            root = tree.getroot()

            # Find the Program element
            program_element = root.find(".//Program[@Use='Target']")
            if program_element is None:
                print(f" ERROR: No Program with Use='Target' found in {l5x_file_path}")
                return

            # Remove the Use='Target' attribute since we're embedding it
            if 'Use' in program_element.attrib:
                del program_element.attrib['Use']

            # Check if program already exists
            program_name = program_element.get('Name', 'UnknownProgram')
            existing = programs_section.find(f"Program[@Name='{program_name}']")
            if existing is not None:
                print(f" WARNING: Program '{program_name}' already exists, replacing it")
                programs_section.remove(existing)

            # Add the complete program to the controller
            programs_section.append(program_element)
            print(f" [SUCCESS] Embedded program '{program_name}'")

            # Count routines for verification
            routines = program_element.find("Routines")
            if routines is not None:
                routine_count = len(list(routines))
                print(f" [INFO] Program contains {routine_count} routines")

        except ET.ParseError as e:
            print(f" ERROR: Failed to parse L5X file {l5x_file_path}: {e}")
        except Exception as e:
            print(f" ERROR: Failed to embed program from {l5x_file_path}: {e}")

    # ------------------------------------------------------------------
    # Internal helpers (mostly verbatim from previous implementation)
    # ------------------------------------------------------------------

    def _configure_controller_settings(self):
        """Apply root export attributes and fixed 1756-L83ES controller settings."""
        root = self.root
        root.set("TargetName", self.controller_name)
        root.set("TargetType", "Controller")
        root.set("ContainsContext", "false")
        root.set(
            "ExportOptions",
            "NoRawData L5KData DecoratedData ForceProtectedEncoding AllProjDocTrans",
        )

        controller = root.find(".//Controller[@Use='Target']")
        if controller is None:
            return

        controller.set("Name", self.controller_name)
        controller.set("ProcessorType", "1756-L83ES")
        controller.set("MajorRev", "36")
        controller.set("MinorRev", "11")
        controller.set("ProjectSN", "16#0000_0000")
        controller.set("MatchProjectToController", "false")
        controller.set("CanUseRPIFromProducer", "false")
        controller.set("InhibitAutomaticFirmwareUpdate", "0")
        controller.set("PassThroughConfiguration", "EnabledWithAppend")
        controller.set("DownloadProjectDocumentationAndExtendedProperties", "true")
        controller.set("DownloadProjectCustomProperties", "true")
        controller.set("ReportMinorOverflow", "false")
        controller.set("AutoDiagsEnabled", "true")
        controller.set("WebServerEnabled", "false")

        # Ensure SafetyInfo exists, then (re)assert the safety attributes.
        safety_info = controller.find("SafetyInfo")
        if safety_info is None:
            safety_info = ET.SubElement(controller, "SafetyInfo")
        safety_info.set("SafetyLocked", "false")
        safety_info.set("SignatureRunModeProtect", "false")
        safety_info.set("ConfigureSafetyIOAlways", "false")
        safety_info.set("SafetyLevel", "SIL2/PLd")

    # ------------------------------------------------------------------
    # Fixed chassis modules
    # ------------------------------------------------------------------

    def _add_fixed_modules(self):
        """Create <Modules> (after SafetyInfo) and insert the fixed chassis I/O."""
        controller = self.root.find(".//Controller[@Use='Target']")
        if controller is None:
            raise ValueError("Controller element not found")

        # Ensure <Modules> exists right after SafetyInfo
        modules = controller.find("Modules")
        if modules is None:
            safety_info = controller.find("SafetyInfo")
            if safety_info is not None:
                idx = list(controller).index(safety_info) + 1
                modules = ET.Element("Modules")
                controller.insert(idx, modules)
            else:
                modules = ET.SubElement(controller, "Modules")

        # Local backplane tweaks
        self._configure_local_module(modules)
        self._add_en4tr_module(modules)

        # Only add chassis modules if not skipped (to avoid conflicts with Excel-driven generation)
        if not self.skip_chassis_modules:
            self._add_ib16_module(modules)
            self._add_ob16e_module(modules)
            self._add_ib16s_module(modules)

    def _configure_local_module(self, modules_section: ET.Element):
        """Set safety-network IDs and backplane bus size on the Local module."""
        local_module = modules_section.find("Module[@Name='Local']")
        if local_module is not None:
            ports = local_module.find("Ports")
            if ports is not None:
                for port in ports.findall("Port"):
                    if port.get("Id") == "1":
                        port.set("SafetyNetwork", "16#0000_4c33_031d_8f1b")
                        bus = port.find("Bus")
                        if bus is None:
                            bus = ET.SubElement(port, "Bus")
                        bus.set("Size", "10")
                    elif port.get("Id") == "2":
                        port.set("SafetyNetwork", "16#0000_4c33_031d_8f1c")

    def _add_en4tr_module(self, modules_section: ET.Element):
        """Insert the SLOT2_EN4TR Ethernet module parented to the Local backplane."""
        cfg = create_en4tr_module("SLOT2_EN4TR", self.controller_name)
        gen = EN4TRModuleGenerator(cfg)
        gen.load_boilerplate()
        gen.apply_updates()

        mod = gen.root.find(".//Module[@Name='SLOT2_EN4TR']")
        if mod is not None:
            mod.set("ParentModule", "Local")
            mod.set("ParentModPortId", "1")
            port = mod.find(".//Port[@Type='Ethernet']")
            if port is not None:
                port.set("Address", "11.200.1.1")
            modules_section.append(mod)

    def _add_ib16_module(self, modules_section: ET.Element):
        """Insert the SLOT5_IB16 input module at backplane address 5."""
        cfg = create_ib16_module("SLOT5_IB16", "5", "Local", "1", None)
        gen = IB16ModuleGenerator(cfg)
        gen.load_boilerplate()
        gen.apply_updates()

        mod = gen.root.find(".//Module[@Name='SLOT5_IB16']")
        if mod is not None:
            mod.set("ParentModule", "Local")
            mod.set("ParentModPortId", "1")
            icp = mod.find(".//Port[@Type='ICP']")
            if icp is not None:
                icp.set("Address", "5")
            modules_section.append(mod)

    def _add_ob16e_module(self, modules_section: ET.Element):
        """Insert the SLOT6_OB16E output module at backplane address 6."""
        cfg = create_ob16e_module("SLOT6_OB16E", "6", "Local", "1", None)
        gen = OB16EModuleGenerator(cfg)
        gen.load_boilerplate()
        gen.apply_updates()

        mod = gen.root.find(".//Module[@Name='SLOT6_OB16E']")
        if mod is not None:
            mod.set("ParentModule", "Local")
            mod.set("ParentModPortId", "1")
            mod.set("AutoDiagsEnabled", "true")
            icp = mod.find(".//Port[@Type='ICP']")
            if icp is not None:
                icp.set("Address", "6")
            modules_section.append(mod)

    def _add_ib16s_module(self, modules_section: ET.Element):
        """Insert the SLOT7_IB16S safety input module at backplane address 7."""
        cfg = create_ib16s_module(
            "SLOT7_IB16S",
            "7",
            "Local",
            "1",
            "16#0000_4c33_031d_8f1b",
            None,
        )
        gen = IB16SModuleGenerator(cfg)
        gen.load_boilerplate()
        gen.apply_updates()

        mod = gen.root.find(".//Module[@Name='SLOT7_IB16S']")
        if mod is not None:
            mod.set("ParentModule", "Local")
            mod.set("ParentModPortId", "1")
            mod.set("SafetyNetwork", "16#0000_4c33_031d_8f1b")
            mod.set("SafetyEnabled", "true")
            icp = mod.find(".//Port[@Type='ICP']")
            if icp is not None:
                icp.set("Address", "7")
            modules_section.append(mod)

    # ------------------------------------------------------------------
    # Programs, tasks, misc sections
    # ------------------------------------------------------------------

    def _add_programs_and_tasks(self):
        """Ensure a MainProgram skeleton (when empty) and a MainTask exist."""
        controller = self.root.find(".//Controller[@Use='Target']")
        if controller is None:
            return

        programs = controller.find("Programs")
        if programs is None:
            programs = ET.SubElement(controller, "Programs")

        # Only create empty MainProgram if no programs exist yet
        # (they might have been embedded from Routines Generator)
        if len(list(programs)) == 0:
            print(" No programs found, creating empty MainProgram skeleton")
            existing_main = programs.find("Program[@Name='MainProgram']")
            if existing_main is None:
                main_prog = ET.SubElement(programs, "Program")
                main_prog.set("Name", "MainProgram")
                main_prog.set("TestEdits", "false")
                main_prog.set("MainRoutineName", "MainRoutine")
                main_prog.set("Disabled", "false")
                main_prog.set("Class", "Standard")
                main_prog.set("UseAsFolder", "false")

                ET.SubElement(main_prog, "Tags")
                routines = ET.SubElement(main_prog, "Routines")
                main_routine = ET.SubElement(routines, "Routine")
                main_routine.set("Name", "MainRoutine")
                main_routine.set("Type", "RLL")
        else:
            print(f" Programs already exist ({len(list(programs))} programs found), skipping empty program creation")

        # Always ensure Tasks section exists
        tasks = controller.find("Tasks")
        if tasks is None:
            tasks = ET.SubElement(controller, "Tasks")

        existing_task = tasks.find("Task[@Name='MainTask']")
        if existing_task is None:
            main_task = ET.SubElement(tasks, "Task")
            main_task.set("Name", "MainTask")
            main_task.set("Type", "CONTINUOUS")
            main_task.set("Priority", "10")
            main_task.set("Watchdog", "500")
            main_task.set("DisableUpdateOutputs", "false")
            main_task.set("InhibitTask", "false")
            main_task.set("Class", "Standard")

            sched = ET.SubElement(main_task, "ScheduledPrograms")
            sched_prog = ET.SubElement(sched, "ScheduledProgram")
            sched_prog.set("Name", "MainProgram")

    def _configure_additional_elements(self):
        """Create the auxiliary controller sections the L5X schema expects."""
        controller = self.root.find(".//Controller[@Use='Target']")
        if controller is None:
            return

        # Ensure AddOnInstructionDefinitions, Tags, DataTypes exist
        sections = ["AddOnInstructionDefinitions", "Tags", "DataTypes"]
        for name in sections:
            if controller.find(name) is None:
                modules = controller.find("Modules")
                idx = len(list(controller))  # default append
                if modules is not None:
                    idx = list(controller).index(modules)
                elem = ET.Element(name)
                # DataTypes must precede Modules to match the reference ordering.
                if name == "DataTypes":
                    controller.insert(idx, elem)
                else:
                    controller.append(elem)

        # Other empty placeholders
        if controller.find("CST") is None:
            cst = ET.SubElement(controller, "CST")
            cst.set("MasterID", "0")
        if controller.find("WallClockTime") is None:
            wct = ET.SubElement(controller, "WallClockTime")
            wct.set("LocalTimeAdjustment", "0")
            wct.set("TimeZone", "0")
        if controller.find("Trends") is None:
            ET.SubElement(controller, "Trends")
        if controller.find("DataLogs") is None:
            ET.SubElement(controller, "DataLogs")
        if controller.find("TimeSynchronize") is None:
            ts = ET.SubElement(controller, "TimeSynchronize")
            ts.set("Priority1", "128")
            ts.set("Priority2", "128")
            ts.set("PTPEnable", "false")
        if controller.find("EthernetPorts") is None:
            ports = ET.SubElement(controller, "EthernetPorts")
            port = ET.SubElement(ports, "EthernetPort")
            port.set("Port", "1")
            port.set("Label", "1")
            port.set("PortEnabled", "true")

    # ------------------------------------------------------------------
    # Saving helpers
    # ------------------------------------------------------------------

    def _save_project(self, tree: ET.ElementTree, filename: str):
        """Pretty-print the tree, restore CDATA sections, and write the file."""
        self._indent(tree.getroot())
        xml_str = ET.tostring(tree.getroot(), encoding="unicode")
        full_xml = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n" + xml_str

        # Re-wrap L5K and DATA blocks with CDATA (ElementTree loses them)
        l5k_pattern = r'(<Data Format="L5K">)\s*(\[.*?\]|\(.*?\))\s*(</Data>)'
        data_pattern = r'(<DataValueMember Name="DATA"[^>]*>)([^<]*)(</DataValueMember>)'
        # Add pattern for RLL Text content
        text_pattern = r'(<Text>)(.*?)(</Text>)'

        def _to_cdata(match):
            start, content, end = match.group(1), match.group(2), match.group(3)
            return f"{start}<![CDATA[{content}]]>{end}"

        full_xml = re.sub(l5k_pattern, _to_cdata, full_xml, flags=re.DOTALL)
        full_xml = re.sub(data_pattern, _to_cdata, full_xml, flags=re.DOTALL)
        # Apply CDATA wrapping to Text elements (only when non-empty)
        full_xml = re.sub(text_pattern, lambda m: f"{m.group(1)}\n<![CDATA[{m.group(2)}]]>\n{m.group(3)}" if m.group(2).strip() else m.group(0), full_xml, flags=re.DOTALL)

        # FIX: os.makedirs("") raises FileNotFoundError when `filename` has no
        # directory component — only create directories when one is present.
        dir_name = os.path.dirname(filename)
        if dir_name:
            os.makedirs(dir_name, exist_ok=True)
        with open(filename, "w", encoding="utf-8") as fh:
            fh.write(full_xml)

    def _indent(self, elem: ET.Element, level: int = 0):
        """Recursively add whitespace so the serialized XML is human-readable."""
        i = "\n" + level * " "
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = i + " "
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
            for child in elem:
                self._indent(child, level + 1)
            if not child.tail or not child.tail.strip():
                child.tail = i
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = i
|
||||||
1225
IO Tree Configuration Generator/enhanced_mcm_generator.py
Normal file
1225
IO Tree Configuration Generator/enhanced_mcm_generator.py
Normal file
File diff suppressed because it is too large
Load Diff
435
IO Tree Configuration Generator/excel_data_processor.py
Normal file
435
IO Tree Configuration Generator/excel_data_processor.py
Normal file
@ -0,0 +1,435 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Excel Data Processor for IO Configuration
|
||||||
|
==========================================
|
||||||
|
|
||||||
|
Processes Excel data with columns: TAGNAME, IP, PARTNUMBER, IO_PATH, DESC, TERM, SIGNAL
|
||||||
|
Handles FIO, FIOH, ZMX, DPM, VFD(APF) modules and maps IO_PATH to DESC comments.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pandas as pd
|
||||||
|
import re
|
||||||
|
from typing import Dict, List, Tuple, Optional
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class IOPathMapping:
    """A single Excel row describing one IO point on a module.

    Field-to-column mapping:
        tagname     -> TAGNAME
        terminal    -> TERM   (e.g., "I0", "SI1", "IO0")
        io_path     -> IO_PATH (e.g., "FL3024_2_VFD1:I.In_0")
        description -> DESC
        ip_address  -> IP
        part_number -> PARTNUMBER
        signal      -> SIGNAL (e.g., "O", "I", "IOLink")
        desb        -> DESB   (beacon-specific descriptions; optional)
    """
    tagname: str
    terminal: str
    io_path: str
    description: str
    ip_address: str
    part_number: str
    signal: str
    desb: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ModuleData:
    """A complete module together with every IO mapping attached to it."""
    tagname: str
    ip_address: str
    part_number: str
    # All Excel rows belonging to this module.
    io_mappings: List[IOPathMapping] = field(default_factory=list)
    # FIOH modules record the FIO they hang off.
    parent_module: Optional[str] = None
    # True when PARTNUMBER was not recognised and had to be inferred.
    unknown_part_number: bool = False
    # FIOH modules: the terminal they are found on (IO4/IO12).
    terminal: str = ""
    # Per-point comments keyed by point index: {point_index: description}.
    comments: Dict[int, str] = field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
class ExcelDataProcessor:
|
||||||
|
"""Processes Excel data for IO configuration generation."""
|
||||||
|
|
||||||
|
# Known part number mappings
|
||||||
|
PART_NUMBER_MAP = {
|
||||||
|
# APF modules
|
||||||
|
"35S-6D1-P001": {"type": "APF", "hp": "1"},
|
||||||
|
"35S-6D2-P101": {"type": "APF", "hp": "2"},
|
||||||
|
"35S-6D3-P101": {"type": "APF", "hp": "3"},
|
||||||
|
"35S-6D4-P111": {"type": "APF", "hp": "5"},
|
||||||
|
"35S-6D5-P111": {"type": "APF", "hp": "7.5"},
|
||||||
|
"35S-6D6-P111": {"type": "APF", "hp": "10"},
|
||||||
|
|
||||||
|
# FIOH modules (Turck Hubs)
|
||||||
|
"TBIL-M1-16DXP": {"type": "FIOH"},
|
||||||
|
|
||||||
|
# DPM modules (to be implemented later)
|
||||||
|
"OS30-002404-2S": {"type": "DPM"},
|
||||||
|
|
||||||
|
# ZMX modules
|
||||||
|
"ZMX-3DE2500HF-Q7-AMZCHD": {"type": "ZMX"},
|
||||||
|
|
||||||
|
# EXTENDO modules (Siemens ET 200SP - CALJAN network)
|
||||||
|
"6ES7 158-3MU10-0XA0": {"type": "EXTENDO"},
|
||||||
|
"CALJAN": {"type": "EXTENDO"},
|
||||||
|
|
||||||
|
# IO-Link Master modules
|
||||||
|
"5032-8IOLM12DR": {"type": "IOLM"},
|
||||||
|
"1734-IB16": {"type": "IB16"},
|
||||||
|
"1734-IB16S": {"type": "IB16S"},
|
||||||
|
"1734-OB16E": {"type": "OB16E"},
|
||||||
|
|
||||||
|
# ControlLogix I/O modules (1756 series)
|
||||||
|
"1756-IB16": {"type": "IB16"},
|
||||||
|
"1756-IB16S": {"type": "IB16S"},
|
||||||
|
"1756-OB16E": {"type": "OB16E"},
|
||||||
|
|
||||||
|
# PMM modules (Power Monitoring Module)
|
||||||
|
"1420-V2-ENT": {"type": "PMM"},
|
||||||
|
|
||||||
|
# SIO modules (Safety Input/Output)
|
||||||
|
"0980SSL3131-121-007D-202": {"type": "SIO"},
|
||||||
|
}
|
||||||
|
|
||||||
|
def __init__(self, excel_file_path: str = "Data.xlsx"):
|
||||||
|
self.excel_file_path = excel_file_path
|
||||||
|
self.raw_data = None
|
||||||
|
self.modules: Dict[str, ModuleData] = {}
|
||||||
|
self.unknown_part_numbers: List[str] = []
|
||||||
|
self.fio_fioh_relationships: Dict[str, str] = {} # FIOH -> FIO parent mapping
|
||||||
|
|
||||||
|
def load_data(self) -> bool:
    """Load the 'DESC_IP' sheet of the Excel workbook into ``self.raw_data``.

    Rows with an empty TAGNAME are dropped; rows with an empty PARTNUMBER
    are kept so that :meth:`process_data` can infer the part from the
    TAGNAME suffix later. Diagnostics are printed for dropped/kept rows
    and for TAGNAMEs appearing more than 16 times.

    Returns:
        bool: True when the sheet was read and contains every required
        column; False on a read error or missing columns.
    """
    try:
        # Read Excel file from DESC_IP sheet (contains processed data with all required columns)
        self.raw_data = pd.read_excel(self.excel_file_path, sheet_name='DESC_IP')

        # Validate required columns
        required_columns = ['TAGNAME', 'IP', 'PARTNUMBER', 'IO_PATH', 'DESC', 'TERM', 'SIGNAL']
        optional_columns = ['DESB']  # DESB column for beacon-specific descriptions
        missing_columns = [col for col in required_columns if col not in self.raw_data.columns]

        if missing_columns:
            print(f"ERROR: Missing required columns: {missing_columns}")
            return False

        # Remove rows with empty TAGNAME (but keep empty PARTNUMBER for inference)
        initial_count = len(self.raw_data)

        # Find rows with empty TAGNAME or PARTNUMBER for reporting
        empty_tagnames = self.raw_data[self.raw_data['TAGNAME'].isna()]
        empty_partnumbers = self.raw_data[self.raw_data['PARTNUMBER'].isna()]

        # Report empty TAGNAME rows (these will be removed below)
        if len(empty_tagnames) > 0:
            print(f"WARNING: Removing {len(empty_tagnames)} rows with empty TAGNAME:")
            for idx, row in empty_tagnames.iterrows():
                tagname = str(row['TAGNAME']) if pd.notna(row['TAGNAME']) else "[EMPTY]"
                partnumber = str(row['PARTNUMBER']) if pd.notna(row['PARTNUMBER']) else "[EMPTY]"
                desc = str(row['DESC']) if pd.notna(row['DESC']) else ""
                term = str(row['TERM']) if pd.notna(row['TERM']) else ""
                # idx+2 converts the 0-based DataFrame index to a 1-based
                # spreadsheet row number (assumes one header row — TODO confirm).
                print(f" Row {idx+2}: TAGNAME='{tagname}', PARTNUMBER='{partnumber}', DESC='{desc}', TERM='{term}'")

        # Report empty PARTNUMBER rows (but don't remove them)
        if len(empty_partnumbers) > 0:
            print(f"INFO: Found {len(empty_partnumbers)} rows with empty PARTNUMBER (will attempt inference):")
            for idx, row in empty_partnumbers.iterrows():
                tagname = str(row['TAGNAME']) if pd.notna(row['TAGNAME']) else "[EMPTY]"
                partnumber = str(row['PARTNUMBER']) if pd.notna(row['PARTNUMBER']) else "[EMPTY]"
                desc = str(row['DESC']) if pd.notna(row['DESC']) else ""
                term = str(row['TERM']) if pd.notna(row['TERM']) else ""
                print(f" Row {idx+2}: TAGNAME='{tagname}', PARTNUMBER='{partnumber}', DESC='{desc}', TERM='{term}'")

        # Only remove rows with empty TAGNAME
        self.raw_data = self.raw_data.dropna(subset=['TAGNAME'])
        final_count = len(self.raw_data)

        # Check for TAGNAMEs that appear more than 16 times (potential data issues)
        tagname_counts = self.raw_data['TAGNAME'].value_counts()
        excessive_tagnames = tagname_counts[tagname_counts > 16]

        if len(excessive_tagnames) > 0:
            print(f"WARNING: Found {len(excessive_tagnames)} TAGNAMEs with more than 16 entries:")
            for tagname, count in excessive_tagnames.items():
                partnumber = self.raw_data[self.raw_data['TAGNAME'] == tagname]['PARTNUMBER'].iloc[0]
                print(f" {tagname}: {count} entries (PARTNUMBER: {partnumber})")
            print(" Note: Most modules should have ≤16 IO channels. Review these for potential data issues.")

        if initial_count != final_count:
            print(f"WARNING: Removed {initial_count - final_count} rows with empty TAGNAME")

        return True

    except Exception as e:
        # Broad catch: any pandas/openpyxl failure is reported as a load error.
        print(f"ERROR: Loading Excel file: {e}")
        return False
||||||
|
def process_data(self) -> bool:
    """Group the loaded rows by TAGNAME and build ``ModuleData`` records.

    For every TAGNAME group this populates ``self.modules`` with one
    ``ModuleData`` carrying all of its ``IOPathMapping`` rows, records
    unseen part numbers in ``self.unknown_part_numbers``, then derives
    FIOH modules and FIO->FIOH parent links from the raw data.

    Returns:
        bool: True on success, False when :meth:`load_data` has not
        been run first.
    """
    if self.raw_data is None:
        print("ERROR: No data loaded. Call load_data() first.")
        return False

    # Group data by TAGNAME to create modules
    grouped = self.raw_data.groupby('TAGNAME')

    for tagname, group in grouped:
        # Get the first row for module-level info (IP / part number are
        # assumed identical for all rows of a TAGNAME).
        first_row = group.iloc[0]
        part_number = str(first_row['PARTNUMBER']) if pd.notna(first_row['PARTNUMBER']) else ""

        # Infer part number and type from TAGNAME suffix if empty
        inferred_part_number = part_number
        if not part_number:
            if tagname.endswith("_IB16"):
                inferred_part_number = "1734-IB16"
            elif tagname.endswith("_IB16S"):
                inferred_part_number = "1734-IB16S"
            elif tagname.endswith("_OB16E"):
                inferred_part_number = "1734-OB16E"

        # Use inferred part number if available
        part_number = inferred_part_number or part_number

        # Handle IP address from data sheet; malformed addresses are
        # blanked so downstream generators fall back to defaults.
        ip_address = str(first_row['IP']).strip() if pd.notna(first_row['IP']) else ""
        if ip_address and not self._is_valid_ip(ip_address):
            print(f"WARNING: Invalid IP address format '{ip_address}' for module {tagname}")
            ip_address = ""

        # Check if part number is known
        unknown_part = part_number not in self.PART_NUMBER_MAP
        if unknown_part and part_number not in self.unknown_part_numbers:
            self.unknown_part_numbers.append(part_number)

        # Create module data
        module = ModuleData(
            tagname=tagname,
            ip_address=ip_address,
            part_number=part_number,
            unknown_part_number=unknown_part
        )

        # Process each IO mapping for this module
        for _, row in group.iterrows():
            io_mapping = IOPathMapping(
                tagname=tagname,
                terminal=str(row['TERM']) if pd.notna(row['TERM']) else "",
                io_path=str(row['IO_PATH']) if pd.notna(row['IO_PATH']) else "",
                description=str(row['DESC']) if pd.notna(row['DESC']) else "",
                ip_address=ip_address,
                part_number=part_number,
                signal=str(row['SIGNAL']) if pd.notna(row['SIGNAL']) else "",
                desb=str(row['DESB']) if 'DESB' in row and pd.notna(row['DESB']) else ""
            )
            module.io_mappings.append(io_mapping)

            # Collect comments - map TERM to point index and store DESC
            term = str(row['TERM']) if pd.notna(row['TERM']) else ""
            desc = str(row['DESC']) if pd.notna(row['DESC']) else ""
            if term and desc:
                # Parse point index from TERM (e.g., 'I0' -> 0, 'O15' -> 15, 'I10' -> 10)
                match = re.match(r'^[IO](\d{1,2})$', term.upper())
                if match:
                    point_index = int(match.group(1))
                    # Only 16-point modules are expected here (indices 0-15).
                    if 0 <= point_index <= 15:
                        module.comments[point_index] = desc

        self.modules[tagname] = module

    # Find FIOH modules based on TERM IO4/IO12 and DESC containing FIOH
    self._find_fioh_modules()

    # Find FIO-FIOH relationships
    self._find_fio_fioh_relationships()

    return True
|
|
||||||
|
def _find_fioh_modules(self):
    """Derive FIOH (Turck hub) modules from TERM/DESC markers in the raw data.

    A row whose TERM is IO4/IO12 (or legacy IO6/IO14) and whose DESC
    mentions "FIOH" marks a hub hanging off that row's TAGNAME. For each
    unique hub name found this creates a ``ModuleData`` with the known
    Turck part number, attaches all raw rows tagged with the hub's own
    name as IO mappings, and registers it in ``self.modules``.
    """
    fioh_modules_to_create = {}  # TAGNAME -> (parent_module, terminal)

    # Scan all data for TERM IO4 or IO12 with FIOH in DESC
    for _, row in self.raw_data.iterrows():
        term = str(row['TERM']) if pd.notna(row['TERM']) else ""
        desc = str(row['DESC']) if pd.notna(row['DESC']) else ""
        tagname = str(row['TAGNAME']) if pd.notna(row['TAGNAME']) else ""

        # Check if TERM is IO4/IO12 AND DESC contains FIOH (channels restricted to 4 and 12)
        # Temporarily also support IO6/IO14 for backward compatibility during transition
        if term.upper() in ["IO4", "IO12", "IO6", "IO14"] and "FIOH" in desc.upper():
            # Extract FIOH name from description
            # Look for patterns like "FL1014_FIOH1" or similar in the description
            fioh_match = re.search(r'([A-Z0-9_]+FIOH\d*)', desc.upper())
            if fioh_match:
                fioh_name = fioh_match.group(1)

                # The parent module is the TAGNAME where we found this FIOH reference.
                # First occurrence wins; later references to the same hub are ignored.
                if fioh_name not in fioh_modules_to_create:
                    fioh_modules_to_create[fioh_name] = (tagname, term.upper())

    # Create FIOH modules based on findings
    for fioh_name, (parent_module, terminal) in fioh_modules_to_create.items():
        # Create a new FIOH module
        fioh_module = ModuleData(
            tagname=fioh_name,
            ip_address="",  # FIOHs don't have IP addresses
            part_number="TBIL-M1-16DXP",  # Known FIOH part number
            parent_module=parent_module
        )

        # Store terminal info for port assignment (e.g. IO4 -> address 4, IO12 -> address 12)
        fioh_module.terminal = terminal

        # Find all IO mappings for this FIOH by scanning the data for rows
        # whose TAGNAME is the hub's own name (case-insensitive).
        for _, row in self.raw_data.iterrows():
            row_tagname = str(row['TAGNAME']) if pd.notna(row['TAGNAME']) else ""
            if row_tagname.upper() == fioh_name.upper():
                io_mapping = IOPathMapping(
                    tagname=fioh_name,
                    terminal=str(row['TERM']) if pd.notna(row['TERM']) else "",
                    io_path=str(row['IO_PATH']) if pd.notna(row['IO_PATH']) else "",
                    description=str(row['DESC']) if pd.notna(row['DESC']) else "",
                    ip_address="",
                    part_number="TBIL-M1-16DXP",
                    signal=str(row['SIGNAL']) if pd.notna(row['SIGNAL']) else ""
                )
                fioh_module.io_mappings.append(io_mapping)

        # Add to modules collection
        self.modules[fioh_name] = fioh_module

    if fioh_modules_to_create:
        print(f"Created {len(fioh_modules_to_create)} FIOH modules based on TERM analysis")
|
def _find_fio_fioh_relationships(self):
|
||||||
|
"""Find parent-child relationships between FIO and FIOH modules."""
|
||||||
|
# Update the relationships dict based on modules that already have parent_module set
|
||||||
|
for module_name, module in self.modules.items():
|
||||||
|
if (module.part_number == "TBIL-M1-16DXP" and # FIOH modules
|
||||||
|
module.parent_module): # Has a parent
|
||||||
|
self.fio_fioh_relationships[module_name] = module.parent_module
|
||||||
|
|
||||||
|
def parse_io_path(self, io_path: str) -> Tuple[str, str, str]:
    """Split an IO_PATH string into (tagname, channel, terminal).

    Examples:
        - "FL3024_2_VFD1:I.In_0" -> ("FL3024_2_VFD1", "I", "In_0")
        - "FL3024_2_VFD1:SI.In01Data" -> ("FL3024_2_VFD1", "SI", "In01Data")

    Missing separators degrade gracefully: no ":" yields ("", "", "");
    no "." after the colon yields (tagname, "", "").
    """
    try:
        tag, colon, remainder = io_path.partition(":")
        if not colon:
            # No tagname separator at all.
            return "", "", ""

        channel, dot, terminal = remainder.partition(".")
        if not dot:
            # Tagname present but channel/terminal not separable.
            return tag, "", ""

        return tag, channel, terminal
    except Exception:
        # Defensive: non-string input falls back to the empty triple.
        return "", "", ""
||||||
|
def get_modules_by_type(self, module_type: str) -> List[ModuleData]:
    """Return every known module whose mapped type equals *module_type*.

    Modules whose part number is absent from PART_NUMBER_MAP are never
    returned.
    """
    catalog = self.PART_NUMBER_MAP
    return [
        mod
        for mod in self.modules.values()
        if catalog.get(mod.part_number, {}).get("type") == module_type
    ]
||||||
|
def get_comments_for_module(self, tagname: str) -> Dict[str, str]:
    """Collect terminal -> description comments for one module.

    Descriptions equal to "SPARE" (case-insensitive) are normalised to
    the literal "SPARE". Unknown tagnames yield an empty dict; mappings
    missing either a terminal or a description are skipped.
    """
    module = self.modules.get(tagname)
    if module is None:
        return {}

    comments: Dict[str, str] = {}
    for mapping in module.io_mappings:
        if not (mapping.terminal and mapping.description):
            continue
        text = mapping.description
        comments[mapping.terminal] = "SPARE" if text.upper() == "SPARE" else text
    return comments
||||||
|
def print_summary(self):
    """Print a human-readable summary of the processed workbook.

    Reports module counts per type, discovered FIO->FIOH relationships,
    any unknown part numbers, and up to five sample modules.
    """
    banner = "=" * 60
    print("\n" + banner)
    print("Excel Data Processing Summary")
    print(banner)

    print(f"Total modules processed: {len(self.modules)}")

    # Tally modules per mapped type; unmapped part numbers count as UNKNOWN.
    type_counts = {}
    for module in self.modules.values():
        entry = self.PART_NUMBER_MAP.get(module.part_number)
        key = entry["type"] if entry is not None else "UNKNOWN"
        type_counts[key] = type_counts.get(key, 0) + 1

    print("\nModules by type:")
    for module_type, count in type_counts.items():
        print(f" {module_type}: {count}")

    print(f"\nFIO-FIOH relationships found: {len(self.fio_fioh_relationships)}")
    for fioh, fio in self.fio_fioh_relationships.items():
        print(f" {fioh} -> {fio}")

    if self.unknown_part_numbers:
        print(f"\nUnknown part numbers ({len(self.unknown_part_numbers)}):")
        for part_num in self.unknown_part_numbers:
            print(f" {part_num}")

    print("\nSample modules:")
    # Show at most the first 5 modules in insertion order.
    for tagname, module in list(self.modules.items())[:5]:
        print(f" {tagname}: {module.part_number} ({len(module.io_mappings)} IO mappings)")
|
def _is_valid_ip(self, ip: str) -> bool:
|
||||||
|
"""Validate IP address format."""
|
||||||
|
try:
|
||||||
|
parts = ip.split('.')
|
||||||
|
if len(parts) != 4:
|
||||||
|
return False
|
||||||
|
return all(0 <= int(part) <= 255 for part in parts)
|
||||||
|
except (AttributeError, TypeError, ValueError):
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Example usage of the Excel data processor."""
    print("Excel Data Processor Test")
    print("=" * 40)

    processor = ExcelDataProcessor("MCM04_Data.xlsx")

    # Guard clauses: stop at the first failing stage.
    if not processor.load_data():
        print("Failed to load data")
        return
    if not processor.process_data():
        print("Failed to process data")
        return

    processor.print_summary()

    # Example: Get comments for a specific module
    module_names = list(processor.modules.keys())
    if module_names:
        sample_module = module_names[0]
        comments = processor.get_comments_for_module(sample_module)
        print(f"\nSample comments for {sample_module}:")
        for terminal, desc in comments.items():
            print(f" {terminal}: {desc}")
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
File diff suppressed because one or more lines are too long
15
IO Tree Configuration Generator/models/__init__.py
Normal file
15
IO Tree Configuration Generator/models/__init__.py
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
"""
|
||||||
|
Boilerplate Models
|
||||||
|
==================
|
||||||
|
|
||||||
|
This package contains boilerplate models for various Logix 5000 modules.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .base_boilerplate_model import BaseBoilerplateGenerator, BaseModuleConfig
|
||||||
|
from .l83es_boilerplate_model import create_l83es_controller, L83ESControllerConfig
|
||||||
|
from .en4tr_boilerplate_model import create_en4tr_module, EN4TRModuleConfig
|
||||||
|
from .turck_hub_boilerplate_model import create_turck_hub_module, TurckHubModuleConfig
|
||||||
|
from .apf_boilerplate_model import create_apf_module, APFModuleConfig
|
||||||
|
from .dpm_boilerplate_model import create_dpm_module, DPMModuleConfig
|
||||||
|
from .tl70_beacon_boilerplate_model import create_tl70_beacon, TL70BeaconConfig, TL70Colors
|
||||||
|
from .lpe_boilerplate_model import create_lpe_module, LPEModuleConfig
|
||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
359
IO Tree Configuration Generator/models/apf_boilerplate_model.py
Normal file
359
IO Tree Configuration Generator/models/apf_boilerplate_model.py
Normal file
@ -0,0 +1,359 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
APF Module Boilerplate Model
|
||||||
|
============================
|
||||||
|
|
||||||
|
Model for APF (Armor PowerFlex) modules with support for different horsepower ratings.
|
||||||
|
Supports 1, 2, 3, 5, 7.5, and 10 HP variants.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict, Optional, TYPE_CHECKING
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
import os
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from excel_data_processor import ModuleData
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class APFModuleConfig:
    """Configuration for an APF (Armor PowerFlex) module instance."""
    name: str  # Module name (e.g., "APF1")
    hp: str  # Horsepower rating: "1", "2", "3", "5", "7_5" (or "7.5"), or "10"
    ip_address: str = "192.168.1.10"  # Ethernet address written to the module's port
    parent_module: str = "SLOT2_EN4TR"  # Name of the parent communications module
    parent_port_id: str = "2"  # Port on the parent the module hangs off
    inhibited: bool = False  # Module inhibit flag
    major_fault: bool = False  # MajorFault-on-controller flag
    safety_network: str = "16#0000_4c14_03e7_33a8"  # Safety network number (SNN)
    safety_enabled: bool = True  # Whether the safety connections are active
    # Optional point-index -> comment-text maps applied to the L5X Comments
    # sections; None leaves the boilerplate's comments untouched.
    input_device_names: Optional[Dict[int, str]] = None
    output_device_names: Optional[Dict[int, str]] = None
    safety_input_names: Optional[Dict[int, str]] = None
    safety_output_names: Optional[Dict[int, str]] = None
||||||
|
class APFModuleGenerator:
    """Generator for APF module XML with different HP support.

    Loads the HP-specific boilerplate L5X template, patches it in place
    (name, IP, parent, comments, export date) and writes the result.
    """

    # Mapping of HP values to boilerplate filenames
    HP_BOILERPLATE_MAP = {
        "1": "APF_Module_1_HP.L5X",
        "2": "APF_Module_2_HP.L5X",
        "3": "APF_Module_3_HP.L5X",
        "5": "APF_Module_5_HP.L5X",
        "7_5": "APF_Module_7_5_HP.L5X",
        "7.5": "APF_Module_7_5_HP.L5X",  # Allow both formats
        "10": "APF_Module_10_HP.L5X"
    }

    def __init__(self, config: APFModuleConfig):
        """Validate the HP rating and resolve the boilerplate path.

        Raises:
            ValueError: If ``config.hp`` is not a supported rating.
        """
        self.config = config
        # Normalize HP value ("7.5" and "7_5" are equivalent)
        if self.config.hp == "7.5":
            self.config.hp = "7_5"

        # Determine the correct boilerplate file
        if self.config.hp not in self.HP_BOILERPLATE_MAP:
            raise ValueError(f"Unsupported HP value: {self.config.hp}. Supported values: 1, 2, 3, 5, 7.5 (or 7_5), 10")

        self.boilerplate_filename = self.HP_BOILERPLATE_MAP[self.config.hp]
        # Templates are resolved relative to the current working directory.
        self.boilerplate_path = os.path.join("boilerplate", self.boilerplate_filename)
        self.tree = None  # ElementTree set by load_boilerplate()
        self.root = None  # Root element of the loaded template

    def load_boilerplate(self):
        """Load the appropriate boilerplate template based on HP rating.

        Raises:
            FileNotFoundError: If the template file does not exist.
        """
        if not os.path.exists(self.boilerplate_path):
            raise FileNotFoundError(f"Boilerplate file not found: {self.boilerplate_path}")

        self.tree = ET.parse(self.boilerplate_path)
        self.root = self.tree.getroot()

    def update_module_name(self):
        """Update the module name throughout the XML."""
        # Update in root attributes
        self.root.set("TargetName", self.config.name)

        # Update Module element (the export's target module)
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("Name", self.config.name)

    def update_ip_address(self):
        """Update the IP address in the Ethernet port."""
        port = self.root.find(".//Port[@Type='Ethernet']")
        if port is not None:
            port.set("Address", self.config.ip_address)

    def update_parent_module(self):
        """Update parent module references on the target module."""
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("ParentModule", self.config.parent_module)
            module.set("ParentModPortId", self.config.parent_port_id)

    def update_comments(self):
        """Update comments for inputs and outputs.

        For each configured comment dictionary the matching Comments
        element is cleared and rebuilt; a None/empty dictionary leaves
        the boilerplate's existing comments untouched.
        """
        # Update standard connection input comments
        if self.config.input_device_names:
            input_comments = self.root.find(".//Connection[@Name='A_Standard_Rev2']/InputTag/Comments")
            if input_comments is not None:
                # Clear existing comments
                input_comments.clear()
                # Add new comments
                for index, name in self.config.input_device_names.items():
                    comment = ET.SubElement(input_comments, "Comment")
                    if index < 4:  # IN_0 through IN_3
                        comment.set("Operand", f".IN_{index}")
                    else:  # IO_0 and IO_1 (index 4 and 5)
                        comment.set("Operand", f".IO_{index-4}")
                    comment.text = name

        # Update standard connection output comments
        if self.config.output_device_names:
            output_comments = self.root.find(".//Connection[@Name='A_Standard_Rev2']/OutputTag/Comments")
            if output_comments is not None:
                # Clear existing comments
                output_comments.clear()
                # Add new comments
                for index, name in self.config.output_device_names.items():
                    comment = ET.SubElement(output_comments, "Comment")
                    comment.set("Operand", f".IO_{index}")
                    comment.text = name

        # Update safety input comments
        if self.config.safety_input_names:
            safety_input_comments = self.root.find(".//Connection[@Name='D_Safety_Input']/InputTag/Comments")
            if safety_input_comments is not None:
                # Clear existing comments
                safety_input_comments.clear()
                # Add new comments (operand format assumes single-digit
                # indices, e.g. .IN01DATA — TODO confirm for index >= 10)
                for index, name in self.config.safety_input_names.items():
                    comment = ET.SubElement(safety_input_comments, "Comment")
                    comment.set("Operand", f".IN0{index}DATA")
                    comment.text = name

        # Update safety output comments
        if self.config.safety_output_names:
            safety_output_comments = self.root.find(".//Connection[@Name='C_Safety_Output']/OutputTag/Comments")
            if safety_output_comments is not None:
                # Clear existing comments
                safety_output_comments.clear()
                # Add new comments
                for index, name in self.config.safety_output_names.items():
                    comment = ET.SubElement(safety_output_comments, "Comment")
                    comment.set("Operand", f".OUT0{index}OUTPUT")
                    comment.text = name

    def update_export_date(self):
        """Update the export date to current time."""
        export_date = datetime.now().strftime("%a %b %d %H:%M:%S %Y")
        self.root.set("ExportDate", export_date)

    def apply_updates(self):
        """Apply all updates to the boilerplate."""
        self.update_module_name()
        self.update_ip_address()
        self.update_parent_module()
        self.update_comments()
        self.update_export_date()

    def save(self, output_path: str):
        """Save the updated module to file.

        Raises:
            RuntimeError: If load_boilerplate() has not been called.
        """
        if self.tree is None:
            raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")

        # Save with proper formatting
        self.tree.write(output_path, encoding='UTF-8', xml_declaration=True)

    def get_xml_string(self) -> str:
        """Get the XML as a string.

        Raises:
            RuntimeError: If load_boilerplate() has not been called.
        """
        if self.tree is None:
            raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")

        return ET.tostring(self.root, encoding='unicode')

    # ------------------------------------------------------------------
    # Convenience helpers for EnhancedMCMGenerator refactor
    # ------------------------------------------------------------------

    @staticmethod
    def _extract_comment_dictionaries(module_data: 'ModuleData') -> (
        Dict[int, str], Dict[int, str], Dict[int, str], Dict[int, str]
    ):
        """Translate the raw Excel `ModuleData` into the comment dictionaries
        expected by APFModuleGenerator.update_comments().

        The logic is a verbatim copy of what previously lived in
        EnhancedMCMGenerator._add_apf_modules so that behaviour stays
        identical after the refactor.

        Returns:
            Tuple of (input, output, safety-input, safety-output) maps,
            each keyed by point index.
        """

        input_device_names: Dict[int, str] = {}
        output_device_names: Dict[int, str] = {}
        safety_input_names: Dict[int, str] = {}
        safety_output_names: Dict[int, str] = {}

        for io_mapping in module_data.io_mappings:
            if not io_mapping.description:
                continue

            # Handle SPARE entries explicitly
            comment = (
                "SPARE" if io_mapping.description.upper() == "SPARE" else io_mapping.description
            )

            io_path = io_mapping.io_path
            if not io_path or ":" not in io_path:
                # Skip malformed IO_PATH strings (keeps old behaviour)
                continue

            path_parts = io_path.split(":", 1)[1]
            if "." not in path_parts:
                continue

            channel, terminal = path_parts.split(".", 1)

            channel_upper = channel.upper()

            if channel_upper == "I" and terminal.startswith("In_"):
                # Standard input: I.In_0, I.In_1, …
                try:
                    index = int(terminal.split("_")[1])
                    input_device_names[index] = comment
                except (ValueError, IndexError):
                    continue

            elif terminal.startswith("IO_"):
                # IO channel: I.IO_0, O.IO_1, …
                try:
                    index = int(terminal.split("_")[1])
                except (ValueError, IndexError):
                    continue

                if channel_upper == "O":
                    output_device_names[index] = comment
                elif channel_upper == "I":
                    # For inputs IO channels start at index 4
                    input_device_names[index + 4] = comment

            elif channel_upper == "SI":
                # Safety input: SI.In00Data, SI.In01Data, …
                if terminal.startswith("In") and terminal.endswith("Data"):
                    try:
                        index = int(terminal[2:-4])  # extract NN from InNNData
                        safety_input_names[index] = comment
                    except ValueError:
                        continue

            elif channel_upper == "SO":
                # Safety output: SO.Out00Output, …
                if terminal.startswith("Out") and terminal.endswith("Output"):
                    try:
                        index = int(terminal[3:-6])  # extract NN from OutNNOutput
                        safety_output_names[index] = comment
                    except ValueError:
                        continue

            # Any other variants are ignored (same as before)

        return (
            input_device_names,
            output_device_names,
            safety_input_names,
            safety_output_names,
        )

    @classmethod
    def from_excel(
        cls,
        module_data: 'ModuleData',
        hp: str,
        *,
        ip_address: str = "",
        parent_module: str = "SLOT2_EN4TR",
        parent_port_id: str = "2",
    ) -> 'APFModuleGenerator':
        """Factory that builds a fully-configured generator directly from
        ExcelDataProcessor.ModuleData.

        It returns an *instance* (already loaded and updated) so callers can
        access .root or save it immediately.

        Raises:
            TypeError: If *module_data* is not an Excel ModuleData.
        """

        from excel_data_processor import ModuleData  # local import to avoid cycle at top level

        if not isinstance(module_data, ModuleData):
            raise TypeError("module_data must be an Excel ModuleData instance")

        (
            input_device_names,
            output_device_names,
            safety_input_names,
            safety_output_names,
        ) = cls._extract_comment_dictionaries(module_data)

        # Explicit ip_address wins, then the Excel value, then the default.
        config = create_apf_module(
            name=module_data.tagname,
            hp=hp,
            ip_address=ip_address or module_data.ip_address or "192.168.1.10",
            parent_module=parent_module,
            parent_port_id=parent_port_id,
            input_device_names=input_device_names if input_device_names else None,
            output_device_names=output_device_names if output_device_names else None,
            safety_input_names=safety_input_names if safety_input_names else None,
            safety_output_names=safety_output_names if safety_output_names else None,
        )

        generator = cls(config)
        generator.load_boilerplate()
        generator.apply_updates()
        return generator
|
||||||
|
def create_apf_module(name: str, hp: str, ip_address: str = "192.168.1.10",
                      parent_module: str = "SLOT2_EN4TR", parent_port_id: str = "2",
                      input_device_names: Optional[Dict[int, str]] = None,
                      output_device_names: Optional[Dict[int, str]] = None,
                      safety_input_names: Optional[Dict[int, str]] = None,
                      safety_output_names: Optional[Dict[int, str]] = None) -> APFModuleConfig:
    """Factory function to create an APF module configuration.

    All arguments are forwarded verbatim to :class:`APFModuleConfig`;
    see that dataclass for the meaning of each field.
    """
    settings = dict(
        name=name,
        hp=hp,
        ip_address=ip_address,
        parent_module=parent_module,
        parent_port_id=parent_port_id,
        input_device_names=input_device_names,
        output_device_names=output_device_names,
        safety_input_names=safety_input_names,
        safety_output_names=safety_output_names,
    )
    return APFModuleConfig(**settings)
|
|
||||||
|
|
||||||
|
# Example usage
|
||||||
|
if __name__ == "__main__":
|
||||||
|
# Example: Create a 5 HP APF module
|
||||||
|
config = create_apf_module(
|
||||||
|
name="APF1_5HP",
|
||||||
|
hp="5", # Specify the horsepower
|
||||||
|
ip_address="192.168.1.10",
|
||||||
|
input_device_names={
|
||||||
|
0: "E-Stop",
|
||||||
|
1: "Start PB",
|
||||||
|
2: "Stop PB",
|
||||||
|
3: "Reset",
|
||||||
|
4: "Speed Ref",
|
||||||
|
5: "Enable"
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
generator = APFModuleGenerator(config)
|
||||||
|
generator.load_boilerplate()
|
||||||
|
generator.apply_updates()
|
||||||
|
generator.save("generated/APF1_5HP.L5X")
|
||||||
|
|
||||||
|
print(f"Generated {config.hp} HP APF module: {config.name}")
|
||||||
102
IO Tree Configuration Generator/models/base_boilerplate_model.py
Normal file
102
IO Tree Configuration Generator/models/base_boilerplate_model.py
Normal file
@ -0,0 +1,102 @@
|
|||||||
|
"""
|
||||||
|
Base Boilerplate Model
|
||||||
|
======================
|
||||||
|
|
||||||
|
This module provides a base class for all L5X models that use boilerplate templates.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
import xml.etree.ElementTree as ET
from abc import ABC, abstractmethod
from dataclasses import dataclass
from datetime import datetime
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class BaseModuleConfig(ABC):
    """Base configuration shared by all module-config dataclasses.

    Concrete subclasses add their own fields and implement
    :meth:`get_updates` to expose the values a generator should splice
    into its boilerplate template.
    """
    # Module name as it will appear in the generated L5X.
    name: str

    @abstractmethod
    def get_updates(self) -> Dict[str, Any]:
        """Get dictionary of updates to apply to the boilerplate."""
        # Fix: the annotation previously used the builtin ``any`` function
        # (``Dict[str, any]``); ``typing.Any`` is the intended type.
        pass
|
||||||
|
|
||||||
|
|
||||||
|
class BaseBoilerplateGenerator(ABC):
    """Base generator for L5X files built from boilerplate templates.

    Subclasses implement :meth:`apply_updates` to mutate the parsed XML
    tree before it is serialized.
    """

    def __init__(self, config: "BaseModuleConfig"):
        """Store the configuration; the XML tree is loaded lazily."""
        self.config = config
        self.tree = None   # ElementTree once load_boilerplate() has run
        self.root = None   # root Element of the loaded tree

    def load_boilerplate(self):
        """Load the boilerplate L5X file referenced by the config.

        Raises:
            AttributeError: if the config has no ``boilerplate_path``.
            FileNotFoundError: if the referenced file does not exist.
        """
        if not hasattr(self.config, 'boilerplate_path'):
            raise AttributeError("Config must have a boilerplate_path attribute")

        if not os.path.exists(self.config.boilerplate_path):
            raise FileNotFoundError(f"Boilerplate file not found: {self.config.boilerplate_path}")

        self.tree = ET.parse(self.config.boilerplate_path)
        self.root = self.tree.getroot()

    def update_export_date(self):
        """Stamp the root element's ExportDate with the current local time."""
        export_date = datetime.now().strftime("%a %b %d %H:%M:%S %Y")
        self.root.set("ExportDate", export_date)

    @abstractmethod
    def apply_updates(self):
        """Apply module-specific updates to the boilerplate."""
        pass

    def generate(self) -> str:
        """Generate the complete L5X document as a string.

        Loads the boilerplate, applies subclass updates, refreshes the
        export date, and returns pretty-printed XML.
        """
        self.load_boilerplate()
        self.apply_updates()
        self.update_export_date()
        return self._to_pretty_xml()

    def _to_pretty_xml(self) -> str:
        """Serialize the tree with an XML declaration and indentation."""
        xml_str = '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n'
        self._indent(self.root)
        xml_str += ET.tostring(self.root, encoding='unicode')
        return xml_str

    def _indent(self, elem, level=0):
        """Recursively add newline/indent whitespace to *elem* in place.

        NOTE(review): every child's tail is set to the parent-level
        indent, so nested children are indented one level shallower than
        the classic recipe would produce — cosmetic only.
        """
        i = "\n" + level * "  "
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = i + "  "
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
            for child in elem:
                self._indent(child, level + 1)
                if not child.tail or not child.tail.strip():
                    child.tail = i
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = i

    def save_to_file(self, filename: str):
        """Generate and save the L5X content to *filename*.

        Fix: create intermediate directories when needed, consistent
        with ExtendoModuleGenerator.save(); previously writing to a
        non-existent directory raised FileNotFoundError.
        """
        out_dir = os.path.dirname(filename)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        content = self.generate()
        with open(filename, 'w', encoding='utf-8') as f:
            f.write(content)
|
||||||
172
IO Tree Configuration Generator/models/dpm_boilerplate_model.py
Normal file
172
IO Tree Configuration Generator/models/dpm_boilerplate_model.py
Normal file
@ -0,0 +1,172 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
DPM Module Boilerplate Model
|
||||||
|
============================
|
||||||
|
|
||||||
|
Model for DPM (Display Panel Module) modules - OS30-002404-2S.
|
||||||
|
Supports configuring module name, parent module, IP address, and basic settings.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict, Optional, TYPE_CHECKING
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
import os
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from excel_data_processor import ModuleData
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class DPMModuleConfig:
    """Configuration for a DPM (Display Panel Module, OS30-002404-2S) instance.

    Values are spliced into the DPM boilerplate L5X by DPMModuleGenerator;
    none of them are validated at construction time.
    """
    name: str  # Module name (e.g., "DPM1")
    ip_address: str = "192.168.1.100"  # IP address for Ethernet connection
    parent_module: str = "Local"  # Parent module (usually "Local" for Ethernet)
    parent_port_id: str = "2"  # Port on the parent module
    inhibited: bool = False  # Whether module starts inhibited
    major_fault: bool = False  # presumably "MajorFaultOnError" semantics — confirm against L5X schema
    rpi: int = 100000  # Request Packet Interval in microseconds (100ms default)
|
||||||
|
|
||||||
|
|
||||||
|
class DPMModuleGenerator:
    """Generator for DPM module XML.

    Loads the DPM boilerplate L5X, rewrites name / parent / IP / RPI /
    status fields from a DPMModuleConfig, and writes the result out.
    """

    def __init__(self, config: DPMModuleConfig):
        self.config = config
        # Template path is fixed relative to the working directory.
        self.boilerplate_path = os.path.join("boilerplate", "DPM_Module.L5X")
        self.tree = None  # ElementTree once load_boilerplate() has run
        self.root = None  # root Element of the loaded tree

    def load_boilerplate(self):
        """Load the DPM boilerplate template.

        Raises:
            FileNotFoundError: if the template file is missing.
        """
        if not os.path.exists(self.boilerplate_path):
            raise FileNotFoundError(f"Boilerplate file not found: {self.boilerplate_path}")

        self.tree = ET.parse(self.boilerplate_path)
        self.root = self.tree.getroot()

    def update_module_name(self):
        """Update the module name throughout the XML."""
        # Update in root attributes
        self.root.set("TargetName", self.config.name)

        # Update Module element
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("Name", self.config.name)

    def update_parent_module(self):
        """Update parent module references."""
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("ParentModule", self.config.parent_module)
            module.set("ParentModPortId", self.config.parent_port_id)

    def update_ip_address(self):
        """Update the IP address in the (first) Ethernet port."""
        port = self.root.find(".//Port[@Type='Ethernet']")
        if port is not None:
            port.set("Address", self.config.ip_address)

    def update_inhibited_status(self):
        """Update the inhibited and fault status."""
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            # Python bools are serialized as lowercase "true"/"false".
            module.set("Inhibited", str(self.config.inhibited).lower())
            module.set("MajorFault", str(self.config.major_fault).lower())

    def update_rpi(self):
        """Update the Request Packet Interval (µs) on the first Connection."""
        connection = self.root.find(".//Connection")
        if connection is not None:
            connection.set("RPI", str(self.config.rpi))

    def update_export_date(self):
        """Update the export date to current time."""
        export_date = datetime.now().strftime("%a %b %d %H:%M:%S %Y")
        self.root.set("ExportDate", export_date)

    def apply_updates(self):
        """Apply all updates to the boilerplate."""
        self.update_module_name()
        self.update_parent_module()
        self.update_ip_address()
        self.update_inhibited_status()
        self.update_rpi()
        self.update_export_date()

    def save(self, output_path: str):
        """Save the updated module to file.

        NOTE(review): unlike the Festo/EXTENDO generators, this writes via
        ElementTree directly, so any CDATA sections in the boilerplate are
        not restored — confirm the DPM template contains none.
        """
        if self.tree is None:
            raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")

        # Save with proper formatting
        self.tree.write(output_path, encoding='UTF-8', xml_declaration=True)

    def get_xml_string(self) -> str:
        """Get the XML as a string."""
        if self.tree is None:
            raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")

        return ET.tostring(self.root, encoding='unicode')

    # ------------------------------------------------------------------
    # Helper for generator refactor
    # ------------------------------------------------------------------

    @classmethod
    def from_excel(cls, module_data: 'ModuleData') -> 'DPMModuleGenerator':
        """Create and configure generator from Excel ModuleData.

        NOTE(review): source formatting was ambiguous about whether this is
        a method or a module-level function; it uses ``cls``, so it is
        reconstructed as a classmethod — confirm against the original file.
        """
        cfg = create_dpm_module(
            name=module_data.tagname,
            ip_address=module_data.ip_address or "192.168.1.100",
            parent_module="SLOT2_EN4TR",
            parent_port_id="2",
            inhibited=True,
        )
        gen = cls(cfg)
        gen.load_boilerplate()
        gen.apply_updates()
        return gen
|
||||||
|
|
||||||
|
|
||||||
|
def create_dpm_module(name: str, ip_address: str = "192.168.1.100",
                      parent_module: str = "Local", parent_port_id: str = "2",
                      inhibited: bool = True, major_fault: bool = False,
                      rpi: int = 100000) -> DPMModuleConfig:
    """Factory: assemble a DPM module configuration from keyword settings."""
    settings = {
        "name": name,
        "ip_address": ip_address,
        "parent_module": parent_module,
        "parent_port_id": parent_port_id,
        "inhibited": inhibited,
        "major_fault": major_fault,
        "rpi": rpi,
    }
    return DPMModuleConfig(**settings)
|
||||||
|
|
||||||
|
|
||||||
|
# Example usage
if __name__ == "__main__":
    # Example: Create a DPM module with custom configuration
    demo_cfg = create_dpm_module(
        name="DPM_STATION_1",
        ip_address="192.168.1.100",
        parent_module="Local",
        parent_port_id="2",
        inhibited=False,  # Start enabled
        rpi=50000,  # 50ms update rate
    )

    gen = DPMModuleGenerator(demo_cfg)
    gen.load_boilerplate()
    gen.apply_updates()
    gen.save("generated/DPM_STATION_1.L5X")

    print(f"Generated DPM module: {demo_cfg.name}")
    for label, value in (
        ("IP Address", demo_cfg.ip_address),
        ("Parent module", demo_cfg.parent_module),
        ("Port", demo_cfg.parent_port_id),
        ("Inhibited", demo_cfg.inhibited),
    ):
        print(f"{label}: {value}")
    print(f"RPI: {demo_cfg.rpi}µs ({demo_cfg.rpi/1000}ms)")
|
||||||
@ -0,0 +1,104 @@
|
|||||||
|
"""
|
||||||
|
EN4TR EtherNet/IP Module L5X Generator (Boilerplate-based)
|
||||||
|
==========================================================
|
||||||
|
|
||||||
|
This module provides functionality to generate EN4TR module L5X files by
|
||||||
|
loading a boilerplate template and modifying specific fields.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Optional
|
||||||
|
from .base_boilerplate_model import BaseModuleConfig, BaseBoilerplateGenerator
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class EN4TRModuleConfig(BaseModuleConfig):
    """Configuration for a 1756-EN4TR Ethernet bridge module."""
    name: str
    ethernet_address: str  # e.g., "11.200.1.1"
    boilerplate_path: str = "boilerplate/SLOT2_EN4TR_Module.L5X"
    parent_module: str = "Local"
    parent_port_id: str = "1"
    slot_address: str = "2"

    def get_updates(self):
        """Return the field values the generator splices into the template."""
        return dict(
            name=self.name,
            ethernet_address=self.ethernet_address,
            parent_module=self.parent_module,
            parent_port_id=self.parent_port_id,
            slot_address=self.slot_address,
        )
|
||||||
|
|
||||||
|
|
||||||
|
class EN4TRModuleGenerator(BaseBoilerplateGenerator):
    """Generator for EN4TR module L5X files, driven by a boilerplate template."""

    def apply_updates(self):
        """Write the configured name, parent, slot and IP into the tree."""
        cfg = self.config

        # The export targets this module by name.
        self.root.set("TargetName", cfg.name)

        target = self.root.find(".//Module[@Use='Target']")
        if target is not None:
            target.set("Name", cfg.name)
            target.set("ParentModule", cfg.parent_module)
            target.set("ParentModPortId", cfg.parent_port_id)

        # Chassis (backplane) slot for the module.
        backplane_port = self.root.find(".//Module[@Use='Target']/Ports/Port[@Type='ICP']")
        if backplane_port is not None:
            backplane_port.set("Address", cfg.slot_address)

        # Network address on the Ethernet port.
        network_port = self.root.find(".//Module[@Use='Target']/Ports/Port[@Type='Ethernet']")
        if network_port is not None:
            network_port.set("Address", cfg.ethernet_address)
|
||||||
|
|
||||||
|
|
||||||
|
# Factory function
def create_en4tr_module(
    name: str,
    ethernet_address: str,
    slot_address: str = "2",
    parent_module: str = "Local",
    parent_port_id: str = "1"
) -> EN4TRModuleConfig:
    """
    Create an EN4TR module configuration.

    Args:
        name: Module name
        ethernet_address: Ethernet IP address (e.g., "11.200.1.1")
        slot_address: Slot number in the chassis
        parent_module: Parent module name
        parent_port_id: Parent module port ID

    Returns:
        EN4TRModuleConfig object
    """
    options = {
        "name": name,
        "ethernet_address": ethernet_address,
        "slot_address": slot_address,
        "parent_module": parent_module,
        "parent_port_id": parent_port_id,
    }
    return EN4TRModuleConfig(**options)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Example usage
    demo_cfg = create_en4tr_module(
        name="SLOT3_EN4TR",
        ethernet_address="11.200.1.10",
        slot_address="3",
        parent_module="Local",
        parent_port_id="1",
    )

    # Build and write the module in one step.
    EN4TRModuleGenerator(demo_cfg).save_to_file("generated/SLOT3_EN4TR.L5X")
    print("Generated EN4TR module configuration saved to generated/SLOT3_EN4TR.L5X")
|
||||||
@ -0,0 +1,210 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Siemens EXTENDO Module Boilerplate Model
|
||||||
|
========================================
|
||||||
|
|
||||||
|
Model for Siemens EXTENDO modules (6ES7 158-3MU10-0XA0).
|
||||||
|
Supports name, IP address, and parent module configuration.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Optional, TYPE_CHECKING
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
import os
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from excel_data_processor import ModuleData
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ExtendoModuleConfig:
    """Configuration for a Siemens EXTENDO module instance.

    Targets the ET 200SP head module 6ES7 158-3MU10-0XA0; values are
    spliced into the EXTENDO boilerplate by ExtendoModuleGenerator.
    """
    name: str  # Module name (e.g., "EXTENDO1")
    ip_address: str = "112.131.213.123"  # EtherNet/IP address of the remote I/O head
    parent_module: str = "SLOT2_EN4TR"  # Name of the parent Ethernet bridge module
    parent_port_id: str = "2"  # Port on the parent module
|
||||||
|
|
||||||
|
|
||||||
|
class ExtendoModuleGenerator:
    """Generates Siemens EXTENDO module configurations from boilerplate.

    Loads boilerplate/EXTENDO_Module.L5X, rewrites name / IP / parent
    fields from an ExtendoModuleConfig, and saves the result while
    restoring CDATA sections that ElementTree strips on parse.
    """

    def __init__(self, config: ExtendoModuleConfig):
        self.config = config
        self.boilerplate_filename = "EXTENDO_Module.L5X"
        self.boilerplate_path = os.path.join("boilerplate", self.boilerplate_filename)
        self.tree = None  # ElementTree once load_boilerplate() has run
        self.root = None  # root Element of the loaded tree

    def load_boilerplate(self):
        """Load the boilerplate XML file.

        Raises:
            FileNotFoundError: if the template file is missing.
        """
        if not os.path.exists(self.boilerplate_path):
            raise FileNotFoundError(f"Boilerplate file not found: {self.boilerplate_path}")

        self.tree = ET.parse(self.boilerplate_path)
        self.root = self.tree.getroot()

    def update_module_name(self):
        """Update the module name in the XML."""
        # Update module name
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("Name", self.config.name)

        # Update target name in root
        self.root.set("TargetName", self.config.name)

    def update_ip_address(self):
        """Update the IP address in the module configuration."""
        # Find the (first) Ethernet port and update IP address
        port = self.root.find(".//Port[@Type='Ethernet']")
        if port is not None:
            port.set("Address", self.config.ip_address)

    def update_parent_module(self):
        """Update the parent module configuration."""
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("ParentModule", self.config.parent_module)
            module.set("ParentModPortId", self.config.parent_port_id)

    def update_export_date(self):
        """Update the export date to current time."""
        export_date = datetime.now().strftime("%a %b %d %H:%M:%S %Y")
        self.root.set("ExportDate", export_date)

    def apply_updates(self):
        """Apply all configuration updates."""
        self.update_module_name()
        self.update_ip_address()
        self.update_parent_module()
        self.update_export_date()

    def save(self, output_path: str):
        """Save the configured module to a file.

        ElementTree drops CDATA markers when parsing, so after
        serializing we re-wrap L5K data payloads in <![CDATA[...]]>.
        """
        if self.tree is None:
            raise ValueError("No boilerplate loaded. Call load_boilerplate() first.")

        # Create output directory if it doesn't exist
        output_dir = os.path.dirname(output_path)
        if output_dir and not os.path.exists(output_dir):
            os.makedirs(output_dir)

        # Write the XML to file with proper formatting
        self._indent(self.root)

        # Convert to string and preserve CDATA sections
        xml_string = ET.tostring(self.root, encoding='unicode')
        full_xml = '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n' + xml_string

        # Fix CDATA sections that got stripped: matches "[[...]]"-shaped
        # payloads inside <Data Format="L5K"> and re-wraps them.
        import re
        pattern = r'(<Data Format="L5K">)\s*(\[\[.*?\]\])\s*(</Data>)'
        def fix_cdata(match):
            start_tag = match.group(1)
            content = match.group(2)
            end_tag = match.group(3)
            return f'{start_tag}\n<![CDATA[{content}]]>\n{end_tag}'

        full_xml = re.sub(pattern, fix_cdata, full_xml, flags=re.DOTALL)

        # Save the corrected XML
        with open(output_path, 'w', encoding='utf-8') as f:
            f.write(full_xml)

    def _indent(self, elem, level=0):
        """Add proper indentation to XML elements (recursive, in place)."""
        i = "\n" + level * "  "
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = i + "  "
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
            for child in elem:
                self._indent(child, level + 1)
                if not child.tail or not child.tail.strip():
                    child.tail = i
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = i

    # ------------------------------------------------------------------
    # Helper for EnhancedMCMGenerator refactor
    # ------------------------------------------------------------------

    @classmethod
    def from_excel(cls, module_data: 'ModuleData') -> 'ExtendoModuleGenerator':
        """Build a fully-configured generator from an Excel ModuleData row.

        NOTE(review): source formatting was ambiguous about whether this
        is a method or module-level; it uses ``cls``, so it is
        reconstructed as a classmethod — confirm against the original.
        """
        cfg = create_extendo_module(
            name=module_data.tagname,
            ip_address=module_data.ip_address or "112.131.213.123",
            parent_module="SLOT2_EN4TR",
            parent_port_id="2",
        )
        gen = cls(cfg)
        gen.load_boilerplate()
        gen.apply_updates()
        return gen
|
||||||
|
|
||||||
|
|
||||||
|
def create_extendo_module(name: str, ip_address: str = "112.131.213.123",
                          parent_module: str = "SLOT2_EN4TR",
                          parent_port_id: str = "2") -> ExtendoModuleConfig:
    """
    Factory function to create a Siemens EXTENDO module configuration.

    Args:
        name: Module name (e.g., "EXTENDO1")
        ip_address: IP address for the module (default: "112.131.213.123")
        parent_module: Parent module name (default: "SLOT2_EN4TR")
        parent_port_id: Parent port ID (default: "2")

    Returns:
        ExtendoModuleConfig: Configured Siemens EXTENDO module
    """
    options = {
        "name": name,
        "ip_address": ip_address,
        "parent_module": parent_module,
        "parent_port_id": parent_port_id,
    }
    return ExtendoModuleConfig(**options)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Example usage of the Siemens EXTENDO module generator.

    Builds one demo EXTENDO config, generates its L5X into ./generated,
    and prints a summary. Side effects: creates the "generated" folder
    and writes a file into it.
    """
    print("Siemens EXTENDO Module Generator Example")
    print("=" * 42)

    # Create Siemens EXTENDO module configuration
    config = create_extendo_module(
        name="EXTENDO1",
        ip_address="112.131.213.200",
        parent_module="SLOT2_EN4TR"
    )

    # Generate the module
    generator = ExtendoModuleGenerator(config)
    generator.load_boilerplate()
    generator.apply_updates()

    # Save to generated folder
    os.makedirs("generated", exist_ok=True)
    output_file = f"generated/{config.name}.L5X"
    generator.save(output_file)

    print(f"Generated Siemens EXTENDO module: {output_file}")
    print(f" Name: {config.name}")
    print(f" IP Address: {config.ip_address}")
    print(f" Parent Module: {config.parent_module}")
    print(f" Parent Port: {config.parent_port_id}")

    print("\nModule Features:")
    print(" - Siemens ET 200SP remote I/O")
    print(" - Input data: 15 bytes (11 SINT data + connection info)")
    print(" - Output data: 8 bytes (8 SINT data)")
    print(" - Catalog Number: 6ES7 158-3MU10-0XA0")
    print(" - Ethernet/IP communication")
    print(" - Vendor: Siemens AG")
|
||||||
|
|
||||||
|
|
||||||
|
# Run the demo only when executed as a script.
if __name__ == "__main__":
    main()
|
||||||
@ -0,0 +1,181 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Festo Solenoid Module Boilerplate Model
|
||||||
|
======================================
|
||||||
|
|
||||||
|
Model for Festo Solenoid modules (VAEM-L1-S-8).
|
||||||
|
Supports configuring module name, parent module, port ID (should be even for M12DR masters), port address, and basic settings.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class FestoSolenoidConfig:
    """Configuration for a Festo solenoid module instance (VAEM-L1-S-8).

    Values are spliced into the Festo boilerplate by
    FestoSolenoidGenerator; no validation happens at construction time.
    """
    name: str  # Module name (e.g., "UL11_13_SOL1")
    parent_module: str = "Master"  # Parent IO-Link master module
    parent_port_id: str = "4"  # Port on the IO-Link master (should be even for M12DR: 2,4,6,8)
    port_address: str = "0"  # IO-Link port address
    inhibited: bool = False  # Whether the module starts inhibited
    major_fault: bool = False  # presumably "MajorFaultOnError" semantics — confirm against L5X schema
    # Note: For M12DR masters, parent_port_id must be even (2,4,6,8)
|
||||||
|
|
||||||
|
|
||||||
|
class FestoSolenoidGenerator:
    """Generator for Festo solenoid module XML.

    Loads boilerplate/Festo_Solenoids_Module.L5X, rewrites name / parent /
    port fields from a FestoSolenoidConfig, and saves the result while
    restoring CDATA sections that ElementTree strips on parse.
    """

    def __init__(self, config: FestoSolenoidConfig):
        self.config = config
        self.boilerplate_path = os.path.join("boilerplate", "Festo_Solenoids_Module.L5X")
        self.tree = None  # ElementTree once load_boilerplate() has run
        self.root = None  # root Element of the loaded tree

    def load_boilerplate(self):
        """Load the Festo solenoid boilerplate template.

        Raises:
            FileNotFoundError: if the template file is missing.
        """
        if not os.path.exists(self.boilerplate_path):
            raise FileNotFoundError(f"Boilerplate file not found: {self.boilerplate_path}")

        self.tree = ET.parse(self.boilerplate_path)
        self.root = self.tree.getroot()

    def update_module_name(self):
        """Update the module name throughout the XML."""
        # Update in root attributes
        self.root.set("TargetName", self.config.name)

        # Update Module element
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("Name", self.config.name)

    def update_parent_module(self):
        """Update parent module references."""
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("ParentModule", self.config.parent_module)
            module.set("ParentModPortId", self.config.parent_port_id)

    def update_port_address(self):
        """Update the IO-Link port address."""
        port = self.root.find(".//Port[@Type='IO-Link']")
        if port is not None:
            port.set("Address", self.config.port_address)

    def update_inhibited_status(self):
        """Update the inhibited and major fault status."""
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            # Python bools are serialized as lowercase "true"/"false".
            module.set("Inhibited", str(self.config.inhibited).lower())
            module.set("MajorFault", str(self.config.major_fault).lower())

    def update_export_date(self):
        """Update the export date to current time."""
        export_date = datetime.now().strftime("%a %b %d %H:%M:%S %Y")
        self.root.set("ExportDate", export_date)

    def apply_updates(self):
        """Apply all updates to the boilerplate."""
        self.update_module_name()
        self.update_parent_module()
        self.update_port_address()
        self.update_inhibited_status()
        self.update_export_date()

    def save(self, output_path: str):
        """Save the updated module to file.

        ElementTree drops CDATA markers when parsing; L5K payloads
        ("[...]" or "(...)") are re-wrapped in <![CDATA[...]]> before
        writing.
        """
        if self.tree is None:
            raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")

        # Save with proper formatting and preserve CDATA sections
        xml_string = ET.tostring(self.root, encoding='unicode')

        # Fix CDATA wrapper for L5K data - ElementTree strips CDATA sections
        import re

        # Pattern to find L5K data that needs CDATA wrapper
        l5k_pattern = r'(<Data Format="L5K">)(\s*\[.*?\]|\s*\(.*?\))\s*(</Data>)'

        def replace_with_cdata(match):
            opening_tag = match.group(1)
            data_content = match.group(2).strip()
            closing_tag = match.group(3)
            # Add proper indentation and line breaks
            return f'{opening_tag}\n<![CDATA[{data_content}]]>\n{closing_tag}'

        # Apply CDATA wrapper to L5K data
        xml_string = re.sub(l5k_pattern, replace_with_cdata, xml_string, flags=re.DOTALL | re.MULTILINE)

        # Write the corrected XML
        with open(output_path, 'w', encoding='utf-8') as f:
            f.write('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n')
            f.write(xml_string)

    def get_xml_string(self) -> str:
        """Get the XML as a string."""
        if self.tree is None:
            raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")

        return ET.tostring(self.root, encoding='unicode')

    # ------------------------------------------------------------------
    # Convenience helper used by EnhancedMCMGenerator's factory dispatch.
    # ------------------------------------------------------------------

    @classmethod
    def from_mapping(cls, mapping: Dict[str, str]) -> "FestoSolenoidGenerator":
        """Create and fully configure a solenoid generator from the Excel-derived
        entry (a plain dict). The structure expected is the one produced in
        EnhancedMCMGenerator._organize_modules_by_type().

        Raises:
            ValueError: when an M12DR master is targeted on an odd port.
        """
        cfg = FestoSolenoidConfig(
            name=mapping["name"],
            parent_module=mapping["parent_module"],
            parent_port_id=mapping["parent_port_id"],
            port_address=mapping["port_address"],
        )

        # Optional: Validate even port for M12DR
        if "M12DR" in mapping.get("model", "") and int(cfg.parent_port_id) % 2 != 0:
            raise ValueError(f"Festo solenoid must connect to even port on M12DR, got {cfg.parent_port_id}")

        gen = cls(cfg)
        gen.load_boilerplate()
        gen.apply_updates()
        return gen
|
||||||
|
|
||||||
|
|
||||||
|
def create_festo_solenoid(name: str, parent_module: str = "Master", parent_port_id: str = "4",
                          port_address: str = "0") -> FestoSolenoidConfig:
    """Factory: assemble a Festo solenoid configuration from keyword settings."""
    options = {
        "name": name,
        "parent_module": parent_module,
        "parent_port_id": parent_port_id,
        "port_address": port_address,
    }
    return FestoSolenoidConfig(**options)
|
||||||
|
|
||||||
|
|
||||||
|
# Example usage
if __name__ == "__main__":
    # Example: Create a Festo solenoid with custom configuration
    demo_cfg = create_festo_solenoid(
        name="UL11_13_SOL1",
        parent_module="UL11_13_FIO1",
        parent_port_id="4",
        port_address="0",
    )

    gen = FestoSolenoidGenerator(demo_cfg)
    gen.load_boilerplate()
    gen.apply_updates()
    gen.save("generated/UL11_13_SOL1.L5X")

    print(f"Generated Festo solenoid module: {demo_cfg.name}")
    for label, value in (
        ("Parent module", demo_cfg.parent_module),
        ("Port", demo_cfg.parent_port_id),
        ("Address", demo_cfg.port_address),
    ):
        print(f"{label}: {value}")
|
||||||
162
IO Tree Configuration Generator/models/ib16_boilerplate_model.py
Normal file
162
IO Tree Configuration Generator/models/ib16_boilerplate_model.py
Normal file
@ -0,0 +1,162 @@
|
|||||||
|
"""
|
||||||
|
IB16 Digital Input Module L5X Generator (Boilerplate-based)
|
||||||
|
===========================================================
|
||||||
|
|
||||||
|
This module provides functionality to generate IB16 module L5X files by
|
||||||
|
loading a boilerplate template and modifying specific fields.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Optional, Dict
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from .base_boilerplate_model import BaseModuleConfig, BaseBoilerplateGenerator
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class IB16ModuleConfig(BaseModuleConfig):
    """Configuration for an IB16 digital-input module."""

    name: str
    # Template the generator starts from.
    boilerplate_path: str = "boilerplate/SLOT5_IB16_Module.L5X"
    # Chassis slot and backplane parent connection.
    slot_address: str = "5"
    parent_module: str = "Local"
    parent_port_id: str = "1"
    # Optional comment text for the 16 input points, keyed by bit index (0-15).
    input_device_names: Optional[Dict[int, str]] = None

    def get_updates(self):
        """Return the fields the generator applies, keyed by update name."""
        return dict(
            name=self.name,
            slot_address=self.slot_address,
            parent_module=self.parent_module,
            parent_port_id=self.parent_port_id,
            input_device_names=self.input_device_names,
        )
|
||||||
|
|
||||||
|
|
||||||
|
class IB16ModuleGenerator(BaseBoilerplateGenerator):
    """Generator for IB16 module L5X files driven by a boilerplate template."""

    def apply_updates(self):
        """Write the configured name/parent/slot values into the loaded XML."""
        cfg = self.config

        # The export target name mirrors the module name.
        self.root.set("TargetName", cfg.name)

        # Rename the target module and re-point it at its parent.
        target = self.root.find(".//Module[@Use='Target']")
        if target is not None:
            target.set("Name", cfg.name)
            target.set("ParentModule", cfg.parent_module)
            target.set("ParentModPortId", cfg.parent_port_id)

        # The ICP port address is the chassis slot number.
        backplane_port = self.root.find(".//Module[@Use='Target']/Ports/Port[@Type='ICP']")
        if backplane_port is not None:
            backplane_port.set("Address", cfg.slot_address)

        # Refresh the input-point comments last.
        self.update_comments()

    def update_comments(self):
        """Replace the template's input-point comments with configured names.

        The boilerplate comments are always cleared so callers can opt in to
        their own; when no mapping is supplied the section stays empty.
        """
        comments_node = self.root.find(".//Connection[@Name='StandardInput']/InputTag/Comments")
        if comments_node is None:
            return

        # Drop whatever the template shipped with.
        comments_node.clear()

        names = self.config.input_device_names
        if not names:
            return  # leave section empty (no comments)

        for bit, label in names.items():
            entry = ET.SubElement(comments_node, "Comment")
            entry.set("Operand", f".DATA.{bit}")
            entry.text = label

    @classmethod
    def from_mapping(cls, mapping: Dict[str, str], comments: Optional[Dict[int, str]] = None) -> "IB16ModuleGenerator":
        """Build a fully configured IB16 generator from an Excel-derived dict.

        The mapping layout is the one produced in
        EnhancedMCMGenerator._organize_modules_by_type().

        :param comments: Optional dict of input index (0-15) to comment text.
        """
        generator = cls(IB16ModuleConfig(
            name=mapping["name"],
            slot_address=mapping["slot_address"],
            parent_module="Local",
            parent_port_id="1",
            input_device_names=comments,
        ))
        generator.load_boilerplate()
        generator.apply_updates()
        return generator
|
||||||
|
|
||||||
|
|
||||||
|
# Factory function
def create_ib16_module(
    name: str,
    slot_address: str = "5",
    parent_module: str = "Local",
    parent_port_id: str = "1",
    input_device_names: Optional[Dict[int, str]] = None
) -> IB16ModuleConfig:
    """
    Create an IB16 module configuration.

    Args:
        name: Module name
        slot_address: Slot number in the chassis
        parent_module: Parent module name
        parent_port_id: Parent module port ID
        input_device_names: Dictionary mapping input index (0-15) to device names

    Returns:
        IB16ModuleConfig object
    """
    settings = {
        "name": name,
        "slot_address": slot_address,
        "parent_module": parent_module,
        "parent_port_id": parent_port_id,
        "input_device_names": input_device_names,
    }
    return IB16ModuleConfig(**settings)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Example usage: 16 input-point names, in bit order 0-15.
    example_inputs = dict(enumerate([
        "Emergency Stop",
        "Start Button",
        "Stop Button",
        "Reset Button",
        "Door Sensor",
        "Light Curtain",
        "Pressure Switch",
        "Flow Switch",
        "Level Sensor High",
        "Level Sensor Low",
        "Temperature Alarm",
        "Vibration Sensor",
        "Proximity Sensor 1",
        "Proximity Sensor 2",
        "Limit Switch Up",
        "Limit Switch Down",
    ]))

    config = create_ib16_module(
        name="SLOT4_IB16",
        slot_address="4",
        parent_module="Local",
        parent_port_id="1",
        input_device_names=example_inputs,
    )

    # Generate the module
    generator = IB16ModuleGenerator(config)
    generator.save_to_file("generated/SLOT4_IB16.L5X")
    print("Generated IB16 module configuration saved to generated/SLOT4_IB16.L5X")
|
||||||
@ -0,0 +1,166 @@
|
|||||||
|
"""
|
||||||
|
IB16S Safety Digital Input Module L5X Generator (Boilerplate-based)
|
||||||
|
==================================================================
|
||||||
|
|
||||||
|
This module provides functionality to generate IB16S safety module L5X files by
|
||||||
|
loading a boilerplate template and modifying specific fields.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Optional, Dict
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from .base_boilerplate_model import BaseModuleConfig, BaseBoilerplateGenerator
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class IB16SModuleConfig(BaseModuleConfig):
    """Configuration for an IB16S safety digital-input module."""

    name: str
    # Template the generator starts from.
    boilerplate_path: str = "boilerplate/SLOT7_IB16S_Module.L5X"
    # Chassis slot and backplane parent connection.
    slot_address: str = "7"
    parent_module: str = "Local"
    parent_port_id: str = "1"
    # Safety Network Number stamped on the module.
    safety_network: str = "16#0000_4c33_031d_8f1b"
    # Optional comment text for the 16 safety inputs, keyed by point index (0-15).
    safety_input_device_names: Optional[Dict[int, str]] = None

    def get_updates(self):
        """Return the fields the generator applies, keyed by update name."""
        return dict(
            name=self.name,
            slot_address=self.slot_address,
            parent_module=self.parent_module,
            parent_port_id=self.parent_port_id,
            safety_network=self.safety_network,
            safety_input_device_names=self.safety_input_device_names,
        )
|
||||||
|
|
||||||
|
|
||||||
|
class IB16SModuleGenerator(BaseBoilerplateGenerator):
    """Generator for IB16S safety module L5X files driven by a boilerplate template."""

    def apply_updates(self):
        """Write name/parent/slot/safety-network values into the loaded XML."""
        cfg = self.config

        # The export target name mirrors the module name.
        self.root.set("TargetName", cfg.name)

        # Rename the target module, re-point its parent, and stamp the SNN.
        target = self.root.find(".//Module[@Use='Target']")
        if target is not None:
            target.set("Name", cfg.name)
            target.set("ParentModule", cfg.parent_module)
            target.set("ParentModPortId", cfg.parent_port_id)
            target.set("SafetyNetwork", cfg.safety_network)

        # The ICP port address is the chassis slot number.
        backplane_port = self.root.find(".//Module[@Use='Target']/Ports/Port[@Type='ICP']")
        if backplane_port is not None:
            backplane_port.set("Address", cfg.slot_address)

        # Refresh the safety-input comments last.
        self.update_comments()

    def update_comments(self):
        """Replace the template's safety-input comments with configured names.

        Existing template comments are always cleared; when no mapping is
        supplied the section stays empty.
        """
        comments_node = self.root.find(".//Connection[@Name='SafetyInput']/InputTag/Comments")
        if comments_node is None:
            return

        comments_node.clear()

        names = self.config.safety_input_device_names
        if not names:
            return

        for point, label in names.items():
            entry = ET.SubElement(comments_node, "Comment")
            entry.set("Operand", f".PT{point:02d}.DATA")
            entry.text = label

    @classmethod
    def from_mapping(cls, mapping: Dict[str, str], comments: Optional[Dict[int, str]] = None) -> "IB16SModuleGenerator":
        """Build a fully configured IB16S generator from an Excel-derived dict.

        The mapping layout is the one produced in
        EnhancedMCMGenerator._organize_modules_by_type().

        :param comments: Optional dict of input index (0-15) to comment text.
        """
        generator = cls(IB16SModuleConfig(
            name=mapping["name"],
            slot_address=mapping["slot_address"],
            parent_module="Local",
            parent_port_id="1",
            safety_network=mapping.get("safety_network", "16#0000_4c33_031d_8f1b"),
            safety_input_device_names=comments,
        ))
        generator.load_boilerplate()
        generator.apply_updates()
        return generator
|
||||||
|
|
||||||
|
|
||||||
|
# Factory function
def create_ib16s_module(
    name: str,
    slot_address: str = "7",
    parent_module: str = "Local",
    parent_port_id: str = "1",
    safety_network: str = "16#0000_4c33_031d_8f1b",
    safety_input_device_names: Optional[Dict[int, str]] = None
) -> IB16SModuleConfig:
    """
    Create an IB16S safety module configuration.

    Args:
        name: Module name
        slot_address: Slot number in the chassis
        parent_module: Parent module name
        parent_port_id: Parent module port ID
        safety_network: Safety network ID
        safety_input_device_names: Dictionary mapping safety input index (0-15) to device names

    Returns:
        IB16SModuleConfig object
    """
    settings = {
        "name": name,
        "slot_address": slot_address,
        "parent_module": parent_module,
        "parent_port_id": parent_port_id,
        "safety_network": safety_network,
        "safety_input_device_names": safety_input_device_names,
    }
    return IB16SModuleConfig(**settings)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Example usage: 16 safety-input names, in point order 0-15.
    example_safety_inputs = dict(enumerate([
        "Emergency Stop Circuit 1",
        "Emergency Stop Circuit 2",
        "Safety Door 1 Closed",
        "Safety Door 2 Closed",
        "Light Curtain Area 1",
        "Light Curtain Area 2",
        "Safety Mat Zone 1",
        "Safety Mat Zone 2",
        "Two Hand Control Left",
        "Two Hand Control Right",
        "Pull Cord Switch 1",
        "Pull Cord Switch 2",
        "Safety Interlock 1",
        "Safety Interlock 2",
        "Guard Door Position",
        "Safety Reset Button",
    ]))

    config = create_ib16s_module(
        name="SLOT8_IB16S",
        slot_address="8",
        parent_module="Local",
        parent_port_id="1",
        safety_network="16#0000_4c33_031d_8f1b",
        safety_input_device_names=example_safety_inputs,
    )

    # Generate the module
    generator = IB16SModuleGenerator(config)
    generator.save_to_file("generated/SLOT8_IB16S.L5X")
    print("Generated IB16S safety module configuration saved to generated/SLOT8_IB16S.L5X")
|
||||||
@ -0,0 +1,94 @@
|
|||||||
|
"""
|
||||||
|
1756-L83ES Controller L5X Generator (Boilerplate-based)
|
||||||
|
======================================================
|
||||||
|
|
||||||
|
This module provides functionality to generate L83ES controller L5X files by
|
||||||
|
loading a boilerplate template and modifying specific fields.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from .base_boilerplate_model import BaseModuleConfig, BaseBoilerplateGenerator
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class L83ESControllerConfig(BaseModuleConfig):
    """Configuration for a 1756-L83ES safety controller."""

    name: str
    # Template the generator starts from.
    boilerplate_path: str = "boilerplate/SLOT0_L83ES.L5X"
    processor_type: str = "1756-L83ES"
    # Firmware major/minor revision.
    major_rev: str = "36"
    minor_rev: str = "11"

    def get_updates(self):
        """Return the fields the generator applies, keyed by update name."""
        return dict(
            name=self.name,
            processor_type=self.processor_type,
            major_rev=self.major_rev,
            minor_rev=self.minor_rev,
        )
|
||||||
|
|
||||||
|
|
||||||
|
class L83ESControllerGenerator(BaseBoilerplateGenerator):
    """Generator for L83ES controller L5X files using boilerplate template."""

    def apply_updates(self):
        """Apply L83ES-specific updates to the boilerplate.

        Sets the export target name plus the controller's name, processor
        type, and firmware revision. The backplane module is deliberately
        left untouched: it is always named "Local" regardless of the
        controller name. (A previous version re-set that module's Name to
        "Local" — a no-op, since the element was looked up by that exact
        name — so the dead code has been removed.)
        """
        # Update TargetName in root element
        self.root.set("TargetName", self.config.name)

        # Update Controller name and attributes
        controller = self.root.find(".//Controller[@Use='Target']")
        if controller is not None:
            controller.set("Name", self.config.name)
            controller.set("ProcessorType", self.config.processor_type)
            controller.set("MajorRev", self.config.major_rev)
            controller.set("MinorRev", self.config.minor_rev)
|
||||||
|
|
||||||
|
|
||||||
|
# Factory function
def create_l83es_controller(
    name: str,
    processor_type: str = "1756-L83ES",
    major_rev: str = "36",
    minor_rev: str = "11"
) -> L83ESControllerConfig:
    """
    Create an L83ES controller configuration.

    Args:
        name: Controller name
        processor_type: Processor type (default: "1756-L83ES")
        major_rev: Major revision
        minor_rev: Minor revision

    Returns:
        L83ESControllerConfig object
    """
    settings = {
        "name": name,
        "processor_type": processor_type,
        "major_rev": major_rev,
        "minor_rev": minor_rev,
    }
    return L83ESControllerConfig(**settings)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Example usage
    config = create_l83es_controller(
        name="MTN7",
        processor_type="1756-L83ES",
        major_rev="36",
        minor_rev="11",
    )

    # Generate the controller
    generator = L83ESControllerGenerator(config)
    generator.save_to_file("generated/MTN7_Controller.L5X")
    print("Generated L83ES controller configuration saved to generated/MTN7_Controller.L5X")
|
||||||
120
IO Tree Configuration Generator/models/lpe_boilerplate_model.py
Normal file
120
IO Tree Configuration Generator/models/lpe_boilerplate_model.py
Normal file
@ -0,0 +1,120 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
LPE Module Boilerplate Model
|
||||||
|
====================================
|
||||||
|
|
||||||
|
Model for LPE modules.
|
||||||
|
Supports configuring module name and parent module.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict, Optional
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
import os
|
||||||
|
|
||||||
|
@dataclass
class LPEModuleConfig:
    """Configuration for an LPE module."""

    name: str                # Module name
    parent_module: str       # Name of the parent IOLM module
    port_address: str = "0"  # IO-Link port address on the parent
|
||||||
|
|
||||||
|
class LPEBoilerplateGenerator:
    """Generator for LPE module XML based on a boilerplate template."""

    def __init__(self, name: str, parent_module: str, port_address: str = "0"):
        self.name = name
        self.parent_module = parent_module
        self.port_address = port_address
        # Template location; parsed lazily by load_boilerplate().
        self.boilerplate_path = os.path.join("boilerplate", "LPE_Module.L5X")
        self.tree = None
        self.root = None

    def load_boilerplate(self):
        """Parse the LPE boilerplate template from disk."""
        if not os.path.exists(self.boilerplate_path):
            raise FileNotFoundError(f"Boilerplate file not found: {self.boilerplate_path}")

        self.tree = ET.parse(self.boilerplate_path)
        self.root = self.tree.getroot()

    def _target_module(self):
        """Return the <Module Use='Target'> element, or None if absent."""
        return self.root.find(".//Module[@Use='Target']")

    def update_module_name(self):
        """Stamp the configured name on the export target and the module."""
        self.root.set("TargetName", self.name)
        module = self._target_module()
        if module is not None:
            module.set("Name", self.name)

    def update_parent_module(self):
        """Re-point the module at its parent IOLM."""
        module = self._target_module()
        if module is not None:
            module.set("ParentModule", self.parent_module)
            # All LPE modules are children of IOLM, so ParentModPortId is always 4 (IO-Link Port)
            module.set("ParentModPortId", "4")

    def update_port_address(self):
        """Set the IO-Link port address."""
        port = self.root.find(".//Port[@Type='IO-Link']")
        if port is not None:
            port.set("Address", self.port_address)

    def update_export_date(self):
        """Stamp the root ExportDate with the current local time."""
        self.root.set("ExportDate", datetime.now().strftime("%a %b %d %H:%M:%S %Y"))

    def apply_updates(self):
        """Run every field update against the loaded template."""
        self.update_module_name()
        self.update_parent_module()
        self.update_port_address()
        self.update_export_date()

    def save(self, output_path: str):
        """Write the updated module XML to *output_path*."""
        if self.tree is None:
            raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")

        self.tree.write(output_path, encoding='UTF-8', xml_declaration=True)

    def get_xml_string(self) -> str:
        """Return the updated module XML as a unicode string."""
        if self.tree is None:
            raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")

        return ET.tostring(self.root, encoding='unicode')

    @classmethod
    def from_mapping(cls, mapping: Dict[str, str]) -> "LPEBoilerplateGenerator":
        """Create, configure, and return a generator from an entry in
        `EnhancedMCMGenerator.lpe_modules`."""
        gen = cls(
            name=mapping["name"],
            parent_module=mapping["parent_module"],
            port_address=mapping["port_address"],
        )
        gen.load_boilerplate()
        gen.apply_updates()
        return gen
|
||||||
|
|
||||||
|
def create_lpe_module(config: LPEModuleConfig) -> LPEBoilerplateGenerator:
    """Factory function to create and configure an LPE module generator.

    Args:
        config: Configuration for the LPE module

    Returns:
        Configured LPE module generator
    """
    gen = LPEBoilerplateGenerator(
        name=config.name,
        parent_module=config.parent_module,
        port_address=config.port_address,
    )
    gen.load_boilerplate()
    gen.apply_updates()
    return gen
|
||||||
@ -0,0 +1,494 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
M12DR Module Boilerplate Model
|
||||||
|
==============================
|
||||||
|
|
||||||
|
Model for M12DR (5032-8IOLM12DR/A) modules with support for different configurations.
|
||||||
|
Supports PalletBuildMaster and D2CMaster variants.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict, Optional, TYPE_CHECKING
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from excel_data_processor import ModuleData, IOPathMapping
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class M12DRModuleConfig:
    """Configuration for a single M12DR (5032-8IOLM12DR/A) module instance."""

    name: str     # Module name (e.g., "PalletBuildMaster1")
    variant: str  # Module variant: "PalletBuildMaster" or "D2CMaster"
    ip_address: str = "192.168.1.1"
    parent_module: str = "SLOT1_EN4TR"
    parent_port_id: str = "2"
    inhibited: bool = False
    major_fault: bool = False
    # Comment maps keyed by operand, e.g. ".IOLINK00" (inputs) / ".PT07" (outputs).
    input_comments: Optional[Dict[str, str]] = None
    output_comments: Optional[Dict[str, str]] = None
|
||||||
|
|
||||||
|
|
||||||
|
class M12DRModuleGenerator:
|
||||||
|
"""Generator for M12DR module XML with different variant support."""
|
||||||
|
|
||||||
|
# Mapping of variants to default boilerplate filenames (fallback)
|
||||||
|
VARIANT_BOILERPLATE_MAP = {
|
||||||
|
"PalletBuildMaster": "PalletBuildMaster_Module.L5X",
|
||||||
|
"D2CMaster": "D2CMaster_Module.L5X",
|
||||||
|
"PDP_FIO": "PDP_FIO_Module.L5X",
|
||||||
|
#"UL_FIO": "PDP_FIO_Module.L5X", # Fallback to PDP_FIO if specific UL boilerplate not found
|
||||||
|
#"FIO_GENERIC": "PDP_FIO_Module.L5X" # Fallback to PDP_FIO for generic FIO modules
|
||||||
|
}
|
||||||
|
|
||||||
|
def __init__(self, config: M12DRModuleConfig):
|
||||||
|
self.config = config
|
||||||
|
|
||||||
|
# Determine the correct boilerplate file
|
||||||
|
self.boilerplate_filename = self._determine_boilerplate_filename()
|
||||||
|
self.boilerplate_path = os.path.join("boilerplate", self.boilerplate_filename)
|
||||||
|
self.tree = None
|
||||||
|
self.root = None
|
||||||
|
|
||||||
|
# Cache for operand patterns extracted from boilerplate
|
||||||
|
self._operand_patterns = None
|
||||||
|
|
||||||
|
def _determine_boilerplate_filename(self) -> str:
|
||||||
|
"""Determine the boilerplate filename to use.
|
||||||
|
|
||||||
|
Priority:
|
||||||
|
1. Check for module-specific boilerplate: {module_name}_Module.L5X
|
||||||
|
2. Fall back to variant-based boilerplate
|
||||||
|
"""
|
||||||
|
# First, try module-specific boilerplate
|
||||||
|
module_specific_filename = f"{self.config.name}_Module.L5X"
|
||||||
|
module_specific_path = os.path.join("boilerplate", module_specific_filename)
|
||||||
|
|
||||||
|
if os.path.exists(module_specific_path):
|
||||||
|
print(f" {self.config.name} (FIO {self.config.variant}): Using module-specific boilerplate {module_specific_filename}")
|
||||||
|
return module_specific_filename
|
||||||
|
|
||||||
|
# Fall back to variant-based boilerplate
|
||||||
|
if self.config.variant not in self.VARIANT_BOILERPLATE_MAP:
|
||||||
|
raise ValueError(f"Unsupported variant: {self.config.variant}. Supported variants: {list(self.VARIANT_BOILERPLATE_MAP.keys())}")
|
||||||
|
|
||||||
|
fallback_filename = self.VARIANT_BOILERPLATE_MAP[self.config.variant]
|
||||||
|
print(f" {self.config.name} (FIO {self.config.variant}): Using variant boilerplate {fallback_filename}")
|
||||||
|
return fallback_filename
|
||||||
|
|
||||||
|
def load_boilerplate(self):
|
||||||
|
"""Load the appropriate boilerplate template based on variant."""
|
||||||
|
if not os.path.exists(self.boilerplate_path):
|
||||||
|
raise FileNotFoundError(f"Boilerplate file not found: {self.boilerplate_path}")
|
||||||
|
|
||||||
|
self.tree = ET.parse(self.boilerplate_path)
|
||||||
|
self.root = self.tree.getroot()
|
||||||
|
|
||||||
|
def _extract_operand_patterns(self) -> Dict[str, str]:
|
||||||
|
"""Extract operand patterns from the boilerplate XML structure.
|
||||||
|
|
||||||
|
Returns dict mapping terminal number to operand format:
|
||||||
|
e.g., {"04": ".PT04.DATA", "00": ".IOLINK00"}
|
||||||
|
"""
|
||||||
|
if self._operand_patterns is not None:
|
||||||
|
return self._operand_patterns
|
||||||
|
|
||||||
|
patterns = {}
|
||||||
|
|
||||||
|
# Find InputTag and OutputTag structures
|
||||||
|
input_tag = self.root.find(".//Connection[@Name='Data']/InputTag")
|
||||||
|
output_tag = self.root.find(".//Connection[@Name='Data']/OutputTag")
|
||||||
|
|
||||||
|
# Get variant-specific formatting rules
|
||||||
|
use_data_suffix = self._should_use_data_suffix()
|
||||||
|
|
||||||
|
# Process InputTag structure
|
||||||
|
if input_tag is not None:
|
||||||
|
structure = input_tag.find(".//Structure")
|
||||||
|
if structure is not None:
|
||||||
|
self._extract_patterns_from_structure(structure, patterns, use_data_suffix)
|
||||||
|
|
||||||
|
# Process OutputTag structure
|
||||||
|
if output_tag is not None:
|
||||||
|
structure = output_tag.find(".//Structure")
|
||||||
|
if structure is not None:
|
||||||
|
self._extract_patterns_from_structure(structure, patterns, use_data_suffix)
|
||||||
|
|
||||||
|
self._operand_patterns = patterns
|
||||||
|
return patterns
|
||||||
|
|
||||||
|
def _should_use_data_suffix(self) -> bool:
|
||||||
|
"""Determine if this variant should use .DATA suffix for digital I/O based on variant type."""
|
||||||
|
# PDP_FIO uses .DATA suffix for digital I/O
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _extract_patterns_from_structure(self, structure: ET.Element, patterns: Dict[str, str], use_data_suffix: bool):
|
||||||
|
"""Extract operand patterns from a Structure element."""
|
||||||
|
for member in structure.findall("StructureMember"):
|
||||||
|
name = member.get("Name")
|
||||||
|
data_type = member.get("DataType")
|
||||||
|
|
||||||
|
if not name or not data_type:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Handle IOLink channels
|
||||||
|
if name.startswith("IOLink") and "IOL" in data_type:
|
||||||
|
# Extract number from IOLink00, IOLink02, etc.
|
||||||
|
number_match = re.search(r'IOLink(\d+)', name)
|
||||||
|
if number_match:
|
||||||
|
number = number_match.group(1)
|
||||||
|
patterns[number] = f".IOLINK{number}"
|
||||||
|
|
||||||
|
# Handle Point channels (digital I/O)
|
||||||
|
elif name.startswith("Pt") and ("DI" in data_type or "DO" in data_type):
|
||||||
|
# Extract number from Pt04, Pt05, etc.
|
||||||
|
number_match = re.search(r'Pt(\d+)', name)
|
||||||
|
if number_match:
|
||||||
|
number = number_match.group(1)
|
||||||
|
if use_data_suffix:
|
||||||
|
patterns[number] = f".PT{number}.DATA"
|
||||||
|
else:
|
||||||
|
patterns[number] = f".PT{number}"
|
||||||
|
|
||||||
|
def _convert_terminal_to_operand_dynamic(self, terminal: str, signal: str) -> str:
|
||||||
|
"""Convert terminal and signal to operand using patterns from boilerplate.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
terminal: Terminal like "IO4", "IO00", etc.
|
||||||
|
signal: Signal type like "I", "O", "IOLINK", "SPARE"
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Operand string like ".PT04.DATA", ".IOLINK00", etc.
|
||||||
|
"""
|
||||||
|
if not terminal or not signal:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
# Get operand patterns from boilerplate
|
||||||
|
patterns = self._extract_operand_patterns()
|
||||||
|
|
||||||
|
terminal = terminal.upper().strip()
|
||||||
|
signal = signal.upper().strip()
|
||||||
|
|
||||||
|
# Extract number from terminal (IO4 -> 04, IO00 -> 00, etc.)
|
||||||
|
if terminal.startswith("IO"):
|
||||||
|
try:
|
||||||
|
terminal_num = terminal[2:] # Remove "IO" prefix
|
||||||
|
# Pad to 2 digits if needed
|
||||||
|
terminal_num = terminal_num.zfill(2)
|
||||||
|
|
||||||
|
# For IOLINK signals, look for IOLINK pattern
|
||||||
|
if signal == "IOLINK":
|
||||||
|
iolink_operand = f".IOLINK{terminal_num}"
|
||||||
|
if terminal_num in patterns and patterns[terminal_num] == iolink_operand:
|
||||||
|
return iolink_operand
|
||||||
|
|
||||||
|
# For digital I/O signals, look for PT pattern
|
||||||
|
if signal in ("I", "O", "SPARE"):
|
||||||
|
# Check if we have a pattern for this terminal
|
||||||
|
if terminal_num in patterns:
|
||||||
|
return patterns[terminal_num]
|
||||||
|
# Fallback to .PT format if no pattern found
|
||||||
|
return f".PT{terminal_num}.DATA"
|
||||||
|
|
||||||
|
except (ValueError, IndexError):
|
||||||
|
return ""
|
||||||
|
|
||||||
|
# Handle direct operand format (starts with .)
|
||||||
|
if terminal.startswith('.'):
|
||||||
|
return terminal
|
||||||
|
|
||||||
|
return ""
|
||||||
|
|
||||||
|
def update_module_name(self):
|
||||||
|
"""Update the module name throughout the XML."""
|
||||||
|
# Update in root attributes
|
||||||
|
self.root.set("TargetName", self.config.name)
|
||||||
|
|
||||||
|
# Update Module element
|
||||||
|
module = self.root.find(".//Module[@Use='Target']")
|
||||||
|
if module is not None:
|
||||||
|
module.set("Name", self.config.name)
|
||||||
|
|
||||||
|
def update_ip_address(self):
|
||||||
|
"""Update the IP address in the Ethernet port."""
|
||||||
|
port = self.root.find(".//Port[@Type='Ethernet']")
|
||||||
|
if port is not None:
|
||||||
|
port.set("Address", self.config.ip_address)
|
||||||
|
|
||||||
|
def update_parent_module(self):
|
||||||
|
"""Update parent module references."""
|
||||||
|
module = self.root.find(".//Module[@Use='Target']")
|
||||||
|
if module is not None:
|
||||||
|
module.set("ParentModule", self.config.parent_module)
|
||||||
|
module.set("ParentModPortId", self.config.parent_port_id)
|
||||||
|
|
||||||
|
def update_input_comments(self):
|
||||||
|
"""Update input tag comments."""
|
||||||
|
input_tag = self.root.find(".//Connection[@Name='Data']/InputTag")
|
||||||
|
if input_tag is not None and self.config.input_comments:
|
||||||
|
# Find or create Comments section
|
||||||
|
comments_section = input_tag.find("Comments")
|
||||||
|
if comments_section is None:
|
||||||
|
# Create Comments section as the first child
|
||||||
|
comments_section = ET.Element("Comments")
|
||||||
|
input_tag.insert(0, comments_section)
|
||||||
|
else:
|
||||||
|
# Clear existing comments
|
||||||
|
comments_section.clear()
|
||||||
|
|
||||||
|
# Add new comments
|
||||||
|
for operand, comment_text in self.config.input_comments.items():
|
||||||
|
comment = ET.SubElement(comments_section, "Comment")
|
||||||
|
comment.set("Operand", operand)
|
||||||
|
comment.text = comment_text
|
||||||
|
|
||||||
|
def update_output_comments(self):
    """Rebuild the ``<Comments>`` block of the Data connection's OutputTag.

    Mirrors :meth:`update_input_comments` for the output side: template
    comments are replaced wholesale with ``config.output_comments``, and
    a ``<Comments>`` element is created first-child when missing.
    """
    tag = self.root.find(".//Connection[@Name='Data']/OutputTag")
    if tag is None or not self.config.output_comments:
        return
    comments = tag.find("Comments")
    if comments is not None:
        # Drop the template's comments but keep the element in place.
        comments.clear()
    else:
        comments = ET.Element("Comments")
        tag.insert(0, comments)
    for operand, text in self.config.output_comments.items():
        entry = ET.SubElement(comments, "Comment")
        entry.set("Operand", operand)
        entry.text = text
||||||
|
def update_export_date(self):
    """Stamp the root element's ``ExportDate`` with the current time.

    Uses the RSLogix-style timestamp format, e.g. "Mon Jan 01 12:00:00 2024".
    """
    stamp = datetime.now().strftime("%a %b %d %H:%M:%S %Y")
    self.root.set("ExportDate", stamp)
|
||||||
|
def apply_updates(self):
    """Apply all configured updates to the loaded boilerplate tree.

    Each helper writes to a distinct part of the tree (name, IP,
    parent references, input/output comments, export timestamp), so
    this simply runs every updater once.
    """
    self.update_module_name()
    self.update_ip_address()
    self.update_parent_module()
    self.update_input_comments()
    self.update_output_comments()
    self.update_export_date()
|
||||||
|
def save(self, output_path: str):
    """Pretty-print the tree and write it to *output_path* as UTF-8 XML.

    Raises:
        RuntimeError: if ``load_boilerplate()`` has not been called.
    """
    if self.tree is None:
        raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")
    # Make the serialized L5X human-readable before writing.
    self._indent_xml(self.root)
    self.tree.write(output_path, encoding='UTF-8', xml_declaration=True)
|
||||||
|
def _indent_xml(self, elem, level=0):
|
||||||
|
"""Add proper indentation to XML elements for readable output."""
|
||||||
|
indent = "\n" + level * " "
|
||||||
|
if len(elem):
|
||||||
|
if not elem.text or not elem.text.strip():
|
||||||
|
elem.text = indent + " "
|
||||||
|
if not elem.tail or not elem.tail.strip():
|
||||||
|
elem.tail = indent
|
||||||
|
for child in elem:
|
||||||
|
self._indent_xml(child, level + 1)
|
||||||
|
if not child.tail or not child.tail.strip():
|
||||||
|
child.tail = indent
|
||||||
|
else:
|
||||||
|
if level and (not elem.tail or not elem.tail.strip()):
|
||||||
|
elem.tail = indent
|
||||||
|
|
||||||
|
def get_xml_string(self) -> str:
    """Return the current tree serialized as a unicode string.

    Raises:
        RuntimeError: if no boilerplate has been loaded yet.
    """
    if self.tree is not None:
        return ET.tostring(self.root, encoding='unicode')
    raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# High-level helper for generator refactor
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
@classmethod
def from_excel(
    cls,
    module_data: "ModuleData",
    *,
    parent_module: str = "SLOT2_EN4TR",
    parent_port_id: str = "2",
) -> "M12DRModuleGenerator":
    """Create, configure, and return a generator using only Excel data.

    The calling code can then directly access ``generator.root`` or save
    the file. It fully replaces the manual logic previously present in
    EnhancedMCMGenerator._add_iolm_modules.

    Args:
        module_data: Excel-derived module description (tagname, IP,
            io_mappings with terminal/signal/description/io_path).
        parent_module: Name of the parent comms module to attach under.
        parent_port_id: Port on the parent module.

    Returns:
        A fully-configured generator with all updates already applied.
    """
    # Variant selection is driven by tagname / terminal descriptions.
    variant = _determine_variant(module_data)

    # Build config first; the generator is needed to resolve operands.
    config = M12DRModuleConfig(
        name=module_data.tagname,
        variant=variant,
        ip_address=module_data.ip_address or "192.168.1.1",  # fallback default
        parent_module=parent_module,
        parent_port_id=parent_port_id,
    )

    gen = cls(config)
    gen.load_boilerplate()

    # Translate each Excel IO mapping into an operand comment, split by
    # direction. Mappings without a resolvable operand or description
    # are skipped silently.
    input_comments: Dict[str, str] = {}
    output_comments: Dict[str, str] = {}

    for m in module_data.io_mappings:
        operand = gen._convert_terminal_to_operand_dynamic(m.terminal, m.signal)
        if not operand:
            continue
        # Normalise "spare" descriptions to the canonical upper-case form.
        comment_text = "SPARE" if m.description and m.description.upper() == "SPARE" else m.description
        if not comment_text:
            continue
        if _is_output_signal(m.signal, m.io_path):
            output_comments[operand] = comment_text
        else:
            input_comments[operand] = comment_text

    # Empty dicts collapse to None so update_*_comments leave the
    # template comments untouched when nothing was mapped.
    gen.config.input_comments = input_comments if input_comments else None
    gen.config.output_comments = output_comments if output_comments else None

    gen.apply_updates()
    return gen
|
||||||
|
|
||||||
|
|
||||||
|
def create_m12dr_module(name: str, variant: str, ip_address: str = "192.168.1.1",
                        parent_module: str = "SLOT1_EN4TR", parent_port_id: str = "2",
                        input_comments: Optional[Dict[str, str]] = None,
                        output_comments: Optional[Dict[str, str]] = None) -> M12DRModuleConfig:
    """Build an M12DR module configuration from keyword-style arguments.

    Note: input_comments and output_comments default to None (no comments).
    Use the get_*_default_*_comments() helper functions for ready-made
    comment templates.
    """
    settings = dict(
        name=name,
        variant=variant,
        ip_address=ip_address,
        parent_module=parent_module,
        parent_port_id=parent_port_id,
        input_comments=input_comments,
        output_comments=output_comments,
    )
    return M12DRModuleConfig(**settings)
|
||||||
|
|
||||||
|
|
||||||
|
# Helper functions to get default comment structures for PDP_FIO variant
|
||||||
|
def get_pdp_fio_default_input_comments() -> Dict[str, str]:
    """Return the default input-comment template for the PDP_FIO variant.

    Eleven ``.PTxx.DATA`` points (skipping the output points PT05, PT07
    and PT13) plus the IO-Link smart-device channel.
    """
    input_points = (0, 1, 2, 3, 4, 6, 8, 9, 10, 11, 12)
    comments = {
        f".PT{pt:02d}.DATA": f"Input {n}"
        for n, pt in enumerate(input_points, start=1)
    }
    comments[".IOLINK14"] = "Smart Device"
    return comments
||||||
|
|
||||||
|
|
||||||
|
def get_pdp_fio_default_output_comments() -> Dict[str, str]:
    """Return the default output-comment template for the PDP_FIO variant."""
    return {
        f".PT{pt:02d}.DATA": f"Output {n}"
        for n, pt in enumerate((5, 7, 13), start=1)
    }
|
||||||
|
|
||||||
|
|
||||||
|
# --------------------------------------------------------------------------------------
|
||||||
|
# Utility helpers (ported from EnhancedMCMGenerator for 100 % behaviour parity)
|
||||||
|
# --------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def _determine_variant(module_data: "ModuleData") -> str:
|
||||||
|
"""Determine M12DR variant based on module name and FIOH patterns.
|
||||||
|
|
||||||
|
Logic:
|
||||||
|
1. If module name contains "PDP", use PDP_FIO variant
|
||||||
|
2. If FIOH is found in IO4 or IO12 descriptions, use PalletBuildMaster
|
||||||
|
3. Otherwise, use D2CMaster
|
||||||
|
"""
|
||||||
|
# Check if module name contains PDP
|
||||||
|
if "PDP" in module_data.tagname.upper():
|
||||||
|
return "PDP_FIO"
|
||||||
|
|
||||||
|
# Check for FIOH in IO4 or IO12 descriptions
|
||||||
|
terminal_desc: Dict[str, str] = {}
|
||||||
|
for m in module_data.io_mappings:
|
||||||
|
if m.terminal and m.description:
|
||||||
|
terminal_desc[m.terminal.upper()] = m.description.upper()
|
||||||
|
|
||||||
|
io12 = terminal_desc.get("IO12", "")
|
||||||
|
io4 = terminal_desc.get("IO4", "")
|
||||||
|
|
||||||
|
# If any of the IO4/IO12 terminals contains FIOH, then choose PalletBuildMaster
|
||||||
|
if any("FIOH" in t for t in (io12, io4)):
|
||||||
|
return "PalletBuildMaster"
|
||||||
|
|
||||||
|
# Default to D2CMaster for all other cases
|
||||||
|
return "D2CMaster"
|
||||||
|
|
||||||
|
|
||||||
|
def _is_output_signal(signal: str, io_path: str) -> bool:
|
||||||
|
if not signal:
|
||||||
|
return False
|
||||||
|
s = signal.upper().strip()
|
||||||
|
if s == "O":
|
||||||
|
return True
|
||||||
|
if s in ("I", "IOLINK"):
|
||||||
|
return False
|
||||||
|
if io_path and ":O." in io_path.upper():
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
# --------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# Example usage
|
||||||
|
if __name__ == "__main__":
    # Demo: build a PDP_FIO-variant module with explicit comment maps.
    # (Requires boilerplate/ to be present; writes into generated/.)
    pdp_fio_config = create_m12dr_module(
        name="PDP_FIO1",
        variant="PDP_FIO",
        ip_address="123.121.231.231",
        input_comments={
            ".PT00.DATA": "Emergency Stop",
            ".PT01.DATA": "Start Button",
            ".PT02.DATA": "Reset Button",
            ".IOLINK14": "Smart Sensor"
        },
        output_comments={
            ".PT05.DATA": "Status Light",
            ".PT07.DATA": "Warning Light",
            ".PT13.DATA": "Alarm Horn"
        }
    )

    # Standard generation pipeline: load template, apply config, save.
    generator = M12DRModuleGenerator(pdp_fio_config)
    generator.load_boilerplate()
    generator.apply_updates()
    generator.save("generated/PDP_FIO1.L5X")

    print(f"Generated PDP_FIO module: {pdp_fio_config.name}")
|
||||||
@ -0,0 +1,159 @@
|
|||||||
|
"""
|
||||||
|
OB16E Digital Output Module L5X Generator (Boilerplate-based)
|
||||||
|
============================================================
|
||||||
|
|
||||||
|
This module provides functionality to generate OB16E module L5X files by
|
||||||
|
loading a boilerplate template and modifying specific fields.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Optional, Dict
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from .base_boilerplate_model import BaseModuleConfig, BaseBoilerplateGenerator
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class OB16EModuleConfig(BaseModuleConfig):
    """Configuration for an OB16E digital output module."""
    name: str
    boilerplate_path: str = "boilerplate/SLOT6_OB16E_Module.L5X"
    slot_address: str = "6"
    parent_module: str = "Local"
    parent_port_id: str = "1"
    # Optional names for the 16 output points, keyed by index 0-15.
    output_device_names: Optional[Dict[int, str]] = None

    def get_updates(self):
        """Return the update values keyed by field name."""
        keys = ("name", "slot_address", "parent_module",
                "parent_port_id", "output_device_names")
        return {key: getattr(self, key) for key in keys}
|
||||||
|
|
||||||
|
|
||||||
|
class OB16EModuleGenerator(BaseBoilerplateGenerator):
    """Generator for OB16E module L5X files using a boilerplate template.

    Loading/saving behaviour is inherited from BaseBoilerplateGenerator;
    this class only implements the OB16E-specific field updates.
    """

    def apply_updates(self):
        """Apply OB16E-specific updates to the loaded boilerplate tree."""
        # Update TargetName in root element
        self.root.set("TargetName", self.config.name)

        # Update Module name and parent info on the target module.
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("Name", self.config.name)
            module.set("ParentModule", self.config.parent_module)
            module.set("ParentModPortId", self.config.parent_port_id)

        # Update chassis slot number on the ICP (backplane) port.
        icp_port = self.root.find(".//Module[@Use='Target']/Ports/Port[@Type='ICP']")
        if icp_port is not None:
            icp_port.set("Address", self.config.slot_address)

        # Update output device comments.
        self.update_comments()

    def update_comments(self):
        """Replace the output-point comments of the 'Fused' connection.

        Template comments are ALWAYS cleared (even when no names are
        configured) so stale boilerplate text never leaks into output;
        new comments are added only when output_device_names is set.
        """
        output_comments = self.root.find(".//Connection[@Name='Fused']/OutputTag/Comments")
        if output_comments is None:
            return

        output_comments.clear()

        if not self.config.output_device_names:
            return

        for index, text in self.config.output_device_names.items():
            comment = ET.SubElement(output_comments, "Comment")
            comment.set("Operand", f".DATA.{index}")
            comment.text = text

    @classmethod
    def from_mapping(cls, mapping: Dict[str, str], comments: Optional[Dict[int, str]] = None) -> "OB16EModuleGenerator":
        """Create and fully configure an OB16E generator from the Excel-derived
        `modules` entry (a plain dict). The structure expected is the one
        produced in EnhancedMCMGenerator._organize_modules_by_type().

        :param mapping: dict with at least "name" and "slot_address" keys.
        :param comments: Optional dict of output index (0-15) to comment text.
        :returns: A generator with the boilerplate loaded and updates applied.
        """
        cfg = OB16EModuleConfig(
            name=mapping["name"],
            slot_address=mapping["slot_address"],
            parent_module="Local",
            parent_port_id="1",
            output_device_names=comments,
        )

        gen = cls(cfg)
        gen.load_boilerplate()
        gen.apply_updates()
        return gen
|
||||||
|
|
||||||
|
|
||||||
|
# Factory function
|
||||||
|
def create_ob16e_module(
    name: str,
    slot_address: str = "6",
    parent_module: str = "Local",
    parent_port_id: str = "1",
    output_device_names: Optional[Dict[int, str]] = None
) -> OB16EModuleConfig:
    """Build an OB16E module configuration.

    Args:
        name: Module name.
        slot_address: Slot number in the chassis.
        parent_module: Parent module name.
        parent_port_id: Parent module port ID.
        output_device_names: Optional mapping of output index (0-15)
            to device names.

    Returns:
        The populated OB16EModuleConfig.
    """
    settings = dict(
        name=name,
        slot_address=slot_address,
        parent_module=parent_module,
        parent_port_id=parent_port_id,
        output_device_names=output_device_names,
    )
    return OB16EModuleConfig(**settings)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Demo: configure an OB16E in slot 3 with all 16 outputs named.
    config = create_ob16e_module(
        name="SLOT3_OB16E",
        slot_address="3",
        parent_module="Local",
        parent_port_id="1",
        output_device_names={
            0: "Motor 1 Starter",
            1: "Motor 2 Starter",
            2: "Pump Control",
            3: "Valve 1 Open",
            4: "Valve 2 Close",
            5: "Alarm Horn",
            6: "Status Light Red",
            7: "Status Light Green",
            8: "Conveyor Start",
            9: "Fan Control",
            10: "Heater Enable",
            11: "Solenoid 1",
            12: "Solenoid 2",
            13: "Brake Release",
            14: "Emergency Light",
            15: "System Ready Lamp"
        }
    )

    # Generate the module.
    generator = OB16EModuleGenerator(config)
    # NOTE(review): unlike the other model demos, this never calls
    # load_boilerplate()/apply_updates() — presumably
    # BaseBoilerplateGenerator.save_to_file() does both; confirm
    # against the base class.
    generator.save_to_file("generated/SLOT3_OB16E.L5X")
    print(f"Generated OB16E module configuration saved to generated/SLOT3_OB16E.L5X")
|
||||||
181
IO Tree Configuration Generator/models/pmm_boilerplate_model.py
Normal file
181
IO Tree Configuration Generator/models/pmm_boilerplate_model.py
Normal file
@ -0,0 +1,181 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
PMM (Power Monitoring Module) Boilerplate Model
|
||||||
|
===============================================
|
||||||
|
|
||||||
|
Model for 1420-V2-ENT Power Monitoring modules.
|
||||||
|
Supports name, IP address, and parent module configuration.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Optional, TYPE_CHECKING
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
import os
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from excel_data_processor import ModuleData
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class PMMModuleConfig:
    """Configuration for a PMM (Power Monitoring Module) instance.

    Describes a 1420-V2-ENT power monitor hanging off an Ethernet
    comms module.
    """
    name: str  # Module name (e.g., "CP01_V2_ENT_Voltage_Monitor")
    ip_address: str = "11.200.1.2"  # Ethernet address written into the L5X
    parent_module: str = "SLOT2_EN4TR"  # name of the parent comms module
    parent_port_id: str = "2"  # port on the parent the module attaches to
|
|
||||||
|
|
||||||
|
class PMMModuleGenerator:
    """Generates PMM (Power Monitoring Module) configurations from boilerplate.

    Typical usage::

        gen = PMMModuleGenerator(config)
        gen.load_boilerplate()
        gen.apply_updates()
        gen.save("generated/MyPMM.L5X")
    """

    def __init__(self, config: "PMMModuleConfig"):
        self.config = config
        self.boilerplate_filename = "PMM_Module.L5X"
        self.boilerplate_path = os.path.join("boilerplate", self.boilerplate_filename)
        # Populated by load_boilerplate():
        self.tree = None
        self.root = None

    def load_boilerplate(self):
        """Load the boilerplate XML file.

        Raises:
            FileNotFoundError: if the template file does not exist.
        """
        if not os.path.exists(self.boilerplate_path):
            raise FileNotFoundError(f"Boilerplate file not found: {self.boilerplate_path}")

        self.tree = ET.parse(self.boilerplate_path)
        self.root = self.tree.getroot()

    def update_module_name(self):
        """Update the module name (target Module element + root TargetName)."""
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("Name", self.config.name)

        self.root.set("TargetName", self.config.name)

    def update_ip_address(self):
        """Update the IP address on the Ethernet port."""
        port = self.root.find(".//Port[@Type='Ethernet']")
        if port is not None:
            port.set("Address", self.config.ip_address)

    def update_parent_module(self):
        """Update the parent module / port references on the target module."""
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("ParentModule", self.config.parent_module)
            module.set("ParentModPortId", self.config.parent_port_id)

    def update_export_date(self):
        """Stamp the root element with the current time as ExportDate."""
        export_date = datetime.now().strftime("%a %b %d %H:%M:%S %Y")
        self.root.set("ExportDate", export_date)

    def apply_updates(self):
        """Apply all configuration updates (name, IP, parent, export date)."""
        self.update_module_name()
        self.update_ip_address()
        self.update_parent_module()
        self.update_export_date()

    def save(self, output_path: str):
        """Save the configured module to *output_path*.

        Creates the target directory when needed.

        Raises:
            ValueError: if load_boilerplate() has not been called.
        """
        if self.tree is None:
            raise ValueError("No boilerplate loaded. Call load_boilerplate() first.")

        # Create output directory if it doesn't exist.
        output_dir = os.path.dirname(output_path)
        if output_dir and not os.path.exists(output_dir):
            os.makedirs(output_dir)

        # BUG FIX: ElementTree.write(..., encoding='UTF-8') emits its own XML
        # declaration (the encoding string 'UTF-8' does not match the library's
        # lowercase 'utf-8' suppression check), so the previous code produced a
        # file with TWO declarations — invalid XML. Suppress the library's
        # declaration and keep the explicit standalone="yes" header that
        # RSLogix expects.
        with open(output_path, 'wb') as f:
            f.write(b'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n')
            self.tree.write(f, encoding='UTF-8', xml_declaration=False)

    # ------------------------------------------------------------------
    # Convenience helper for generator refactor
    # ------------------------------------------------------------------

    @classmethod
    def from_excel(cls, module_data: 'ModuleData') -> 'PMMModuleGenerator':
        """Create and return a generator configured from Excel ModuleData.

        Falls back to the default IP when the Excel row has none; the
        boilerplate is loaded and all updates applied before returning.
        """
        ip_addr = module_data.ip_address or "11.200.1.2"
        cfg = create_pmm_module(
            name=module_data.tagname,
            ip_address=ip_addr,
            parent_module="SLOT2_EN4TR",
            parent_port_id="2",
        )
        gen = cls(cfg)
        gen.load_boilerplate()
        gen.apply_updates()
        return gen
|
||||||
|
|
||||||
|
|
||||||
|
def create_pmm_module(name: str, ip_address: str = "11.200.1.2",
                      parent_module: str = "SLOT2_EN4TR",
                      parent_port_id: str = "2") -> PMMModuleConfig:
    """Factory for a PMM (Power Monitoring Module) configuration.

    Args:
        name: Module name (e.g., "CP01_V2_ENT_Voltage_Monitor").
        ip_address: IP address for the module (default: "11.200.1.2").
        parent_module: Parent module name (default: "SLOT2_EN4TR").
        parent_port_id: Parent port ID (default: "2").

    Returns:
        PMMModuleConfig: the populated configuration.
    """
    settings = dict(
        name=name,
        ip_address=ip_address,
        parent_module=parent_module,
        parent_port_id=parent_port_id,
    )
    return PMMModuleConfig(**settings)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Example usage of the PMM module generator.

    Builds a demo configuration, generates the L5X into generated/, and
    prints a summary. Requires boilerplate/PMM_Module.L5X to exist.
    """
    print("PMM (Power Monitoring Module) Generator Example")
    print("=" * 50)

    # Create PMM module configuration.
    config = create_pmm_module(
        name="CP01_V2_ENT_Voltage_Monitor",
        ip_address="11.200.1.2",
        parent_module="SLOT2_EN4TR"
    )

    # Generate the module: load template, apply config.
    generator = PMMModuleGenerator(config)
    generator.load_boilerplate()
    generator.apply_updates()

    # Save to generated folder.
    os.makedirs("generated", exist_ok=True)
    output_file = f"generated/{config.name}.L5X"
    generator.save(output_file)

    print(f"Generated PMM module: {output_file}")
    print(f"  Name: {config.name}")
    print(f"  IP Address: {config.ip_address}")
    print(f"  Parent Module: {config.parent_module}")
    print(f"  Parent Port: {config.parent_port_id}")

    print("\nModule Features:")
    print("  - Allen-Bradley 1420-V2-ENT Power Monitor")
    print("  - Real-time power monitoring and metering")
    print("  - Multiple data connections (I1, I2, I3, I4)")
    print("  - Voltage, current, power, and energy measurements")
    print("  - Ethernet/IP communication")
    print("  - Firmware and identification information")
    print("  - Power factor and frequency monitoring")
    print("  - Energy consumption tracking (kWh, kVARh)")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    main()  # run the demo only when executed as a script
|
||||||
350
IO Tree Configuration Generator/models/sio_boilerplate_model.py
Normal file
350
IO Tree Configuration Generator/models/sio_boilerplate_model.py
Normal file
@ -0,0 +1,350 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
SIO Module Boilerplate Model
|
||||||
|
============================
|
||||||
|
|
||||||
|
Model for SIO (Safety Input/Output) modules with safety-enabled functionality.
|
||||||
|
Supports IP address configuration and comment updates for safety I/O channels.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict, Optional, TYPE_CHECKING
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
import os
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from excel_data_processor import ModuleData
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class SIOModuleConfig:
    """Configuration for a SIO (Safety I/O) module instance."""
    name: str  # Module name (e.g., "SIO1")
    ip_address: str = "123.124.125.15"  # Ethernet address written into the L5X
    parent_module: str = "SLOT2_EN4TR"  # name of the parent comms module
    parent_port_id: str = "2"  # port on the parent the module attaches to
    inhibited: bool = False  # serialized lowercase into the Inhibited attribute
    major_fault: bool = False  # serialized lowercase into MajorFault
    safety_network: str = "16#0000_4c14_03e7_33a8"  # safety network number (SNN)
    safety_enabled: bool = True  # serialized lowercase into SafetyEnabled
    standard_input_names: Optional[Dict[int, str]] = None  # standard data connection inputs (.Data[i])
    standard_output_names: Optional[Dict[int, str]] = None  # standard data connection outputs (.Data[i])
    safety_input_names: Optional[Dict[int, str]] = None  # SI connection comments (DATA[0].0-7)
    safety_output_names: Optional[Dict[int, str]] = None  # SO connection comments (DATA[1].0-7)
|
||||||
|
|
||||||
|
|
||||||
|
class SIOModuleGenerator:
|
||||||
|
"""Generator for SIO module XML with safety I/O support."""
|
||||||
|
|
||||||
|
def __init__(self, config: SIOModuleConfig):
    """Store the configuration and locate the SIO boilerplate template."""
    self.config = config
    self.boilerplate_path = os.path.join("boilerplate", "SIO_Module.L5X")
    # Populated by load_boilerplate():
    self.tree = None
    self.root = None
|
||||||
|
|
||||||
|
def load_boilerplate(self):
    """Parse the SIO boilerplate template into ``self.tree``/``self.root``.

    Raises:
        FileNotFoundError: if the template file is absent.
    """
    if not os.path.exists(self.boilerplate_path):
        raise FileNotFoundError(f"Boilerplate file not found: {self.boilerplate_path}")
    parsed = ET.parse(self.boilerplate_path)
    self.tree = parsed
    self.root = parsed.getroot()
|
||||||
|
|
||||||
|
def update_module_name(self):
    """Write the configured name to the root TargetName and target Module."""
    new_name = self.config.name
    self.root.set("TargetName", new_name)
    target = self.root.find(".//Module[@Use='Target']")
    if target is None:
        return
    target.set("Name", new_name)
|
||||||
|
|
||||||
|
def update_ip_address(self):
    """Write the configured IP address onto the Ethernet ``<Port>``."""
    eth_port = self.root.find(".//Port[@Type='Ethernet']")
    if eth_port is None:
        return
    eth_port.set("Address", self.config.ip_address)
|
||||||
|
|
||||||
|
def update_parent_module(self):
    """Point the target ``<Module>`` at the configured parent and port."""
    target = self.root.find(".//Module[@Use='Target']")
    if target is None:
        return
    target.set("ParentModule", self.config.parent_module)
    target.set("ParentModPortId", self.config.parent_port_id)
|
||||||
|
|
||||||
|
def update_safety_settings(self):
    """Write safety network and state attributes onto the target module.

    Boolean config values are serialized lowercase ("true"/"false") as
    the L5X attributes expect.
    """
    target = self.root.find(".//Module[@Use='Target']")
    if target is None:
        return
    target.set("SafetyNetwork", self.config.safety_network)
    target.set("SafetyEnabled", str(self.config.safety_enabled).lower())
    target.set("Inhibited", str(self.config.inhibited).lower())
    target.set("MajorFault", str(self.config.major_fault).lower())
|
||||||
|
|
||||||
|
def update_comments(self):
    """Rebuild the comment blocks for every configured connection type.

    Standard Data, Safety Input (SI -> DATA[0].0-7) and Safety Output
    (SO -> DATA[1].0-7) comments are each replaced wholesale when the
    corresponding name dictionary is configured; connections without
    configured names are left untouched.
    """
    def _rewrite(xpath, names, operand_template, bounded):
        # Replace the children of the <Comments> element at *xpath* with
        # one <Comment> per (index, text) pair. When *bounded* is True,
        # indices outside 0-7 are silently dropped (safety channels).
        if not names:
            return
        container = self.root.find(xpath)
        if container is None:
            return
        container.clear()
        for index, text in names.items():
            if bounded and not 0 <= index <= 7:
                continue
            entry = ET.SubElement(container, "Comment")
            entry.set("Operand", operand_template.format(index))
            entry.text = text

    cfg = self.config
    _rewrite(".//Connection[@Name='_200424912C822C83']/InputTag/Comments",
             cfg.standard_input_names, ".Data[{}]", False)
    _rewrite(".//Connection[@Name='_200424912C822C83']/OutputTag/Comments",
             cfg.standard_output_names, ".Data[{}]", False)
    _rewrite(".//Connection[@Name='_200424962CC22C87']/InputTag/Comments",
             cfg.safety_input_names, ".DATA[0].{}", True)
    _rewrite(".//Connection[@Name='_200424962C862CC2']/OutputTag/Comments",
             cfg.safety_output_names, ".DATA[1].{}", True)
|
||||||
|
|
||||||
|
def update_export_date(self):
    """Stamp the root element's ``ExportDate`` with the current time."""
    stamp = datetime.now().strftime("%a %b %d %H:%M:%S %Y")
    self.root.set("ExportDate", stamp)
|
||||||
|
|
||||||
|
def apply_updates(self):
    """Apply all updates to the boilerplate.

    Runs every updater once: name, IP, parent references, safety
    attributes, channel comments, and finally the export timestamp.
    """
    self.update_module_name()
    self.update_ip_address()
    self.update_parent_module()
    self.update_safety_settings()
    self.update_comments()
    self.update_export_date()
|
||||||
|
|
||||||
|
def save(self, output_path: str):
    """Write the current tree to *output_path* as UTF-8 XML.

    Raises:
        RuntimeError: if no boilerplate has been loaded.
    """
    if self.tree is None:
        raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")
    self.tree.write(output_path, encoding='UTF-8', xml_declaration=True)
|
||||||
|
|
||||||
|
def get_xml_string(self) -> str:
    """Return the current tree serialized as a unicode string.

    Raises:
        RuntimeError: if no boilerplate has been loaded yet.
    """
    if self.tree is not None:
        return ET.tostring(self.root, encoding='unicode')
    raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Convenience helpers for EnhancedMCMGenerator refactor
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _extract_comment_dictionaries(module_data: 'ModuleData') -> tuple[
|
||||||
|
Dict[int, str], Dict[int, str], Dict[int, str], Dict[int, str]
|
||||||
|
]:
|
||||||
|
"""Translate the raw Excel `ModuleData` into the comment dictionaries
|
||||||
|
expected by SIOModuleGenerator.update_comments().
|
||||||
|
|
||||||
|
The logic follows SIO channel constraints:
|
||||||
|
- SI0–SI7: Safety Input channels
|
||||||
|
- SO0–SO4: Safety Output channels
|
||||||
|
- IO0–IO3: IO-Link channels
|
||||||
|
"""
|
||||||
|
|
||||||
|
standard_input_names: Dict[int, str] = {}
|
||||||
|
standard_output_names: Dict[int, str] = {}
|
||||||
|
safety_input_names: Dict[int, str] = {}
|
||||||
|
safety_output_names: Dict[int, str] = {}
|
||||||
|
|
||||||
|
for io_mapping in module_data.io_mappings:
|
||||||
|
if not io_mapping.description:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Handle SPARE entries explicitly
|
||||||
|
comment = (
|
||||||
|
"SPARE" if io_mapping.description.upper() == "SPARE" else io_mapping.description
|
||||||
|
)
|
||||||
|
|
||||||
|
io_path = io_mapping.io_path
|
||||||
|
if not io_path or ":" not in io_path:
|
||||||
|
# Skip malformed IO_PATH strings
|
||||||
|
continue
|
||||||
|
|
||||||
|
path_parts = io_path.split(":", 1)[1]
|
||||||
|
if "." not in path_parts:
|
||||||
|
continue
|
||||||
|
|
||||||
|
channel, terminal = path_parts.split(".", 1)
|
||||||
|
channel_upper = channel.upper()
|
||||||
|
|
||||||
|
# Parse terminal to get channel number
|
||||||
|
terminal_upper = terminal.upper()
|
||||||
|
|
||||||
|
if channel_upper == "SI":
|
||||||
|
# Safety Input: SI0-SI7
|
||||||
|
if terminal_upper.startswith("SI") and len(terminal_upper) >= 3:
|
||||||
|
try:
|
||||||
|
index = int(terminal_upper[2:]) # Extract number from SI0, SI1, etc.
|
||||||
|
if 0 <= index <= 7: # Valid SI range
|
||||||
|
safety_input_names[index] = comment
|
||||||
|
except ValueError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
elif channel_upper == "SO":
|
||||||
|
# Safety Output: SO0-SO4 (per memory constraints)
|
||||||
|
if terminal_upper.startswith("SO") and len(terminal_upper) >= 3:
|
||||||
|
try:
|
||||||
|
index = int(terminal_upper[2:]) # Extract number from SO0, SO1, etc.
|
||||||
|
if 0 <= index <= 4: # Valid SO range per constraints
|
||||||
|
safety_output_names[index] = comment
|
||||||
|
except ValueError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
elif channel_upper == "IO":
|
||||||
|
# IO-Link channels: IO0-IO3 (per memory constraints)
|
||||||
|
if terminal_upper.startswith("IO") and len(terminal_upper) >= 3:
|
||||||
|
try:
|
||||||
|
index = int(terminal_upper[2:]) # Extract number from IO0, IO1, etc.
|
||||||
|
if 0 <= index <= 3: # Valid IO-Link range per constraints
|
||||||
|
# IO-Link channels could go to standard input/output depending on signal type
|
||||||
|
if io_mapping.signal.upper() == "I":
|
||||||
|
standard_input_names[index] = comment
|
||||||
|
elif io_mapping.signal.upper() == "O":
|
||||||
|
standard_output_names[index] = comment
|
||||||
|
except ValueError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Any other terminal formats are ignored
|
||||||
|
|
||||||
|
return (
|
||||||
|
standard_input_names,
|
||||||
|
standard_output_names,
|
||||||
|
safety_input_names,
|
||||||
|
safety_output_names,
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_excel(
|
||||||
|
cls,
|
||||||
|
module_data: 'ModuleData',
|
||||||
|
*,
|
||||||
|
ip_address: str = "",
|
||||||
|
parent_module: str = "SLOT2_EN4TR",
|
||||||
|
parent_port_id: str = "2",
|
||||||
|
) -> 'SIOModuleGenerator':
|
||||||
|
"""Factory that builds a fully-configured generator directly from
|
||||||
|
ExcelDataProcessor.ModuleData.
|
||||||
|
|
||||||
|
It returns an *instance* (already loaded and updated) so callers can
|
||||||
|
access .root or save it immediately.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from excel_data_processor import ModuleData # local import to avoid cycle at top level
|
||||||
|
|
||||||
|
if not isinstance(module_data, ModuleData):
|
||||||
|
raise TypeError("module_data must be an Excel ModuleData instance")
|
||||||
|
|
||||||
|
(
|
||||||
|
standard_input_names,
|
||||||
|
standard_output_names,
|
||||||
|
safety_input_names,
|
||||||
|
safety_output_names,
|
||||||
|
) = cls._extract_comment_dictionaries(module_data)
|
||||||
|
|
||||||
|
config = create_sio_module(
|
||||||
|
name=module_data.tagname,
|
||||||
|
ip_address=ip_address or module_data.ip_address or "123.124.125.15",
|
||||||
|
parent_module=parent_module,
|
||||||
|
parent_port_id=parent_port_id,
|
||||||
|
standard_input_names=standard_input_names if standard_input_names else None,
|
||||||
|
standard_output_names=standard_output_names if standard_output_names else None,
|
||||||
|
safety_input_names=safety_input_names if safety_input_names else None,
|
||||||
|
safety_output_names=safety_output_names if safety_output_names else None,
|
||||||
|
)
|
||||||
|
|
||||||
|
generator = cls(config)
|
||||||
|
generator.load_boilerplate()
|
||||||
|
generator.apply_updates()
|
||||||
|
return generator
|
||||||
|
|
||||||
|
|
||||||
|
def create_sio_module(name: str, ip_address: str = "123.124.125.15",
                      parent_module: str = "SLOT2_EN4TR", parent_port_id: str = "2",
                      standard_input_names: Optional[Dict[int, str]] = None,
                      standard_output_names: Optional[Dict[int, str]] = None,
                      safety_input_names: Optional[Dict[int, str]] = None,
                      safety_output_names: Optional[Dict[int, str]] = None) -> SIOModuleConfig:
    """Factory function: bundle the given settings into a SIOModuleConfig.

    Pure pass-through — every argument is forwarded unchanged.
    """
    settings = {
        "name": name,
        "ip_address": ip_address,
        "parent_module": parent_module,
        "parent_port_id": parent_port_id,
        "standard_input_names": standard_input_names,
        "standard_output_names": standard_output_names,
        "safety_input_names": safety_input_names,
        "safety_output_names": safety_output_names,
    }
    return SIOModuleConfig(**settings)
|
||||||
|
|
||||||
|
|
||||||
|
# Example usage
|
||||||
|
# Example usage
if __name__ == "__main__":
    # Example: Create a SIO module with safety I/O
    # (channel index -> operator-facing comment text)
    config = create_sio_module(
        name="SIO1",
        ip_address="123.124.125.15",
        safety_input_names={
            0: "Emergency Stop 1",
            1: "Emergency Stop 2",
            2: "Safety Gate 1",
            3: "Safety Gate 2",
            4: "Light Curtain",
            5: "Safety Mat",
            6: "Reset Button",
            7: "Enable Switch"
        },
        safety_output_names={
            0: "Safety Relay 1",
            1: "Safety Relay 2",
            2: "Warning Light",
            3: "Safety Valve",
            4: "Brake Release"
        }
    )

    # Standard pipeline: load template -> apply config -> write L5X.
    generator = SIOModuleGenerator(config)
    generator.load_boilerplate()
    generator.apply_updates()
    generator.save("generated/SIO1.L5X")

    print(f"Generated SIO module: {config.name}")
|
||||||
@ -0,0 +1,443 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
TL70 Beacon Module Boilerplate Model
|
||||||
|
====================================
|
||||||
|
|
||||||
|
Model for TL70 Pro with IO-Link beacon modules.
|
||||||
|
Supports configuring module name, parent module, port address, and basic segment settings.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict, Optional, Tuple
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class TL70BeaconConfig:
    """Configuration for a TL70 beacon instance.

    Colors and flash rates are the device's "basic" codes (0-15); see
    TL70Colors and TL70FlashRates for named values. Only segments 1-3
    carry non-trivial defaults here; segment 3 defaults to color 9.
    """
    name: str  # Module name (e.g., "BEACON1")
    parent_module: str = "IOLM1"  # Parent IO-Link master module
    parent_port_id: str = "4"  # Port on the IO-Link master (always 4 for IOLM modules)
    port_address: str = "0"  # IO-Link port address
    inhibited: bool = False  # Module inhibited flag in the L5X
    major_fault: bool = False  # MajorFault flag in the L5X
    application_tag: str = "***"  # Application specific tag (up to 29 chars)
    segment_1_color: int = 0  # Segment 1 basic color (0-15)
    segment_1_flash_rate: int = 0  # Segment 1 flash rate (0-15)
    segment_2_color: int = 0  # Segment 2 basic color (0-15)
    segment_2_flash_rate: int = 0  # Segment 2 flash rate (0-15)
    segment_3_color: int = 9  # Segment 3 basic color (0-15)
    segment_3_flash_rate: int = 0  # Segment 3 flash rate (0-15)
    segment_4_color: int = 0  # Segment 4 basic color (0-15)
    segment_4_flash_rate: int = 0  # Segment 4 flash rate (0-15)
    segment_5_color: int = 0  # Segment 5 basic color (0-15)
    segment_5_flash_rate: int = 0  # Segment 5 flash rate (0-15)
    segment_6_color: int = 0  # Segment 6 basic color (0-15)
    segment_6_flash_rate: int = 0  # Segment 6 flash rate (0-15)
    operating_mode: int = 1  # Operating mode: 0=Basic, 1=Run, 2=Level, 3=Gauge, 4=Advanced
|
||||||
|
|
||||||
|
|
||||||
|
class TL70BeaconGenerator:
|
||||||
|
"""Generator for TL70 beacon module XML."""
|
||||||
|
|
||||||
|
    def __init__(self, config: TL70BeaconConfig):
        # Beacon settings to apply on top of the boilerplate template.
        self.config = config
        # Fixed template path; parsed lazily by load_boilerplate().
        self.boilerplate_path = os.path.join("boilerplate", "TL70_Module.L5X")
        # Parsed XML tree/root; None until load_boilerplate() succeeds.
        self.tree = None
        self.root = None
|
||||||
|
|
||||||
|
def load_boilerplate(self):
|
||||||
|
"""Load the TL70 boilerplate template."""
|
||||||
|
if not os.path.exists(self.boilerplate_path):
|
||||||
|
raise FileNotFoundError(f"Boilerplate file not found: {self.boilerplate_path}")
|
||||||
|
|
||||||
|
self.tree = ET.parse(self.boilerplate_path)
|
||||||
|
self.root = self.tree.getroot()
|
||||||
|
|
||||||
|
def update_module_name(self):
|
||||||
|
"""Update the module name throughout the XML."""
|
||||||
|
# Update in root attributes
|
||||||
|
self.root.set("TargetName", self.config.name)
|
||||||
|
|
||||||
|
# Update Module element
|
||||||
|
module = self.root.find(".//Module[@Use='Target']")
|
||||||
|
if module is not None:
|
||||||
|
module.set("Name", self.config.name)
|
||||||
|
|
||||||
|
def update_parent_module(self):
|
||||||
|
"""Update parent module references."""
|
||||||
|
module = self.root.find(".//Module[@Use='Target']")
|
||||||
|
if module is not None:
|
||||||
|
module.set("ParentModule", self.config.parent_module)
|
||||||
|
|
||||||
|
# For IO-Link Master modules (M12DR), always use port 4 (the IO-Link port)
|
||||||
|
# regardless of what was specified in the config
|
||||||
|
parent_port_id = "4" # Always use port 4 for IOLM modules
|
||||||
|
module.set("ParentModPortId", parent_port_id)
|
||||||
|
|
||||||
|
def update_port_address(self):
|
||||||
|
"""Update the IO-Link port address."""
|
||||||
|
port = self.root.find(".//Port[@Type='IO-Link']")
|
||||||
|
if port is not None:
|
||||||
|
port.set("Address", self.config.port_address)
|
||||||
|
|
||||||
|
def update_inhibited_status(self):
|
||||||
|
"""Update the inhibited status."""
|
||||||
|
module = self.root.find(".//Module[@Use='Target']")
|
||||||
|
if module is not None:
|
||||||
|
module.set("Inhibited", str(self.config.inhibited).lower())
|
||||||
|
module.set("MajorFault", str(self.config.major_fault).lower())
|
||||||
|
|
||||||
|
    def update_application_tag(self):
        """Write the application-specific tag into the config structure.

        Updates the LEN member with the tag length and the DATA member with
        the tag text padded to 29 characters using "$00" escape sequences.
        The text is set plainly here; save() re-wraps DATA members in CDATA.

        NOTE(review): assumes application_tag is at most 29 characters — a
        longer tag gets no padding and LEN exceeds 29; TODO confirm upstream
        callers enforce the limit.
        """
        # Find the Application_specific_Tag structure
        app_tag_len = self.root.find(".//StructureMember[@Name='Application_specific_Tag']//DataValueMember[@Name='LEN']")
        app_tag_data = self.root.find(".//StructureMember[@Name='Application_specific_Tag']//DataValueMember[@Name='DATA']")

        if app_tag_len is not None and app_tag_data is not None:
            # Update length
            app_tag_len.set("Value", str(len(self.config.application_tag)))

            # Update data - need to format as CDATA with proper padding
            padded_tag = self.config.application_tag + "$00" * (29 - len(self.config.application_tag))
            # Just set the text directly, we'll handle CDATA in save()
            app_tag_data.text = f"'{padded_tag}'"

        # Also update the L5K data format
        config_tag = self.root.find(".//ConfigTag//Data[@Format='L5K']")
        if config_tag is not None:
            # The L5K format contains the tag data - we need to update it
            # This is complex binary data, so we'll focus on the Decorated format
            pass
|
||||||
|
|
||||||
|
def update_segment_configuration(self):
|
||||||
|
"""Update segment color and flash rate configurations."""
|
||||||
|
segments = [
|
||||||
|
(1, self.config.segment_1_color, self.config.segment_1_flash_rate),
|
||||||
|
(2, self.config.segment_2_color, self.config.segment_2_flash_rate),
|
||||||
|
(3, self.config.segment_3_color, self.config.segment_3_flash_rate),
|
||||||
|
(4, self.config.segment_4_color, self.config.segment_4_flash_rate),
|
||||||
|
(5, self.config.segment_5_color, self.config.segment_5_flash_rate),
|
||||||
|
(6, self.config.segment_6_color, self.config.segment_6_flash_rate),
|
||||||
|
]
|
||||||
|
|
||||||
|
for segment_num, color, flash_rate in segments:
|
||||||
|
# Update basic color
|
||||||
|
color_elem = self.root.find(f".//StructureMember[@Name='Segment_{segment_num}_Config']//DataValueMember[@Name='Segment_{segment_num}_Settings_Basic_Color']")
|
||||||
|
if color_elem is not None:
|
||||||
|
color_elem.set("Value", str(color))
|
||||||
|
|
||||||
|
# Update flash rate
|
||||||
|
flash_elem = self.root.find(f".//StructureMember[@Name='Segment_{segment_num}_Config']//DataValueMember[@Name='Segment_{segment_num}_Settings_Basic_Flash_Rate']")
|
||||||
|
if flash_elem is not None:
|
||||||
|
flash_elem.set("Value", str(flash_rate))
|
||||||
|
|
||||||
|
def update_output_tag_segment_data(self):
|
||||||
|
"""Update the output tag segment data - only Color_1 values are configured, everything else is zero."""
|
||||||
|
|
||||||
|
# List of all segment-related output fields that should be set to 0
|
||||||
|
segment_fields = [
|
||||||
|
# Segment 1 fields
|
||||||
|
"Segment_1_Color_2", "Segment_1_Color_1_Intensity", "Segment_1_Color_2_Intensity",
|
||||||
|
"Segment_1_Pulse_Pattern", "Segment_1_Speed", "Segment_1_Animation_Type",
|
||||||
|
# Segment 2 fields
|
||||||
|
"Segment_2_Color_2", "Segment_2_Color_1_Intensity", "Segment_2_Color_2_Intensity",
|
||||||
|
"Segment_2_Pulse_Pattern", "Segment_2_Speed", "Segment_2_Animation_Type",
|
||||||
|
# Segment 3 fields
|
||||||
|
"Segment_3_Color_2", "Segment_3_Color_1_Intensity", "Segment_3_Color_2_Intensity",
|
||||||
|
"Segment_3_Pulse_Pattern", "Segment_3_Speed", "Segment_3_Animation_Type",
|
||||||
|
# Segment 4 fields
|
||||||
|
"Segment_4_Color_1", "Segment_4_Color_2", "Segment_4_Color_1_Intensity", "Segment_4_Color_2_Intensity",
|
||||||
|
"Segment_4_Pulse_Pattern", "Segment_4_Speed", "Segment_4_Animation_Type",
|
||||||
|
# Segment 5 fields
|
||||||
|
"Segment_5_Color_1", "Segment_5_Color_2", "Segment_5_Color_1_Intensity", "Segment_5_Color_2_Intensity",
|
||||||
|
"Segment_5_Pulse_Pattern", "Segment_5_Speed", "Segment_5_Animation_Type",
|
||||||
|
# Segment 6 fields
|
||||||
|
"Segment_6_Color_1", "Segment_6_Color_2", "Segment_6_Color_1_Intensity", "Segment_6_Color_2_Intensity",
|
||||||
|
"Segment_6_Pulse_Pattern", "Segment_6_Speed", "Segment_6_Animation_Type",
|
||||||
|
# Audible
|
||||||
|
"Audible"
|
||||||
|
]
|
||||||
|
|
||||||
|
# Set all segment fields to 0
|
||||||
|
for field_name in segment_fields:
|
||||||
|
field_elem = self.root.find(f".//OutputTag//DataValueMember[@Name='{field_name}']")
|
||||||
|
if field_elem is not None:
|
||||||
|
field_elem.set("Value", "0")
|
||||||
|
|
||||||
|
# Now set only the Color_1 values for segments 1, 2, 3 to configured values
|
||||||
|
seg1_color1 = self.root.find(".//OutputTag//DataValueMember[@Name='Segment_1_Color_1']")
|
||||||
|
if seg1_color1 is not None:
|
||||||
|
seg1_color1.set("Value", str(self.config.segment_1_color))
|
||||||
|
|
||||||
|
seg2_color1 = self.root.find(".//OutputTag//DataValueMember[@Name='Segment_2_Color_1']")
|
||||||
|
if seg2_color1 is not None:
|
||||||
|
seg2_color1.set("Value", str(self.config.segment_2_color))
|
||||||
|
|
||||||
|
seg3_color1 = self.root.find(".//OutputTag//DataValueMember[@Name='Segment_3_Color_1']")
|
||||||
|
if seg3_color1 is not None:
|
||||||
|
seg3_color1.set("Value", str(self.config.segment_3_color))
|
||||||
|
|
||||||
|
def update_export_date(self):
|
||||||
|
"""Update the export date to current time."""
|
||||||
|
export_date = datetime.now().strftime("%a %b %d %H:%M:%S %Y")
|
||||||
|
self.root.set("ExportDate", export_date)
|
||||||
|
|
||||||
|
def apply_updates(self):
|
||||||
|
"""Apply all updates to the boilerplate."""
|
||||||
|
self.update_module_name()
|
||||||
|
self.update_parent_module()
|
||||||
|
self.update_port_address()
|
||||||
|
self.update_inhibited_status()
|
||||||
|
self.update_application_tag()
|
||||||
|
self.update_segment_configuration()
|
||||||
|
self.update_output_tag_segment_data()
|
||||||
|
self.update_export_date()
|
||||||
|
|
||||||
|
    def save(self, output_path: str):
        """Save the updated module to file, restoring CDATA wrappers.

        ElementTree strips CDATA sections when serialising, so after
        serialisation this method re-wraps L5K <Data> payloads and DATA
        members in <![CDATA[...]]> via regex before writing the file.

        Args:
            output_path: Destination path for the generated L5X file.

        Raises:
            RuntimeError: If load_boilerplate() has not been called yet.
        """
        if self.tree is None:
            raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")

        # Save with proper formatting and preserve CDATA sections
        xml_string = ET.tostring(self.root, encoding='unicode')

        # Fix CDATA wrapper for L5K data and DATA members - ElementTree strips CDATA sections
        import re

        # Pattern to find L5K data and DATA members that need CDATA wrapper.
        # L5K payloads are bracketed "[...]" or parenthesised "(...)" blobs.
        l5k_pattern = r'(<Data Format="L5K">)(\s*\[.*?\]|\s*\(.*?\))\s*(</Data>)'
        data_pattern = r'(<DataValueMember Name="DATA"[^>]*>)([^<]*)(</DataValueMember>)'

        def replace_with_cdata(match):
            # Re-wrap the captured payload in a CDATA section.
            opening_tag = match.group(1)
            data_content = match.group(2).strip()
            closing_tag = match.group(3)
            # Add proper indentation and line breaks
            return f'{opening_tag}\n<![CDATA[{data_content}]]>\n{closing_tag}'

        # Apply CDATA wrapper to L5K data and DATA members
        xml_string = re.sub(l5k_pattern, replace_with_cdata, xml_string, flags=re.DOTALL | re.MULTILINE)
        xml_string = re.sub(data_pattern, replace_with_cdata, xml_string, flags=re.DOTALL | re.MULTILINE)

        # Write the corrected XML
        with open(output_path, 'w', encoding='utf-8') as f:
            f.write('<?xml version=\'1.0\' encoding=\'UTF-8\'?>\n')
            f.write(xml_string)
|
||||||
|
|
||||||
|
def get_xml_string(self) -> str:
|
||||||
|
"""Get the XML as a string."""
|
||||||
|
if self.tree is None:
|
||||||
|
raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")
|
||||||
|
|
||||||
|
return ET.tostring(self.root, encoding='unicode')
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Convenience helper used by EnhancedMCMGenerator's factory dispatch.
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_mapping(cls, mapping: Dict[str, str]) -> "TL70BeaconGenerator":
|
||||||
|
"""Create and fully configure a beacon generator from the Excel-derived
|
||||||
|
`beacon_modules` entry (a plain dict). The structure expected is the one
|
||||||
|
produced in EnhancedMCMGenerator._organize_modules_by_type()."""
|
||||||
|
|
||||||
|
# Determine segment colors based on description
|
||||||
|
description = mapping.get("description", "").upper()
|
||||||
|
segment_1_color, segment_2_color, segment_3_color = _determine_segment_colors(description)
|
||||||
|
|
||||||
|
cfg = TL70BeaconConfig(
|
||||||
|
name=mapping["name"],
|
||||||
|
parent_module=mapping["parent_module"],
|
||||||
|
parent_port_id=mapping["parent_port_id"],
|
||||||
|
port_address=mapping["port_address"],
|
||||||
|
application_tag=mapping["application_tag"],
|
||||||
|
segment_1_color=segment_1_color,
|
||||||
|
segment_2_color=segment_2_color,
|
||||||
|
segment_3_color=segment_3_color,
|
||||||
|
)
|
||||||
|
|
||||||
|
gen = cls(cfg)
|
||||||
|
gen.load_boilerplate()
|
||||||
|
gen.apply_updates()
|
||||||
|
return gen
|
||||||
|
|
||||||
|
    @classmethod
    def from_excel(cls, module_data, *, parent_module: str = "IOLM1", port_address: str = "0") -> "TL70BeaconGenerator":
        """Create, configure, and return a generator using Excel data.

        Derives segment colors from the module's description (preferring a
        per-mapping `desb` field), builds a TL70BeaconConfig, then loads the
        boilerplate and applies all updates before returning the generator.
        """

        # Get DESB description from module data to determine segment colors
        description = ""
        if hasattr(module_data, 'description') and module_data.description:
            description = module_data.description
        elif hasattr(module_data, 'io_mappings') and module_data.io_mappings:
            # Try to get DESB first, then fall back to description from IO mappings
            # NOTE(review): the description branch has no `break`, so the LAST
            # mapping's description wins unless a desb is found first — confirm
            # this is intended (the desb branch stops at the first match).
            for mapping in module_data.io_mappings:
                if hasattr(mapping, 'desb') and mapping.desb:
                    description = mapping.desb
                    break
                elif mapping.description:
                    description = mapping.description

        # Determine segment colors based on DESB description
        segment_1_color, segment_2_color, segment_3_color = _determine_segment_colors(description.upper())

        cfg = TL70BeaconConfig(
            name=module_data.tagname,
            parent_module=parent_module,
            parent_port_id="4",  # Always use port 4 for IO-Link
            port_address=port_address,
            application_tag="***",  # Default application tag
            segment_1_color=segment_1_color,
            segment_2_color=segment_2_color,
            segment_3_color=segment_3_color,
        )

        gen = cls(cfg)
        gen.load_boilerplate()
        gen.apply_updates()
        return gen
|
||||||
|
|
||||||
|
|
||||||
|
def create_tl70_beacon(name: str, parent_module: str = "IOLM1", parent_port_id: str = "4",
                       port_address: str = "0", application_tag: str = "***",
                       segment_1_color: int = 0, segment_1_flash_rate: int = 0,
                       segment_2_color: int = 0, segment_2_flash_rate: int = 0,
                       segment_3_color: int = 9, segment_3_flash_rate: int = 0,
                       segment_4_color: int = 0, segment_4_flash_rate: int = 0,
                       segment_5_color: int = 0, segment_5_flash_rate: int = 0,
                       segment_6_color: int = 0, segment_6_flash_rate: int = 0,
                       operating_mode: int = 1) -> TL70BeaconConfig:
    """Factory function to create a TL70 beacon configuration.

    Pure pass-through — every argument is forwarded unchanged to
    TL70BeaconConfig.
    """
    settings = {
        "name": name,
        "parent_module": parent_module,
        "parent_port_id": parent_port_id,
        "port_address": port_address,
        "application_tag": application_tag,
        "segment_1_color": segment_1_color,
        "segment_1_flash_rate": segment_1_flash_rate,
        "segment_2_color": segment_2_color,
        "segment_2_flash_rate": segment_2_flash_rate,
        "segment_3_color": segment_3_color,
        "segment_3_flash_rate": segment_3_flash_rate,
        "segment_4_color": segment_4_color,
        "segment_4_flash_rate": segment_4_flash_rate,
        "segment_5_color": segment_5_color,
        "segment_5_flash_rate": segment_5_flash_rate,
        "segment_6_color": segment_6_color,
        "segment_6_flash_rate": segment_6_flash_rate,
        "operating_mode": operating_mode,
    }
    return TL70BeaconConfig(**settings)
|
||||||
|
|
||||||
|
|
||||||
|
# Color constants for easy reference
|
||||||
|
class TL70Colors:
    """Color constants for TL70 beacon segments.

    Values map to the device's "basic color" codes (0-15) used by
    TL70BeaconConfig's segment color fields.
    """
    OFF = 0
    RED = 1
    GREEN = 2
    YELLOW = 3
    BLUE = 4
    MAGENTA = 5
    CYAN = 6
    WHITE = 7
    CUSTOM_1 = 8
    CUSTOM_2 = 9
    # Colors 10-15 are additional custom colors
|
||||||
|
|
||||||
|
|
||||||
|
# Flash rate constants
|
||||||
|
class TL70FlashRates:
    """Flash rate constants for TL70 beacon segments.

    Values map to the device's flash-rate codes (0-15) used by
    TL70BeaconConfig's segment flash-rate fields.
    """
    STEADY = 0
    SLOW = 1
    MEDIUM = 2
    FAST = 3
    # Additional rates 4-15 available
|
||||||
|
|
||||||
|
|
||||||
|
def _determine_segment_colors(description: str) -> Tuple[int, int, int]:
|
||||||
|
"""Determine TL70 beacon segment colors based on DESB description patterns.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
description: DESB description text to analyze
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple of (segment_1_color, segment_2_color, segment_3_color)
|
||||||
|
|
||||||
|
Logic:
|
||||||
|
- If "3" in DESB: segment 1=1, segment 2=3, segment 3=9
|
||||||
|
- If "2" in DESB: segment 1=1, segment 2=9, segment 3=0
|
||||||
|
- Default: segment 1=0, segment 2=0, segment 3=9
|
||||||
|
"""
|
||||||
|
description = description.upper()
|
||||||
|
|
||||||
|
if "3" in description:
|
||||||
|
return (1, 3, 9) # segment 1=1 (RED), segment 2=3 (YELLOW), segment 3=9 (CUSTOM_2)
|
||||||
|
elif "2" in description:
|
||||||
|
return (1, 9, 0) # segment 1=1 (RED), segment 2=9 (CUSTOM_2), segment 3=0 (OFF)
|
||||||
|
else:
|
||||||
|
return (0, 0, 9) # Default: segment 1=0 (OFF), segment 2=0 (OFF), segment 3=9 (CUSTOM_2)
|
||||||
|
|
||||||
|
|
||||||
|
# Example usage
|
||||||
|
# Example usage
if __name__ == "__main__":
    # Example 1: Create a TL70 beacon with manual configuration
    config1 = create_tl70_beacon(
        name="BEACON1",
        parent_module="IOLM1",
        parent_port_id="4",
        port_address="0",
        application_tag="STATUS_BEACON",
        segment_1_color=TL70Colors.GREEN,
        segment_1_flash_rate=TL70FlashRates.STEADY,
        segment_3_color=TL70Colors.RED,
        segment_3_flash_rate=TL70FlashRates.SLOW
    )

    # Standard pipeline: load template -> apply config -> write L5X.
    generator1 = TL70BeaconGenerator(config1)
    generator1.load_boilerplate()
    generator1.apply_updates()
    generator1.save("generated/BEACON1.L5X")

    print(f"Generated TL70 beacon module: {config1.name}")
    print(f"Parent module: {config1.parent_module}")
    print(f"Port: {config1.parent_port_id}")
    print(f"Address: {config1.port_address}")

    # Example 2: Test automatic segment color detection
    print("\n--- Testing automatic segment color detection ---")

    # Test description with "3"
    colors_3 = _determine_segment_colors("BEACON WITH 3 SEGMENTS")
    print(f"Description with '3': Segments = {colors_3} (Segment 1=RED, Segment 2=YELLOW, Segment 3=CUSTOM_2)")

    # Test description with "2"
    colors_2 = _determine_segment_colors("BEACON WITH 2 SEGMENTS")
    print(f"Description with '2': Segments = {colors_2} (Segment 1=RED, Segment 2=CUSTOM_2, Segment 3=OFF)")

    # Test default
    colors_default = _determine_segment_colors("BEACON STATUS")
    print(f"Default description: Segments = {colors_default} (Segment 1=OFF, Segment 2=OFF, Segment 3=CUSTOM_2)")

    # Example 3: Create beacon with automatic segment detection for "3"
    config2 = create_tl70_beacon(
        name="BEACON2",
        parent_module="PDP_FIO1",
        port_address="2",
        application_tag="AUTO_3_SEG"
    )
    # Apply automatic segment detection
    seg1, seg2, seg3 = _determine_segment_colors("BEACON 3")
    config2.segment_1_color = seg1
    config2.segment_2_color = seg2
    config2.segment_3_color = seg3

    generator2 = TL70BeaconGenerator(config2)
    generator2.load_boilerplate()
    generator2.apply_updates()
    generator2.save("generated/BEACON2.L5X")
    print(f"\nGenerated auto-configured beacon: {config2.name} with segments {(seg1, seg2, seg3)}")
|
||||||
@ -0,0 +1,808 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Turck Hub Module Boilerplate Model
|
||||||
|
==================================
|
||||||
|
|
||||||
|
Model for Turck Hub (TBIL-M1-16DXP) modules with support for different configurations.
|
||||||
|
Supports Chute_Load, Chute_Chute, Load_Chute, and PDP_FIOH variants.
|
||||||
|
|
||||||
|
Important Constraints:
|
||||||
|
- Port addresses must be even numbers only (0, 2, 4, 6, 8, 10, 12, 14)
|
||||||
|
- Maximum port address is 14
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict, Optional, List
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
import os
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from excel_data_processor import ModuleData, IOPathMapping
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class TurckHubModuleConfig:
    """Configuration for a Turck Hub module instance.

    Port addresses must be even numbers in 0-14 (0, 2, 4, 6, 8, 10, 12, 14);
    validated on construction.
    """
    name: str  # Module name (e.g., "Chute_Load_Hub1")
    variant: str  # Module variant: "Chute_Load", "Chute_Chute", "Load_Chute", or "PDP_FIOH"
    parent_module: str = "D2CMaster"
    parent_port_id: str = "4"
    port_address: str = "2"  # Port address: Must be even number (0, 2, 4, 6, 8, 10, 12, 14)
    inhibited: bool = False
    major_fault: bool = False
    input_comments: Optional[Dict[str, str]] = None  # Key: operand (e.g., ".PROCESSDATAIN.CONNECTOR_4_A_PIN_4"), Value: comment
    output_comments: Optional[Dict[str, str]] = None  # Key: operand (e.g., ".PROCESSDATAOUT.CONNECTOR_3_B_PIN_2"), Value: comment

    def __post_init__(self):
        """Validate configuration after initialization."""
        self._validate_port_address()

    def _validate_port_address(self):
        """Validate that port address is an even number between 0-14.

        Raises:
            ValueError: If port_address is not an integer string, is outside
                the 0-14 range, or is odd.
        """
        # Parse separately from the range checks: the old code re-raised its
        # own range errors through an except clause that string-matched the
        # interpreter's "invalid literal" message, which is fragile.
        try:
            addr = int(self.port_address)
        except ValueError:
            raise ValueError(
                f"Port address must be a valid integer, got: '{self.port_address}'"
            ) from None
        if addr < 0 or addr > 14:
            raise ValueError(f"Port address must be between 0-14, got: {addr}")
        if addr % 2 != 0:
            raise ValueError(f"Port address must be even number, got: {addr}")
|
||||||
|
|
||||||
|
|
||||||
|
class TurckHubModuleGenerator:
|
||||||
|
"""Generator for Turck Hub module XML with different variant support.
|
||||||
|
|
||||||
|
Note: Port addresses must be even numbers between 0-14 (0, 2, 4, 6, 8, 10, 12, 14).
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Mapping of variants to boilerplate filenames
|
||||||
|
VARIANT_BOILERPLATE_MAP = {
|
||||||
|
"Chute_Load": "Chute_Load_Hub_Module.L5X",
|
||||||
|
"Chute_Chute": "Chute_Chute_Hub_Module.L5X",
|
||||||
|
"Load_Chute": "Load_Chute_Hub_Module.L5X",
|
||||||
|
"PDP_FIOH": "PDP_FIOH_Module.L5X",
|
||||||
|
"FL_Hub": "FL_Hub_Module.L5X"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Default port addresses for each variant (FIOH must be on 6 or 14)
|
||||||
|
VARIANT_PORT_ADDRESSES = {
|
||||||
|
"Chute_Load": "6", # Fixed: was "2", now proper FIOH address
|
||||||
|
"Chute_Chute": "6", # Fixed: was "0", now proper FIOH address
|
||||||
|
"Load_Chute": "14", # Fixed: was "8", now proper FIOH address
|
||||||
|
"PDP_FIOH": "6", # PDP FIOH modules default to address 6
|
||||||
|
"FL_Hub": "6" # FL Hub modules default to address 6
|
||||||
|
}
|
||||||
|
|
||||||
|
def __init__(self, config: TurckHubModuleConfig):
    """Resolve the boilerplate template and defaults for *config*."""
    self.config = config

    # Determine the correct boilerplate file
    self.boilerplate_filename = self._determine_boilerplate_filename()
    self.boilerplate_path = os.path.join("boilerplate", self.boilerplate_filename)

    # Set default port address if not specified (variant-specific default)
    if not self.config.port_address:
        self.config.port_address = self.VARIANT_PORT_ADDRESSES[self.config.variant]

    # Parsed XML handles; populated by load_boilerplate().
    self.tree = None
    self.root = None
def _determine_boilerplate_filename(self) -> str:
    """Determine the boilerplate filename to use.

    Priority:
    1. If "FL" is in the module name, use FL_Hub_Module.L5X
    2. Check for module-specific boilerplate: {module_name}_Module.L5X
    3. Fall back to variant-based boilerplate

    Returns:
        Filename relative to the ``boilerplate`` directory.

    Raises:
        ValueError: If no module-specific file exists and the variant is
            not in VARIANT_BOILERPLATE_MAP.
    """
    # First, check if "FL" is in the module name
    # NOTE(review): plain substring match, so any name containing "FL"
    # (e.g. "OVERFLOW1") would also be routed here — confirm the naming
    # convention guarantees "FL" only appears in FL hub tagnames.
    if "FL" in self.config.name.upper():
        fl_hub_filename = "FL_Hub_Module.L5X"
        print(f" {self.config.name}: Detected 'FL' in name, using FL_Hub boilerplate {fl_hub_filename}")
        return fl_hub_filename

    # Second, try module-specific boilerplate
    module_specific_filename = f"{self.config.name}_Module.L5X"
    module_specific_path = os.path.join("boilerplate", module_specific_filename)

    if os.path.exists(module_specific_path):
        print(f" {self.config.name} (FIOH {self.config.variant}): Using module-specific boilerplate {module_specific_filename}")
        return module_specific_filename

    # Fall back to variant-based boilerplate
    if self.config.variant not in self.VARIANT_BOILERPLATE_MAP:
        raise ValueError(f"Unsupported variant: {self.config.variant}. Supported variants: {list(self.VARIANT_BOILERPLATE_MAP.keys())}")

    fallback_filename = self.VARIANT_BOILERPLATE_MAP[self.config.variant]
    print(f" {self.config.name} (FIOH {self.config.variant}): Using variant boilerplate {fallback_filename}")
    return fallback_filename
def load_boilerplate(self):
    """Load the appropriate boilerplate template based on variant.

    Raises:
        FileNotFoundError: If the resolved boilerplate file does not exist.
    """
    if not os.path.exists(self.boilerplate_path):
        raise FileNotFoundError(f"Boilerplate file not found: {self.boilerplate_path}")

    # Parse once; tree/root are mutated in place by the update_* helpers.
    self.tree = ET.parse(self.boilerplate_path)
    self.root = self.tree.getroot()
def update_module_name(self):
    """Update the module name throughout the XML."""
    # Update in root attributes
    self.root.set("TargetName", self.config.name)

    # Update Module element — the one marked Use="Target" is this module
    module = self.root.find(".//Module[@Use='Target']")
    if module is not None:
        module.set("Name", self.config.name)
def update_parent_module(self):
    """Update parent module references."""
    module = self.root.find(".//Module[@Use='Target']")
    if module is not None:
        # Re-point this module at its configured parent and parent port.
        module.set("ParentModule", self.config.parent_module)
        module.set("ParentModPortId", self.config.parent_port_id)
def update_port_address(self):
    """Update the port address."""
    port = self.root.find(".//Port[@Type='IO-Link']")
    if port is not None:
        port.set("Address", self.config.port_address)
    else:
        # Non-fatal: leave the boilerplate address in place but warn loudly.
        print(f" ERROR: Could not find IO-Link port for {self.config.name}")
def update_inhibited_status(self):
    """Update the inhibited status."""
    module = self.root.find(".//Module[@Use='Target']")
    if module is not None:
        # L5X stores booleans as lowercase "true"/"false" strings.
        module.set("Inhibited", "true" if self.config.inhibited else "false")
        module.set("MajorFault", "true" if self.config.major_fault else "false")
def update_input_comments(self):
    """Update input tag comments."""
    if self.config.input_comments:
        # NOTE(review): connection name is hard-coded; presumably the same
        # across all hub boilerplates — confirm before adding new variants.
        input_tag = self.root.find(".//Connection[@Name='_2004250069802D0028802D005304']/InputTag")
        if input_tag is not None:
            # Find or create Comments section
            input_comments = input_tag.find("Comments")
            if input_comments is None:
                # Create Comments section as the first child
                input_comments = ET.Element("Comments")
                input_tag.insert(0, input_comments)
            else:
                # Clear existing comments (wipes children and attributes)
                input_comments.clear()

            # Add new comments: one <Comment Operand="..."> per mapping
            for operand, comment_text in self.config.input_comments.items():
                comment = ET.SubElement(input_comments, "Comment")
                comment.set("Operand", operand)
                comment.text = comment_text
def update_output_comments(self):
    """Update output tag comments."""
    if self.config.output_comments:
        # NOTE(review): mirrors update_input_comments; the hard-coded
        # connection name is shared with that method.
        output_tag = self.root.find(".//Connection[@Name='_2004250069802D0028802D005304']/OutputTag")
        if output_tag is not None:
            # Find or create Comments section
            output_comments = output_tag.find("Comments")
            if output_comments is None:
                # Create Comments section as the first child
                output_comments = ET.Element("Comments")
                output_tag.insert(0, output_comments)
            else:
                # Clear existing comments (wipes children and attributes)
                output_comments.clear()

            # Add new comments: one <Comment Operand="..."> per mapping
            for operand, comment_text in self.config.output_comments.items():
                comment = ET.SubElement(output_comments, "Comment")
                comment.set("Operand", operand)
                comment.text = comment_text
def update_export_date(self):
    """Update the export date to current time."""
    # RSLogix-style timestamp, e.g. "Mon Jan 01 12:00:00 2024".
    export_date = datetime.now().strftime("%a %b %d %H:%M:%S %Y")
    self.root.set("ExportDate", export_date)
def apply_updates(self):
    """Apply all updates to the boilerplate.

    Requires load_boilerplate() to have been called first; each helper
    edits a distinct part of the parsed tree.
    """
    self.update_module_name()
    self.update_parent_module()
    self.update_port_address()
    self.update_inhibited_status()
    self.update_input_comments()
    self.update_output_comments()
    self.update_export_date()
def save(self, output_path: str):
    """Save the updated module to file, preserving CDATA sections.

    ElementTree serialization would drop CDATA markers, so instead of
    writing ``self.tree`` this re-reads the raw boilerplate text and
    applies the same updates via string/regex replacement.

    Raises:
        RuntimeError: If load_boilerplate() has not been called.
    """
    if self.tree is None:
        raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")

    # Read the original boilerplate file to preserve formatting and CDATA
    with open(self.boilerplate_path, 'r', encoding='utf-8') as f:
        original_content = f.read()

    # Apply our updates by doing string replacements on the original content
    updated_content = self._apply_updates_to_content(original_content)

    # Write the updated content
    with open(output_path, 'w', encoding='utf-8') as f:
        f.write(updated_content)
def _apply_updates_to_content(self, content: str) -> str:
    """Apply updates to the original XML content via string replacement.

    Operates on the raw boilerplate text (rather than the parsed tree) so
    CDATA sections and formatting survive.  Mirrors the update_* methods.

    NOTE(review): each regex replaces EVERY occurrence of its attribute in
    the file; this is only safe while the boilerplate describes a single
    module — confirm if multi-module boilerplates are ever introduced.
    """
    import re

    # Update TargetName in root element
    content = re.sub(
        r'TargetName="[^"]*"',
        f'TargetName="{self.config.name}"',
        content
    )

    # Update ExportDate (same RSLogix-style timestamp as update_export_date)
    export_date = datetime.now().strftime("%a %b %d %H:%M:%S %Y")
    content = re.sub(
        r'ExportDate="[^"]*"',
        f'ExportDate="{export_date}"',
        content
    )

    # Update Module Name (anchored on Use="Target" to hit only this module)
    content = re.sub(
        r'<Module Use="Target" Name="[^"]*"',
        f'<Module Use="Target" Name="{self.config.name}"',
        content
    )

    # Update ParentModule and ParentModPortId
    content = re.sub(
        r'ParentModule="[^"]*"',
        f'ParentModule="{self.config.parent_module}"',
        content
    )
    content = re.sub(
        r'ParentModPortId="[^"]*"',
        f'ParentModPortId="{self.config.parent_port_id}"',
        content
    )

    # Update Port Address (Port Id="2" is the IO-Link port in the template)
    content = re.sub(
        r'<Port Id="2" Address="[^"]*"',
        f'<Port Id="2" Address="{self.config.port_address}"',
        content
    )

    # Update Inhibited and MajorFault status (L5X lowercase booleans)
    content = re.sub(
        r'Inhibited="[^"]*"',
        f'Inhibited="{"true" if self.config.inhibited else "false"}"',
        content
    )
    content = re.sub(
        r'MajorFault="[^"]*"',
        f'MajorFault="{"true" if self.config.major_fault else "false"}"',
        content
    )

    # Update comments sections
    if self.config.input_comments:
        content = self._update_input_comments_in_content(content)

    if self.config.output_comments:
        content = self._update_output_comments_in_content(content)

    return content
def _update_input_comments_in_content(self, content: str) -> str:
    """Update input comments in the content string.

    Replaces an existing <Comments> block under <InputTag> if present;
    otherwise inserts a new block immediately before the <Data> element.
    Comment bodies are wrapped in CDATA to match the boilerplate format.
    """
    import re

    # First try to find existing Comments section
    pattern = r'(<InputTag[^>]*>\s*<Comments>)(.*?)(</Comments>)'

    def replace_comments(match):
        # Rebuild the whole Comments block from config; old entries dropped.
        start = match.group(1)
        end = match.group(3)

        # Determine base indentation from existing block
        m_indent = re.search(r"\n(\s*)<", start)
        base_indent = m_indent.group(1) if m_indent else " "  # 12 spaces as fallback

        # Build comments exactly like boiler-plate
        pieces = []
        for operand, txt in self.config.input_comments.items():
            pieces.extend([
                f"{base_indent}<Comment Operand=\"{operand}\">",
                f"{base_indent} <![CDATA[{txt}]]>",
                f"{base_indent}</Comment>"
            ])

        return f"{start}\n" + "\n".join(pieces) + f"\n{base_indent}{end}"

    # Try to replace existing Comments section
    new_content = re.sub(pattern, replace_comments, content, flags=re.DOTALL)

    # If no replacement was made, we need to create the Comments section
    if new_content == content:
        # Find InputTag and insert Comments section
        input_tag_pattern = r'(<InputTag[^>]*>)(\s*<Data)'

        def insert_comments(match):
            tag_start = match.group(1)
            data_start = match.group(2)

            # Determine indentation from the whitespace preceding <Data>
            m_indent = re.search(r"\n(\s*)<Data", data_start)
            base_indent = m_indent.group(1) if m_indent else " "

            # Build comments section
            # NOTE(review): <Comment> children share base_indent with the
            # enclosing <Comments> (no extra indent level) — cosmetic only.
            pieces = [f"{tag_start}", f"{base_indent}<Comments>"]
            for operand, txt in self.config.input_comments.items():
                pieces.extend([
                    f"{base_indent}<Comment Operand=\"{operand}\">",
                    f"{base_indent} <![CDATA[{txt}]]>",
                    f"{base_indent}</Comment>"
                ])
            pieces.append(f"{base_indent}</Comments>")

            return "\n".join(pieces) + data_start

        new_content = re.sub(input_tag_pattern, insert_comments, content, flags=re.DOTALL)

    return new_content
def _update_output_comments_in_content(self, content: str) -> str:
    """Update output comments in the content string.

    Mirror of _update_input_comments_in_content for <OutputTag>: replace
    an existing <Comments> block, or insert one before <Data>.
    """
    import re

    # First try to find existing Comments section
    pattern = r'(<OutputTag[^>]*>\s*<Comments>)(.*?)(</Comments>)'

    def replace_comments(match):
        start = match.group(1)
        end = match.group(3)

        import re  # NOTE(review): redundant re-import (already imported above); harmless
        m_indent = re.search(r"\n(\s*)<", start)
        base_indent = m_indent.group(1) if m_indent else " "

        # Rebuild the Comments block from config, CDATA-wrapped bodies.
        pieces = []
        for operand, txt in self.config.output_comments.items():
            pieces.extend([
                f"{base_indent}<Comment Operand=\"{operand}\">",
                f"{base_indent} <![CDATA[{txt}]]>",
                f"{base_indent}</Comment>"
            ])

        return f"{start}\n" + "\n".join(pieces) + f"\n{base_indent}{end}"

    # Try to replace existing Comments section
    new_content = re.sub(pattern, replace_comments, content, flags=re.DOTALL)

    # If no replacement was made, we need to create the Comments section
    if new_content == content:
        # Find OutputTag and insert Comments section
        output_tag_pattern = r'(<OutputTag[^>]*>)(\s*<Data)'

        def insert_comments(match):
            tag_start = match.group(1)
            data_start = match.group(2)

            # Determine indentation from the whitespace preceding <Data>
            m_indent = re.search(r"\n(\s*)<Data", data_start)
            base_indent = m_indent.group(1) if m_indent else " "

            # Build comments section
            pieces = [f"{tag_start}", f"{base_indent}<Comments>"]
            for operand, txt in self.config.output_comments.items():
                pieces.extend([
                    f"{base_indent}<Comment Operand=\"{operand}\">",
                    f"{base_indent} <![CDATA[{txt}]]>",
                    f"{base_indent}</Comment>"
                ])
            pieces.append(f"{base_indent}</Comments>")

            return "\n".join(pieces) + data_start

        new_content = re.sub(output_tag_pattern, insert_comments, content, flags=re.DOTALL)

    return new_content
def get_xml_string(self) -> str:
    """Get the XML as a string.

    Note: serializes the parsed tree, so CDATA markers are not preserved
    here (unlike save()).

    Raises:
        RuntimeError: If load_boilerplate() has not been called.
    """
    if self.tree is None:
        raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")

    return ET.tostring(self.root, encoding='unicode')
# ------------------------------------------------------------------
|
||||||
|
# Factory helper for EnhancedMCMGenerator refactor
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
@classmethod
def from_excel(cls, module_data: "ModuleData") -> "TurckHubModuleGenerator":
    """Create, configure, and return a generator directly from Excel data.

    Convenience factory: derives the variant, parent wiring, and comment
    maps from *module_data*, then runs the full load/apply pipeline.
    """
    parent, port_id, address = _parent_info(module_data)
    in_comments, out_comments = _extract_comments(module_data)

    config = create_turck_hub_module(
        name=module_data.tagname,
        variant=_determine_variant(module_data),
        parent_module=parent,
        parent_port_id=port_id,
        port_address=address,
        input_comments=in_comments or None,
        output_comments=out_comments or None,
    )

    generator = cls(config)
    generator.load_boilerplate()
    generator.apply_updates()
    return generator
|
||||||
|
def get_valid_port_addresses() -> List[str]:
    """Return the valid port addresses for Turck Hub modules.

    Returns:
        The even addresses 0-14 as strings:
        ['0', '2', '4', '6', '8', '10', '12', '14']
    """
    return [str(address) for address in (0, 2, 4, 6, 8, 10, 12, 14)]
def create_turck_hub_module(name: str, variant: str, parent_module: str = "D2CMaster",
                            parent_port_id: str = "4", port_address: str = None,
                            input_comments: Optional[Dict[str, str]] = None,
                            output_comments: Optional[Dict[str, str]] = None,
                            inhibited: bool = False, major_fault: bool = False) -> TurckHubModuleConfig:
    """Build a TurckHubModuleConfig from individual settings.

    Args:
        name: Module name.
        variant: One of "Chute_Load", "Chute_Chute", "Load_Chute", "PDP_FIOH".
        parent_module: Parent module name.
        parent_port_id: Parent module port ID.
        port_address: IO-Link port address — must be an even number
            (0, 2, 4, 6, 8, 10, 12, 14); None selects the variant default.
        input_comments: Dict of input tag comments.
        output_comments: Dict of output tag comments.
        inhibited: Whether module starts inhibited.
        major_fault: Whether module starts with major fault.

    Returns:
        TurckHubModuleConfig instance.

    Raises:
        ValueError: If port_address is not a valid even number between 0-14
            (raised by the config's own validation).
    """
    settings = dict(
        name=name,
        variant=variant,
        parent_module=parent_module,
        parent_port_id=parent_port_id,
        port_address=port_address,
        input_comments=input_comments,
        output_comments=output_comments,
        inhibited=inhibited,
        major_fault=major_fault,
    )
    return TurckHubModuleConfig(**settings)
# Helper functions to get default comment structures for each variant
|
||||||
|
def get_chute_load_default_input_comments() -> Dict[str, str]:
    """Return the default input-comment map for the Chute_Load variant."""
    labelled_connectors = [
        ("CONNECTOR_4_A_PIN_4", "PE"),
        ("CONNECTOR_3_A_PIN_4", "PB In"),
        ("CONNECTOR_2_A_PIN_4", "PE 50"),
        ("CONNECTOR_6_A_PIN_4", "PB In"),
        ("CONNECTOR_5_A_PIN_4", "PE"),
    ]
    return {f".PROCESSDATAIN.{connector}": label for connector, label in labelled_connectors}
def get_chute_load_default_output_comments() -> Dict[str, str]:
    """Return the default output-comment map for the Chute_Load variant."""
    labelled_connectors = [
        ("CONNECTOR_3_B_PIN_2", "PB LT Out"),
        ("CONNECTOR_1_B_PIN_2", "Beacon Segment2"),
        ("CONNECTOR_1_A_PIN_4", "Beacon Segment1"),
        ("CONNECTOR_8_A_PIN_4", "Sol"),
        ("CONNECTOR_7_B_PIN_2", "Beacon Segment2"),
        ("CONNECTOR_7_A_PIN_4", "Beacon Segment1"),
    ]
    return {f".PROCESSDATAOUT.{connector}": label for connector, label in labelled_connectors}
def get_chute_chute_default_input_comments() -> Dict[str, str]:
    """Return the default input-comment map for the Chute_Chute variant."""
    labelled_connectors = [
        ("CONNECTOR_4_A_PIN_4", "PE 100"),
        ("CONNECTOR_3_A_PIN_4", "PE 100"),
        ("CONNECTOR_2_A_PIN_4", "PE 50"),
        ("CONNECTOR_1_A_PIN_4", "PE 50"),
        ("CONNECTOR_6_A_PIN_4", "PB In"),
        ("CONNECTOR_5_A_PIN_4", "PB In"),
    ]
    return {f".PROCESSDATAIN.{connector}": label for connector, label in labelled_connectors}
def get_chute_chute_default_output_comments() -> Dict[str, str]:
    """Return the default output-comment map for the Chute_Chute variant."""
    # Both chute solenoids live on connectors 7 and 8 (pin A4).
    return {
        f".PROCESSDATAOUT.CONNECTOR_{connector}_A_PIN_4": "Sol"
        for connector in (8, 7)
    }
def get_load_chute_default_input_comments() -> Dict[str, str]:
    """Return the default input-comment map for the Load_Chute variant."""
    labelled_connectors = [
        ("CONNECTOR_4_A_PIN_4", "PB In"),
        ("CONNECTOR_3_A_PIN_4", "PE 100"),
        ("CONNECTOR_1_A_PIN_4", "PE 50"),
        ("CONNECTOR_6_A_PIN_4", "PE"),
        ("CONNECTOR_5_A_PIN_4", "PB In"),
    ]
    return {f".PROCESSDATAIN.{connector}": label for connector, label in labelled_connectors}
def get_load_chute_default_output_comments() -> Dict[str, str]:
    """Return the default output-comment map for the Load_Chute variant."""
    labelled_connectors = [
        ("CONNECTOR_4_B_PIN_2", "PB Out"),
        ("CONNECTOR_2_B_PIN_2", "Beacon Segment2"),
        ("CONNECTOR_2_A_PIN_4", "Beacon Segment1"),
        ("CONNECTOR_8_B_PIN_2", "Beacon Segment2"),
        ("CONNECTOR_8_A_PIN_4", "Beacon Segment1"),
        ("CONNECTOR_7_A_PIN_4", "Sol"),
    ]
    return {f".PROCESSDATAOUT.{connector}": label for connector, label in labelled_connectors}
def get_pdp_fioh_default_input_comments() -> Dict[str, str]:
    """Return the default input-comment map for the PDP_FIOH variant.

    Sixteen circuit breakers, two per connector: even breakers on pin A4,
    odd breakers on pin B2 (breaker N lives on connector (N-1)//2 + 1).
    """
    return {
        f".PROCESSDATAIN.CONNECTOR_{index // 2 + 1}_{'A_PIN_4' if index % 2 == 0 else 'B_PIN_2'}":
            f"Circuit Breaker {index + 1}"
        for index in range(16)
    }
# --------------------------------------------------------------------------------
|
||||||
|
# Helper logic migrated from EnhancedMCMGenerator for behaviour parity
|
||||||
|
# --------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def _determine_variant(module_data: "ModuleData") -> str:
|
||||||
|
"""Determine Turck hub variant based on module name and DESC patterns."""
|
||||||
|
|
||||||
|
# Check for PDP FIOH modules first (name-based detection)
|
||||||
|
module_name = module_data.tagname.upper()
|
||||||
|
if "PDP" in module_name and "FIOH" in module_name:
|
||||||
|
return "PDP_FIOH"
|
||||||
|
|
||||||
|
# Build terminal to description mapping
|
||||||
|
terminal_desc: Dict[str, str] = {}
|
||||||
|
for m in module_data.io_mappings:
|
||||||
|
if m.terminal and m.description:
|
||||||
|
terminal_desc[m.terminal.upper()] = m.description.upper()
|
||||||
|
|
||||||
|
io4 = terminal_desc.get("IO4", "")
|
||||||
|
io10 = terminal_desc.get("IO10", "")
|
||||||
|
io11 = terminal_desc.get("IO11", "")
|
||||||
|
io12 = terminal_desc.get("IO12", "")
|
||||||
|
|
||||||
|
# Check for Chute_Load variant: JR1 in IO11 or IO10
|
||||||
|
if "JR1" in io11 or "JR1" in io10:
|
||||||
|
return "Chute_Load"
|
||||||
|
# Check for Chute_Chute variant: PR1 in IO4 or IO12
|
||||||
|
if "PR1" in io4 or "PR1" in io12:
|
||||||
|
return "Chute_Chute"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Check for Load_Chute variant (different from Chute_Load)
|
||||||
|
# This might need additional logic - for now keeping as separate case
|
||||||
|
# You may need to specify the exact criteria for Load_Chute
|
||||||
|
|
||||||
|
# Default to Chute_Load for other cases
|
||||||
|
return "Chute_Load"
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_comments(module_data: "ModuleData") -> tuple[Dict[str, str], Dict[str, str]]:
|
||||||
|
input_comments: Dict[str, str] = {}
|
||||||
|
output_comments: Dict[str, str] = {}
|
||||||
|
|
||||||
|
for m in module_data.io_mappings:
|
||||||
|
if not (m.io_path and m.description):
|
||||||
|
continue
|
||||||
|
comment_text = m.description
|
||||||
|
|
||||||
|
# For PDP FIOH modules, map IO terminals to connector operands
|
||||||
|
if module_data.tagname.upper().find("PDP") != -1 and module_data.tagname.upper().find("FIOH1") != -1:
|
||||||
|
operand = _map_pdp_io_to_connector(m.terminal, m.signal)
|
||||||
|
if operand:
|
||||||
|
if m.signal.upper() == "I":
|
||||||
|
input_comments[operand] = comment_text
|
||||||
|
elif m.signal.upper() == "O":
|
||||||
|
output_comments[operand] = comment_text
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Original logic for other variants
|
||||||
|
if ":" in m.io_path:
|
||||||
|
_, io_part = m.io_path.split(":", 1)
|
||||||
|
else:
|
||||||
|
io_part = m.io_path # fallback
|
||||||
|
io_part_up = io_part.upper()
|
||||||
|
|
||||||
|
if "I.PROCESSDATAIN." in io_part_up:
|
||||||
|
connector = io_part_up.split("I.PROCESSDATAIN.", 1)[1]
|
||||||
|
input_comments[f".PROCESSDATAIN.{connector}"] = comment_text
|
||||||
|
elif "O.PROCESSDATAOUT." in io_part_up:
|
||||||
|
connector = io_part_up.split("O.PROCESSDATAOUT.", 1)[1]
|
||||||
|
output_comments[f".PROCESSDATAOUT.{connector}"] = comment_text
|
||||||
|
else:
|
||||||
|
# Handle path without colon by checking substrings
|
||||||
|
if "I.PROCESSDATAIN." in io_part_up:
|
||||||
|
connector = io_part_up.split("I.PROCESSDATAIN.", 1)[1]
|
||||||
|
input_comments[f".PROCESSDATAIN.{connector}"] = comment_text
|
||||||
|
elif "O.PROCESSDATAOUT." in io_part_up:
|
||||||
|
connector = io_part_up.split("O.PROCESSDATAOUT.", 1)[1]
|
||||||
|
output_comments[f".PROCESSDATAOUT.{connector}"] = comment_text
|
||||||
|
|
||||||
|
return input_comments, output_comments
|
||||||
|
|
||||||
|
|
||||||
|
def _map_pdp_io_to_connector(terminal: str, signal: str) -> str:
|
||||||
|
"""Map PDP FIOH IO terminal to connector operand.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
- IO00 -> .PROCESSDATAIN.CONNECTOR_1_A_PIN_4
|
||||||
|
- IO01 -> .PROCESSDATAIN.CONNECTOR_1_B_PIN_2
|
||||||
|
- IO02 -> .PROCESSDATAIN.CONNECTOR_2_A_PIN_4
|
||||||
|
- etc.
|
||||||
|
"""
|
||||||
|
if not terminal.upper().startswith("IO"):
|
||||||
|
return ""
|
||||||
|
|
||||||
|
try:
|
||||||
|
io_num = int(terminal.upper().replace("IO", ""))
|
||||||
|
except ValueError:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
# Calculate connector number (1-based)
|
||||||
|
connector_num = (io_num // 2) + 1
|
||||||
|
|
||||||
|
# Determine pin type based on even/odd
|
||||||
|
if io_num % 2 == 0:
|
||||||
|
pin_type = "A_PIN_4"
|
||||||
|
else:
|
||||||
|
pin_type = "B_PIN_2"
|
||||||
|
|
||||||
|
# Build operand
|
||||||
|
if signal.upper() == "I":
|
||||||
|
return f".PROCESSDATAIN.CONNECTOR_{connector_num}_{pin_type}"
|
||||||
|
elif signal.upper() == "O":
|
||||||
|
return f".PROCESSDATAOUT.CONNECTOR_{connector_num}_{pin_type}"
|
||||||
|
else:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
def _parent_info(module_data: "ModuleData") -> tuple[str, str, str]:
|
||||||
|
"""Mimic EnhancedMCMGenerator._get_parent_info for a single FIOH module."""
|
||||||
|
|
||||||
|
# Extract port address from terminal - use the actual terminal number
|
||||||
|
term = module_data.terminal.upper() if module_data.terminal else ""
|
||||||
|
if term.startswith("IO"):
|
||||||
|
# Extract the numeric part directly (IO4 -> 4, IO12 -> 12, IO14 -> 14)
|
||||||
|
try:
|
||||||
|
port_address = term[2:] # Get everything after "IO"
|
||||||
|
except:
|
||||||
|
port_address = "4" # Default to channel 4 if parsing fails
|
||||||
|
else:
|
||||||
|
port_address = "4" # Default to channel 4 if not an IO terminal
|
||||||
|
|
||||||
|
# The parent_module should be set from Excel data (e.g., "PDP1_FIO1")
|
||||||
|
parent_module = module_data.parent_module
|
||||||
|
if not parent_module:
|
||||||
|
raise ValueError(f"FIOH module {module_data.tagname} missing parent module information")
|
||||||
|
|
||||||
|
# For FIOH modules connected via IO-Link, always use port 4 (the IO-Link port on M12DR)
|
||||||
|
# The port_address is different - it's the address on the IO-Link network
|
||||||
|
parent_port_id = "4"
|
||||||
|
|
||||||
|
return parent_module, parent_port_id, port_address
|
||||||
|
|
||||||
|
|
||||||
|
# Example usage
|
||||||
|
if __name__ == "__main__":
    # Demonstration of the four supported variants; each example follows the
    # same pipeline: build config -> load boilerplate -> apply -> save L5X.

    # Example: Create a Chute_Load hub module
    chute_load_config = create_turck_hub_module(
        name="Chute_Load_Hub1",
        variant="Chute_Load",
        parent_module="D2CMaster",
        parent_port_id="4",
        input_comments={
            ".PROCESSDATAIN.CONNECTOR_4_A_PIN_4": "Emergency Stop",
            ".PROCESSDATAIN.CONNECTOR_3_A_PIN_4": "Reset Button",
            ".PROCESSDATAIN.CONNECTOR_2_A_PIN_4": "Photo Eye 1",
            ".PROCESSDATAIN.CONNECTOR_6_A_PIN_4": "Photo Eye 2"
        },
        output_comments={
            ".PROCESSDATAOUT.CONNECTOR_3_B_PIN_2": "Status Light",
            ".PROCESSDATAOUT.CONNECTOR_1_B_PIN_2": "Warning Beacon",
            ".PROCESSDATAOUT.CONNECTOR_8_A_PIN_4": "Conveyor Motor"
        }
    )

    generator = TurckHubModuleGenerator(chute_load_config)
    generator.load_boilerplate()
    generator.apply_updates()
    generator.save("generated_projects/Chute_Load_Hub1.L5X")

    # Example: Create a Chute_Chute hub module with defaults
    chute_chute_config = create_turck_hub_module(
        name="Chute_Chute_Hub2",
        variant="Chute_Chute",
        input_comments=get_chute_chute_default_input_comments(),
        output_comments=get_chute_chute_default_output_comments()
    )

    generator2 = TurckHubModuleGenerator(chute_chute_config)
    generator2.load_boilerplate()
    generator2.apply_updates()
    generator2.save("generated_projects/Chute_Chute_Hub2.L5X")

    # Example: Create a Load_Chute hub module
    load_chute_config = create_turck_hub_module(
        name="Load_Chute_Hub3",
        variant="Load_Chute",
        parent_module="IOLMMaster1",
        parent_port_id="2",
        input_comments=get_load_chute_default_input_comments(),
        output_comments=get_load_chute_default_output_comments()
    )

    generator3 = TurckHubModuleGenerator(load_chute_config)
    generator3.load_boilerplate()
    generator3.apply_updates()
    generator3.save("generated_projects/Load_Chute_Hub3.L5X")

    # Example: Create a PDP_FIOH hub module (inputs only: circuit breakers)
    pdp_fioh_config = create_turck_hub_module(
        name="PDP1_FIOH1",
        variant="PDP_FIOH",
        parent_module="SLOT2_EN4TR",
        parent_port_id="2",
        input_comments=get_pdp_fioh_default_input_comments()
    )

    generator4 = TurckHubModuleGenerator(pdp_fioh_config)
    generator4.load_boilerplate()
    generator4.apply_updates()
    generator4.save("generated_projects/PDP1_FIOH1.L5X")

    print(f"Generated Chute_Load hub module: {chute_load_config.name}")
    print(f"Generated Chute_Chute hub module: {chute_chute_config.name}")
    print(f"Generated Load_Chute hub module: {load_chute_config.name}")
    print(f"Generated PDP_FIOH hub module: {pdp_fioh_config.name}")
156
IO Tree Configuration Generator/models/vfd_boilerplate_model.py
Normal file
156
IO Tree Configuration Generator/models/vfd_boilerplate_model.py
Normal file
@ -0,0 +1,156 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
VFD Module Boilerplate Model
|
||||||
|
============================
|
||||||
|
|
||||||
|
Model for VFD (Variable Frequency Drive) modules with support for different horsepower ratings.
|
||||||
|
Supports 15, 20, and 30 HP variants.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Optional, TYPE_CHECKING
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
import os
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from excel_data_processor import ModuleData
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class VFDModuleConfig:
    """Configuration for a VFD module instance."""
    # Module name as it appears in the I/O tree (e.g., "VFD1").
    name: str
    # Horsepower rating: "15", "20", "30" — validated by
    # VFDModuleGenerator against HP_BOILERPLATE_MAP.
    hp: str
    # EtherNet/IP address written to the module's Ethernet port.
    ip_address: str = "11.200.1.10"
    # Name of the parent module in the I/O tree.
    parent_module: str = "SLOT2_EN4TR"
    # Port id on the parent module this drive hangs off.
    parent_port_id: str = "2"
|
||||||
|
|
||||||
|
|
||||||
|
class VFDModuleGenerator:
    """Generate a VFD module L5X file from an HP-specific boilerplate.

    Loads one of three boilerplate templates (15/20/30 HP), rewrites the
    module name, IP address, parent-module linkage and export date, and
    writes the result back out as L5X.
    """

    # Mapping of supported HP values to boilerplate filenames.
    HP_BOILERPLATE_MAP = {
        "15": "VFD_Module_15_HP.L5X",
        "20": "VFD_Module_20_HP.L5X",
        "30": "VFD_Module_30_HP.L5X"
    }

    def __init__(self, config: 'VFDModuleConfig'):
        """Store *config* and resolve the boilerplate path.

        Raises:
            ValueError: if ``config.hp`` is not one of the supported ratings.
        """
        self.config = config

        # Validate the HP rating up front so a bad config fails fast.
        if self.config.hp not in self.HP_BOILERPLATE_MAP:
            raise ValueError(f"Unsupported HP value: {self.config.hp}. Supported values: 15, 20, 30")

        self.boilerplate_filename = self.HP_BOILERPLATE_MAP[self.config.hp]
        self.boilerplate_path = os.path.join("boilerplate", self.boilerplate_filename)
        self.tree = None   # ElementTree, populated by load_boilerplate()
        self.root = None   # Root element of the loaded tree

    def load_boilerplate(self):
        """Load the HP-appropriate boilerplate template.

        Raises:
            FileNotFoundError: if the template file is missing on disk.
        """
        if not os.path.exists(self.boilerplate_path):
            raise FileNotFoundError(f"Boilerplate file not found: {self.boilerplate_path}")

        self.tree = ET.parse(self.boilerplate_path)
        self.root = self.tree.getroot()

    def update_module_name(self):
        """Write the configured name into the root and target Module."""
        self.root.set("TargetName", self.config.name)

        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("Name", self.config.name)

    def update_ip_address(self):
        """Update the IP address on the Ethernet port, if present."""
        port = self.root.find(".//Port[@Type='Ethernet']")
        if port is not None:
            port.set("Address", self.config.ip_address)

    def update_parent_module(self):
        """Update parent module and parent port references."""
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("ParentModule", self.config.parent_module)
            module.set("ParentModPortId", self.config.parent_port_id)

    def update_export_date(self):
        """Stamp the root element with the current export date."""
        export_date = datetime.now().strftime("%a %b %d %H:%M:%S %Y")
        self.root.set("ExportDate", export_date)

    def apply_updates(self):
        """Apply all configuration updates to the loaded boilerplate."""
        self.update_module_name()
        self.update_ip_address()
        self.update_parent_module()
        self.update_export_date()

    def save(self, output_path: str):
        """Write the updated module to *output_path*.

        Creates the destination directory if needed — consistent with the
        other module generators in this project (e.g. the ZMX generator);
        previously saving into a non-existent directory raised
        FileNotFoundError.

        Raises:
            RuntimeError: if no boilerplate has been loaded yet.
        """
        if self.tree is None:
            raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")

        output_dir = os.path.dirname(output_path)
        if output_dir:
            os.makedirs(output_dir, exist_ok=True)

        # Save with proper formatting
        self.tree.write(output_path, encoding='UTF-8', xml_declaration=True)

    def get_xml_string(self) -> str:
        """Return the current XML as a unicode string.

        Raises:
            RuntimeError: if no boilerplate has been loaded yet.
        """
        if self.tree is None:
            raise RuntimeError("No boilerplate loaded. Call load_boilerplate() first.")

        return ET.tostring(self.root, encoding='unicode')
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
# Helper for EnhancedMCMGenerator refactor
# ------------------------------------------------------------------

# NOTE(review): this function is decorated with @classmethod but, as laid
# out here, does not appear to sit inside VFDModuleGenerator's class body.
# A module-level classmethod object is not directly callable — confirm the
# indentation places this inside the class.
@classmethod
def from_excel(cls, module_data: 'ModuleData', hp: str) -> 'VFDModuleGenerator':
    # Build a fully-updated generator from an Excel ModuleData row.
    # Falls back to the default IP when the row carries none.
    cfg = create_vfd_module(
        name=module_data.tagname,
        hp=hp,
        ip_address=module_data.ip_address or "11.200.1.10",
        parent_module="SLOT2_EN4TR",
        parent_port_id="2",
    )
    gen = cls(cfg)
    gen.load_boilerplate()
    gen.apply_updates()
    return gen
|
||||||
|
|
||||||
|
|
||||||
|
def create_vfd_module(name: str, hp: str, ip_address: str = "11.200.1.10",
                      parent_module: str = "SLOT2_EN4TR", parent_port_id: str = "2") -> VFDModuleConfig:
    """Factory: bundle the given settings into a :class:`VFDModuleConfig`."""
    settings = dict(
        name=name,
        hp=hp,
        ip_address=ip_address,
        parent_module=parent_module,
        parent_port_id=parent_port_id,
    )
    return VFDModuleConfig(**settings)
|
||||||
|
|
||||||
|
|
||||||
|
# Example usage
if __name__ == "__main__":
    # Build and emit a 15 HP drive as a quick smoke test of the generator.
    demo_config = create_vfd_module(
        name="VFD1_15HP",
        hp="15",  # Specify the horsepower
        ip_address="11.200.1.10",
        parent_module="SLOT2_EN4TR"
    )

    demo_generator = VFDModuleGenerator(demo_config)
    demo_generator.load_boilerplate()
    demo_generator.apply_updates()
    demo_generator.save("generated/VFD1_15HP.L5X")

    print(f"Generated {demo_config.hp} HP VFD module: {demo_config.name}")
|
||||||
178
IO Tree Configuration Generator/models/zmx_boilerplate_model.py
Normal file
178
IO Tree Configuration Generator/models/zmx_boilerplate_model.py
Normal file
@ -0,0 +1,178 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Banner ZMX Module Boilerplate Model
|
||||||
|
===================================
|
||||||
|
|
||||||
|
Model for Banner ZMX modules.
|
||||||
|
Supports name, IP address, and parent module configuration.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Optional, TYPE_CHECKING
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
import os
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from excel_data_processor import ModuleData
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ZMXModuleConfig:
    """Configuration for a Banner ZMX module instance."""
    # Module name as it appears in the I/O tree (e.g., "ZMX1").
    name: str
    # EtherNet/IP address written to the module's Ethernet port.
    ip_address: str = "192.168.1.2"
    # Name of the parent module in the I/O tree.
    parent_module: str = "SLOT2_EN4TR"
    # Port id on the parent module this device hangs off.
    parent_port_id: str = "2"
|
||||||
|
|
||||||
|
|
||||||
|
class ZMXModuleGenerator:
    """Generate Banner ZMX module configurations from a boilerplate L5X.

    Loads ``boilerplate/ZMX_Module.L5X``, rewrites the module name, IP
    address, parent linkage and export date, then writes the result.
    """

    def __init__(self, config: 'ZMXModuleConfig'):
        """Store *config* and resolve the boilerplate path."""
        self.config = config
        self.boilerplate_filename = "ZMX_Module.L5X"
        self.boilerplate_path = os.path.join("boilerplate", self.boilerplate_filename)
        self.tree = None   # ElementTree, populated by load_boilerplate()
        self.root = None   # Root element of the loaded tree

    def load_boilerplate(self):
        """Load the boilerplate XML file.

        Raises:
            FileNotFoundError: if the template file is missing on disk.
        """
        if not os.path.exists(self.boilerplate_path):
            raise FileNotFoundError(f"Boilerplate file not found: {self.boilerplate_path}")

        self.tree = ET.parse(self.boilerplate_path)
        self.root = self.tree.getroot()

    def update_module_name(self):
        """Write the configured name into the target Module and root."""
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("Name", self.config.name)

        self.root.set("TargetName", self.config.name)

    def update_ip_address(self):
        """Update the IP address on the Ethernet port, if present."""
        port = self.root.find(".//Port[@Type='Ethernet']")
        if port is not None:
            port.set("Address", self.config.ip_address)

    def update_parent_module(self):
        """Update parent module and parent port references."""
        module = self.root.find(".//Module[@Use='Target']")
        if module is not None:
            module.set("ParentModule", self.config.parent_module)
            module.set("ParentModPortId", self.config.parent_port_id)

    def update_export_date(self):
        """Stamp the root element with the current export date."""
        export_date = datetime.now().strftime("%a %b %d %H:%M:%S %Y")
        self.root.set("ExportDate", export_date)

    def apply_updates(self):
        """Apply all configuration updates to the loaded boilerplate."""
        self.update_module_name()
        self.update_ip_address()
        self.update_parent_module()
        self.update_export_date()

    def save(self, output_path: str):
        """Save the configured module to *output_path*.

        Creates the destination directory if it does not exist.

        Raises:
            ValueError: if no boilerplate has been loaded yet.
        """
        if self.tree is None:
            raise ValueError("No boilerplate loaded. Call load_boilerplate() first.")

        # Create output directory if it doesn't exist
        output_dir = os.path.dirname(output_path)
        if output_dir and not os.path.exists(output_dir):
            os.makedirs(output_dir)

        # BUG FIX: the previous implementation wrote an XML declaration
        # manually and then let ElementTree emit a second one (tree.write
        # with encoding='UTF-8' adds its own declaration by default),
        # producing invalid XML with two declarations.  Suppress the
        # library's declaration so exactly one is written.
        with open(output_path, 'wb') as f:
            f.write(b'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n')
            self.tree.write(f, encoding='UTF-8', xml_declaration=False)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
# Convenience helper for generator refactor
# ------------------------------------------------------------------

# NOTE(review): this function is decorated with @classmethod but, as laid
# out here, does not appear to sit inside ZMXModuleGenerator's class body.
# A module-level classmethod object is not directly callable — confirm the
# indentation places this inside the class.
@classmethod
def from_excel(cls, module_data: 'ModuleData') -> 'ZMXModuleGenerator':
    """Create and return a generator configured from Excel ModuleData."""
    # Fall back to the default ZMX address when the row carries none.
    ip_addr = module_data.ip_address or "192.168.1.2"
    cfg = create_zmx_module(
        name=module_data.tagname,
        ip_address=ip_addr,
        parent_module="SLOT2_EN4TR",
        parent_port_id="2",
    )
    gen = cls(cfg)
    gen.load_boilerplate()
    gen.apply_updates()
    return gen
|
||||||
|
|
||||||
|
|
||||||
|
def create_zmx_module(name: str, ip_address: str = "192.168.1.2",
                      parent_module: str = "SLOT2_EN4TR",
                      parent_port_id: str = "2") -> ZMXModuleConfig:
    """
    Factory function to create a Banner ZMX module configuration.

    Args:
        name: Module name (e.g., "ZMX1")
        ip_address: IP address for the module (default: "192.168.1.2")
        parent_module: Parent module name (default: "SLOT2_EN4TR")
        parent_port_id: Parent port ID (default: "2")

    Returns:
        ZMXModuleConfig: Configured Banner ZMX module
    """
    settings = dict(
        name=name,
        ip_address=ip_address,
        parent_module=parent_module,
        parent_port_id=parent_port_id,
    )
    return ZMXModuleConfig(**settings)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Example usage of the Banner ZMX module generator."""
    print("Banner ZMX Module Generator Example")
    print("=" * 40)

    # Assemble the demo configuration.
    config = create_zmx_module(
        name="ZMX1",
        ip_address="192.168.1.10",
        parent_module="SLOT2_EN4TR"
    )

    # Run the generator pipeline: load template, apply config.
    generator = ZMXModuleGenerator(config)
    generator.load_boilerplate()
    generator.apply_updates()

    # Emit the result under generated/.
    os.makedirs("generated", exist_ok=True)
    output_file = f"generated/{config.name}.L5X"
    generator.save(output_file)

    print(f"Generated Banner ZMX module: {output_file}")
    print(f"  Name: {config.name}")
    print(f"  IP Address: {config.ip_address}")
    print(f"  Parent Module: {config.parent_module}")
    print(f"  Parent Port: {config.parent_port_id}")

    print("\nModule Features:")
    for feature in (
        "High-speed I/O data exchange",
        "Input data array (68 INT elements)",
        "Output data array (40 INT elements)",
        "Banner Engineering Corporation device",
        "Ethernet/IP communication",
    ):
        print(f"  - {feature}")


if __name__ == "__main__":
    main()
|
||||||
4
IO Tree Configuration Generator/requirements.txt
Normal file
4
IO Tree Configuration Generator/requirements.txt
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
pandas>=1.3.0
|
||||||
|
openpyxl>=3.0.0
|
||||||
|
pathlib2>=2.3.0; python_version < '3.4'
|
||||||
|
pytest>=7.0.0
|
||||||
2064
IO Tree Configuration Generator/state-of-the-art/MCM01_OUTPUT.csv
Normal file
2064
IO Tree Configuration Generator/state-of-the-art/MCM01_OUTPUT.csv
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
191
IO Tree Configuration Generator/test_sio_implementation.py
Normal file
191
IO Tree Configuration Generator/test_sio_implementation.py
Normal file
@ -0,0 +1,191 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Test script for SIO module implementation
|
||||||
|
========================================
|
||||||
|
|
||||||
|
Tests the SIO boilerplate model to ensure it works correctly with
|
||||||
|
IP address configuration and comment updates.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
# Add the models directory to the path
|
||||||
|
sys.path.append(os.path.join(os.path.dirname(__file__), 'models'))
|
||||||
|
|
||||||
|
from models.sio_boilerplate_model import create_sio_module, SIOModuleGenerator
|
||||||
|
|
||||||
|
|
||||||
|
def test_sio_basic_functionality():
    """Test basic SIO module creation and configuration.

    Builds a fully-populated SIO configuration, runs the generator
    pipeline, then verifies the resulting XML (IP, name, comment counts)
    and that the saved file exists and has a plausible size.

    Returns:
        bool: True when every check passes, False on the first failure.
    """
    print("Testing SIO module basic functionality...")

    # Create a basic SIO configuration with named safety I/O points.
    config = create_sio_module(
        name="SIO_TEST_1",
        ip_address="192.168.1.100",
        safety_input_names={
            0: "Emergency Stop Line 1",
            1: "Emergency Stop Line 2",
            2: "Safety Gate Position",
            3: "Light Curtain Active",
            4: "Safety Mat Pressure",
            5: "Reset Button Pressed",
            6: "Enable Switch Active",
            7: "Safety Scanner Zone"
        },
        safety_output_names={
            0: "Main Safety Relay",
            1: "Backup Safety Relay",
            2: "Warning Light Red",
            3: "Warning Light Amber",
            4: "Safety Brake Release",
            5: "Safety Brake Release",
            6: "Safety Brake Release",
            7: "Safety Brake Release"
        }
    )

    # Create generator and apply updates
    generator = SIOModuleGenerator(config)

    try:
        generator.load_boilerplate()
        print("  ✓ Boilerplate loaded successfully")

        generator.apply_updates()
        print("  ✓ Updates applied successfully")

        # Check that the IP address was updated
        port = generator.root.find(".//Port[@Type='Ethernet']")
        if port is not None and port.get("Address") == "192.168.1.100":
            print("  ✓ IP address updated correctly")
        else:
            print("  ✗ IP address not updated correctly")
            return False

        # Check that module name was updated
        module = generator.root.find(".//Module[@Use='Target']")
        if module is not None and module.get("Name") == "SIO_TEST_1":
            print("  ✓ Module name updated correctly")
        else:
            print("  ✗ Module name not updated correctly")
            return False

        # Check safety input comments (keyed by the connection's fixed id).
        si_comments = generator.root.find(".//Connection[@Name='_200424962CC22C87']/InputTag/Comments")
        if si_comments is not None:
            comment_count = len(list(si_comments))
            if comment_count == 8:  # Should have 8 safety input comments
                print(f"  ✓ Safety input comments added ({comment_count} comments)")
            else:
                print(f"  ✗ Expected 8 safety input comments, got {comment_count}")
                return False
        else:
            print("  ✗ Safety input comments section not found")
            return False

        # Check safety output comments
        # NOTE(review): 8 output names are configured above, yet only 5
        # comments are expected here — presumably the SIO model only emits
        # comments for 5 physical outputs; confirm against the boilerplate.
        so_comments = generator.root.find(".//Connection[@Name='_200424962C862CC2']/OutputTag/Comments")
        if so_comments is not None:
            comment_count = len(list(so_comments))
            if comment_count == 5:  # Should have 5 safety output comments
                print(f"  ✓ Safety output comments added ({comment_count} comments)")
            else:
                print(f"  ✗ Expected 5 safety output comments, got {comment_count}")
                return False
        else:
            print("  ✗ Safety output comments section not found")
            return False

        # Test saving the output
        os.makedirs("generated", exist_ok=True)
        output_path = "generated/SIO_TEST_1.L5X"
        generator.save(output_path)

        if os.path.exists(output_path):
            print(f"  ✓ SIO module saved successfully to {output_path}")
            # Check file size to ensure it's not empty
            file_size = os.path.getsize(output_path)
            if file_size > 1000:  # Reasonable size for XML file
                print(f"  ✓ Generated file has reasonable size ({file_size} bytes)")
            else:
                print(f"  ✗ Generated file seems too small ({file_size} bytes)")
                return False
        else:
            print("  ✗ Failed to save SIO module")
            return False

        return True

    except FileNotFoundError as e:
        print(f"  ✗ Boilerplate file not found: {e}")
        return False
    except Exception as e:
        print(f"  ✗ Error during testing: {e}")
        return False
|
||||||
|
|
||||||
|
|
||||||
|
def test_sio_minimal_config():
    """Test SIO module with minimal configuration."""
    print("\nTesting SIO module with minimal configuration...")

    # Only name and IP — everything else falls back to defaults.
    generator = SIOModuleGenerator(
        create_sio_module(name="SIO_MINIMAL", ip_address="10.0.0.50")
    )

    try:
        generator.load_boilerplate()
        generator.apply_updates()

        # Verify basic properties
        module = generator.root.find(".//Module[@Use='Target']")
        port = generator.root.find(".//Port[@Type='Ethernet']")

        name_ok = module is not None and module.get("Name") == "SIO_MINIMAL"
        addr_ok = port is not None and port.get("Address") == "10.0.0.50"

        if name_ok and addr_ok:
            print("  ✓ Minimal configuration applied successfully")
            return True

        print("  ✗ Minimal configuration failed")
        return False

    except Exception as e:
        print(f"  ✗ Error with minimal configuration: {e}")
        return False
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Run all SIO tests."""
    print("Running SIO Module Implementation Tests")
    print("=" * 40)

    # Run each scenario, collecting pass/fail booleans.
    test_results = [
        test_sio_basic_functionality(),
        test_sio_minimal_config(),
    ]

    # Summary
    print("\n" + "=" * 40)
    passed, total = sum(test_results), len(test_results)

    if passed == total:
        print(f"✓ All tests passed ({passed}/{total})")
        print("SIO module implementation is working correctly!")
        return 0

    print(f"✗ Some tests failed ({passed}/{total})")
    print("Please check the SIO implementation.")
    return 1


if __name__ == "__main__":
    exit(main())
|
||||||
75
IO Tree Configuration Generator/tests/test_regression.py
Normal file
75
IO Tree Configuration Generator/tests/test_regression.py
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
import sys
|
||||||
|
import pathlib
|
||||||
|
sys.path.insert(0, str(pathlib.Path(__file__).resolve().parent.parent))
|
||||||
|
|
||||||
|
import os
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from enhanced_mcm_generator import EnhancedMCMGenerator
|
||||||
|
|
||||||
|
|
||||||
|
def _canonicalize_xml(path: str) -> bytes:
|
||||||
|
"""Return a canonicalised representation of an L5X / XML file.
|
||||||
|
|
||||||
|
The function removes volatile attributes (e.g. ExportDate) and sorts
|
||||||
|
attributes of every element so the resulting byte string is stable
|
||||||
|
across Python runs and operating systems.
|
||||||
|
"""
|
||||||
|
tree = ET.parse(path)
|
||||||
|
root = tree.getroot()
|
||||||
|
|
||||||
|
# Remove volatile attributes that change on every export (present on many elements)
|
||||||
|
for elem in root.iter():
|
||||||
|
elem.attrib.pop("ExportDate", None)
|
||||||
|
|
||||||
|
# Recursively sort attributes to obtain a deterministic ordering
|
||||||
|
def _sort_attrs(elem: ET.Element):
|
||||||
|
if elem.attrib:
|
||||||
|
# Convert to list with sorted items to keep ElementTree stable
|
||||||
|
sorted_items = sorted(elem.attrib.items())
|
||||||
|
elem.attrib.clear()
|
||||||
|
elem.attrib.update(sorted_items)
|
||||||
|
for child in elem:
|
||||||
|
_sort_attrs(child)
|
||||||
|
|
||||||
|
_sort_attrs(root)
|
||||||
|
|
||||||
|
# Normalise text nodes: strip leading/trailing whitespace so that
|
||||||
|
# cosmetic indentation inside <Comment> elements does not cause false
|
||||||
|
# differences.
|
||||||
|
for elem in root.iter():
|
||||||
|
if elem.text is not None:
|
||||||
|
elem.text = elem.text.strip()
|
||||||
|
|
||||||
|
# ElementTree does not guarantee attribute ordering when converting to
|
||||||
|
# string, but because we have manually re-inserted sorted attributes we
|
||||||
|
# get a deterministic output here.
|
||||||
|
return ET.tostring(root, encoding="utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.regression
def test_generated_project_matches_golden(tmp_path):
    """Generate the project and compare it against the golden reference.

    If this test fails, a refactor has changed the *semantic* XML output.
    Check the diff to decide whether the change is intended or not.
    """
    project_name = "MCM04_Chute_Load"

    # 1. Run the generator to build a fresh project under the temporary dir
    # NOTE(review): tmp_path is never referenced below — the generator
    # writes to its default output location; confirm whether output should
    # actually be redirected into tmp_path.
    generator = EnhancedMCMGenerator(project_name, excel_file="Data.xlsx")
    assert generator.load_and_process_data(), "Failed to load/process Excel data"

    output_path = generator.generate_complete_project()

    # 2. Compare with the golden file
    golden_path = os.path.join(
        "generated_projects", "MCM04_Chute_Load_To_Compare_Against.L5X"
    )
    assert os.path.exists(golden_path), "Golden file is missing"

    # Semantic comparison: both files are canonicalised (volatile
    # attributes removed, attributes sorted) before the byte comparison.
    assert _canonicalize_xml(output_path) == _canonicalize_xml(
        golden_path
    ), "Generated project differs from golden reference"
|
||||||
21
IO Tree Configuration Generator/tools/diff_xml.py
Normal file
21
IO Tree Configuration Generator/tools/diff_xml.py
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
import xml.etree.ElementTree as ET
import difflib
import sys
import pathlib


def canon(path):
    """Parse *path*, drop volatile ExportDate attributes, sort element
    attributes, and return the canonical XML as a list of lines."""
    tree = ET.parse(path)
    root = tree.getroot()
    for elem in root.iter():
        elem.attrib.pop('ExportDate', None)
        # sort attributes (re-insertion fixes serialisation order)
        if elem.attrib:
            items = sorted(elem.attrib.items())
            elem.attrib.clear()
            elem.attrib.update(items)
    return ET.tostring(root, encoding='unicode').splitlines()


def main():
    """Print a unified diff of two canonicalised XML files given on argv."""
    left = canon(sys.argv[1])
    right = canon(sys.argv[2])
    for line in difflib.unified_diff(left, right, lineterm=''):
        print(line)


# IDIOM FIX: the comparison previously ran at module top level, so merely
# importing this module crashed on sys.argv — guard the entry point.
if __name__ == "__main__":
    main()
|
||||||
BIN
L5X2ACD Compiler/__pycache__/compilation_manager.cpython-312.pyc
Normal file
BIN
L5X2ACD Compiler/__pycache__/compilation_manager.cpython-312.pyc
Normal file
Binary file not shown.
BIN
L5X2ACD Compiler/__pycache__/l5x_to_acd.cpython-312.pyc
Normal file
BIN
L5X2ACD Compiler/__pycache__/l5x_to_acd.cpython-312.pyc
Normal file
Binary file not shown.
433
L5X2ACD Compiler/compilation_manager.py
Normal file
433
L5X2ACD Compiler/compilation_manager.py
Normal file
@ -0,0 +1,433 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Dynamic PLC Compilation Manager
|
||||||
|
===============================
|
||||||
|
|
||||||
|
Manages the L5X2ACD compilation directory by:
|
||||||
|
1. Wiping existing files completely before each run
|
||||||
|
2. Generating project-specific batch files
|
||||||
|
3. Copying appropriate L5X files based on the command/project
|
||||||
|
4. Creating dynamic compilation setups for different projects
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python compilation_manager.py --project MTN6_MCM01_UL1_UL3 --l5x-file "path/to/project.L5X"
|
||||||
|
python compilation_manager.py --project MTN6_MCM04_CHUTE_LOAD --l5x-file "path/to/project.L5X"
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import shutil
|
||||||
|
import argparse
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, List, Optional
|
||||||
|
import glob
|
||||||
|
|
||||||
|
class CompilationManager:
|
||||||
|
"""Manages dynamic compilation directory setup for different PLC projects."""
|
||||||
|
|
||||||
|
    def __init__(self, compilation_dir: Path):
        """Initialize the compilation manager.

        Args:
            compilation_dir: Path to the L5X2ACD Compiler directory
        """
        self.compilation_dir = Path(compilation_dir)
        # Path to the core conversion script that must survive wipes.
        self.l5x_to_acd_script = self.compilation_dir / "l5x_to_acd.py"

        # Ensure the compilation directory exists
        self.compilation_dir.mkdir(exist_ok=True)
|
||||||
|
|
||||||
|
    def wipe_compilation_files(self, preserve_core: bool = True) -> None:
        """Completely wipe compilation files, optionally preserving core scripts.

        Deletion is best-effort: failures are printed and skipped rather
        than raised, so one locked file does not abort the whole wipe.

        Args:
            preserve_core: If True, preserve l5x_to_acd.py and __pycache__
        """
        print("🧹 Wiping existing compilation files...")

        # Files to always preserve (core tooling that must survive a wipe).
        core_files = {
            "l5x_to_acd.py",
            "compilation_manager.py",
            "setup_windows_sdk.bat",
            "logix_designer_sdk-2.0.1-py3-none-any.whl"
        } if preserve_core else set()

        # Directories to always preserve
        core_dirs = {
            "__pycache__"
        } if preserve_core else set()

        files_removed = 0
        dirs_removed = 0

        # Remove all files except core files.  glob("*") is non-recursive:
        # only the top level of the compilation directory is swept.
        for file_path in self.compilation_dir.glob("*"):
            if file_path.is_file():
                if file_path.name not in core_files:
                    try:
                        file_path.unlink()
                        files_removed += 1
                        print(f"   ✓ Removed file: {file_path.name}")
                    except Exception as e:
                        print(f"   ⚠️ Could not remove {file_path.name}: {e}")
            elif file_path.is_dir():
                if file_path.name not in core_dirs:
                    try:
                        shutil.rmtree(file_path)
                        dirs_removed += 1
                        print(f"   ✓ Removed directory: {file_path.name}")
                    except Exception as e:
                        print(f"   ⚠️ Could not remove {file_path.name}: {e}")

        print(f"🧹 Cleanup complete: {files_removed} files, {dirs_removed} directories removed")
|
||||||
|
|
||||||
|
def copy_l5x_file(self, source_l5x: Path, project_name: str) -> Path:
|
||||||
|
"""Copy L5X file to compilation directory with project-specific naming.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
source_l5x: Path to the source L5X file
|
||||||
|
project_name: Name of the project for file naming
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Path to the copied L5X file in compilation directory
|
||||||
|
"""
|
||||||
|
if not source_l5x.exists():
|
||||||
|
raise FileNotFoundError(f"Source L5X file not found: {source_l5x}")
|
||||||
|
|
||||||
|
# Create project-specific filename
|
||||||
|
dest_filename = f"{project_name}.L5X"
|
||||||
|
dest_l5x = self.compilation_dir / dest_filename
|
||||||
|
|
||||||
|
print(f"📁 Copying L5X file: {source_l5x.name} → {dest_filename}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
shutil.copy2(source_l5x, dest_l5x)
|
||||||
|
file_size_mb = dest_l5x.stat().st_size / (1024 * 1024)
|
||||||
|
print(f" ✓ Copied successfully ({file_size_mb:.2f} MB)")
|
||||||
|
return dest_l5x
|
||||||
|
except Exception as e:
|
||||||
|
raise RuntimeError(f"Failed to copy L5X file: {e}")
|
||||||
|
|
||||||
|
    def generate_batch_file(self, project_name: str, l5x_filename: str,
                            compilation_options: Optional[Dict] = None) -> Path:
        """Generate project-specific batch file for compilation.

        Args:
            project_name: Name of the project
            l5x_filename: Name of the L5X file to compile
            compilation_options: Optional compilation settings

        Returns:
            Path to the generated batch file

        Raises:
            RuntimeError: if the batch file cannot be written.
        """
        options = compilation_options or {}

        # Create project-specific batch filename
        batch_filename = f"compile_{project_name}.bat"
        batch_path = self.compilation_dir / batch_filename

        # Convert paths to Windows format for batch file.
        # NOTE(review): only the WSL /mnt/c/ prefix is handled; a
        # compilation directory on another drive (/mnt/d/ etc.) would be
        # converted incorrectly — confirm this is always on C:.
        compilation_dir_win = str(self.compilation_dir).replace('/mnt/c/', 'C:\\').replace('/', '\\')
        l5x_file_win = f"{compilation_dir_win}\\{l5x_filename}"

        # Create project-specific batch content
        batch_content = self._create_batch_content(
            project_name, l5x_file_win, compilation_dir_win, options
        )

        print(f"🔧 Generating batch file: {batch_filename}")

        try:
            # newline='\r\n' forces Windows line endings so cmd.exe parses it.
            with open(batch_path, 'w', newline='\r\n') as f:  # Windows line endings
                f.write(batch_content)
            print(f" ✓ Generated successfully")
            return batch_path
        except Exception as e:
            raise RuntimeError(f"Failed to generate batch file: {e}")
|
||||||
|
|
||||||
|
def _create_batch_content(self, project_name: str, l5x_file_win: str,
                          compilation_dir_win: str, options: Dict) -> str:
    """Create the content for the batch file.

    The returned text uses '\n' line separators; the caller writes it with
    newline='\r\n' so it lands on disk with Windows line endings.

    Args:
        project_name: Name of the project
        l5x_file_win: Windows path to L5X file
        compilation_dir_win: Windows path to compilation directory
        options: Compilation options (per-project feature toggles; each
            toggle defaults to enabled when missing)

    Returns:
        Batch file content as string
    """
    # Determine project type for specialized handling
    project_type = self._detect_project_type(project_name)

    # Header
    content = f"@echo off\n"
    content += f"echo ====================================\n"
    content += f"echo PLC Compilation: {project_name}\n"
    content += f"echo Project Type: {project_type}\n"
    content += f"echo ====================================\n"
    content += f"echo.\n\n"

    # Set working directory
    content += f"cd /d \"{compilation_dir_win}\"\n"
    content += f"echo Working directory: %CD%\n"
    content += f"echo.\n\n"

    # Check if L5X file exists
    content += f"if not exist \"{l5x_file_win}\" (\n"
    content += f" echo ERROR: L5X file not found: {l5x_file_win}\n"
    content += f" pause\n"
    content += f" exit /b 1\n"
    content += f")\n\n"

    # Check for Python 3.12 (the SDK wheel targets that interpreter)
    content += f"echo Checking for Python 3.12...\n"
    content += f"py -3.12 --version >nul 2>&1\n"
    content += f"if errorlevel 1 (\n"
    content += f" echo.\n"
    content += f" echo ====================================\n"
    content += f" echo ERROR: Python 3.12 not found!\n"
    content += f" echo ====================================\n"
    content += f" echo.\n"
    content += f" echo This compilation requires Python 3.12 specifically.\n"
    content += f" echo.\n"
    content += f" echo INSTALLATION STEPS:\n"
    content += f" echo 1. Download Python 3.12 from: https://www.python.org/downloads/\n"
    content += f" echo 2. During installation, check 'Add Python to PATH'\n"
    content += f" echo 3. Verify installation: py -3.12 --version\n"
    content += f" echo.\n"
    content += f" echo ====================================\n"
    content += f" pause\n"
    content += f" exit /b 1\n"
    content += f")\n"
    content += f"echo ✓ Python 3.12 found\n"

    # Check for Logix Designer SDK (needed by l5x_to_acd.py)
    content += f"echo Checking for Logix Designer SDK...\n"
    content += f"py -3.12 -c \"import logix_designer_sdk\" 2>nul\n"
    content += f"if errorlevel 1 (\n"
    content += f" echo.\n"
    content += f" echo ====================================\n"
    content += f" echo ERROR: Logix Designer SDK not found!\n"
    content += f" echo ====================================\n"
    content += f" echo.\n"
    content += f" echo The Logix Designer SDK is required for L5X to ACD compilation.\n"
    content += f" echo.\n"
    content += f" echo INSTALLATION STEPS:\n"
    content += f" echo 1. Install the logix_designer_sdk package with Python 3.12:\n"
    content += f" echo py -3.12 -m pip install logix_designer_sdk-2.0.1-py3-none-any.whl\n"
    content += f" echo.\n"
    content += f" echo 2. Or run the setup script first:\n"
    content += f" echo setup_windows_sdk.bat\n"
    content += f" echo.\n"
    content += f" echo 3. Make sure you have Logix Designer installed on this Windows machine\n"
    content += f" echo.\n"
    content += f" echo ====================================\n"
    content += f" pause\n"
    content += f" exit /b 1\n"
    content += f")\n"
    content += f"echo ✓ Logix Designer SDK found\n"
    content += f"echo.\n\n"

    # Show file info
    content += f"echo Input L5X file: {l5x_file_win}\n"
    content += f"for %%F in (\"{l5x_file_win}\") do echo File size: %%~zF bytes\n"
    content += f"echo.\n\n"

    # Compilation command with project-specific options
    compilation_cmd = f"py -3.12 l5x_to_acd.py \"{l5x_file_win}\""

    # Add project-specific options
    # NOTE(review): these toggles currently only emit informational echo
    # lines; they do not alter the compilation command itself.
    if project_type == "MCM01" and options.get("enable_safety_validation", True):
        content += f"echo Enabling MCM01 safety validation...\n"
    elif project_type == "MCM04" and options.get("enable_feeder_optimization", True):
        content += f"echo Enabling MCM04 feeder optimization...\n"

    content += f"echo Starting compilation...\n"
    content += f"echo Command: {compilation_cmd}\n"
    content += f"echo.\n\n"

    # Execute compilation
    content += f"{compilation_cmd}\n\n"

    # Check compilation result: l5x_to_acd.py writes the .ACD next to the
    # input file, so success is detected by the output file's existence.
    expected_acd = l5x_file_win.replace('.L5X', '.ACD')
    content += f"if exist \"{expected_acd}\" (\n"
    content += f" echo.\n"
    content += f" echo ====================================\n"
    content += f" echo SUCCESS: Compilation completed!\n"
    content += f" echo Output: {expected_acd}\n"
    content += f" for %%F in (\"{expected_acd}\") do echo ACD size: %%~zF bytes\n"
    content += f" echo ====================================\n"
    content += f") else (\n"
    content += f" echo.\n"
    content += f" echo ====================================\n"
    content += f" echo ERROR: Compilation failed!\n"
    content += f" echo Expected output: {expected_acd}\n"
    content += f" echo ====================================\n"
    content += f")\n\n"

    # Footer
    content += f"echo.\n"
    content += f"echo Press any key to close...\n"
    content += f"pause\n"

    return content
|
||||||
|
|
||||||
|
def _detect_project_type(self, project_name: str) -> str:
    """Classify a project by the MCM marker embedded in its name.

    Args:
        project_name: Name of the project.

    Returns:
        "MCM01", "MCM04" or "MCM05" when the corresponding marker appears
        in the name (matched case-insensitively), otherwise "UNKNOWN".
    """
    normalized = project_name.upper()
    # Probe markers in a fixed order; the first one found wins.
    for marker in ("MCM01", "MCM04", "MCM05"):
        if marker in normalized:
            return marker
    return "UNKNOWN"
|
||||||
|
|
||||||
|
def setup_compilation(self, source_l5x: Path, project_name: str,
                      compilation_options: Optional[Dict] = None,
                      wipe_existing: bool = True) -> Dict[str, Path]:
    """Complete compilation setup: wipe, copy, and generate batch file.

    Args:
        source_l5x: Path to the source L5X file
        project_name: Name of the project
        compilation_options: Optional compilation settings
        wipe_existing: Whether to wipe existing files first

    Returns:
        Dictionary with paths to generated files, under the keys
        'l5x_file', 'batch_file' and 'compilation_dir'.
    """
    print(f"🚀 Setting up compilation for project: {project_name}")
    print(f"📂 Compilation directory: {self.compilation_dir}")
    print(f"📄 Source L5X: {source_l5x}")
    print()

    # Step 1: Wipe existing files (core scripts are preserved)
    if wipe_existing:
        self.wipe_compilation_files(preserve_core=True)
        print()

    # Step 2: Copy L5X file into the compilation directory
    copied_l5x = self.copy_l5x_file(source_l5x, project_name)
    print()

    # Step 3: Generate the project-specific batch file that drives the
    # Windows-side compilation of the copied L5X.
    batch_file = self.generate_batch_file(
        project_name, copied_l5x.name, compilation_options
    )
    print()

    result = {
        'l5x_file': copied_l5x,
        'batch_file': batch_file,
        'compilation_dir': self.compilation_dir
    }

    print("✅ Compilation setup complete!")
    print(f" L5X File: {copied_l5x}")
    print(f" Batch File: {batch_file}")
    print()
    print("🪟 To compile on Windows:")
    print(f" 1. Run: {batch_file}")
    print(f" 2. Or double-click: {batch_file.name}")
    print()

    return result
|
||||||
|
|
||||||
|
def main():
    """Main entry point for the compilation manager.

    Parses command-line arguments, then either wipes the compilation
    directory (--wipe-only) or performs the full setup via
    CompilationManager.setup_compilation.

    Returns:
        int: 0 on success, 1 on validation or setup failure.
    """
    parser = argparse.ArgumentParser(
        description="Dynamic PLC Compilation Manager",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
# Setup compilation for MCM01 project
python compilation_manager.py --project MTN6_MCM01_UL1_UL3 --l5x-file "../IO Tree Configuration Generator/generated_projects/MTN6_MCM01_UL1_UL3.L5X"

# Setup compilation for MCM04 project
python compilation_manager.py --project MTN6_MCM04_CHUTE_LOAD --l5x-file "../IO Tree Configuration Generator/generated_projects/MTN6_MCM04_CHUTE_LOAD.L5X"

# Wipe only (no setup)
python compilation_manager.py --wipe-only
"""
    )

    parser.add_argument('--project', '-p',
                        help='Project name (e.g., MTN6_MCM01_UL1_UL3)')
    parser.add_argument('--l5x-file', '-l', type=Path,
                        help='Path to the source L5X file')
    parser.add_argument('--compilation-dir', '-d', type=Path,
                        default=Path(__file__).parent,
                        help='Compilation directory (default: current directory)')
    parser.add_argument('--wipe-only', action='store_true',
                        help='Only wipe existing files, do not setup compilation')
    parser.add_argument('--no-wipe', action='store_true',
                        help='Do not wipe existing files before setup')
    parser.add_argument('--enable-safety-validation', action='store_true',
                        help='Enable safety validation for MCM01 projects')
    parser.add_argument('--enable-feeder-optimization', action='store_true',
                        help='Enable feeder optimization for MCM04 projects')

    args = parser.parse_args()

    # Create compilation manager
    manager = CompilationManager(args.compilation_dir)

    # Handle wipe-only mode: clean up and exit without further validation.
    if args.wipe_only:
        manager.wipe_compilation_files(preserve_core=True)
        print("✅ Wipe completed.")
        return 0

    # Validate arguments for full setup
    if not args.project:
        print("❌ ERROR: --project is required for compilation setup")
        parser.print_help()
        return 1

    if not args.l5x_file:
        print("❌ ERROR: --l5x-file is required for compilation setup")
        parser.print_help()
        return 1

    if not args.l5x_file.exists():
        print(f"❌ ERROR: L5X file not found: {args.l5x_file}")
        return 1

    # Setup compilation options (feature toggles consumed by the
    # batch-content builder)
    options = {
        'enable_safety_validation': args.enable_safety_validation,
        'enable_feeder_optimization': args.enable_feeder_optimization
    }

    try:
        # Setup compilation; the result paths are printed by
        # setup_compilation itself.
        result = manager.setup_compilation(
            source_l5x=args.l5x_file,
            project_name=args.project,
            compilation_options=options,
            wipe_existing=not args.no_wipe
        )

        print("🎉 Ready for Windows compilation!")
        return 0

    except Exception as e:
        print(f"❌ ERROR: {e}")
        return 1
|
||||||
|
|
||||||
|
# Script entry point: forward main()'s integer exit code to the shell.
if __name__ == '__main__':
    sys.exit(main())
|
||||||
91
L5X2ACD Compiler/compile_MTN6_MCM05_CHUTE_LOAD.bat
Normal file
91
L5X2ACD Compiler/compile_MTN6_MCM05_CHUTE_LOAD.bat
Normal file
@ -0,0 +1,91 @@
|
|||||||
|
@echo off
REM Auto-generated by compilation_manager.py for project MTN6_MCM05_CHUTE_LOAD.
REM Compiles the project's L5X export to an .ACD file via l5x_to_acd.py,
REM which requires Python 3.12 and the Logix Designer SDK on this machine.
echo ====================================
echo PLC Compilation: MTN6_MCM05_CHUTE_LOAD
echo Project Type: MCM05
echo ====================================
echo.

REM Work from the shared compilation directory.
cd /d "C:\Users\ilia.gurielidze\Projects\PLC Generation\L5X2ACD Compiler"
echo Working directory: %CD%
echo.

if not exist "C:\Users\ilia.gurielidze\Projects\PLC Generation\L5X2ACD Compiler\MTN6_MCM05_CHUTE_LOAD.L5X" (
 echo ERROR: L5X file not found: C:\Users\ilia.gurielidze\Projects\PLC Generation\L5X2ACD Compiler\MTN6_MCM05_CHUTE_LOAD.L5X
 pause
 exit /b 1
)

REM Verify the required toolchain before attempting the conversion.
echo Checking for Python 3.12...
py -3.12 --version >nul 2>&1
if errorlevel 1 (
 echo.
 echo ====================================
 echo ERROR: Python 3.12 not found!
 echo ====================================
 echo.
 echo This compilation requires Python 3.12 specifically.
 echo.
 echo INSTALLATION STEPS:
 echo 1. Download Python 3.12 from: https://www.python.org/downloads/
 echo 2. During installation, check 'Add Python to PATH'
 echo 3. Verify installation: py -3.12 --version
 echo.
 echo ====================================
 pause
 exit /b 1
)
echo ✓ Python 3.12 found
echo Checking for Logix Designer SDK...
py -3.12 -c "import logix_designer_sdk" 2>nul
if errorlevel 1 (
 echo.
 echo ====================================
 echo ERROR: Logix Designer SDK not found!
 echo ====================================
 echo.
 echo The Logix Designer SDK is required for L5X to ACD compilation.
 echo.
 echo INSTALLATION STEPS:
 echo 1. Install the logix_designer_sdk package with Python 3.12:
 echo py -3.12 -m pip install logix_designer_sdk-2.0.1-py3-none-any.whl
 echo.
 echo 2. Or run the setup script first:
 echo setup_windows_sdk.bat
 echo.
 echo 3. Make sure you have Logix Designer installed on this Windows machine
 echo.
 echo ====================================
 pause
 exit /b 1
)
echo ✓ Logix Designer SDK found
echo.

echo Input L5X file: C:\Users\ilia.gurielidze\Projects\PLC Generation\L5X2ACD Compiler\MTN6_MCM05_CHUTE_LOAD.L5X
for %%F in ("C:\Users\ilia.gurielidze\Projects\PLC Generation\L5X2ACD Compiler\MTN6_MCM05_CHUTE_LOAD.L5X") do echo File size: %%~zF bytes
echo.

echo Starting compilation...
echo Command: py -3.12 l5x_to_acd.py "C:\Users\ilia.gurielidze\Projects\PLC Generation\L5X2ACD Compiler\MTN6_MCM05_CHUTE_LOAD.L5X"
echo.

REM Run the actual L5X -> ACD conversion.
py -3.12 l5x_to_acd.py "C:\Users\ilia.gurielidze\Projects\PLC Generation\L5X2ACD Compiler\MTN6_MCM05_CHUTE_LOAD.L5X"

REM Success is detected by the existence of the expected .ACD output.
if exist "C:\Users\ilia.gurielidze\Projects\PLC Generation\L5X2ACD Compiler\MTN6_MCM05_CHUTE_LOAD.ACD" (
 echo.
 echo ====================================
 echo SUCCESS: Compilation completed!
 echo Output: C:\Users\ilia.gurielidze\Projects\PLC Generation\L5X2ACD Compiler\MTN6_MCM05_CHUTE_LOAD.ACD
 for %%F in ("C:\Users\ilia.gurielidze\Projects\PLC Generation\L5X2ACD Compiler\MTN6_MCM05_CHUTE_LOAD.ACD") do echo ACD size: %%~zF bytes
 echo ====================================
) else (
 echo.
 echo ====================================
 echo ERROR: Compilation failed!
 echo Expected output: C:\Users\ilia.gurielidze\Projects\PLC Generation\L5X2ACD Compiler\MTN6_MCM05_CHUTE_LOAD.ACD
 echo ====================================
)

echo.
echo Press any key to close...
pause
|
||||||
459
L5X2ACD Compiler/l5x_to_acd.py
Normal file
459
L5X2ACD Compiler/l5x_to_acd.py
Normal file
@ -0,0 +1,459 @@
|
|||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
import traceback
|
||||||
|
from typing import List, Tuple, Dict, Any
|
||||||
|
from logix_designer_sdk import LogixProject, StdOutEventLogger
|
||||||
|
from logix_designer_sdk.exceptions import (
|
||||||
|
LogixSdkError,
|
||||||
|
OperationFailedError,
|
||||||
|
OperationNotPerformedError,
|
||||||
|
LoggerFailedError,
|
||||||
|
EventLoggerRuntimeError
|
||||||
|
)
|
||||||
|
|
||||||
|
# Configuration
# Target Logix Designer major revision; passed as the target revision to
# LogixProject.convert() when converting L5X files.
TARGET_REV = 36
|
||||||
|
|
||||||
|
class DetailedEventLogger:
    """Enhanced event logger that captures all SDK messages, errors, and progress.

    Collects every message seen during a conversion, auto-classifies it by
    keyword, echoes it to stdout, and exposes aggregate counters.
    """

    def __init__(self, filename: str):
        self.filename = filename            # file the conversion belongs to
        self.start_time = time.time()       # reference point for timestamps
        self.messages = []                  # every message passed to log()
        self.errors = []                    # messages classified as errors
        self.warnings = []                  # messages classified as warnings
        self.progress_messages = []         # messages classified as progress
        self.sdk_messages = []              # raw messages from StdOutEventLogger

    def log(self, message: str, level: str = "INFO"):
        """Record *message*, upgrading its level via keyword heuristics, and echo it."""
        elapsed = time.time() - self.start_time

        # Classify from keywords; the first matching category wins and
        # overrides the caller-supplied level.
        lowered = message.lower()
        if any(word in lowered for word in ('error', 'failed', 'exception')):
            self.errors.append(message)
            level = "ERROR"
        elif any(word in lowered for word in ('warning', 'warn')):
            self.warnings.append(message)
            level = "WARN"
        elif any(word in lowered for word in ('progress', 'converting', 'saving', 'loading')):
            self.progress_messages.append(message)
            level = "PROGRESS"

        self.messages.append({
            'timestamp': elapsed,
            'level': level,
            'message': message,
        })

        # Echo in a fixed-width, timestamped format.
        print(f"[LOG] [{elapsed:>6.1f}s] [{level:<8}] {self.filename} | {message}")

    def capture_sdk_message(self, message: str):
        """Store a raw SDK message and mirror it into the category lists."""
        self.sdk_messages.append(message)
        # Categorize SDK traffic too, tagging entries with an "SDK: " prefix.
        lowered = message.lower()
        if any(word in lowered for word in ('error', 'failed', 'exception')):
            self.errors.append(f"SDK: {message}")
        elif any(word in lowered for word in ('warning', 'warn')):
            self.warnings.append(f"SDK: {message}")
        elif any(word in lowered for word in ('convert', 'save', 'load', 'build')):
            self.progress_messages.append(f"SDK: {message}")

    def get_summary(self) -> Dict[str, Any]:
        """Return counters for everything captured plus the elapsed run time."""
        return {
            'total_messages': len(self.messages),
            'sdk_messages': len(self.sdk_messages),
            'errors': len(self.errors),
            'warnings': len(self.warnings),
            'progress_updates': len(self.progress_messages),
            'duration': time.time() - self.start_time,
        }
|
||||||
|
|
||||||
|
class CustomStdOutEventLogger(StdOutEventLogger):
    """Custom wrapper around StdOutEventLogger to capture messages"""

    def __init__(self, capture_logger: DetailedEventLogger):
        super().__init__()
        # DetailedEventLogger that mirrors every SDK message for later analysis.
        self.capture_logger = capture_logger

    def log(self, message):
        """Forward *message* to the capture logger, then to stdout via the base class."""
        # Capture the message for our detailed logger
        self.capture_logger.capture_sdk_message(message)
        # Still output to stdout via parent class
        super().log(message)
|
||||||
|
|
||||||
|
def categorize_exception(exc: Exception) -> Dict[str, Any]:
    """Categorize exceptions based on SDK documentation patterns.

    Returns a dict describing the exception's type, category, severity,
    suggested remediation and whether a retry could succeed.
    """
    # Defaults applied when no known category matches.
    info = {
        'type': type(exc).__name__,
        'message': str(exc),
        'category': 'Unknown',
        'severity': 'High',
        'suggested_action': 'Check error details and retry',
        'is_recoverable': False,
    }

    # Ordered (exception type, overrides) table. SDK subclasses appear
    # before their LogixSdkError base so the most specific category wins;
    # the first isinstance match is applied, mirroring an if/elif chain.
    dispatch = (
        (OperationFailedError, {
            'category': 'Operation Failed',
            'severity': 'High',
            'suggested_action': 'Check input parameters, file permissions, and target revision compatibility',
            'is_recoverable': True,
        }),
        (OperationNotPerformedError, {
            'category': 'Operation Not Performed',
            'severity': 'Critical',
            'suggested_action': 'Check SDK server connection and project state',
            'is_recoverable': False,
        }),
        (LoggerFailedError, {
            'category': 'Logger Failed',
            'severity': 'Medium',
            'suggested_action': 'Check logging permissions and disk space',
            'is_recoverable': True,
        }),
        (EventLoggerRuntimeError, {
            'category': 'Event Logger Runtime Error',
            'severity': 'Medium',
            'suggested_action': 'Check event logging configuration',
            'is_recoverable': True,
        }),
        (LogixSdkError, {
            'category': 'SDK Error',
            'severity': 'High',
            'suggested_action': 'Check SDK installation and project file integrity',
            'is_recoverable': True,
        }),
        (FileNotFoundError, {
            'category': 'File Not Found',
            'severity': 'High',
            'suggested_action': 'Verify input file path exists and is accessible',
            'is_recoverable': False,
        }),
        (PermissionError, {
            'category': 'Permission Error',
            'severity': 'High',
            'suggested_action': 'Check file permissions and run as administrator if needed',
            'is_recoverable': True,
        }),
        (TypeError, {
            'category': 'Type Error',
            'severity': 'High',
            'suggested_action': 'Check parameter types and SDK API usage',
            'is_recoverable': True,
        }),
    )

    for exc_type, overrides in dispatch:
        if isinstance(exc, exc_type):
            info.update(overrides)
            break

    return info
|
||||||
|
|
||||||
|
async def convert_with_comprehensive_error_handling(input_file: str, output_file: str) -> Dict[str, Any]:
    """Convert L5X to ACD with comprehensive error handling and logging.

    Validates the input, runs LogixProject.convert targeting TARGET_REV while
    a heartbeat task prints elapsed time, saves the result, and returns a
    status dict ('status' is 'success' or 'failed') instead of raising.
    """
    filename = os.path.basename(input_file)
    start_time = time.time()

    # NOTE(review): the literal "(unknown)" below looks like a lost f-string
    # placeholder (probably {filename}) — confirm against version control.
    print(f"\n[START] Starting conversion: (unknown)")
    print("=" * 80)

    # Create enhanced event logger
    event_logger = DetailedEventLogger(filename)

    # Initialize stop_heartbeat early to avoid UnboundLocalError
    stop_heartbeat = asyncio.Event()

    try:
        # Validate input file first
        if not os.path.exists(input_file):
            raise FileNotFoundError(f"Input file not found: {input_file}")

        if not os.access(input_file, os.R_OK):
            raise PermissionError(f"Cannot read input file: {input_file}")

        # Check file size; large files are only warned about, not rejected.
        file_size_mb = os.path.getsize(input_file) / (1024*1024)
        if file_size_mb > 500: # SDK limit
            event_logger.log(f"WARNING: Large file ({file_size_mb:.1f} MB) - may take longer to process", "WARN")

        event_logger.log(f"Starting conversion of {input_file} (size: {file_size_mb:.2f} MB)")
        event_logger.log(f"Target revision: {TARGET_REV}")

        async def heartbeat():
            """Print elapsed time every 2 s until stop_heartbeat is set"""
            while not stop_heartbeat.is_set():
                await asyncio.sleep(2)
                elapsed_hb = time.time() - start_time
                print(f"Elapsed: {elapsed_hb:.1f}s")

        hb_task = asyncio.create_task(heartbeat())

        # Create custom event logger that captures SDK messages
        custom_sdk_logger = CustomStdOutEventLogger(event_logger)

        # Convert with comprehensive event logging
        proj = await LogixProject.convert(input_file, TARGET_REV, custom_sdk_logger)

        # Stop heartbeat once operations finished
        stop_heartbeat.set()
        await hb_task

        event_logger.log("[SUCCESS] Conversion completed successfully")
        event_logger.log(f"[SAVING] Saving to {output_file}")

        # Validate output directory, creating it on demand.
        output_dir = os.path.dirname(output_file)
        if output_dir and not os.path.exists(output_dir):
            os.makedirs(output_dir)
            event_logger.log(f"Created output directory: {output_dir}")

        # Save the converted project
        await proj.save_as(output_file, True)

        # Verify output file was created
        if not os.path.exists(output_file):
            raise OperationFailedError("Output file was not created successfully")

        # Calculate final results
        elapsed_time = time.time() - start_time
        output_size_mb = os.path.getsize(output_file) / (1024*1024)

        event_logger.log(f"[SUCCESS] File saved successfully")
        event_logger.log(f"[INFO] Output size: {output_size_mb:.2f} MB")
        event_logger.log(f"[INFO] Total time: {elapsed_time:.1f}s")

        logger_summary = event_logger.get_summary()

        # NOTE(review): "(unknown)" here also looks like a lost placeholder.
        print(f"\n[SUCCESS]: (unknown)")
        print(f" Input: {file_size_mb:.2f} MB")
        print(f" Output: {output_size_mb:.2f} MB")
        print(f" Duration: {elapsed_time:.1f}s")
        print(f" Messages: {logger_summary['total_messages']} app + {logger_summary['sdk_messages']} SDK")
        if logger_summary['warnings'] > 0:
            print(f" Warnings: {logger_summary['warnings']}")
        print("=" * 80)

        return {
            'status': 'success',
            'input': input_file,
            'output': output_file,
            'input_size_mb': round(file_size_mb, 2),
            'output_size_mb': round(output_size_mb, 2),
            'duration_seconds': round(elapsed_time, 1),
            'messages_captured': logger_summary['total_messages'],
            'sdk_messages': logger_summary['sdk_messages'],
            'warnings': logger_summary['warnings'],
            'errors_logged': logger_summary['errors']
        }

    except Exception as e:
        # Stop heartbeat so it doesn't continue after error
        stop_heartbeat.set()
        # hb_task only exists if the failure happened after it was started.
        if 'hb_task' in locals():
            try:
                await hb_task
            except Exception:
                pass

        elapsed_time = time.time() - start_time

        # Categorize and analyze the exception
        exc_info = categorize_exception(e)
        logger_summary = event_logger.get_summary()

        event_logger.log(f"FAILED: Conversion failed: {exc_info['message']}", "ERROR")

        # NOTE(review): "(unknown)" here also looks like a lost placeholder.
        print(f"\nFAILED: (unknown)")
        print(f" Error Type: {exc_info['type']} ({exc_info['category']})")
        print(f" Message: {exc_info['message']}")
        print(f" Severity: {exc_info['severity']}")
        print(f" Suggested Action: {exc_info['suggested_action']}")
        print(f" Recoverable: {'Yes' if exc_info['is_recoverable'] else 'No'}")
        print(f" Failed after: {elapsed_time:.1f}s")
        print(f" Messages captured: {logger_summary['total_messages']} app + {logger_summary['sdk_messages']} SDK")

        # Print detailed stack trace for debugging
        if logger_summary['errors'] > 0:
            print(f" Errors logged: {logger_summary['errors']}")
            print(" Recent error messages:")
            for error_msg in event_logger.errors[-3:]: # Show last 3 errors
                print(f" • {error_msg}")

        print("\nFull Stack Trace:")
        print(traceback.format_exc())
        print("=" * 80)

        # Note: unlike the success dict, this one has no 'warnings' key;
        # consumers use .get() accordingly.
        return {
            'status': 'failed',
            'input': input_file,
            'output': output_file,
            'error': exc_info['message'],
            'error_type': exc_info['type'],
            'error_category': exc_info['category'],
            'severity': exc_info['severity'],
            'suggested_action': exc_info['suggested_action'],
            'is_recoverable': exc_info['is_recoverable'],
            'duration_seconds': round(elapsed_time, 1),
            'messages_captured': logger_summary['total_messages'],
            'sdk_messages': logger_summary['sdk_messages'],
            'errors_logged': logger_summary['errors'],
            'stack_trace': traceback.format_exc()
        }
|
||||||
|
|
||||||
|
async def convert_multiple_files_with_error_recovery(file_pairs: List[Tuple[str, str]]) -> List[Dict[str, Any]]:
    """Convert multiple L5X files sequentially, with error recovery and reporting.

    Args:
        file_pairs: (input L5X path, output ACD path) tuples to process.

    Returns:
        One result dict per pair, as produced by
        convert_with_comprehensive_error_handling.
    """
    # Guard clause: nothing to do.
    if not file_pairs:
        print("ERROR: No files to convert")
        return []

    total = len(file_pairs)
    print(f"Converting {total} file(s) to ACD format")
    print(f"Target Logix revision: {TARGET_REV}")
    print("Using Logix Designer SDK with comprehensive error handling")
    print("Error recovery and detailed logging enabled")

    results = []
    for seq, (src, dst) in enumerate(file_pairs, 1):
        print(f"\nProcessing file {seq}/{total}")

        # Each conversion handles its own errors and returns a status dict.
        outcome = await convert_with_comprehensive_error_handling(src, dst)
        results.append(outcome)

        # Surface a hint for failures that look retryable.
        if outcome['status'] == 'failed' and outcome.get('is_recoverable', False):
            print(f"Recovery suggestion: {outcome['suggested_action']}")

    # Comprehensive final report over all results.
    print_comprehensive_summary(results)

    return results
|
||||||
|
|
||||||
|
def print_comprehensive_summary(results: List[Dict[str, Any]]):
    """Print a comprehensive summary with error analysis.

    Splits *results* into successes and failures, reports aggregate timing,
    size and message counts, and groups failed files by error category.
    """
    succeeded = [entry for entry in results if entry['status'] == 'success']
    failed = [entry for entry in results if entry['status'] == 'failed']

    elapsed_total = sum(entry.get('duration_seconds', 0) for entry in results)
    input_mb = sum(entry.get('input_size_mb', 0) for entry in succeeded)
    output_mb = sum(entry.get('output_size_mb', 0) for entry in succeeded)
    app_messages = sum(entry.get('messages_captured', 0) for entry in results)
    sdk_messages = sum(entry.get('sdk_messages', 0) for entry in results)
    warning_count = sum(entry.get('warnings', 0) for entry in results)

    print(f"\n{'COMPREHENSIVE CONVERSION SUMMARY':^80}")
    print("=" * 80)
    print(f"Total files processed: {len(results)}")
    print(f"Successfully converted: {len(succeeded)}")
    print(f"Failed conversions: {len(failed)}")
    print(f"Total processing time: {elapsed_total:.1f}s")
    print(f"Total messages captured: {app_messages} app + {sdk_messages} SDK")

    if warning_count > 0:
        print(f"Total warnings: {warning_count}")

    if succeeded:
        print(f"Total input size: {input_mb:.2f} MB")
        print(f"Total output size: {output_mb:.2f} MB")
        print(f"Average time per file: {elapsed_total / len(succeeded):.1f}s")

        if input_mb > 0:
            print(f"Size ratio: {(output_mb / input_mb) * 100:.1f}% (output/input)")

    if failed:
        print(f"\nFailed Files Analysis:")

        # Bucket the failures by their reported error category.
        by_category: Dict[str, List[Dict[str, Any]]] = {}
        for entry in failed:
            by_category.setdefault(entry.get('error_category', 'Unknown'), []).append(entry)

        for category, members in by_category.items():
            print(f"\n  {category} ({len(members)} files):")
            for entry in members:
                recovery_status = "Recoverable" if entry.get('is_recoverable', False) else "Not recoverable"
                print(f"    FAILED: {os.path.basename(entry['input'])}")
                print(f"      Error: {entry.get('error', 'Unknown error')}")
                print(f"      Status: {recovery_status}")

    print("=" * 80)
|
||||||
|
|
||||||
|
async def main():
    """Main execution function with comprehensive error handling.

    Builds the list of (input, output) file pairs either from the first
    command-line argument or from a hard-coded default list, runs the batch
    converter, and maps the outcome onto a process exit code:
        0 - all conversions succeeded
        1 - at least one recoverable failure
        2 - at least one non-recoverable failure
        3 - fatal error in the batch driver itself
    """
    print("Logix Designer SDK L5X to ACD Converter")
    print("Enhanced with comprehensive error handling and progress tracking")
    print("-" * 80)

    # Check if command-line arguments were provided
    import sys
    if len(sys.argv) > 1:
        # Use command-line argument as input file.
        input_file = sys.argv[1]
        # BUG FIX: the old chained replace('.L5X')/replace('.l5x') missed
        # mixed-case extensions (e.g. ".l5X"), leaving output_file identical
        # to input_file.  Derive the output name case-insensitively instead.
        base, ext = os.path.splitext(input_file)
        if ext.upper() == '.L5X':
            output_file = base + '.ACD'
        else:
            output_file = input_file + '.ACD'
        file_pairs = [(input_file, output_file)]
    else:
        # Define files to convert - add more files here as needed
        file_pairs = [
            (r"MTN6_MCM01_UL1_UL3.L5X", r"MTN6_MCM01_UL1_UL3.ACD"),
            # Add more file pairs here like:
            # (r"Project2.L5X", r"Project2.ACD"),
            # (r"Project3.L5X", r"Project3.ACD"),
        ]

    if not file_pairs:
        print("ERROR: No files defined for conversion!")
        print("Edit the file_pairs list in main() to add files to convert.")
        return 1

    try:
        # Execute conversions with comprehensive error handling
        results = await convert_multiple_files_with_error_recovery(file_pairs)

        # Determine exit code based on results
        failed_count = len([r for r in results if r['status'] == 'failed'])
        critical_failures = len([r for r in results if r['status'] == 'failed' and not r.get('is_recoverable', True)])

        if critical_failures > 0:
            print(f"\nCRITICAL: {critical_failures} non-recoverable failures detected")
            return 2  # Critical failure exit code
        elif failed_count > 0:
            print(f"\nWARNING: {failed_count} recoverable failures detected")
            return 1  # Warning exit code
        else:
            print(f"\nSUCCESS: All {len(results)} files converted successfully!")
            return 0  # Success exit code

    except Exception as e:
        print(f"\nFATAL ERROR in main execution:")
        print(f"  Type: {type(e).__name__}")
        print(f"  Message: {str(e)}")
        print(f"\nFull Stack Trace:")
        print(traceback.format_exc())
        return 3  # Fatal error exit code
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Run the async entry point and propagate its exit code to the shell.
    import sys

    exit_code = asyncio.run(main())
    print(f"\nProcess completed with exit code: {exit_code}")
    # sys.exit() is the supported way to set a script's exit status; the
    # exit() builtin is a site.py interactive convenience and may be absent
    # when the interpreter runs with -S.
    sys.exit(exit_code)
|
||||||
BIN
L5X2ACD Compiler/logix_designer_sdk-2.0.1-py3-none-any.whl
Normal file
BIN
L5X2ACD Compiler/logix_designer_sdk-2.0.1-py3-none-any.whl
Normal file
Binary file not shown.
107
L5X2ACD Compiler/setup_windows_sdk.bat
Normal file
107
L5X2ACD Compiler/setup_windows_sdk.bat
Normal file
@ -0,0 +1,107 @@
|
|||||||
|
@echo off
REM setup_windows_sdk.bat
REM Installs the bundled Logix Designer SDK wheel into Python 3.12 and
REM verifies the install.  Exits non-zero on any failure so calling scripts
REM can detect problems.  Requires: Python 3.12 on PATH (py launcher), pip,
REM and the wheel file sitting next to this script.
echo ====================================
echo Logix Designer SDK Setup for Windows
echo ====================================
echo.

REM Check if Python 3.12 is available
py -3.12 --version >nul 2>&1
if errorlevel 1 (
    echo ERROR: Python 3.12 is not installed or not in PATH
    echo This compilation system requires Python 3.12 specifically
    echo Please install Python 3.12 and add it to your PATH
    echo Download from: https://www.python.org/downloads/
    echo During installation, check "Add Python to PATH"
    pause
    exit /b 1
)

echo ✓ Python 3.12 found
py -3.12 --version
echo.

REM Check if pip is available with Python 3.12
py -3.12 -m pip --version >nul 2>&1
if errorlevel 1 (
    echo ERROR: pip is not available with Python 3.12
    echo Please ensure pip is installed with Python 3.12
    pause
    exit /b 1
)

echo ✓ pip found for Python 3.12
py -3.12 -m pip --version
echo.

REM Check if the wheel file exists
set WHEEL_FILE=logix_designer_sdk-2.0.1-py3-none-any.whl

if exist "%WHEEL_FILE%" (
    echo ✓ Found SDK wheel file: %WHEEL_FILE%
) else (
    echo ERROR: SDK wheel file not found: %WHEEL_FILE%
    echo.
    echo Please ensure the wheel file is in this directory:
    echo %CD%
    echo.
    echo The wheel file should be copied from the project root.
    pause
    exit /b 1
)

echo.
echo Installing Logix Designer SDK with Python 3.12...
echo Command: py -3.12 -m pip install "%WHEEL_FILE%"
echo.

py -3.12 -m pip install "%WHEEL_FILE%"

if errorlevel 1 (
    echo.
    echo ====================================
    echo ERROR: SDK installation failed!
    echo ====================================
    echo.
    echo This could be due to:
    echo 1. Python 3.12 compatibility issues
    echo 2. Missing dependencies
    REM NOTE(review): the unescaped ")" inside the echo line below can
    REM terminate this parenthesized block in cmd.exe - confirm this branch
    REM prints all lines correctly (escape as ^) if it does not).
    echo 3. Permission issues (try running as administrator)
    echo 4. Corrupted wheel file
    echo.
    echo Try running this batch file as administrator.
    echo.
    pause
    exit /b 1
)

echo.
echo ====================================
echo Testing SDK installation...
echo ====================================

REM Import smoke test: proves the package is importable, not just installed.
py -3.12 -c "import logix_designer_sdk; print('✓ Logix Designer SDK successfully imported')"

if errorlevel 1 (
    echo ERROR: SDK import test failed
    echo The package was installed but cannot be imported
    pause
    exit /b 1
)

echo.
echo ====================================
echo SUCCESS: Logix Designer SDK installed!
echo ====================================
echo.
echo You can now run PLC L5X to ACD compilation.
echo.
echo IMPORTANT NOTES:
echo 1. Make sure Logix Designer is installed on this Windows machine
echo 2. The SDK requires Logix Designer to be present for compilation
echo 3. Large L5X files may take 15-20 minutes to compile
echo.
echo Next steps:
echo 1. Run your project's compilation batch file
echo 2. Or use: py -3.12 l5x_to_acd.py "your_file.L5X"
echo.
pause
|
||||||
BIN
PLC Data Generator/DESC_IP_MERGED.xlsx
Normal file
BIN
PLC Data Generator/DESC_IP_MERGED.xlsx
Normal file
Binary file not shown.
BIN
PLC Data Generator/IO-To-Path.xlsx
Normal file
BIN
PLC Data Generator/IO-To-Path.xlsx
Normal file
Binary file not shown.
BIN
PLC Data Generator/IO-To-Path.xlsx.backup
Normal file
BIN
PLC Data Generator/IO-To-Path.xlsx.backup
Normal file
Binary file not shown.
BIN
PLC Data Generator/IO_Assignment.xlsm
Normal file
BIN
PLC Data Generator/IO_Assignment.xlsm
Normal file
Binary file not shown.
BIN
PLC Data Generator/MCM01_DESC_IP_MERGED.xlsx
Normal file
BIN
PLC Data Generator/MCM01_DESC_IP_MERGED.xlsx
Normal file
Binary file not shown.
2064
PLC Data Generator/MCM01_OUTPUT.csv
Normal file
2064
PLC Data Generator/MCM01_OUTPUT.csv
Normal file
File diff suppressed because it is too large
Load Diff
BIN
PLC Data Generator/MCM04_DESC_IP_MERGED.xlsx
Normal file
BIN
PLC Data Generator/MCM04_DESC_IP_MERGED.xlsx
Normal file
Binary file not shown.
1887
PLC Data Generator/MCM04_OUTPUT.csv
Normal file
1887
PLC Data Generator/MCM04_OUTPUT.csv
Normal file
File diff suppressed because it is too large
Load Diff
BIN
PLC Data Generator/MCM05_DESC_IP_MERGED.xlsx
Normal file
BIN
PLC Data Generator/MCM05_DESC_IP_MERGED.xlsx
Normal file
Binary file not shown.
1682
PLC Data Generator/MCM05_OUTPUT.csv
Normal file
1682
PLC Data Generator/MCM05_OUTPUT.csv
Normal file
File diff suppressed because it is too large
Load Diff
44
PLC Data Generator/README_Hub_Updates.md
Normal file
44
PLC Data Generator/README_Hub_Updates.md
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
# Hub IO Path Updates
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
Updated the PLC Data Generator to support two different Hub IO path configurations:
|
||||||
|
- **SorterHub**: For hubs with "S0" in their tagname (e.g., S01_FIOH1, S02_FIOH2)
|
||||||
|
- **Hub**: For regular hubs without "S0" in their tagname
|
||||||
|
|
||||||
|
## Changes Made
|
||||||
|
|
||||||
|
### 1. Excel File Structure (IO-To-Path.xlsx)
|
||||||
|
- The original "Hub" sheet has been renamed to "SorterHub"
|
||||||
|
- A new "Hub" sheet has been created for regular hub configurations
|
||||||
|
- Both sheets maintain the same column structure: IO, IPath, OPath
|
||||||
|
|
||||||
|
### 2. Code Updates
|
||||||
|
|
||||||
|
#### io_paths.py
|
||||||
|
- Updated `load_io_path_mappings()` to load both Hub and SorterHub sheets
|
||||||
|
- Modified `get_io_path()` to automatically select the correct sheet based on tagname:
|
||||||
|
```python
|
||||||
|
if device_type == 'Hub':
|
||||||
|
# Check if the hub has S0 in its name to determine which sheet to use
|
||||||
|
if 'S0' in str(tagname).upper():
|
||||||
|
df = sorter_hub_df
|
||||||
|
else:
|
||||||
|
df = hub_df
|
||||||
|
```
|
||||||
|
|
||||||
|
#### main.py
|
||||||
|
- Updated to handle the additional sorter_hub_df parameter from load_io_path_mappings()
|
||||||
|
- Passes sorter_hub_df to process_data()
|
||||||
|
|
||||||
|
#### process.py
|
||||||
|
- Updated function signature to accept sorter_hub_df parameter
|
||||||
|
- Passes sorter_hub_df to get_io_path() calls
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
No changes required from the user perspective. The system automatically:
|
||||||
|
1. Detects if a hub tagname contains "S0"
|
||||||
|
2. Uses the appropriate IO path mapping sheet
|
||||||
|
3. Generates correct IO paths based on the hub type
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
Tested successfully with MCM01 data, showing proper loading of both Hub sheets and 100% successful IO path mappings.
|
||||||
BIN
PLC Data Generator/__pycache__/classifiers.cpython-312.pyc
Normal file
BIN
PLC Data Generator/__pycache__/classifiers.cpython-312.pyc
Normal file
Binary file not shown.
BIN
PLC Data Generator/__pycache__/classifiers.cpython-313.pyc
Normal file
BIN
PLC Data Generator/__pycache__/classifiers.cpython-313.pyc
Normal file
Binary file not shown.
BIN
PLC Data Generator/__pycache__/io_paths.cpython-312.pyc
Normal file
BIN
PLC Data Generator/__pycache__/io_paths.cpython-312.pyc
Normal file
Binary file not shown.
BIN
PLC Data Generator/__pycache__/io_paths.cpython-313.pyc
Normal file
BIN
PLC Data Generator/__pycache__/io_paths.cpython-313.pyc
Normal file
Binary file not shown.
BIN
PLC Data Generator/__pycache__/post_process.cpython-312.pyc
Normal file
BIN
PLC Data Generator/__pycache__/post_process.cpython-312.pyc
Normal file
Binary file not shown.
BIN
PLC Data Generator/__pycache__/post_process.cpython-313.pyc
Normal file
BIN
PLC Data Generator/__pycache__/post_process.cpython-313.pyc
Normal file
Binary file not shown.
BIN
PLC Data Generator/__pycache__/process.cpython-312.pyc
Normal file
BIN
PLC Data Generator/__pycache__/process.cpython-312.pyc
Normal file
Binary file not shown.
BIN
PLC Data Generator/__pycache__/process.cpython-313.pyc
Normal file
BIN
PLC Data Generator/__pycache__/process.cpython-313.pyc
Normal file
Binary file not shown.
BIN
PLC Data Generator/__pycache__/utils.cpython-312.pyc
Normal file
BIN
PLC Data Generator/__pycache__/utils.cpython-312.pyc
Normal file
Binary file not shown.
BIN
PLC Data Generator/__pycache__/utils.cpython-313.pyc
Normal file
BIN
PLC Data Generator/__pycache__/utils.cpython-313.pyc
Normal file
Binary file not shown.
96
PLC Data Generator/classifiers.py
Normal file
96
PLC Data Generator/classifiers.py
Normal file
@ -0,0 +1,96 @@
|
|||||||
|
import pandas as pd
|
||||||
|
import re
|
||||||
|
|
||||||
|
def classify_signal(desca, tagname, descb=None):
    """
    Classify signal based on DESCA content, TAGNAME, and DESCB.

    Returns one of 'SPARE', 'O', 'I', 'IOLink' or 'UNKNOWN'.  The rules are
    evaluated strictly in order because several markers are substrings of one
    another: PB_LT before PB, FIOH before FIO, SOL+DIVERT before plain SOL.
    """
    if pd.isna(desca):
        return 'UNKNOWN'

    a = str(desca).upper()
    t = str(tagname).upper()
    b = str(descb).upper() if pd.notna(descb) else ''

    # Ordered rule table: the first predicate that fires decides the class.
    rules = (
        (lambda: 'SPARE' in a, 'SPARE'),
        # Signal O patterns (higher priority first)
        (lambda: re.search(r'BCN\d+_[AGBR]', a) is not None, 'O'),  # e.g. BCN3_B, BCN1_A
        (lambda: 'PB_LT' in a, 'O'),
        # PR proximity sensors: PR followed by digits
        (lambda: re.search(r'_PR\d+|PR\d+', a) is not None, 'I'),
        (lambda: 'STO' in a, 'O'),
        (lambda: 'BCN' in a and 'FIOH' in t, 'O'),
        # IOLink patterns (FIOH before FIO due to priority)
        (lambda: 'SOL' in a and 'DIVERT' in b, 'IOLink'),
        (lambda: 'LPE' in a, 'IOLink'),
        (lambda: 'FIOH' in a, 'IOLink'),
        (lambda: 'BCN' in a and 'FIO' in t, 'IOLink'),
        # SOL without DIVERT
        (lambda: 'SOL' in a, 'O'),
        # Signal I patterns
        (lambda: 'PWM' in a, 'I'),
        (lambda: 'CB' in a, 'I'),
        (lambda: 'FPE' in a, 'I'),
        (lambda: 'ENC' in a, 'I'),
        (lambda: 'PS' in a, 'I'),
        (lambda: 'EPC' in a, 'I'),
        (lambda: 'PX' in a, 'I'),
        (lambda: 'DISC' in a, 'I'),
        (lambda: 'PE' in a, 'I'),
        (lambda: 'PB' in a, 'I'),  # reached only after the PB_LT rule above
        (lambda: 'ESTOP' in a, 'I'),
        (lambda: 'IB16' in t or 'IB16S' in t, 'I'),
        (lambda: 'OB16E' in t, 'O'),
    )
    for fires, label in rules:
        if fires():
            return label
    return 'UNKNOWN'
|
||||||
|
|
||||||
|
def get_device_type(tagname):
    """Determine device type from TAGNAME.

    Markers are checked in priority order because some are substrings of
    others (FIOH before FIO, IB16S before IB16).  Returns 'UNKNOWN' when no
    marker matches.
    """
    name = str(tagname).upper()
    for marker, device in (
        ('VFD', 'APF'),
        ('FIOH', 'Hub'),
        ('FIO', 'M12DR'),
        ('SIO', 'SIO'),
        ('OB16E', 'OB16E'),
        ('IB16S', 'IB16S'),
        ('IB16', 'IB16'),
    ):
        if marker in name:
            return device
    return 'UNKNOWN'
|
||||||
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM01_COMPLETE.xlsm
Normal file
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM01_COMPLETE.xlsm
Normal file
Binary file not shown.
Binary file not shown.
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM02_COMPLETE.xlsm
Normal file
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM02_COMPLETE.xlsm
Normal file
Binary file not shown.
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM03_COMPLETE.xlsm
Normal file
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM03_COMPLETE.xlsm
Normal file
Binary file not shown.
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM04_COMPLETE.xlsm
Normal file
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM04_COMPLETE.xlsm
Normal file
Binary file not shown.
Binary file not shown.
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM05_COMPLETE.xlsm
Normal file
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM05_COMPLETE.xlsm
Normal file
Binary file not shown.
Binary file not shown.
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM06_COMPLETE.xlsm
Normal file
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM06_COMPLETE.xlsm
Normal file
Binary file not shown.
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM07_COMPLETE.xlsm
Normal file
BIN
PLC Data Generator/data/IO Assignment_MTN6_MCM07_COMPLETE.xlsm
Normal file
Binary file not shown.
Binary file not shown.
189
PLC Data Generator/extract_fio_fioh.py
Normal file
189
PLC Data Generator/extract_fio_fioh.py
Normal file
@ -0,0 +1,189 @@
|
|||||||
|
import pandas as pd
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
def extract_fio_fioh_modules(input_file='MCM04_DESC_IP_MERGED.xlsx', output_file='FIO_FIOH_OUTPUT.csv'):
    """
    Extract FIO and FIOH modules from DESC/IP sheet and create output with format:
    TAGNAME, ADDR, TERM, TERMDESC, DESCA, DESCB

    Every module matching 'FIO' in TAGNAME is expanded to a full IO0..IO15
    channel list; channels with no row in the source sheet become SPARE.
    A DPM column is appended: for master FIO (M12DR) modules it comes from
    the workbook's NETWORK sheet, for FIOH hubs from the master FIO that
    references them in DESCA.  Writes a CSV and returns the DataFrame, or
    returns None if the workbook cannot be read or no modules are found.

    # NOTE(review): assumes the DESC_IP sheet has columns TAGNAME, TERM,
    # DESCA, DESCB, DEVICE_TYPE and the NETWORK sheet has Name, DPM -
    # confirm against the workbook template.
    """
    try:
        # Read the DESC/IP merged data
        print(f"Reading input file: {input_file}")
        xl = pd.ExcelFile(input_file)
        # Try to auto-detect the DESC/IP sheet (kept for backward compatibility)
        df = pd.read_excel(xl, sheet_name='DESC_IP')
        print(f"Total rows in DESC_IP sheet: {len(df)}")

        # --- Load NETWORK sheet for DPM mapping ----------------------------
        # Sheet name is matched case-insensitively on the substring 'NETWORK'.
        network_sheet = None
        for sheet in xl.sheet_names:
            if 'NETWORK' in sheet.upper():
                network_sheet = sheet
                break
        if network_sheet:
            network_df = pd.read_excel(xl, sheet_name=network_sheet)
            print(f"Loaded NETWORK sheet: {network_sheet} ({len(network_df)} rows)")
            # Build mapping from Name -> DPM (blank-safe)
            network_df['Name'] = network_df['Name'].astype(str).str.strip()
            network_df['DPM'] = network_df['DPM'].fillna('').astype(str).str.strip()
            name_to_dpm = dict(zip(network_df['Name'], network_df['DPM']))
        else:
            print("WARNING: NETWORK sheet not found in workbook – DPM column will be blank for masters")
            name_to_dpm = {}

    except FileNotFoundError:
        print(f"ERROR: File {input_file} not found!")
        return
    except Exception as e:
        print(f"ERROR: Failed to read {input_file}: {str(e)}")
        return

    # ---------------------------------------------------------------------
    # Build a mapping of FIOH tag -> its MASTER FIO tag by scanning DESC_IP
    # Rows where DESCA contains 'FIOH' typically reference the hub on a master
    # channel. We use these to derive the master relationship.
    fioh_master_map = {}
    fioh_ref_rows = df[df['DESCA'].astype(str).str.contains('FIOH', case=False, na=False)]
    for _, r in fioh_ref_rows.iterrows():
        fioh_tag = str(r['DESCA']).strip()
        master_tag = str(r['TAGNAME']).strip()
        # Keep the first master encountered to avoid overriding inconsistencies
        fioh_master_map.setdefault(fioh_tag, master_tag)

    # Filter for FIO and FIOH modules (TAGNAME containing "FIO")
    fio_fioh_filter = df['TAGNAME'].str.contains('FIO', case=False, na=False)
    fio_fioh_data = df[fio_fioh_filter].copy()

    print(f"Found {len(fio_fioh_data)} FIO/FIOH entries")

    if len(fio_fioh_data) == 0:
        print("No FIO/FIOH modules found in the data!")
        return

    # Get unique module names
    unique_modules = fio_fioh_data['TAGNAME'].unique()
    print(f"Found {len(unique_modules)} unique FIO/FIOH modules")

    # Define channel mappings based on device type
    def get_channels_for_device(device_type):
        """Return list of channels for a given device type"""
        # Both FIO masters (M12DR) and FIOH hubs expose 16 IO channels;
        # any other device type gets no expansion.
        if device_type == 'M12DR':  # FIO devices
            return [f'IO{i}' for i in range(16)]  # IO0 to IO15
        elif device_type == 'Hub':  # FIOH devices
            return [f'IO{i}' for i in range(16)]  # IO0 to IO15
        else:
            return []

    # Prepare output data
    output_rows = []

    for module_name in unique_modules:
        # Get module data
        module_data = fio_fioh_data[fio_fioh_data['TAGNAME'] == module_name]

        if len(module_data) == 0:
            continue

        # Get device type from first row
        device_type = module_data.iloc[0]['DEVICE_TYPE']
        channels = get_channels_for_device(device_type)

        print(f"Processing {module_name} ({device_type}) - {len(channels)} channels")

        # Create a mapping of existing data by TERM
        existing_data = {}
        for _, row in module_data.iterrows():
            term = str(row['TERM']).strip()
            existing_data[term] = {
                'DESCA': row['DESCA'] if pd.notna(row['DESCA']) else '',
                'DESCB': row['DESCB'] if pd.notna(row['DESCB']) else ''
            }

        # Generate output rows for all channels
        for channel in channels:
            # Create ADDR by combining module name with channel
            addr = f"{module_name}_{channel}"

            # Get DESCA and DESCB from existing data if available
            if channel in existing_data:
                desca = existing_data[channel]['DESCA']
                descb = existing_data[channel]['DESCB']
            else:
                # Default to SPARE if no existing data
                desca = 'SPARE'
                descb = ''

            # Determine DPM value based on device type
            if device_type == 'M12DR':  # Master FIO
                dpm_value = name_to_dpm.get(module_name, '')
            elif device_type == 'Hub':  # FIOH – use its master
                dpm_value = fioh_master_map.get(module_name, '')
            else:
                dpm_value = ''

            output_rows.append({
                'TAGNAME': module_name,
                'ADDR': addr,
                'TERM': channel,
                'TERMDESC': '',  # Empty as shown in example
                'DESCA': desca,
                'DESCB': descb,
                'DPM': dpm_value
            })

    # Create output DataFrame
    output_df = pd.DataFrame(output_rows)

    # Extract numeric part from TERM for natural sorting
    def extract_io_number(term):
        """Extract the numeric part from IO term for proper sorting"""
        # 'IO10' must sort after 'IO2', so compare the integer, not the string.
        match = re.match(r'IO(\d+)', term)
        if match:
            return int(match.group(1))
        return 0

    # Add a temporary column for sorting
    output_df['TERM_NUM'] = output_df['TERM'].apply(extract_io_number)

    # Sort by TAGNAME and then by the numeric value of TERM
    output_df = output_df.sort_values(['TAGNAME', 'TERM_NUM'])

    # Drop the temporary column
    output_df = output_df.drop(columns=['TERM_NUM'])

    print(f"\nGenerated {len(output_df)} output rows")
    print(f"Saving to: {output_file}")

    # Replace any NaN values with empty strings for clean output
    output_df = output_df.fillna('')

    # Ensure DPM column is last (you can change order if desired)
    cols = ['TAGNAME', 'ADDR', 'TERM', 'TERMDESC', 'DESCA', 'DESCB', 'DPM']
    output_df = output_df[cols]

    # Save to CSV
    output_df.to_csv(output_file, index=False)

    print(f"\nSample output:")
    print(output_df.head(15))

    print(f"\nOutput saved successfully to {output_file}")
    return output_df
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Resolve optional CLI overrides: argv[1] = input workbook,
    # argv[2] = output CSV; fall back to the historical defaults.
    cli_args = sys.argv[1:]
    input_file = cli_args[0] if cli_args else 'MCM04_DESC_IP_MERGED.xlsx'
    output_file = cli_args[1] if len(cli_args) > 1 else 'FIO_FIOH_OUTPUT.csv'

    extract_fio_fioh_modules(input_file, output_file)
|
||||||
75
PLC Data Generator/extract_io_sheet.py
Normal file
75
PLC Data Generator/extract_io_sheet.py
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
import pandas as pd
import re

# Flat script: expands the per-module IO columns (IO0..IO15) of an Excel
# sheet into one row per (module, channel) and writes MCM04_IO_EXPANDED.csv.
# NOTE(review): all file I/O runs at import time - confirm this module is
# only ever executed as a script, never imported.

# Read the Excel file
input_file = 'mcm04 very last.xlsx'
df = pd.read_excel(input_file)

# Prepare output rows
output_rows = []

io_columns = [f'IO{i}' for i in range(16)]

# First pass: collect all prefixes with JR1_PB (JAM RESET PUSHBUTTON)
# A prefix that owns a jam-reset pushbutton later gets the 3-stack (rather
# than 2-stack) beacon description.
jam_reset_prefixes = set()
for _, row in df.iterrows():
    for io_col in io_columns:
        val = row.get(io_col, '')
        if pd.isna(val) or val == '':
            continue
        if 'JR1_PB' in str(val):
            m = re.match(r'(S\d+)_', str(val))
            if m:
                jam_reset_prefixes.add(m.group(1))

# Second pass: build output with DESB logic
def get_prefix(tag):
    """Return the leading sorter prefix (e.g. 'S01' from 'S01_BCN1') or None."""
    m = re.match(r'(S\d+)_', str(tag))
    return m.group(1) if m else None

def get_desb(desca):
    """Map a DESCA tag to its human-readable DESB description.

    Checks are ordered so that more specific markers (BCN1_A/BCN1_B,
    JR1_PB_LT) win over their shorter substrings (BCN1, JR1_PB).  SPARE and
    blank channels get an empty description.
    """
    if desca == 'SPARE' or pd.isna(desca) or desca == '':
        return ''
    tag = str(desca)
    prefix = get_prefix(tag)
    if 'BCN1_A' in tag:
        return 'AMBER BEACON LIGHT'
    if 'BCN1_B' in tag:
        return 'BLUE BEACON LIGHT'
    if 'BCN1' in tag:
        # Stack height depends on whether this sorter prefix has a jam-reset
        # pushbutton (collected in the first pass above).
        if prefix in jam_reset_prefixes:
            return '3 STACK IOLINK BEACON'
        else:
            return '2 STACK IOLINK BEACON'
    if 'SOL' in tag:
        return 'PKG RELEASE SOLENOID'
    if 'PR' in tag:
        return 'PKG RELEASE PUSHBUTTON'
    if 'PE1' in tag:
        return 'FULL PHOTOEYE 50%'
    if 'PE2' in tag:
        return 'FULL PHOTOEYE 100%'
    if 'GS1_PB_LT' in tag or 'GS1_PB' in tag:
        return 'CHUTE ENABLE PUSHBUTTON LIGHT'
    if 'JR1_PB_LT' in tag:
        return 'SORTER JAM RESET PUSHBUTTON LIGHT'
    if 'JR1_PB' in tag:
        return 'SORTER JAM RESET PUSHBUTTON'
    if 'FIOH' in tag:
        return 'HUB ARMOR BLOCK'
    return ''

# Expand every module row into one output row per IO channel; unassigned
# channels are emitted as SPARE with an empty DESB.
for _, row in df.iterrows():
    tagname = row['P_TAG1']
    for io_col in io_columns:
        term = io_col
        desca = row.get(io_col, '')
        if pd.isna(desca) or desca == '':
            desca = 'SPARE'
        desb = get_desb(desca)
        output_rows.append({'TAGNAME': tagname, 'TERM': term, 'DESCA': desca, 'DESB': desb})

# Output to CSV
output_df = pd.DataFrame(output_rows)
output_df.to_csv('MCM04_IO_EXPANDED.csv', index=False)
print('Output written to MCM04_IO_EXPANDED.csv')
|
||||||
133
PLC Data Generator/io_paths.py
Normal file
133
PLC Data Generator/io_paths.py
Normal file
@ -0,0 +1,133 @@
|
|||||||
|
import pandas as pd
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
|
||||||
|
from utils import normalize_io_term
|
||||||
|
|
||||||
|
def load_io_path_mappings():
    """Load IO path mappings from IO-To-Path.xlsx.

    Reads the eight per-device-type sheets and returns them as a tuple in the
    fixed order (APF, M12DR, Hub, SorterHub, SIO, IB16, OB16E, IB16S).
    Exits the process if the workbook is missing or any sheet fails to load,
    since nothing downstream can run without these mappings.
    """
    io_path_file = "IO-To-Path.xlsx"

    if not os.path.exists(io_path_file):
        print(f"CRITICAL: IO path file not found: {io_path_file}")
        sys.exit(1)

    # The return order of this tuple is part of the function's contract.
    sheet_order = ('APF', 'M12DR', 'Hub', 'SorterHub', 'SIO', 'IB16', 'OB16E', 'IB16S')
    try:
        frames = [pd.read_excel(io_path_file, sheet_name=sheet) for sheet in sheet_order]

        print("Loaded IO path mappings:")
        for sheet, frame in zip(sheet_order, frames):
            print(f"  {sheet}: {len(frame)} rows")

        return tuple(frames)

    except Exception as e:
        print(f"CRITICAL: Error loading IO path mappings: {e}")
        sys.exit(1)
|
||||||
|
|
||||||
|
def get_io_path(tagname, term, signal_type, device_type, apf_df, m12dr_df, hub_df, sorter_hub_df, sio_df, ib16_df, ob16e_df, ib16s_df):
    """Resolve the controller IO path for one tag / terminal / signal combo.

    Returns the path string, or None when no mapping applies (unknown device,
    missing term, no matching row, or empty path cell).
    """
    if device_type == 'UNKNOWN' or pd.isna(term):
        return None

    # Pick the mapping table for this device family.
    if device_type == 'Hub':
        # Sorter hubs (tag name contains "S0") use their own mapping sheet.
        df = sorter_hub_df if 'S0' in str(tagname).upper() else hub_df
    else:
        table_by_device = {
            'APF': apf_df,
            'M12DR': m12dr_df,
            'SIO': sio_df,
            'IB16': ib16_df,
            'OB16E': ob16e_df,
            'IB16S': ib16s_df,
        }
        df = table_by_device.get(device_type)
        if df is None:
            return None

    if df is None:
        return None

    # normalize_io_term yields spelling variants (e.g. IO1 vs IO01) so either
    # form in the workbook matches.
    row = None
    for candidate in normalize_io_term(term):
        hits = df[df['IO'] == candidate]
        if not hits.empty:
            row = hits.iloc[0]
            break

    if row is None:
        return None

    term_upper = str(term).upper()
    safety_capable = device_type == 'APF' or device_type == 'SIO'
    path_value = None

    # Term prefix wins first (SI/SO have dedicated columns), then IO-Link,
    # then the plain input/output columns.
    if safety_capable and term_upper.startswith('SI'):
        path_value = row.get('SIPath')
    elif safety_capable and term_upper.startswith('SO'):
        path_value = row.get('SOPath')
    elif signal_type == 'IOLink' and (device_type == 'M12DR' or device_type == 'SIO'):
        path_value = row.get('IOLinkPath')
    elif signal_type == 'I':
        path_value = row.get('IPath')
    elif signal_type == 'O':
        path_value = row.get('OPath')
    elif signal_type == 'SPARE':
        # SPARE entries default to IPath, falling back to OPath when empty.
        path_value = row.get('IPath')
        if pd.isna(path_value) or path_value == '':
            path_value = row.get('OPath')

    if pd.isna(path_value) or path_value == '':
        return None

    path_str = str(path_value)

    if device_type in ['APF', 'M12DR', 'Hub', 'SIO']:
        # Network devices: the sheet uses the generic family name as a
        # placeholder prefix; substitute the real tag name.
        path_str = path_str.replace(device_type, str(tagname))
    else:
        # Local chassis modules (IB16, OB16E, IB16S): slot number comes from
        # the tag name, e.g. "...SLOT4...".
        slot_match = re.search(r'SLOT(\d+)', str(tagname).upper())
        if slot_match:
            path_str = f"Local:{slot_match.group(1)}:{path_str}"
        else:
            return None

    return path_str
|
||||||
149
PLC Data Generator/main.py
Normal file
149
PLC Data Generator/main.py
Normal file
@ -0,0 +1,149 @@
|
|||||||
|
import pandas as pd
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
|
||||||
|
from io_paths import load_io_path_mappings
|
||||||
|
from process import process_data
|
||||||
|
from post_process import post_process_io_data
|
||||||
|
|
||||||
|
def create_desc_ip_sheet():
    """End-to-end driver: build the DESC_IP merged workbook from an input Excel file.

    Reads the Excel path from argv[1], loads the IO path mapping tables,
    processes the DESC/NETWORK sheets, post-processes the result, and appends
    the safety/zones sheets that the downstream Routines Generator expects.
    Exits with code 1 on any fatal problem.
    """
    # Get Excel file path from command line arguments
    if len(sys.argv) < 2:
        print("Usage: python main.py <excel_file_path>")
        sys.exit(1)

    excel_file = sys.argv[1]

    if not os.path.exists(excel_file):
        print(f"CRITICAL: Excel file not found: {excel_file}")
        sys.exit(1)

    # Load IO path mappings (tuple of 8 DataFrames, one per device family)
    print("Loading IO path mappings...")
    apf_df, m12dr_df, hub_df, sorter_hub_df, sio_df, ib16_df, ob16e_df, ib16s_df = load_io_path_mappings()

    try:
        # Read Excel file to check available sheets
        xl = pd.ExcelFile(excel_file)
        print(f"Available sheets: {xl.sheet_names}")

        # Locate the DESC/NETWORK sheets by substring so minor naming
        # variations in the workbook still match.
        desc_sheet = None
        network_sheet = None

        for sheet in xl.sheet_names:
            if 'DESC' in sheet.upper():
                desc_sheet = sheet
            if 'NETWORK' in sheet.upper():
                network_sheet = sheet

        print(f"Found DESC sheet: {desc_sheet}")
        print(f"Found NETWORK sheet: {network_sheet}")

        if not desc_sheet or not network_sheet:
            print("CRITICAL: Required sheets 'DESC...' and 'NETWORK...' not found in the Excel file.")
            sys.exit(1)

        # Read the sheets
        desc_df = pd.read_excel(xl, sheet_name=desc_sheet)
        network_df = pd.read_excel(xl, sheet_name=network_sheet)

        print(f"\nDESC columns: {list(desc_df.columns)}")
        print(f"NETWORK columns: {list(network_df.columns)}")

        # Sort network data by PartNumber, DPM, and then Name; NaNs are
        # replaced with '' first so the sort does not fail on mixed types.
        network_df['PartNumber'] = network_df['PartNumber'].fillna('')  # Handle NaN in PartNumber
        network_df['DPM'] = network_df['DPM'].fillna('')  # Handle NaN in DPM
        network_df = network_df.sort_values(by=['PartNumber', 'DPM', 'Name'])

        # Process the data based on user requirements
        process_data(
            desc_df,
            network_df,
            excel_file,
            apf_df,
            m12dr_df,
            hub_df,
            sorter_hub_df,
            sio_df,
            ib16_df,
            ob16e_df,
            ib16s_df
        )

        # Determine subsystem (e.g. MCM04) from the Excel file path so that
        # we reference the exact file produced in process_data
        subsystem_match = re.search(r"(MCM\d+)", excel_file, re.IGNORECASE)
        subsystem = subsystem_match.group(1).upper() if subsystem_match else "MCM"

        # Now run post-processing on the freshly generated workbook
        new_file = f"{subsystem}_DESC_IP_MERGED.xlsx"
        output_file = f"{subsystem}_OUTPUT.csv"
        post_process_io_data(new_file, output_file)

        # Copy the output file to the standard name expected by streamlined generator
        import shutil
        if os.path.exists(new_file):
            shutil.copy2(new_file, "DESC_IP_MERGED.xlsx")
            print(f"Created standard output file: DESC_IP_MERGED.xlsx")

        # Add minimal safety sheets for Routines Generator compatibility.
        # mode='a' + if_sheet_exists='replace' appends to the existing workbook,
        # replacing these sheets if a previous run already created them.
        print("Adding minimal safety sheets for Routines Generator...")
        with pd.ExcelWriter("DESC_IP_MERGED.xlsx", mode='a', if_sheet_exists='replace') as writer:
            # Create minimal empty safety sheets that LimitedDataLoader expects
            # Note: These are minimal empty sheets - the actual safety devices will be extracted from DESC_IP
            empty_rst = pd.DataFrame(columns=['TAGNAME', 'DESCA', 'IO_PATH', 'TERM'])
            empty_sto = pd.DataFrame(columns=['TAGNAME', 'DESCA', 'IO_PATH', 'TERM'])
            empty_epc = pd.DataFrame(columns=['TAGNAME', 'DESCA', 'IO_PATH', 'TERM'])

            # Load zones configuration from zones_config.py
            print("Loading zones configuration...")
            try:
                # Import zones configuration from the parent directory
                sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../')
                from zones_config import ZONES_CONFIGS, DEFAULT_ZONES

                # Determine which zones configuration to use based on subsystem
                if subsystem in ZONES_CONFIGS:
                    zones_config = ZONES_CONFIGS[subsystem]
                    print(f"Using {subsystem} zones configuration")
                else:
                    zones_config = DEFAULT_ZONES
                    print(f"Using default zones configuration (subsystem {subsystem} not found)")

                # Convert zones configuration to DataFrame format
                zone_data = []
                for zone_config in zones_config:
                    zone_data.append({
                        'name': zone_config.get('name', ''),
                        'start': zone_config.get('start', ''),
                        'stop': zone_config.get('stop', ''),
                        'interlock': zone_config.get('interlock', '')
                    })

                zones_df = pd.DataFrame(zone_data)
                print(f"Loaded {len(zone_data)} zones: {[z['name'] for z in zone_data]}")

            except ImportError as e:
                print(f"Warning: Could not load zones_config.py ({e}), falling back to minimal MCM zone")
                # Fallback to minimal MCM zone if zones_config.py is not available
                zones_df = pd.DataFrame([{
                    'name': 'MCM',
                    'start': 'MCM_S_PB',
                    'stop': 'MCM_EPB_STATUS',
                    'interlock': ''
                }])

            empty_rst.to_excel(writer, sheet_name='RST', index=False)
            empty_sto.to_excel(writer, sheet_name='STO', index=False)
            empty_epc.to_excel(writer, sheet_name='EPC', index=False)
            zones_df.to_excel(writer, sheet_name='ZONES', index=False)
            print("Added empty RST, STO, EPC, and configured ZONES sheets")

    except Exception as e:
        print(f"Error occurred during processing: {e}")
        sys.exit(1)
|
||||||
|
|
||||||
|
# Script entry point: run the full DESC_IP generation pipeline.
if __name__ == "__main__":
    create_desc_ip_sheet()
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user