#!/usr/bin/env python3
"""
Enhanced MCM Project Generator
==============================
Enhanced version of MCM Project Generator that processes Excel data with
TAGNAME, IP, PARTNUMBER, IO_PATH, DESC, TERM columns and generates complete L5X projects.
"""
import pandas as pd
import os
import tempfile
import shutil
import xml.etree.ElementTree as ET
from datetime import datetime
from typing import Dict, List, Tuple
from excel_data_processor import ExcelDataProcessor
# Import existing models
from models.apf_boilerplate_model import create_apf_module, APFModuleGenerator
from models.vfd_boilerplate_model import create_vfd_module, VFDModuleGenerator
from models.l83es_boilerplate_model import create_l83es_controller, L83ESControllerGenerator
from models.en4tr_boilerplate_model import create_en4tr_module, EN4TRModuleGenerator
from models.ib16_boilerplate_model import create_ib16_module, IB16ModuleGenerator
from models.ob16e_boilerplate_model import create_ob16e_module, OB16EModuleGenerator
from models.ib16s_boilerplate_model import create_ib16s_module, IB16SModuleGenerator
from models.m12dr_boilerplate_model import create_m12dr_module, M12DRModuleGenerator
from models.zmx_boilerplate_model import create_zmx_module, ZMXModuleGenerator
from models.extendo_boilerplate_model import create_extendo_module, ExtendoModuleGenerator
from models.turck_hub_boilerplate_model import create_turck_hub_module, TurckHubModuleGenerator
from models.tl70_beacon_boilerplate_model import create_tl70_beacon, TL70BeaconGenerator, TL70BeaconConfig
from models.dpm_boilerplate_model import create_dpm_module, DPMModuleGenerator
from models.lpe_boilerplate_model import create_lpe_module, LPEModuleConfig, LPEBoilerplateGenerator
from models.pmm_boilerplate_model import create_pmm_module, PMMModuleGenerator
from models.festo_solenoid_boilerplate_model import create_festo_solenoid, FestoSolenoidGenerator
from models.sio_boilerplate_model import create_sio_module, SIOModuleGenerator
class EnhancedMCMGenerator:
"""Enhanced MCM generator that processes Excel data and generates complete L5X projects."""
def __init__(self, project_name: str, excel_file: str = "MCM04_Data.xlsx", zones_dict=None, boilerplate_dir: str = "boilerplate"):
self.project_name = project_name
self.controller_name = project_name
self.excel_file = excel_file
self.zones_dict = zones_dict
self.boilerplate_dir = boilerplate_dir
self.generated_dir = "generated_projects"
os.makedirs(self.generated_dir, exist_ok=True)
# Set global boilerplate directory for all models to use
os.environ['MCM_BOILERPLATE_DIR'] = boilerplate_dir
# Initialize data processor
self.data_processor = ExcelDataProcessor(excel_file)
# Module lists (organized by type)
self.apf_modules = []
self.vfd_modules = []
self.iolm_modules = []
self.zmx_modules = []
self.extendo_modules = []
self.fioh_modules = []
self.dpm_modules = []
self.beacon_modules = []
self.lpe_modules = []
self.pmm_modules = []
self.solenoid_modules = []
self.sio_modules = []
self.unknown_modules = []
self.ib16_modules = []
self.ib16s_modules = []
self.ob16e_modules = []
def _set_generator_boilerplate_dir(self, generator):
"""Set the boilerplate directory for a generator if it supports it."""
if hasattr(generator, 'boilerplate_path') and hasattr(generator, 'boilerplate_filename'):
# Update the boilerplate path to use the project-specific directory
old_path = generator.boilerplate_path
generator.boilerplate_path = os.path.join(self.boilerplate_dir, generator.boilerplate_filename)
# Force reset and reload with the new path
if hasattr(generator, 'tree'):
generator.tree = None
if hasattr(generator, 'root'):
generator.root = None
try:
generator.load_boilerplate()
print(f" Successfully loaded boilerplate: {generator.boilerplate_path}")
except FileNotFoundError:
# If project-specific boilerplate doesn't exist, fall back to default
print(f" Warning: Project-specific boilerplate not found, using default for {generator.boilerplate_filename}")
generator.boilerplate_path = old_path
if hasattr(generator, 'tree'):
generator.tree = None
if hasattr(generator, 'root'):
generator.root = None
generator.load_boilerplate()
elif hasattr(generator, 'config') and hasattr(generator.config, 'boilerplate_path'):
# For generators that store boilerplate_path in config
filename = os.path.basename(generator.config.boilerplate_path)
old_path = generator.config.boilerplate_path
generator.config.boilerplate_path = os.path.join(self.boilerplate_dir, filename)
# Force reset and reload with the new path
if hasattr(generator, 'tree'):
generator.tree = None
if hasattr(generator, 'root'):
generator.root = None
try:
generator.load_boilerplate()
print(f" Successfully loaded boilerplate: {generator.config.boilerplate_path}")
except FileNotFoundError:
# If project-specific boilerplate doesn't exist, fall back to default
print(f" Warning: Project-specific boilerplate not found, using default for {filename}")
generator.config.boilerplate_path = old_path
if hasattr(generator, 'tree'):
generator.tree = None
if hasattr(generator, 'root'):
generator.root = None
generator.load_boilerplate()
return generator
def _optimize_for_large_projects(self):
"""Apply optimizations for large projects to reduce SDK compilation burden."""
total_modules = sum([
len(self.iolm_modules),
len(self.zmx_modules),
len(self.extendo_modules),
len(self.apf_modules),
len(self.vfd_modules),
len(self.dpm_modules),
len(self.pmm_modules),
len(self.beacon_modules),
len(self.lpe_modules),
len(self.fioh_modules),
len(self.ib16_modules),
len(self.ib16s_modules),
len(self.ob16e_modules),
len(self.solenoid_modules),
len(self.sio_modules),
])
if total_modules > 200: # Optimization threshold
# print(f"Large project detected ({total_modules} modules) - applying optimizations...")
return True
return False
def _apply_module_optimizations(self, generator):
"""Apply optimizations to a module generator to reduce complexity."""
# No optimizations needed - keep boilerplate data intact
# Additional optimizations can be added here
return generator
def load_and_process_data(self) -> bool:
"""Load and process Excel data."""
if not self.data_processor.load_data():
return False
if not self.data_processor.process_data():
return False
# Organize modules by type
self._organize_modules_by_type()
return True
def _organize_modules_by_type(self):
"""Organize loaded modules by their type."""
for tagname, module_data in self.data_processor.modules.items():
if module_data.unknown_part_number:
self.unknown_modules.append({
'tagname': tagname,
'part_number': module_data.part_number,
'ip_address': module_data.ip_address,
'io_mappings': module_data.io_mappings
})
continue
# Get module type from part number
if module_data.part_number == "TBIL-M1-16DXP":
# FIOH modules are now created dynamically based on TERM IO4/IO12 analysis
self.fioh_modules.append({
'name': tagname,
'parent_module': module_data.parent_module,
'part_number': module_data.part_number,
'terminal': module_data.terminal, # Store terminal info (IO4/IO12)
'comments': self.data_processor.get_comments_for_module(tagname)
})
elif module_data.part_number in self.data_processor.PART_NUMBER_MAP:
part_info = self.data_processor.PART_NUMBER_MAP[module_data.part_number]
module_type = part_info["type"]
# Get comments for this module
comments = self.data_processor.get_comments_for_module(tagname)
if module_type == "APF":
hp = part_info["hp"]
self.apf_modules.append({
'name': tagname,
'hp': hp,
'ip_address': module_data.ip_address,
'part_number': module_data.part_number,
'comments': comments
})
elif module_type == "VFD":
hp = part_info["hp"]
self.vfd_modules.append({
'name': tagname,
'hp': hp,
'ip_address': module_data.ip_address,
'part_number': module_data.part_number,
'comments': comments
})
elif module_type == "IOLM":
# Register an IO-Link Master (M12DR). Variant detection is now
# performed inside M12DRModuleGenerator.from_excel(), so the
# generator no longer needs the caller to supply it.
# Store minimal info; only the name is required later when
# the generic factory creates the actual module XML.
self.iolm_modules.append({
'name': tagname,
'ip_address': module_data.ip_address,
'part_number': module_data.part_number
})
# Check for beacons within this IOLM module
# Only treat as TL70 beacons if they are IO-Link devices
beacon_mappings = [mapping for mapping in module_data.io_mappings
if mapping.description and "BCN" in mapping.description.upper() and
mapping.signal.upper() == "IOLINK"]
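# For example (illustrative): a mapping whose DESC is
# "S011047_BCN1 2 STACK IO LINK BEACON" with SIGNAL "IOLINK" is treated as a
# TL70 beacon child of this IOLM, while the same DESC with SIGNAL "O" is only
# recorded as an output comment (see output_bcn_mappings below).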
# Regular output BCN mappings should go to comments
output_bcn_mappings = [mapping for mapping in module_data.io_mappings
if mapping.description and "BCN" in mapping.description.upper() and
mapping.signal.upper() == "O"]
# Check for LPE within this IOLM module (even channels)
lpe_mappings = [mapping for mapping in module_data.io_mappings
if mapping.description and "LPE" in mapping.description.upper() and
mapping.terminal and mapping.terminal.startswith("IO") and
int(mapping.terminal.replace("IO", "")) % 2 == 0 and
mapping.signal.upper() == "IOLINK"]
# Check for solenoids within this IOLM module
# Only treat as Festo solenoids if they are IO-Link devices
solenoid_mappings = [mapping for mapping in module_data.io_mappings
if mapping.description and "SOL" in mapping.description.upper() and
mapping.signal.upper() == "IOLINK"]
# Create input comments dictionary for the M12DR module
input_comments = {}
# Add output BCN mappings to comments
if output_bcn_mappings:
for mapping in output_bcn_mappings:
input_comments[mapping.terminal] = mapping.description
# Add LPE mappings to comments
if lpe_mappings:
for mapping in lpe_mappings:
input_comments[mapping.terminal] = mapping.description
# Create separate beacon modules for each beacon found
for beacon_mapping in beacon_mappings:
beacon_name = self._extract_beacon_name(beacon_mapping.description)
if beacon_name:
# Extract terminal number from IO terminal (IO2 -> 2, IO8 -> 8, etc.)
terminal_number = beacon_mapping.terminal.replace("IO", "") if beacon_mapping.terminal.startswith("IO") else "0"
# Use DESB for beacon description if available, otherwise fall back to DESC
beacon_description = beacon_mapping.desb if beacon_mapping.desb else beacon_mapping.description
self.beacon_modules.append({
'name': beacon_name,
'parent_module': tagname, # The M12DR module
'parent_port_id': "4", # All beacons connect to the IO-Link port (port 4)
'port_address': terminal_number, # Use terminal number as IO-Link address
'application_tag': beacon_name[:29], # Truncate to 29 chars
'description': beacon_description # Use DESB if available, otherwise DESC
})
# Add beacon to parent module's comments
input_comments[beacon_mapping.terminal] = beacon_mapping.description
# Create separate LPE modules for each LPE found
for lpe_mapping in lpe_mappings:
lpe_name = self._extract_lpe_name(lpe_mapping.description)
if lpe_name:
terminal_number = lpe_mapping.terminal.replace("IO", "") # IO-Link address
self.lpe_modules.append({
'name': lpe_name,
'parent_module': tagname, # The M12DR module
'parent_port_id': "4", # All LPEs connect to the IO-Link port (port 4)
'port_address': terminal_number,
'description': lpe_mapping.description
})
# Create separate solenoid modules for each solenoid found
for solenoid_mapping in solenoid_mappings:
solenoid_name = self._extract_solenoid_name(solenoid_mapping.description)
if solenoid_name:
# Extract terminal number from IO terminal (IO04 -> 4, IO06 -> 6, etc.)
terminal_num_str = solenoid_mapping.terminal.replace("IO", "") if solenoid_mapping.terminal.startswith("IO") else "0"
terminal_number = str(int(terminal_num_str)) # Convert to int then back to string to remove leading zeros
self.solenoid_modules.append({
'name': solenoid_name,
'parent_module': tagname, # The M12DR module
'parent_port_id': "4", # All solenoids connect to the IO-Link port (port 4)
'port_address': terminal_number, # Use terminal number as IO-Link address
'description': solenoid_mapping.description
})
# Add solenoid to parent module's comments
input_comments[solenoid_mapping.terminal] = solenoid_mapping.description
elif module_type == "IB16":
self.ib16_modules.append({
'name': tagname,
'slot_address': tagname.split('_')[0].replace('SLOT', ''),
'comments': module_data.comments
})
elif module_type == "IB16S":
self.ib16s_modules.append({
'name': tagname,
'slot_address': tagname.split('_')[0].replace('SLOT', ''),
'comments': module_data.comments
})
elif module_type == "OB16E":
self.ob16e_modules.append({
'name': tagname,
'slot_address': tagname.split('_')[0].replace('SLOT', ''),
'comments': module_data.comments
})
elif module_type == "ZMX":
self.zmx_modules.append({
'name': tagname,
'ip_address': module_data.ip_address,
'part_number': module_data.part_number,
'comments': comments
})
elif module_type == "EXTENDO":
self.extendo_modules.append({
'name': tagname,
'ip_address': module_data.ip_address,
'part_number': module_data.part_number,
'comments': comments
})
elif module_type == "DPM":
self.dpm_modules.append({
'name': tagname,
'ip_address': module_data.ip_address,
'part_number': module_data.part_number,
'comments': comments
})
elif module_type == "PMM":
self.pmm_modules.append({
'name': tagname,
'ip_address': module_data.ip_address,
'part_number': module_data.part_number,
'comments': comments
})
elif module_type == "SIO":
self.sio_modules.append({
'name': tagname,
'ip_address': module_data.ip_address,
'part_number': module_data.part_number,
'comments': comments
})
else:
# Unknown module
self.unknown_modules.append({
'tagname': tagname,
'part_number': module_data.part_number,
'ip_address': module_data.ip_address,
'io_mappings': module_data.io_mappings
})
# Print summary of modules found
if self.iolm_modules:
print(f"Found {len(self.iolm_modules)} IOLM modules")
if self.lpe_modules:
print(f"Found {len(self.lpe_modules)} LPE modules")
if self.beacon_modules:
print(f"Found {len(self.beacon_modules)} Beacon modules")
if self.solenoid_modules:
print(f"Found {len(self.solenoid_modules)} Solenoid modules")
if self.apf_modules:
print(f"Found {len(self.apf_modules)} APF modules")
if self.vfd_modules:
print(f"Found {len(self.vfd_modules)} VFD modules")
if self.dpm_modules:
print(f"Found {len(self.dpm_modules)} DPM modules")
if self.pmm_modules:
print(f"Found {len(self.pmm_modules)} PMM modules")
if self.unknown_modules:
print(f"WARNING: {len(self.unknown_modules)} unknown modules found")
for i, module in enumerate(self.unknown_modules, 1):
print(f" {i:2d}. {module['tagname']} - {module['part_number']} ({module['ip_address']})")
def generate_complete_project(self, split_mode: bool = False):
"""Generate the complete project L5X file(s) using the new ControllerBuilder.
Args:
split_mode: If True, generates two separate L5X files split in half
"""
if split_mode:
return self.generate_split_projects()
else:
return self._generate_single_project()
def _generate_single_project(self):
"""Generate a single complete project L5X file."""
from controller_builder import ControllerBuilder
# 1. Initialise builder (creates base controller + fixed chassis modules)
builder = ControllerBuilder(self.controller_name, skip_chassis_modules=True, boilerplate_dir=self.boilerplate_dir)
# 2. Append all Excel-derived modules into the builder's <Modules> section
modules_section = builder.get_modules_section()
self._add_excel_modules(modules_section)
# 2a. Create EN4TR modules that were registered during Excel module processing
self._create_registered_en4tr_modules(modules_section)
# 2b. Import AOIs and DataTypes from BaseProgram.L5X with required ordering
try:
base_l5x_path = os.path.join(os.path.dirname(__file__), "BaseProgram.L5X")
if os.path.exists(base_l5x_path):
print(f" Importing AOIs/DataTypes from base: {base_l5x_path}")
builder.import_base_sections_from_l5x(base_l5x_path)
else:
print(f" WARNING: BaseProgram.L5X not found at {base_l5x_path}")
except Exception as e:
print(f" WARNING: Failed importing base sections: {e}")
# 3. Embed generated programs from Routines Generator - TEMPORARILY DISABLED
# routines_generator_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), "Routines Generator")
# print(f"Looking for generated programs in: {routines_generator_dir}")
# builder.add_generated_programs(routines_generator_dir)
# Skipping generated programs embedding (temporarily disabled)
# 4. Embed generated tags from Routines Generator - TEMPORARILY DISABLED
# print(f"Looking for generated tags in: {routines_generator_dir}")
# builder.add_generated_tags(routines_generator_dir, zones_dict=getattr(self, 'zones_dict', None))
# Skipping generated tags embedding (temporarily disabled)
# 5. Finalise and save with timestamp in project-specific folder at root level
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
# Create project-specific directory at root level (not inside IO Tree Generator)
project_root = os.path.dirname(os.path.dirname(__file__))
root_generated_dir = os.path.join(project_root, "generated_projects")
project_output_dir = os.path.join(root_generated_dir, self.controller_name)
os.makedirs(project_output_dir, exist_ok=True)
# Create old folder for archiving previous versions
old_dir = os.path.join(project_output_dir, "old")
os.makedirs(old_dir, exist_ok=True)
# Move existing L5X files to old folder
import glob
existing_l5x = glob.glob(os.path.join(project_output_dir, f"{self.controller_name}_*.L5X"))
for old_l5x in existing_l5x:
old_filename = os.path.basename(old_l5x)
shutil.move(old_l5x, os.path.join(old_dir, old_filename))
print(f" Archived old L5X: {old_filename}")
output_file = os.path.join(project_output_dir, f"{self.controller_name}_{timestamp}.L5X")
builder.finalise_and_save(output_file)
print(f"OK: Generated project: {output_file}")
return [output_file]
def generate_split_projects(self) -> Tuple[str, str]:
"""Generate two L5X files with modules split in half, preserving parent-child relationships.
Returns:
Tuple of (file1_path, file2_path)
"""
print("Split projects generation")
# Validate parent-child relationships before splitting
self._validate_parent_child_relationships()
# Create module groups that preserve parent-child relationships
group1_modules, group2_modules = self._create_balanced_module_groups()
# Validate the split maintains relationships
self._validate_split_integrity(group1_modules, group2_modules)
# Generate first project file
file1_path = self._generate_project_with_modules(
f"{self.project_name}_Part1",
group1_modules,
1
)
# Generate second project file
file2_path = self._generate_project_with_modules(
f"{self.project_name}_Part2",
group2_modules,
2
)
print("OK: Split projects generated")
print(f"- Part 1: {file1_path} ({self._count_modules_in_group(group1_modules)} modules)")
print(f"- Part 2: {file2_path} ({self._count_modules_in_group(group2_modules)} modules)")
return file1_path, file2_path
def _create_balanced_module_groups(self) -> Tuple[Dict, Dict]:
"""Create two balanced module groups while preserving parent-child relationships.
Returns:
Tuple of (group1_dict, group2_dict) where each dict contains module lists by type
"""
# Creating balanced module groups
# Initialize empty groups
group1 = {
"iolm_modules": [],
"zmx_modules": [],
"extendo_modules": [],
"apf_modules": [],
"vfd_modules": [],
"dpm_modules": [],
"pmm_modules": [],
"beacon_modules": [],
"lpe_modules": [],
"fioh_modules": [],
"ib16_modules": [],
"ib16s_modules": [],
"ob16e_modules": [],
"solenoid_modules": [],
"sio_modules": [],
}
group2 = {
"iolm_modules": [],
"zmx_modules": [],
"extendo_modules": [],
"apf_modules": [],
"vfd_modules": [],
"dpm_modules": [],
"pmm_modules": [],
"beacon_modules": [],
"lpe_modules": [],
"fioh_modules": [],
"ib16_modules": [],
"ib16s_modules": [],
"ob16e_modules": [],
"solenoid_modules": [],
"sio_modules": [],
}
# Create parent-child relationship maps
parent_child_map = self._build_parent_child_relationships()
# Show relationship summary
if parent_child_map:
print(f" Found {len(parent_child_map)} parent modules with children:")
beacon_count = sum(1 for children in parent_child_map.values() for child_type, _ in children if child_type == 'beacon')
fioh_count = sum(1 for children in parent_child_map.values() for child_type, _ in children if child_type == 'fioh')
lpe_count = sum(1 for children in parent_child_map.values() for child_type, _ in children if child_type == 'lpe')
solenoid_count = sum(1 for children in parent_child_map.values() for child_type, _ in children if child_type == 'solenoid')
print(f" - {beacon_count} beacons")
print(f" - {fioh_count} FIOH modules")
print(f" - {lpe_count} LPE modules")
print(f" - {solenoid_count} solenoid modules")
# Distribute modules while keeping families together
self._distribute_module_families(group1, group2, parent_child_map)
return group1, group2
def _build_parent_child_relationships(self) -> Dict[str, List[Tuple[str, Dict]]]:
"""Build a map of parent modules to their children.
Returns:
Dict mapping parent_module_name -> list of (child_type, child_module_dict) tuples
"""
parent_child_map = {}
# Map FIOH modules to their parents
for fioh in self.fioh_modules:
parent = fioh.get('parent_module')
if parent:
if parent not in parent_child_map:
parent_child_map[parent] = []
parent_child_map[parent].append(('fioh', fioh))
# Map beacon modules to their parents (IOLM modules)
for beacon in self.beacon_modules:
parent = beacon.get('parent_module')
if parent:
if parent not in parent_child_map:
parent_child_map[parent] = []
parent_child_map[parent].append(('beacon', beacon))
# Map LPE modules to their parents (IOLM modules)
for lpe in self.lpe_modules:
parent = lpe.get('parent_module')
if parent:
if parent not in parent_child_map:
parent_child_map[parent] = []
parent_child_map[parent].append(('lpe', lpe))
# Map solenoid modules to their parents (IOLM modules)
for solenoid in self.solenoid_modules:
parent = solenoid.get('parent_module')
if parent:
if parent not in parent_child_map:
parent_child_map[parent] = []
parent_child_map[parent].append(('solenoid', solenoid))
return parent_child_map
def _distribute_module_families(self, group1: Dict, group2: Dict, parent_child_map: Dict[str, List[Tuple[str, Dict]]]):
"""Distribute modules between groups while keeping families together."""
# Track assigned modules to avoid duplicates
assigned_modules = set()
# Lists of all module types with their modules
all_module_lists = [
("iolm_modules", self.iolm_modules),
("zmx_modules", self.zmx_modules),
("extendo_modules", self.extendo_modules),
("apf_modules", self.apf_modules),
("vfd_modules", self.vfd_modules),
("dpm_modules", self.dpm_modules),
("pmm_modules", self.pmm_modules),
("ib16_modules", self.ib16_modules),
("ib16s_modules", self.ib16s_modules),
("ob16e_modules", self.ob16e_modules),
("sio_modules", self.sio_modules),
]
# Greedy balancing: assign each parent (plus its children) to whichever group currently has fewer modules
group1_total = 0
group2_total = 0
for module_type, modules in all_module_lists:
for module in sorted(modules, key=lambda m: m.get('name', '')):
module_name = module.get('name', '')
if module_name in assigned_modules:
continue
# Calculate family size (parent + all children)
family_size = 1 # The module itself
children = parent_child_map.get(module_name, [])
family_size += len(children)
# Log family assignment for debugging
# Assignment details suppressed for concise output
# Assign to group with fewer modules
if group1_total <= group2_total:
target_group = group1
group1_total += family_size
group_name = "Group 1"
else:
target_group = group2
group2_total += family_size
group_name = "Group 2"
# Assignment target suppressed for concise output
# Add parent module
target_group[module_type].append(module)
assigned_modules.add(module_name)
# Add all children to the same group
for child_type, child_module in children:
target_group[f"{child_type}_modules"].append(child_module)
assigned_modules.add(child_module.get('name', ''))
# Handle orphaned modules (those without assigned parents)
orphaned_modules = []
# Check FIOH modules
for fioh in self.fioh_modules:
fioh_name = fioh.get('name', '')
if fioh_name not in assigned_modules:
orphaned_modules.append(('fioh', fioh))
# Check beacon modules
for beacon in self.beacon_modules:
beacon_name = beacon.get('name', '')
if beacon_name not in assigned_modules:
orphaned_modules.append(('beacon', beacon))
# Check LPE modules
for lpe in self.lpe_modules:
lpe_name = lpe.get('name', '')
if lpe_name not in assigned_modules:
orphaned_modules.append(('lpe', lpe))
# Check solenoid modules
for solenoid in self.solenoid_modules:
solenoid_name = solenoid.get('name', '')
if solenoid_name not in assigned_modules:
orphaned_modules.append(('solenoid', solenoid))
# Distribute orphaned modules
# Orphan details suppressed; only assignment occurs
for module_type, module in orphaned_modules:
if group1_total <= group2_total:
group1[f"{module_type}_modules"].append(module)
group1_total += 1
group_name = "Group 1"
else:
group2[f"{module_type}_modules"].append(module)
group2_total += 1
group_name = "Group 2"
# Assignment summary suppressed
assigned_modules.add(module.get('name', ''))
def _generate_project_with_modules(self, project_name: str, module_groups: Dict, part_number: int) -> str:
"""Generate a project file with specific module groups.
Args:
project_name: Name for the project
module_groups: Dict containing module lists by type
part_number: Part number (1 or 2) for identification
Returns:
Path to generated file
"""
from controller_builder import ControllerBuilder
# Create builder
builder = ControllerBuilder(project_name, skip_chassis_modules=True, boilerplate_dir=self.boilerplate_dir)
modules_section = builder.get_modules_section()
# Add modules from the specific groups
self._add_excel_modules_from_groups(modules_section, module_groups)
# Create EN4TR modules that were registered during module processing
self._create_registered_en4tr_modules(modules_section)
# Import AOIs and DataTypes from BaseProgram.L5X
try:
base_l5x_path = os.path.join(os.path.dirname(__file__), "BaseProgram.L5X")
if os.path.exists(base_l5x_path):
print(f" Importing AOIs/DataTypes from base: {base_l5x_path}")
builder.import_base_sections_from_l5x(base_l5x_path)
else:
print(f" WARNING: BaseProgram.L5X not found at {base_l5x_path}")
except Exception as e:
print(f" WARNING: Failed importing base sections: {e}")
# Save the file with timestamp in project-specific folder at root level
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
# Create project-specific directory at root level (not inside IO Tree Generator)
project_root = os.path.dirname(os.path.dirname(__file__))
root_generated_dir = os.path.join(project_root, "generated_projects")
project_output_dir = os.path.join(root_generated_dir, project_name)
os.makedirs(project_output_dir, exist_ok=True)
# Create old folder for archiving previous versions
old_dir = os.path.join(project_output_dir, "old")
os.makedirs(old_dir, exist_ok=True)
# Move existing L5X files to old folder
import glob
existing_l5x = glob.glob(os.path.join(project_output_dir, f"{project_name}_*.L5X"))
for old_l5x in existing_l5x:
old_filename = os.path.basename(old_l5x)
shutil.move(old_l5x, os.path.join(old_dir, old_filename))
print(f" Archived old L5X: {old_filename}")
output_filename = os.path.join(project_output_dir, f"{project_name}_{timestamp}.L5X")
builder.finalise_and_save(output_filename)
return output_filename
def _add_excel_modules_from_groups(self, modules_section, module_groups: Dict):
"""Add modules from specific groups to the modules section."""
factory_map = {
"iolm_modules": lambda entry: self._set_generator_boilerplate_dir(
M12DRModuleGenerator.from_excel(self.data_processor.modules[entry["name"]])
),
"zmx_modules": lambda entry: self._set_generator_boilerplate_dir(
ZMXModuleGenerator.from_excel(self.data_processor.modules[entry["name"]])
),
"extendo_modules": lambda entry: self._set_generator_boilerplate_dir(
ExtendoModuleGenerator.from_excel(self.data_processor.modules[entry["name"]])
),
"fioh_modules": lambda entry: self._set_generator_boilerplate_dir(
TurckHubModuleGenerator.from_excel(self.data_processor.modules[entry["name"]])
),
"apf_modules": lambda entry: self._set_generator_boilerplate_dir(
APFModuleGenerator.from_excel(self.data_processor.modules[entry["name"]], hp=entry["hp"])
),
"vfd_modules": lambda entry: self._set_generator_boilerplate_dir(
VFDModuleGenerator.from_excel(self.data_processor.modules[entry["name"]], hp=entry["hp"])
),
"dpm_modules": lambda entry: self._set_generator_boilerplate_dir(
DPMModuleGenerator.from_excel(self.data_processor.modules[entry["name"]])
),
"pmm_modules": lambda entry: self._set_generator_boilerplate_dir(
PMMModuleGenerator.from_excel(self.data_processor.modules[entry["name"]])
),
"sio_modules": lambda entry: self._set_generator_boilerplate_dir(
SIOModuleGenerator.from_excel(self.data_processor.modules[entry["name"]])
),
"beacon_modules": lambda entry: TL70BeaconGenerator.from_mapping(entry),
"lpe_modules": lambda entry: LPEBoilerplateGenerator.from_mapping(entry),
"ib16_modules": lambda entry: IB16ModuleGenerator.from_mapping(entry, comments=entry['comments']),
"ib16s_modules": lambda entry: IB16SModuleGenerator.from_mapping(entry, comments=entry['comments']),
"ob16e_modules": lambda entry: OB16EModuleGenerator.from_mapping(entry, comments=entry['comments']),
"solenoid_modules": lambda entry: FestoSolenoidGenerator.from_mapping(entry),
}
ordered_lists = [
"iolm_modules",
"zmx_modules",
"extendo_modules",
"apf_modules",
"vfd_modules",
"dpm_modules",
"pmm_modules",
"beacon_modules",
"lpe_modules",
"fioh_modules",
"ib16_modules",
"ib16s_modules",
"ob16e_modules",
"solenoid_modules",
]
for list_name in ordered_lists:
raw_entries = module_groups.get(list_name, [])
entries = raw_entries if list_name == "beacon_modules" else sorted(raw_entries, key=lambda e: e.get('name', ''))
if not entries:
continue
# Add summary message for IOLM modules similar to FIOH
if list_name == "iolm_modules":
print(f"Created {len(entries)} IOLM modules with boilerplate selection:")
factory = factory_map[list_name]
for entry in entries:
try:
gen = factory(entry)
# Ensure boilerplate directory is set for all generators
gen = self._set_generator_boilerplate_dir(gen)
# Re-apply updates after boilerplate directory change
if hasattr(gen, 'apply_updates'):
gen.apply_updates()
module_elem = gen.root.find(
f".//Module[@Name='{entry['name']}']"
)
if module_elem is not None:
# Preserve APF safety attributes as before
if list_name == "apf_modules":
module_elem.set("SafetyNetwork", "16#0000_4c14_03e7_33a8")
module_elem.set("SafetyEnabled", "true")
modules_section.append(module_elem)
except Exception as e:
print(
f" ERROR: Failed to generate {list_name[:-8]} module {entry['name']}: {e}"
)
def _count_modules_in_group(self, module_group: Dict) -> int:
"""Count total modules in a group."""
total = 0
for module_list in module_group.values():
total += len(module_list)
return total
def _validate_parent_child_relationships(self):
"""Validate parent-child relationships before splitting."""
# Validating parent-child relationships
issues = []
# Check FIOH modules
for fioh in self.fioh_modules:
parent = fioh.get('parent_module')
if not parent:
issues.append(f"FIOH module '{fioh.get('name')}' has no parent_module")
continue
# Check if parent exists in our modules
parent_found = False
for module_list_name in ['iolm_modules', 'zmx_modules', 'extendo_modules', 'apf_modules', 'vfd_modules', 'dpm_modules', 'pmm_modules', 'sio_modules']:
module_list = getattr(self, module_list_name, [])
if any(m.get('name') == parent for m in module_list):
parent_found = True
break
if not parent_found:
issues.append(f"FIOH module '{fioh.get('name')}' references non-existent parent '{parent}'")
# Check beacon modules
for beacon in self.beacon_modules:
parent = beacon.get('parent_module')
if not parent:
issues.append(f"Beacon module '{beacon.get('name')}' has no parent_module")
continue
# Check if parent exists in IOLM modules (beacons typically connect to IOLM)
parent_found = any(m.get('name') == parent for m in self.iolm_modules)
if not parent_found:
issues.append(f"Beacon module '{beacon.get('name')}' references non-existent IOLM parent '{parent}'")
# Check LPE modules
for lpe in self.lpe_modules:
parent = lpe.get('parent_module')
if not parent:
issues.append(f"LPE module '{lpe.get('name')}' has no parent_module")
continue
# Check if parent exists in IOLM modules
parent_found = any(m.get('name') == parent for m in self.iolm_modules)
if not parent_found:
issues.append(f"LPE module '{lpe.get('name')}' references non-existent IOLM parent '{parent}'")
# Check solenoid modules
for solenoid in self.solenoid_modules:
parent = solenoid.get('parent_module')
if not parent:
issues.append(f"Solenoid module '{solenoid.get('name')}' has no parent_module")
continue
# Check if parent exists in IOLM modules
parent_found = any(m.get('name') == parent for m in self.iolm_modules)
if not parent_found:
issues.append(f"Solenoid module '{solenoid.get('name')}' references non-existent IOLM parent '{parent}'")
if issues:
print(f"Warning: {len(issues)} parent-child relationship issues detected")
for issue in issues[:5]:
print(f"- {issue}")
else:
print("OK: Parent-child relationships valid")
def _validate_split_integrity(self, group1: Dict, group2: Dict):
"""Validate that the split maintains parent-child relationships."""
# Validating split integrity
# Build module name to group mapping
group1_modules = set()
group2_modules = set()
for module_list in group1.values():
for module in module_list:
group1_modules.add(module.get('name', ''))
for module_list in group2.values():
for module in module_list:
group2_modules.add(module.get('name', ''))
violations = []
# Check all child modules to ensure they're in the same group as their parents
all_child_modules = [
('beacon', self.beacon_modules),
('lpe', self.lpe_modules),
('solenoid', self.solenoid_modules),
('fioh', self.fioh_modules)
]
for child_type, child_list in all_child_modules:
for child in child_list:
child_name = child.get('name', '')
parent_name = child.get('parent_module', '')
if not parent_name:
continue # Skip modules without parents (handled in validation)
child_in_group1 = child_name in group1_modules
child_in_group2 = child_name in group2_modules
parent_in_group1 = parent_name in group1_modules
parent_in_group2 = parent_name in group2_modules
# Child and parent must be in the same group
if (child_in_group1 and not parent_in_group1) or (child_in_group2 and not parent_in_group2):
violations.append(f"{child_type} '{child_name}' separated from parent '{parent_name}'")
if violations:
print(f"Error: {len(violations)} split integrity violations detected")
for violation in violations[:5]:
print(f"- {violation}")
raise ValueError("Split integrity validation failed - parent-child relationships would be broken")
else:
print("OK: Split integrity valid")
def _configure_controller_settings(self, root):
"""Configure controller-specific settings."""
# Update root attributes
root.set("TargetName", self.controller_name)
root.set("TargetType", "Controller")
root.set("ContainsContext", "false")
root.set("ExportOptions", "NoRawData L5KData DecoratedData ForceProtectedEncoding AllProjDocTrans")
# Update Controller element
controller = root.find(".//Controller[@Use='Target']")
if controller is not None:
controller.set("Name", self.controller_name)
controller.set("ProcessorType", "1756-L83ES")
controller.set("MajorRev", "36")
controller.set("MinorRev", "11")
controller.set("ProjectSN", "16#0000_0000")
controller.set("MatchProjectToController", "false")
controller.set("CanUseRPIFromProducer", "false")
controller.set("InhibitAutomaticFirmwareUpdate", "0")
controller.set("PassThroughConfiguration", "EnabledWithAppend")
controller.set("DownloadProjectDocumentationAndExtendedProperties", "true")
controller.set("DownloadProjectCustomProperties", "true")
controller.set("ReportMinorOverflow", "false")
controller.set("AutoDiagsEnabled", "true")
controller.set("WebServerEnabled", "false")
# Add SafetyInfo if not exists
safety_info = controller.find("SafetyInfo")
if safety_info is None:
safety_info = ET.SubElement(controller, "SafetyInfo")
safety_info.set("SafetyLocked", "false")
safety_info.set("SignatureRunModeProtect", "false")
safety_info.set("ConfigureSafetyIOAlways", "false")
safety_info.set("SafetyLevel", "SIL2/PLd")
def _add_all_modules(self, root):
"""Add all modules to the controller's Modules section."""
controller = root.find(".//Controller[@Use='Target']")
if controller is None:
raise ValueError("Controller element not found")
# Find or create Modules section
modules_section = controller.find("Modules")
if modules_section is None:
# Find position after SafetyInfo
safety_info = controller.find("SafetyInfo")
if safety_info is not None:
idx = list(controller).index(safety_info) + 1
modules_section = ET.Element("Modules")
controller.insert(idx, modules_section)
else:
modules_section = ET.SubElement(controller, "Modules")
# Configure Local module
self._configure_local_module(modules_section)
# Add EN4TR module
self._add_en4tr_module(modules_section)
# Add Excel-sourced modules (including IB16/IB16S/OB16E from Excel data)
self._add_excel_modules(modules_section)
def _configure_local_module(self, modules_section):
"""Configure the Local module properly."""
local_module = modules_section.find("Module[@Name='Local']")
if local_module is not None:
# Update Local module safety networks
ports = local_module.find("Ports")
if ports is not None:
for port in ports.findall("Port"):
if port.get("Id") == "1":
port.set("SafetyNetwork", "16#0000_4c33_031d_8f1b")
bus = port.find("Bus")
if bus is None:
bus = ET.SubElement(port, "Bus")
bus.set("Size", "10")
elif port.get("Id") == "2":
port.set("SafetyNetwork", "16#0000_4c33_031d_8f1c")
def _add_en4tr_module(self, modules_section):
"""Add EN4TR module to the Modules section."""
config = create_en4tr_module("SLOT2_EN4TR", self.controller_name)
generator = self._set_generator_boilerplate_dir(EN4TRModuleGenerator(config))
generator.load_boilerplate()
generator.apply_updates()
module = generator.root.find(".//Module[@Name='SLOT2_EN4TR']")
if module is not None:
module.set("ParentModule", "Local")
module.set("ParentModPortId", "1")
port = module.find(".//Port[@Type='Ethernet']")
if port is not None:
port.set("Address", "11.200.1.1")
modules_section.append(module)
def _add_excel_modules(self, modules_section):
"""Add all modules sourced from Excel data."""
# Generic handling for most module families (FIOH and Beacon keep
# specialised helpers due to their CDATA or dict-based quirks).
factory_map = {
"iolm_modules": lambda entry: self._set_generator_boilerplate_dir(
M12DRModuleGenerator.from_excel(self.data_processor.modules[entry["name"]])
),
"zmx_modules": lambda entry: self._set_generator_boilerplate_dir(
ZMXModuleGenerator.from_excel(self.data_processor.modules[entry["name"]])
),
"extendo_modules": lambda entry: self._set_generator_boilerplate_dir(
ExtendoModuleGenerator.from_excel(self.data_processor.modules[entry["name"]])
),
"fioh_modules": lambda entry: self._set_generator_boilerplate_dir(
TurckHubModuleGenerator.from_excel(self.data_processor.modules[entry["name"]])
),
"apf_modules": lambda entry: self._set_generator_boilerplate_dir(
APFModuleGenerator.from_excel(self.data_processor.modules[entry["name"]], hp=entry["hp"])
),
"vfd_modules": lambda entry: self._set_generator_boilerplate_dir(
VFDModuleGenerator.from_excel(self.data_processor.modules[entry["name"]], hp=entry["hp"])
),
"dpm_modules": lambda entry: self._set_generator_boilerplate_dir(
DPMModuleGenerator.from_excel(self.data_processor.modules[entry["name"]])
),
"pmm_modules": lambda entry: PMMModuleGenerator.from_excel(
self.data_processor.modules[entry["name"]]
),
"sio_modules": lambda entry: self._set_generator_boilerplate_dir(
SIOModuleGenerator.from_excel(self.data_processor.modules[entry["name"]])
),
"beacon_modules": lambda entry: TL70BeaconGenerator.from_mapping(entry),
"lpe_modules": lambda entry: LPEBoilerplateGenerator.from_mapping(entry),
"ib16_modules": lambda entry: IB16ModuleGenerator.from_mapping(entry, comments=entry['comments']),
"ib16s_modules": lambda entry: IB16SModuleGenerator.from_mapping(entry, comments=entry['comments']),
"ob16e_modules": lambda entry: OB16EModuleGenerator.from_mapping(entry, comments=entry['comments']),
"solenoid_modules": lambda entry: FestoSolenoidGenerator.from_mapping(entry),
}
ordered_lists = [
"iolm_modules",
"zmx_modules",
"extendo_modules",
"apf_modules",
"vfd_modules",
"dpm_modules",
"pmm_modules",
"sio_modules",
"beacon_modules",
"lpe_modules",
"fioh_modules",
"ib16_modules",
"ib16s_modules",
"ob16e_modules",
"solenoid_modules",
]
for list_name in ordered_lists:
raw_entries = getattr(self, list_name, [])
entries = raw_entries if list_name == "beacon_modules" else sorted(raw_entries, key=lambda e: e.get('name', ''))
if not entries:
continue
# Add summary message for IOLM modules similar to FIOH
if list_name == "iolm_modules":
print(f"Created {len(entries)} IOLM modules with boilerplate selection:")
factory = factory_map[list_name]
for entry in entries:
try:
gen = factory(entry)
# Ensure boilerplate directory is set for all generators
gen = self._set_generator_boilerplate_dir(gen)
# Re-apply updates after boilerplate directory change
if hasattr(gen, 'apply_updates'):
gen.apply_updates()
# Apply optimizations for large projects
if self._optimize_for_large_projects():
gen = self._apply_module_optimizations(gen)
module_elem = gen.root.find(
f".//Module[@Name='{entry['name']}']"
)
if module_elem is not None:
# Preserve APF safety attributes as before
if list_name == "apf_modules":
module_elem.set("SafetyNetwork", "16#0000_4c14_03e7_33a8")
module_elem.set("SafetyEnabled", "true")
modules_section.append(module_elem)
except Exception as e:
print(
f" ERROR: Failed to generate {list_name[:-8]} module {entry['name']}: {e}"
)
# No extra post-processing needed; DPM and LPE are now added via the generic loop.
def _create_registered_en4tr_modules(self, modules_section):
"""Create EN4TR modules that were registered during Excel module processing."""
from models.mcm_pattern_utils import get_required_en4tr_modules, create_en4tr_modules_from_registry, clear_required_en4tr_modules
# Get registered EN4TR modules
required_modules = get_required_en4tr_modules()
if not required_modules:
print(" No EN4TR modules registered for creation")
return
print(f" Creating {len(required_modules)} registered EN4TR modules:")
for module_name, (slot, ip) in required_modules.items():
print(f" - {module_name}: Slot {slot}, IP {ip}")
# Create EN4TR module configs from registry
en4tr_configs = create_en4tr_modules_from_registry()
# Generate and add EN4TR modules to the project
for module_name, config in en4tr_configs.items():
try:
# Create generator
generator = self._set_generator_boilerplate_dir(EN4TRModuleGenerator(config))
generator.load_boilerplate()
generator.apply_updates()
# Extract module element
module_elem = generator.root.find(f".//Module[@Name='{module_name}']")
if module_elem is not None:
# Set parent to Local chassis
module_elem.set("ParentModule", "Local")
module_elem.set("ParentModPortId", "1")
# Insert EN4TR module in correct position (after existing EN4TR modules)
insert_position = self._find_en4tr_insert_position(modules_section, module_name)
modules_section.insert(insert_position, module_elem)
print(f" ✅ Created EN4TR module: {module_name} -> {config.ethernet_address} (inserted at position {insert_position})")
else:
print(f" ❌ Failed to find module element for: {module_name}")
except Exception as e:
print(f" ❌ Error creating EN4TR module {module_name}: {e}")
# Clear registry after creation
clear_required_en4tr_modules()
print(f" Cleared EN4TR registry")
def _find_en4tr_insert_position(self, modules_section, new_module_name):
"""Find the correct position to insert a new EN4TR module.
Args:
modules_section: The XML modules section element
new_module_name: Name of the new EN4TR module to insert (e.g., "SLOT3_EN4TR")
Returns:
Index where the new module should be inserted
"""
import re
# Extract slot number from new module name
match = re.search(r'SLOT(\d+)_EN4TR', new_module_name, re.IGNORECASE)
new_slot = int(match.group(1)) if match else 999
last_en4tr_position = -1
# Find all existing EN4TR modules and their positions
for i, module in enumerate(modules_section):
if module.tag == "Module" and module.get("Name", "").endswith("_EN4TR"):
module_name = module.get("Name", "")
match = re.search(r'SLOT(\d+)_EN4TR', module_name, re.IGNORECASE)
existing_slot = int(match.group(1)) if match else 0
if existing_slot < new_slot:
last_en4tr_position = i
elif existing_slot >= new_slot:
# Insert before this higher-numbered EN4TR
return i
# Insert after the last EN4TR module found, or at the end if none found
return last_en4tr_position + 1 if last_en4tr_position >= 0 else len(modules_section)
def _extract_beacon_name(self, description: str) -> str:
"""Extract beacon name from description (e.g., 'S011047_BCN1 2 STACK IO LINK BEACON' -> 'S011047_BCN1')."""
if not description:
return ""
# Split by whitespace and take the first part that contains BCN
parts = description.split()
for part in parts:
if "BCN" in part.upper():
return part
# If no BCN found, return first part (fallback)
return parts[0] if parts else ""
def _extract_lpe_name(self, description: str) -> str:
"""Extract LPE name from description (e.g., 'LPE1_A' -> 'LPE1' or 'S011047_LPE1 2 STACK' -> 'S011047_LPE1')."""
if not description:
return ""
# Split by whitespace and take the first part that contains LPE
parts = description.split()
for part in parts:
if "LPE" in part.upper():
return part
# If no LPE found, return first part (fallback)
return parts[0] if parts else ""
def _extract_solenoid_name(self, description: str) -> str:
"""Extract solenoid name from description (e.g., 'UL11_13_SOL1 DIVERT MODULE' -> 'UL11_13_SOL1')."""
if not description:
return ""
# Split by whitespace and take the first part that contains SOL
parts = description.split()
for part in parts:
if "SOL" in part.upper():
return part
# If no SOL found, return first part (fallback)
return parts[0] if parts else ""
def main():
"""Example usage of the enhanced MCM generator."""
import sys
import json
# Check for command-line arguments
split_mode = "--split" in sys.argv
project_name = "MTN6_MCM01_UL1_UL3"
excel_file = "MCM01_UL1_UL3_Data.xlsx"
zones_dict = None
# Parse zones argument
if "--zones" in sys.argv:
zones_index = sys.argv.index("--zones")
if zones_index + 1 < len(sys.argv):
zones_json = sys.argv[zones_index + 1]
try:
zones_dict = json.loads(zones_json)
print(f"Using provided zones configuration with {len(zones_dict)} zones")
except json.JSONDecodeError as e:
print(f"ERROR: Invalid zones JSON: {e}")
return
# Allow specifying Excel file, project name, and boilerplate directory via command line
# Usage: python enhanced_mcm_generator.py <excel_file> <project_name> [boilerplate_dir] [--split] [--zones <json>]
boilerplate_dir = "boilerplate" # Default
if len(sys.argv) > 1 and not sys.argv[1].startswith("--"):
excel_file = sys.argv[1]
if len(sys.argv) > 2 and not sys.argv[2].startswith("--"):
project_name = sys.argv[2]
if len(sys.argv) > 3 and not sys.argv[3].startswith("--"):
boilerplate_dir = sys.argv[3]
print("Enhanced MCM Generator")
print(f"- Project: {project_name}")
print(f"- Excel: {excel_file}")
print(f"- Boilerplate: {boilerplate_dir}")
print(f"- Mode: {'Split' if split_mode else 'Single file'}")
print("-" * 50)
# Create generator with zones and boilerplate directory
generator = EnhancedMCMGenerator(project_name, excel_file, zones_dict, boilerplate_dir)
# Load and process Excel data
if generator.load_and_process_data():
if split_mode:
# Generate split projects
file1, file2 = generator.generate_complete_project(split_mode=True)
print("Split generation complete")
print(f"- {file1}")
print(f"- {file2}")
else:
# Generate single project
output_files = generator.generate_complete_project(split_mode=False)
print("Single file generation complete")
for output_file in output_files:
print(f"- {output_file}")
else:
print("ERROR: Failed to load/process Excel data")
def demo_split_usage():
"""Demonstrate split functionality usage."""
print("Demo: Creating split projects...")
# Create generator
generator = EnhancedMCMGenerator("DEMO_MCM", "MCM04_Data.xlsx")
# Load and process data
if generator.load_and_process_data():
# Generate split projects
file1, file2 = generator.generate_split_projects()
print(f"Demo complete! Generated:")
print(f" {file1}")
print(f" {file2}")
# Show module distribution
group1, group2 = generator._create_balanced_module_groups()
print(f"\nModule distribution:")
print(f" Part 1: {generator._count_modules_in_group(group1)} modules")
print(f" Part 2: {generator._count_modules_in_group(group2)} modules")
return file1, file2
else:
print("ERROR: Failed to load demo data")
return None, None
if __name__ == "__main__":
main()