Resolved merge conflicts using main branch versions
This commit is contained in:
commit
e0cacb6f13
91844
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM01-As-Built.pdf
Normal file
91844
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM01-As-Built.pdf
Normal file
File diff suppressed because one or more lines are too long
Binary file not shown.
79022
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM02-As-Built.pdf
Normal file
79022
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM02-As-Built.pdf
Normal file
File diff suppressed because it is too large
Load Diff
Binary file not shown.
31588
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM03-As-Built.pdf
Normal file
31588
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM03-As-Built.pdf
Normal file
File diff suppressed because it is too large
Load Diff
Binary file not shown.
55850
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM04-As-Built.pdf
Normal file
55850
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM04-As-Built.pdf
Normal file
File diff suppressed because one or more lines are too long
Binary file not shown.
54490
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM05-As-Built.pdf
Normal file
54490
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM05-As-Built.pdf
Normal file
File diff suppressed because one or more lines are too long
Binary file not shown.
48229
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM06-As-Built.pdf
Normal file
48229
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM06-As-Built.pdf
Normal file
File diff suppressed because it is too large
Load Diff
Binary file not shown.
70115
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM07-As-Built.pdf
Normal file
70115
Additional/duplicate_PDF/2429_AMAZON-MTN6-MCM07-As-Built.pdf
Normal file
File diff suppressed because it is too large
Load Diff
Binary file not shown.
316
Additional/duplicate_PDF/check_duplicate.py
Normal file
316
Additional/duplicate_PDF/check_duplicate.py
Normal file
@ -0,0 +1,316 @@
|
|||||||
|
"""
|
||||||
|
Scan each PDF in this folder and find duplicate connection tags (inside the SAME PDF)
|
||||||
|
based on user-provided wildcard patterns (using '*' as "anything").
|
||||||
|
|
||||||
|
Output: one Excel file per PDF: <PDF_NAME>_duplicates.xlsx
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, Iterable, List, Set, Tuple
|
||||||
|
|
||||||
|
|
||||||
|
# -----------------------------
|
||||||
|
# USER PATTERNS (wildcards)
|
||||||
|
# -----------------------------
|
||||||
|
# '*' means "any characters". Everything else is treated literally.
|
||||||
|
WILDCARD_PATTERNS: List[str] = [
|
||||||
|
"*_*_TPE*",
|
||||||
|
"*_*_S*_PB",
|
||||||
|
"*_*_S*_PB_LT",
|
||||||
|
"*_*_JR*_PB",
|
||||||
|
"*_*_JR*_PB_LT",
|
||||||
|
"*_*_SS*_SPB",
|
||||||
|
"*_*_SS*_STPB",
|
||||||
|
"*_*_SS*_SPB_LT",
|
||||||
|
"*_*_EN*_PB",
|
||||||
|
"*_*_EN*_PB_LT",
|
||||||
|
"*_*_PE*",
|
||||||
|
"*_*_LPE*",
|
||||||
|
"*_*_FPE*",
|
||||||
|
"*_*_BCN*_R",
|
||||||
|
"*_*_BCN*_B",
|
||||||
|
"*_*_BCN*_A",
|
||||||
|
"*_*_BCN*_G",
|
||||||
|
"*_*_BCN*_H",
|
||||||
|
"*_*_EPC*_1",
|
||||||
|
"*_*_EPC*_2",
|
||||||
|
"*_*_VFD1_DISC",
|
||||||
|
"*_*_*_STO1",
|
||||||
|
"*_*_*_ESTOP1",
|
||||||
|
"*_*_LS*",
|
||||||
|
"*_*_ENC*",
|
||||||
|
"*_*_ENW*",
|
||||||
|
"*_*_ENS*",
|
||||||
|
"*_*_PX*",
|
||||||
|
"*_*_SOL*",
|
||||||
|
"*_*_DIV*",
|
||||||
|
"*_*_PS*",
|
||||||
|
"*_*_BDS*",
|
||||||
|
"*_*_TS*",
|
||||||
|
]
|
||||||
|
|
||||||
|
# -----------------------------
|
||||||
|
# CABLE PATTERNS (separate check)
|
||||||
|
# -----------------------------
|
||||||
|
# Rule: if a cable label appears more than 2 times in the SAME PDF => duplicated/overused.
|
||||||
|
CABLE_WILDCARD_PATTERNS: List[str] = [
|
||||||
|
"*_*_VFD*_I0",
|
||||||
|
"*_*_VFD*_I1",
|
||||||
|
"*_*_VFD*_I2",
|
||||||
|
"*_*_VFD*_I3",
|
||||||
|
"*_*_VFD*_IO0",
|
||||||
|
"*_*_VFD*_IO1",
|
||||||
|
"*_*_VFD*_SI0",
|
||||||
|
"*_*_VFD*_SI1",
|
||||||
|
"*_*_VFD*_SI2",
|
||||||
|
"*_*_VFD*_SI3",
|
||||||
|
"*_*_VFD*_SO0",
|
||||||
|
"*_FIO*_P0_C0",
|
||||||
|
"*_FIO*_P0_C1",
|
||||||
|
"*_FIO*_P1_C2",
|
||||||
|
"*_FIO*_P1_C3",
|
||||||
|
"*_FIO*_P2_C4",
|
||||||
|
"*_FIO*_P2_C5",
|
||||||
|
"*_FIO*_P3_C6",
|
||||||
|
"*_FIO*_P3_C7",
|
||||||
|
"*_FIO*_P4_C8",
|
||||||
|
"*_FIO*_P4_C9",
|
||||||
|
"*_FIO*_P5_C10",
|
||||||
|
"*_FIO*_P5_C11",
|
||||||
|
"*_FIO*_P6_C12",
|
||||||
|
"*_FIO*_P6_C13",
|
||||||
|
"*_FIO*_P7_C14",
|
||||||
|
"*_FIO*_P7_C15",
|
||||||
|
"*_FIOH*_C7_A",
|
||||||
|
"*_FIOH*_C7_B",
|
||||||
|
"*_FIOH*_C5_A",
|
||||||
|
"*_FIOH*_C5_B",
|
||||||
|
"*_FIOH*_C3_A",
|
||||||
|
"*_FIOH*_C3_B",
|
||||||
|
"*_FIOH*_C1_A",
|
||||||
|
"*_FIOH*_C1_B",
|
||||||
|
"*_FIOH*_C8_A",
|
||||||
|
"*_FIOH*_C8_B",
|
||||||
|
"*_FIOH*_C6_A",
|
||||||
|
"*_FIOH*_C6_B",
|
||||||
|
"*_FIOH*_C4_A",
|
||||||
|
"*_FIOH*_C4_B",
|
||||||
|
"*_FIOH*_C2_A",
|
||||||
|
"*_FIOH*_C2_B",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# Candidate token: something like "PS3_2_VFD1_DISC" (>= 2 underscore-separated parts)
|
||||||
|
TOKEN_RE = re.compile(r"\b[A-Z0-9]+(?:_[A-Z0-9]+)+\b", re.IGNORECASE)
|
||||||
|
|
||||||
|
|
||||||
|
def _compile_wildcard_patterns(patterns: Iterable[str]) -> List[re.Pattern]:
|
||||||
|
compiled: List[re.Pattern] = []
|
||||||
|
for p in patterns:
|
||||||
|
# Treat everything literally except '*' which becomes '.*'
|
||||||
|
parts = [re.escape(x) for x in p.split("*")]
|
||||||
|
regex = ".*".join(parts)
|
||||||
|
# Match full token
|
||||||
|
compiled.append(re.compile(rf"^{regex}$", re.IGNORECASE))
|
||||||
|
return compiled
|
||||||
|
|
||||||
|
|
||||||
|
def _tokenize(text: str) -> List[str]:
|
||||||
|
# Normalize common oddities
|
||||||
|
text = text.replace("\u00ad", "") # soft hyphen
|
||||||
|
# PDFs sometimes insert whitespace/newlines around underscores; normalize that.
|
||||||
|
# Example: "PS3_2_VFD1_\nDISC" -> "PS3_2_VFD1_DISC"
|
||||||
|
text = re.sub(r"\s*_\s*", "_", text)
|
||||||
|
return [m.group(0).upper() for m in TOKEN_RE.finditer(text)]
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_deps() -> Tuple[object, object]:
|
||||||
|
"""
|
||||||
|
Returns (fitz_module, pandas_module). Exits with helpful message if missing.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
import fitz # PyMuPDF
|
||||||
|
except Exception:
|
||||||
|
print(
|
||||||
|
"Missing dependency: PyMuPDF\n"
|
||||||
|
"Install with:\n"
|
||||||
|
" python -m pip install --upgrade pip\n"
|
||||||
|
" python -m pip install pymupdf\n",
|
||||||
|
file=sys.stderr,
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
|
||||||
|
try:
|
||||||
|
import pandas as pd
|
||||||
|
except Exception:
|
||||||
|
print(
|
||||||
|
"Missing dependency: pandas (and openpyxl for Excel)\n"
|
||||||
|
"Install with:\n"
|
||||||
|
" python -m pip install --upgrade pip\n"
|
||||||
|
" python -m pip install pandas openpyxl\n",
|
||||||
|
file=sys.stderr,
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
|
||||||
|
return fitz, pd
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Occurrence:
|
||||||
|
token: str
|
||||||
|
pages: Set[int] # 1-based page numbers
|
||||||
|
count: int
|
||||||
|
|
||||||
|
|
||||||
|
def find_duplicates_in_pdf(pdf_path: Path, compiled_patterns: List[re.Pattern]) -> List[Occurrence]:
|
||||||
|
fitz, _ = _ensure_deps()
|
||||||
|
|
||||||
|
occurrences: Dict[str, Occurrence] = {}
|
||||||
|
|
||||||
|
with fitz.open(pdf_path) as doc:
|
||||||
|
for page_index in range(doc.page_count):
|
||||||
|
page = doc.load_page(page_index)
|
||||||
|
text = page.get_text("text") or ""
|
||||||
|
tokens = _tokenize(text)
|
||||||
|
if not tokens:
|
||||||
|
continue
|
||||||
|
|
||||||
|
for t in tokens:
|
||||||
|
if not any(r.match(t) for r in compiled_patterns):
|
||||||
|
continue
|
||||||
|
if t not in occurrences:
|
||||||
|
occurrences[t] = Occurrence(token=t, pages=set(), count=0)
|
||||||
|
occurrences[t].pages.add(page_index + 1)
|
||||||
|
occurrences[t].count += 1
|
||||||
|
|
||||||
|
# "Duplicate" = appears on more than one page (what you asked for)
|
||||||
|
dups = [o for o in occurrences.values() if len(o.pages) > 1]
|
||||||
|
dups.sort(key=lambda o: (-len(o.pages), -o.count, o.token))
|
||||||
|
return dups
|
||||||
|
|
||||||
|
|
||||||
|
def find_cable_overuse_in_pdf(
|
||||||
|
pdf_path: Path,
|
||||||
|
compiled_patterns: List[re.Pattern],
|
||||||
|
*,
|
||||||
|
allowed_occurrences: int = 2,
|
||||||
|
) -> List[Occurrence]:
|
||||||
|
"""
|
||||||
|
Separate check from page-duplicate logic:
|
||||||
|
- Cable labels are often printed twice (both ends) => OK up to allowed_occurrences (default 2).
|
||||||
|
- If total occurrences > allowed_occurrences, flag it.
|
||||||
|
"""
|
||||||
|
fitz, _ = _ensure_deps()
|
||||||
|
|
||||||
|
occurrences: Dict[str, Occurrence] = {}
|
||||||
|
|
||||||
|
with fitz.open(pdf_path) as doc:
|
||||||
|
for page_index in range(doc.page_count):
|
||||||
|
page = doc.load_page(page_index)
|
||||||
|
text = page.get_text("text") or ""
|
||||||
|
tokens = _tokenize(text)
|
||||||
|
if not tokens:
|
||||||
|
continue
|
||||||
|
|
||||||
|
for t in tokens:
|
||||||
|
if not any(r.match(t) for r in compiled_patterns):
|
||||||
|
continue
|
||||||
|
if t not in occurrences:
|
||||||
|
occurrences[t] = Occurrence(token=t, pages=set(), count=0)
|
||||||
|
occurrences[t].pages.add(page_index + 1)
|
||||||
|
occurrences[t].count += 1
|
||||||
|
|
||||||
|
overused = [o for o in occurrences.values() if o.count > allowed_occurrences]
|
||||||
|
overused.sort(key=lambda o: (-o.count, -len(o.pages), o.token))
|
||||||
|
return overused
|
||||||
|
|
||||||
|
|
||||||
|
def write_excel_for_pdf(
|
||||||
|
pdf_path: Path,
|
||||||
|
duplicates: List[Occurrence],
|
||||||
|
cable_overuse: List[Occurrence],
|
||||||
|
) -> Path:
|
||||||
|
_, pd = _ensure_deps()
|
||||||
|
|
||||||
|
out_path = pdf_path.with_name(pdf_path.stem + "_duplicates.xlsx")
|
||||||
|
rows = []
|
||||||
|
for d in duplicates:
|
||||||
|
rows.append(
|
||||||
|
{
|
||||||
|
"Token": d.token,
|
||||||
|
"Pages": ", ".join(map(str, sorted(d.pages))),
|
||||||
|
"UniquePagesCount": len(d.pages),
|
||||||
|
"TotalOccurrences": d.count,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
df = pd.DataFrame(rows, columns=["Token", "Pages", "UniquePagesCount", "TotalOccurrences"])
|
||||||
|
|
||||||
|
cable_rows = []
|
||||||
|
for c in cable_overuse:
|
||||||
|
cable_rows.append(
|
||||||
|
{
|
||||||
|
"CableLabel": c.token,
|
||||||
|
"Pages": ", ".join(map(str, sorted(c.pages))),
|
||||||
|
"UniquePagesCount": len(c.pages),
|
||||||
|
"TotalOccurrences": c.count,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
cable_df = pd.DataFrame(
|
||||||
|
cable_rows, columns=["CableLabel", "Pages", "UniquePagesCount", "TotalOccurrences"]
|
||||||
|
)
|
||||||
|
|
||||||
|
with pd.ExcelWriter(out_path, engine="openpyxl") as writer:
|
||||||
|
df.to_excel(writer, index=False, sheet_name="Duplicates")
|
||||||
|
cable_df.to_excel(writer, index=False, sheet_name="CableOveruse")
|
||||||
|
|
||||||
|
summary = pd.DataFrame(
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"PDF": pdf_path.name,
|
||||||
|
"DuplicateTokens": len(duplicates),
|
||||||
|
"CableOverusedLabels": len(cable_overuse),
|
||||||
|
},
|
||||||
|
]
|
||||||
|
)
|
||||||
|
summary.to_excel(writer, index=False, sheet_name="Summary")
|
||||||
|
|
||||||
|
return out_path
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> int:
|
||||||
|
base_dir = Path(__file__).resolve().parent
|
||||||
|
pdfs = sorted(base_dir.glob("*.pdf"))
|
||||||
|
if not pdfs:
|
||||||
|
print(f"No PDFs found in: {base_dir}")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
compiled_patterns = _compile_wildcard_patterns(WILDCARD_PATTERNS)
|
||||||
|
compiled_cable_patterns = _compile_wildcard_patterns(CABLE_WILDCARD_PATTERNS)
|
||||||
|
|
||||||
|
print(f"Found {len(pdfs)} PDF(s). Checking duplicates INSIDE each PDF only...")
|
||||||
|
for pdf in pdfs:
|
||||||
|
print(f"\n--- {pdf.name} ---")
|
||||||
|
try:
|
||||||
|
dups = find_duplicates_in_pdf(pdf, compiled_patterns)
|
||||||
|
cable_overuse = find_cable_overuse_in_pdf(pdf, compiled_cable_patterns, allowed_occurrences=2)
|
||||||
|
out_xlsx = write_excel_for_pdf(pdf, dups, cable_overuse)
|
||||||
|
print(f"Duplicate tokens (appear on >1 page): {len(dups)}")
|
||||||
|
print(f"Cable labels overused (total occurrences > 2): {len(cable_overuse)}")
|
||||||
|
print(f"Excel written: {out_xlsx.name}")
|
||||||
|
except Exception as e:
|
||||||
|
print(f"ERROR processing {pdf.name}: {e}", file=sys.stderr)
|
||||||
|
return 2
|
||||||
|
|
||||||
|
print("\nDone.")
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
raise SystemExit(main())
|
||||||
|
|
||||||
BIN
Additional/format/Amazon CDW5_IP Addresses_Local.xlsx
Normal file
BIN
Additional/format/Amazon CDW5_IP Addresses_Local.xlsx
Normal file
Binary file not shown.
BIN
Additional/format/Amazon CDW5_IP Addresses_Local_formatted.xlsx
Normal file
BIN
Additional/format/Amazon CDW5_IP Addresses_Local_formatted.xlsx
Normal file
Binary file not shown.
232
Additional/format/format.py
Normal file
232
Additional/format/format.py
Normal file
@ -0,0 +1,232 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
"""
|
||||||
|
Convert Excel format from network structure format to standardized format.
|
||||||
|
|
||||||
|
The source file has network structure data that needs to be extracted and reformatted
|
||||||
|
to match the target format with columns: DPM, DPM_IP, Name, PartNumber, IP
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python format.py <source_file.xlsx> [output_file.xlsx]
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
python format.py "Amazon CDW5_IP Addresses_Local.xlsx"
|
||||||
|
python format.py "input.xlsx" "output.xlsx"
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import pandas as pd
|
||||||
|
from pathlib import Path
|
||||||
|
from openpyxl import load_workbook
|
||||||
|
from openpyxl.styles import Font, Alignment, PatternFill, Border, Side
|
||||||
|
from openpyxl.utils import get_column_letter
|
||||||
|
|
||||||
|
|
||||||
|
def convert_format(source_file, output_file):
|
||||||
|
"""Convert source Excel format to target format."""
|
||||||
|
print(f"Reading source file: {source_file}")
|
||||||
|
# Read with header=2 to skip first 2 rows and use row 2 as header
|
||||||
|
source_df = pd.read_excel(source_file, header=2)
|
||||||
|
|
||||||
|
print(f"Source file shape: {source_df.shape}")
|
||||||
|
print(f"Source columns: {list(source_df.columns)}")
|
||||||
|
|
||||||
|
# Find the column names - they should be in row 2
|
||||||
|
print("\nFirst few rows of source:")
|
||||||
|
print(source_df.head(10).to_string())
|
||||||
|
|
||||||
|
# Map source columns to target columns
|
||||||
|
# Based on the structure, we need to find columns by their actual header values
|
||||||
|
cols = list(source_df.columns)
|
||||||
|
|
||||||
|
# Find columns by exact name match first
|
||||||
|
dpm_col = None
|
||||||
|
dpm_ip_col = None
|
||||||
|
name_col = None
|
||||||
|
part_number_col = None
|
||||||
|
ip_col = None
|
||||||
|
|
||||||
|
for col in source_df.columns:
|
||||||
|
col_str = str(col).strip()
|
||||||
|
if col_str == 'DPM' and 'PORT' not in col_str:
|
||||||
|
dpm_col = col
|
||||||
|
elif col_str == 'IP' and dpm_ip_col is None and 'PORT' not in col_str:
|
||||||
|
# First IP column is DPM_IP
|
||||||
|
dpm_ip_col = col
|
||||||
|
elif col_str == 'Assigned Device':
|
||||||
|
name_col = col
|
||||||
|
elif col_str == 'Part Number':
|
||||||
|
part_number_col = col
|
||||||
|
elif col_str == 'IP.1' or (col_str == 'IP' and dpm_ip_col is not None):
|
||||||
|
# Second IP column is device IP (pandas renames duplicate columns)
|
||||||
|
ip_col = col
|
||||||
|
|
||||||
|
# Fallback: use column indices if names don't match
|
||||||
|
# Based on the structure: columns are at indices 3, 4, 5, 6, 7
|
||||||
|
if dpm_col is None and len(cols) >= 8:
|
||||||
|
dpm_col = cols[3] # 'DPM'
|
||||||
|
dpm_ip_col = cols[4] # 'IP'
|
||||||
|
name_col = cols[5] # 'Assigned Device'
|
||||||
|
part_number_col = cols[6] # 'Part Number'
|
||||||
|
ip_col = cols[7] # 'IP.1' (second IP column)
|
||||||
|
|
||||||
|
print(f"\nColumn mapping:")
|
||||||
|
print(f" DPM: {dpm_col}")
|
||||||
|
print(f" DPM_IP: {dpm_ip_col}")
|
||||||
|
print(f" Name: {name_col}")
|
||||||
|
print(f" PartNumber: {part_number_col}")
|
||||||
|
print(f" IP: {ip_col}")
|
||||||
|
|
||||||
|
# Validate that we found the essential columns
|
||||||
|
if not dpm_col or not name_col:
|
||||||
|
raise ValueError(f"Could not find required columns. Found: DPM={dpm_col}, Name={name_col}")
|
||||||
|
|
||||||
|
# Forward-fill DPM and DPM_IP values (they're merged across multiple rows)
|
||||||
|
source_df[dpm_col] = source_df[dpm_col].ffill()
|
||||||
|
if dpm_ip_col:
|
||||||
|
source_df[dpm_ip_col] = source_df[dpm_ip_col].ffill()
|
||||||
|
|
||||||
|
# Extract data
|
||||||
|
output_data = []
|
||||||
|
for idx, row in source_df.iterrows():
|
||||||
|
dpm = row[dpm_col] if dpm_col and pd.notna(row[dpm_col]) else None
|
||||||
|
dpm_ip = row[dpm_ip_col] if dpm_ip_col and pd.notna(row[dpm_ip_col]) else None
|
||||||
|
name = row[name_col] if name_col and pd.notna(row[name_col]) else None
|
||||||
|
part_number = row[part_number_col] if part_number_col and pd.notna(row[part_number_col]) else None
|
||||||
|
ip = row[ip_col] if ip_col and pd.notna(row[ip_col]) else None
|
||||||
|
|
||||||
|
# Only add rows that have at least DPM and Name (the essential data)
|
||||||
|
if pd.notna(dpm) and pd.notna(name):
|
||||||
|
output_data.append({
|
||||||
|
'DPM': str(dpm).strip(),
|
||||||
|
'DPM_IP': str(dpm_ip).strip() if pd.notna(dpm_ip) else '',
|
||||||
|
'Name': str(name).strip(),
|
||||||
|
'PartNumber': str(part_number).strip() if pd.notna(part_number) else '',
|
||||||
|
'IP': str(ip).strip() if pd.notna(ip) else ''
|
||||||
|
})
|
||||||
|
|
||||||
|
# Create output dataframe
|
||||||
|
output_df = pd.DataFrame(output_data)
|
||||||
|
|
||||||
|
print(f"\nExtracted {len(output_df)} rows of network data")
|
||||||
|
print("\nFirst few rows of output:")
|
||||||
|
print(output_df.head(10).to_string())
|
||||||
|
|
||||||
|
# Write output with formatting
|
||||||
|
print(f"\nWriting output to: {output_file}")
|
||||||
|
|
||||||
|
with pd.ExcelWriter(output_file, engine='openpyxl') as writer:
|
||||||
|
output_df.to_excel(writer, sheet_name='NETWORK_PLC', index=False)
|
||||||
|
|
||||||
|
# Get the worksheet
|
||||||
|
output_ws = writer.sheets['NETWORK_PLC']
|
||||||
|
|
||||||
|
# Set column widths (reasonable defaults)
|
||||||
|
column_widths = {
|
||||||
|
'A': 20, # DPM
|
||||||
|
'B': 16, # DPM_IP
|
||||||
|
'C': 30, # Name
|
||||||
|
'D': 18, # PartNumber
|
||||||
|
'E': 16 # IP
|
||||||
|
}
|
||||||
|
for col_letter, width in column_widths.items():
|
||||||
|
output_ws.column_dimensions[col_letter].width = width
|
||||||
|
|
||||||
|
# Format header row
|
||||||
|
header_fill = PatternFill(start_color="D3D3D3", end_color="D3D3D3", fill_type="solid")
|
||||||
|
header_font = Font(bold=True, size=11)
|
||||||
|
header_alignment = Alignment(horizontal="center", vertical="center")
|
||||||
|
thin_border = Border(
|
||||||
|
left=Side(style="thin"),
|
||||||
|
right=Side(style="thin"),
|
||||||
|
top=Side(style="thin"),
|
||||||
|
bottom=Side(style="thin")
|
||||||
|
)
|
||||||
|
|
||||||
|
for col_idx in range(1, len(output_df.columns) + 1):
|
||||||
|
header_cell = output_ws.cell(row=1, column=col_idx)
|
||||||
|
header_cell.font = header_font
|
||||||
|
header_cell.fill = header_fill
|
||||||
|
header_cell.alignment = header_alignment
|
||||||
|
header_cell.border = thin_border
|
||||||
|
|
||||||
|
# Format data rows
|
||||||
|
data_alignment = Alignment(horizontal="left", vertical="center")
|
||||||
|
for row_idx in range(2, len(output_df) + 2):
|
||||||
|
for col_idx in range(1, len(output_df.columns) + 1):
|
||||||
|
data_cell = output_ws.cell(row=row_idx, column=col_idx)
|
||||||
|
data_cell.alignment = data_alignment
|
||||||
|
data_cell.border = thin_border
|
||||||
|
|
||||||
|
# Center align IP columns (DPM_IP and IP)
|
||||||
|
for row_idx in range(2, len(output_df) + 2):
|
||||||
|
output_ws.cell(row=row_idx, column=2).alignment = Alignment(horizontal="center", vertical="center") # DPM_IP
|
||||||
|
output_ws.cell(row=row_idx, column=5).alignment = Alignment(horizontal="center", vertical="center") # IP
|
||||||
|
|
||||||
|
print(f"\nConversion complete!")
|
||||||
|
print(f" Output saved to: {output_file}")
|
||||||
|
print(f" Total rows: {len(output_df)}")
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description="Convert Excel network structure format to standardized format",
|
||||||
|
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||||
|
epilog="""
|
||||||
|
Examples:
|
||||||
|
python format.py "Amazon CDW5_IP Addresses_Local.xlsx"
|
||||||
|
python format.py "input.xlsx" "output.xlsx"
|
||||||
|
python format.py "C:\\path\\to\\file.xlsx"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"source_file",
|
||||||
|
help="Path to the source Excel file to convert"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"output_file",
|
||||||
|
nargs="?",
|
||||||
|
default=None,
|
||||||
|
help="Path to the output Excel file (default: source_file with '_formatted' suffix)"
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
# Resolve file paths
|
||||||
|
source_file = Path(args.source_file)
|
||||||
|
if not source_file.is_absolute():
|
||||||
|
# If relative, assume it's in the same directory as the script
|
||||||
|
source_file = Path(__file__).parent / source_file
|
||||||
|
|
||||||
|
if not source_file.exists():
|
||||||
|
print(f"ERROR: Source file not found: {source_file}")
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
# Determine output file
|
||||||
|
if args.output_file:
|
||||||
|
output_file = Path(args.output_file)
|
||||||
|
if not output_file.is_absolute():
|
||||||
|
output_file = Path(__file__).parent / output_file
|
||||||
|
else:
|
||||||
|
# Default: add "_formatted" before the extension
|
||||||
|
stem = source_file.stem
|
||||||
|
suffix = source_file.suffix
|
||||||
|
output_file = source_file.parent / f"{stem}_formatted{suffix}"
|
||||||
|
|
||||||
|
print("=" * 60)
|
||||||
|
print("CONVERTING EXCEL FORMAT")
|
||||||
|
print("=" * 60)
|
||||||
|
print(f"Source: {source_file}")
|
||||||
|
print(f"Output: {output_file}")
|
||||||
|
print("=" * 60)
|
||||||
|
|
||||||
|
try:
|
||||||
|
convert_format(source_file, output_file)
|
||||||
|
except Exception as e:
|
||||||
|
print(f"\nERROR: {e}")
|
||||||
|
import traceback
|
||||||
|
traceback.print_exc()
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
336
AutoCAD/Attribute/AS_AttrBatch.lsp
Normal file
336
AutoCAD/Attribute/AS_AttrBatch.lsp
Normal file
@ -0,0 +1,336 @@
|
|||||||
|
;; AS_AttrBatch.lsp
|
||||||
|
;; Batch-edit a single attribute tag for all matching AS* blocks inside a user selection.
|
||||||
|
;; Command: ASBATCHATTR
|
||||||
|
;;
|
||||||
|
;; Command-line version (no dialog/DCL).
|
||||||
|
|
||||||
|
(defun asba:_unique (lst / out)
|
||||||
|
(foreach x lst
|
||||||
|
(if (not (member x out))
|
||||||
|
(setq out (cons x out))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
(reverse out)
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_ss->list (ss / i out)
|
||||||
|
(setq i 0 out '())
|
||||||
|
(if ss
|
||||||
|
(while (< i (sslength ss))
|
||||||
|
(if (= (type (ssname ss i)) 'ENAME)
|
||||||
|
(setq out (cons (ssname ss i) out))
|
||||||
|
)
|
||||||
|
(setq i (1+ i))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
(reverse out)
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_effname (ename / ed nm r obj)
|
||||||
|
;; Try to get the "EffectiveName" (handles dynamic blocks).
|
||||||
|
;; Fully guarded: if anything fails, fall back to DXF group 2.
|
||||||
|
(setq nm nil)
|
||||||
|
(if (= (type ename) 'ENAME)
|
||||||
|
(progn
|
||||||
|
;; COM path (safe) -> try to set nm
|
||||||
|
(setq r (vl-catch-all-apply 'vl-load-com '()))
|
||||||
|
(setq r (vl-catch-all-apply 'vlax-ename->vla-object (list ename)))
|
||||||
|
(if (not (vl-catch-all-error-p r))
|
||||||
|
(progn
|
||||||
|
(setq obj r)
|
||||||
|
(setq r (vl-catch-all-apply 'vlax-property-available-p (list obj 'EffectiveName)))
|
||||||
|
(if (and (not (vl-catch-all-error-p r)) r)
|
||||||
|
(progn
|
||||||
|
(setq r (vl-catch-all-apply 'vla-get-EffectiveName (list obj)))
|
||||||
|
(if (and (not (vl-catch-all-error-p r)) (= (type r) 'STR))
|
||||||
|
(setq nm r)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
;; DXF fallback if nm not found
|
||||||
|
(if (not nm)
|
||||||
|
(progn
|
||||||
|
(setq ed (entget ename))
|
||||||
|
(setq r (cdr (assoc 2 ed)))
|
||||||
|
(if (= (type r) 'STR) (setq nm r))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
nm
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_attrib-tags-of-insert (ins / e ed tags tagTmp)
|
||||||
|
;; Read attribute TAGs using DXF traversal (avoids VLA objects/variants).
|
||||||
|
(setq tags '())
|
||||||
|
(setq e (entnext ins))
|
||||||
|
(while (and e (setq ed (entget e)) (= (cdr (assoc 0 ed)) "ATTRIB"))
|
||||||
|
(setq tagTmp (cdr (assoc 2 ed)))
|
||||||
|
(if (= (type tagTmp) 'STR)
|
||||||
|
(setq tags (cons (strcase tagTmp) tags))
|
||||||
|
)
|
||||||
|
(setq e (entnext e))
|
||||||
|
)
|
||||||
|
tags
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_find-first-insert-by-name (ss blockName / enames e nm)
|
||||||
|
(setq enames (asba:_ss->list ss))
|
||||||
|
(setq e nil)
|
||||||
|
(foreach x enames
|
||||||
|
(if (and (null e)
|
||||||
|
(= (type x) 'ENAME)
|
||||||
|
(= (cdr (assoc 0 (entget x))) "INSERT"))
|
||||||
|
(progn
|
||||||
|
(setq nm (asba:_effname x))
|
||||||
|
(if (and (= (type nm) 'STR) (= (strcase nm) (strcase blockName)))
|
||||||
|
(setq e x)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
e
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_set-attrib-value-on-insert (ins tag newValue / e ed curTag)
|
||||||
|
;; Update only the chosen TAG on a single INSERT.
|
||||||
|
;; Returns T if changed, NIL otherwise.
|
||||||
|
(setq e (entnext ins))
|
||||||
|
(while (and e (setq ed (entget e)) (= (cdr (assoc 0 ed)) "ATTRIB"))
|
||||||
|
(setq curTag (cdr (assoc 2 ed)))
|
||||||
|
(if (and (= (type curTag) 'STR) (= (strcase curTag) (strcase tag)))
|
||||||
|
(progn
|
||||||
|
(if (assoc 1 ed)
|
||||||
|
(setq ed (subst (cons 1 newValue) (assoc 1 ed) ed))
|
||||||
|
(setq ed (append ed (list (cons 1 newValue))))
|
||||||
|
)
|
||||||
|
(entmod ed)
|
||||||
|
(entupd ins)
|
||||||
|
(setq e nil) ;; stop after first match
|
||||||
|
(setq ed T) ;; reuse as "changed" flag
|
||||||
|
)
|
||||||
|
(setq e (entnext e))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
(if (= ed T) T nil)
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_blocks-in-selection (ss / enames names nm)
|
||||||
|
;; Kept for backward compatibility (returns unique names only).
|
||||||
|
(mapcar 'car (asba:_block-counts-in-selection ss))
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_block-counts-in-selection (ss / enames counts nm cell)
|
||||||
|
;; Returns alist: (("AS_VFD" . 50) ("AS_PMM" . 1) ...)
|
||||||
|
(setq enames (asba:_ss->list ss))
|
||||||
|
(setq counts '())
|
||||||
|
(foreach e enames
|
||||||
|
(if (= (cdr (assoc 0 (entget e))) "INSERT")
|
||||||
|
(progn
|
||||||
|
(setq nm (asba:_effname e))
|
||||||
|
(if (and (= (type nm) 'STR) (wcmatch (strcase nm) "AS*"))
|
||||||
|
(progn
|
||||||
|
(setq nm (strcase nm))
|
||||||
|
(setq cell (assoc nm counts))
|
||||||
|
(if cell
|
||||||
|
(setq counts (subst (cons nm (1+ (cdr cell))) cell counts))
|
||||||
|
(setq counts (cons (cons nm 1) counts))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
;; sort by name
|
||||||
|
(vl-sort counts '(lambda (a b) (< (car a) (car b))))
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_counts->display (alist / out)
|
||||||
|
(setq out '())
|
||||||
|
(foreach p alist
|
||||||
|
(setq out (cons (strcat (car p) " (" (itoa (cdr p)) ")") out))
|
||||||
|
)
|
||||||
|
(reverse out)
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_attrs-for-block (ss blockName / ins tags)
|
||||||
|
;; Attributes are normally consistent across block references.
|
||||||
|
;; To avoid scanning every insert (and any weird/proxy entities), read tags from the first valid insert.
|
||||||
|
(setq ins (asba:_find-first-insert-by-name ss blockName))
|
||||||
|
(if ins
|
||||||
|
(progn
|
||||||
|
(setq tags (asba:_attrib-tags-of-insert ins))
|
||||||
|
(setq tags (asba:_unique tags))
|
||||||
|
(vl-sort tags '<)
|
||||||
|
)
|
||||||
|
'()
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_apply-attr (ss blockName tag newValue / enames changed nm attrefs att)
|
||||||
|
(setq enames (asba:_ss->list ss))
|
||||||
|
(setq changed 0)
|
||||||
|
(foreach e enames
|
||||||
|
(if (= (cdr (assoc 0 (entget e))) "INSERT")
|
||||||
|
(progn
|
||||||
|
(setq nm (asba:_effname e))
|
||||||
|
(if (and (= (type nm) 'STR) (= (strcase nm) (strcase blockName)))
|
||||||
|
(progn
|
||||||
|
(if (asba:_set-attrib-value-on-insert e tag newValue)
|
||||||
|
(setq changed (1+ changed))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
changed
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_print-numbered (title items / i)
|
||||||
|
(prompt (strcat "\n" title))
|
||||||
|
(setq i 1)
|
||||||
|
(foreach it items
|
||||||
|
(prompt (strcat "\n " (itoa i) ") " it))
|
||||||
|
(setq i (1+ i))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_choose-index (promptText maxN / n)
|
||||||
|
(setq n nil)
|
||||||
|
(while (not n)
|
||||||
|
(setq n (getint (strcat "\n" promptText " (1-" (itoa maxN) ", Enter to cancel): ")))
|
||||||
|
(cond
|
||||||
|
((null n) (setq n 0))
|
||||||
|
((or (< n 1) (> n maxN))
|
||||||
|
(prompt "\nInvalid number.")
|
||||||
|
(setq n nil)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
n
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_ss-has (ss ename)
|
||||||
|
;; returns T if ename is in selection set ss
|
||||||
|
(if (and ss ename)
|
||||||
|
(if (ssmemb ename ss) T nil)
|
||||||
|
nil
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_pick-block-from-ss (ss / sel ename nm)
|
||||||
|
(setq nm nil)
|
||||||
|
(while (not nm)
|
||||||
|
(setq sel (entsel "\nClick a block (AS*) inside the selected zone (Enter to cancel): "))
|
||||||
|
(cond
|
||||||
|
((null sel) (setq nm "")) ;; cancelled
|
||||||
|
(t
|
||||||
|
(setq ename (car sel))
|
||||||
|
(if (and ename (= (cdr (assoc 0 (entget ename))) "INSERT") (asba:_ss-has ss ename))
|
||||||
|
(progn
|
||||||
|
(setq nm (asba:_effname ename))
|
||||||
|
(if (not (and (= (type nm) 'STR) (wcmatch (strcase nm) "AS*")))
|
||||||
|
(progn (prompt "\nThat block name does not start with AS*.") (setq nm nil))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
(progn (prompt "\nPlease click a block INSERT that is inside your selected zone.") (setq nm nil))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
(if (= nm "") nil nm)
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun asba:_getstring-safe (msg / r)
|
||||||
|
;; Some AutoCAD builds do not support (getstring T ...). Use the simplest form.
|
||||||
|
(setq r (getstring msg))
|
||||||
|
r
|
||||||
|
)
|
||||||
|
|
||||||
|
(defun c:ASBATCHATTR (/ ss blockCounts blockNames blockDisplay mode block bIdx attrs tag aIdx newVal changed blkTotal)
|
||||||
|
(vl-load-com)
|
||||||
|
|
||||||
|
(prompt "\nSelect a zone/area (window/crossing allowed). Press Enter when done...")
|
||||||
|
(setq ss (ssget))
|
||||||
|
(if (not ss)
|
||||||
|
(progn (prompt "\nNothing selected.") (princ))
|
||||||
|
(progn
|
||||||
|
(setq blockCounts (asba:_block-counts-in-selection ss))
|
||||||
|
(setq blockNames (mapcar 'car blockCounts))
|
||||||
|
(setq blockDisplay (asba:_counts->display blockCounts))
|
||||||
|
(if (not blockNames)
|
||||||
|
(progn (prompt "\nNo blocks found with name starting with AS* in the selection.") (princ))
|
||||||
|
(progn
|
||||||
|
;; Choose block: Pick (mouse) or List (numbered)
|
||||||
|
(initget "Pick List")
|
||||||
|
(setq mode (getkword "\nChoose block by [Pick/List] <Pick>: "))
|
||||||
|
(if (null mode) (setq mode "Pick"))
|
||||||
|
|
||||||
|
(cond
|
||||||
|
((= mode "Pick")
|
||||||
|
(setq block (asba:_pick-block-from-ss ss))
|
||||||
|
)
|
||||||
|
(t
|
||||||
|
(asba:_print-numbered "Unique blocks (AS*) with counts:" blockDisplay)
|
||||||
|
(setq bIdx (asba:_choose-index "Choose block number" (length blockNames)))
|
||||||
|
(setq block (if (= bIdx 0) nil (nth (1- bIdx) blockNames)))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
(if (not block)
|
||||||
|
(progn (prompt "\nCancelled.") (princ))
|
||||||
|
(progn
|
||||||
|
(setq blkTotal (cdr (assoc (strcase block) blockCounts)))
|
||||||
|
(setq attrs (asba:_attrs-for-block ss block))
|
||||||
|
(if (not attrs)
|
||||||
|
(progn (prompt (strcat "\nBlock " block " has no attributes in the selection.")) (princ))
|
||||||
|
(progn
|
||||||
|
;; Choose attribute: numbered list only (reliable + matches requested workflow)
|
||||||
|
(asba:_print-numbered (strcat "Unique attribute tags for " block ":") attrs)
|
||||||
|
(setq aIdx (asba:_choose-index "Choose attribute number" (length attrs)))
|
||||||
|
(setq tag (if (= aIdx 0) nil (nth (1- aIdx) attrs)))
|
||||||
|
|
||||||
|
(if (not tag)
|
||||||
|
(progn (prompt "\nCancelled.") (princ))
|
||||||
|
(progn
|
||||||
|
(setq newVal (asba:_getstring-safe (strcat "\nNew value for tag " tag " (Enter to cancel): ")))
|
||||||
|
;; If user presses Enter immediately, AutoCAD may return "".
|
||||||
|
(if (or (null newVal) (= newVal ""))
|
||||||
|
(progn (prompt "\nCancelled.") (princ))
|
||||||
|
(progn
|
||||||
|
(setq changed (asba:_apply-attr ss block tag newVal))
|
||||||
|
(command "_.REGEN")
|
||||||
|
(prompt
|
||||||
|
(strcat
|
||||||
|
"\nDone. Changed "
|
||||||
|
(itoa changed)
|
||||||
|
" block(s) out of "
|
||||||
|
(itoa (if blkTotal blkTotal 0))
|
||||||
|
" selected "
|
||||||
|
block
|
||||||
|
" block(s)."
|
||||||
|
)
|
||||||
|
)
|
||||||
|
(princ)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
(princ "\nLoaded AS_AttrBatch. Run command: ASBATCHATTR")
|
||||||
|
(princ)
|
||||||
|
|
||||||
|
|
||||||
25
AutoCAD/Attribute/README.md
Normal file
25
AutoCAD/Attribute/README.md
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
# AS Batch Attribute Editor (AutoLISP)
|
||||||
|
|
||||||
|
This tool lets you select an area in your drawing, list **unique block names starting with `AS*`**, list **unique attribute tags** for a chosen block, and then **change one attribute value for all matching blocks** inside the selected area.
|
||||||
|
|
||||||
|
## Files
|
||||||
|
- `AS_AttrBatch.lsp` — AutoLISP logic (**this is the only required file**)
|
||||||
|
No `.dcl` file is used — this is a command-line tool.
|
||||||
|
|
||||||
|
## Install / Run
|
||||||
|
1. Copy `AS_AttrBatch.lsp` into any folder.
|
||||||
|
2. In AutoCAD run `APPLOAD` and load `AS_AttrBatch.lsp`.
|
||||||
|
3. Run command: `ASBATCHATTR`
|
||||||
|
|
||||||
|
## How to use
|
||||||
|
1. Select a zone/part of the drawing (window/crossing selection is fine), then press **Enter**.
|
||||||
|
2. Choose the block:
|
||||||
|
- Option **Pick**: click a block in the selected zone
|
||||||
|
- Option **List**: choose from a numbered list
|
||||||
|
- The list shows **unique block names** with a **count** like `AS_VFD (50)`
|
||||||
|
3. Choose the attribute tag from the **numbered list**.
|
||||||
|
4. Type the new value and press **Enter**.
|
||||||
|
|
||||||
|
Only the chosen attribute tag is updated. All other attributes remain unchanged.
|
||||||
|
|
||||||
|
|
||||||
BIN
AutoCAD/Compare/DPM/DPM_device.xlsx
Normal file
BIN
AutoCAD/Compare/DPM/DPM_device.xlsx
Normal file
Binary file not shown.
225
AutoCAD/Compare/DPM/check_dpm_devices.py
Normal file
225
AutoCAD/Compare/DPM/check_dpm_devices.py
Normal file
@ -0,0 +1,225 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Script to compare device lists from two Excel files.
|
||||||
|
- File 1: Extract all values from P_TAG1 column
|
||||||
|
- File 2: Extract all devices, ignoring those containing "SPARE"
|
||||||
|
- Output: Excel file with all devices and a YES/NO column indicating if device is in both files
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import sys
|
||||||
|
import pandas as pd
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
def extract_from_p_tag1_file(file_path, sheet_name=0):
    """Return every non-blank entry of the P_TAG1 column in an Excel file.

    Args:
        file_path: Path to the Excel workbook that carries a P_TAG1 column.
        sheet_name: Sheet name or index to read (default: first sheet).

    Returns:
        list[str]: Device names exactly as written in the sheet (case kept).

    Exits the process with status 1 when the file is missing, unreadable,
    or has no P_TAG1 column (command-line tool convention).
    """
    try:
        frame = pd.read_excel(file_path, sheet_name=sheet_name)

        if 'P_TAG1' not in frame.columns:
            print(f"Error: Column 'P_TAG1' not found in '{file_path}'")
            print(f"Available columns: {list(frame.columns)}")
            sys.exit(1)

        # Strip whitespace, drop NaN cells and empty strings; keep original case.
        stripped = frame['P_TAG1'].dropna().astype(str).str.strip()
        return stripped[stripped != ''].tolist()
    except FileNotFoundError:
        print(f"Error: File '{file_path}' not found.")
        sys.exit(1)
    except Exception as e:
        print(f"Error reading '{file_path}': {e}")
        sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def extract_from_devices_file(file_path, sheet_name=0):
    """Collect device names from the first column of an Excel file.

    Entries containing "SPARE" (any case) are discarded. If the header of
    the first column itself looks like a device name (contains an
    underscore, is longer than 3 characters, and is not "P_TAG1"), it is
    treated as data too -- some exports put the first device in the header
    row instead of a label.

    Args:
        file_path: Path to the Excel workbook holding the device list.
        sheet_name: Sheet name or index to read (default: first sheet).

    Returns:
        list[str]: Device names with original casing, SPARE entries removed.

    Exits the process with status 1 on a missing or unreadable file.
    """
    try:
        frame = pd.read_excel(file_path, sheet_name=sheet_name)

        # First column holds the devices; normalize whitespace, drop blanks.
        cells = frame.iloc[:, 0].dropna().astype(str).str.strip()
        names = cells[cells != ''].tolist()

        # The header cell may actually be the first device rather than a label.
        header = str(frame.columns[0]).strip()
        looks_like_device = (
            '_' in header and len(header) > 3 and header.lower() != 'p_tag1'
        )
        if looks_like_device and header not in names:
            names.append(header)

        # Filter out devices containing "SPARE" (case-insensitive).
        return [name for name in names if 'spare' not in name.lower()]
    except FileNotFoundError:
        print(f"Error: File '{file_path}' not found.")
        sys.exit(1)
    except Exception as e:
        print(f"Error reading '{file_path}': {e}")
        sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def compare_and_create_output(file1_path, file2_path, output_path, sheet1=0, sheet2=0):
    """
    Compare device lists and create Excel output file.

    Devices are matched case-insensitively; the output keeps the original
    spelling (preferring the spelling from file 1 when both have it). The
    result has one row per unique device with YES/NO membership columns.

    Args:
        file1_path: Path to file with P_TAG1 column
        file2_path: Path to file with device list
        output_path: Path to output Excel file
        sheet1: Sheet name or index for first file
        sheet2: Sheet name or index for second file
    """
    # Get file names (without path) for column headers.
    # NOTE(review): if both inputs share the same basename, the two per-file
    # YES/NO keys collide in the row dicts below and one column is silently
    # lost -- confirm callers always pass files with distinct names.
    file1_name = Path(file1_path).name
    file2_name = Path(file2_path).name

    print(f"Reading '{file1_path}' (looking for P_TAG1 column)...")
    devices1 = extract_from_p_tag1_file(file1_path, sheet_name=sheet1)
    print(f"  Found {len(devices1)} devices in P_TAG1 column")

    print(f"Reading '{file2_path}' (extracting all devices, ignoring SPARE)...")
    devices2 = extract_from_devices_file(file2_path, sheet_name=sheet2)
    print(f"  Found {len(devices2)} devices (after filtering SPARE)")

    # Create sets for case-insensitive comparison: lowercase key -> original case.
    devices1_lower = {d.lower(): d for d in devices1}
    devices2_lower = {d.lower(): d for d in devices2}

    # Get all unique devices (combining both lists, preserving original case)
    all_devices_lower = set(devices1_lower.keys()) | set(devices2_lower.keys())

    # Create result list with original case
    result_data = []
    for device_lower in sorted(all_devices_lower):
        # Get original case (prefer from file1, then file2)
        if device_lower in devices1_lower:
            device_original = devices1_lower[device_lower]
        else:
            device_original = devices2_lower[device_lower]

        # Check if device is in each file (case-insensitive)
        in_file1 = device_lower in devices1_lower
        in_file2 = device_lower in devices2_lower
        in_both = in_file1 and in_file2

        result_data.append({
            'Device': device_original,
            file1_name: "YES" if in_file1 else "NO",
            file2_name: "YES" if in_file2 else "NO",
            'In Both Files': "YES" if in_both else "NO"
        })

    # Create DataFrame
    result_df = pd.DataFrame(result_data)

    # Validate output file extension: coerce anything that is not .xlsx/.xls.
    output_path_obj = Path(output_path)
    if output_path_obj.suffix.lower() not in ['.xlsx', '.xls']:
        # If no extension or wrong extension, add .xlsx
        if not output_path_obj.suffix:
            output_path = str(output_path_obj) + '.xlsx'
            print(f"Note: Output file extension added: {output_path}")
        else:
            print(f"Warning: Output file should be .xlsx format. Converting to .xlsx")
            output_path = str(output_path_obj.with_suffix('.xlsx'))

    # Write to Excel
    try:
        # Ensure openpyxl is available before pandas needs it as the engine.
        try:
            import openpyxl
        except ImportError:
            print("Error: openpyxl library is required. Install it with: pip install openpyxl")
            sys.exit(1)

        result_df.to_excel(output_path, index=False, engine='openpyxl')
        print(f"\nOutput saved to: {output_path}")
        print(f"Total devices: {len(result_df)}")
        print(f"Devices in both files: {len(result_df[result_df['In Both Files'] == 'YES'])}")
        print(f"Devices only in one file: {len(result_df[result_df['In Both Files'] == 'NO'])}")
        print(f"\nColumns in output file:")
        print(f"  - Device: Device name")
        print(f"  - {file1_name}: YES if device is in this file")
        print(f"  - {file2_name}: YES if device is in this file")
        print(f"  - In Both Files: YES if device is in both files")
    except Exception as e:
        print(f"Error writing to Excel file: {e}")
        print(f"Make sure the output path is valid and you have write permissions.")
        sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Parse CLI arguments, validate that both inputs exist, run the comparison.

    Exits with status 1 when either input file is missing; the check runs
    before any Excel parsing so the user gets a fast, clear error.
    """
    parser = argparse.ArgumentParser(
        description='Compare device lists from two Excel files and create Excel output',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  python check_dpm_devices.py file1.xlsx file2.xlsx output.xlsx
  python check_dpm_devices.py file1.xlsx file2.xlsx output.xlsx --sheet1 "Sheet1" --sheet2 "Sheet2"
        """
    )

    # Declarative argument table keeps the parser setup in one place.
    arg_specs = [
        (('file1',),
         {'type': str,
          'help': 'Path to first Excel file (must have P_TAG1 column)'}),
        (('file2',),
         {'type': str,
          'help': 'Path to second Excel file (device list, SPARE devices will be ignored)'}),
        (('output',),
         {'type': str,
          'help': 'Path to output Excel file (must be .xlsx format)'}),
        (('--sheet1',),
         {'type': str, 'default': 0,
          'help': 'Sheet name or index for first file (default: 0)'}),
        (('--sheet2',),
         {'type': str, 'default': 0,
          'help': 'Sheet name or index for second file (default: 0)'}),
    ]
    for flags, options in arg_specs:
        parser.add_argument(*flags, **options)

    args = parser.parse_args()

    # Fail fast on missing inputs (same message and exit code per file
    # as the original per-file checks).
    for path_arg in (args.file1, args.file2):
        if not Path(path_arg).exists():
            print(f"Error: File '{path_arg}' does not exist.")
            sys.exit(1)

    # Compare and create output
    compare_and_create_output(
        args.file1,
        args.file2,
        args.output,
        sheet1=args.sheet1,
        sheet2=args.sheet2,
    )
|
||||||
|
|
||||||
|
|
||||||
|
# Script entry point: only runs when executed directly, not on import.
if __name__ == '__main__':
    main()
|
||||||
BIN
AutoCAD/Compare/DPM/device_list.xlsx
Normal file
BIN
AutoCAD/Compare/DPM/device_list.xlsx
Normal file
Binary file not shown.
BIN
AutoCAD/Compare/DPM/results.xlsx
Normal file
BIN
AutoCAD/Compare/DPM/results.xlsx
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -162,9 +162,12 @@ def generate_rows(unique_dpms, assign, rating_map, net2_dpms):
|
|||||||
for dpm in unique_dpms:
|
for dpm in unique_dpms:
|
||||||
ring_ip = ring_ip_map[dpm]
|
ring_ip = ring_ip_map[dpm]
|
||||||
tags = assign.get(dpm, [])
|
tags = assign.get(dpm, [])
|
||||||
tags = tags[:24] + ["SPARE"] * (24 - len(tags[:24]))
|
|
||||||
|
# Generate ports starting from 5, continuing beyond 28 if needed
|
||||||
|
num_devices = len(tags)
|
||||||
|
ports = list(range(5, 5 + num_devices))
|
||||||
|
|
||||||
for idx, port in enumerate(range(5, 29)):
|
for idx, port in enumerate(ports):
|
||||||
dev = tags[idx]
|
dev = tags[idx]
|
||||||
pn = part_number_for(dev, rating_map)
|
pn = part_number_for(dev, rating_map)
|
||||||
|
|
||||||
@ -201,6 +204,7 @@ def write_sheet(ws, rows, title):
|
|||||||
)
|
)
|
||||||
|
|
||||||
yellow = PatternFill("solid", fgColor="FFC000")
|
yellow = PatternFill("solid", fgColor="FFC000")
|
||||||
|
red = PatternFill("solid", fgColor="FF0000")
|
||||||
thin = Side(style="thin", color="000000")
|
thin = Side(style="thin", color="000000")
|
||||||
border = Border(left=thin, right=thin, top=thin, bottom=thin)
|
border = Border(left=thin, right=thin, top=thin, bottom=thin)
|
||||||
center = Alignment(horizontal="center", vertical="center")
|
center = Alignment(horizontal="center", vertical="center")
|
||||||
@ -218,11 +222,10 @@ def write_sheet(ws, rows, title):
|
|||||||
|
|
||||||
for dpm, group in groupby(rows, key=lambda x: x[0]):
|
for dpm, group in groupby(rows, key=lambda x: x[0]):
|
||||||
g = list(group)
|
g = list(group)
|
||||||
# should already be 24 rows per DPM
|
num_rows = len(g)
|
||||||
g = g[:24]
|
|
||||||
|
|
||||||
start = row_idx
|
start = row_idx
|
||||||
end = row_idx + 24 - 1
|
end = row_idx + num_rows - 1
|
||||||
|
|
||||||
ws.merge_cells(start_row=start, start_column=4, end_row=end, end_column=4)
|
ws.merge_cells(start_row=start, start_column=4, end_row=end, end_column=4)
|
||||||
ws.merge_cells(start_row=start, start_column=5, end_row=end, end_column=5)
|
ws.merge_cells(start_row=start, start_column=5, end_row=end, end_column=5)
|
||||||
@ -235,17 +238,23 @@ def write_sheet(ws, rows, title):
|
|||||||
cell.border = border
|
cell.border = border
|
||||||
cell.font = Font(bold=False)
|
cell.font = Font(bold=False)
|
||||||
|
|
||||||
for i in range(24):
|
for i in range(num_rows):
|
||||||
row_vals = {
|
row_vals = {
|
||||||
6: g[i][2], # Assigned Device
|
6: g[i][2], # Assigned Device
|
||||||
7: g[i][3], # Part Number
|
7: g[i][3], # Part Number
|
||||||
8: g[i][4], # IP
|
8: g[i][4], # IP
|
||||||
9: g[i][5], # DPM PORT
|
9: g[i][5], # DPM PORT
|
||||||
}
|
}
|
||||||
|
# Highlight rows beyond 24th device in red
|
||||||
|
is_overflow = i >= 24
|
||||||
|
fill_color = red if is_overflow else None
|
||||||
|
|
||||||
for col, val in row_vals.items():
|
for col, val in row_vals.items():
|
||||||
c = ws.cell(row=start + i, column=col, value=val)
|
c = ws.cell(row=start + i, column=col, value=val)
|
||||||
c.alignment = left if col in (6, 7, 9) else center
|
c.alignment = left if col in (6, 7, 9) else center
|
||||||
c.border = border
|
c.border = border
|
||||||
|
if fill_color:
|
||||||
|
c.fill = fill_color
|
||||||
|
|
||||||
for r in range(start + 1, end + 1):
|
for r in range(start + 1, end + 1):
|
||||||
for col in (4, 5):
|
for col in (4, 5):
|
||||||
|
|||||||
314
AutoCAD/RenAttrib.LSP
Normal file
314
AutoCAD/RenAttrib.LSP
Normal file
@ -0,0 +1,314 @@
|
|||||||
|
;;; Rename Attributes by Irné Barnard
|
||||||
|
;;; Version 3
|
||||||
|
|
||||||
|
(vl-load-com)
|
||||||
|
|
||||||
|
;; Load saved rename settings from RenAttrib.DAT into *RenAtt:Settings*.
;; filename: optional explicit path; otherwise "RenAttrib.DAT" is searched on
;; the support path, then in the roamable profile folder (RoamableRootPrefix).
;; The file is expected to contain one printed LISP list; all lines are
;; concatenated and parsed with a single (read). Sets *RenAtt:Settings* to the
;; parsed list, or nil when no readable file is found.
(defun RenAtt:LoadSettings (filename / f s lst)
  (if (and (or (setq f (findfile (cond (filename)
                                       ("RenAttrib.DAT"))))
               (setq f (findfile (strcat (getvar "RoamableRootPrefix") "RenAttrib.DAT"))))
           (setq f (open f "r")))
    (progn
      (setq lst "")
      ;; Join every line with "\n", then parse the whole text as one expression.
      (while (setq s (read-line f)) (setq lst (strcat lst "\n" s)))
      (setq lst (read lst))
      (close f)))
  (setq *RenAtt:Settings* lst))

;; Populate the settings global as soon as this file is loaded.
(RenAtt:LoadSettings nil)
||||||
|
|
||||||
|
;; Persist *RenAtt:Settings* to RenAttrib.DAT as one printed LISP list.
;; filename: optional explicit target path; otherwise an existing DAT on the
;; support path is reused, falling back to the roamable profile folder.
;; Silently does nothing if the file cannot be opened for writing.
(defun RenAtt:SaveSettings (filename / f)
  (setq f (cond (filename)
                ((findfile "RenAttrib.DAT"))
                ((strcat (getvar "RoamableRootPrefix") "RenAttrib.DAT"))))
  (if (setq f (open f "w"))
    (progn
      (prin1 *RenAtt:Settings* f)
      (close f))))
|
||||||
|
|
||||||
|
;; Get ALL (even duplicate) attributes from a block reference.
;; Walks the entities that follow the INSERT with entnext and collects each
;; one whose ObjectName is "AcDbAttribute", stopping at the first non-attribute
;; entity. Returns the attribute VLA-objects (in reverse drawing order).
(defun RenAtt:GetAttributes (obj / an ao lst)
  (setq an (vlax-vla-object->ename obj))
  (while (and (setq an (entnext an))
              (setq ao (vlax-ename->vla-object an))
              (eq (vla-get-ObjectName ao) "AcDbAttribute"))
    (setq lst (cons ao lst)))
  lst)
|
||||||
|
|
||||||
|
;; Build a list of (BLOCKNAME (TAG . count) ...) for every non-anonymous block
;; in the drawing, counting duplicate attribute tags. Scans block definitions
;; first, then block references (whose duplicate counts may exceed the
;; definition's), keeping the larger count per tag. Block and tag names are
;; upper-cased. Caches the application/document/blocks objects in globals.
(defun RenAtt:GetBlocks (/ lst item name attList attTest attName changed)
  (or *AcadApp* (setq *AcadApp* (vlax-get-acad-object)))
  (or *ActiveDocument* (setq *ActiveDocument* (vla-get-ActiveDocument *AcadApp*)))
  (or *BlocksCollection* (setq *BlocksCollection* (vla-get-Blocks *ActiveDocument*)))
  ;; Pass 1: attributes from block definitions ("~`**" skips anonymous blocks).
  (vlax-for blk *BlocksCollection*
    (if (wcmatch (setq name (vla-get-Name blk)) "~`**")
      (progn
        (setq item nil)
        (vlax-for obj blk
          (if (eq (vla-get-ObjectName obj) "AcDbAttributeDefinition")
            ;; Count duplicate tags: bump the count when the tag is known.
            (if (setq attTest (assoc (setq attName (strcase (vla-get-TagString obj))) item))
              (setq item (subst (cons attName (1+ (cdr attTest))) attTest item))
              (setq item (cons (cons attName 1) item)))))
        (if item
          (setq lst (cons (cons (strcase name) (reverse item)) lst))))))
  ;; Pass 2: attributes from block references nested inside any block.
  (vlax-for blk *BlocksCollection*
    (vlax-for obj blk
      (if (and (eq (vla-get-ObjectName obj) "AcDbBlockReference") (eq (vla-get-HasAttributes obj) :vlax-true))
        (progn
          (setq attList nil)
          (foreach att (vlax-invoke obj "GetAttributes")
            (if (and (setq attName (strcase (vla-get-TagString att))) (setq attTest (assoc attName attList)))
              (setq attList (subst (cons attName (1+ (cdr attTest))) attTest attList))
              (setq attList (cons (cons attName 1) attList))))
          ;; EffectiveName resolves dynamic blocks to their true name.
          (setq name (strcase (vla-get-EffectiveName obj))
                item (reverse (cdr (assoc name lst)))
                attName nil)
          ;; Keep the larger duplicate count per tag; add tags unseen so far.
          (foreach att (reverse attList)
            (if (setq attTest (assoc (setq attName (car att)) item))
              (if (> (cdr att) (cdr attTest))
                (setq changed t
                      item (subst att attTest item)))
              (setq changed t
                    item (cons att item))))
          ;; Merge into block definition list
          (if changed
            (setq lst (subst (cons name (reverse item)) (assoc name lst) lst)))))))
  (reverse lst))
|
||||||
|
|
||||||
|
;; Merge NewData -- a (BLOCKNAME (TAG . count) ...) list from RenAtt:GetBlocks
;; -- into Settings, whose entries are (BLOCKNAME (OLDTAG NEWTAG) ...).
;; Unknown blocks are appended with identity mappings (tag maps to itself,
;; repeated per duplicate count). For known blocks, extra duplicate slots are
;; appended when the new count exceeds the stored one. Returns the updated
;; Settings list (the argument itself is not mutated in place).
(defun RenAtt:MergeData (Settings NewData / old old2 attOld len)
  (foreach item NewData
    (if (setq old2 (setq old (assoc (car item) Settings)))
      (foreach att (cdr item)
        (if (setq attOld (assoc (car att) (cdr old)))
          ;; Tag already mapped: extend with identity entries when the new
          ;; duplicate count is larger than what is stored.
          (if (> (cdr att) (setq len (1- (length attOld))))
            (setq Settings
                   (subst
                     (setq old (subst (append attOld (_MakeList (car att) (- (cdr att) len))) attOld old))
                     old2
                     Settings)
                   old2 old))
          ;;Some error here
          ;; NOTE(review): author-flagged defect; this branch builds
          ;; (TAG . (TAG TAG ...)) rather than the (OLDTAG NEWTAG) shape used
          ;; elsewhere -- verify before relying on merging new tags into an
          ;; existing block entry.
          (setq Settings (subst (append old (list (cons (car att) (_MakeList (car att) (cdr att))))) old Settings))))
      ;; Block not in Settings yet: append identity mappings, one entry per
      ;; duplicate occurrence of each tag.
      (setq Settings
             (append
               Settings
               (list
                 (cons (car item)
                       (mapcar (function (lambda (attName) (list attName attName)))
                               (apply 'append (mapcar (function (lambda (a) (_MakeList (car a) (cdr a)))) (cdr item))))))))))
  Settings)
|
||||||
|
|
||||||
|
;; List helpers used by the settings-manipulation code.

;; Return a list containing `val` repeated `count` times.
(defun _MakeList (val count / lst) (repeat count (setq lst (cons val lst))))

;; Return the first `num` elements of lst; the whole list when num >= length.
(defun _FirstN (lst num / res)
  (if (< num (length lst))
    (progn
      (repeat num
        (setq res (cons (car lst) res)
              lst (cdr lst)))
      (reverse res))
    lst))

;; Drop the first `num` elements of lst (repeat returns the last cdr).
(defun _CdrN (lst num /) (repeat num (setq lst (cdr lst))))

;; Return lst with the element at position idx replaced by val;
;; a negative idx prepends val instead.
(defun _ReplaceN (val idx lst /)
  (if (< idx 0)
    (cons val lst)
    (append (_FirstN lst idx) (list val) (_CdrN lst (1+ idx)))))
|
||||||
|
;; Return lst with the element at position idx removed.
;; A negative idx is out of range, so the list is returned unchanged.
;; (The previous version returned (cons val lst) here -- `val` was copied
;; from _ReplaceN's parameter list and is unbound in this function, so it
;; evaluated to nil and a spurious nil was prepended to the list.)
(defun _RemoveN (lst idx /)
  (if (< idx 0)
    lst
    (append (_FirstN lst idx) (_CdrN lst (1+ idx)))))
|
||||||
|
|
||||||
|
;; Apply the rename settings in *RenAtt:Settings* to one object.
;; - Block reference with attributes: each attribute whose tag matches an
;;   (OLDTAG NEWTAG) pair gets its TagString replaced; the pair is consumed
;;   so duplicate tags map to successive entries.
;; - Block table record: same treatment for its attribute definitions.
;; Pairs where OLDTAG and NEWTAG compare eq are skipped (no write).
(defun RenAttr (obj / setting found)
  (cond
    ((and (eq (vla-get-ObjectName obj) "AcDbBlockReference")
          (eq (vla-get-HasAttributes obj) :vlax-true)
          (setq setting (assoc (strcase (vla-get-EffectiveName obj)) *RenAtt:Settings*)))
     (setq setting (cdr setting))
     (foreach att (vlax-invoke obj "GetAttributes")
       (if (setq found (assoc (strcase (vla-get-TagString att)) setting))
         (progn
           ;; Consume the pair so a duplicate tag uses the next mapping.
           (setq setting (_RemoveN setting (vl-position found setting)))
           (if (not (eq (car found) (cadr found)))
             (vla-put-TagString att (cadr found)))))))
    ((and (eq (vla-get-ObjectName obj) "AcDbBlockTableRecord")
          (setq setting (assoc (strcase (vla-get-Name obj)) *RenAtt:Settings*)))
     (setq setting (cdr setting))
     (vlax-for att obj
       (if (and (eq (vla-get-ObjectName att) "AcDbAttributeDefinition")
                (setq found (assoc (strcase (vla-get-TagString att)) setting)))
         (progn
           (setq setting (_RemoveN setting (vl-position found setting)))
           (if (not (eq (car found) (cadr found)))
             (vla-put-TagString att (cadr found)))))))))
|
||||||
|
|
||||||
|
;; Command RENATTR: walk every block in the drawing and apply the stored
;; rename settings via RenAttr -- to each non-anonymous block definition and
;; to every object (including block references) inside every block.
(defun c:RenAttr (/)
  (or *AcadApp* (setq *AcadApp* (vlax-get-acad-object)))
  (or *ActiveDocument* (setq *ActiveDocument* (vla-get-ActiveDocument *AcadApp*)))
  (or *BlocksCollection* (setq *BlocksCollection* (vla-get-Blocks *ActiveDocument*)))
  ;; Rename on block definitions ("~`**" skips anonymous blocks), then on
  ;; every entity nested inside each block.
  (vlax-for blk *BlocksCollection*
    (if (wcmatch (vla-get-Name blk) "~`**")
      (RenAttr blk))
    (vlax-for obj blk (RenAttr obj)))
  (princ))
|
||||||
|
|
||||||
|
;; Command RENATTRSET: settings editor dialog. Writes a temporary DCL file,
;; loads it, and lets the user add/remove blocks from the rename settings,
;; pick a tag, and type its new name. OK (1) saves to RenAttrib.DAT; the
;; Clear button (done_dialog 2) optionally deletes every DAT file found and
;; reopens the dialog; Cancel discards changes.
(defun c:RenAttrSet (/ dcl s Blocks Settings ~btnCurrentAdd ~btnCurrentRem ~lstBlocks ~lstAttribs ~btnSave)
  (if
    (and (setq dcl (open (setq s (strcat (getvar "TempPrefix") "RenAttrib.DCL")) "w"))
         (princ
           (strcat
             "RenAttrSet : dialog {\n"
             " label = \"Rename Attributes - Settings\";\n"
             " : row {\n"
             " : boxed_column {\n"
             " label = \"Blocks in current Drawing\";\n"
             " : list_box { height = 20; key = \"lstCurrent\"; width = 30; multiple_select = true; }\n"
             " : column {\n"
             " : button { label = \"Add to Settings\"; key = \"btnCurrentAdd\"; }\n"
             " : button { label = \"Remove from Settings\"; key = \"btnCurrentRem\"; }\n"
             " }\n"
             " }\n"
             " : boxed_column {\n"
             " label = \"Settings\";\n"
             " :row { : column {\n"
             " : list_box { label = \"Block Names\"; key = \"lstBlocks\"; width = 30; height = 20; }\n"
             " : edit_box { label = \"New Name\"; key = \"edtName\"; edit_width = 25; }\n"
             " }\n"
             " : column {\n"
             " : list_box { label = \"Old Attribute Names\"; key = \"lstAttribs\"; width = 30; height = 20; }\n"
             ;; NOTE(review): "retirement_button" is not a standard DCL tile
             ;; type -- likely should be ": button"; confirm the dialog loads.
             " : retirement_button { key = \"btnSave\"; label = \" Save \"; }\n"
             " }}\n"
             " : row {\n"
             " fixed_width = true;\n"
             " alignment = centered;\n"
             " : button { label = \"Clear\"; key = \"clear\"; action = \"(done_dialog 2)\"; }\n"
             " : spacer { width = 2; }\n"
             " ok_button;\n"
             " : spacer { width = 2; }\n"
             " cancel_button;\n"
             " }\n"
             " }\n"
             " }\n"
             "}")
           dcl)
         (not (close dcl))
         (setq dcl (load_dialog s))
         (new_dialog "RenAttrSet" dcl))
    (progn
      ;; Action on button "Add to Settings" clicked: merge the selected
      ;; drawing blocks into Settings and refresh the settings list.
      (defun ~btnCurrentAdd (/ idx)
        (if (not (eq (setq idx (get_tile "lstCurrent")) ""))
          (progn
            (setq idx (mapcar (function (lambda (n) (nth n Blocks))) (read (strcat "(" idx ")")))
                  Settings (RenAtt:MergeData Settings idx))
            (start_list "lstBlocks")
            (mapcar 'add_list (mapcar 'car Settings))
            (end_list)
            (set_tile "lstBlocks" "")
            (~lstBlocks))))
      ;; Action on button "Remove from Settings" clicked: drop the selected
      ;; blocks' entries from Settings and refresh the settings list.
      (defun ~btnCurrentRem (/ idx)
        (if (not (eq (setq idx (get_tile "lstCurrent")) ""))
          (progn
            (setq idx (mapcar (function (lambda (n) (nth n Blocks))) (read (strcat "(" idx ")"))))
            (foreach i idx
              (setq i (assoc (car i) Settings)
                    Settings (vl-remove i Settings)))
            (start_list "lstBlocks")
            (mapcar 'add_list (mapcar 'car Settings))
            (end_list)
            (set_tile "lstBlocks" "")
            (~lstBlocks))))
      ;; Action on selection "Block Names" changed: show the block's tag
      ;; mappings as "OLD <NEW>" (plain OLD when unchanged).
      (defun ~lstBlocks (/ idx)
        (if (not (eq (setq idx (get_tile "lstBlocks")) ""))
          (progn
            (start_list "lstAttribs")
            (mapcar 'add_list
                    (mapcar
                      (function
                        (lambda (att)
                          (if (eq (car att) (cadr att))
                            (car att)
                            (strcat (car att) " <" (cadr att) ">"))))
                      (cdr (nth (atoi idx) Settings))))
            (end_list)
            (mode_tile "lstAttribs" 0)
            (set_tile "lstAttribs" ""))
          (progn
            (start_list "lstAttribs")
            ;; NOTE(review): idxLst is not declared local and is not read
            ;; elsewhere in this function -- looks like leftover code; verify.
            (setq idxLst nil)
            (end_list)
            (mode_tile "lstAttribs" 1)
            (set_tile "lstAttribs" "")))
        (~lstAttribs))

      ;; Action on selection "Old Attribute Names" changed: load the current
      ;; new-name into the edit box, or disable editing when nothing selected.
      (defun ~lstAttribs (/ idx1 idx2 blk att)
        (if (and (not (eq (setq idx1 (get_tile "lstBlocks")) ""))
                 (not (eq (setq idx2 (get_tile "lstAttribs")) ""))
                 (setq blk (nth (atoi idx1) Settings))
                 (setq att (nth (atoi idx2) (cdr blk))))
          (progn
            (set_tile "edtName" (cadr att))
            (mode_tile "edtName" 0)
            (mode_tile "btnSave" 0))
          (progn
            (set_tile "edtName" "")
            (mode_tile "edtName" 1)
            (mode_tile "btnSave" 1))))

      ;; Action on button "Save" clicked: store the upper-cased new name for
      ;; the selected tag and refresh both lists, keeping the selection.
      (defun ~btnSave (/ idx1 idx2 blk att)
        (if (and (not (eq (setq idx1 (get_tile "lstBlocks")) ""))
                 (not (eq (setq idx2 (get_tile "lstAttribs")) ""))
                 (setq idx1 (atoi idx1)
                       idx2 (atoi idx2))
                 (setq blk (nth idx1 Settings))
                 (setq att (nth idx2 (cdr blk))))
          (progn
            (setq Settings
                   (subst (cons (car blk)
                                (_ReplaceN (list (car att) (strcase (get_tile "edtName"))) idx2 (cdr blk)))
                          blk
                          Settings))
            (~lstBlocks)
            (set_tile "lstAttribs" (itoa idx2))
            (~lstAttribs))))

      ;; Populate both list boxes and wire the tile callbacks.
      (start_list "lstCurrent")
      (mapcar 'add_list (mapcar 'car (setq Blocks (RenAtt:GetBlocks))))
      (end_list)
      (setq Settings *RenAtt:Settings*)
      (start_list "lstBlocks")
      (mapcar 'add_list (mapcar 'car Settings))
      (end_list)
      (action_tile "btnCurrentAdd" "(~btnCurrentAdd)")
      (action_tile "btnCurrentRem" "(~btnCurrentRem)")
      (action_tile "lstBlocks" "(~lstBlocks)")
      (action_tile "lstBlocks" "(~lstBlocks)")
      (action_tile "lstAttribs" "(~lstAttribs)")
      (action_tile "btnSave" "(~btnSave)")
      ;; 1 = OK: commit and save. 2 = Clear: optionally delete every DAT
      ;; file found, then reopen the dialog. Anything else: discard.
      (cond
        ((= (setq s (start_dialog)) 1)
         (setq *RenAtt:Settings* Settings)
         (RenAtt:SaveSettings nil)
         (unload_dialog dcl))
        ((= s 2)
         (if (eq (progn (initget "Yes No")
                        (getkword "Are you sure you want to clear all block settings? [Yes/No] <No>: "))
                 "Yes")
           (while (setq s (cond ((findfile "RenAttrib.DAT"))
                                ((findfile (strcat (getvar "RoamableRootPrefix") "RenAttrib.DAT")))))
             (vl-file-delete s)))
         (unload_dialog dcl)
         (c:RenAttrSet))
        (t (unload_dialog dcl)))
      ))
  (princ))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
;|«Visual LISP© Format Options»
|
||||||
|
(120 2 1 0 nil "end of " 100 9 0 0 0 nil T nil T)
|
||||||
|
;*** DO NOT add text below the comment! ***|;
|
||||||
Loading…
x
Reference in New Issue
Block a user