BNA8/.resources/bb21db71e8066124d71b62618a2dd95d0a05b8714db2020ca41f4d7b281a6ddd

696 lines
19 KiB
Plaintext

################################################################
################################################################
## DESC: For generating stats tables from csv tagpath tables
################################################################
################################################################
#from __future__ import with_statement
import codecs
import copy
import csv
import os
import re
import StringIO
import uuid
import zipfile
from contextlib import closing
from os import listdir, makedirs
from os.path import isfile, join
from org.python.core import PyNone
from sys import platform
from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED
from com.inductiveautomation.ignition.common.model import ApplicationScope
#############
### Constants
#############
# Report storage roots: Config holds report definitions (.csv/.sql),
# Data holds generated reports, Temp is scratch space for zip downloads.
CP = "D:\\Ignition\\Reports\\Config"
DP = "D:\\Ignition\\Reports\\Data"
TP = "D:\\Ignition\\Reports\\Temp"
if not os.path.exists("D:\\"):
    # Not on actual production server, so store locally:
    CP = "Ignition\\Reports\\Config"
    DP = "Ignition\\Reports\\Data"
    TP = "Ignition\\Reports\\Temp"
# Directories are only created when this module loads in gateway scope.
if ApplicationScope.isGateway(ApplicationScope.getGlobalScope()):
    if not os.path.exists(CP):
        makedirs(CP)
    if not os.path.exists(DP):
        makedirs(DP)
    if not os.path.exists(TP):
        makedirs(TP)
# Template for a Perspective table column config. Consumers deep-copy this
# and fill in "field" (see generateColumn / retrieveTags); do not mutate it
# in place.
defaultColumn = {
    "field": "",
    "visible": True,
    "editable": False,
    "render": "auto",
    "justify": "center",
    "align": "center",
    "resizable": True,
    "sortable": True,
    "boolean": "checkbox",
    "number": "value",
    "numberFormat": "none",#"0,0.##",
    "dateFormat": "YYYY-MM-DD HH:mm:ss",
    "header": {
        "justify": "center",
        "align": "center",
        "style": {
            "classes": "table/column-header"#,
            #"backgroundColor": "#60B0D2",
            #"borderBottomColor": "#D5D5D5",
            #"borderBottomStyle": "solid",
            #"borderBottomWidth": "1px",
            #"borderRightColor": "#D5D5D5",
            #"borderRightStyle": "solid",
            #"borderRightWidth": "1px"
        }
    }
}
# Template for a Perspective chart series config. generateSeries() deep-copies
# this and fills in name/label/data before applying caller overrides. The
# "render" key selects which of the renderer sub-configs below
# (candlestick / column / line / stepLine) is active.
defaultSeries = {
    "name": "",
    "label": {
        "text": ""
    },
    "visible": True,
    "hiddenInLegend": False,
    "defaultState": {
        "visible": True
    },
    "data": {
        "source": "",
        "x": "",
        "y": ""
    },
    "xAxis": "",
    "yAxis": "",
    "zIndex": 0,
    "tooltip": {
        "enabled": True,
        "text": "{name}: [bold]{valueY}[/]",
        "cornerRadius": 3,
        "pointerLength": 4,
        "background": {
            "color": "",
            "opacity": 1
        }
    },
    # Active renderer; must name one of the sub-configs below.
    "render": "column",
    "candlestick": {
        "open": {
            "x": "",
            "y": ""
        },
        "high": {
            "x": "",
            "y": ""
        },
        "low": {
            "x": "",
            "y": ""
        },
        "appearance": {
            "fill": {
                "color": "",
                "opacity": 1
            },
            "stroke": {
                "color": "",
                "opacity": 1,
                "width": 1
            },
            "stacked": False,
            "deriveFieldsFromData": {
                "fill": {
                    "color": "",
                    "opacity": ""
                },
                "stroke": {
                    "color": "",
                    "opacity": "",
                    "width": ""
                }
            },
            "heatRules": {
                "enabled": False,
                "max": "",
                "min": "",
                "dataField": ""
            }
        }
    },
    "column": {
        "open": {
            "x": "",
            "y": ""
        },
        "appearance": {
            "fill": {
                "color": "",
                "opacity": 1
            },
            "stroke": {
                "color": "",
                "opacity": 1,
                "width": 1
            },
            "stacked": False,
            "width": None,
            "height": None,
            "deriveFieldsFromData": {
                "fill": {
                    "color": "",
                    "opacity": ""
                },
                "stroke": {
                    "color": "",
                    "opacity": "",
                    "width": ""
                }
            },
            "heatRules": {
                "enabled": False,
                "max": "",
                "min": "",
                "dataField": ""
            }
        }
    },
    "line": {
        "open": {
            "x": "",
            "y": ""
        },
        "appearance": {
            "connect": True,
            "tensionX": 1,
            "tensionY": 1,
            "minDistance": 0.5,
            "stroke": {
                "width": 3,
                "opacity": 1,
                "color": "",
                "dashArray": ""
            },
            "fill": {
                "opacity": 0,
                "color": ""
            },
            # Bullets are off by default for plain lines.
            "bullets": [
                {
                    "enabled": False,
                    "render": "circle",
                    "width": 10,
                    "height": 10,
                    "label": {
                        "text": "{value}",
                        "position": {
                            "dx": 0,
                            "dy": 0
                        }
                    },
                    "fill": {
                        "color": "",
                        "opacity": 1
                    },
                    "stroke": {
                        "color": "",
                        "opacity": 1,
                        "width": 1
                    },
                    "rotation": 0,
                    "tooltip": {
                        "enabled": True,
                        "text": "{name}: [bold]{valueY}[/]",
                        "cornerRadius": 3,
                        "pointerLength": 4,
                        "background": {
                            "color": "",
                            "opacity": 1
                        }
                    },
                    "deriveFieldsFromData": {
                        "fill": {
                            "color": "",
                            "opacity": ""
                        },
                        "stroke": {
                            "color": "",
                            "opacity": "",
                            "width": ""
                        },
                        "rotation": ""
                    },
                    "heatRules": {
                        "enabled": False,
                        "max": 100,
                        "min": 2,
                        "dataField": ""
                    }
                }
            ]
        }
    },
    "stepLine": {
        "open": {
            "x": "",
            "y": ""
        },
        "appearance": {
            "connect": True,
            "tensionX": 1,
            "tensionY": 1,
            "minDistance": 0.5,
            "stroke": {
                "width": 3,
                "opacity": 1,
                "color": "",
                "dashArray": ""
            },
            "fill": {
                "opacity": 0,
                "color": ""
            },
            # Unlike "line", step lines show bullets by default.
            "bullets": [
                {
                    "enabled": True,
                    "render": "circle",
                    "width": 10,
                    "height": 10,
                    "label": {
                        "text": "{value}",
                        "position": {
                            "dx": 0,
                            "dy": 0
                        }
                    },
                    "fill": {
                        "color": "",
                        "opacity": 1
                    },
                    "stroke": {
                        "color": "",
                        "opacity": 1,
                        "width": 1
                    },
                    "rotation": 0,
                    "tooltip": {
                        "enabled": True,
                        "text": "{name}: [bold]{valueY}[/]",
                        "cornerRadius": 3,
                        "pointerLength": 4,
                        "background": {
                            "color": "",
                            "opacity": 1
                        }
                    },
                    "deriveFieldsFromData": {
                        "fill": {
                            "color": "",
                            "opacity": ""
                        },
                        "stroke": {
                            "color": "",
                            "opacity": "",
                            "width": ""
                        },
                        "rotation": ""
                    },
                    "heatRules": {
                        "enabled": False,
                        "max": 100,
                        "min": 2,
                        "dataField": ""
                    }
                }
            ]
        }
    }
}
#######################################################
#######################################################
#######################################################
#### Functions
#######################################################
def checkRole(filename, roles=[]):
    """Return True if a user holding *roles* may see the file *filename*.

    Visibility is encoded in the filename's first character: "@" marks
    admin-only files, "#" marks maintenance-level files, anything else is
    visible to everyone. Role paths like "site/Administrator" are reduced
    to their last "/"-segment before checking.
    """
    shortRoles = [r.split("/")[-1] for r in roles]
    if "Administrator" in shortRoles:
        # Administrators see everything, including "@"-prefixed files.
        return True
    leading = filename[0]
    if leading == "#":
        return "Maintenance" in shortRoles
    return leading != "@"
def checkCompatibility(filename, extensions=[".csv", ".sql"]):
    """Return True if *filename* ends with one of *extensions* (case-insensitive).

    Fix: the previous implementation compared only the last FOUR characters
    (filename[-4:]), which silently failed for any caller-supplied extension
    whose length is not four (e.g. [".json"]). Behavior for the default
    extensions is unchanged.
    """
    # str.endswith accepts a tuple, so one call covers every allowed extension.
    return filename.lower().endswith(tuple(ext.lower() for ext in extensions))
def generateColumn(field, extra={}):
    """Build a table-column config for *field* from the defaultColumn template.

    Derives a pretty header title, switches the number format to percent for
    "(%)"-suffixed titles, then applies any top-level *extra* overrides.
    """
    col = copy.deepcopy(defaultColumn)
    col["field"] = field
    # Human-readable header title:
    prettified = utils.prettyTitle(field)
    col["header"]["title"] = prettified
    if prettified.endswith("(%)"):
        # Percentage columns render with a percent number format.
        col["numberFormat"] = "0.##%"
    # Caller-supplied overrides win over the template values:
    for key, override in extra.iteritems():
        col[key] = override
    return col
def generateSeries(field, extra={}):
    """Build a chart-series config for *field* from the defaultSeries template."""
    cfg = copy.deepcopy(defaultSeries)
    cfg["name"] = field
    cfg["data"]["y"] = field
    # Human-readable legend label:
    cfg["label"]["text"] = utils.prettyTitle(field)
    # Caller-supplied top-level overrides win over the template values:
    for key, override in extra.iteritems():
        cfg[key] = override
    return cfg
def generateZip(filenames, rootPath):
    """Zip up to 2000 files under *rootPath* and return their bytes.

    Returns {'content': <zip bytes>} on success or {'err': <message>} on
    failure. Archive entry names have the greatest common directory prefix
    stripped.

    Fixes: the bare `except` is narrowed to EnvironmentError (IOError/OSError),
    and the temp zip file is now removed on failure instead of being leaked
    in the Temp directory.
    """
    if len(filenames) > 2000:
        return {'err': "Too many files! Max of 2000 allowed."}
    # Encode filenames (since they are unicode):
    filenames = [filename.encode('UTF-8', 'replace') for filename in filenames]
    # Determine greatest common prefix to strip out for filenames in zip:
    prefix = os.path.commonprefix(filenames)
    if prefix.rfind('\\') >= 0:
        prefix = prefix[:prefix.rfind('\\')+1]
    else:
        prefix = ""
    prefixOmmit = len(prefix)
    # Add files to a uniquely-named temp zip:
    zippath = os.path.join(TP, str(uuid.uuid4()) + ".zip")
    try:
        with ZipFile(zippath, "w", ZIP_DEFLATED) as z:
            for filename in filenames:
                filepath = os.path.join(rootPath, filename)
                z.write(filepath, filename[prefixOmmit:])
    except EnvironmentError:
        # A member file could not be read; clean up the partial zip.
        if os.path.exists(zippath):
            os.remove(zippath)
        return {'err': "File doesn't exist!"}
    # Read contents of zip, then remove the temp file:
    content = system.file.readFileAsBytes(zippath)
    os.remove(zippath)
    return {'content': content}
def getConfigs(configPath = CP, roles = []):
    """Return the sorted list of config filenames under *configPath* that the
    given roles may see (role filter + supported-extension filter)."""
    if roles is None:
        roles = []
    matches = []
    for entry in listdir(configPath):
        if not isfile(join(configPath, entry)):
            continue
        if checkRole(entry, roles) and checkCompatibility(entry):
            matches.append(entry)
    matches.sort()
    return matches
def uploadConfig(filename, contents, configPath = CP):
    """Write *contents* to *filename* under *configPath*.

    Returns an error string when the extension is unsupported, None otherwise.
    NOTE: overwrite protection was deliberately disabled at some point
    (an existing file is silently replaced).
    """
    if not checkCompatibility(filename):
        return "File extension not compatible!\r\nThe supported extensions are: csv, sql"
    system.file.writeFile(join(configPath, filename), contents, False)
def downloadConfig(filename, configPath = CP):
    """Read one config file as UTF-8 text.

    Returns {'contents': <text>} on success, {'err': <message>} when missing.
    """
    path = join(configPath, filename)
    if system.file.fileExists(path):
        return {'contents': system.file.readFileAsString(path, 'UTF-8')}
    return {'err': "Config file doesn't exist!"}
def downloadConfigs(filenames, configPath = CP):
    """Push the selected config files to the Perspective client.

    A single file downloads as-is; several files are bundled into
    "configs.zip" via generateZip(). Returns an error string on failure,
    None on success or when *filenames* is empty.
    """
    count = len(filenames)
    if count == 1:
        name = filenames[0]
        path = join(configPath, name)
        if not system.file.fileExists(path):
            return "Config file doesn't exist!"
        system.perspective.download(name, system.file.readFileAsString(path, 'UTF-8'))
    elif count > 1:
        bundle = generateZip(filenames, configPath)
        if "err" in bundle:
            return bundle["err"]
        system.perspective.download("configs.zip", bundle["content"])
def deleteConfigs(filenames, configPath = CP):
    """Delete the given config files under *configPath*.

    Stops at the first missing file and returns an error string; files
    processed before that point are already deleted. Returns None on success.
    """
    for name in filenames:
        path = join(configPath, name)
        if not system.file.fileExists(path):
            # Aborts the rest of the batch.
            return "Config file doesn't exist!"
        os.remove(path)
def getOldReports(dataPath = DP, roles = []):
    """Walk *dataPath* recursively and return (relativeDir, filename) tuples
    for every CSV report visible to the given roles."""
    if roles is None:
        roles = []
    found = []
    for root, dirs, files in os.walk(dataPath):
        # Path of this directory relative to dataPath (Windows separators).
        relative = root[len(dataPath):].strip("\\")
        for name in files:
            if not checkRole(name, roles):
                continue
            if checkCompatibility(name, extensions=[".csv"]):
                found.append((relative, name))
    return found
def downloadOldReport(filename, dataPath = DP):
    """Read one archived report file as UTF-8 text.

    Returns {'contents': <text>} on success, {'err': <message>} when missing.
    """
    path = join(dataPath, filename)
    if system.file.fileExists(path):
        return {'contents': system.file.readFileAsString(path, 'UTF-8')}
    return {'err': "Report file doesn't exist!"}
def downloadOldReports(filenames, dataPath = DP):
    """Bundle up to 2000 old report files into an in-memory zip.

    Returns {'contents': <zip bytes>} on success or {'err': <message>} on
    failure. Archive entry names have the greatest common directory prefix
    stripped.

    Fix: a missing member file previously raised an uncaught exception out of
    the write loop; it now returns an {'err': ...} dict, consistent with
    generateZip().
    """
    if len(filenames) > 2000:
        return {'err': "Too many files! Max of 2000 allowed."}
    # Encode filenames (since they are unicode):
    filenames = [filename.encode('UTF-8', 'replace') for filename in filenames]
    # Determine greatest common prefix to strip out for filenames in zip:
    prefix = os.path.commonprefix(filenames)
    if prefix.rfind('\\') >= 0:
        prefix = prefix[:prefix.rfind('\\')+1]
    else:
        prefix = ""
    prefixOmmit = len(prefix)
    # Build the zip entirely in memory (no temp file needed):
    zmem = StringIO.StringIO()
    with closing(ZipFile(zmem, "w", ZIP_DEFLATED)) as z:
        for filename in filenames:
            filepath = os.path.join(dataPath, filename)
            try:
                z.write(filepath, filename[prefixOmmit:])
            except EnvironmentError:
                return {'err': "File doesn't exist!"}
    # Read contents of zip:
    zmem.seek(0)
    contents = zmem.read()
    # Send zip:
    return {'contents': contents}
def retrieveTags(headers, values, plcTagPaths, plcTagPos, ignitionTagPaths, ignitionTagPos, configPath = CP, server = "Ignition OPC UA Server"):
    """Fill the *values* row-dicts with live tag readings and build columns.

    plcTagPos / ignitionTagPos map each tag path (by list index) to its
    (row index, column name) destination in *values*. Cells whose read
    returns None keep their existing placeholder value. Returns
    {'data': values, 'columns': [...]}.

    Fix: the original issued every OPC read and every Ignition tag read
    TWICE — the first batch (plcResults/ignitionResults) was discarded and
    the reads were repeated inside the loops. The precomputed results are
    now actually used, halving the tag traffic.
    """
    # Read each tag list exactly once:
    plcResults = system.opc.readValues(server, plcTagPaths)
    ignitionResults = system.tag.readAll(ignitionTagPaths)
    # Place the PLC tag values:
    for i, result in enumerate(plcResults):
        (r, c) = plcTagPos[i]
        if result.value is not None:
            # Retrieved a value, so add it:
            values[r][c] = result.value
    # Place the Ignition tag values:
    for i, result in enumerate(ignitionResults):
        (r, c) = ignitionTagPos[i]
        if result.value is not None:
            val = result.value
            try:
                # Numeric tags are rounded to 2 decimals for display;
                # non-numeric values pass through unchanged.
                val = round(float(val), 2)
            except (TypeError, ValueError):
                pass
            values[r][c] = val
    # Generate one column config per header:
    columns = []
    for col in headers:
        column = copy.deepcopy(defaultColumn)
        column['field'] = col
        columns.append(column)
    # Return data table:
    return {'data': values, 'columns': columns}
def readDataTable(headers, rows, configPath = CP, server = "Ignition OPC UA Server"):
    """Resolve a parsed CSV tag table into row dicts plus column configs.

    Each cell is interpreted by its first character:
      "["  -> a tag path; "[ignition]..." is read via system.tag, anything
              else via OPC. The cell gets a red "ERR: NOT FOUND" placeholder
              that retrieveTags() overwrites with the live value.
      "'"  -> a literal string (the leading quote is stripped).
      else -> used as-is (including None / empty).
    Returns retrieveTags()'s {'data': ..., 'columns': ...} dict.
    """
    # Default results: one dict per input row, keyed by column NAME.
    values = [{} for row in range(len(rows))]
    # Extract tags:
    plcTagPaths = []
    plcTagPos = []        # parallel to plcTagPaths: (row index, column name)
    ignitionTagPaths = []
    ignitionTagPos = []   # parallel to ignitionTagPaths: (row index, column name)
    for r, row in enumerate(rows):
        # Any trailing cells beyond the header count are ignored.
        for c, val in enumerate(row[:len(headers)]):
            column = headers[c]
            if val is not None and len(val) > 0 and val[0] == "[":
                # Default value, shown if the tag read fails or returns None:
                values[r][column] = {
                    "value": "ERR: NOT FOUND",
                    "style": { "background": "#F88" }
                }
                # Determine type of tag:
                if val.lower().startswith("[ignition]"):
                    # Is an Ignition tag -- strip the 10-char "[ignition]" prefix
                    ignitionTagPaths.append(val[10:])
                    ignitionTagPos.append((r, column))
                else:
                    # Is a PLC tag (OPC path, provider prefix kept)
                    plcTagPaths.append(val)
                    plcTagPos.append((r, column))
            elif val is not None and len(val) > 0 and val[0] == "'":
                # Is not a tag, treat as string (leading quote removed):
                values[r][column] = val[1:]
            else:
                # Is not a tag, treat as value:
                values[r][column] = val
    # Retrieve and return data table:
    return retrieveTags(headers, values, plcTagPaths, plcTagPos, ignitionTagPaths, ignitionTagPos, configPath, server)
def readJsonTable(headers, json, configPath = CP, server = "Ignition OPC UA Server"):
    """Resolve a list of row dicts (*json*) the same way readDataTable() does.

    Cells starting with "[" are tag paths, cells starting with "'" are quoted
    literals, everything else passes through. Returns retrieveTags()'s
    {'data': ..., 'columns': ...} dict.

    NOTE(review): cells are probed with len(val), which assumes every non-None
    cell is a string (or sequence) -- a numeric JSON value would raise
    TypeError here. Confirm callers only supply string cells.
    """
    # Default results: one dict per input row, keyed by column NAME.
    values = [{} for row in range(len(json))]
    # Extract tags:
    plcTagPaths = []
    plcTagPos = []        # parallel to plcTagPaths: (row index, column name)
    ignitionTagPaths = []
    ignitionTagPos = []   # parallel to ignitionTagPaths: (row index, column name)
    for r, row in enumerate(values):
        for column in headers:
            # Missing keys are treated like empty cells.
            val = json[r].get(column, None)
            if val is not None and len(val) > 0 and val[0] == "[":
                # Default value, shown if the tag read fails or returns None:
                row[column] = {
                    "value": "ERR: NOT FOUND",
                    "style": { "background": "#F88" }
                }
                # Determine type of tag:
                if val.lower().startswith("[ignition]"):
                    # Is an Ignition tag -- strip the 10-char "[ignition]" prefix
                    ignitionTagPaths.append(val[10:])
                    ignitionTagPos.append((r, column))
                else:
                    # Is a PLC tag (OPC path, provider prefix kept)
                    plcTagPaths.append(val)
                    plcTagPos.append((r, column))
            elif val is not None and len(val) > 0 and val[0] == "'":
                # Is not a tag, treat as string (leading quote removed):
                row[column] = val[1:]
            else:
                # Is not a tag, treat as value:
                row[column] = val
    # Retrieve and return data table:
    return retrieveTags(headers, values, plcTagPaths, plcTagPos, ignitionTagPaths, ignitionTagPos, configPath, server)
def readCSVTable(filename, configPath = CP, server = "Ignition OPC UA Server"):
    """Load a CSV tag-table config and resolve it via readDataTable().

    Returns the resolved {'data': ..., 'columns': ...} dict, or
    {'err': ...} when the config file is missing.
    """
    path = join(configPath, filename)
    if not system.file.fileExists(path):
        return {'err': "Config file doesn't exist!"}
    with codecs.open(path, mode='rb', encoding='UTF-8') as csvfile:
        reader = csv.reader(csvfile)
        # First row is the header; the rest are data rows.
        headers = reader.next()
        body = list(reader)
    return readDataTable(headers, body, configPath, server)
def readSQLTable(filename, configPath = CP):
    """Run the SQL stored in a config file and return a data table.

    Returns {'data': <row dicts>, 'columns': <column configs>} on success,
    or {'err': ...} when the file is missing or the query fails.

    Fixes: the Python-3-removed `<>` operator is replaced with `is not None`,
    and columns are now deep-copied (the shallow defaultColumn.copy() made
    every column share the template's nested "header" dict -- inconsistent
    with retrieveTags()).
    """
    # Open SQL file:
    filepath = join(configPath, filename)
    try:
        sql = system.file.readFileAsString(filepath, 'UTF-8')
    except:
        # Broad catch kept deliberately: Jython can surface Java exceptions
        # here that don't subclass Python's Exception.
        return {'err': "Config file doesn't exist!"}
    try:
        data = system.db.runQuery(sql).getUnderlyingDataset()
        headers = [name for name in data.getColumnNames()]
        def parse(r, c):
            # Render database NULLs as the literal string "null".
            v = data.getValueAt(r, c)
            return v if v is not None else "null"
        values = [{headers[c]: parse(r, c) for c in range(data.getColumnCount())} for r in range(data.getRowCount())]
    except:
        # Broad catch kept: any query/dataset failure reports the SQL back.
        return {'err': "Bad SQL:\r\n"+sql}
    # Generate one column config per header:
    columns = []
    for col in headers:
        column = copy.deepcopy(defaultColumn)
        column['field'] = col
        columns.append(column)
    # Return data table:
    return {'data': values, 'columns': columns}
def readTable(filename, configPath = CP, server = "Ignition OPC UA Server"):
    """Dispatch a config file to its reader by extension.

    .csv -> readCSVTable (tag table), .sql -> readSQLTable (query table);
    anything else yields an {'err': ...} dict.
    """
    ext = filename.split(".")[-1].lower()
    if ext == "csv":
        return readCSVTable(filename, configPath, server)
    if ext == "sql":
        return readSQLTable(filename, configPath)
    return {'err': "File extension not compatible!\r\nThe supported extensions are: csv, sql"}
#def getAlarmHitListBySystem(sys, startDate, endDate, projectName=system.util.getProjectName()):
# # Run the Named Query
# params = {"startDate": startDate, "endDate": endDate, "system": sys}
# try:
# # Gateway Scope:
# return system.db.runNamedQuery(projectName, "AlarmHitListBySystem", params)
# except:
# # Project Scope:
# return system.db.runNamedQuery("AlarmHitListBySystem", params)
def importFromCSV(filename, dataPath = DP, numDefault=0, strDefault=""):
    """Load a report CSV into the editor's dataset bundle.

    Returns a dict with 'values' (the CSV cells as strings), 'text', 'errors'
    and 'isTag' companion grids (all Ignition datasets), plus the default
    placeholders -- or {'err': ...} when the file cannot be opened.

    Fixes two defects: the companion grids were built with `[[...]] * n`, so
    every row aliased the SAME inner list (and isTagValues aliased errValues
    outright -- any later per-cell mutation would corrupt all rows/grids);
    and the "doesn't exist" return was unreachable after the unconditional
    return above it -- a missing file now reports the error instead of
    raising out of codecs.open().
    """
    # Open CSV file:
    filepath = join(dataPath, filename)
    try:
        with codecs.open(filepath, mode='rb', encoding='UTF-8') as csvfile:
            reader = csv.reader(csvfile)
            headers = reader.next()
            rows = list(reader)
    except EnvironmentError:
        return {'err': "Report file doesn't exist!"}
    # Create all other types -- each row is an independent list:
    values = [[str(v) for v in row] for row in rows]
    textValues = [[""] * len(headers) for _ in values]
    errValues = [[False] * len(headers) for _ in values]
    isTagValues = [[False] * len(headers) for _ in values]
    # Return data table:
    return {
        'values': system.dataset.toDataSet(headers, values),
        'text': system.dataset.toDataSet(headers, textValues),
        'errors': system.dataset.toDataSet(headers, errValues),
        'isTag': system.dataset.toDataSet(headers, isTagValues),
        'numDefault': numDefault, 'strDefault': strDefault
    }
def exportToCSV(filename, dataset, text=None, numDefault=None, strDefault=None):
    """Write *dataset* to *filename* as a fully-quoted CSV.

    When a *text* grid is supplied, a cell's raw text overrides the dataset
    value whenever the text cell is non-empty or the dataset value equals one
    of the defaults (numDefault / strDefault). With no *text* grid the
    comparison runs against the dataset itself, which yields the same values.
    """
    with open(filename, mode='wb') as csv_file:
        writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
        # Header row first:
        writer.writerow(dataset.getColumnNames())
        # Rebind to an indexable PyDataSet for row/column access below.
        dataset = system.dataset.toPyDataSet(dataset)
        if text is not None:
            text = system.dataset.toPyDataSet(text)
        else:
            # No text grid: fall back to the dataset so check() is a no-op.
            text = dataset
        def check(v, r, c):
            # Prefer the text-grid cell when it is non-empty, or when the
            # dataset value is just a default placeholder. The broad except
            # deliberately falls back to the raw value (e.g. len() on a
            # non-sequence cell, or mismatched grid dimensions).
            try:
                if len(text[r][c]) > 0 or v in [numDefault, strDefault]:
                    return text[r][c]
            except:
                pass
            return v
        for row in [[check(v, r, c) for c, v in enumerate(row)] for r, row in enumerate(dataset)]:
            writer.writerow(row)