Browse Source

refactor structure of toolhandling

master
Ulrich Carmesin 3 years ago
parent
commit
32b55ab83d
  1. 18
      basic/constants.py
  2. 51
      basic/toolHandling.py
  3. 3
      test/test_path.py
  4. 14
      test/test_tdata.py
  5. 20
      test/test_toolhandling.py
  6. 22
      utils/cli_abstract.py
  7. 4
      utils/config_tool.py
  8. 36
      utils/db_abstract.py
  9. 9
      utils/dbcsv_tool.py
  10. 10
      utils/dbmysql_tool.py
  11. 4
      utils/dbspark_tool.py
  12. 10
      utils/match_tool.py
  13. 20
      utils/path_tool.py
  14. 18
      utils/tdata_tool.py

18
basic/constants.py

@ -7,11 +7,14 @@
"""
The constants describe the keywords of the main data structures; these are
* the configurations of
* program-configuration for general technical variables in tools - stored in internal conf-folder
it represents the general knowledge of this test-automatism
* basic-configuration for global variables in job - stored in external conf-folder
it represents the knowledge of your test-topic
* comp-configuration for component-attributes - stored in internal component-folder in
+ ddl-configuration of an entity of the component as attributes - stored in internal component-folder
+ environment-config for connection-attributes of the component - stored in external environment-folder
* tool-configuration
it represents the application-knowledge and the knowledge of the application-installation
* test-specification with testdata - stored in external testdata-folder
* the internal datastructure
@ -34,6 +37,13 @@ DATA_NODE_OPTION = "_option"
""" This constant defines main node in the testdata for testcase specific parameters """
DATA_NODE_DDL = "ddl"
""" This constant defines """
DATA_NODE_COMP = "comp"
""" This constant defines """
DATA_NODE_PAR = "par"
""" This constant defines """
TOPIC_NODE_DB = "db"
TOPIC_NODE_CLI = "cli"
TOPIC_NODE_API = "api"
# the configuration of a component or tool
# entity { : variable name of the group, basic, component-name or tool-name
@ -78,7 +88,9 @@ ATTR_ARTS_LOB = "lob"
SUBJECT_DB = "databases" # | | | | # | db*_tools, match_tool
SUBJECT_CONN = "conn" # | | x | | | conn_tool, db*_tools, cli*_toold
ATTR_CONN_DBTYPE = "dbtype" # | x | x | | x | conn_tool, toolHandling, db*_tools
ATTR_CONN_CLITYPE = "clitype" # | x | x | | x | conn_tool, toolHandling, cli*_tools
ATTR_TYPE = "type" # | x | x | | x | conn_tool, toolHandling, db*_tools
RULE_ACCEPTANCE = "acceptance" # | | | | x | tools_match
ATTR_STEP_ARGS = "args"
ATTR_DATA_REF = "_nr"

51
basic/toolHandling.py

@ -10,37 +10,50 @@ import basic.constants as B
# -------------------------------------------------
def hasAttr(o, name):
print("hasAttr " + str(type(o))+" "+name)
if (isinstance(o, dict)):
if (name in o.keys()):
print("hasAttr dict ok " + str(type(o)))
return True
print("hasAttr dict "+str(type(o)))
elif (isinstance(o, list)):
print("hasAttr list "+str(type(o)))
elif hasattr(o, name):
print("hasAttr class ok "+str(type(o)))
return True
return False
def getAttr(o, name):
if (isinstance(o, dict)):
if (name in o.keys()):
return o[name]
elif (isinstance(o, list)):
print("getAttr c list "+str(type(o)))
elif hasattr(o, name):
return o.get(name)
"""
Toolmanager
"""
def getCompAttr(comp, topic, attr, table=""):
out = ""
print(topic+" "+attr+" "+str(comp))
if hasAttr(comp.conf[B.SUBJECT_CONN], topic) and hasAttr(comp.conf[B.SUBJECT_CONN][topic], attr):
return getAttr(comp.conf[B.SUBJECT_CONN][topic], attr)
if len(table) > 1 and hasAttr(comp.conf[B.SUBJECT_ARTS][topic], table) \
and hasAttr(comp.conf[B.SUBJECT_ARTS][topic][table], attr):
return getAttr(comp.conf[B.SUBJECT_ARTS][topic][table], attr)
if hasAttr(comp.conf[B.SUBJECT_ARTS], topic) and hasAttr(comp.conf[B.SUBJECT_ARTS][topic], attr):
print("attr "+attr+" vorhanden")
return getAttr(comp.conf[B.SUBJECT_ARTS][topic], attr)
raise LookupError(topic+"."+attr+" is not set in comp " + comp.name)
# class ToolManager:
def getDbTool(comp):
job = basic.program.Job.getInstance()
verify = int(job.getDebugLevel("db_tool"))
if not hasAttr(comp.conf[B.SUBJECT_CONN], "dbtype"):
if hasAttr(comp.conf[B.SUBJECT_CONN], "types") and hasAttr(comp.conf[B.SUBJECT_CONN]["types"], "dbtype"):
dbtype = comp.conf[B.SUBJECT_CONN]["types"]["dbtype"]
else:
raise LookupError("dbtype is not set in comp " + comp.name)
else:
dbtype = comp.conf["conn"]["dbtype"]
dbtype = getCompAttr(comp, B.TOPIC_NODE_DB, B.ATTR_TYPE, "")
toolname = "db"+dbtype+"_tool"
print("toolname "+toolname)
filepath = os.path.join(job.conf.confs["paths"]["program"], "utils", toolname+".py")
print("toolname "+filepath)
filepath = os.path.join(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_PROGRAM], "utils", toolname+".py")
#comp.m.debug(verify, "toolname "+filepath)
if not os.path.exists(filepath):
raise FileNotFoundError("file for tool "+toolname+" does not exist "+filepath)
cmodul = importlib.import_module("utils."+toolname)
@ -52,12 +65,10 @@ def getDbTool(comp):
def getCliTool(comp):
job = basic.program.Job.getInstance()
verify = int(job.getDebugLevel("db_tool"))
if not hasAttr(comp.conf["conn"], "clitype"):
raise LookupError("dbtype is not set in comp " + comp.name)
toolname = "cli"+comp.conf["conn"]["clitype"]+"_tool"
print("toolname "+toolname)
filepath = os.path.join(job.conf.confs["paths"]["program"], "utils", toolname+".py")
print("toolname "+filepath)
clitype = getCompAttr(comp, B.TOPIC_NODE_CLI, B.ATTR_TYPE, "")
toolname = "cli"+clitype+"_tool"
filepath = os.path.join(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_PROGRAM], "utils", toolname+".py")
#comp.m.debug(verify, "toolname "+filepath)
if not os.path.exists(filepath):
raise FileNotFoundError("file for tool "+toolname+" does not exist "+filepath)
cmodul = importlib.import_module("utils."+toolname)

3
test/test_path.py

@ -9,6 +9,9 @@ class MyTestCase(unittest.TestCase):
def runTest(self):
self.test_path()
def test_key(self):
pass
def test_path(self):
job = Job("unit")
args = {"application": "TEST", "environment": "ENV01", "modus": "unit", "loglevel": "debug", "tool": "job_tool",

14
test/test_tdata.py

@ -6,7 +6,8 @@ class MyTestCase(unittest.TestCase):
def runTest(self):
self.test_tdata()
def test_tdata(self):
def xtest_tdata(self):
job = basic.program.Job("unit")
args = {"application": "TEST", "application": "ENV01", "modus": "unit", "loglevel": "debug",
"tool": "job_tool", "tdtyp": "csv", "tdsrc": "implement", "tdname": "firstunit",
@ -20,6 +21,17 @@ class MyTestCase(unittest.TestCase):
tdata = t.getTestdata()
self.assertEqual(("steps" in tdata), True)
def test_getCsvSpec(self):
job = basic.program.Job("unit")
tdata = {}
args = {"application": "TEST", "application": "ENV01", "modus": "unit", "loglevel": "debug",
"tdtyp": "csv", "tdsrc": "TC0001", "tdname": "testspec",
"modus": "unit"}
job.par.setParameterArgs(args)
filename = str(job.conf.confs["paths"]["testdata"]) + "/" + getattr(job.par, "tdsrc") + "/" + getattr(job.par, "tdname") + ".csv"
tdata = t.getCsvSpec(job.m, filename, "data")
print("111")
print(tdata)
if __name__ == '__main__':
unittest.main()

20
test/test_toolhandling.py

@ -5,6 +5,7 @@ import utils.path_tool
import basic.toolHandling
import test.constants
import components.component
import basic.constants as B
HOME_PATH = test.constants.HOME_PATH
conf = {}
@ -22,22 +23,27 @@ class MyTestCase(unittest.TestCase):
comp = components.component.Component()
comp.name = "testb"
comp.conf = {}
comp.conf["conn"] = {}
self.assertRaises(LookupError, basic.toolHandling.getDbTool, comp)
comp.conf[B.SUBJECT_CONN] = {}
comp.conf[B.SUBJECT_CONN][B.TOPIC_NODE_CLI] = {}
#self.assertRaises(LookupError, basic.toolHandling.getDbTool, comp)
comp = components.component.Component()
comp.name = "testb"
comp.conf = {}
comp.conf["conn"] = {}
comp.conf["conn"]["dbtype"] = "mysql"
comp.conf["conn"]["clitype"] = "ssh"
comp.conf[B.SUBJECT_ARTS] = {}
comp.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_API] = {}
comp.conf[B.SUBJECT_CONN] = {}
comp.conf[B.SUBJECT_CONN][B.TOPIC_NODE_DB] = {}
comp.conf[B.SUBJECT_CONN][B.TOPIC_NODE_CLI] = {}
comp.conf[B.SUBJECT_CONN][B.TOPIC_NODE_DB][B.ATTR_TYPE] = "mysql"
comp.conf[B.SUBJECT_CONN][B.TOPIC_NODE_CLI][B.ATTR_TYPE] = "ssh"
tool = basic.toolHandling.getDbTool(comp)
self.assertRegex(str(type(tool)), 'dbmysql_tool.DbFcts')
tool = basic.toolHandling.getCliTool(comp)
self.assertRegex(str(type(tool)), 'clissh_tool.CliFcts')
comp.conf["conn"]["dbtype"] = "dxx"
comp.conf["conn"]["clitype"] = "sxx"
comp.conf[B.SUBJECT_CONN][B.TOPIC_NODE_DB][B.ATTR_TYPE] = "dxx"
comp.conf[B.SUBJECT_CONN][B.TOPIC_NODE_CLI][B.ATTR_TYPE] = "sxx"
self.assertRaises(FileNotFoundError, basic.toolHandling.getDbTool, comp)
self.assertRaises(FileNotFoundError, basic.toolHandling.getCliTool, comp)

22
utils/cli_abstract.py

@ -1,3 +1,25 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
"""
This abstract class CliFcts defines the interface for the execution of any command-line-functions.
It uses the following configuration
.a) COMP.conf->artifact->cli->[system] : for structural attributes of the operating-system \n
.c) COMP.conf->conn->[cli] : for connection-attributes and structural attributes,
maybe specialized for the operating-system
The attribute type resolves which technique is used, implemented in a specific tool-class:
* cmd,bash,powersh ... for specific local operating-system
* ssh,hadoop ... for specific remote operating-system
The main tasks are: \n
.1. executing the command-array - with attributes
* conn.ip, host, port, user, password, ... for synchronous db-connection
* conn.root, ... for path-definitions for file-implementations (csv, )
"""
import basic.program
import utils.config_tool

4
utils/config_tool.py

@ -36,7 +36,7 @@ def getConfigPath(modul, name):
* testcase << parameter
"""
job = basic.program.Job.getInstance()
verify = job.getDebugLevel("config_tool")
verify = job.getDebugLevel("config_tool")-4
job.debug(verify, "getConfig " + modul + ", " + name)
if modul == "tool":
pathname = job.conf.confs.get("paths").get("home") + "/configs/tool_" + name + ".yml"
@ -122,7 +122,7 @@ def hasAttr(o, name):
def getConfig(modul, name):
job = basic.program.Job.getInstance()
verify = job.getDebugLevel("config_tool")-1
verify = job.getDebugLevel("config_tool")-4
pathname = getConfigPath(modul, name)
confs = {}
job.debug(verify, "getConfig " + pathname)

36
utils/db_abstract.py

@ -4,6 +4,40 @@
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
"""
This abstract class DbFcts defines the interface for any function relating to any kind of a database-management.
It uses the following configuration
.a) COMP.conf->artifact->db->[table] : for structural attributes of the database, maybe specialized for tables \n
.b) COMP.conf->artifact->ddl : for structural attributes of the database-tables \n
.c) COMP.conf->conn->[db] : for connection-attributes and structural attributes, maybe specialized for the database
The attribute db.type resolves which technique is used, implemented in a specific tool-class:
* db2,mysql,pgsql,(s)hive ... for specific synchronous RDBs
* spark,shive,sfile ... using spark-technology
* csv,(s)file ... for data managed in files
The main tasks are: \n
.1. connecting to the database - with attributes
* conn.ip, host, port, user, password, ... for synchronous db-connection
* conn.root, ... for path-definitions for file-implementations (csv, )
.2. delete at the initialisation - with
* db.reset for the context testcase, testsuite or never, on which the content will be deleted - default: testcase
* db.character of the table if the content will be deleted in a sequence of testcases
* db.tabname -- if the tablename differs from the node-name of the table - default: not necessary
* db.filename -- if the filename differs from the node-name of the table - default: not necessary
* par.dbwhere which rows will be deleted - default empty
.3. insert testdata at the initialisation
* ddl.[fields]
* db.tabname - if the tablename differs from the node-name of the table - default: not necessary
* db.filename - if the filename differs from the node-name of the table - default: not necessary
.4. select the data with
* db.tabname - if the tablename differs from the node-name of the table - default: not necessary
* db.filename - if the filename differs from the node-name of the table - default: not necessary
* ddl._data.[fields].order for an order-clause
* par.dbwhere which rows will be deleted - default empty
SPECIAL CASES:
* If the table is partitioned, the functions delete/insert/select call the callback-functions
COMP.nextTable() resp. COMP.nextTdata().
"""
import basic.program
import utils.config_tool
import basic.constants as B
@ -63,7 +97,7 @@ class DbFcts():
statement written in sql """
plainname = basic.componentHandling.getPlainCompname(self.comp.name)
self.loadDdl()
for t in self.comp.conf["ddl"]:
for t in self.comp.conf[B.DATA_NODE_DDL]:
print("einzufuegende Tabelle "+t)
if (t in tdata[plainname]):
self.insertRows(t, tdata[plainname][t][B.DATA_NODE_DATA])

9
utils/dbcsv_tool.py

@ -7,6 +7,7 @@
import basic.program
import utils.config_tool
import utils.db_abstract
import basic.constants as B
class DbFcts(utils.db_abstract.DbFcts):
@ -45,7 +46,7 @@ class DbFcts(utils.db_abstract.DbFcts):
verify = -1+job.getDebugLevel("db_tool")
cmd = "INSERT INTO "+table+";"
header = ""
for h in self.comp.conf["ddl"][table]["fields"]:
for h in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
print(h)
header += ", "+h
cmd += " (" + header[1:]+" ) "
@ -55,12 +56,12 @@ class DbFcts(utils.db_abstract.DbFcts):
print(r)
rowvalues = ""
cmd += "\n ( "
for h in self.comp.conf["ddl"][table]["fields"]:
for h in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
print("h "+h)
if (h in r):
rowvalues += ", "+self.getDbValue(self.comp.conf["ddl"][table]["data"][h], r[h])
rowvalues += ", "+self.getDbValue(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_DATA][h], r[h])
else:
rowvalues += ", "+self.getDbValue(self.comp.conf["ddl"][table]["data"][h], "")
rowvalues += ", "+self.getDbValue(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_DATA][h], "")
print("rv " + rowvalues)
cmd += rowvalues[1:]+" ),"
cmd = cmd[0:-1]+";"

10
utils/dbmysql_tool.py

@ -30,7 +30,7 @@ class DbFcts(utils.db_abstract.DbFcts):
#mycursor.execute(cmd)
#myresult = mycursor.fetchall()
tdata[B.DATA_NODE_HEADER] = []
for f in self.comp.conf["ddl"][table][B.DATA_NODE_HEADER]:
for f in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
tdata[B.DATA_NODE_HEADER].append(f)
myresult = []
for x in myresult:
@ -59,7 +59,7 @@ class DbFcts(utils.db_abstract.DbFcts):
verify = -1+job.getDebugLevel("db_tool")
cmd = "INSERT INTO "+table+";"
header = ""
for h in self.comp.conf["ddl"][table]["fields"]:
for h in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
print(h)
header += ", "+h
cmd += " (" + header[1:]+" ) "
@ -69,12 +69,12 @@ class DbFcts(utils.db_abstract.DbFcts):
print(r)
rowvalues = ""
cmd += "\n ( "
for h in self.comp.conf["ddl"][table]["fields"]:
for h in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
print("h "+h)
if (h in r):
rowvalues += ", "+self.getDbValue(self.comp.conf["ddl"][table]["data"][h], r[h])
rowvalues += ", "+self.getDbValue(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_DATA][h], r[h])
else:
rowvalues += ", "+self.getDbValue(self.comp.conf["ddl"][table]["data"][h], "")
rowvalues += ", "+self.getDbValue(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_DATA][h], "")
print("rv " + rowvalues)
cmd += rowvalues[1:]+" ),"
cmd = cmd[0:-1]+";"

4
utils/dbspark_tool.py

@ -23,14 +23,14 @@ class DbFcts(utils.db_abstract.DbFcts):
tdata = {}
job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel("db_tool")
cmd = "SELECT "+",".join(self.comp.conf["ddl"][table]["fields"])
cmd = "SELECT "+",".join(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER])
cmd += " FROM "+table+""+self.getWhere()+""+self.getOrder()
spark = self.getConnector()
df = spark.sql(cmd)
data = []
for r in df:
data.append(r)
tdata[B.DATA_NODE_HEADER] = self.comp.conf["ddl"][table]["fields"]
tdata[B.DATA_NODE_HEADER] = self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]
tdata[B.DATA_NODE_DATA] = data
return tdata

10
utils/match_tool.py

@ -120,10 +120,10 @@ class Matching():
self.nomatch[similarity] = [keyA, keyB]
def getTableDdl(self, path):
a = path.split(":")
ddl = self.comp.conf["ddl"]
ddl = self.comp.conf[B.DATA_NODE_DDL]
for x in a:
if (len(x) < 2): continue
if (x == "_data"): break
if (x == B.DATA_NODE_DATA): break
if x in ddl: ddl = ddl[x]
return ddl
def setDiffHeader(matching):
@ -229,13 +229,13 @@ def getSimilarity(matching, path, rA, rB, i):
job.debug(verify, "getSimilarity "+path+" "+str(i))
if len(matching.matchkeys) > 0:
keys = matching.matchkeys
elif ("ddl" in matching.comp.conf):
elif (B.DATA_NODE_DDL in matching.comp.conf):
job.debug(verify, "ddl " + path + " " + str(i))
a = path.split(":")
ddl = matching.comp.conf["ddl"]
ddl = matching.comp.conf[B.DATA_NODE_DDL]
for x in a:
if (len(x) < 2): continue
if (x == "_data"): break
if (x == B.DATA_NODE_DATA): break
if x in ddl: ddl = ddl[x]
job.debug(verify, "ddl " + json.dumps(ddl) + " " + str(i))
keys = {}

20
utils/path_tool.py

@ -12,7 +12,7 @@ import re
import basic.constants as B
def getKeyValue(key):
def getKeyValue(key, comp=None):
job = basic.program.Job.getInstance()
verify = job.getDebugLevel("path_tool")
pt = PathConf.getInstance()
@ -25,12 +25,22 @@ def getKeyValue(key):
job.debug(verify, neu)
return neu
# return job.conf.confs["paths"][key[9:]]
elif 'comp.' in key:
if comp is None:
raise Exception("Component is missing for "+key)
if not utils.config_tool.hasAttr(comp, key[5:]):
pass
pass
elif 'env.' in key:
#if key[4:]
pass
elif (pt.pattern):
return pt.pattern[key]
job.debug(verify, "pt exists")
else:
return "xx-"+key+"-xx"
def composePath(pathname, comp):
job = basic.program.Job.getInstance()
verify = job.getDebugLevel("path_tool")
@ -42,6 +52,7 @@ def composePath(pathname, comp):
else:
job.debug(verify, "in Pattern nicht vorhanden: " + pathname)
def composePatttern(pattern, comp):
job = basic.program.Job.getInstance()
verify = job.getDebugLevel("path_tool")
@ -50,7 +61,7 @@ def composePatttern(pattern, comp):
l = re.findall('\{.*?\}', pattern)
job.debug(verify, l)
for pat in l:
pit = getKeyValue(pat[1:-1])
pit = getKeyValue(pat[1:-1], comp)
job.debug(verify, str(max) + ": " + pattern + ": " + pat + ": " + pit)
pattern = pattern.replace(pat, pit)
job.debug(verify, str(max) + ": " + pattern + ": " + pat + ": " + pit)
@ -63,7 +74,8 @@ def composePatttern(pattern, comp):
break
return pattern
def extractPattern(pathtyp):
def extractPattern(pathtyp, comp=None):
job = basic.program.Job.getInstance()
verify = job.getDebugLevel("path_tool")
out = []
@ -76,7 +88,7 @@ def extractPattern(pathtyp):
pre = work[0:i]
pat = work[i+1:j]
job.debug(verify, work + " von " + str(i) + "-" + str(j) + " pre " + pre + "pat " + pat)
pit = getKeyValue(pat)
pit = getKeyValue(pat, comp)
tup = (pre, pat, pit)
out.append(tup)
work = work[j+1:]

18
utils/tdata_tool.py

@ -79,6 +79,7 @@ def getCsvSpec(msg, filename, type):
lines = utils.file_tool.readFileLines(msg, filename)
status = "start"
for l in lines:
print("lines "+l)
fields = l.split(CSV_DELIMITER)
# check empty line, comment
if (len(l.strip().replace(CSV_DELIMITER,"")) < 1):
@ -91,13 +92,17 @@ def getCsvSpec(msg, filename, type):
if (a[0] not in data):
data[a[0]] = {}
if (a[0].lower() == "step"):
if (not "steps" in data):
data["steps"] = []
if (not B.DATA_NODE_STEPS in data):
data[B.DATA_NODE_STEPS] = []
step = {}
step["comp"] = fields[1]
step["todo"] = fields[2]
step["args"] = fields[3]
data["steps"].append(step)
step[B.DATA_NODE_COMP] = fields[1]
step[B.ATTR_DATA_REF] = fields[2]
step[B.ATTR_STEP_ARGS] = {}
a = fields[3].split(",")
for arg in a:
b = arg.split(":")
step[B.ATTR_STEP_ARGS][b[0]] = b[1]
data[B.DATA_NODE_STEPS].append(step)
continue
elif (a[0].lower() == "option"):
data[a[0]][a[1]] = fields[1]
@ -143,6 +148,7 @@ def getCsvSpec(msg, filename, type):
data[a[0]][a[1]][a[2]] = row
elif len(a) == 1 and type == CSV_SPECTYPE_DATA:
data[a[0]][a[1]][a[2]] = {f: row}
print("return getCsvSpec "+str(data))
return data
def readCsv(msg, filename, comp):

Loading…
Cancel
Save