
execution init_testcase with bugfixes

master
Ulrich Carmesin, 2 years ago
commit 5e8d3018d9
Changed files (change count in parentheses):

  1. basic/compexec.py (37)
  2. basic/componentHandling.py (2)
  3. basic/constants.py (9)
  4. basic/toolHandling.py (29)
  5. init_testcase.py (23)
  6. test/test_04config.py (8)
  7. utils/api_abstract.py (3)
  8. utils/cli_abstract.py (4)
  9. utils/config_tool.py (8)
  10. utils/data_const.py (1)
  11. utils/db_abstract.py (37)
  12. utils/dbcsv_tool.py (10)
  13. utils/dbmysql_tool.py (11)
  14. utils/dbrel_tool.py (11)
  15. utils/dbsfile_tool.py (8)
  16. utils/dbshive_tool.py (9)
  17. utils/dbspark_tool.py (3)
  18. utils/file_abstract.py (3)
  19. utils/filejson_tool.py (19)
  20. utils/filelog_tool.py (19)
  21. utils/filexml_tool.py (19)
  22. utils/i18n_tool.py (2)
  23. utils/path_tool.py (3)
  24. utils/tdata_tool.py (63)

basic/compexec.py (37)

@@ -79,16 +79,18 @@ class Testexecuter():
         verify = -1+job.getDebugLevel(self.name)
         self.m.debug(verify, "--- "+str(inspect.currentframe().f_code.co_name)+"() started at "
                      + datetime.now().strftime("%Y%m%d_%H%M%S")+" for " + str(self.name).upper())
-        if B.ATTR_ARTS_LOG in self.conf[B.SUBJECT_ARTS]:
-            self.m.logInfo("log rotate in "+ self.name)
-        if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS]:
-            self.m.logInfo("delete content "+ self.name)
-            dbi = basic.toolHandling.getDbTool(self)
-            dbi.deleteTables()
-        if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTS]:
-            self.m.logInfo("lob is deleted with flaskdb "+ self.name)
-        if B.ATTR_ARTS_FILE in self.conf[B.SUBJECT_ARTS]:
-            self.m.logInfo("rm files in "+ self.name)
+        for node in [B.TOPIC_NODE_DB, B.TOPIC_NODE_CLI, B.TOPIC_NODE_API]:
+            if node not in self.conf[B.SUBJECT_ARTS]:
+                continue
+            tool = basic.toolHandling.getTool(node, self, job)
+            tool.reset_TData(job)
+        if B.TOPIC_NODE_FILE in self.conf[B.SUBJECT_ARTS]:
+            for file in self.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_FILE]:
+                if file in B.LIST_FILE_ATTR:
+                    continue
+                print("91: "+self.classname+" "+file)
+                tool = basic.toolHandling.getFileTool(job, self, B.TOPIC_NODE_FILE+"."+file)
+                tool.reset_TData(job)
         self.m.setMsg("resetInstance for " + self.name + " is OK")
         self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() finished at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())

@@ -103,13 +105,15 @@ class Testexecuter():
         job = basic.program.Job.getInstance()
         verify = -1+job.getDebugLevel(self.name)
         self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
+        for node in [B.TOPIC_NODE_DB, B.TOPIC_NODE_CLI, B.TOPIC_NODE_FILE, B.TOPIC_NODE_API]:
+            print(node)
         if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS] and B.DATA_NODE_TABLES in tdata:
             for t in tdata[B.DATA_NODE_TABLES]:
                 print (t)
                 if utils.db_abstract.isCompTable(self, tdata, t):
                     self.m.logInfo("insert content "+ self.name)
-                    dbi = basic.toolHandling.getDbTool(self)
-                    dbi.insertTables(tdata)
+                    dbi = basic.toolHandling.getDbTool(self, job)
+                    dbi.insertTables(tdata, job)
                     break
         self.m.setMsg("data loaded for " + self.name + " is OK")
         self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() finished at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())

@@ -135,15 +139,16 @@ class Testexecuter():
         self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
         if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS]:
             self.m.logInfo("select db-content "+ self.name)
-            dbi = basic.toolHandling.getDbTool(self)
-            data = dbi.selectTables(subdir)
+            dbi = basic.toolHandling.getDbTool(self, job)
+            data = dbi.selectTables(subdir, job)
             print("ppp")
             #data = {}
             for t in data[subdir]:
                 data[B.DATA_NODE_TABLES] = {}
                 data[B.DATA_NODE_TABLES][t] = data[subdir][t]
-                utils.tdata_tool.writeCsvData(utils.path_tool.rejoinPath(
-                    utils.path_tool.composePattern("{tcresult}", self), subdir, t+".csv"), data, self)
+                utils.tdata_tool.writeCsvData(
+                    utils.path_tool.rejoinPath(utils.path_tool.composePattern("{tcresult}", self), subdir, t+".csv"),
+                    data, self, job)
         if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTS]:
             self.m.logInfo("check lob if is deleted with flaskdb "+ self.name)
         self.m.setMsg("readInstance for " + self.name + " is OK")

basic/componentHandling.py (2)

@@ -192,6 +192,8 @@ class ComponentManager:
             name = compName
         i = 0
         c.name = name
+        c.classname = compName
         c.m = basic.message.Message(job, basic.message.LIMIT_DEBUG, "logTime", name)
         c.conf = utils.config_tool.mergeConn(c.m, confs["conf"], conns[i])
         c.conf[B.SUBJECT_CONN] = conns[i]

basic/constants.py (9)

@@ -100,13 +100,15 @@ DATA_NODE_PAR = "par"
 ATTR_ARTS_TYPE = "type"
 """ must attribute for the type of the technique """
+ATTR_ARTS_NAME = "name"
+""" optional attribute just for information """
 ATTR_ARTS_PATH = "path"
 """ optional attribute for the basic folder if the artifact is stored in the filesystem """
 ATTR_ARTS_RESET = "reset"
 """ optional attribute if the artefact should be reset in the initializing-phase """
 ATTR_ARTS_PRESTEP = "prestep"
 """ optional attribute to define a source-table for this table """
-LIST_ARTS_ATTR = [ATTR_ARTS_TYPE, ATTR_ARTS_PATH, ATTR_ARTS_RESET, ATTR_ARTS_PRESTEP]
+LIST_ARTS_ATTR = [ATTR_ARTS_TYPE, ATTR_ARTS_PATH, ATTR_ARTS_RESET, ATTR_ARTS_PRESTEP, ATTR_ARTS_NAME]
 TOPIC_NODE_DB = "db"
 # testexec, db_abstr

@@ -125,7 +127,10 @@ LIST_CLI_ATTR = [] + LIST_ARTS_ATTR
 TOPIC_NODE_API = "api"
 LIST_API_ATTR = [] + LIST_ARTS_ATTR
 TOPIC_NODE_FILE = "file"
-LIST_FILE_ATTR = [] + LIST_ARTS_ATTR
+ATTR_FILE_OLD = "oldfile"
+ATTR_FILE_ROTATE = "rotate"
+LIST_FILE_ATTR = [ATTR_FILE_OLD, ATTR_FILE_ROTATE] + LIST_ARTS_ATTR
 LIST_ATTR = {
     TOPIC_NODE_DB: LIST_DB_ATTR,
     TOPIC_NODE_API: LIST_API_ATTR,
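The two new file attributes matter because a component's file configuration mixes plain attributes (type, oldfile, rotate, ...) with named file nodes; LIST_FILE_ATTR is what lets callers such as compexec.py skip the former. A small illustration with an assumed configuration layout:

    # Illustrative only: the conf layout below is an assumption, the constants are real.
    import basic.constants as B

    file_conf = {
        "type": "csv",               # ATTR_ARTS_TYPE -> plain attribute
        "oldfile": "x.bak",          # ATTR_FILE_OLD  -> plain attribute
        "xmlrest": {"type": "xml"},  # a named file node, i.e. an artifact of its own
    }
    for key in file_conf:
        if key in B.LIST_FILE_ATTR:
            continue                      # "type" and "oldfile" are skipped
        print("file artifact:", key)      # only "xmlrest" is handled by a file tool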

basic/toolHandling.py (29)

@@ -9,6 +9,8 @@ import os
 import basic.program
 import basic.constants as B
 # -------------------------------------------------
+import utils.config_tool

 def hasAttr(o, name):
     if (isinstance(o, dict)):

@@ -46,10 +48,20 @@ def getCompAttr(comp, topic, attr, table=""):
         return getAttr(comp.conf[B.SUBJECT_ARTS][topic], attr)
     raise LookupError(topic+"."+attr+" is not set in comp " + comp.name)

+def getTool(technicType, comp, job):
+    if technicType == B.TOPIC_NODE_DB:
+        return getDbTool(comp, job)
+    if technicType == B.TOPIC_NODE_CLI:
+        return getCliTool(comp, job)
+    if technicType == B.TOPIC_NODE_API:
+        return getApiTool(comp, job)
+    if technicType == B.TOPIC_NODE_FILE:
+        # TODO im Allgemeinen keine konrete Implementierung aufrufen,
+        # denn zu einer Komponente koennen unterschiedliche Dateien vorkommen
+        return getFileTool(job, comp, "")
+
 # class ToolManager:
-def getDbTool(comp):
-    job = basic.program.Job.getInstance()
+def getDbTool(comp, job):
     verify = int(job.getDebugLevel("db_tool"))
     dbtype = getCompAttr(comp, B.TOPIC_NODE_DB, B.ATTR_TYPE, "")
     toolname = "db"+dbtype+"_tool"

@@ -63,8 +75,7 @@ def getDbTool(comp):
     c.setComp(comp)
     return c

-def getCliTool(comp):
-    job = basic.program.Job.getInstance()
+def getCliTool(comp, job):
     verify = int(job.getDebugLevel("db_tool"))
     clitype = getCompAttr(comp, B.TOPIC_NODE_CLI, B.ATTR_TYPE, "")
     toolname = "cli"+clitype+"_tool"

@@ -78,8 +89,7 @@ def getCliTool(comp):
     c.setComp(comp)
     return c

-def getApiTool(comp):
-    job = basic.program.Job.getInstance()
+def getApiTool(comp, job):
     verify = int(job.getDebugLevel("db_tool"))
     apitype = getCompAttr(comp, B.TOPIC_NODE_API, B.ATTR_TYPE, "")
     toolname = "api"+apitype+"_tool"

@@ -93,9 +103,12 @@ def getApiTool(comp):
     c.setComp(comp)
     return c

-def getFileTool(job, comp=None):
+def getFileTool(job, comp, filenode=""):
     verify = int(job.getDebugLevel("db_tool"))
-    filetype = getCompAttr(comp, B.TOPIC_NODE_FILE, B.ATTR_TYPE, "")
+    if len(filenode) > 3 and filenode[-1:] != ".":
+        filetype = utils.config_tool.getAttribute(comp, filenode, B.ATTR_ARTS_TYPE, job)
+    else:
+        filetype = getCompAttr(comp, B.TOPIC_NODE_FILE, B.ATTR_TYPE, "")
     toolname = "file"+filetype+"_tool"
     filepath = os.path.join(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_PROGRAM], "utils", toolname+".py")
     #comp.m.debug(verify, "toolname "+filepath)
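A short usage sketch of the changed factories: every getter now receives the job explicitly instead of fetching the singleton, and getFileTool() can resolve a tool per file node when the node is passed as "file.<node>". The component object below is hypothetical:

    # Usage sketch; "comp" is a hypothetical component whose conf declares db and file artifacts.
    import basic.toolHandling
    import basic.constants as B

    def reset_db_and_rest_file(comp, job):
        dbi = basic.toolHandling.getTool(B.TOPIC_NODE_DB, comp, job)  # -> db<type>_tool.DbFcts
        dbi.reset_TData(job)                                          # delegates to deleteTables(job)
        # per-node file tool; its type is looked up via utils.config_tool.getAttribute()
        ftool = basic.toolHandling.getFileTool(job, comp, "file.xmlrest")
        ftool.reset_TData(job)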

init_testcase.py (23)

@@ -1,25 +1,30 @@
-# This is a sample Python script.
-import os
-# import jsonpickle # pip install jsonpickle
-import yaml # pip install pyyaml
+#!/usr/bin/python
+# program to execute steps of a testcase
+# PARAM: --environment --application --tcdir [ testcase, tctime ]
+# main functions
+# + reset_testcase() : comp-config --> system
+# + load_testcase() : testspec --> tdata --> system.data
+# + select_testcase() : system.data --> data --> archiv.result
+# ---------------------------------------------------import os
 import basic.program as program
 import utils.tdata_tool
 import basic.componentHandling
 import basic.constants as B
+import utils.file_tool
+import utils.path_tool
+import utils.path_const as P
 import basic.message as message
-# Press Umschalt+F10 to execute it or replace it with your code.
-# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.

 PROGRAM_NAME = "init_testcase"

 def startPyJob(job):
     cm = basic.componentHandling.ComponentManager.getInstance("init")
+    print("cm "+str(cm))
     cm.initComponents()
     comps = cm.getComponents(PROGRAM_NAME)
     job.m.setMsg("# Components initialized with these relevant components " + str(comps))
     testdata = utils.tdata_tool.getTestdata()
+    print("------------------------------------------------------------")
     for c in comps:
         comp = cm.getComponent(c)
         comp.m.logInfo("------- "+comp.name+" ----------------------------------------")

@@ -28,7 +33,7 @@ def startPyJob(job):
         if job.hasFunction("load_TData"):
             comp.load_TData(B.PAR_TESTCASE, testdata)
         if job.hasFunction("read_TData"):
-            comp.read_TData("vorher", B.PAR_TESTCASE)
+            comp.read_TData(utils.path_tool.getKeyValue(P.KEY_PRECOND), B.PAR_TESTCASE)
         comp.m.logInfo("------- "+comp.name+" ----------------------------------------")
         job.m.merge(comp.m)
         print(str(comp))

@@ -39,8 +44,6 @@ if __name__ == '__main__':
     print(PROGRAM_NAME)
     x = program.Job(PROGRAM_NAME)
     x.startJob()
-    x.m.logInfo("hier eine LogInfo")
-    x.m.logDebug("hier eine DbugMeldung")
     x.m.logDebug(str(vars(x.par)) + "\n" + str(vars(x.conf)))
     if x.m.isRc("fatal"):
         x.stopJob()
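For context, the rewritten header documents how the program is meant to be started and what the three phases do. A hypothetical invocation (option names are taken from the PARAM comment, the values are placeholders) and the per-component phase logic as it reads in startPyJob():

    # Hypothetical call:  python init_testcase.py --environment ENV01 --application MYAPP --tcdir TC0001
    # Per component, startPyJob() then runs the documented phases roughly like this:
    import basic.constants as B
    import utils.path_tool
    import utils.path_const as P

    def run_phases(job, comp, testdata):
        if job.hasFunction("reset_TData"):      # reset_testcase(): comp-config --> system
            comp.reset_TData(B.PAR_TESTCASE)    # assumed signature, not shown in the hunks above
        if job.hasFunction("load_TData"):       # load_testcase(): testspec --> system.data
            comp.load_TData(B.PAR_TESTCASE, testdata)
        if job.hasFunction("read_TData"):       # select_testcase(): system.data --> archiv.result
            comp.read_TData(utils.path_tool.getKeyValue(P.KEY_PRECOND), B.PAR_TESTCASE)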

test/test_04config.py (8)

@@ -18,7 +18,7 @@ import utils.path_const as P
 import basic.constants as B

 TEST_FUNCTIONS = ["test_01getConfig", "test_02mergeAttributes", "test_03getAttributes"]
-#TEST_FUNCTIONS = ["test_03getAttributes"]
+TEST_FUNCTIONS = ["test_03getAttributes"]
 verbose = False

 class MyTestCase(unittest.TestCase):

@@ -95,6 +95,12 @@ class MyTestCase(unittest.TestCase):
         self.assertIn(B.ATTR_ARTS_PATH, attrList)
         self.assertIn(B.ATTR_ARTS_RESET, attrList)
         cnttest += 2 # new attribute
+        comp = test.testtools.getComp("testrest")
+        path = "file.xmlrest"
+        attrList = utils.config_tool.getAttributeList(comp, path, job)
+        print(str(comp.conf["conn"]))
+        print(str(comp.conf[B.SUBJECT_ARTS]))
+        print(str(attrList))
         MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)

utils/api_abstract.py (3)

@@ -28,6 +28,9 @@ class ApiFcts():
         self.comp = None
         pass

+    def reset_TData(self, job):
+        pass
+
     def setComp(self, comp):
         self.comp = comp

utils/cli_abstract.py (4)

@@ -22,6 +22,7 @@ The main tasks are: \n
 """
 import basic.program
 import utils.config_tool
+import basic.constants as B

 class CliFcts():
     """

@@ -32,6 +33,9 @@ class CliFcts():
         self.comp = None
         pass

+    def reset_TData(self, job):
+        pass
+
     def setComp(self, comp):
         self.comp = comp

utils/config_tool.py (8)

@@ -248,6 +248,14 @@ def getConfig(modul, name, subname=""):
     return confs

+def getAttribute(comp, path, attr, job):
+    attrList = getAttributeList(comp, path, job)
+    if attr in attrList:
+        return attrList[attr]
+    else:
+        return ""
+
 def getAttributeList(comp, path, job):
     """
     gets a concrete attribute-list for an arteifact-element from the config-attributes from the connection-attributes
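The new getAttribute() is a thin convenience over getAttributeList(): it returns the merged value of one attribute of an artifact node, or an empty string when the node does not define it. A hedged usage sketch (the component and node name mirror the test_04config change above):

    # Sketch; "comp" is a hypothetical component configured with a "file.xmlrest" artifact.
    import utils.config_tool
    import basic.constants as B

    def file_type_of_rest_node(comp, job):
        filetype = utils.config_tool.getAttribute(comp, "file.xmlrest", B.ATTR_ARTS_TYPE, job)
        return filetype if filetype != "" else "xml"   # "" means not configured; the fallback is assumed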

utils/data_const.py (1)

@@ -8,6 +8,7 @@ DDL_FILENAME = "DATASTRUCTURE"
 DATA_NODE_TYPE = "type"
 TYPE_STRING = "string"
+TYPE_STR = "str"
 TYPE_INT = "int"
 TYPE_FLOAT = "float"
 TYPE_DOUBLE = "double"

utils/db_abstract.py (37)

@@ -219,7 +219,7 @@ def formatDbField(comp, val, field):
 def formatDbVal(msg, val, dtyp):
     ctlg = basic.catalog.Catalog.getInstance()
-    if dtyp == D.TYPE_STRING:
+    if dtyp == D.TYPE_STRING or dtyp == D.TYPE_STR:
         if not isinstance(val, str):
             msg.logError("field must be " + dtyp + ", " + str(val))
         return str(val)

@@ -278,14 +278,14 @@ class DbFcts():
         return getDbAttributes(self.comp, table)

-    def selectTables(self, subdir):
+    def selectTables(self, subdir, job):
         """ method to delete rows from a database
         statement written in sql """
-        self.loadDdl()
+        self.loadDdl(job)
         tdata = {}
         tdata[subdir] = {}
         for t in self.comp.conf[B.DATA_NODE_DDL]:
-            tdata[subdir][t] = self.selectRows(t)
+            tdata[subdir][t] = self.selectRows(t, job)
             if B.DATA_NODE_DATA not in tdata[subdir][t]:
                 raise Exception("missing data node in table")
             tdata[subdir][t][D.DATA_ATTR_COUNT] = len(tdata[subdir][t][B.DATA_NODE_DATA])

@@ -293,25 +293,28 @@ class DbFcts():
             self.comp.m.logMsg("Tabelle {} mit {} Zeilen gelesen".format(t, len(tdata[subdir][t][B.DATA_NODE_DATA])))
         return tdata

-    def selectRows(self, statement):
+    def selectRows(self, table, job):
         """ method to select rows from a database
         statement written in sql """
         raise Exception(B.EXCEPT_NOT_IMPLEMENT)

-    def deleteTables(self):
+    def reset_TData(self, job):
+        self.deleteTables(job)
+
+    def deleteTables(self, job):
         """ method to delete rows from a database
         statement written in sql """
-        self.loadDdl()
+        self.loadDdl(job)
         for t in self.comp.conf[B.DATA_NODE_DDL]:
             print("zu loeschende Tabelle "+t)
-            self.deleteRows(t)
+            self.deleteRows(t, job)

-    def deleteRows(self, table):
+    def deleteRows(self, table, job):
         """ method to delete rows from a database
         statement written in sql """
         raise Exception(B.EXCEPT_NOT_IMPLEMENT)

-    def updateRows(self, statement):
+    def updateRows(self, statement, job):
         """ method to delete rows from a database
         statement written in sql """
         raise Exception(B.EXCEPT_NOT_IMPLEMENT)

@@ -321,17 +324,17 @@ class DbFcts():
         this method should only called by the class itself """
         raise Exception(B.EXCEPT_NOT_IMPLEMENT)

-    def insertTables(self, tdata):
+    def insertTables(self, tdata, job):
         """
         method to insert rows into the database of the component
         """
         # TODO wird der Tabellenname/DB/Schema unter tdata gespeichert?
         plainname = basic.componentHandling.getPlainCompname(self.comp.name)
-        self.loadDdl()
+        self.loadDdl(job)
         for t in tdata[B.DATA_NODE_TABLES]:
             print("einzufuegende Tabelle "+self.comp.name+" "+t)
             if isCompTable(self.comp, tdata, t):
-                self.insertRows(t, tdata[B.DATA_NODE_TABLES][t][B.DATA_NODE_DATA])
+                self.insertRows(t, tdata[B.DATA_NODE_TABLES][t][B.DATA_NODE_DATA], job)
                 self.comp.m.logMsg("in Tabelle {} {} Zeilen eingefuegt".format(
                     t, len(tdata[B.DATA_NODE_TABLES][t][B.DATA_NODE_DATA])))

@@ -347,10 +350,9 @@ class DbFcts():
         this method should only called by the class itself """
         raise Exception(B.EXCEPT_NOT_IMPLEMENT)

-    def loadDdl(self):
+    def loadDdl(self, job):
         """" load the DDL for each database-table
         the ddl are mostly stored as csv in the component-folder """
-        job = basic.program.Job.getInstance()
         if (B.DATA_NODE_DDL in self.comp.conf):
             return
         conf = utils.config_tool.getConfig(D.DDL_FILENAME, self.comp.name)

@@ -365,11 +367,10 @@ class DbFcts():
         return ""

     def getDbValue(self, fo, value):
-        # TODO Untersceidung csv und echte DB
-        return formatDbField(self.comp, value, fo)
+        value = str(formatDbField(self.comp, value, fo))
         if len(value.strip()) == 0 and fo[D.DDL_FNULLABLE] == B.SVAL_YES:
             return self.getDbNull()
-        if fo[D.DATA_NODE_TYPE] == D.TYPE_STRING:
+        if fo[D.DATA_NODE_TYPE] == D.TYPE_STRING or fo[D.DATA_NODE_TYPE] == D.TYPE_STR:
             return "'"+value.strip()+"'"
         elif fo[D.DATA_NODE_TYPE] == D.TYPE_INT:
             return value.strip()
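Since every driver method now takes the job as a parameter, a concrete driver no longer needs basic.program.Job.getInstance(). A minimal sketch of a subclass against the new signatures; the SQL strings and return values are placeholders, only the method names and parameters follow the diff:

    # Minimal sketch of a concrete driver; not one of the real db*_tool modules.
    import utils.db_abstract
    import basic.constants as B

    class SketchDbFcts(utils.db_abstract.DbFcts):

        def selectRows(self, table, job):
            verify = -1 + job.getDebugLevel("db_tool")   # job is passed in, no singleton lookup
            self.comp.m.logInfo("SELECT * FROM " + table + ";")
            return {B.DATA_NODE_HEADER: [], B.DATA_NODE_DATA: []}

        def deleteRows(self, table, job):
            self.comp.m.logInfo("DELETE FROM " + table + ";")

        def insertRows(self, table, rows, job):
            self.comp.m.logInfo("INSERT INTO " + table + " -- " + str(len(rows)) + " rows")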

utils/dbcsv_tool.py (10)

@@ -21,7 +21,7 @@ class DbFcts(utils.db_abstract.DbFcts):
     def __init__(self):
         pass

-    def selectRows(self, table):
+    def selectRows(self, table, job):
         """ method to select rows from a database
         statement written in sql """
         sqlTable = utils.db_abstract.getSqlTable(self.comp, table)

@@ -40,10 +40,9 @@ class DbFcts(utils.db_abstract.DbFcts):
         return tdata

-    def deleteRows(self, table):
+    def deleteRows(self, table, job):
         """ method to delete rows from a database
         statement written in sql """
-        job = basic.program.Job.getInstance()
         verify = -1+job.getDebugLevel("db_tool")
         sqlTable = utils.db_abstract.getSqlTable(self.comp, table)
         header = ""

@@ -62,11 +61,10 @@ class DbFcts(utils.db_abstract.DbFcts):
         raise Exception(B.EXCEPT_NOT_IMPLEMENT)

-    def insertRows(self, table, rows):
+    def insertRows(self, table, rows, job):
         """ method to insert rows into a database
         the rows will be interpreted by the ddl of the component
         """
-        job = basic.program.Job.getInstance()
         verify = -1+job.getDebugLevel("db_tool")
         sqlTable = utils.db_abstract.getSqlTable(self.comp, table)
         header = ""

@@ -89,7 +87,7 @@ class DbFcts(utils.db_abstract.DbFcts):
             rowvalues = self.comp.name+":"+table
             for h in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
                 print("head "+h)
-                if h in B.LIST_DB_ATTR:
+                if h in [B.DATA_NODE_HEADER, B.DATA_NODE_DATA]:
                     continue
                 print("h "+h)
                 if (h in r):

utils/dbmysql_tool.py (11)

@@ -19,11 +19,10 @@ class DbFcts(utils.db_abstract.DbFcts):
     def __init__(self):
         pass

-    def selectRows(self, table):
+    def selectRows(self, table, job):
         """ method to select rows from a database
         statement written in sql """
         tdata = {}
-        job = basic.program.Job.getInstance()
         verify = -1+job.getDebugLevel("db_tool")
         cmd = "SELECT * FROM "+table+";"
         #mycursor = self.getConnector()

@@ -38,24 +37,22 @@ class DbFcts(utils.db_abstract.DbFcts):
         self.comp.m.logInfo(cmd)
         return tdata

-    def deleteRows(self, table):
+    def deleteRows(self, table, job):
         """ method to delete rows from a database
         statement written in sql """
-        job = basic.program.Job.getInstance()
         verify = -1+job.getDebugLevel("db_tool")
         cmd = "DELETE FROM "+table+";"
         self.comp.m.logInfo(cmd)

-    def updateRows(self, statement):
+    def updateRows(self, statement, job):
         """ method to delete rows from a database
         statement written in sql """
         raise Exception(B.EXCEPT_NOT_IMPLEMENT)

-    def insertRows(self, table, rows):
+    def insertRows(self, table, rows, job):
         """ method to insert rows into a database
         the rows will be interpreted by the ddl of the component
         """
-        job = basic.program.Job.getInstance()
         verify = -1+job.getDebugLevel("db_tool")
         cmd = "INSERT INTO "+table+";"
         header = ""

utils/dbrel_tool.py (11)

@@ -19,11 +19,10 @@ class DbFcts(utils.db_abstract.DbFcts):
     def __init__(self):
         pass

-    def selectRows(self, table):
+    def selectRows(self, table, job):
         """ method to select rows from a database
         statement written in sql """
         tdata = {}
-        job = basic.program.Job.getInstance()
         verify = -1+job.getDebugLevel("db_tool")
         cmd = "SELECT * FROM "+table+";"
         #mycursor = self.getConnector()

@@ -38,24 +37,22 @@ class DbFcts(utils.db_abstract.DbFcts):
         self.comp.m.logInfo(cmd)
         return tdata

-    def deleteRows(self, table):
+    def deleteRows(self, table, job):
         """ method to delete rows from a database
         statement written in sql """
-        job = basic.program.Job.getInstance()
         verify = -1+job.getDebugLevel("db_tool")
         cmd = "DELETE FROM "+table+";"
         self.comp.m.logInfo(cmd)

-    def updateRows(self, statement):
+    def updateRows(self, statement, job):
         """ method to delete rows from a database
         statement written in sql """
         raise Exception(B.EXCEPT_NOT_IMPLEMENT)

-    def insertRows(self, table, rows):
+    def insertRows(self, table, rows, job):
         """ method to insert rows into a database
         the rows will be interpreted by the ddl of the component
         """
-        job = basic.program.Job.getInstance()
         verify = -1+job.getDebugLevel("db_tool")
         cmd = "INSERT INTO "+table+";"
         header = ""

utils/dbsfile_tool.py (8)

@@ -25,13 +25,12 @@ class DbFcts(utils.db_abstract.DbFcts):
         pass

-    def selectRows(self, table):
+    def selectRows(self, table, job):
         """ method to select rows from a database
         statement written in sql """
         tdata = {}
         dry = 0
         # attr = self.getDbAttributes(table)
-        job = basic.program.Job.getInstance()
         verify = -1+job.getDebugLevel("db_tool")
         pattern = "s3a://{hostname}/data/{tenant}/mt/sandboxes/{job.par.usecae}/{job.par.workspace}/{outfile}/VR_+reg+/"
         files = self.comp.composeFileClauses(pattern)

@@ -51,10 +50,9 @@ class DbFcts(utils.db_abstract.DbFcts):
         #tdata[B.DATA_NODE_DATA] = data
         return tdata

-    def deleteRows(self, table):
+    def deleteRows(self, table, job):
         """ method to delete rows from a database
         statement written in sql """
-        job = basic.program.Job.getInstance()
         dry = 0
         verify = -1+job.getDebugLevel("db_tool")
         cmd = "DELETE FROM "+table

@@ -72,7 +70,7 @@ class DbFcts(utils.db_abstract.DbFcts):
         print("select "+sql)
         #self.comp.m.logInfo(cmd)

-    def insertRows(self, table, rows):
+    def insertRows(self, table, rows, job):
         """ method to insert rows into a database
         the rows will be interpreted by the ddl of the component
         """

utils/dbshive_tool.py (9)

@@ -26,13 +26,11 @@ class DbFcts(utils.db_abstract.DbFcts):
         pass

-    def selectRows(self, table):
+    def selectRows(self, table, job):
         """ method to select rows from a database
         statement written in sql """
         tdata = {}
         dry = 0
-        # attr = self.getDbAttributes(table)
-        job = basic.program.Job.getInstance()
         verify = -1+job.getDebugLevel("db_tool")
         cmd = "SELECT "+",".join(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER])
         cmd += " FROM "+table

@@ -56,10 +54,9 @@ class DbFcts(utils.db_abstract.DbFcts):
         tdata[B.DATA_NODE_DATA] = data
         return tdata

-    def deleteRows(self, table):
+    def deleteRows(self, table, job):
         """ method to delete rows from a database
         statement written in sql """
-        job = basic.program.Job.getInstance()
         dry = 0
         verify = -1+job.getDebugLevel("db_tool")
         cmd = "DELETE FROM "+table

@@ -77,7 +74,7 @@ class DbFcts(utils.db_abstract.DbFcts):
         print("select "+sql)
         #self.comp.m.logInfo(cmd)

-    def insertRows(self, table, rows):
+    def insertRows(self, table, rows, job):
         """ method to insert rows into a database
         the rows will be interpreted by the ddl of the component
         """

utils/dbspark_tool.py (3)

@@ -23,11 +23,10 @@ class DbFcts(utils.db_abstract.DbFcts):
         return out

-    def selectRows(self, table):
+    def selectRows(self, table, job):
         """ method to select rows from a database
         statement written in sql """
         tdata = {}
-        job = basic.program.Job.getInstance()
         verify = -1+job.getDebugLevel("db_tool")
         cmd = "SELECT "+",".join(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER])
         cmd += " FROM "+table+""+self.getWhere()+""+self.getOrder()

utils/file_abstract.py (3)

@@ -32,6 +32,9 @@ class FileFcts():
     def __init__(self):
         pass

+    def reset_TData(self, job):
+        pass
+
     def setComp(self, job, comp=None):
         self.job = job
         self.comp = comp

utils/filejson_tool.py (19, new file)

@@ -0,0 +1,19 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
+import basic.program
+import utils.config_tool
+import utils.file_abstract
+import basic.constants as B
+import utils.path_tool
+import utils.file_tool
+import utils.tdata_tool
+
+
+class FileFcts(utils.file_abstract.FileFcts):
+
+    def __init__(self):
+        pass

utils/filelog_tool.py (19, new file)

@@ -0,0 +1,19 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
+import basic.program
+import utils.config_tool
+import utils.file_abstract
+import basic.constants as B
+import utils.path_tool
+import utils.file_tool
+import utils.tdata_tool
+
+
+class FileFcts(utils.file_abstract.FileFcts):
+
+    def __init__(self):
+        pass

utils/filexml_tool.py (19, new file)

@@ -0,0 +1,19 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
+import basic.program
+import utils.config_tool
+import utils.file_abstract
+import basic.constants as B
+import utils.path_tool
+import utils.file_tool
+import utils.tdata_tool
+
+
+class FileFcts(utils.file_abstract.FileFcts):
+
+    def __init__(self):
+        pass

utils/i18n_tool.py (2)

@@ -46,7 +46,7 @@ class I18n:
     """
     if job is None:
         job = basic.program.Job.getInstance()
     if "language" in job.conf.confs:
         language = job.conf.confs["language"]
     else:
         language = "en"

utils/path_tool.py (3)

@@ -83,12 +83,13 @@ def composePattern(pattern, comp):
     """
     job = basic.program.Job.getInstance()
     verify = job.getDebugLevel(TOOL_NAME)
+    verbose = False
     job.debug(verify, "composePattern " + pattern)
     max=5
     l = re.findall('\{.*?\}', pattern)
     job.debug(verify, l)
     for pat in l:
-        print(str(max) + ": " + pattern + ": " + pat)
+        if verbose: print(str(max) + ": " + pattern + ": " + pat)
         pit = getKeyValue(pat[1:-1], comp)
        job.debug(verify, str(max) + ": " + pattern + ": " + pat + ": " + pit)
         pattern = pattern.replace(pat, pit)

utils/tdata_tool.py (63)

@@ -31,9 +31,9 @@ def getTestdata(job=None):
     if job is None:
         job = basic.program.Job.getInstance()
     if "testcase" in job.program:
-        return collectTestdata(B.PAR_TESTCASE, job.par[B.PAR_TESTCASE], job)
+        return collectTestdata(B.PAR_TESTCASE, getattr(job.par, B.PAR_TESTCASE), job)
     else:
-        return collectTestdata(B.PAR_TESTSUITE, job.par[B.PAR_TESTSUITE], job)
+        return collectTestdata(B.PAR_TESTSUITE, getattr(job.par, B.PAR_TESTSUITE), job)

 def collectTestdata(gran, testentity, job):

@@ -45,27 +45,28 @@ def collectTestdata(gran, testentity, job):
     setBlockLists(job)
     if gran == B.PAR_TESTCASE:
         basispath = utils.path_tool.rejoinPath(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_TDATA], testentity)
-        pathname = utils.config_tool.getConfigPath(P.KEY_TESTCASE, job.par[B.PAR_TESTCASE], "", job)
+        pathname = utils.config_tool.getConfigPath(P.KEY_TESTCASE, getattr(job.par, B.PAR_TESTCASE), "", job)
     if gran == B.PAR_TESTSUITE:
         basispath = utils.path_tool.rejoinPath(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_TDATA], testentity)
-        pathname = utils.config_tool.getConfigPath(P.KEY_TESTSUITE, job.par[B.PAR_TESTSUITE], "", job)
+        pathname = utils.config_tool.getConfigPath(P.KEY_TESTSUITE, getattr(job.par, B.PAR_TESTSUITE), "", job)
     if pathname[-3:] == D.DFILE_TYPE_CSV:
         tdata = getCsvSpec(job.m, pathname, D.CSV_SPECTYPE_DATA)
     else:
         tdata = utils.file_tool.readFileDict(pathname, job.m)
     # get explicit specdata of includes
-    for pathname in tdata[D.CSV_BLOCK_IMPORT]:
-        pathname = utils.path_tool.rejoinPath(pathname)
-        if job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_TDATA] not in pathname:
-            pathname = utils.path_tool.rejoinPath(basispath, pathname)
-        if pathname[-3:] == D.DFILE_TYPE_CSV:
-            data = getCsvSpec(job.m, pathname, D.CSV_SPECTYPE_DATA)
-        else:
-            data = utils.file_tool.readFileDict(pathname, job.m)
-        for table in data[D.CSV_BLOCK_TABLES]:
-            if table in tdata[D.CSV_BLOCK_TABLES]:
-                print("Fehler")
-            tdata[D.CSV_BLOCK_TABLES][table] = data[D.CSV_BLOCK_TABLES][table]
+    if D.CSV_BLOCK_IMPORT in tdata:
+        for pathname in tdata[D.CSV_BLOCK_IMPORT]:
+            pathname = utils.path_tool.rejoinPath(pathname)
+            if job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_TDATA] not in pathname:
+                pathname = utils.path_tool.rejoinPath(basispath, pathname)
+            if pathname[-3:] == D.DFILE_TYPE_CSV:
+                data = getCsvSpec(job.m, pathname, D.CSV_SPECTYPE_DATA)
+            else:
+                data = utils.file_tool.readFileDict(pathname, job.m)
+            for table in data[D.CSV_BLOCK_TABLES]:
+                if table in tdata[D.CSV_BLOCK_TABLES]:
+                    print("Fehler")
+                tdata[D.CSV_BLOCK_TABLES][table] = data[D.CSV_BLOCK_TABLES][table]
     # get implicit specdata of spec-library
     for prefix in list_blocks[D.DFILE_TABLE_PREFIX]:
         files = utils.file_tool.getFiles(job.m, basispath, prefix, None)

@@ -178,7 +179,7 @@ def parseCsvSpec(msg, lines, ttype, tdata, job=None):
             # keywords option, step, table
             if verbose: print(str(a)+" -- "+str(fields))
             tableAttr = setTableAttribute(tableAttr, a[0], fields[1], job)
-            if (tableAttr["hit"]):
+            if (tableAttr["_hit"]):
                 status = "TABLE_ALIAS"
                 continue
             if (a[0].lower() in list_blocks[D.CSV_BLOCK_HEAD]):

@@ -229,9 +230,18 @@ def parseCsvSpec(msg, lines, ttype, tdata, job=None):
             setTableHeader(tableDict, tableAttr, fields, ttype, job)
             status = D.CSV_SPECTYPE_DATA
     if ttype == D.CSV_SPECTYPE_CONF:
+        header = []
         for k in tdata:
+            if k in D.LIST_DATA_ATTR:
+                continue
             if B.DATA_NODE_DATA in tdata[k]:
                 tdata[k].pop(B.DATA_NODE_DATA)
+            for f in tdata[k]:
+                if f in [B.DATA_NODE_HEADER, "_hit"] + D.LIST_DATA_ATTR:
+                    continue
+                header.append(f)
+            tdata[k][B.DATA_NODE_HEADER] = header
+            header = []
     if B.DATA_NODE_TABLES in tdata and B.DATA_NODE_TABLES in tdata[B.DATA_NODE_TABLES]:
         for k in tdata[B.DATA_NODE_TABLES][B.DATA_NODE_TABLES]:
             if k in tdata[B.DATA_NODE_TABLES]:

@@ -271,11 +281,17 @@ def setTableData(tableDict, fields, ttype, job):
         row[f] = fields[i].strip()
         i += 1
     if ttype == D.CSV_SPECTYPE_DATA:
+        if B.ATTR_DATA_COMP in tableDict:
+            tcomps = tableDict[B.ATTR_DATA_COMP]
+        else:
+            tcomps = {}
         row[B.ATTR_DATA_COMP] = {}
         for c in fields[0].split(","):
             a = c.split(":")
+            tcomps[a[0]] = a[1]
             row[B.ATTR_DATA_COMP][a[0]] = a[1].strip()
         tableDict[B.DATA_NODE_DATA].append(row)
+        tableDict[B.ATTR_DATA_COMP] = tcomps
     elif ttype == D.CSV_SPECTYPE_KEYS:
         tableDict[D.CSV_NODETYPE_KEYS][fields[tableDict[D.DATA_ATTR_KEY]].strip()] = row
     elif ttype == D.CSV_SPECTYPE_CONF:

@@ -287,9 +303,9 @@ def setTableAttribute(tableAttr, key, val, job):
     for attr in D.LIST_DATA_ATTR:
         if (key.lower() in list_blocks[attr]):
             tableAttr[attr] = val.strip()
-            tableAttr["hit"] = True
+            tableAttr["_hit"] = True
             return tableAttr
-    tableAttr["hit"] = False
+    tableAttr["_hit"] = False
     return tableAttr

@@ -362,6 +378,15 @@ def splitFields(line, delimiter, job):
     return out

+def writeCsvData(filename, tdata, comp, job):
+    text = ""
+    if B.DATA_NODE_TABLES in tdata:
+        for k in tdata[B.DATA_NODE_TABLES]:
+            text += buildCsvData(tdata[B.DATA_NODE_TABLES][k], k, job)
+            text += "\n"
+    utils.file_tool.writeFileText(comp.m, filename, text)
+
 def buildCsvData(tdata, table, job=None):
     """
     writes the testdata into a csv-file for documentation of the test-run