Browse Source

execution init_testcase with bugfixes

master
Ulrich Carmesin 2 years ago
parent
commit
5e8d3018d9
  1. 37
      basic/compexec.py
  2. 2
      basic/componentHandling.py
  3. 9
      basic/constants.py
  4. 27
      basic/toolHandling.py
  5. 23
      init_testcase.py
  6. 8
      test/test_04config.py
  7. 3
      utils/api_abstract.py
  8. 4
      utils/cli_abstract.py
  9. 8
      utils/config_tool.py
  10. 1
      utils/data_const.py
  11. 37
      utils/db_abstract.py
  12. 10
      utils/dbcsv_tool.py
  13. 11
      utils/dbmysql_tool.py
  14. 11
      utils/dbrel_tool.py
  15. 8
      utils/dbsfile_tool.py
  16. 9
      utils/dbshive_tool.py
  17. 3
      utils/dbspark_tool.py
  18. 3
      utils/file_abstract.py
  19. 19
      utils/filejson_tool.py
  20. 19
      utils/filelog_tool.py
  21. 19
      utils/filexml_tool.py
  22. 3
      utils/path_tool.py
  23. 39
      utils/tdata_tool.py

37
basic/compexec.py

@@ -79,16 +79,18 @@ class Testexecuter():
verify = -1+job.getDebugLevel(self.name)
self.m.debug(verify, "--- "+str(inspect.currentframe().f_code.co_name)+"() started at "
+ datetime.now().strftime("%Y%m%d_%H%M%S")+" for " + str(self.name).upper())
if B.ATTR_ARTS_LOG in self.conf[B.SUBJECT_ARTS]:
self.m.logInfo("log rotate in "+ self.name)
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS]:
self.m.logInfo("delete content "+ self.name)
dbi = basic.toolHandling.getDbTool(self)
dbi.deleteTables()
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTS]:
self.m.logInfo("lob is deleted with flaskdb "+ self.name)
if B.ATTR_ARTS_FILE in self.conf[B.SUBJECT_ARTS]:
self.m.logInfo("rm files in "+ self.name)
for node in [B.TOPIC_NODE_DB, B.TOPIC_NODE_CLI, B.TOPIC_NODE_API]:
if node not in self.conf[B.SUBJECT_ARTS]:
continue
tool = basic.toolHandling.getTool(node, self, job)
tool.reset_TData(job)
if B.TOPIC_NODE_FILE in self.conf[B.SUBJECT_ARTS]:
for file in self.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_FILE]:
if file in B.LIST_FILE_ATTR:
continue
print("91: "+self.classname+" "+file)
tool = basic.toolHandling.getFileTool(job, self, B.TOPIC_NODE_FILE+"."+file)
tool.reset_TData(job)
self.m.setMsg("resetInstance for " + self.name + " is OK")
self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() finished at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
@@ -103,13 +105,15 @@ class Testexecuter():
job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel(self.name)
self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
for node in [B.TOPIC_NODE_DB, B.TOPIC_NODE_CLI, B.TOPIC_NODE_FILE, B.TOPIC_NODE_API]:
print(node)
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS] and B.DATA_NODE_TABLES in tdata:
for t in tdata[B.DATA_NODE_TABLES]:
print (t)
if utils.db_abstract.isCompTable(self, tdata, t):
self.m.logInfo("insert content "+ self.name)
dbi = basic.toolHandling.getDbTool(self)
dbi.insertTables(tdata)
dbi = basic.toolHandling.getDbTool(self, job)
dbi.insertTables(tdata, job)
break
self.m.setMsg("data loaded for " + self.name + " is OK")
self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() finished at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
@@ -135,15 +139,16 @@ class Testexecuter():
self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS]:
self.m.logInfo("select db-content "+ self.name)
dbi = basic.toolHandling.getDbTool(self)
data = dbi.selectTables(subdir)
dbi = basic.toolHandling.getDbTool(self, job)
data = dbi.selectTables(subdir, job)
print("ppp")
#data = {}
for t in data[subdir]:
data[B.DATA_NODE_TABLES] = {}
data[B.DATA_NODE_TABLES][t] = data[subdir][t]
utils.tdata_tool.writeCsvData(utils.path_tool.rejoinPath(
utils.path_tool.composePattern("{tcresult}", self), subdir, t+".csv"), data, self)
utils.tdata_tool.writeCsvData(
utils.path_tool.rejoinPath(utils.path_tool.composePattern("{tcresult}", self), subdir, t+".csv"),
data, self, job)
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTS]:
self.m.logInfo("check lob if is deleted with flaskdb "+ self.name)
self.m.setMsg("readInstance for " + self.name + " is OK")

2
basic/componentHandling.py

@@ -192,6 +192,8 @@ class ComponentManager:
name = compName
i = 0
c.name = name
c.classname = compName
c.m = basic.message.Message(job, basic.message.LIMIT_DEBUG, "logTime", name)
c.conf = utils.config_tool.mergeConn(c.m, confs["conf"], conns[i])
c.conf[B.SUBJECT_CONN] = conns[i]

9
basic/constants.py

@@ -100,13 +100,15 @@ DATA_NODE_PAR = "par"
ATTR_ARTS_TYPE = "type"
""" must attribute for the type of the technique """
ATTR_ARTS_NAME = "name"
""" optional attribute just for information """
ATTR_ARTS_PATH = "path"
""" optional attribute for the basic folder if the artifact is stored in the filesystem """
ATTR_ARTS_RESET = "reset"
""" optional attribute if the artefact should be reset in the initializing-phase """
ATTR_ARTS_PRESTEP = "prestep"
""" optional attribute to define a source-table for this table """
LIST_ARTS_ATTR = [ATTR_ARTS_TYPE, ATTR_ARTS_PATH, ATTR_ARTS_RESET, ATTR_ARTS_PRESTEP]
LIST_ARTS_ATTR = [ATTR_ARTS_TYPE, ATTR_ARTS_PATH, ATTR_ARTS_RESET, ATTR_ARTS_PRESTEP, ATTR_ARTS_NAME]
TOPIC_NODE_DB = "db"
# testexec, db_abstr
@@ -125,7 +127,10 @@ LIST_CLI_ATTR = [] + LIST_ARTS_ATTR
TOPIC_NODE_API = "api"
LIST_API_ATTR = [] + LIST_ARTS_ATTR
TOPIC_NODE_FILE = "file"
LIST_FILE_ATTR = [] + LIST_ARTS_ATTR
ATTR_FILE_OLD = "oldfile"
ATTR_FILE_ROTATE = "rotate"
LIST_FILE_ATTR = [ATTR_FILE_OLD, ATTR_FILE_ROTATE] + LIST_ARTS_ATTR
LIST_ATTR = {
TOPIC_NODE_DB: LIST_DB_ATTR,
TOPIC_NODE_API: LIST_API_ATTR,

27
basic/toolHandling.py

@@ -9,6 +9,8 @@ import os
import basic.program
import basic.constants as B
# -------------------------------------------------
import utils.config_tool
def hasAttr(o, name):
if (isinstance(o, dict)):
@@ -46,10 +48,20 @@ def getCompAttr(comp, topic, attr, table=""):
return getAttr(comp.conf[B.SUBJECT_ARTS][topic], attr)
raise LookupError(topic+"."+attr+" is not set in comp " + comp.name)
def getTool(technicType, comp, job):
if technicType == B.TOPIC_NODE_DB:
return getDbTool(comp, job)
if technicType == B.TOPIC_NODE_CLI:
return getCliTool(comp, job)
if technicType == B.TOPIC_NODE_API:
return getApiTool(comp, job)
if technicType == B.TOPIC_NODE_FILE:
# TODO im Allgemeinen keine konrete Implementierung aufrufen,
# denn zu einer Komponente koennen unterschiedliche Dateien vorkommen
return getFileTool(job, comp, "")
# class ToolManager:
def getDbTool(comp):
job = basic.program.Job.getInstance()
def getDbTool(comp, job):
verify = int(job.getDebugLevel("db_tool"))
dbtype = getCompAttr(comp, B.TOPIC_NODE_DB, B.ATTR_TYPE, "")
toolname = "db"+dbtype+"_tool"
@@ -63,8 +75,7 @@ def getDbTool(comp):
c.setComp(comp)
return c
def getCliTool(comp):
job = basic.program.Job.getInstance()
def getCliTool(comp, job):
verify = int(job.getDebugLevel("db_tool"))
clitype = getCompAttr(comp, B.TOPIC_NODE_CLI, B.ATTR_TYPE, "")
toolname = "cli"+clitype+"_tool"
@@ -78,8 +89,7 @@ def getCliTool(comp):
c.setComp(comp)
return c
def getApiTool(comp):
job = basic.program.Job.getInstance()
def getApiTool(comp, job):
verify = int(job.getDebugLevel("db_tool"))
apitype = getCompAttr(comp, B.TOPIC_NODE_API, B.ATTR_TYPE, "")
toolname = "api"+apitype+"_tool"
@@ -93,8 +103,11 @@ def getApiTool(comp):
c.setComp(comp)
return c
def getFileTool(job, comp=None):
def getFileTool(job, comp, filenode=""):
verify = int(job.getDebugLevel("db_tool"))
if len(filenode) > 3 and filenode[-1:] != ".":
filetype = utils.config_tool.getAttribute(comp, filenode, B.ATTR_ARTS_TYPE, job)
else:
filetype = getCompAttr(comp, B.TOPIC_NODE_FILE, B.ATTR_TYPE, "")
toolname = "file"+filetype+"_tool"
filepath = os.path.join(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_PROGRAM], "utils", toolname+".py")

23
init_testcase.py

@@ -1,25 +1,30 @@
# This is a sample Python script.
import os
# import jsonpickle # pip install jsonpickle
import yaml # pip install pyyaml
#!/usr/bin/python
# program to execute steps of a testcase
# PARAM: --environment --application --tcdir [ testcase, tctime ]
# main functions
# + reset_testcase() : comp-config --> system
# + load_testcase() : testspec --> tdata --> system.data
# + select_testcase() : system.data --> data --> archiv.result
# ---------------------------------------------------
import os
import basic.program as program
import utils.tdata_tool
import basic.componentHandling
import basic.constants as B
import utils.file_tool
import utils.path_tool
import utils.path_const as P
import basic.message as message
# Press Umschalt+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.
PROGRAM_NAME = "init_testcase"
def startPyJob(job):
cm = basic.componentHandling.ComponentManager.getInstance("init")
print("cm "+str(cm))
cm.initComponents()
comps = cm.getComponents(PROGRAM_NAME)
job.m.setMsg("# Components initialized with these relevant components " + str(comps))
testdata = utils.tdata_tool.getTestdata()
print("------------------------------------------------------------")
for c in comps:
comp = cm.getComponent(c)
comp.m.logInfo("------- "+comp.name+" ----------------------------------------")
@@ -28,7 +33,7 @@ def startPyJob(job):
if job.hasFunction("load_TData"):
comp.load_TData(B.PAR_TESTCASE, testdata)
if job.hasFunction("read_TData"):
comp.read_TData("vorher", B.PAR_TESTCASE)
comp.read_TData(utils.path_tool.getKeyValue(P.KEY_PRECOND), B.PAR_TESTCASE)
comp.m.logInfo("------- "+comp.name+" ----------------------------------------")
job.m.merge(comp.m)
print(str(comp))
@@ -39,8 +44,6 @@ if __name__ == '__main__':
print(PROGRAM_NAME)
x = program.Job(PROGRAM_NAME)
x.startJob()
x.m.logInfo("hier eine LogInfo")
x.m.logDebug("hier eine DbugMeldung")
x.m.logDebug(str(vars(x.par)) + "\n" + str(vars(x.conf)))
if x.m.isRc("fatal"):
x.stopJob()

8
test/test_04config.py

@@ -18,7 +18,7 @@ import utils.path_const as P
import basic.constants as B
TEST_FUNCTIONS = ["test_01getConfig", "test_02mergeAttributes", "test_03getAttributes"]
#TEST_FUNCTIONS = ["test_03getAttributes"]
TEST_FUNCTIONS = ["test_03getAttributes"]
verbose = False
class MyTestCase(unittest.TestCase):
@@ -95,6 +95,12 @@ class MyTestCase(unittest.TestCase):
self.assertIn(B.ATTR_ARTS_PATH, attrList)
self.assertIn(B.ATTR_ARTS_RESET, attrList)
cnttest += 2 # new attribute
comp = test.testtools.getComp("testrest")
path = "file.xmlrest"
attrList = utils.config_tool.getAttributeList(comp, path, job)
print(str(comp.conf["conn"]))
print(str(comp.conf[B.SUBJECT_ARTS]))
print(str(attrList))
MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)

3
utils/api_abstract.py

@@ -28,6 +28,9 @@ class ApiFcts():
self.comp = None
pass
def reset_TData(self, job):
pass
def setComp(self, comp):
self.comp = comp

4
utils/cli_abstract.py

@@ -22,6 +22,7 @@ The main tasks are: \n
"""
import basic.program
import utils.config_tool
import basic.constants as B
class CliFcts():
"""
@@ -32,6 +33,9 @@ class CliFcts():
self.comp = None
pass
def reset_TData(self, job):
pass
def setComp(self, comp):
self.comp = comp

8
utils/config_tool.py

@@ -248,6 +248,14 @@ def getConfig(modul, name, subname=""):
return confs
def getAttribute(comp, path, attr, job):
attrList = getAttributeList(comp, path, job)
if attr in attrList:
return attrList[attr]
else:
return ""
def getAttributeList(comp, path, job):
"""
gets a concrete attribute-list for an arteifact-element from the config-attributes from the connection-attributes

1
utils/data_const.py

@@ -8,6 +8,7 @@ DDL_FILENAME = "DATASTRUCTURE"
DATA_NODE_TYPE = "type"
TYPE_STRING = "string"
TYPE_STR = "str"
TYPE_INT = "int"
TYPE_FLOAT = "float"
TYPE_DOUBLE = "double"

37
utils/db_abstract.py

@@ -219,7 +219,7 @@ def formatDbField(comp, val, field):
def formatDbVal(msg, val, dtyp):
ctlg = basic.catalog.Catalog.getInstance()
if dtyp == D.TYPE_STRING:
if dtyp == D.TYPE_STRING or dtyp == D.TYPE_STR:
if not isinstance(val, str):
msg.logError("field must be " + dtyp + ", " + str(val))
return str(val)
@@ -278,14 +278,14 @@ class DbFcts():
return getDbAttributes(self.comp, table)
def selectTables(self, subdir):
def selectTables(self, subdir, job):
""" method to delete rows from a database
statement written in sql """
self.loadDdl()
self.loadDdl(job)
tdata = {}
tdata[subdir] = {}
for t in self.comp.conf[B.DATA_NODE_DDL]:
tdata[subdir][t] = self.selectRows(t)
tdata[subdir][t] = self.selectRows(t, job)
if B.DATA_NODE_DATA not in tdata[subdir][t]:
raise Exception("missing data node in table")
tdata[subdir][t][D.DATA_ATTR_COUNT] = len(tdata[subdir][t][B.DATA_NODE_DATA])
@@ -293,25 +293,28 @@ class DbFcts():
self.comp.m.logMsg("Tabelle {} mit {} Zeilen gelesen".format(t, len(tdata[subdir][t][B.DATA_NODE_DATA])))
return tdata
def selectRows(self, statement):
def selectRows(self, table, job):
""" method to select rows from a database
statement written in sql """
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def deleteTables(self):
def reset_TData(self, job):
self.deleteTables(job)
def deleteTables(self, job):
""" method to delete rows from a database
statement written in sql """
self.loadDdl()
self.loadDdl(job)
for t in self.comp.conf[B.DATA_NODE_DDL]:
print("zu loeschende Tabelle "+t)
self.deleteRows(t)
self.deleteRows(t, job)
def deleteRows(self, table):
def deleteRows(self, table, job):
""" method to delete rows from a database
statement written in sql """
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def updateRows(self, statement):
def updateRows(self, statement, job):
""" method to delete rows from a database
statement written in sql """
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
@@ -321,17 +324,17 @@ class DbFcts():
this method should only called by the class itself """
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def insertTables(self, tdata):
def insertTables(self, tdata, job):
"""
method to insert rows into the database of the component
"""
# TODO wird der Tabellenname/DB/Schema unter tdata gespeichert?
plainname = basic.componentHandling.getPlainCompname(self.comp.name)
self.loadDdl()
self.loadDdl(job)
for t in tdata[B.DATA_NODE_TABLES]:
print("einzufuegende Tabelle "+self.comp.name+" "+t)
if isCompTable(self.comp, tdata, t):
self.insertRows(t, tdata[B.DATA_NODE_TABLES][t][B.DATA_NODE_DATA])
self.insertRows(t, tdata[B.DATA_NODE_TABLES][t][B.DATA_NODE_DATA], job)
self.comp.m.logMsg("in Tabelle {} {} Zeilen eingefuegt".format(
t, len(tdata[B.DATA_NODE_TABLES][t][B.DATA_NODE_DATA])))
@@ -347,10 +350,9 @@ class DbFcts():
this method should only called by the class itself """
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def loadDdl(self):
def loadDdl(self, job):
"""" load the DDL for each database-table
the ddl are mostly stored as csv in the component-folder """
job = basic.program.Job.getInstance()
if (B.DATA_NODE_DDL in self.comp.conf):
return
conf = utils.config_tool.getConfig(D.DDL_FILENAME, self.comp.name)
@@ -365,11 +367,10 @@ class DbFcts():
return ""
def getDbValue(self, fo, value):
# TODO Untersceidung csv und echte DB
return formatDbField(self.comp, value, fo)
value = str(formatDbField(self.comp, value, fo))
if len(value.strip()) == 0 and fo[D.DDL_FNULLABLE] == B.SVAL_YES:
return self.getDbNull()
if fo[D.DATA_NODE_TYPE] == D.TYPE_STRING:
if fo[D.DATA_NODE_TYPE] == D.TYPE_STRING or fo[D.DATA_NODE_TYPE] == D.TYPE_STR:
return "'"+value.strip()+"'"
elif fo[D.DATA_NODE_TYPE] == D.TYPE_INT:
return value.strip()

10
utils/dbcsv_tool.py

@@ -21,7 +21,7 @@ class DbFcts(utils.db_abstract.DbFcts):
def __init__(self):
pass
def selectRows(self, table):
def selectRows(self, table, job):
""" method to select rows from a database
statement written in sql """
sqlTable = utils.db_abstract.getSqlTable(self.comp, table)
@@ -40,10 +40,9 @@ class DbFcts(utils.db_abstract.DbFcts):
return tdata
def deleteRows(self, table):
def deleteRows(self, table, job):
""" method to delete rows from a database
statement written in sql """
job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel("db_tool")
sqlTable = utils.db_abstract.getSqlTable(self.comp, table)
header = ""
@@ -62,11 +61,10 @@ class DbFcts(utils.db_abstract.DbFcts):
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def insertRows(self, table, rows):
def insertRows(self, table, rows, job):
""" method to insert rows into a database
the rows will be interpreted by the ddl of the component
"""
job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel("db_tool")
sqlTable = utils.db_abstract.getSqlTable(self.comp, table)
header = ""
@@ -89,7 +87,7 @@ class DbFcts(utils.db_abstract.DbFcts):
rowvalues = self.comp.name+":"+table
for h in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
print("head "+h)
if h in B.LIST_DB_ATTR:
if h in [B.DATA_NODE_HEADER, B.DATA_NODE_DATA]:
continue
print("h "+h)
if (h in r):

11
utils/dbmysql_tool.py

@@ -19,11 +19,10 @@ class DbFcts(utils.db_abstract.DbFcts):
def __init__(self):
pass
def selectRows(self, table):
def selectRows(self, table, job):
""" method to select rows from a database
statement written in sql """
tdata = {}
job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel("db_tool")
cmd = "SELECT * FROM "+table+";"
#mycursor = self.getConnector()
@@ -38,24 +37,22 @@ class DbFcts(utils.db_abstract.DbFcts):
self.comp.m.logInfo(cmd)
return tdata
def deleteRows(self, table):
def deleteRows(self, table, job):
""" method to delete rows from a database
statement written in sql """
job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel("db_tool")
cmd = "DELETE FROM "+table+";"
self.comp.m.logInfo(cmd)
def updateRows(self, statement):
def updateRows(self, statement, job):
""" method to delete rows from a database
statement written in sql """
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def insertRows(self, table, rows):
def insertRows(self, table, rows, job):
""" method to insert rows into a database
the rows will be interpreted by the ddl of the component
"""
job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel("db_tool")
cmd = "INSERT INTO "+table+";"
header = ""

11
utils/dbrel_tool.py

@@ -19,11 +19,10 @@ class DbFcts(utils.db_abstract.DbFcts):
def __init__(self):
pass
def selectRows(self, table):
def selectRows(self, table, job):
""" method to select rows from a database
statement written in sql """
tdata = {}
job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel("db_tool")
cmd = "SELECT * FROM "+table+";"
#mycursor = self.getConnector()
@@ -38,24 +37,22 @@ class DbFcts(utils.db_abstract.DbFcts):
self.comp.m.logInfo(cmd)
return tdata
def deleteRows(self, table):
def deleteRows(self, table, job):
""" method to delete rows from a database
statement written in sql """
job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel("db_tool")
cmd = "DELETE FROM "+table+";"
self.comp.m.logInfo(cmd)
def updateRows(self, statement):
def updateRows(self, statement, job):
""" method to delete rows from a database
statement written in sql """
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def insertRows(self, table, rows):
def insertRows(self, table, rows, job):
""" method to insert rows into a database
the rows will be interpreted by the ddl of the component
"""
job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel("db_tool")
cmd = "INSERT INTO "+table+";"
header = ""

8
utils/dbsfile_tool.py

@@ -25,13 +25,12 @@ class DbFcts(utils.db_abstract.DbFcts):
pass
def selectRows(self, table):
def selectRows(self, table, job):
""" method to select rows from a database
statement written in sql """
tdata = {}
dry = 0
# attr = self.getDbAttributes(table)
job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel("db_tool")
pattern = "s3a://{hostname}/data/{tenant}/mt/sandboxes/{job.par.usecae}/{job.par.workspace}/{outfile}/VR_+reg+/"
files = self.comp.composeFileClauses(pattern)
@@ -51,10 +50,9 @@ class DbFcts(utils.db_abstract.DbFcts):
#tdata[B.DATA_NODE_DATA] = data
return tdata
def deleteRows(self, table):
def deleteRows(self, table, job):
""" method to delete rows from a database
statement written in sql """
job = basic.program.Job.getInstance()
dry = 0
verify = -1+job.getDebugLevel("db_tool")
cmd = "DELETE FROM "+table
@@ -72,7 +70,7 @@ class DbFcts(utils.db_abstract.DbFcts):
print("select "+sql)
#self.comp.m.logInfo(cmd)
def insertRows(self, table, rows):
def insertRows(self, table, rows, job):
""" method to insert rows into a database
the rows will be interpreted by the ddl of the component
"""

9
utils/dbshive_tool.py

@@ -26,13 +26,11 @@ class DbFcts(utils.db_abstract.DbFcts):
pass
def selectRows(self, table):
def selectRows(self, table, job):
""" method to select rows from a database
statement written in sql """
tdata = {}
dry = 0
# attr = self.getDbAttributes(table)
job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel("db_tool")
cmd = "SELECT "+",".join(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER])
cmd += " FROM "+table
@@ -56,10 +54,9 @@ class DbFcts(utils.db_abstract.DbFcts):
tdata[B.DATA_NODE_DATA] = data
return tdata
def deleteRows(self, table):
def deleteRows(self, table, job):
""" method to delete rows from a database
statement written in sql """
job = basic.program.Job.getInstance()
dry = 0
verify = -1+job.getDebugLevel("db_tool")
cmd = "DELETE FROM "+table
@@ -77,7 +74,7 @@ class DbFcts(utils.db_abstract.DbFcts):
print("select "+sql)
#self.comp.m.logInfo(cmd)
def insertRows(self, table, rows):
def insertRows(self, table, rows, job):
""" method to insert rows into a database
the rows will be interpreted by the ddl of the component
"""

3
utils/dbspark_tool.py

@@ -23,11 +23,10 @@ class DbFcts(utils.db_abstract.DbFcts):
return out
def selectRows(self, table):
def selectRows(self, table, job):
""" method to select rows from a database
statement written in sql """
tdata = {}
job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel("db_tool")
cmd = "SELECT "+",".join(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER])
cmd += " FROM "+table+""+self.getWhere()+""+self.getOrder()

3
utils/file_abstract.py

@@ -32,6 +32,9 @@ class FileFcts():
def __init__(self):
pass
def reset_TData(self, job):
pass
def setComp(self, job, comp=None):
self.job = job
self.comp = comp

19
utils/filejson_tool.py

@@ -0,0 +1,19 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import basic.program
import utils.config_tool
import utils.file_abstract
import basic.constants as B
import utils.path_tool
import utils.file_tool
import utils.tdata_tool
class FileFcts(utils.file_abstract.FileFcts):
def __init__(self):
pass

19
utils/filelog_tool.py

@@ -0,0 +1,19 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import basic.program
import utils.config_tool
import utils.file_abstract
import basic.constants as B
import utils.path_tool
import utils.file_tool
import utils.tdata_tool
class FileFcts(utils.file_abstract.FileFcts):
def __init__(self):
pass

19
utils/filexml_tool.py

@@ -0,0 +1,19 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import basic.program
import utils.config_tool
import utils.file_abstract
import basic.constants as B
import utils.path_tool
import utils.file_tool
import utils.tdata_tool
class FileFcts(utils.file_abstract.FileFcts):
def __init__(self):
pass

3
utils/path_tool.py

@@ -83,12 +83,13 @@ def composePattern(pattern, comp):
"""
job = basic.program.Job.getInstance()
verify = job.getDebugLevel(TOOL_NAME)
verbose = False
job.debug(verify, "composePattern " + pattern)
max=5
l = re.findall('\{.*?\}', pattern)
job.debug(verify, l)
for pat in l:
print(str(max) + ": " + pattern + ": " + pat)
if verbose: print(str(max) + ": " + pattern + ": " + pat)
pit = getKeyValue(pat[1:-1], comp)
job.debug(verify, str(max) + ": " + pattern + ": " + pat + ": " + pit)
pattern = pattern.replace(pat, pit)

39
utils/tdata_tool.py

@@ -31,9 +31,9 @@ def getTestdata(job=None):
if job is None:
job = basic.program.Job.getInstance()
if "testcase" in job.program:
return collectTestdata(B.PAR_TESTCASE, job.par[B.PAR_TESTCASE], job)
return collectTestdata(B.PAR_TESTCASE, getattr(job.par, B.PAR_TESTCASE), job)
else:
return collectTestdata(B.PAR_TESTSUITE, job.par[B.PAR_TESTSUITE], job)
return collectTestdata(B.PAR_TESTSUITE, getattr(job.par, B.PAR_TESTSUITE), job)
def collectTestdata(gran, testentity, job):
@@ -45,15 +45,16 @@ def collectTestdata(gran, testentity, job):
setBlockLists(job)
if gran == B.PAR_TESTCASE:
basispath = utils.path_tool.rejoinPath(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_TDATA], testentity)
pathname = utils.config_tool.getConfigPath(P.KEY_TESTCASE, job.par[B.PAR_TESTCASE], "", job)
pathname = utils.config_tool.getConfigPath(P.KEY_TESTCASE, getattr(job.par, B.PAR_TESTCASE), "", job)
if gran == B.PAR_TESTSUITE:
basispath = utils.path_tool.rejoinPath(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_TDATA], testentity)
pathname = utils.config_tool.getConfigPath(P.KEY_TESTSUITE, job.par[B.PAR_TESTSUITE], "", job)
pathname = utils.config_tool.getConfigPath(P.KEY_TESTSUITE, getattr(job.par, B.PAR_TESTSUITE), "", job)
if pathname[-3:] == D.DFILE_TYPE_CSV:
tdata = getCsvSpec(job.m, pathname, D.CSV_SPECTYPE_DATA)
else:
tdata = utils.file_tool.readFileDict(pathname, job.m)
# get explicit specdata of includes
if D.CSV_BLOCK_IMPORT in tdata:
for pathname in tdata[D.CSV_BLOCK_IMPORT]:
pathname = utils.path_tool.rejoinPath(pathname)
if job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_TDATA] not in pathname:
@@ -178,7 +179,7 @@ def parseCsvSpec(msg, lines, ttype, tdata, job=None):
# keywords option, step, table
if verbose: print(str(a)+" -- "+str(fields))
tableAttr = setTableAttribute(tableAttr, a[0], fields[1], job)
if (tableAttr["hit"]):
if (tableAttr["_hit"]):
status = "TABLE_ALIAS"
continue
if (a[0].lower() in list_blocks[D.CSV_BLOCK_HEAD]):
@@ -229,9 +230,18 @@
setTableHeader(tableDict, tableAttr, fields, ttype, job)
status = D.CSV_SPECTYPE_DATA
if ttype == D.CSV_SPECTYPE_CONF:
header = []
for k in tdata:
if k in D.LIST_DATA_ATTR:
continue
if B.DATA_NODE_DATA in tdata[k]:
tdata[k].pop(B.DATA_NODE_DATA)
for f in tdata[k]:
if f in [B.DATA_NODE_HEADER, "_hit"] + D.LIST_DATA_ATTR:
continue
header.append(f)
tdata[k][B.DATA_NODE_HEADER] = header
header = []
if B.DATA_NODE_TABLES in tdata and B.DATA_NODE_TABLES in tdata[B.DATA_NODE_TABLES]:
for k in tdata[B.DATA_NODE_TABLES][B.DATA_NODE_TABLES]:
if k in tdata[B.DATA_NODE_TABLES]:
@@ -271,11 +281,17 @@ def setTableData(tableDict, fields, ttype, job):
row[f] = fields[i].strip()
i += 1
if ttype == D.CSV_SPECTYPE_DATA:
if B.ATTR_DATA_COMP in tableDict:
tcomps = tableDict[B.ATTR_DATA_COMP]
else:
tcomps = {}
row[B.ATTR_DATA_COMP] = {}
for c in fields[0].split(","):
a = c.split(":")
tcomps[a[0]] = a[1]
row[B.ATTR_DATA_COMP][a[0]] = a[1].strip()
tableDict[B.DATA_NODE_DATA].append(row)
tableDict[B.ATTR_DATA_COMP] = tcomps
elif ttype == D.CSV_SPECTYPE_KEYS:
tableDict[D.CSV_NODETYPE_KEYS][fields[tableDict[D.DATA_ATTR_KEY]].strip()] = row
elif ttype == D.CSV_SPECTYPE_CONF:
@@ -287,9 +303,9 @@ def setTableAttribute(tableAttr, key, val, job):
for attr in D.LIST_DATA_ATTR:
if (key.lower() in list_blocks[attr]):
tableAttr[attr] = val.strip()
tableAttr["hit"] = True
tableAttr["_hit"] = True
return tableAttr
tableAttr["hit"] = False
tableAttr["_hit"] = False
return tableAttr
@@ -362,6 +378,15 @@ def splitFields(line, delimiter, job):
return out
def writeCsvData(filename, tdata, comp, job):
text = ""
if B.DATA_NODE_TABLES in tdata:
for k in tdata[B.DATA_NODE_TABLES]:
text += buildCsvData(tdata[B.DATA_NODE_TABLES][k], k, job)
text += "\n"
utils.file_tool.writeFileText(comp.m, filename, text)
def buildCsvData(tdata, table, job=None):
"""
writes the testdata into a csv-file for documentation of the test-run

Loading…
Cancel
Save