Data-Test-Executer Framework, specifically for testing data processing, with data generation, system preparation, data loading and a holistic, diversifying comparison
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
# abstract class for test-running functions
# ---------------------------------------------
"""
The test runs in 3 step:
1.1 precondition of system
1.2 test the system
1.3 postconditions of system incl. comparison
---
The main test is the testcase but there are superior granularities like
2.a test-sequence : few test-cases depending in a businessflow
therefore you test a testcase without resetting the system
2.b test-set : some independing test-cases
therefore you can test postconditions that works not directly
2.c test-matrix : a set of test-sets for the whole test
The granularity could be an implicite parameter in the main-module.
---
the test could be in different test-levels like
3.a component/service-test
a quick test to check single functions like in component-test, so you have the possibility to test x-product.
In difference of developer-test you can run large test-sets
3.b system-test
3.c integration-test
3.d acceptance-test
the test-level could be a configuration of the test-center or implicite of the application-definition.
---
the test can check different quality-measures like
4.a functionality
4.b
"""
from datetime import datetime
import basic.message
import basic.program
import inspect
import os
import re
import utils.db_abstract
import basic.toolHandling
import basic.component
import basic.componentHandling
import utils.path_tool
import utils.file_tool
import utils.match_tool
import utils.match_const as M
import utils.tdata_tool
import basic.constants as B
class Testexecuter():
    def prepare_system(self, granularity):
        """
        In order to prepare the test system, the test data should be cleaned and loaded with current data.
        This can happen at different granularities: for each testcase, for each test sequence,
        for a test set or for the whole test.
        The loaded test data should be selected for a later comparison.
        :return:
        """
        job = basic.program.Job.getInstance()
        verify = -1+job.getDebugLevel(self.name)
        self.m.logInfo("--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
        self.reset_TData(granularity)
        self.m.setMsg("prepare_system for " + self.name +" "+ granularity + " is OK")
        self.m.logInfo("--- " + str(inspect.currentframe().f_code.co_name) + "() finished at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
        pass
    def reset_TData(self, granularity):
        """
        resets the test data in the component of the test system,
        corresponding to the configuration of the component
        :param granularity:
        :return:
        """
        job = basic.program.Job.getInstance()
        verify = -1+job.getDebugLevel(self.name)
        self.m.debug(verify, "--- "+str(inspect.currentframe().f_code.co_name)+"() started at "
                     + datetime.now().strftime("%Y%m%d_%H%M%S")+" for " + str(self.name).upper())
        for node in [B.TOPIC_NODE_DB, B.TOPIC_NODE_CLI, B.TOPIC_NODE_API]:
            if node not in self.conf[B.SUBJECT_ARTS]:
                continue
            tool = basic.toolHandling.getTool(node, self, job)
            tool.reset_TData(job)
        if B.TOPIC_NODE_FILE in self.conf[B.SUBJECT_ARTS]:
            for file in self.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_FILE]:
                if file in B.LIST_FILE_ATTR:
                    continue
                print("91: "+self.classname+" "+file)
                tool = basic.toolHandling.getFileTool(job, self, B.TOPIC_NODE_FILE+"."+file)
                tool.reset_TData(job)
        self.m.setMsg("resetInstance for " + self.name + " is OK")
        self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() finished at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
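    # For illustration: reset_TData only touches the artifact topics that are present in the
    # component configuration. Roughly, the expected shape is (the concrete sub-attributes are
    # component-specific and only sketched here):
    #
    #     self.conf[B.SUBJECT_ARTS] = {
    #         B.TOPIC_NODE_DB:   {...db attributes...},
    #         B.TOPIC_NODE_FILE: {...file attributes, one entry per managed file...}
    #     }
    #
    # Topics that are missing in this block are skipped; file entries listed in
    # B.LIST_FILE_ATTR are treated as attributes, not as files to reset.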
    def load_TData(self, granularity, tdata):
        """
        loads the test data into the component, especially into databases,
        or via the import functions of the component
        :param granularity:
        :param tdata:
        :return:
        """
        job = basic.program.Job.getInstance()
        verify = -1+job.getDebugLevel(self.name)
        self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
        for node in [B.TOPIC_NODE_DB, B.TOPIC_NODE_CLI, B.TOPIC_NODE_FILE, B.TOPIC_NODE_API]:
            print(node)
        if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS] and B.DATA_NODE_TABLES in tdata:
            for t in tdata[B.DATA_NODE_TABLES]:
                print(t)
                if utils.db_abstract.isCompTable(self, tdata, t):
                    self.m.logInfo("insert content "+ self.name)
                    dbi = basic.toolHandling.getDbTool(self, job)
                    dbi.insertTables(tdata, job)
                    break
        self.m.setMsg("data loaded for " + self.name + " is OK")
        self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() finished at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
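    # For illustration: load_TData only inserts database content when the test data contain a
    # table node, i.e. roughly (table names and row content are placeholders):
    #
    #     tdata = {
    #         B.DATA_NODE_TABLES: {
    #             "some_table": {...rows as produced by utils.tdata_tool...}
    #         }
    #     }
    #
    # Each table is checked with utils.db_abstract.isCompTable(); as soon as one table belongs
    # to this component, the whole structure is passed to the db-tool's insertTables().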
    def collect_TcArtifacts(self):
        """
        collects the artifacts from the test-system.
        the result is written as original in subfolder {tsorigin}
        :return:
        """
        job = basic.program.Job.getInstance()
        verify = -1+job.getDebugLevel(self.name)
        self.read_TData("nachher", B.PAR_TESTCASE)
    def read_TData(self, subdir, granularity):
        """
        reads the actual data of the component from the test system
        and writes it per table as csv file into the subfolder {tcresult}/<subdir>
        :param subdir:
        :param granularity:
        :return:
        """
        job = basic.program.Job.getInstance()
        verify = -1+job.getDebugLevel(self.name)
        self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
        if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS]:
            self.m.logInfo("select db-content "+ self.name)
            dbi = basic.toolHandling.getDbTool(self, job)
            data = dbi.selectTables(subdir, job)
            print("ppp")
            #data = {}
            for t in data[subdir]:
                data[B.DATA_NODE_TABLES] = {}
                data[B.DATA_NODE_TABLES][t] = data[subdir][t]
                utils.tdata_tool.writeCsvData(
                    utils.path_tool.rejoinPath(utils.path_tool.composePattern("{tcresult}", self), subdir, t+".csv"),
                    data, self, job)
        if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTS]:
            self.m.logInfo("check lob if is deleted with flaskdb "+ self.name)
        self.m.setMsg("readInstance for " + self.name + " is OK")
        self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() finished at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
    def composeFileClauses(self, pattern):
        """
        resolves the {placeholder} parts of the file pattern from the db attributes of the
        component and expands the pattern per configured partition
        :param pattern:
        :return: dictionary partition -> resolved pattern
        """
        job = basic.program.Job.getInstance()
        out = {}
        attr = utils.db_abstract.getDbAttributes(self, "null")
        while "{" in pattern:
            pre = pattern[0:pattern.index("{")]
            pat = pattern[pattern.index("{"):pattern.index("}")]
            post = pattern[pattern.index("}"):]
            pattern = pre+attr[pat]+post
        if (hasattr(job.par, B.PAR_DB_PARTITION)) and (attr[B.ATTR_DB_PARTITION] != "n"):
            parts = getattr(job.par, B.PAR_DB_PARTITION)
            a = parts.split(",")
            for p in a:
                # replace the partition placeholder on a copy so that each partition
                # gets its own clause (analogous to composeSqlClauses)
                pattern_part = pattern.replace(attr[B.ATTR_DB_PARTITION], p)
                out[p] = pattern_part
        else:
            out["ALL"] = pattern
        return out
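    # For illustration of the partition handling (values invented): if the job parameter
    # B.PAR_DB_PARTITION is "2023,2024" and the resolved pattern still contains the placeholder
    # named by attr[B.ATTR_DB_PARTITION], the method returns one entry per partition, e.g.
    # {"2023": <pattern with 2023>, "2024": <pattern with 2024>}; otherwise it returns the
    # single entry {"ALL": <resolved pattern>}.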
    @staticmethod
    def getStringIndex(text, intern):
        if intern in text:
            return text.index(intern)
        return 999
    @staticmethod
    def parseCondition(condition):
        out = []
        for operator in [">=", "<=", ">", "<", "=", " like ", " in "]:
            if operator in condition:
                i = condition.lower().index(operator)
                field = condition[0:i]
                attr = condition[i+len(operator):]
                out = [operator, field, attr]
                return out
        return out
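    # Usage sketch for the two static helpers (input values invented):
    #
    #     Testexecuter.getStringIndex("a=1 and b=2", " and ")    # -> 3
    #     Testexecuter.getStringIndex("a=1", " or ")             # -> 999 (sentinel for "not found")
    #     Testexecuter.parseCondition("family like !Mustermann!")
    #     # -> [" like ", "family", "!Mustermann!"]
    #     Testexecuter.parseCondition("abc def")                 # -> [] (no known operator)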
    def composeSqlClauses(self, sql):
        """
        completes the raw sql statement: qualifies the table with schema and database from the
        db attributes, appends an optional WHERE clause from the job parameters, orders SELECTs
        by their technical id fields and expands the statement per configured partition
        :param sql:
        :return: dictionary partition -> executable sql statement
        """
        job = basic.program.Job.getInstance()
        out = {}
        print("-------- composeSqlClauses "+sql)
        table = sql[sql.upper().index(" FROM ")+6:].strip()
        print("table "+table)
        sql_new = sql[0:sql.upper().index(" FROM ")+5]
        print("sql_new "+sql_new)
        attr = utils.db_abstract.getDbAttributes(self, table)
        if attr[B.ATTR_DB_TABNAME] != "":
            table = attr[B.ATTR_DB_TABNAME]
        if attr[B.ATTR_DB_SCHEMA] != "":
            table = attr[B.ATTR_DB_SCHEMA]+"."+table
        sql_new += " "+attr[B.ATTR_DB_DATABASE]+"."+table
        print("sql_new "+sql_new)
        if (hasattr(job.par, B.PAR_DB_WHERE)):
            # currently only a conjunctive or disjunctive normal form without parentheses is parsed
            parts = utils.db_abstract.parseSQLwhere(getattr(job.par, B.PAR_DB_WHERE), self.conf[B.DATA_NODE_DDL][table])
            # fields and operations
            # print(dbwhere)
            sql_new += " WHERE "+parts
        if sql_new[0:6] == "SELECT":
            ids = utils.db_abstract.getTechnicalIDFields(self.conf["ddl"][table])
            sql_new += " ORDER BY "+",".join(ids)
        print("sql_new "+sql_new)
        sql_new = sql_new.replace('!', "\'")
        if (hasattr(job.par, B.PAR_DB_PARTITION)) and (attr[B.ATTR_DB_PARTITION] != "n"):
            parts = getattr(job.par, B.PAR_DB_PARTITION)
            a = parts.split(",")
            for p in a:
                sql_part = sql_new
                sql_part = sql_part.replace(attr[B.ATTR_DB_PARTITION], p)
                out[p] = sql_part
        else:
            out["ALL"] = sql_new
        print("---> out "+str(out))
        return out
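    # Usage sketch (database, schema and table names invented): for a component whose db
    # attributes define the database "TESTDB" and the schema "dbo", a call like
    #
    #     comp.composeSqlClauses("SELECT * FROM orders")
    #
    # yields {"ALL": "SELECT * FROM TESTDB.dbo.orders ORDER BY <technical id fields>"} when no
    # partitioning is configured, or one statement per value of the job parameter
    # B.PAR_DB_PARTITION with the partition placeholder replaced in each of them.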
    def test_System(self, granularity):
        """
        :param granularity:
        :return:
        """
    def create_Request(self, granularity):
        pass
    def send_Request(self, granularity):
        pass
    def get_Response(self, granularity):
        pass
    def execute_test(self, job, step, tdata):
        """
        executes a test step. The exact step with its partial steps is defined in the
        component configuration under "teststeps" and the step attribute.
        :param job:
        :param step: the step object
        :param tdata:
        :return:
        """
        if step.fct in self.conf["teststeps"]:
            for s in self.conf["teststeps"][step.fct]:
                stepconf = self.conf["teststeps"][step.fct][s]
                if stepconf["tool"] == "file":
                    print("file-tool")
                elif stepconf["tool"] == "api":
                    print("api-tool")
                elif stepconf["tool"] == "cli":
                    print("cli-tool")
                elif stepconf["tool"] == "db":
                    print("db-tool")
                else:
                    print("nichts da")
    def finish_Test(self, granularity):
        """
        initialization routine for the finish step
        :return:
        """
        job = basic.program.Job.getInstance()
        verify = -1+job.getDebugLevel(self.name)
        self.m.logInfo("--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
        self.m.logInfo("something in "+ self.name)
        self.m.setMsg("checkInstance for " + self.name + " is OK")
        self.m.logInfo("--- " + str(inspect.currentframe().f_code.co_name) + "() finished at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
    def collect_system(self, granularity):
        """
        After the test, the test data of each component should be selected again in order to get the post-condition.
        Each component configures how these data can be collected.
        In this phase the collected data have to be transformed into comparable files.
        :return:
        """
        pass
    def collect_TcArtifact(self, granularity):
        """
        collects the artifacts from the test-system.
        the result is written as original in subfolder {tcorigin}
        post: no further contact with the test-system is necessary
        :return:
        """
        job = basic.program.Job.getInstance()
        verify = job.getDebugLevel(self.name)
        if B.ATTR_ARTS_LOG in self.conf[B.SUBJECT_ARTS]:
            self.m.logInfo("get files in for "+ self.name + " in " + self.conf[B.SUBJECT_ARTS][B.ATTR_ARTS_LOG]["path"])
        if "flaskdb" in self.conf[B.SUBJECT_ARTS]:
            self.m.logInfo("select flaskdb-content "+ self.name)
        if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTS]:
            pass # after selection get file from flaskdb
        if B.ATTR_ARTS_FILE in self.conf[B.SUBJECT_ARTS]:
            self.m.logInfo("get files in for "+ self.name + " in " + self.conf[B.SUBJECT_ARTS][B.ATTR_ARTS_FILE]["path"])
        self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() " + str(self.name))
    def split_TcResult(self, granularity):
        """
        transforms the result from subfolder {tcorigin}.
        the result is written as utf-8-readable parts in subfolder {tcparts}
        :return:
        """
        job = basic.program.Job.getInstance()
        verify = job.getDebugLevel(self.name)
        self.m.debug(verify, "--- "+str(inspect.currentframe().f_code.co_name)+"() "+str(self.name))
        if B.ATTR_ARTS_LOG in self.conf[B.SUBJECT_ARTS]:
            pass #
        if "flaskdb" in self.conf[B.SUBJECT_ARTS]:
            pass # stored in table
        if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTS]:
            self.m.logInfo("tidy files in for "+self.name+" in "+self.conf[B.SUBJECT_ARTS][B.ATTR_ARTS_LOB]["format"])
        if B.ATTR_ARTS_FILE in self.conf[B.SUBJECT_ARTS]:
            self.m.logInfo("tidy files in for "+self.name+" in "+self.conf[B.SUBJECT_ARTS][B.ATTR_ARTS_FILE]["format"])
    def fix_TcResult(self, granularity):
        """
        fixes the result which was collected and transformed from the test-system.
        the result is written in comparable form into the folder {tcresult},
        with an identifiable name - like in the target
        :return:
        """
        job = basic.program.Job.getInstance()
        verify = job.getDebugLevel(self.name)
        self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() " + str(self.name))
    def compare_results(self, report):
        """
        comparing the actual result with the target result takes three steps:
        1 match each element so that you get a comparable pair
        2 compare this pair, which may yield differences
        3 rate each difference to decide whether it can be accepted
        :return:
        """
        job = basic.program.Job.getInstance()
        verify = job.getDebugLevel(self.name)
        cm = basic.componentHandling.ComponentManager.getInstance()
        data = {}
        matching = utils.match_tool.Matching()
        if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS]:
            for t in self.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB]:
                if t in B.LIST_DB_ATTR:
                    continue
                # fill each side's data into the matching-object
                for side in M.MATCH_SIDES:
                    if side == M.MATCH_SIDE_PRESTEP:
                        if B.ATTR_ARTS_PRESTEP in self.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB][t]:
                            a = self.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB][t][B.ATTR_ARTS_PRESTEP].split(":")
                            if a[0] != self.name:
                                comp = cm.getComponent(a[0])
                            else:
                                comp = self
                            path = os.path.join(utils.path_tool.composePattern(
                                "{"+M.MATCH[M.MATCH_SIDE_POSTACTUAL][M.M_FILEPATTERN]+"}", comp), a[1]+".csv")
                        pass
                    elif side == M.MATCH_SIDE_TESTCASE:
                        if hasattr(job.par, "testcase_example"):
                            path = os.path.join(utils.path_tool.composePattern(
                                "{"+M.MATCH[M.MATCH_SIDE_POSTEXPECT][M.M_FILEPATTERN]+"}", self), t+".csv")
                            path = path.replace(getattr(job.par, B.PAR_TESTCASE), getattr(job.par, "testcase_example"))
                    else:
                        path = os.path.join(utils.path_tool.composePattern("{" + M.MATCH[side][M.M_FILEPATTERN] + "}", self), t + ".csv")
                    filedata = utils.tdata_tool.readCsv(self.m, path, self)
                    data[side] = M.MATCH[side]
                    data[side]["path"] = path
                    data[side]["data"] = filedata
                # execute the matches
                for type in M.MATCH_TYPES:
                    matching.setData(data, type)
                    report.setPaths(job.par.testcase, self.name, t, type, matching.matchfiles["A"], matching.matchfiles["B"])
                    text = utils.match_tool.matchTree(matching)
                    report.setMatchResult(job.par.testcase, self.name, t, type, matching.cssClass, matching.diffText)
                    path = os.path.join(utils.path_tool.composePattern("{tcresult}", self),
                                        t+"_"+M.MATCH[type]["filename"]+".html")
                    utils.file_tool.writeFileText(self.m, path, text)
                # write text
                pass
        pass
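# Comparison usage sketch (the component list and the report object are hypothetical names; the
# report type itself is part of the framework and not defined in this module):
#
#     report = ...                       # object offering setPaths() and setMatchResult()
#     for comp in testcase_components:   # every component taking part in the testcase
#         comp.compare_results(report)
#     # afterwards {tcresult}/<table>_<matchtype>.html holds one rendered diff per match type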