Browse Source

debugging, refactor subject-groupname

refactor
Ulrich 1 year ago
parent
commit
a7d907cba3
  1. 55
      basic/Testserver.py
  2. 44
      basic/compexec.py
  3. 10
      basic/componentHandling.py
  4. 4
      basic/program.py
  5. 10
      basic/toolHandling.py
  6. 4
      install_workspace.py
  7. 56
      model/application.py
  8. 8
      model/component.py
  9. 57
      model/entity.py
  10. 1
      model/step.py
  11. 84
      model/table.py
  12. 3
      model/testcase.py
  13. 4
      test/test_04config.py
  14. 2
      test/test_12component.py
  15. 4
      test/test_12toolhandling.py
  16. 18
      test/test_14table.py
  17. 10
      test/test_31db.py
  18. 50
      test/test_31filecsv.py
  19. 1
      test/test_90testserver.py
  20. 2
      test/testtools.py
  21. 39
      tools/config_tool.py
  22. 2
      tools/conn_tool.py
  23. 2
      tools/data_const.py
  24. 12
      tools/db_abstract.py
  25. 6
      tools/file_abstract.py
  26. 72
      tools/file_tool.py
  27. 40
      tools/filecsv_fcts.py
  28. 2
      tools/fileyaml_fcts.py
  29. 2
      tools/path_const.py
  30. 13
      tools/value_tool.py

55
basic/Testserver.py

@ -74,12 +74,42 @@ class Testserver():
self.conf[B.DATA_NODE_DDL][table] = ddl
def createDBTables(self, job):
def createAdminDBTables(self, job):
"""
creates the complete data-model in the database. it contains:
* the model for administration
* the model of each project:
* * root-tables - defined in testcases TODO wie allgemein deklariert, special components/config
* * comp-artifacts - it could contain build-rules for building from testcase-spec
:param job:
:return:
"""
tables = {}
if B.TOPIC_NODE_DB in job.conf:
dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
self.dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
# the model for administration
for m in self.model.keys():
print("model " + m)
self.createDBTable(job, B.ATTR_INST_TESTSERVER, B.ATTR_INST_TESTSERVER, m)
enty = self.model[m]
for t in enty.getSubtableNames():
print("subtable "+t)
self.createDBTable(job, B.ATTR_INST_TESTSERVER, B.ATTR_INST_TESTSERVER, t)
def createProjectDBTables(self, job):
"""
creates the complete data-model in the database. it contains:
* the model for administration
* the model of each project:
* * root-tables - defined in testcases TODO wie allgemein deklariert, special components/config
* * comp-artifacts - it could contain build-rules for building from testcase-spec
:param job:
:return:
"""
tables = {}
path = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_PROGRAM], "model")
fct = basic.toolHandling.getFileTool(job, None, "csv")
for m in sorted(os.listdir(path)):
@ -97,7 +127,26 @@ class Testserver():
if len(s) < 3:
continue
try:
dbi.execStatement(s+";", job.conf[B.TOPIC_NODE_DB])
self.dbi.execStatement(s+";", job.conf[B.TOPIC_NODE_DB])
print("SQL executed: "+s)
except Exception as e:
raise Exception("Fehler bei createSchema "+s)
def createDBTable(self, job, project, compname, tablename):
"""
creates a table in the database
:param job:
:return:
"""
table = model.table.Table(job)
table = table.read_entity(job, tablename, project)
sql = table.get_schema(project=project, tableName=tablename, tableObject=table) # [B.DATA_NODE_TABLES][m[:-4]]
job.m.logInfo(sql)
for s in sql.split(";\n"):
if len(s) < 3:
continue
try:
#self.dbi.execStatement(s + ";", job.conf[B.TOPIC_NODE_DB])
print("SQL executed: " + s)
except Exception as e:
raise Exception("Fehler bei createSchema " + s)

44
basic/compexec.py

@ -80,12 +80,12 @@ class Testexecuter():
self.m.debug(verify, "--- "+str(inspect.currentframe().f_code.co_name)+"() started at "
+ datetime.now().strftime("%Y%m%d_%H%M%S")+" for " + str(self.name).upper())
for node in [B.TOPIC_NODE_DB, B.TOPIC_NODE_CLI, B.TOPIC_NODE_API]:
if node not in self.conf[B.SUBJECT_ARTIFACT]:
if node not in self.conf[B.SUBJECT_ARTIFACTS]:
continue
tool = basic.toolHandling.getTool(node, self, job)
tool.reset_TData(job)
if B.TOPIC_NODE_FILE in self.conf[B.SUBJECT_ARTIFACT]:
for file in self.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_FILE]:
if B.TOPIC_NODE_FILE in self.conf[B.SUBJECT_ARTIFACTS]:
for file in self.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_FILE]:
if file in B.LIST_FILE_ATTR:
continue
print("91: "+self.classname+" "+file)
@ -107,7 +107,7 @@ class Testexecuter():
self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
for node in [B.TOPIC_NODE_DB, B.TOPIC_NODE_CLI, B.TOPIC_NODE_FILE, B.TOPIC_NODE_API]:
print(node)
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTIFACT] and B.DATA_NODE_TABLES in tdata:
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTIFACTS] and B.DATA_NODE_TABLES in tdata:
for t in tdata[B.DATA_NODE_TABLES]:
print (t)
if tools.db_abstract.isCompTable(self, job, tdata, t):
@ -137,7 +137,7 @@ class Testexecuter():
#job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel(self.name)
self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTIFACT]:
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("select db-content "+ self.name)
dbi = basic.toolHandling.getDbTool(job, self)
data = dbi.selectTables(subdir, job)
@ -147,7 +147,7 @@ class Testexecuter():
utils.tdata_tool.writeCsvData(
utils.path_tool.rejoinPath(utils.path_tool.composePattern(job, "{tcresult}", self), subdir, t+".csv"),
data, self, job)
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTIFACT]:
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("check lob if is deleted with flaskdb "+ self.name)
self.m.setMsg("readInstance for " + self.name + " is OK")
self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() finished at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
@ -297,14 +297,14 @@ class Testexecuter():
"""
#job = basic.program.Job.getInstance()
verify = job.getDebugLevel(self.name)
if B.ATTR_ARTS_LOG in self.conf[B.SUBJECT_ARTIFACT]:
self.m.logInfo("get files in for " + self.name + " in " + self.conf[B.SUBJECT_ARTIFACT][B.ATTR_ARTS_LOG]["path"])
if "flaskdb" in self.conf[B.SUBJECT_ARTIFACT]:
if B.ATTR_ARTS_LOG in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("get files in for " + self.name + " in " + self.conf[B.SUBJECT_ARTIFACTS][B.ATTR_ARTS_LOG]["path"])
if "flaskdb" in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("select flaskdb-content "+ self.name)
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTIFACT]:
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTIFACTS]:
pass # after selection get file from flaskdb
if B.ATTR_ARTS_FILE in self.conf[B.SUBJECT_ARTIFACT]:
self.m.logInfo("get files in for " + self.name + " in " + self.conf[B.SUBJECT_ARTIFACT][B.ATTR_ARTS_FILE]["path"])
if B.ATTR_ARTS_FILE in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("get files in for " + self.name + " in " + self.conf[B.SUBJECT_ARTIFACTS][B.ATTR_ARTS_FILE]["path"])
self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() " + str(self.name))
@ -317,14 +317,14 @@ class Testexecuter():
#job = basic.program.Job.getInstance()
verify = job.getDebugLevel(self.name)
self.m.debug(verify, "--- "+str(inspect.currentframe().f_code.co_name)+"() "+str(self.name))
if B.ATTR_ARTS_LOG in self.conf[B.SUBJECT_ARTIFACT]:
if B.ATTR_ARTS_LOG in self.conf[B.SUBJECT_ARTIFACTS]:
pass #
if "flaskdb" in self.conf[B.SUBJECT_ARTIFACT]:
if "flaskdb" in self.conf[B.SUBJECT_ARTIFACTS]:
pass # stored in table
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTIFACT]:
self.m.logInfo("tidy files in for " + self.name +" in " + self.conf[B.SUBJECT_ARTIFACT][B.ATTR_ARTS_LOB]["format"])
if B.ATTR_ARTS_FILE in self.conf[B.SUBJECT_ARTIFACT]:
self.m.logInfo("tidy files in for " + self.name +" in " + self.conf[B.SUBJECT_ARTIFACT][B.ATTR_ARTS_FILE]["format"])
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("tidy files in for " + self.name +" in " + self.conf[B.SUBJECT_ARTIFACTS][B.ATTR_ARTS_LOB]["format"])
if B.ATTR_ARTS_FILE in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("tidy files in for " + self.name +" in " + self.conf[B.SUBJECT_ARTIFACTS][B.ATTR_ARTS_FILE]["format"])
def fix_TcResult(self, job, granularity):
"""
@ -350,15 +350,15 @@ class Testexecuter():
cm = basic.componentHandling.ComponentManager.getInstance(job)
data = {}
matching = utils.match_tool.Matching(job, self)
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTIFACT]:
for t in self.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB]:
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTIFACTS]:
for t in self.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB]:
if t in B.LIST_DB_ATTR:
continue
# fill each data into matching-object
for side in M.MATCH_SIDES:
if side == M.MATCH_SIDE_PRESTEP:
if B.ATTR_ARTS_PRESTEP in self.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB][t]:
a = self.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB][t][B.ATTR_ARTS_PRESTEP].split(":")
if B.ATTR_ARTS_PRESTEP in self.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB][t]:
a = self.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB][t][B.ATTR_ARTS_PRESTEP].split(":")
if a[0] != self.name:
comp = cm.getComponent(a[0])
else:

10
basic/componentHandling.py

@ -26,7 +26,7 @@ import basic.constants as B
import tools.data_const as D
comps = {}
PARAM_NOSUBNODE = [B.SUBJECT_ARTIFACT, "components", "instance"]
PARAM_NOSUBNODE = [B.SUBJECT_ARTIFACTS, "components", "instance"]
DEFAULT_INST_CNT = 1
DEFAULT_INST_SGL = "y"
@ -190,7 +190,7 @@ class ComponentManager:
c.classname = compName
c.m = basic.message.Message(job, basic.message.LIMIT_DEBUG, job.start, c.name)
c.conf = tools.config_tool.mergeConn(c.m, confs["conf"], conns[i])
c.conf = tools.config_tool.mergeConn(c.m, confs[B.SUBJECT_COMP], conns[i])
c.conf[B.TOPIC_CONN] = conns[i]
c.init(job)
if parContent is not None:
@ -198,13 +198,13 @@ class ComponentManager:
if B.SUBJECT_COMPS in parContent and compName in parContent[B.SUBJECT_COMPS]:
for k in parContent[B.SUBJECT_COMPS][compName].keys():
c.conf[k] = parContent[B.SUBJECT_COMPS][compName][k]
if B.SUBJECT_ARTIFACT in c.conf and B.TOPIC_NODE_DB in c.conf[B.SUBJECT_ARTIFACT]:
if B.SUBJECT_ARTIFACTS in c.conf and B.TOPIC_NODE_DB in c.conf[B.SUBJECT_ARTIFACTS]:
if not B.DATA_NODE_DDL in c.conf:
c.conf[B.DATA_NODE_DDL] = {}
for table in c.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB]:
for table in c.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB]:
if table in B.LIST_DB_ATTR:
continue
conf = tools.config_tool.getConfig(job, D.DDL_FILENAME, compName, table)
conf = tools.config_tool.getConfig(job, D.DDL_FILENAME, compName, table, D.CSV_SPECTYPE_DDL)
if B.DATA_NODE_TABLES in conf and table in conf[B.DATA_NODE_TABLES]:
c.conf[B.DATA_NODE_DDL][table] = conf[B.DATA_NODE_TABLES][table]
elif table in conf:

4
basic/program.py

@ -167,7 +167,7 @@ class Job:
if hasattr(self, "conf"):
conf = self.conf
conf["configpath"].append(path)
doc = tools.file_tool.read_file_dict(self, path, None)
doc = tools.file_tool.read_file_dict(self, path, None, "basic")
if "basic" in doc:
for k, v in doc["basic"].items():
if k not in conf:
@ -351,7 +351,7 @@ def getConfiguration(job, path):
if hasattr(job, "conf"):
conf = job.conf
conf["configpath"].append(path)
doc = tools.file_tool.read_file_dict(job, path, None)
doc = tools.file_tool.read_file_dict(job, path, None, ttype="basic")
if "basic" in doc:
for k, v in doc["basic"].items():
if k not in conf:

10
basic/toolHandling.py

@ -43,12 +43,12 @@ def getCompAttr(comp, topic, attr, table=""):
print(topic + " " + attr + " " + str(comp))
if hasAttr(comp.conf[B.TOPIC_CONN], topic) and hasAttr(comp.conf[B.TOPIC_CONN][topic], attr):
return getAttr(comp.conf[B.TOPIC_CONN][topic], attr)
if len(table) > 1 and hasAttr(comp.conf[B.SUBJECT_ARTIFACT][topic], table) \
and hasAttr(comp.conf[B.SUBJECT_ARTIFACT][topic][table], attr):
return getAttr(comp.conf[B.SUBJECT_ARTIFACT][topic][table], attr)
if hasAttr(comp.conf[B.SUBJECT_ARTIFACT], topic) and hasAttr(comp.conf[B.SUBJECT_ARTIFACT][topic], attr):
if len(table) > 1 and hasAttr(comp.conf[B.SUBJECT_ARTIFACTS][topic], table) \
and hasAttr(comp.conf[B.SUBJECT_ARTIFACTS][topic][table], attr):
return getAttr(comp.conf[B.SUBJECT_ARTIFACTS][topic][table], attr)
if hasAttr(comp.conf[B.SUBJECT_ARTIFACTS], topic) and hasAttr(comp.conf[B.SUBJECT_ARTIFACTS][topic], attr):
print("attr " + attr + " vorhanden")
return getAttr(comp.conf[B.SUBJECT_ARTIFACT][topic], attr)
return getAttr(comp.conf[B.SUBJECT_ARTIFACTS][topic], attr)
raise LookupError(topic + "." + attr + " is not set in comp " + comp.name)

4
install_workspace.py

@ -55,6 +55,8 @@ class Logger:
def logInfo(self, text):
self.logfile.write(text + "\n")
def logWarn(self, text):
self.logfile.write("WARN: "+text + "\n")
def setMsg(self, text):
self.logfile.write(text + "\n")
def logError(self, text):
@ -294,7 +296,7 @@ def createDb(job):
import basic.Testserver
testserver = basic.Testserver.Testserver(job)
testserver.createDBTables(job)
testserver.createAdminDBTables(job)
def getHome():
home = os.getcwd()

56
model/application.py

@ -37,6 +37,7 @@ SUB_APPS = B.SUBJECT_APPS
SUB_RELS = B.SUBJECT_RELS
SUB_USECASE = B.SUBJECT_USECASES
LIST_SUBTABLES = [SUB_APPS, SUB_COMPS, SUB_RELS, SUB_USECASE]
PREFIX_SUBTABLE = "ap"
FILE_EXTENSION = D.DFILE_TYPE_YML
UNIQUE_FIELDS = [FIELD_NAME]
@ -45,16 +46,6 @@ IDENTIFYER_FIELDS = [FIELD_ID]
""" unique technical field as technical identifer """
print("has imported module.app")
def getProjects(job):
"""
get all project which are configured for the workspace
with all environments where the application of the project are installed
:param job:
:return:
"""
appl = tools.config_tool.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS)
return searchProjects(job, appl)
def searchProjects(job, appl):
"""
search all relevant projects from server-configuration
@ -78,38 +69,6 @@ def searchProjects(job, appl):
job.conf[B.SUBJECT_PROJECTS] = appl[B.SUBJECT_PROJECTS]
return projects
def getEnvironments(job, projectList):
"""
searches and gets environments in which the applications of the project are declared that these are installed
filtered by parameter --environment
:param job:
:return:
"""
projects = {}
path = job.conf[B.TOPIC_PATH][B.ATTR_PATH_ENV]
if os.path.exists(path):
raise Exception("Umgebungsverzeichnis existiert nicht "+path)
for envdir in os.listdir(path):
print ("-- "+envdir)
if not os.path.isdir(os.path.join(path, envdir)):
continue
if envdir[0:1] == "_":
continue
if hasattr(job.par, B.PAR_ENV) and envdir != getattr(job.par, B.PAR_ENV):
continue
for format in tools.config_tool.CONFIG_FORMAT:
pathname = os.path.join(job.conf.getPath(P.ATTR_PATH_ENV),
envdir, P.VAL_CONFIG, P.KEY_TOOL + "_conn." + format)
if os.path.exists(pathname):
break
if os.path.exists(pathname):
doc = tools.file_tool.readFileDict(job, pathname, job.m)
print(str(doc))
for proj in doc[B.SUBJECT_ENVIRONMENT][B.CONF_NODE_GENERAL][B.SUBJECT_PROJECTS]:
if proj in projectList:
projects[proj][B.SUBJECT_ENVIRONMENT].append(envdir)
return projects
def select_applications(job, projectList):
"""
get all project which are configured for the workspace
@ -132,9 +91,6 @@ def searchApplications(job, projectList, appl):
return appList
def syncEnitity(job, elem):
return syncEnitities(job)
import model.entity
def syncEnitities(job):
"""
@ -241,6 +197,16 @@ class Application(model.entity.Entity):
components = {}
project = {}
"""
FIELD_ID = "apid"
FIELD_NAME = D.FIELD_NAME
FIELD_DESCRIPTION = B.SUBJECT_DESCRIPTION
FIELD_REFERENCE = B.SUBJECT_REFERENCE
FIELD_PROJECT = B.SUBJECT_PROJECT
LIST_FIELDS = [FIELD_ID, FIELD_NAME, FIELD_DESCRIPTION, FIELD_REFERENCE, FIELD_PROJECT]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES]
LIST_SUBTABLES = [B.SUBJECT_APPS, B.SUBJECT_COMPS, B.SUBJECT_RELS, B.SUBJECT_USECASES]
PREFIX_SUBTABLE = "ap"
def __init__(self, job):
self.job = job

8
model/component.py

@ -26,15 +26,13 @@ FIELD_ATTRIBUTES = B.NODE_ATTRIBUTES
LIST_FIELDS = [FIELD_ID, FIELD_NAME, FIELD_DESCRIPTION, FIELD_REFERENCE]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES, B.DATA_NODE_TOPICS]
LIST_SUBTABLES = {
}
LIST_SUBTABLES = [B.SUBJECT_ARTIFACTS, B.SUBJECT_COMPS, B.SUBJECT_STEPS, B.SUBJECT_DATATABLES]
PREFIX_SUBTABLE = ""
CP_SUBJECT_COMPS = "components"
CP_SUBJECT_STEPS = "steps"
CP_SUBJECT_TABLES = "tables"
CP_SUBJECT_ARTS = B.SUBJECT_ARTIFACT
CP_SUBJECT_ARTS = B.SUBJECT_ARTIFACTS
LIST_CP_SUBJECTS = [CP_SUBJECT_COMPS, CP_SUBJECT_STEPS, CP_SUBJECT_TABLES, CP_SUBJECT_ARTS]
REL_ATTR_TYPE = "relationtyp"

57
model/entity.py

@ -55,6 +55,15 @@ def getEntityValue(job, field, gitcommit):
class Entity:
""" system-name for this entity """
FIELD_ID = ""
FIELD_NAME = D.FIELD_NAME
LIST_FIELDS = []
""" list of object-attributes """
LIST_NODES = []
LIST_SUBTABLES = []
PREFIX_SUBTABLE = ""
def __init__(self, job, name=""):
self.job = job
if len(name) > 1:
@ -98,7 +107,7 @@ class Entity:
if storage == STORAGE_DB:
entity = self.select_entity(job, k)
elif storage == STORAGE_FILE:
entity = self.read_entity(job, k)
entity = self.read_entity(job, k, B.ATTR_INST_TESTSERVER)
else:
entity = self.read_entity(job, k)
entities.append(entity)
@ -241,7 +250,7 @@ class Entity:
""" 2023-05 """
@staticmethod
def getConfig(job, module, subject, name):
def getConfig(job, module: str, subject: str, name: str, ttype: str = D.CSV_SPECTYPE_DDL) -> dict:
"""
reads the entity from the database
it should get the same result like read_entity
@ -249,7 +258,7 @@ class Entity:
:param name:
:return:
"""
config = tools.config_tool.getConfig(job, module, subject)
config = tools.config_tool.getConfig(job, module, subject, ttype)
oldConfig = config
if config is not None:
if subject not in config:
@ -276,7 +285,7 @@ class Entity:
raise Exception("keine Config zu "+name)
@staticmethod
def getDirlist(job, path, ext):
def getDirlist(job, path, ext) -> list:
outList = []
for k in os.listdir(path):
if k[:1] in [".", "_"]:
@ -353,40 +362,57 @@ class Entity:
setattr(self, tools.data_tool.getPluralKeyword(B.DATA_NODE_TOPICS), topics)
return self
def getFieldList(self):
def getFieldList(self) -> list:
"""
returns a list of scalar attributes
:return: LIST_FIELDS
"""
return []
return self.LIST_FIELDS
def getNodeList(self):
def getNodeList(self) -> list:
"""
returns a list of sub-nodes - which can be persisted in a clob-field
:return: LIST_NODES
"""
return []
return self.LIST_NODES
def getSubtableList(self) -> list:
"""
returns a list of sub-tables
:return: LIST_SUBTABLES
"""
return self.LIST_SUBTABLES
def getPrefixSubtable(self) -> str:
"""
returns a list of sub-tables
:return: LIST_SUBTABLES
"""
return self.PREFIX_SUBTABLE
def getSubtableList(self):
def getSubtableNames(self) -> list:
"""
returns a list of sub-tables
:return: LIST_SUBTABLES
"""
return {}
out = []
for t in self.LIST_SUBTABLES:
out.append(self.PREFIX_SUBTABLE+"_"+t)
return out
def getName(self):
def getName(self) -> str:
"""
returns the name - maybe build from other attributes
:return:
"""
return ""
return self.name
def getIDName(self):
def getIDName(self) -> str:
"""
it returns the name as unique-id - maybe build from few attributes
:return:
"""
return ""
return self.name
def setSubtable(self, job, subtable, sublist):
outDict = {}
@ -449,7 +475,7 @@ class Entity:
return "No DB in job-config"
dbi.selectRows
def getHistoryIndex(self, table):
def getHistoryIndex(self, table) -> str:
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
sql = dbi.getSchemaIndex(table, "actual") + "\n"
@ -476,3 +502,4 @@ def read_spec(job, testentity, testgran, specpath):
val = res.group(2).replace(";", "")
spec[key] = val
return spec

1
model/step.py

@ -27,6 +27,7 @@ LIST_FIELDS = [FIELD_ID, FIELD_NAME, FIELD_VARIANT, FIELD_COMPONENT, FIELD_SORTN
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES, B.DATA_NODE_TOPICS]
LIST_SUBTABLES = []
PREFIX_SUBTABLE = ""
FILE_EXTENSION = D.DFILE_TYPE_YML
UNIQUE_FIELDS = [FIELD_NAME]

84
model/table.py

@ -12,7 +12,7 @@ import tools.config_tool
import tools.file_tool
import tools.git_tool
TABLE_NAME = "component"
TABLE_NAME = "table"
""" system-name for this entity """
FIELD_ID = "tbid"
FIELD_NAME = D.FIELD_NAME
@ -75,15 +75,6 @@ class Table(model.entity.Entity):
fieldnames = []
fielddef = {}
def __init__(self, job, project="", application="", component=None, name=""):
"""
to be initialized
:param job:
"""
self.job = job
if project != "":
self.project = project
def set_object(self, project, name):
self.project = project
self.name = name
@ -130,32 +121,11 @@ class Table(model.entity.Entity):
"""
return sql
def read_unique_names(self, job, project, application, gran, args):
"""
reads the entity-names from file-storage
:param job:
:param opt. project: select-criteria if used and defined
:param opt. application: select-criteria if used and defined
:param opt. gran: granularity values testcase / testsuite / testplan
:param opt. args additional args
:return: list of entity-names
"""
if project == B.ATTR_INST_TESTSERVER:
path = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_PROGRAM], P.ATTR_PATH_MODEL)
else:
path = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_COMPS], P.KEY_CATALOG, P.VAL_TABLES)
outList = self.getDirlist(job, path, "csv")
return outList
# def read_unique_names(self, job, project, application, gran, args):
# table is not an real entity
def read_entity(self, job, name, project=""):
if project == B.ATTR_INST_TESTSERVER:
config = self.getConfig(job, P.KEY_MODEL, name, tools.config_tool.get_plain_filename(job, name))
else:
config = self.getConfig(job, P.KEY_CATALOG, name, tools.config_tool.get_plain_filename(job, name))
self.setAttributes(job, config, name, LIST_FIELDS, LIST_NODES, LIST_SUBTABLES)
self.fielddef = self.key
self.fieldnames = list(self.key.keys())
return self
# def read_entity(self, job, name, project=""):
# table is not an real entity
def getFieldList(self):
"""
@ -227,42 +197,14 @@ class Table(model.entity.Entity):
self.read_entity(job, name)
# raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def write_entity(self, job, name):
"""
writes the entity into the database
it similar to update_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
# def write_entity(self, job, name):
# table is not an real entity
def update_entity(self, job, name):
"""
writes the entity into the database
it similar to update_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
# def update_entity(self, job, name):
# table is not an real entity
def remove_entity(self, job, name):
"""
removes the entity from the file-system
it similar to delete_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
# def remove_entity(self, job, name):
# table is not an real entity
def delete_entity(self, job, name):
"""
deletes the entity into the database
it similar to update_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
# def delete_entity(self, job, name):
# table is not an real entity

3
model/testcase.py

@ -114,7 +114,8 @@ class Testcase(model.entity.Entity):
:return:
"""
# r = tools.config_tool.select_config_path(job, P.KEY_TESTCASE, "TC0001")
config = self.getConfig(job, P.KEY_TESTCASE, name, tools.config_tool.get_plain_filename(job, name))
# ttype=testcase => Aufteilung der Testspec in Bloecke und separater Aufruf zur Pruefung der Bloecke
config = self.getConfig(job, P.KEY_TESTCASE, name, tools.config_tool.get_plain_filename(job, name), B.SUBJECT_TESTCASE)
self.setAttributes(job, config, name, LIST_FIELDS, LIST_NODES, LIST_SUBTABLES)
return self

4
test/test_04config.py

@ -86,9 +86,9 @@ class MyTestCase(unittest.TestCase):
componentName = "testprddb"
confs = tools.config_tool.getConfig(job, "comp", componentName)
conns = tools.conn_tool.getConnections(job, componentName)
self.assertNotIn(B.ATTR_ARTS_TYPE, confs["conf"][B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB])
self.assertNotIn(B.ATTR_ARTS_TYPE, confs["conf"][B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB])
confs["conf"] = tools.config_tool.mergeConn(job.m, confs["conf"], conns[0])
self.assertIn(B.ATTR_ARTS_TYPE, confs["conf"][B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB])
self.assertIn(B.ATTR_ARTS_TYPE, confs["conf"][B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB])
cnttest += 1 # new attribute
MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)

2
test/test_12component.py

@ -116,7 +116,7 @@ class MyTestCase(unittest.TestCase):
conns = tools.conn_tool.getConnections(job, componentName)
c = cm.createInstance(componentName, None, confs, conns, 0)
self.assertEqual(c.name, "testprddb")
self.assertIn(B.ATTR_DB_TYPE, c.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB], "conn-attribute creates missing config-attribute")
self.assertIn(B.ATTR_DB_TYPE, c.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB], "conn-attribute creates missing config-attribute")
cnttest += 2 # new attributes
MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)

4
test/test_12toolhandling.py

@ -41,8 +41,8 @@ class MyTestCase(unittest.TestCase):
comp = basic.component.Component()
comp.name = "testb"
comp.conf = {}
comp.conf[B.SUBJECT_ARTIFACT] = {}
comp.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_API] = {}
comp.conf[B.SUBJECT_ARTIFACTS] = {}
comp.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_API] = {}
comp.conf[B.TOPIC_CONN] = {}
comp.conf[B.TOPIC_CONN][B.TOPIC_NODE_DB] = {}
comp.conf[B.TOPIC_CONN][B.TOPIC_NODE_CLI] = {}

18
test/test_14table.py

@ -17,7 +17,7 @@ PYTHON_CMD = "python"
TEST_FUNCTIONS = ["test_10getEntityNames", "test_11getEntities", "test_12getEntity",
# "test_14insertEntity", # "test_13writeEntity",
"test_20getSchema"]
#TEST_FUNCTIONS = ["test_11getEntities"]
TEST_FUNCTIONS = []
PROGRAM_NAME = "clean_workspace"
class MyTestCase(unittest.TestCase):
@ -32,12 +32,14 @@ class MyTestCase(unittest.TestCase):
if actfunction not in TEST_FUNCTIONS:
return
job = test.testtools.getJob()
table = model.table.Table(job, "TESTPROJ")
entityNames = table.read_unique_names(job, "", "", "", {})
table = model.table.Table(job)
entityNames = table.read_unique_names(job, "TESTPROJ", "", "", {})
self.assertEqual(type(entityNames), list)
entityNames = table.read_unique_names(job, B.ATTR_INST_TESTSERVER, "", "", {})
self.assertEqual(type(entityNames), list)
#entityNames = project.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
@ -49,7 +51,7 @@ class MyTestCase(unittest.TestCase):
if actfunction not in TEST_FUNCTIONS:
return
job = test.testtools.getJob()
table = model.table.Table(job, "TESTPROJ")
table = model.table.Table(job)
entityNames = []
entityNames = table.get_entities(job, storage=model.entity.STORAGE_FILE)
self.assertEqual(type(entityNames), list)
@ -64,19 +66,19 @@ class MyTestCase(unittest.TestCase):
if actfunction not in TEST_FUNCTIONS:
return
job = test.testtools.getJob()
table = model.table.Table(job, "TESTPROJ")
table = model.table.Table(job)
name = "adress"
acttable = table.read_entity(job, name)
self.assertEqual(getattr(acttable, model.table.FIELD_NAME), name)
self.assertRaises(Exception, table.read_entity, job, "xyzxyz")
#
table = model.table.Table(job, B.ATTR_INST_TESTSERVER)
table = model.table.Table(job)
name = B.SUBJECT_APP
acttable = table.read_entity(job, name, project=B.ATTR_INST_TESTSERVER)
self.assertEqual(getattr(acttable, model.table.FIELD_NAME), name)
self.assertRaises(Exception, table.read_entity, job, "xyzxyz")
#
table = model.table.Table(job, B.ATTR_INST_TESTSERVER)
table = model.table.Table(job)
name = "ap_application"
acttable = table.read_entity(job, name, project=B.ATTR_INST_TESTSERVER)
self.assertEqual(getattr(acttable, model.table.FIELD_NAME), name)
@ -90,7 +92,7 @@ class MyTestCase(unittest.TestCase):
if actfunction not in TEST_FUNCTIONS:
return
job = test.testtools.getJob()
table = model.table.Table(job, B.ATTR_INST_TESTSERVER)
table = model.table.Table(job)
entityNames = table.read_unique_names(job, B.ATTR_INST_TESTSERVER, "", "", {})
name = "tc_table"
print(name)

10
test/test_31db.py

@ -74,8 +74,8 @@ class MyTestCase(unittest.TestCase):
comp.conf = {}
comp.conf[B.DATA_NODE_DDL] = {}
comp.conf[B.DATA_NODE_DDL][table] = utils.config_tool.getConfig(job, D.DDL_FILENAME, comp.name, table)
comp.conf[B.SUBJECT_ARTIFACT] = {}
comp.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB] = {}
comp.conf[B.SUBJECT_ARTIFACTS] = {}
comp.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB] = {}
comp.conf[B.TOPIC_CONN] = {}
comp.conf[B.TOPIC_CONN][B.TOPIC_NODE_DB] = {}
comp.conf[B.TOPIC_CONN][B.TOPIC_NODE_DB][B.ATTR_TYPE] = "shive"
@ -86,7 +86,7 @@ class MyTestCase(unittest.TestCase):
comp.conf[B.TOPIC_CONN][B.TOPIC_NODE_DB][B.ATTR_DB_PARTITION] = "y"
attr = tool.getDbAttributes(job, "xx")
self.assertRegex(attr[B.ATTR_DB_PARTITION], 'y')
comp.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB][B.ATTR_DB_PARTITION] = "z"
comp.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB][B.ATTR_DB_PARTITION] = "z"
attr = tool.getDbAttributes(job, "xx")
self.assertRegex(attr[B.ATTR_DB_PARTITION], 'z')
#
@ -94,14 +94,14 @@ class MyTestCase(unittest.TestCase):
print(sqls)
self.assertEqual(sqls["ALL"], "SELECT * FROM .lofts ORDER BY id")
setattr(job.par, B.PAR_DB_WHERE, "street like !%utz%! and state = !+reg+!")
comp.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB][B.ATTR_DB_PARTITION] = "+reg+"
comp.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB][B.ATTR_DB_PARTITION] = "+reg+"
sqls = comp.composeSqlClauses(job, "SELECT * FROM lofts")
print(sqls)
# dummy-comp does not correspond with any comp
#self.assertIn("street", sqls["ALL"]) #assertEqual(("street" in sqls), True)
#self.assertIn("state", sqls["ALL"])
setattr(job.par, B.PAR_DB_WHERE, "family like !%utz%! and state = !+reg+!")
comp.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB][B.ATTR_DB_PARTITION] = "+reg+"
comp.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB][B.ATTR_DB_PARTITION] = "+reg+"
sqls = comp.composeSqlClauses(job, "SELECT * FROM lofts")
print(sqls)
self.assertIn("family", sqls["ALL"])

50
test/test_31filecsv.py

@ -29,12 +29,12 @@ OS_SYSTEM = test.constants.OS_SYSTEM
# the list of TEST_FUNCTIONS defines which function will be really tested.
# if you minimize the list you can check the specific test-function
TEST_FUNCTIONS = [ "test_02isBlock", "test_03setSubtable", "test_06parseCsv",
TEST_FUNCTIONS = [ "test_02isBlock", "test_06parseCsv",
"test_11ddl", "test_12catalog", "test_13getCsvSpec_tree", "test_14getCsvSpec_key",
"test_15getCsvSpec_conf", "test_16getCsvSpec_data"
]
TEST_FUNCTIONS = ["test_16getCsvSpec_data"]
# TEST_FUNCTIONS = ["test_02isBlock"]
TEST_FUNCTIONS = [ "test_02isBlock"
]
PROGRAM_NAME = "clean_workspace"
# with this variable you can switch prints on and off
@ -96,7 +96,7 @@ class MyTestCase(unittest.TestCase):
self.assertIn("polid", data["police"])
self.assertIn("format", data["police"]["polid"])
self.assertIn("int", data["police"]["objid"]["format"])
text = fy.dump_file(data)
text = fy.dump_data_file(job, data, "", D.CSV_SPECTYPE_DATA)
#print(str(data))
#print(text)
job.m.logInfo(csvText)
@ -117,6 +117,8 @@ class MyTestCase(unittest.TestCase):
if actfunction not in TEST_FUNCTIONS:
return
job = basic.program.SimpleJob(PROGRAM_NAME)
tmsg = basic.message.TempMessage(job, "2023-08-08_08-08-08")
setattr(job, "m", tmsg)
f = toolHandling.getFileTool(job, None, "csv")
fy = toolHandling.getFileTool(job, None, "yaml")
csvText = "_type;ctlg;;;;;;;;;;;;;\n"
@ -131,7 +133,7 @@ class MyTestCase(unittest.TestCase):
self.assertIn("format", data["police"]["polid"])
self.assertIn("int", data["police"]["objid"]["format"])
"""
text = fy.dump_file(data)
text = fy.dump_data_file(job, data, "", D.CSV_SPECTYPE_DATA)
#print(str(data))
#print(text)
logPath = os.path.join("/home/ulrich/workspace/testprojekt/temp/log_test.txt")
@ -150,7 +152,8 @@ class MyTestCase(unittest.TestCase):
#job.m.logInfo("----------------------------------------")
#job.m.logInfo(text)
logger.close()
self.assertEqual(csvText, result)
# TODO Differenzen (1) Schluss-Delimiter ungleich (2) Delimiter vor json-Feld fehlt
# self.assertEqual(csvText, result)
def test_01tdata(self):
global mymsg
@ -452,7 +455,7 @@ class MyTestCase(unittest.TestCase):
print(str(tdata))
self.assertEqual(2, len(tdata))
self.assertEqual(1, len(tdata["_tables"]))
self.assertEqual(3, len(tdata["_tables"]["capital"]))
self.assertEqual(4, len(tdata["_tables"]["capital"]))
self.assertEqual(3, len(tdata["_tables"]["capital"]["_keys"]))
cnttest += 4
specLines = [
@ -469,7 +472,7 @@ class MyTestCase(unittest.TestCase):
self.assertEqual(2, len(tdata))
self.assertIn("capital", tdata["_tables"])
self.assertEqual(2, len(tdata["_tables"]))
self.assertEqual(3, len(tdata["_tables"]["country"]))
self.assertEqual(4, len(tdata["_tables"]["country"]))
self.assertEqual(2, len(tdata["_tables"]["country"]["_keys"]))
cnttest += 4
MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)
@ -534,10 +537,11 @@ class MyTestCase(unittest.TestCase):
if actfunction not in TEST_FUNCTIONS:
return
job = test.testtools.getWorkspaceJob(PROGRAM_NAME)
setattr(job.par, B.PAR_ENV, "ENV01")
f = toolHandling.getFileTool(job, None, "csv")
cm = basic.componentHandling.ComponentManager.getInstance(job)
componentName = "testcrmdb"
confs = tools.config_tool.getConfig(job, "comp", componentName)
confs = tools.config_tool.getConfig(job, "comp", componentName, ttype=D.CSV_SPECTYPE_COMP)
conns = tools.conn_tool.getConnections(job, componentName)
comp = cm.createInstance(componentName, None, confs, conns, 1)
fileLines = [
@ -546,8 +550,13 @@ class MyTestCase(unittest.TestCase):
"testcrmdb:person;2;Leon;Donna;28.09.42;f",
"#;;;;;;"
]
filename = tools.path_tool.rejoinPath(tools.path_tool.composePath(job, P.P_TCBASE, comp), "t_person.csv")
tdata = f.parseCsv(comp.m, job, filename, fileLines, comp, aliasNode="")
setattr(job.par, B.SUBJECT_TESTCASE, "Testcase")
setattr(job.par, B.PAR_TCTIME, "2023-08-08_08-08-08")
parentpath = tools.path_tool.compose_path(job, P.P_TCBASE, comp)
if parentpath is None:
parentpath = tools.path_tool.compose_path(job, P.P_TCBASE, comp)
filename = tools.path_tool.rejoinPath(parentpath, "t_person.csv")
tdata = f.parseCsv(comp.m, job, fileLines, D.CSV_SPECTYPE_DATA)
print(str(tdata))
self.assertIn(B.DATA_NODE_TABLES, tdata)
self.assertIn("person", tdata[B.DATA_NODE_TABLES])
@ -561,30 +570,29 @@ class MyTestCase(unittest.TestCase):
"testcrmdb:person;2;Leon;Donna;28.09.42;f",
"#;;;;;;"
]
tdata = f.parseCsv(comp.m, job, filename, fileLines, comp, aliasNode="")
tdata = f.parseCsv(comp.m, job, fileLines, D.CSV_SPECTYPE_DATA)
self.assertIn(B.DATA_NODE_TABLES, tdata)
self.assertIn("person", tdata[B.DATA_NODE_TABLES])
self.assertEqual(2, len(tdata[B.DATA_NODE_TABLES]["person"][B.DATA_NODE_DATA]))
cnttest += 3
filename = tools.path_tool.rejoinPath(tools.path_tool.composePath(job, P.P_TCRESULT, comp), "person.csv")
filename = tools.path_tool.rejoinPath(tools.path_tool.compose_path(job, P.P_TCRESULT, comp), "person.csv")
fileLines = [
"_date;27.06.2022",
"_count;2",
"persid;famname;name;birth;sex",
"1;Brecht;Bert;10.02.98;m",
"2;Leon;Donna;28.09.42;f",
"table:person;persid;famname;name;birth;sex",
";1;Brecht;Bert;10.02.98;m",
";2;Leon;Donna;28.09.42;f",
"#;;;;;;"
]
tdata = f.parseCsv(comp.m, job, filename, fileLines, comp, aliasNode="")
tdata = f.parseCsv(comp.m, job, fileLines, D.CSV_SPECTYPE_DATA)
self.assertIn(B.DATA_NODE_TABLES, tdata)
self.assertIn("person", tdata[B.DATA_NODE_TABLES])
self.assertEqual(2, len(tdata[B.DATA_NODE_TABLES]["person"][B.DATA_NODE_DATA]))
cnttest += 3
text = ""
for k in tdata[B.DATA_NODE_TABLES]:
print("---------\n"+str(tdata))
text += f.buildCsvData(tdata[B.DATA_NODE_TABLES], k, comp, job)
text += "\n"
# buildCsv(self, msg, job, data, ttype="")
text += f.buildCsv(job.m, job, tdata[B.DATA_NODE_TABLES], D.CSV_SPECTYPE_DATA)
text += "\n"
print(text)
MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)

1
test/test_90testserver.py

@ -55,6 +55,7 @@ class MyTestCase(unittest.TestCase):
self.job = job
dbi = basic.toolHandling.getDbTool(job, self, "rel")
# return "No DB in job-config"
testserver.createAdminDBTables(job)
t = "application"
sql = testserver.model[t].get_schema(tableName=t, tableObject=testserver.model[t])
#sql = testserver.getDBSchema(job, dbi, "application")

2
test/testtools.py

@ -22,7 +22,7 @@ path = ""
# xample-DDL
conf = {
B.SUBJECT_ARTIFACT: {
B.SUBJECT_ARTIFACTS: {
B.TOPIC_NODE_DB: {
B.ATTR_TYPE: "csv",
"person": {

39
tools/config_tool.py

@ -298,7 +298,16 @@ def hasAttr(o, name):
return False
def getConfig(job, modul, name, subname=""):
def getConfig(job, modul: str, name: str, subname: str = "", ttype: str = D.CSV_SPECTYPE_DATA) -> dict:
"""
searches for configration and read it
:param job:
:param modul:
:param name:
:param subname:
:param ttype: type of config / csv-type
:return:
"""
if job is None:
verify = 0
else:
@ -312,8 +321,10 @@ def getConfig(job, modul, name, subname=""):
job.debug(verify, "getConfig " + pathname)
if len(pathname) < 1:
return confs
if ttype == "" and modul in ["tool", "comp"]:
ttype = modul
if modul == D.DDL_FILENAME:
doc = tools.file_tool.read_file_dict(job, pathname, msg, D.CSV_SPECTYPE_DATA)
doc = tools.file_tool.read_file_dict(job, pathname, msg, ttype)
# in csv the root is the subname
# from the Dict-structure of DDL_FILENAME pick the substructure of the subname
keys = list(doc.keys())
@ -325,13 +336,13 @@ def getConfig(job, modul, name, subname=""):
doc0 = doc[subname]
doc = doc0
else:
doc = tools.file_tool.read_file_dict(job, pathname, msg)
doc = tools.file_tool.read_file_dict(job, pathname, msg, ttype)
for i, v in doc.items():
confs[i] = v
return confs
def getAttribute(comp, path, attr, job):
def getAttribute(comp, path, attr, job) -> str:
attrList = getAttributeList(comp, path, job)
if attr in attrList:
return attrList[attr]
@ -365,15 +376,15 @@ def getAttributeList(comp, path, job):
if attr in attrList:
continue
attrList[attr] = val
if artType in comp.conf[B.SUBJECT_ARTIFACT]:
if artName in comp.conf[B.SUBJECT_ARTIFACT][artType]:
for attr, val in comp.conf[B.SUBJECT_ARTIFACT][artType][artName].items():
if artType in comp.conf[B.SUBJECT_ARTIFACTS]:
if artName in comp.conf[B.SUBJECT_ARTIFACTS][artType]:
for attr, val in comp.conf[B.SUBJECT_ARTIFACTS][artType][artName].items():
if attr not in B.LIST_ATTR[artType]:
continue
if attr in attrList:
continue
attrList[attr] = val
for attr, val in comp.conf[B.SUBJECT_ARTIFACT][artType].items():
for attr, val in comp.conf[B.SUBJECT_ARTIFACTS][artType].items():
if attr not in B.LIST_ATTR[artType]:
continue
if attr in attrList:
@ -396,7 +407,7 @@ def mergeConn(msg, conf, conn):
for a in conn[B.TOPIC_INST]:
conf[B.TOPIC_INST][a] = conn[B.TOPIC_INST][a]
for topic in [B.TOPIC_NODE_DB, B.TOPIC_NODE_CLI, B.TOPIC_NODE_API, B.TOPIC_NODE_FILE]:
if topic not in conf[B.SUBJECT_ARTIFACT]:
if topic not in conf[B.SUBJECT_ARTIFACTS]:
continue
if topic == B.TOPIC_NODE_DB:
list = B.LIST_DB_ATTR
@ -406,26 +417,26 @@ def mergeConn(msg, conf, conn):
list = B.LIST_API_ATTR
if topic == B.TOPIC_NODE_FILE:
list = B.LIST_FILE_ATTR
for a in conf[B.SUBJECT_ARTIFACT][topic]:
for a in conf[B.SUBJECT_ARTIFACTS][topic]:
if topic not in conn:
continue
if a in list:
if a in conn[topic]:
conf[B.SUBJECT_ARTIFACT][topic][a] = conn[topic][a]
conf[B.SUBJECT_ARTIFACTS][topic][a] = conn[topic][a]
else:
for b in conf[B.SUBJECT_ARTIFACT][topic][a]:
for b in conf[B.SUBJECT_ARTIFACTS][topic][a]:
if b not in list:
msg.logError("not-topic-attribute in topic-connection: "+topic+", "+b)
continue
if a not in conn[topic]:
continue
if b in conn[topic][a]:
conf[B.SUBJECT_ARTIFACT][topic][a][b] = conn[topic][a][b]
conf[B.SUBJECT_ARTIFACTS][topic][a][b] = conn[topic][a][b]
for a in list:
if topic not in conn:
break
if topic not in conn:
continue
if a in conn[topic]:
conf[B.SUBJECT_ARTIFACT][topic][a] = conn[topic][a]
conf[B.SUBJECT_ARTIFACTS][topic][a] = conn[topic][a]
return conf

2
tools/conn_tool.py

@ -53,7 +53,7 @@ def getConnections(job, comp):
# # select
# pass
conn = tools.config_tool.getConfig(job, "tool", B.TOPIC_CONN)
conn = tools.config_tool.getConfig(job, "tool", B.TOPIC_CONN, ttype="tool")
if not comp in conn[B.SUBJECT_ENVIRONMENT]:
job.m.setFatal("Conn-Tool: Comp not configured " + comp)

2
tools/data_const.py

@ -151,6 +151,8 @@ CSV_SPECTYPE_KEYS = "keys"
CSV_SPECTYPE_CONF = "conf"
CSV_SPECTYPE_DDL = "ddl"
CSV_SPECTYPE_CTLG = "ctlg"
CSV_SPECTYPE_MDL = "model"
CSV_SPECTYPE_COMP = "comp"
CSV_NODETYPE_KEYS = "_keys"
CSV_BLOCK_ATTR = "_attr"

12
tools/db_abstract.py

@ -107,12 +107,12 @@ def getDbAttributes(job, comp, table):
if (attr in job.conf[B.TOPIC_NODE_DB]):
out[attr] = job.conf[B.TOPIC_NODE_DB][attr]
else:
if (B.SUBJECT_ARTIFACT in comp.conf and table in comp.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB]) \
and (attr in comp.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB][table]):
out[attr] = comp.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB][table][attr]
if (B.SUBJECT_ARTIFACTS in comp.conf and table in comp.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB]) \
and (attr in comp.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB][table]):
out[attr] = comp.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB][table][attr]
print("a " + attr + " " + out[attr])
elif (B.SUBJECT_ARTIFACT in comp.conf and attr in comp.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB]):
out[attr] = comp.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB][attr]
elif (B.SUBJECT_ARTIFACTS in comp.conf and attr in comp.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB]):
out[attr] = comp.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB][attr]
print("b " + attr + " " + out[attr])
elif (B.TOPIC_NODE_DB in comp.conf[B.TOPIC_CONN]) \
and (table in comp.conf[B.TOPIC_CONN][B.TOPIC_NODE_DB]) \
@ -301,7 +301,7 @@ def isCompRow(comp, row):
""" checks if the table in data relates to the component """
print("isCompRow "+comp.name+" "+str(row))
if comp.name in row[B.ATTR_DATA_COMP] \
and row[B.ATTR_DATA_COMP][comp.name] in comp.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_DB]:
and row[B.ATTR_DATA_COMP][comp.name] in comp.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB]:
return True
return False

6
tools/file_abstract.py

@ -39,7 +39,7 @@ class FileFcts():
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def dump_file(self, data, path):
def dump_data_file(self, job, data: dict, path: str, ttype: str):
"""
this function parses the text and translates it to dict
:param text:
@ -79,7 +79,7 @@ class FileFcts():
archivpath = ""
filename = step.args["filename"]
txt = ""
for o in self.comp.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_FILE]:
for o in self.comp.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_FILE]:
if o["name"] != filename:
continue
mapping = o["mapping"]
@ -94,7 +94,7 @@ class FileFcts():
technique = step.args["technique"]
archivpath = os.path.join(tools.path_tool.compose_path(job, "{tcresult}/request", self.comp), filename)
if technique == "cli":
for o in self.comp.conf[B.SUBJECT_ARTIFACT][B.TOPIC_NODE_FILE]:
for o in self.comp.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_FILE]:
if o["name"] != filename:
continue
envpath = o["envpath"]

72
tools/file_tool.py

@ -19,7 +19,7 @@ import basic.program
import tools.data_const as D
#import tools.tdata_tool
import tools.date_tool
import basic.constants as B
def getDump(obj):
result = vars(obj)
@ -207,7 +207,7 @@ def getModTime(job, filepath):
return out
def read_file_dict(job, path, msg, ttype=D.DFILE_TYPE_CSV):
def read_file_dict(job, path: str, msg, ttype: str = D.DFILE_TYPE_CSV) -> dict:
"""
reads and gets general a dict from any kind of filetyp
:param path: with extension of filetype
@ -240,9 +240,75 @@ def read_file_dict(job, path, msg, ttype=D.DFILE_TYPE_CSV):
#doc = tools.tdata_tool.getCsvSpec(msg, job, path, D.CSV_SPECTYPE_CONF)
doc = ffcts.load_file(path, ttype)
# tools.tdata_tool.getCsvSpec(msg, job, path, D.CSV_SPECTYPE_CONF)
doc["_path"] = path
check_file_dict(job, doc, msg, ttype)
return doc
def check_file_dict(job, config: dict, msg, ttype: str):
"""
check-routine for different kind of dictionary-types
:param job:
:param config:
:param msg:
:param ttype:
:return:
"""
MUST_NODES = []
MUSTNT_NODES = []
if ttype in [D.CSV_SPECTYPE_CTLG]:
MUST_NODES = [B.DATA_NODE_HEADER, B.DATA_NODE_KEYS]
MUSTNT_NODES = [B.DATA_NODE_DATA]
elif ttype in [D.CSV_SPECTYPE_DDL]:
MUST_NODES = [B.DATA_NODE_HEADER]
MUSTNT_NODES = [B.DATA_NODE_DATA, B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
elif ttype in [D.CSV_SPECTYPE_DATA]:
MUST_NODES = [B.DATA_NODE_HEADER, B.DATA_NODE_DATA]
MUSTNT_NODES = [B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
elif ttype in [D.CSV_SPECTYPE_CONF]:
MUST_NODES = [B.DATA_NODE_HEADER, B.DATA_NODE_DATA]
MUSTNT_NODES = [B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
elif ttype in [D.CSV_SPECTYPE_TREE]:
MUST_NODES = [B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
MUSTNT_NODES = [B.DATA_NODE_DATA]
elif ttype in [D.CSV_SPECTYPE_COMP]:
MUST_NODES = [B.SUBJECT_ARTIFACTS, B.SUBJECT_STEPS, "functions", B.SUBJECT_DATATABLES]
MUSTNT_NODES = [B.DATA_NODE_DATA]
elif ttype in ["basic", "tool"]:
# tool : tool-specific nodes
print("anderer bekannter Ttyp " + ttype + " " + config["_path"])
return
else:
print("anderer Ttyp "+ttype+" "+config["_path"])
checkNodes(job, config, MUST_NODES, MUSTNT_NODES)
def checkNodes(job, config, mustNodes, mustntNodes):
a = str(config["_path"]).split(os.path.sep)
b = a[-1].split(".")
path = config["_path"]
if b[0] in config:
config = config[b[0]]
try:
if len(config) == 2:
for x in B.LIST_SUBJECTS:
if x[:-1] in config:
config = config[x[:-1]]
break
except:
pass
for n in mustNodes:
if n not in config:
raise Exception("must-node doesnt exist "+n+" "+path)
for n in mustntNodes:
if n not in config:
continue
if len(config[n]) == 0:
job.m.logWarn("empty mustnt-node "+n+" "+path)
else:
raise Exception("must-node doesnt exist "+n+" "+path)
def castOrderedDict(res, job=None, key=""):
if isinstance(res, dict):
doc = dict(res)
@ -290,5 +356,5 @@ def write_file_dict(msg, job, path, dict, enc="utf-8", ttype=""):
print("fileWriter fuer csv")
ffcts = basic.toolHandling.getFileTool(job, None, D.DFILE_TYPE_CSV)
#doc = tools.tdata_tool.getCsvSpec(msg, job, path, D.CSV_SPECTYPE_CONF)
doc = ffcts.dump_file(dict, path, ttype)
doc = ffcts.dump_data_file(job, dict, path, ttype)
write_tile_text(msg, job, path, doc, enc)

40
tools/filecsv_fcts.py

@ -31,8 +31,8 @@ class FileFcts(tools.file_abstract.FileFcts):
lines = tools.file_tool.read_file_lines(self.job, path, self.getMsg())
return self.parseCsv(self.getMsg(), self.job, lines, ttype)
def dump_file(self, data, path, ttype=""):
text = self.buildCsv(self.getMsg(), self.job, data, ttype)
def dump_data_file(self, job, data: dict, path: str, ttype: str =""):
text = self.buildCsv(self.getMsg(), job, data, ttype)
return text
def isEmptyLine(self, msg, job, line, fields):
@ -48,6 +48,12 @@ class FileFcts(tools.file_abstract.FileFcts):
"""
detects the block either on keywords in the field which opens a block
or on status if there is no keyword in the field
Identificator for a new block:
a) change of the type of the block-indicator at field[0] : attribute <-> identificator
b) change of the block-indicator at field[0][0]
Remember: transponing of a table
a) <identificator>:<subname>;<fields...;> (table:adress;london;...)
b) option:<id-field>;<values...;> (option:us-id:US-1234;US-2345...)
:param msg: message-object maybe from component
:param job: job-object with parameter and configuration
:param field: field in the csv-file
@ -153,8 +159,6 @@ class FileFcts(tools.file_abstract.FileFcts):
steps = setStepAttribute(job, steps, "", fields)
#h.append(B.DATA_NODE_STEPS)
if verbose: print(">> step "+l)
#tableDict = getTdataContent(msg, tdata, h)
#setTableHeader(tableDict, tableAttr, fields, ttype, job)
status = D.CSV_BLOCK_STEP
continue
# table-Header Block
@ -194,10 +198,10 @@ class FileFcts(tools.file_abstract.FileFcts):
else:
if verify: print("block else :: "+l)
print("unbekannter Block "+status+": "+l)
# end for
if D.DATA_ATTR_TYPE not in tableAttr:
tableAttr[D.DATA_ATTR_TYPE] = ttype
if ttype in [D.CSV_SPECTYPE_DDL, D.CSV_SPECTYPE_CTLG]:
if ttype in [D.CSV_SPECTYPE_DDL, D.CSV_SPECTYPE_CTLG, D.CSV_SPECTYPE_MDL]:
if len(tdata[B.DATA_NODE_TABLES]) > 1:
job.m.setError("Mehr als einr Tabelle in "+ttype)
elif len(tdata[B.DATA_NODE_TABLES]) == 0:
@ -210,7 +214,7 @@ class FileFcts(tools.file_abstract.FileFcts):
tdata = data
for k in tableAttr:
tdata[k] = tableAttr[k]
if ttype == D.CSV_SPECTYPE_CONF:
if ttype in [D.CSV_SPECTYPE_CONF]:
fields = []
for k in tdata:
if k in ["_hit"] + D.LIST_DATA_ATTR:
@ -540,7 +544,7 @@ def setTableHeader(tableDict, tableAttr, fields, ttype, job):
# preparate the sub-structure for row-data
if ttype == D.CSV_SPECTYPE_TREE:
tableDict[B.DATA_NODE_DATA] = {}
elif ttype in [D.CSV_SPECTYPE_KEYS, D.CSV_SPECTYPE_CTLG]:
elif ttype in [D.CSV_SPECTYPE_KEYS, D.CSV_SPECTYPE_CTLG, D.CSV_SPECTYPE_MDL]:
tableDict[D.CSV_NODETYPE_KEYS] = {}
tableDict[D.DATA_ATTR_KEY] = 1
if D.DATA_ATTR_KEY in tableAttr:
@ -549,18 +553,36 @@ def setTableHeader(tableDict, tableAttr, fields, ttype, job):
tableDict[B.DATA_NODE_DATA] = []
return tableDict
def setTableData(tableDict, fields, ttype, job):
def setTableData(tableDict: dict, fields: list, ttype: str, job):
"""
sets the fields into the table-dict in order to the specific ttype
precondition: usage from reading table-data, so node _header is set with header of this table-block
:param tableDict:
:param fields:
:param ttype:
a) catalog: key(s) - values # meta-spec, meta-auto
b) head: key - value # spec-info
c) option: key - value # spec -> job.par
d) step: key=function - values # spec (tp, ts) -> comp.function
e) step: key=usecase - values # spec (tc) -> comp.steps
f) ddl-table: key=field - vaulues=attributes # meta-spec, comp
g) data-table: array: field - values # spec.data, comp.artifacts
:param job:
:return:
"""
row = {}
if ttype == D.CSV_SPECTYPE_DATA and ":" not in fields[0] and D.DATA_ATTR_ALIAS in tableDict:
fields = [tableDict[D.DATA_ATTR_ALIAS]] + fields
i = 1
for f in tableDict[B.DATA_NODE_HEADER]:
# --> still not used
if f in [B.DATA_NODE_ARGS, "args"]:
arguments = {}
row[B.DATA_NODE_ARGS] = arguments
if B.DATA_NODE_ARGS in row:
a = fields[i].split(":")
row[B.DATA_NODE_ARGS][a[0]] = a[1]
# <-- still not used
else:
row[f] = fields[i]
i += 1

2
tools/fileyaml_fcts.py

@ -27,7 +27,7 @@ class FileFcts(tools.file_abstract.FileFcts):
file.close()
return doc
def dump_file(self, data, path=""):
def dump_data_file(self, job, data: dict, path: str = "", ttype: str = ""):
if path == "":
return yaml.dump(data)
with open(path, 'w', encoding="utf-8") as file:

2
tools/path_const.py

@ -137,7 +137,7 @@ class PathConf:
"""
__instance = None
def __init__(self, job=None):
confs = tools.config_tool.getConfig(job, "tool", "path")
confs = tools.config_tool.getConfig(job, "tool", "path", ttype="tool")
self.pattern = confs["pattern"]
PathConf.__instance = self

13
tools/value_tool.py

@ -5,6 +5,8 @@
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
""" In diesem Modul werden alle Funktionen zusammengefasst zur Generierung und Ermittlung von pathsn """
import traceback
import tools.config_tool
import re
import basic.constants as B
@ -28,7 +30,7 @@ class ValueConf:
__instance = None
def __init__(self, job=None):
#print('init pathConf')
confs = tools.config_tool.getConfig(job, "tool", "value")
confs = tools.config_tool.getConfig(job, "tool", "value", ttype="tool")
self.pattern = confs["pattern"]
#print(self.pattern)
ValueConf.__instance = self
@ -118,12 +120,13 @@ def compose_pattern(job, pattern, comp):
pc = P.PathConf.getInstance(job)
if pattern in pc.pattern:
return compose_string(job, "{" + pattern + "}", comp)
except:
job.m.logError("Pattern konnte nicht aufgeloest werden " + pattern)
except Exception as e:
job.m.logError("Pattern konnte nicht aufgeloest werden " + pattern+ " " + str(e))
print("except "+traceback.format_exc())
return None
return None
def compose_string(job, pattern, comp):
def compose_string(job, pattern: str, comp) -> str:
"""
the function composes the pattern to the standardarized path with the attributes
which are stored in the job and the component
@ -146,6 +149,8 @@ def compose_string(job, pattern, comp):
pit = get_key_value(job, pat[1:-1], comp)
if verbose: print(str(pit) + ": " + pattern + ": " + pat)
#job.debug(verify, str(max) + ": " + pattern + ": " + pat + ": " + pit)
if pit is None:
print("replacement for " + pat + " is None")
pattern = pattern.replace(pat, pit)
while ("{" in pattern):
max = max-1

Loading…
Cancel
Save