Compare commits

...

74 Commits

Author SHA1 Message Date
Ulrich 2f49df27d4 2024-05-08 pep-warnings 4 months ago
Ulrich 3ed019fdd5 unittest 90Testserver adjusted 5 months ago
Ulrich 6a3cb565e6 fixes for unittest 04, some new feature 5 months ago
Ulrich 681b9a55fa bugfix because of keywords 1 year ago
Ulrich 0d40652d81 refactoring: basic-model - check csv with py 1 year ago
Ulrich 15589c9c69 rename release to prelease because it is a keyword in mysql 1 year ago
Ulrich 5beb75faab refactoring: basic-model - bugfix 1 year ago
Ulrich e6f8227010 refactoring: basic-model - step+table 1 year ago
Ulrich a6dda95034 refactoring: basic-model - with objects + create once 1 year ago
Ulrich e65ebcbccc refactoring: basic-model - with objects 1 year ago
Ulrich ddd4ebb4da refactoring: basic-model - testcase 1 year ago
Ulrich 114d5ccd75 refactoring: basic-model - comp 1 year ago
Ulrich c35202180a refactoring: basic-model - comp 1 year ago
Ulrich 0a5f3dde03 refactoring: createAdmTables 1 year ago
Ulrich 06c138e554 refactoring: createAdmTables 1 year ago
Ulrich 302ebfb799 remove old utils 1 year ago
Ulrich 40bfeb694f refactoring: basic-model - app inclusive plus table-ddl 1 year ago
Ulrich 4b1e9d0a79 refactoring: basic-model - app 1 year ago
Ulrich 44d08444f7 on refactoring: file_type 1 year ago
Ulrich a7d907cba3 debugging, refactor subject-groupname 1 year ago
Ulrich 351962f1de refactor model and test 1 year ago
Ulrich ae216c08d4 unittests 1 year ago
Ulrich 78a68a9fce table.get_schema 1 year ago
Ulrich 8346e1eadc refactor: divide table - datatable 1 year ago
Ulrich 4007e30052 Failure in unittests solved 1 year ago
Ulrich e9a4a587c5 flask initial test - without new functionality 1 year ago
Ulrich c6a574d628 mini program 1 year ago
Ulrich da9e4cfb78 start testcase from dialog 1 year ago
Ulrich 7b98debdb9 test-execution start 1 year ago
Ulrich 602e5dda15 read testcase with subtable-objects 1 year ago
Ulrich 8d9df0feb6 read and setAttributes for entities 1 year ago
Ulrich 9fbd75e5a1 testcase reading 1 year ago
Ulrich 030ac13261 start-dialog - select testcase-args 1 year ago
Ulrich f8a96e0943 basic-constants new standardized 1 year ago
Ulrich 357362c641 start-dialogs 1 year ago
Ulrich a1a164c2b0 bugfix of searches 1 year ago
Ulrich f825776264 one further class, read-methods environment 1 year ago
Ulrich 858b1d1fce some further classes, read-methods testcase/-suite 1 year ago
Ulrich ed7e74d238 some further classes, read-methods 1 year ago
Ulrich 06280e6290 some further classes, read-methods 1 year ago
Ulrich 099f244892 example of entity-class: user 1 year ago
Ulrich a079a47875 model designed for install_workspace 1 year ago
Ulrich 8152541577 some model for check_spec 1 year ago
Ulrich 40f00933ba frame check-config with open editor 1 year ago
Ulrich 5936e88930 table-object in testserver 1 year ago
Ulrich 2760f83c8a csv-files 1 year ago
Ulrich 693e3d9260 entities from basic to model 2 years ago
Ulrich 87bc7b7b7b csv-fcts init 1 2 years ago
Ulrich a03c68d3b3 start-dialog 2 years ago
Ulrich b5a55cc6a4 refactoring around job_tool 2 years ago
Ulrich 09564f3143 dialog for job execution 2 years ago
Ulrich 1d7cdfddac logging of lists and dicts 2 years ago
Ulrich cdf12cf68b start dialog: testsuite 2 years ago
Ulrich 82c4448838 start_dialog first changes 2 years ago
Ulrich 3f260bf6e5 example job unit-tester 2 years ago
Ulrich eff0669cbc POC datamodel 2 years ago
Ulrich db40fd5fa6 refactor prg-msg 2 years ago
Ulrich 3e10a5780f mv db-tools 2 years ago
Ulrich 4a3ae36410 refactor job and message 2 years ago
Ulrich a66f1b068a additional arg subkey 2 years ago
Ulrich 067e0c7aca program-config via catalog 2 years ago
Ulrich 234ae5179e environment object 2 years ago
Ulrich b6f822c375 application 2 years ago
Ulrich 6056eabb44 using insert/select routines 2 years ago
Ulrich 419715d629 Testserver incl. DB-mysql 2 years ago
Ulrich a78543222b job tools 2 years ago
Ulrich 0bae1ed3e7 start_dialog 2 years ago
Ulrich 3f7c9f7d78 dbmysql for webflask login 2 years ago
Ulrich 8a307baaa4 refactor create Job() with args 2 years ago
Ulrich 44ec50e669 some new files 2 years ago
Ulrich ef2c8c9a21 bugfixes 2 years ago
Ulrich 63dfe0a641 create test-db-tables 2 years ago
Ulrich c2a56125a4 git tool created 2 years ago
Ulrich 13bbfc6875 refactorings for new workspace 2 years ago
100 changed files (number of changed lines in parentheses):

  1. .idea/vcs.xml (6)
  2. __init__.py (0)
  3. basic/Testserver.py (133)
  4. basic/catalog.py (18)
  5. basic/compexec.py (70)
  6. basic/component.py (58)
  7. basic/componentHandling.py (48)
  8. basic/connection.py (48)
  9. basic/constants.py (538)
  10. basic/message.py (527)
  11. basic/program.py (552)
  12. basic/step.py (109)
  13. basic/testexecution.py (52)
  14. basic/testkonzept.txt (69)
  15. basic/text_const.py (2)
  16. basic/toolHandling.py (79)
  17. basic/xxDATASTRUCTURE.yxml (321)
  18. catalog/programs.csv (24)
  19. check_configuration.py (94)
  20. check_environment.py (14)
  21. check_specification.py (60)
  22. clean_workspace.py (115)
  23. copy_appdummy.py (56)
  24. execute_testcase.py (15)
  25. features/environment.py (0)
  26. features/steps/steps.py (0)
  27. finish_testsuite.py (2)
  28. init_testcase.py (27)
  29. init_testsuite.py (16)
  30. install_workspace.py (319)
  31. job_dialog.py (280)
  32. md5Hash.py (140)
  33. model/__init__.py (0)
  34. model/ap_applications.csv (6)
  35. model/ap_components.csv (7)
  36. model/ap_projects.csv (8)
  37. model/ap_usecases.csv (7)
  38. model/ap_variants.csv (7)
  39. model/application.csv (13)
  40. model/application.py (246)
  41. model/artifact.csv (10)
  42. model/artifact.py (46)
  43. model/catalog.py (117)
  44. model/co_artifacts.csv (10)
  45. model/co_components.csv (7)
  46. model/co_datatables.csv (10)
  47. model/co_steps.csv (11)
  48. model/component.csv (13)
  49. model/component.py (146)
  50. model/constants.py (11)
  51. model/datatable.csv (12)
  52. model/datatable.py (58)
  53. model/en_components.csv (7)
  54. model/en_projects.csv (6)
  55. model/entity.py (643)
  56. model/environment.csv (10)
  57. model/environment.py (122)
  58. model/factory.py (128)
  59. model/job.csv (12)
  60. model/prelease.csv (10)
  61. model/prelease.py (121)
  62. model/project.csv (12)
  63. model/project.py (272)
  64. model/rl_applications.csv (11)
  65. model/rl_stories.csv (10)
  66. model/st_preleases.csv (8)
  67. model/step.csv (12)
  68. model/step.py (123)
  69. model/story.csv (10)
  70. model/story.py (63)
  71. model/table.csv (18)
  72. model/table.py (346)
  73. model/tc_datatables.csv (8)
  74. model/tc_steps.csv (8)
  75. model/tc_stories.csv (10)
  76. model/tc_usecases.csv (8)
  77. model/tc_variants.csv (8)
  78. model/testcase.csv (15)
  79. model/testcase.py (268)
  80. model/testplan.csv (13)
  81. model/testplan.py (77)
  82. model/testreport.csv (24)
  83. model/testsuite.csv (13)
  84. model/testsuite.py (186)
  85. model/topic.py (98)
  86. model/tp_applications.csv (6)
  87. model/tp_steps.csv (8)
  88. model/tp_testsuites.csv (8)
  89. model/tr_artifacts.csv (9)
  90. model/ts_steps.csv (8)
  91. model/ts_testcases.csv (8)
  92. model/ts_usecases.csv (8)
  93. model/usecase.csv (17)
  94. model/usecase.py (63)
  95. model/user.csv (12)
  96. model/user.py (297)
  97. model/variant.csv (17)
  98. model/variant.py (88)
  99. requirements.txt (2)
  100. start_dialog.py (325)

6
.idea/vcs.xml

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>

0
utils/__init__.py → __init__.py

133
basic/Testserver.py

@@ -0,0 +1,133 @@
import os.path
import re
import basic.component
import basic.constants as B
import basic.toolHandling
import tools.config_tool
import tools.data_const as D
import tools.file_tool
import tools.filecsv_fcts
import model.table
import model.factory
import tools.value_tool
import tools.data_tool

COMP_NAME = B.ATTR_INST_TESTSERVER

# class Testserver(basic.component.Component):
class Testserver:
    """
    the Testserver represents the workspace with all resources for the automation
    """
    tables = {}
    __instance = None
    __writeDB = True
    def __init__(self, job):
        """
        collect all resources into this object
        :param job:
        """
        print('init '+COMP_NAME)
        self.m = job.m
        self.conf = {}
        if B.TOPIC_NODE_DB in job.conf:
            self.conf[B.TOPIC_CONN] = {}
            self.conf[B.TOPIC_CONN][B.TOPIC_NODE_DB] = {}
            for attr in B.LIST_DB_ATTR:
                if attr in job.conf[B.TOPIC_NODE_DB]:
                    self.conf[B.TOPIC_CONN][B.TOPIC_NODE_DB][attr] = job.conf[B.TOPIC_NODE_DB][attr]
        # TODO: what has to be loaded initially onto the Testserver?
        self.model = {}
        Testserver.__instance = self
        for s in B.LIST_SUBJECTS:
            self.model[tools.data_tool.getSingularKeyword(s)] = model.factory.get_entity_object(job, s, {})
    @staticmethod
    def getInstance(job):
        if Testserver.__instance is None:
            return Testserver(job)
        return Testserver.__instance
    def createAdminDBTables(self, job):
        """
        creates the complete data-model in the database. it contains:
        * the model for administration
        * the model of each project:
        * * root-tables - defined in testcases  TODO: how to declare this in general, special components/config
        * * comp-artifacts - it could contain build-rules for building from the testcase-spec
        :param job:
        :return:
        """
        tables = {}
        if B.TOPIC_NODE_DB in job.conf:
            self.dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
        else:
            return "No DB in job-config"
        # the model for administration
        for m in self.model.keys():
            print("\n==== model " + m)
            self.createDBTable(job, B.ATTR_INST_TESTSERVER, B.ATTR_INST_TESTSERVER, m)
            enty = self.model[m]
            for t in enty.getSubtableNames():
                print("subtable "+t)
                self.createDBTable(job, B.ATTR_INST_TESTSERVER, B.ATTR_INST_TESTSERVER, t)
    def createProjectDBTables(self, job):
        """
        creates the complete data-model in the database. it contains:
        * the model for administration
        * the model of each project:
        * * root-tables - defined in testcases  TODO: how to declare this in general, special components/config
        * * comp-artifacts - it could contain build-rules for building from the testcase-spec
        :param job:
        :return:
        """
        tables = {}
        path = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_PROGRAM], "model")
        fct = basic.toolHandling.getFileTool(job, None, "csv")
        for m in sorted(os.listdir(path)):
            if not re.match(r".*?\.csv", m):
                print("other "+m)
                continue
            print("model "+m)
            modelPath = os.path.join(path, m)
            modelDoc = fct.load_file(modelPath, D.CSV_SPECTYPE_DATA)
            table = model.table.Table(job, project="", application="", component=COMP_NAME, name=m[:-4])
            sql = table.get_schema(tableName=m[:-4], tableObject=table)  # [B.DATA_NODE_TABLES][m[:-4]]
            job.m.logInfo(sql)
            tables[m[:-4]] = modelDoc
            for s in sql.split(";\n"):
                if len(s) < 3:
                    continue
                try:
                    self.dbi.execStatement(s+";", job.conf[B.TOPIC_NODE_DB])
                    print("SQL executed: "+s)
                except Exception as e:
                    raise Exception("error in createSchema "+s)
    def createDBTable(self, job, project, context, tablename):
        """
        creates a table in the database
        :param job:
        :return:
        """
        args = {}
        args["context"] = context
        table = model.table.Table(job)
        table = table.read_entity(job, tablename, args=args)
        sql = table.get_schema(tablename, model.table.TYPE_ADMIN)  # [B.DATA_NODE_TABLES][m[:-4]]
        job.m.logInfo(sql)
        for s in sql.split(";\n"):
            if len(s) < 3:
                continue
            try:
                if self.__writeDB:
                    self.dbi.execStatement(s + ";", job.conf[B.TOPIC_NODE_DB])
                    print("SQL executed: " + s)
            except Exception as e:
                raise Exception("error in createSchema " + s)

18
basic/catalog.py

@@ -7,12 +7,12 @@
import os
import basic.program
import basic.constants as B
import utils.path_const as P
import utils.data_const as D
import utils.config_tool
import utils.path_tool
import utils.file_tool
import utils.tdata_tool
import tools.path_const as P
import tools.data_const as D
import tools.config_tool
import tools.path_tool
import tools.file_tool
# import tools.tdata_tool
EXP_KEY_MISSING = "key is missing {}"
EXP_KEY_DOESNT_EXIST = "key doesnt exist in domain {}"
@@ -90,13 +90,13 @@ class Catalog:
raise Exception(EXP_KEY_MISSING, (domain))
if domain in self.catalog:
return
pathname = utils.config_tool.getConfigPath(job, P.KEY_CATALOG, domain)
pathname = tools.config_tool.getConfigPath(job, P.KEY_CATALOG, domain)
if pathname is None:
raise Exception(EXP_KEY_MISSING, (domain))
if pathname[-4:] == ".csv":
data = utils.tdata_tool.getCsvSpec(job.m, job, pathname, D.CSV_SPECTYPE_KEYS)
data = tools.tdata_tool.getCsvSpec(job.m, job, pathname, D.CSV_SPECTYPE_KEYS)
else:
data = utils.file_tool.readFileDict(job, pathname, job.m)
data = tools.file_tool.readFileDict(job, pathname, job.m)
self.catalog[domain] = data[B.DATA_NODE_TABLES][domain][B.DATA_NODE_KEYS]
return data
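
The hunk keeps the read path and only swaps utils.* for tools.*: resolve the domain's config path, load it as a CSV spec or a file-dict, and cache it under the domain name. A hedged usage sketch (the getInstance accessor and the readDomain name are assumptions):

    # sketch: resolve a catalog domain once, then reuse the cached keys
    import basic.catalog
    catalog = basic.catalog.Catalog.getInstance(job)   # assumed singleton accessor
    catalog.readDomain("programs", job)                # assumed reader around the code above
    entries = catalog.catalog["programs"]              # filled from catalog/programs.csv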

70
basic/compexec.py

@@ -38,8 +38,7 @@ import basic.message
import basic.program
import inspect
import os
import re
import utils.db_abstract
import tools.db_abstract
import basic.toolHandling
import basic.component
import basic.componentHandling
@@ -50,7 +49,6 @@ import utils.match_const as M
import utils.tdata_tool
import basic.constants as B
import basic.text_const as T
import utils.data_const as D
import utils.path_const as P
class Testexecuter():
@@ -82,12 +80,12 @@ class Testexecuter():
self.m.debug(verify, "--- "+str(inspect.currentframe().f_code.co_name)+"() started at "
+ datetime.now().strftime("%Y%m%d_%H%M%S")+" for " + str(self.name).upper())
for node in [B.TOPIC_NODE_DB, B.TOPIC_NODE_CLI, B.TOPIC_NODE_API]:
if node not in self.conf[B.SUBJECT_ARTS]:
if node not in self.conf[B.SUBJECT_ARTIFACTS]:
continue
tool = basic.toolHandling.getTool(node, self, job)
tool.reset_TData(job)
if B.TOPIC_NODE_FILE in self.conf[B.SUBJECT_ARTS]:
for file in self.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_FILE]:
if B.TOPIC_NODE_FILE in self.conf[B.SUBJECT_ARTIFACTS]:
for file in self.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_FILE]:
if file in B.LIST_FILE_ATTR:
continue
print("91: "+self.classname+" "+file)
@@ -109,10 +107,10 @@ class Testexecuter():
self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
for node in [B.TOPIC_NODE_DB, B.TOPIC_NODE_CLI, B.TOPIC_NODE_FILE, B.TOPIC_NODE_API]:
print(node)
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS] and B.DATA_NODE_TABLES in tdata:
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTIFACTS] and B.DATA_NODE_TABLES in tdata:
for t in tdata[B.DATA_NODE_TABLES]:
print (t)
if utils.db_abstract.isCompTable(self, job, tdata, t):
if tools.db_abstract.isCompTable(self, job, tdata, t):
self.m.logInfo("insert content "+ self.name)
dbi = basic.toolHandling.getDbTool(job, self)
dbi.insertTables(tdata, job)
@@ -139,19 +137,17 @@ class Testexecuter():
#job = basic.program.Job.getInstance()
verify = -1+job.getDebugLevel(self.name)
self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS]:
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("select db-content "+ self.name)
dbi = basic.toolHandling.getDbTool(job, self)
data = dbi.selectTables(subdir, job)
print("ppp")
#data = {}
for t in data[subdir]:
data[B.DATA_NODE_TABLES] = {}
data[B.DATA_NODE_TABLES][t] = data[subdir][t]
utils.tdata_tool.writeCsvData(
utils.path_tool.rejoinPath(utils.path_tool.composePattern(job, "{tcresult}", self), subdir, t+".csv"),
data, self, job)
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTS]:
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("check lob if is deleted with flaskdb "+ self.name)
self.m.setMsg("readInstance for " + self.name + " is OK")
self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() finished at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
@@ -160,7 +156,7 @@ class Testexecuter():
def composeFileClauses(self, job, pattern):
#job = basic.program.Job.getInstance()
out = {}
attr = utils.db_abstract.getDbAttributes(self, job, "null")
attr = tools.db_abstract.getDbAttributes(job, self, "null")
while "{" in pattern:
pre = pattern[0:pattern.index("{")]
pat = pattern[pattern.index("{"):pattern.index("}")]
@@ -202,7 +198,7 @@ class Testexecuter():
print("table "+table)
sql_new = sql[0:sql.upper().index(" FROM ")+5]
print("sql_new "+sql_new)
attr = utils.db_abstract.getDbAttributes(self, table)
attr = tools.db_abstract.getDbAttributes(job, self, table)
if attr[B.ATTR_DB_TABNAME] != "":
table = attr[B.ATTR_DB_TABNAME]
if attr[B.ATTR_DB_SCHEMA] != "":
@@ -211,12 +207,12 @@ class Testexecuter():
print("sql_new "+sql_new)
if (hasattr(job.par, B.PAR_DB_WHERE)):
# currently it parses only conjunctive or disjunctive normal form without parentheses
parts = utils.db_abstract.parseSQLwhere(getattr(job.par, B.PAR_DB_WHERE), self.conf[B.DATA_NODE_DDL][table])
parts = tools.db_abstract.parseSQLwhere(getattr(job.par, B.PAR_DB_WHERE), self.conf[B.DATA_NODE_DDL][table])
# fields and operations
# print(dbwhere)
sql_new += " WHERE "+parts
if sql_new[0:6] == "SELECT":
ids = utils.db_abstract.getTechnicalIDFields(self.conf["ddl"][table])
ids = tools.db_abstract.getTechnicalIDFields(self.conf["ddl"][table])
sql_new += " ORDER BY "+",".join(ids)
print("sql_new "+sql_new)
sql_new = sql_new.replace('!', "\'")
@@ -255,17 +251,17 @@ class Testexecuter():
:return:
"""
if not step.fct in self.conf[B.DATA_NODE_STEPS]:
raise Exception(self.m.getMessageText(T.EXP_KEY_DOESNT_EXIST, [step.fct, self.name]))
raise Exception(self.m.getMessageText(job, T.EXP_KEY_DOESNT_EXIST, [step.fct, self.name]))
if step.fct in self.conf[B.DATA_NODE_STEPS]:
for stepconf in self.conf[B.DATA_NODE_STEPS][step.fct]:
if stepconf[B.SUBJECT_TOOL] == B.TOPIC_NODE_FILE:
if stepconf[B.TOPIC_TOOL] == B.TOPIC_NODE_FILE:
tool = basic.toolHandling.getFileTool(job, self, stepconf[B.ATTR_ARTS_TYPE])
print("file-tool")
elif stepconf[B.SUBJECT_TOOL] == B.TOPIC_NODE_API:
elif stepconf[B.TOPIC_TOOL] == B.TOPIC_NODE_API:
print("api-tool")
elif stepconf[B.SUBJECT_TOOL] == B.TOPIC_NODE_CLI:
elif stepconf[B.TOPIC_TOOL] == B.TOPIC_NODE_CLI:
print("cli-tool")
elif stepconf[B.SUBJECT_TOOL] == B.TOPIC_NODE_DB:
elif stepconf[B.TOPIC_TOOL] == B.TOPIC_NODE_DB:
print("db-tool")
else:
print("nichts da")
@@ -301,14 +297,14 @@ class Testexecuter():
"""
#job = basic.program.Job.getInstance()
verify = job.getDebugLevel(self.name)
if B.ATTR_ARTS_LOG in self.conf[B.SUBJECT_ARTS]:
self.m.logInfo("get files in for "+ self.name + " in " + self.conf[B.SUBJECT_ARTS][B.ATTR_ARTS_LOG]["path"])
if "flaskdb" in self.conf[B.SUBJECT_ARTS]:
if B.ATTR_ARTS_LOG in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("get files in for " + self.name + " in " + self.conf[B.SUBJECT_ARTIFACTS][B.ATTR_ARTS_LOG]["path"])
if "flaskdb" in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("select flaskdb-content "+ self.name)
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTS]:
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTIFACTS]:
pass # after selection get file from flaskdb
if B.ATTR_ARTS_FILE in self.conf[B.SUBJECT_ARTS]:
self.m.logInfo("get files in for "+ self.name + " in " + self.conf[B.SUBJECT_ARTS][B.ATTR_ARTS_FILE]["path"])
if B.ATTR_ARTS_FILE in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("get files in for " + self.name + " in " + self.conf[B.SUBJECT_ARTIFACTS][B.ATTR_ARTS_FILE]["path"])
self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() " + str(self.name))
@@ -321,14 +317,14 @@ class Testexecuter():
#job = basic.program.Job.getInstance()
verify = job.getDebugLevel(self.name)
self.m.debug(verify, "--- "+str(inspect.currentframe().f_code.co_name)+"() "+str(self.name))
if B.ATTR_ARTS_LOG in self.conf[B.SUBJECT_ARTS]:
if B.ATTR_ARTS_LOG in self.conf[B.SUBJECT_ARTIFACTS]:
pass #
if "flaskdb" in self.conf[B.SUBJECT_ARTS]:
if "flaskdb" in self.conf[B.SUBJECT_ARTIFACTS]:
pass # stored in table
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTS]:
self.m.logInfo("tidy files in for "+self.name+" in "+self.conf[B.SUBJECT_ARTS][B.ATTR_ARTS_LOB]["format"])
if B.ATTR_ARTS_FILE in self.conf[B.SUBJECT_ARTS]:
self.m.logInfo("tidy files in for "+self.name+" in "+self.conf[B.SUBJECT_ARTS][B.ATTR_ARTS_FILE]["format"])
if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("tidy files in for " + self.name +" in " + self.conf[B.SUBJECT_ARTIFACTS][B.ATTR_ARTS_LOB]["format"])
if B.ATTR_ARTS_FILE in self.conf[B.SUBJECT_ARTIFACTS]:
self.m.logInfo("tidy files in for " + self.name +" in " + self.conf[B.SUBJECT_ARTIFACTS][B.ATTR_ARTS_FILE]["format"])
def fix_TcResult(self, job, granularity):
"""
@@ -354,15 +350,15 @@ class Testexecuter():
cm = basic.componentHandling.ComponentManager.getInstance(job)
data = {}
matching = utils.match_tool.Matching(job, self)
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS]:
for t in self.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB]:
if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTIFACTS]:
for t in self.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB]:
if t in B.LIST_DB_ATTR:
continue
# fill each data into matching-object
for side in M.MATCH_SIDES:
if side == M.MATCH_SIDE_PRESTEP:
if B.ATTR_ARTS_PRESTEP in self.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB][t]:
a = self.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB][t][B.ATTR_ARTS_PRESTEP].split(":")
if B.ATTR_ARTS_PRESTEP in self.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB][t]:
a = self.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB][t][B.ATTR_ARTS_PRESTEP].split(":")
if a[0] != self.name:
comp = cm.getComponent(a[0])
else:
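
The recurring pattern in these hunks is walking the component's artifact-tree. A hedged sketch of the shape that self.conf[B.SUBJECT_ARTIFACTS] is expected to have (table- and attribute-names invented):

    # sketch: the artifact-node of a component-config after mergeConn
    conf = {
        "artifacts": {   # B.SUBJECT_ARTIFACTS, renamed from B.SUBJECT_ARTS in this diff
            "db": {      # B.TOPIC_NODE_DB
                "type": "mysql",   # technical attribute out of B.LIST_DB_ATTR
                "mytable": {       # a table-node, skipped when iterating the attributes
                    "prestep": "othercomp:srctable"   # B.ATTR_ARTS_PRESTEP, split at ":"
                }
            },
            "log": {"path": "/var/log/mycomp"}   # B.ATTR_ARTS_LOG with its path
        }
    }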

58
basic/component.py

@@ -8,12 +8,19 @@
# ---------------------------------------------------------------------
from datetime import datetime
import basic.compexec
#from basic.compexec import Testexecuter
import basic.message
import basic.program
#import basic.entity
import basic.constants as B
import tools.path_const as P
import inspect
import tools.config_tool
#import tools.git_tool
import basic.toolHandling
TABLE_NAMES = ["component", "co_table", "co_variant"]
DEFAULT_SYNC = "" #basic.entity.SYNC_HEAD_GIT2DB
class CompData:
def __init__(self):
@@ -21,13 +28,54 @@ class CompData:
self.m = None
self.conf = None
def syncEnitity(job, elem):
"""
synchronize the configuration with the database
:param job:
:return:
"""
syncMethod = DEFAULT_SYNC
if syncMethod.count("-") < 2:
return
fileTime = basic.entity.VAL_ZERO_TIME
dbTime = basic.entity.VAL_ZERO_TIME
# get git-commit
if "git" in syncMethod:
comppath = tools.config_tool.select_config_path(job, P.KEY_COMP, elem)
repopath = comppath[len(job.conf[B.TOPIC_PATH][B.ATTR_PATH_COMPS]) + 1:]
gitresult = tools.git_tool.gitLog(job, B.ATTR_PATH_COMPS, repopath, 1)
fileTime = gitresult[0]["date"]
print(str(gitresult))
if "db" in syncMethod:
if B.TOPIC_NODE_DB in job.conf:
dbi = basic.toolHandling.getDbTool(job, job.testserver, job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
data = dbi.selectRows(TABLE_NAMES[0], job)
print(str(data[B.DATA_NODE_DATA]))
if len(data[B.DATA_NODE_DATA]) > 0:
dbTime = data[B.DATA_NODE_DATA][0]["updtime"]
if fileTime == dbTime:
print("equal")
elif fileTime < dbTime:
print("db ahead")
#(appObjects, appDict) = selectEntities(job, dbi)
#print(str(appDict))
#applPath = tools.config_tool.getConfigPath(job, P.KEY_BASIC, B.SUBJECT_APPS)
#tools.file_tool.writeFileDict(job.m, job, applPath, appDict)
#
elif fileTime > dbTime:
print("git vorne")
compData = tools.config_tool.getConfig(job, P.KEY_COMP, elem)
#insertEntities(job, compData, dbTime, dbi)
#class Component(components.sysmonitor.SystemMonitor, components.testexec.Testexecuter, components.report.Report,
# components.maintain.Maintainer, components.catalog, threading.Thread):
class Component(basic.compexec.Testexecuter):
class Component():
"""
A component represents an application of the system-under-test or a data-artifact which is created from the system-under-test.
As this representation it has the knowledge of the URL and of which other components depend on this component.
During a test-run the component must be checked and prepared, app-files have to be collected, etc. For these tasks some standard-methods are implemented.
"""
@@ -171,8 +219,8 @@ class Component(basic.compexec.Testexecuter):
testreport = ""
# if job.par.context == "tset":
# for tc in testcases:
# header = utils.report_tool.getTcHeader()
# body = utils.report_tool.getTcExtraction()
# header = tools.report_tool.getTcHeader()
# body = tools.report_tool.getTcExtraction()
# if job.par.context == "tcontext":
# for ts in testsets:
reportheader = reportheader + '</head>'
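
A hedged sketch of the decision in syncEnitity above, once DEFAULT_SYNC is configured (the sync-method value and the timestamp format are assumptions; in this commit DEFAULT_SYNC is still empty, so the function returns immediately):

    # sketch: three-way outcome of comparing config-file time and db time
    fileTime = "2023-04-02 10:15:00"   # date of the newest git-commit of the comp-config
    dbTime = "2023-04-01 18:00:00"     # updtime of the newest db-row of the component
    if fileTime == dbTime:
        pass    # both sides are in sync, nothing to do
    elif fileTime < dbTime:
        pass    # db is ahead: export the db-rows back into the config-file
    else:
        pass    # git is ahead: read the config and insert it into the db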

48
basic/componentHandling.py

@@ -15,18 +15,18 @@ Each component could be created mostly once, but not every time:
Each kind of instance has its component-class, and for each use an object should be created.
Each created component-object is documented in the parameter-file.
"""
import utils.config_tool
import utils.conn_tool
import tools.config_tool
import tools.conn_tool
import basic.program
import basic.message
import basic.component
import importlib
import copy
import basic.constants as B
import utils.data_const as D
import tools.data_const as D
comps = {}
PARAM_NOSUBNODE = [B.SUBJECT_ARTS, "components", "instance"]
PARAM_NOSUBNODE = [B.SUBJECT_ARTIFACTS, "components", "instance"]
DEFAULT_INST_CNT = 1
DEFAULT_INST_SGL = "y"
@@ -41,10 +41,10 @@ def getInstanceAttributes(conf):
B.ATTR_INST_CNT: DEFAULT_INST_CNT,
B.ATTR_INST_SGL: DEFAULT_INST_SGL
}
if B.SUBJECT_INST in conf:
if B.TOPIC_INST in conf:
for attr in [B.ATTR_INST_CNT, B.ATTR_INST_SGL]:
if attr in conf[B.SUBJECT_INST]:
out[attr] = conf[B.SUBJECT_INST][attr]
if attr in conf[B.TOPIC_INST]:
out[attr] = conf[B.TOPIC_INST][attr]
return out
@@ -85,11 +85,11 @@ class ComponentManager:
job = self.job # basic.program.Job.getInstance()
anw = job.par.application
job.m.logDebug("applicationscomponente -- " + str(type(job.par)))
if not job.conf.confs[B.SUBJECT_APPS].get(anw):
if not job.conf[B.SUBJECT_APPS].get(anw):
job.m.setFatal("application " + job.par.application + " is not configured")
return
for k in job.conf.confs[B.SUBJECT_APPS].get(anw):
if k == B.ATTR_APPS_PROJECT:
for k in job.conf[B.SUBJECT_APPS].get(anw):
if k == B.SUBJECT_PROJECT:
continue
job.m.logDebug("applicationscomponente -- " + k + ":")
print("applicationscomponente -- " + k + ":")
@@ -112,7 +112,7 @@ class ComponentManager:
out = []
for c in self.comps:
job.debug(verify, "getComponents " + c + ": " + str(self.comps[c].conf))
print("getComponents " + c + ": " + str(self.comps[c].conf))
#print("getComponents " + c + ": " + str(self.comps[c].conf))
if mainfct in self.comps[c].conf["function"]:
out.append(c)
return out
@@ -141,29 +141,29 @@ class ComponentManager:
verify = job.getDebugLevel("job_tool")
componentName = componentName.lower()
job.debug(verify, "createComponent " + componentName)
confs = utils.config_tool.getConfig(job, "comp", componentName)
conns = utils.conn_tool.getConnections(job, componentName)
confs = tools.config_tool.getConfig(job, "comp", componentName)
conns = tools.conn_tool.getConnections(job, componentName)
instAttr = getInstanceAttributes(confs)
job.debug(verify, "createComponent -91- " + componentName + " : " + str(confs))
if nr > 0 and int(instAttr[B.ATTR_INST_CNT]) > 1:
job.m.setError("for multiple callers are multiple calls not implemented ")
if nr > 0 and len(conns) == 0:
job.m.setError("for multiple calls has only one call configured")
print(confs)
#print(confs)
parContent = job.loadParameter()
if len(conns) == 1:
c = self.createInstance(componentName, parContent, confs, conns, 0)
print("createComponent 3 a " + componentName)
#print("createComponent 3 a " + componentName)
self.createSubComponents(c, nr, suffix)
else:
i = 1
print("createComponent 3 b " + componentName)
#print("createComponent 3 b " + componentName)
for cn in conns:
c = self.createInstance(componentName, parContent, confs, conns, i)
self.createSubComponents(c, i, suffix)
i = i + 1
print("createComponent 9 " + componentName)
print(self.comps)
#print("createComponent 9 " + componentName)
#print(self.comps)
def createInstance(self, compName, parContent, confs, conns, nr):
@@ -189,22 +189,22 @@ class ComponentManager:
c.name = name
c.classname = compName
c.m = basic.message.Message(job, basic.message.LIMIT_DEBUG, "logTime", name)
c.conf = utils.config_tool.mergeConn(c.m, confs["conf"], conns[i])
c.conf[B.SUBJECT_CONN] = conns[i]
c.m = basic.message.Message(job, basic.message.LIMIT_DEBUG, job.start, c.name)
c.conf = tools.config_tool.mergeConn(c.m, confs[B.SUBJECT_COMP], conns[i])
c.conf[B.TOPIC_CONN] = conns[i]
c.init(job)
if parContent is not None:
print("createComponent 5 a " + compName + " : " + str(parContent))
if B.SUBJECT_COMPS in parContent and compName in parContent[B.SUBJECT_COMPS]:
for k in parContent[B.SUBJECT_COMPS][compName].keys():
c.conf[k] = parContent[B.SUBJECT_COMPS][compName][k]
if B.SUBJECT_ARTS in c.conf and B.TOPIC_NODE_DB in c.conf[B.SUBJECT_ARTS]:
if B.SUBJECT_ARTIFACTS in c.conf and B.TOPIC_NODE_DB in c.conf[B.SUBJECT_ARTIFACTS]:
if not B.DATA_NODE_DDL in c.conf:
c.conf[B.DATA_NODE_DDL] = {}
for table in c.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB]:
for table in c.conf[B.SUBJECT_ARTIFACTS][B.TOPIC_NODE_DB]:
if table in B.LIST_DB_ATTR:
continue
conf = utils.config_tool.getConfig(job, D.DDL_FILENAME, compName, table)
conf = tools.config_tool.getConfig(job, D.DDL_FILENAME, compName, table, D.CSV_SPECTYPE_DDL)
if B.DATA_NODE_TABLES in conf and table in conf[B.DATA_NODE_TABLES]:
c.conf[B.DATA_NODE_DDL][table] = conf[B.DATA_NODE_TABLES][table]
elif table in conf:
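
The instance-node of a comp-config controls how many objects createComponent builds. A hedged example of such a config snippet and what getInstanceAttributes returns for it (the dict shape is an assumption based on the hunk above):

    # sketch: the instance-node evaluated by getInstanceAttributes(confs)
    confs = {
        "instance": {      # B.TOPIC_INST (renamed from B.SUBJECT_INST in this diff)
            "count": 2,    # B.ATTR_INST_CNT
            "single": "n"  # B.ATTR_INST_SGL
        }
    }
    out = getInstanceAttributes(confs)  # -> {"count": 2, "single": "n"}
    # missing attributes fall back to DEFAULT_INST_CNT = 1 and DEFAULT_INST_SGL = "y"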

48
basic/connection.py

@@ -0,0 +1,48 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import basic.toolHandling
import tools.data_const as D
import basic.constants as B
import model.entity

class Connection(model.entity.Entity):
    name = ""
    description = ""
    application = ""
    usecase = []
    story = []
    tables = {}
    steps = []
    def __init__(self, job):
        """
        to be initialized by readSpec
        :param job:
        """
        self.job = job
    def get_schema(self, table=""):
        dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
        dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
        print(str(dbi))
        sql = dbi.getCreateTable("connection")
        sql += dbi.getSchemaAttribut("cnid", "id")+","
        sql += dbi.getSchemaAttribut("environment", D.TYPE_STR)+","
        sql += dbi.getSchemaAttribut("component", D.TYPE_STR)+","
        sql += dbi.getSchemaAttribut("type", D.TYPE_STR)+","
        sql += dbi.getSchemaAttribut("ip", D.TYPE_STR)+","
        sql += dbi.getSchemaAttribut("port", D.TYPE_INT)+","
        sql += dbi.getSchemaAttribut("hostname", D.TYPE_STR)+","
        sql += dbi.getSchemaAttribut("dompath", D.TYPE_STR)+","
        sql += dbi.getSchemaAttribut(B.NODE_ATTRIBUTES, D.TYPE_TEXT)+","
        sql += self.getHistoryFields()
        sql += ");\n"
        sql += dbi.getSchemaIndex("connection", "environment") + "\n"
        sql += self.getHistoryIndex("connection")
        return sql
        # dbi.execStatement(sql)
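
A hedged usage sketch of the schema-generation above (what getCreateTable and getSchemaAttribut emit depends on the configured db-tool, which is outside this diff):

    # sketch: generate and print the connection-DDL statement by statement
    import basic.connection
    conn = basic.connection.Connection(job)   # job with a db-node in job.conf
    sql = conn.get_schema()
    # sql holds a CREATE TABLE "connection" with the columns cnid, environment,
    # component, type, ip, port, hostname, dompath and attributes, the history
    # fields, plus an index on environment
    for stmt in sql.split(";\n"):   # the same split the Testserver uses
        if len(stmt) >= 3:
            print(stmt + ";")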

538
basic/constants.py

@@ -7,9 +7,9 @@
"""
The constants describe the keywords of the main datastructures, these are
* the configurations of
* program-configuration for general technical variables in tools - stored in internal conf-folder
* program-configuration for general technical variables in xtools - stored in internal conf-folder
it represents the general knowledge of this test-automation
* basic-configuration for global variables in job - stored in external conf-folder
* xbasic-configuration for global variables in job - stored in external conf-folder
it represents the knowledge of your test-topic
* comp-configuration for component-attributes - stored in internal component-folder in
+ ddl-configuration of an entity of the component as attributes - stored in internal component-folder
@@ -18,102 +18,246 @@ The constants describe the keywords of the main datastructures, these are
* test-specification with testdata - stored in external testdata-folder
* the internal datastructure
"""
import os
home = ""
if home == "":
    home = os.getcwd()
prgdir = ""
if home[-4:] == "test":
    home = home[0:-5]
if home[-10:] == "components":
    home = home[0:-11]
if home[-9:] == "program":
    prgdir = home[-6:]
    home = home[0:-7]
elif home[-7:] == "program":
    prgdir = home[-7:]
    home = home[0:-8]
HOME_PATH = home
# -------------------------------------------------------------
# values and keywords
BASIS_FILE = "basis"
SVAL_YES = "y"
SVAL_NO = "n"
SVAL_NULL = "null"
# -------------------------------------------------------------
# parameter with arguments
PAR_APP = 'application'
""" definition of the application which will be tested """
PAR_ENV = 'environment'
""" definition of the environment where the application runs which will be tested """
PAR_REL = 'release'
""" definition of the release of the application which will be tested """
PAR_TSDIR = 'tsdir'
""" definition of the directory of the testsuite for summaries of the testcases """
PAR_TCDIR = 'tcdir'
""" definition of the directory of the testcase for logs and results """
PAR_XPDIR = 'xpdir'
""" definition of the directory of expectation for comparing the testcase """
PAR_WSDIR = "wsdir"
""" definition of the directory of the workspace """
PAR_TDTYP = 'tdtyp'
PAR_TDSRC = 'tdsrc'
PAR_TDNAME = 'tdname'
PAR_LOG = 'loglevel'
PAR_MODUS = 'modus'
PAR_COMP = 'component'
PAR_FCT = 'function'
PAR_TOOL = 'tool'
PAR_STEP = 'step'
PAR_DESCRIPT = 'desription'
""" """
PAR_TESTCASE = "testcase"
PAR_TESTCASES = "testcases"
PAR_TESTSUITE = "usecase"
PAR_TCTIME = "tctime"
PAR_TSTIME = "tstime"
PAR_TESTINSTANCES = "testinstances"
""" name of testcase extracted from PAR_TCDIR """
PAR_DB_WHERE = "dbwhere"
""" optional parameter with a where-clause """
PAR_DB_PARTITION = "dbparts"
""" optional parameter for partitions of a partitioned tables """
# -- example -----------------------------------------------------
# example testcase - nodes
# the internal datastructure is a tree with this design:
# root { : subject itself - subject-singular-constant
# + option { : subtable of subject - subject-plural-constant
# + attribute : attribute of subtable - allowed attribute-names in data-const LIST_ATTR_<subject>
# + stories : subtable of subject
# + <NAME> : element of subtable - the unique name
# + name : redundant name-attribute
# + attribute : some main attributes
# + datas :
# + <NAME> : table name
# + ddl : topic, functionality relating to ddl
# + _header [ : specific data-node of the topic - constant
# - fields : variable field-name
# + steps : special topic
#
# + comp { : variable component-name
# + substructure { : variable maybe scheme, table of a database-component
# + + _header [ : constant
# - fields : variable field-name
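# As a hedged illustration of this tree (all names invented), one testcase
# could materialize as:
# {
#     "testcase": {
#         "TC0001": {
#             "name": "TC0001",
#             "stories": {"ST-01": {"name": "ST-01"}},
#             "datas": {
#                 "mytable": {
#                     "ddl": {"_header": ["id", "name"]}
#                 }
#             },
#             "steps": []
#         }
#     }
# }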
# -------------------------------------------------------------
# -------------------------------------------------------------
# attributes
# --Domain ----------------------------------------------------
# ____ _
# | _ \ ___ _ __ ___ __ _(_)_ __
# | | | |/ _ \| '_ ` _ \ / _` | | '_ \
# | |_| | (_) | | | | | | (_| | | | | |
# |____/ \___/|_| |_| |_|\__,_|_|_| |_|
#
# -------------------------------------------------------------
DOM_PROJECT = "project"
"""
this domain relates to project-management and -knowledge
subjects of the domain are:
- project
- application
- release
topics relating to the domain are:
"""
DOM_SPECIFICATION = "specification"
"""
this domain relates to specification-management and -knowledge
subjects of the domain are:
- application
- usecase
- variant
- release
- story
topics relating to the domain are:
"""
DOM_TEST = "test"
"""
this domain relates to test-management and -knowledge
subjects of the domain are:
- testplan
- release
- testsuite
- application
- variant
- testcase
- application
- usecase
- variant
- story
- option
- data
- step
- testreport
- release
- artifact
topics relating to the domain are:
"""
DOM_IMPLEMENTATION = "implementation"
"""
this domain relates to implementation-management and -knowledge
subjects of the domain are:
- component
- artifact
- data
- step
topics relating to the domain are:
- db
- cli
- api
- conn
- ddl
"""
DOM_INSTALLATION = "installation"
"""
this domain relates to installation-management and -knowledge
subjects of the domain are:
- environment
- component
topics relating to the domain are:
- db
- cli
- api
- conn
- instance
"""
# --Subject ----------------------------------------------------
# ____ _ _ _
# / ___| _ _| |__ (_) ___ ___| |_
# \___ \| | | | '_ \| |/ _ \/ __| __|
# ___) | |_| | |_) | | __/ (__| |_
# |____/ \__,_|_.__// |\___|\___|\__|
# |__/
# -------------------------------------------------------------
# structure - nodes
# the internal datastructure is a tree with this design:
# root { : constant
# + option { : constant
# + steps
# + comp { : variable component-name
# + substructure { : variable maybe scheme, table of a database-component
# + + _header [ : constant
# - fields : variable field-name
DATA_NODE_HEADER = "_header"
""" This constant defines a subnode of a table for the column-names """
DATA_NODE_DATA = "_data"
""" This constant defines a subnode of a table for the data which are set as key-value-pair with the column-names as key """
DATA_NODE_STEPS = "_steps"
""" This constant defines the main node in the testdata for the steps to execute """
DATA_NODE_OPTION = "_option"
""" This constant defines main node in the testdata for testcase specific parameters """
DATA_NODE_TABLES = "_tables"
""" This constant defines the main node in the testdata for the steps to execute """
DATA_NODE_KEYS = "_keys"
""" This constant defines the main node in the testdata for the steps to execute """
DATA_NODE_DDL = "ddl"
""" This constant defines the node for data scheme (DataDefinitionLanguage)
The fields are defined in data_const (D) """
DATA_NODE_COMP = "comp"
""" This constant defines """
DATA_NODE_PAR = "par"
DATA_NODE_CATALOG = "_catalog"
DATA_NODE_ROW = "_row"
# the configuration of a component or tool
# entity { : variable name of the group, xbasic, component-name or tool-name
# + subject { : variable subject-name - it corresponds to a tool
# + + sub-subject { : variable subject-name - it corresponds to a tool
# + attributes : constant of the tool in which the attribute is implemented
# the main subjects # prog xbasic envir tool comp testcase main implementation module
""" This constant defines the debug-folder in testing-filesystem """
SUBJECT_PROJECT = "project"
SUBJECT_PROJECTS = SUBJECT_PROJECT + "s"
SUBJECT_APP = "application"
SUBJECT_APPS = SUBJECT_APP+"s" #
SUBJECT_COMP = "component"
SUBJECT_COMPS = SUBJECT_COMP + "s"
SUBJECT_USECASE = "usecase"
SUBJECT_USECASES = SUBJECT_USECASE + "s"
SUBJECT_USER = "user"
SUBJECT_USERS = SUBJECT_USER + "s"
SUBJECT_REL = "prelease"
SUBJECT_RELS = SUBJECT_REL + "s"
SUBJECT_TESTCASE = "testcase"
SUBJECT_TESTCASES = SUBJECT_TESTCASE + "s"
SUBJECT_TESTSUITE = "testsuite"
SUBJECT_TESTSUITES = SUBJECT_TESTSUITE + "s"
SUBJECT_TESTPLAN = "testplan"
SUBJECT_TESTPLANS = SUBJECT_TESTPLAN + "s"
SUBJECT_ARTIFACT = "artifact"
SUBJECT_ARTIFACTS = SUBJECT_ARTIFACT + "s"
ATTR_ARTS_LOG = "log"
ATTR_ARTS_LOB = "lob"
ATTR_ARTS_FILE = "file"
ATTR_ARTS_TYPE = "type"
""" must attribute for the type of the technique """
ATTR_ARTS_NAME = "name"
""" optional attribute just for information """
ATTR_ARTS_PATH = "path"
""" optional attribute for the basic folder if the artifact is stored in the filesystem """
""" optional attribute for the xbasic folder if the artifact is stored in the filesystem """
ATTR_ARTS_RESET = "reset"
""" optional attribute if the artefact should be reset in the initializing-phase """
ATTR_ARTS_PRESTEP = "prestep"
""" optional attribute to define a source-table for this table """
LIST_ARTS_ATTR = [ATTR_ARTS_TYPE, ATTR_ARTS_PATH, ATTR_ARTS_RESET, ATTR_ARTS_PRESTEP, ATTR_ARTS_NAME]
SUBJECT_VARIANT = "variant"
SUBJECT_VARIANTS = SUBJECT_VARIANT + "s"
SUBJECT_ENVIRONMENT = 'environment'
SUBJECT_ENVIRONMENTS = SUBJECT_ENVIRONMENT + "s"
SUBJECT_STORY = "story"
SUBJECT_STORIES = "stories"
SUBJECT_DESCRIPTION = "description" # --> MAIN-ATTR
SUBJECT_REFERENCE = "reference" # --> MAIN-ATTR
SUBJECT_STEP = "step"
SUBJECT_STEPS = "steps"
SUBJECT_DATATABLE = "datatable"
SUBJECT_DATATABLES = SUBJECT_DATATABLE + "s"
# List of persistent models
LIST_SUBJECTS = [SUBJECT_PROJECTS, SUBJECT_APPS, SUBJECT_USECASES, SUBJECT_VARIANTS, SUBJECT_RELS, SUBJECT_STORIES,
SUBJECT_TESTPLANS, SUBJECT_TESTSUITES, SUBJECT_TESTCASES, SUBJECT_STEPS, SUBJECT_DATATABLES,
SUBJECT_ENVIRONMENTS, SUBJECT_COMPS, SUBJECT_ARTIFACTS, "storys"]
# --Topic -----------------------------------------------------
# _____ _
# |_ _|__ _ __ (_) ___
# | |/ _ \| '_ \| |/ __|
# | | (_) | |_) | | (__
# |_|\___/| .__/|_|\___|
# |_|
# -------------------------------------------------------------
# topics
"""
in this subject-node are each kind of result of any component with the structure:
* topic (db, cli, api, ...)
* * general attributes - to-know: technical attributes are stored in connection-tree
* * partial-component - to-know: the existence of db-tables can be defined in the ddl-tree
* * * specific attributes - it overrides the general attributes
"""
TOPIC_INST = "instance"
ATTR_INST_CNT = "count" #
ATTR_INST_SGL = "single"
LIST_INST_ATTR = [ATTR_INST_CNT, ATTR_INST_SGL]
ATTR_INST_SUBCOMP = SUBJECT_COMPS
ATTR_INST_TESTSERVER = "Testserver"
TOPIC_PATH = "paths"
ATTR_PATH_MODE = "mode"
""" This constant defines the home-folder in filesystem of test """
ATTR_PATH_HOME = "home"
""" This constant defines the home-folder in testing-filesystem """
ATTR_PATH_DEBUG = "debugs"
""" This constant defines the debug-folder in testing-filesystem """
ATTR_PATH_ARCHIV = "archiv"
""" This constant defines the folder in testing-filesystem for results and log of execution """
ATTR_PATH_EXPECT = "expect"
""" This constant defines the folder in testing-filesystem for test-expectation values """
ATTR_PATH_PROGRAM = "program"
""" This constant defines the program-folder in the workspace """
ATTR_PATH_COMPS = "components"
""" This constant defines the subfolder in the program-folder in the workspace """
ATTR_PATH_ENV = "environment"
""" This constant defines the folder in testing-filesystem, used for configs related to environments """
ATTR_PATH_RELEASE = "release"
""" This constant defines the folder in testing-filesystem, used for configs related to release """
ATTR_PATH_TDATA = "testdata"
""" This constant defines the folder in testing-filesystem with the testcase-specifications """
ATTR_PATH_PATTN = "pattern"
""" This constant defines the debug-folder in testing-filesystem """
# SUBJECT_FCT = "function" # --> TOPIC
TOPIC_DDL = "ddl"
ATTR_TYPE = "type" #
RULE_ACCEPTANCE = "acceptance" #
ATTR_STEP_ARGS = "args"
ATTR_EXEC_REF = "_exec"
ATTR_DATA_REF = "_nr"
ATTR_DATA_COMP = "_comp"
TOPIC_TOOL = "tool" # ?? -> TOPIC
TOPIC_NODE_DB = "db"
# testexec, db_abstr
ATTR_DB_PARTITION = "partitioned"
@@ -125,7 +269,28 @@ ATTR_DB_SCHEMA = "schema"
""" optional attribute for technical name of the schema """
ATTR_DB_TABNAME = "tabname"
""" optional attribute in order to use a different technical name for the db-table """
LIST_DB_ATTR = [ATTR_DB_PARTITION, ATTR_DB_DATABASE, ATTR_DB_SCHEMA, ATTR_DB_TABNAME, ATTR_ARTS_PRESTEP] + LIST_ARTS_ATTR
ATTR_DB_USER = "user"
""" optional attribute in order to use a different technical name for the db-table """
ATTR_DB_PASSWD = "password"
""" optional attribute in order to use a different technical name for the db-table """
ATTR_DB_HOST = "hostname"
""" optional attribute in order to use a different technical name for the db-table """
LIST_DB_ATTR = [ATTR_DB_PARTITION, ATTR_DB_DATABASE, ATTR_DB_SCHEMA, ATTR_DB_TABNAME, ATTR_ARTS_PRESTEP,
ATTR_DB_USER, ATTR_DB_PASSWD, ATTR_DB_HOST] + LIST_ARTS_ATTR
TOPIC_CONN = "conn"
ATTR_DB_CONN_JAR = "conn_jar_name"
""" optional attribute for a connection-jar-file instead of a connection by ip and port """
ATTR_CONN_HOST = "hostname"
""" optional attribute for the hostname of the connection """
ATTR_CONN_TENANT = "tenant"
""" optional attribute for the tenant of the connection """
ATTR_CONN_IP = "ip"
ATTR_CONN_PORT = "port"
ATTR_CONN_DOMPATH = "dompath"
""" directory where the component is stored in the filesystem """
ATTR_CONN_USER = "user"
ATTR_CONN_PASSWD = "password"
LIST_CONN_ATTR = [ATTR_CONN_HOST, ATTR_CONN_IP, ATTR_CONN_PORT, ATTR_CONN_DOMPATH, ATTR_CONN_USER, ATTR_CONN_PASSWD]
TOPIC_NODE_CLI = "cli"
LIST_CLI_ATTR = [] + LIST_ARTS_ATTR
TOPIC_NODE_API = "api"
@@ -133,7 +298,7 @@ LIST_API_ATTR = [] + LIST_ARTS_ATTR
TOPIC_NODE_FILE = "file"
ATTR_FILE_OLD = "oldfile"
ATTR_FILE_ROTATE = "rotate"
NODE_ATTRIBUTES = "attributes"
LIST_FILE_ATTR = [ATTR_FILE_OLD, ATTR_FILE_ROTATE] + LIST_ARTS_ATTR
LIST_ATTR = {
TOPIC_NODE_DB: LIST_DB_ATTR,
@@ -141,94 +306,137 @@ LIST_ATTR = {
TOPIC_NODE_CLI: LIST_CLI_ATTR,
TOPIC_NODE_FILE: LIST_FILE_ATTR
}
LIST_TOPIC_NODES = [TOPIC_NODE_FILE, TOPIC_NODE_DB, TOPIC_NODE_CLI, TOPIC_NODE_API]
ATTR_DB_CONN_JAR = "conn_jar_name"
""" optional attribute for connection-jar-file instead of connection by ip, port """
ATTR_CONN_HOST = "hostname"
""" optional attribute for connection-jar-file instead of connection by ip, port """
ATTR_CONN_TENANT = "tenant"
""" optional attribute for connection-jar-file instead of connection by ip, port """
ATTR_CONN_IP = "ip"
ATTR_CONN_PORT = "port"
ATTR_CONN_DOMPATH = "dompath"
""" directory where the component is stored in the filesystem """
ATTR_CONN_USER = "user"
ATTR_CONN_PASSWD = "password"
LIST_CONN_ATTR = [ATTR_DB_CONN_JAR, ATTR_CONN_HOST, ATTR_CONN_IP, ATTR_CONN_PORT, ATTR_CONN_DOMPATH, ATTR_CONN_USER, ATTR_CONN_PASSWD]
# the configuration of a component or tool
# entity { : variable name of the group, basic, component-name or tool-name
# + subject { : variable subject-name - it corresponds to a tool
# + + sub-subject { : variable subject-name - it corresponds to a tool
# + attributes : constant of the tool in which the attribute is implemented
# the main subjects # prog basic envir tool comp testcase main implementation module
SUBJECT_PATH = "paths" # | x | | x | | path_tool, config_tool
""" This constant defines the subject in order to define paths of filesystem of any testuse """
ATTR_PATH_MODE = "mode"
""" This constant defines the home-folder in filesystem of test """
ATTR_PATH_HOME = "home"
""" This constant defines the home-folder in testing-filesystem """
ATTR_PATH_DEBUG = "debugs"
""" This constant defines the debug-folder in testing-filesystem """
ATTR_PATH_ARCHIV = "archiv"
""" This constant defines the folder in testing-filesystem for results and log of execution """
ATTR_PATH_EXPECT = "expect"
""" This constant defines the folder in testing-filesystem for test-expectation values """
ATTR_PATH_PROGRAM = "program"
""" This constant defines the program-folder in the workspace """
ATTR_PATH_COMPS = "components"
""" This constant defines the subfolder in the program-folder in the workspace """
ATTR_PATH_ENV = "environment"
""" This constant defines the folder in testing-filesystem, used for configs related to environments """
ATTR_PATH_RELEASE = "release"
""" This constant defines the folder in testing-filesystem, used for configs related to release """
ATTR_PATH_TDATA = "testdata"
""" This constant defines the folder in testing-filesystem with the testcase-specifications """
ATTR_PATH_PATTN = "pattern"
""" This constant defines the debug-folder in testing-filesystem """
SUBJECT_APPS = "applications" # | x | | | | CompHandling
ATTR_APPS_PROJECT = "project" # | | | | x | CompHanlding
SUBJECT_INST = "instance" # | | | | x | CompHanlding
ATTR_INST_CNT = "count" # | | | | x | CompHanlding
ATTR_INST_SGL = "single"
LIST_INST_ATTR = [ATTR_INST_CNT, ATTR_INST_SGL]
SUBJECT_COMPS = "components"
ATTR_INST_SUBCOMP = SUBJECT_COMPS
#SUBJECT_FCT = "function" # | | | | x | main-programs
SUBJECT_ARTS = "artifact" # | | | | x | Component
"""
in this subject-node are each kind of result of any component with the structure:
* topic (db, cli, api, ...)
* * general attributes - to-know: technical attributes are stored in connection-tree
* * partial-component - to-know: the existence of db-tables can be defined in the ddl-tree
* * * specific attributes - it overrides the general attributes
"""
#ATTR_ARTS_DB = "db"
ATTR_ARTS_LOG = "log"
ATTR_ARTS_LOB = "lob"
ATTR_ARTS_FILE = "file"
#SUBJECT_DB = "databases" # | | | | # | db*_tools, match_tool
SUBJECT_CONN = "conn" # | | x | | | conn_tool, db*_tools, cli*_toold
ATTR_TYPE = "type" # | x | x | | x | conn_tool, toolHandling, db*_tools
# -- data nodes -----------------------------------------------
# ____ _ _ _ _
# | _ \ __ _| |_ __ _ | \ | | ___ __| | ___ ___
# | | | |/ _` | __/ _` |_____| \| |/ _ \ / _` |/ _ \/ __|
# | |_| | (_| | || (_| |_____| |\ | (_) | (_| | __/\__ \
# |____/ \__,_|\__\__,_| |_| \_|\___/ \__,_|\___||___/
#
# -------------------------------------------------------------
""" This constant defines a subnode of a table for the column-names """
CONF_NODE_GENERAL = "_general"
""" it defines a subnode of a table for the column-names """
DATA_NODE_HEADER = "_header"
""" it defines a subnode of a table for the field-list, used for ddl """
DATA_NODE_FIELDS = "_fields"
""" it defines a subnode of a table for the data as key-value-pair, keys from header """
DATA_NODE_DDLFIELDS = "_ddlfields"
""" it defines a subnode of a table for the data as key-value-pair, keys from header """
DATA_NODE_DATA = "_data"
""" it defines the main node in the testdata for the steps to execute """
DATA_NODE_STEPS = SUBJECT_STEPS # "_step" ## ?? SUBJECT
""" it defines main node in the testdata for testcase specific parameters """
DATA_NODE_OPTION = "_option" ## ?? SUBJECT
DATA_NODE_HEAD = "_head" ## ??
DATA_NODE_ARGS = "_arguments"
""" it defines arguments as internal key-value-pairs by delimted with :, used in steps """
""" This constant defines the main node in the testdata for the steps to execute """
DATA_NODE_TABLES = "_tables" # ?? SUBJECT
DATA_NODE_KEYS = "_keys"
""" This constant defines the node for data scheme (DataDefinitionLanguage)
The fields are defined in data_const (D) """
DATA_NODE_DDLKEYS = "_ddlkeys"
""" This constant defines the node for data scheme (DataDefinitionLanguage)
The fields are defined in data_const (D) """
DATA_NODE_DDL = "ddl" # ?? TOPIC
DATA_NODE_COMP = "comp" # ?? SUBJECT
""" This constant defines """
DATA_NODE_PAR = "par"
DATA_NODE_CATALOG = "_catalog"
DATA_NODE_ROW = "_row"
DATA_NODE_SUBTABLES = "subtables" # ?? SUBJECT
DATA_NODE_TOPICS = "topics"
DATA_NODE_PATH = "_path"
LIST_DATA_NODE = [DATA_NODE_HEADER, DATA_NODE_DATA, DATA_NODE_DDL, DATA_NODE_TOPICS,
DATA_NODE_TABLES, DATA_NODE_STEPS, DATA_NODE_OPTION, DATA_NODE_ROW, DATA_NODE_PATH]
RULE_ACCEPTANCE = "acceptance" # | | | | x | tools_match
# -- Parameter ------------------------------------------------
# ____ _
# | _ \ __ _ _ __ __ _ _ __ ___ ___| |_ ___ _ __
# | |_) / _` | '__/ _` | '_ ` _ \ / _ \ __/ _ \ '__|
# | __/ (_| | | | (_| | | | | | | __/ || __/ |
# |_| \__,_|_| \__,_|_| |_| |_|\___|\__\___|_|
#
# -------------------------------------------------------------
PAR_PROGRAM = 'program'
PAR_USER = 'user'
PAR_GRAN = "gran"
PAR_PROJ = 'project'
""" definition of the project which will be tested """
PAR_APP = 'application'
""" definition of the application which will be tested """
PAR_ENV = 'environment'
""" definition of the environment where the application runs which will be tested """
PAR_VAR = 'variant'
""" definition of a variant dataflow especially the channel """
PAR_REL = 'release'
""" definition of the release of the application which will be tested """
PAR_BASEDIR = 'basedir'
PAR_DIRNAME = 'dirname'
PAR_TSDIR = 'tsdir'
""" definition of the directory of the testsuite for summaries of the testcases """
PAR_TCDIR = 'tcdir'
""" definition of the directory of the testcase for logs and results """
PAR_XPDIR = 'xpdir'
""" definition of the directory of expectation for comparing the testcase """
PAR_WSDIR = "wsdir"
""" definition of the directory of the workspace """
PAR_TDTYP = 'tdtyp'
PAR_TDSRC = 'tdsrc'
PAR_TDNAME = 'tdname'
PAR_LOG = 'loglevel'
PAR_MODUS = 'modus'
PAR_COMP = 'component'
PAR_FCT = 'function'
PAR_TOOL = 'tool'
PAR_STEP = 'step'
PAR_DESCRIPT = 'desription'
PAR_STRING = 'parstring'
""" """
PAR_TESTPLAN = "testplan"
PAR_TESTCASE = "testcase"
PAR_TESTCASES = "testcases"
PAR_TESTSUITE = "testsuite"
# todo: separate testsuite and usecase, because of the n:1 mapping
PAR_TPTIME = "tptime"
PAR_TCTIME = "tctime"
PAR_TSTIME = "tstime"
PAR_TESTINSTANCES = "testinstances"
""" name of testcase extracted from PAR_TCDIR """
PAR_DB_WHERE = "dbwhere"
""" optional parameter with a where-clause """
PAR_DB_PARTITION = "dbparts"
""" optional parameter for partitions of a partitioned tables """
LIST_MAIN_PAR = [PAR_APP, PAR_ENV, PAR_VAR, PAR_REL, PAR_TCDIR, PAR_TSDIR]
ATTR_STEP_ARGS = "args"
ATTR_EXEC_REF = "_exec"
ATTR_DATA_REF = "_nr"
ATTR_DATA_COMP = "_comp"
# -- attributes -----------------------------------------------
# _ _ _ _ _
# __ _| |_| |_ _ __(_) |__ _ _| |_ ___ ___
# / _` | __| __| '__| | '_ \| | | | __/ _ \/ __|
# | (_| | |_| |_| | | | |_) | |_| | || __/\__ \
# \__,_|\__|\__|_| |_|_.__/ \__,_|\__\___||___/
#
# -------------------------------------------------------------
SUBJECT_TOOL = "tool"
# -- key-values -----------------------------------------------
# _ _
# | | _____ _ ___ ____ _| |_ _ ___ ___
# | |/ / _ \ | | \ \ / / _` | | | | |/ _ \/ __|
# | < __/ |_| |\ V / (_| | | |_| | __/\__ \
# |_|\_\___|\__, | \_/ \__,_|_|\__,_|\___||___/
# |___/
# -------------------------------------------------------------
BASIS_FILE = "basis"
SVAL_YES = "y"
SVAL_NO = "n"
SVAL_NULL = "null"
# -------------------------------------------------------------
# exception texts
EXP_NO_BASIS_FILE = "basis file cant be found"
EXCEPT_NOT_IMPLEMENT = "method is not implemented"
EXCEPT_NOT_INITIALIZED = "class is not initialized"
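
A hedged sketch of how the attribute lists above can be used to validate a topic-node of a component-config (the conf dict is invented):

    import basic.constants as B
    # sketch: check a db-node against the declared attribute names
    conf = {"type": "mysql", "schema": "testdb", "user": "tester"}
    allowed = B.LIST_ATTR[B.TOPIC_NODE_DB]   # LIST_DB_ATTR incl. LIST_ARTS_ATTR
    unknown = [a for a in conf if a not in allowed]
    assert unknown == []   # "type", "schema" and "user" are all declared above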

527
basic/message.py

@@ -20,110 +20,170 @@ import basic.program
import os
import math
from datetime import datetime
import utils.path_tool
import utils.i18n_tool
import basic.text_const as T
import tools.path_tool
#import tools.i18n_tool
#import basic.text_const as T
import basic.constants as B
LIMIT_FATAL = 0
MTEXT_FATAL = "fatal"
MTEXT_ERROR = "error"
MTEXT_WARN = "warn"
MTEXT_MSG = "msg"
MTEXT_INFO = "info"
MTEXT_DEBUG = "debug"
MTEXT_TRACE = "trace"
LIST_MTEXT = [MTEXT_FATAL, MTEXT_ERROR, MTEXT_WARN, MTEXT_MSG, MTEXT_INFO, MTEXT_DEBUG, MTEXT_TRACE, MTEXT_TRACE]
LIMIT_FATAL = 2
LIMIT_ERROR = 4
LIMIT_WARN = 8
LIMIT_MSG = 12
LIMIT_INFO = 16
LIMIT_DEBUG = 20
LIMIT_TRACE = 24
RC_OFFSET = 4
RC_FATAL = 8
RC_ERROR = 6
RC_WARN = 5
RC_MSG = 4
RC_INFO = 3
RC_DEBUG = 2
RC_TRACE = 1
LIMIT_WARN = 6
LIMIT_MSG = 8
LIMIT_INFO = 10
LIMIT_DEBUG = 12
LIMIT_TRACE = 14
LIMIT_XTRACE = 16
RC_FATAL = 3
RC_ERROR = 2
RC_WARN = 1
RC_MSG = 0
RC_INFO = 0
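# hedged note: string levels are mapped onto these limits in Message.__init__
# below via (LIST_MTEXT.index(name) + 1) * 2, e.g. "warn" -> 6 == LIMIT_WARN
# and "debug" -> 12 == LIMIT_DEBUG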
class TempMessage:
    """
    simple implementation just to print the first messages into a temporary debugfile,
    in order to get a message-object before the job information is set
    """
    def __init__(self, job, logTime):
        # (self, componente, out, level):
        self.job = job
        self.level = LIMIT_DEBUG
        self.openDebug(job, logTime)
    def openDebug(self, job, logTime):
        path = os.path.join(B.HOME_PATH, "temp")
        if not os.path.exists(path):
            os.mkdir(path)
        self.debugpath = os.path.join(path, "debug_"+logTime+".txt")
        self.debugfile = open(self.debugpath, "w")
    def getLogLevel(self, tool="", comp=None):
        return 0
    def logFatal(self, text):
        self.debug(LIMIT_FATAL, "FATAL: " + text)
    def logError(self, text):
        self.debug(LIMIT_ERROR, "ERROR: " + text)
    def setError(self, text):
        self.debug(LIMIT_ERROR, "ERROR: " + text)
    def logWarn(self, text):
        self.debug(LIMIT_WARN, "WARN: " + text)
    def logMsg(self, text):
        self.debug(LIMIT_MSG, "MSG: " + text)
    def logInfo(self, text):
        self.debug(LIMIT_INFO, text)
    def logDebug(self, prio, text=""):
        mprio = LIMIT_DEBUG
        mtext = str(prio)
        if len(text) > 1:
            mtext = text
        if isinstance(prio, int):
            mprio = int(prio)
        self.debug(mprio, mtext)
    def logTrace(self, prio, text):
        pass
    def logXTrace(self, prio, text):
        pass
    def debug(self, prio, text):
        """ the actual write routine: the debug-level is taken into account here """
        try:
            if prio <= self.level:
                self.debugfile.write(text + "\n")
        except:
            print("debug closed "+text)
    def closeMessage(self):
        self.debug(LIMIT_INFO, "closeMessage ------------------------------------------- \n")
        self.debugfile.close()
class Message:
"""
output is written priority-controlled, based on
* type (fatal..trace)
* setting (a) via parameter OR (b) in the function
the setting is made in the function header, e.g. verify=job.getDebugLevel (possibly no debug output) or verify=job.getDebugLevel-1 (debug output enabled)
"fatal": "4", # abort error, always written to debug and log, sets the RC
"error": "8", # error, always written to debug and log, sets the RC
"warn": "12", # warning, always written to debug and log, sets the RC
"msg": "16", # result, always written to debug and log, sets the RC
"info": "20", # info, written to debug and log if applicable, sets the RC
"debug": "24", # written to debug only if the log level is at most the configured job level
"trace": "28", # written to debug only if the log level is at most the configured job level
"""
def __init__(self, job, level, logTime, componente):
# (self, componente, out, level):
the setting is made in the function header, e.g. verify=job.getDebugLevel (possibly no debug output) or verify=job.getMessageLevel-1 (debug output enabled)
"fatal": "3", # abort error, always written to debug and log, sets the RC
"error": "2", # error, always written to debug and log, sets the RC
"warn": "1", # warning, always written to debug and log, sets the RC
"msg": "0", # result, always written to debug and log, sets the RC
"info": "0", # info, written to debug and log if applicable, sets the RC
"debug": "0", # written to debug only if the log level is at most the configured job level
"trace": "0", # written to debug only if the log level is at most the configured job level
"""
def __init__(self, job, level, logTime, compName=None):
# (self, compName, out, level):
self.job = job
self.componente = componente # decentralized logging system
self.compName = compName # decentralized logging system
verify = LIMIT_DEBUG
self.initErrorTyp()
self.rc = RC_INFO
if isinstance(level, str):
i = 1
for l in LIST_MTEXT:
if level.lower() == l:
break
i += 1
level = i * 2
if (level == 0):
self.level = LIMIT_DEBUG
else:
self.level = level
# init debugfile - except for each component
if componente is not None: # use general debugfile
self.debugfile = job.m.debugfile
self.debug(verify, "> > > debugfile uebernommen zu " + str(componente))
else:
debugpath = job.conf.confs["paths"]["debugs"] + "/debug_" + logTime[0:-4] + "00.txt"
print ("debugpathx "+debugpath)
if os.path.exists(debugpath):
self.debugfile = open(debugpath, "a")
else:
self.debugfile = open(debugpath, "w")
self.debug(verify, "> > > debugfile geoeffnet zu " + job.program + " mit " + debugpath)
# init logfile - except for components or unittest
self.logDebug("logfile " + str(componente) + ", " + str(job.par.basedir))
if componente is not None: #
self.logfile = self.debugfile
elif job.program == "unit":
self.logfile = self.debugfile
else:
self.setLogdir(job, logTime)
self.openDebug(job, logTime, compName)
self.openLog(job, logTime, compName)
self.topmessage = ""
def openDebug(self, job, logTime, compName):
path = job.conf[B.TOPIC_PATH][B.ATTR_PATH_DEBUG]
if not os.path.exists(path):
os.mkdir(path)
logTime = logTime[0:11] + "0000"
if compName is None:
if hasattr(job.m, "debugpath"):
self.debugpath = job.m.debugpath
self.debugfile = job.m.debugfile
return
else:
self.debugpath = os.path.join(path, "debug_"+logTime+".txt")
else:
self.debugpath = os.path.join(path, "debug_" + compName + "_" + logTime + ".txt")
if os.path.exists(self.debugpath):
self.debugfile = open(self.debugpath, "a")
else:
self.debugfile = open(self.debugpath, "w")
def openLog(self, job, logTime, compName):
if not hasattr(job, "par"):
return
pathPattern = job.programDef["logpath"]
path = tools.path_tool.compose_path(job, pathPattern, None)
parent = os.path.dirname(path)
if not os.path.exists(parent):
os.makedirs(parent)
if compName is None:
self.logpath = path
self.logfile = open(path, "w")
else:
self.messages = []
print("message initialisiert mit level " + str(self.level))
def setLogdir(self, job, logTime):
basedir = job.par.basedir
basedir = basedir.replace("base", "log")
# basedir = utils.path_tool.composePath(basedir, None)
basedir = utils.path_tool.composePath(job, basedir, None)
os.makedirs(basedir, exist_ok=True)
logpath = os.path.join(basedir , job.program + "_" + logTime + ".txt")
self.logDebug("logfile " + logpath)
self.logfile = open(logpath, "w")
def initErrorTyp(self):
self.CONST_ERRTYP = {
"fatal": "4", # wird immer in debug und log ausgegeben, setzt RC
"error": "8", # wird immer in debug und log ausgegeben, setzt RC
"warn": "12", # wird immer in debug und log ausgegeben, setzt RC
"msg": "16", # wird immer in debug und log ausgegeben, setzt RC
"info": "20", # wird immer in debug und log ausgegeben, setzt RC
"debug": "24", # wird nur in debug ausgegeben, wenn log-level hoechstens auf eingestelltem job-level steht
"trace": "28", # wird nur in debug ausgegeben, wenn log-level hoechstens auf eingestelltem job-level steht
"rc1": "fatal",
"rc2": "error",
"rc3": "warn",
"rc4": "info",
"rc5": "debug",
"rc6": "trace",
"1": "fatal",
"2": "error",
"3": "warn",
"4": "info",
"5": "debug",
"6": "trace"
}
def getErrortyp(self, prio):
if prio <= LIMIT_FATAL:
@@ -148,31 +208,54 @@ class Message:
self.logfile.close()
self.debugfile.close()
def getLogLevel(self, tool="", comp=None):
"""
gets the increasing level depending on tool and component;
if these arguments match a job parameter, the level decreases
:param tool:
:param comp:
:return:
"""
out = 0
job = self.job
if not hasattr(job, "par"):
return out
if comp is not None and hasattr(job.par, "component") and comp.name in getattr(job.par, "component"):
out += 2
if tool != "" and hasattr(job.par, "tool") and tool in getattr(job.par, "tool"):
out += 2
return out
def setRc(self, rc, text):
job = self.job #basic.program.Job.getInstance()
verify = -0+LIMIT_DEBUG
self.debug(verify, "setRc " + str(rc) + " " + str(self.rc)+ "\n")
if (int(rc) > self.rc):
if (int(rc) > self.rc) or (self.topmessage == ""):
self.rc = rc
self.topmessage = self.CONST_ERRTYP["rc"+str(rc)].upper() + ": " + text
elif (int(rc) == self.rc):
self.topmessage = self.CONST_ERRTYP["rc"+str(rc)].upper() + ": " + text
def isRc(self, rc):
rcId = int(int(self.CONST_ERRTYP[rc]) / 4 - RC_OFFSET)
print("< < < isRc " + str(self.rc) + " <=? " + str(rcId))
if self.rc <= int(rcId):
print("return True")
i = rc * -1 + 3
if i < 0: return
self.topmessage = LIST_MTEXT[i].upper() + ": " + text
def isRc(self, returnCode):
"""
checks whether the set return code is compatible with the requested return code
example: set WARN, requested ERROR is OK=True; the other way round: KO=False
:param returnCode:
:return:
"""
if isinstance(returnCode, int):
rcId = returnCode
else:
rcId = ( LIST_MTEXT.index(returnCode.lower()) - 3 ) * (-1)
if self.rc >= int(rcId) or rcId < 0:
return True
else:
print("return False")
return False
def getFinalReturncode(self):
RETURN_TEXT = ["OK", "WARN", "ERROR", "FATAL"]
return RETURN_TEXT[self.rc]
def getFinalRc(self):
if (self.rc <= RC_OFFSET):
return 0
else:
return int(int(self.rc) - RC_OFFSET)
RETURN_TEXT = ["OK", "WARN", "ERROR", "FATAL"]
return int(self.rc)
def setFatal(self, text):
""" Routine zum Setzen des RC und gleichzeitigem Schreiben des Logs """
@@ -192,55 +275,213 @@ class Message:
def setMsg(self, text):
""" Routine zum Setzen des RC und gleichzeitigem Schreiben des Logs """
self.setRc(RC_MSG, text)
self.logInfo(text)
def getMessageText(self, text, args):
out = ""
constName = ""
for i in range(0, len(T.LIST_EXP_TEXT)):
if text == T.LIST_EXP_TEXT[i]:
constName = T.LIST_EXP_CONST[i]
txt = utils.i18n_tool.I18n.getInstance(job).getMessage(self.job, constName, args)
out = txt.format(args)
return out
self.logMsg(text)
def logFatal(self, text):
self.log(LIMIT_FATAL, "FATAL: " + text)
self.debug(LIMIT_FATAL, "FATAL: " + text)
def getMessageText(self, job, text, args):
return text
def logError(self, text):
self.log(LIMIT_ERROR, "ERROR: " + text)
self.debug(LIMIT_ERROR, "ERROR: " + text)
def logFatal(self, prio, text=""):
"""
it logs a fatal error to the logfile and the debugfile -
please use setFatal() in order to set the return code
FATAL means the program cannot finish stably
:param prio: optional int [-2..+2]; increases/decreases the required log level relative to the parametrized level
:param text: mandatory; if it is not set, the text is taken from prio
:return:
"""
self.log(LIMIT_FATAL, prio, "FATAL: " + text)
self.debug(LIMIT_FATAL, prio, "FATAL: " + text)
def logWarn(self, text):
self.log(LIMIT_WARN, "WARN: " + text)
def logError(self, prio, text=""):
"""
it logs an error to the logfile and the debugfile -
please use setError() in order to set the return code
ERROR means the program finishes stably, but with an incorrect result
:param prio: optional int [-2..+2]; increases/decreases the required log level relative to the parametrized level
:param text: mandatory; if it is not set, the text is taken from prio
:return:
"""
self.log(LIMIT_ERROR, prio, "ERROR: " + text)
self.debug(LIMIT_ERROR, prio, "ERROR: " + text)
def logMsg(self, text):
self.log(LIMIT_MSG, text)
self.log(LIMIT_MSG, "MSG: " + text)
def logWarn(self, prio, text=""):
"""
it logs a warning to the logfile and the debugfile -
please use setWarn() in order to set the return code
WARNING means the program can finish correctly and stably, but with points to check - especially detected business faults
:param prio: optional int [-2..+2]; increases/decreases the required log level relative to the parametrized level
:param text: mandatory; if it is not set, the text is taken from prio
:return:
"""
self.log(LIMIT_WARN, prio, "WARN: " + text)
self.debug(LIMIT_WARN, prio, "WARN: " + text)
def logInfo(self, text):
self.log(LIMIT_INFO, text)
def logMsg(self, prio, text=""):
"""
it logs a message to the logfile and the debugfile -
please use setMsg() in order to set the return code
MESSAGE means the program can finish without any points to check manually afterwards;
in contrast to INFO it logs a working result
:param prio: optional int [-2..+2]; increases/decreases the required log level relative to the parametrized level
:param text: mandatory; if it is not set, the text is taken from prio
:return:
"""
self.log(LIMIT_MSG, prio, "MSG: " + text)
self.debug(LIMIT_MSG, prio, "MSG: " + text)
def logDebug(self, text):
self.debug(LIMIT_DEBUG, text)
def logInfo(self, prio, text=""):
"""
it logs a message to the logfile and the debugfile without setting the return code
INFO means the program can finish without any points to check manually afterwards;
in contrast to MESSAGE it logs just a working step relating to the test application
:param prio: optional int [-2..+2]; increases/decreases the required log level relative to the parametrized level
:param text: mandatory; if it is not set, the text is taken from prio
:return:
"""
self.log(LIMIT_INFO, prio, text)
self.debug(LIMIT_INFO, prio, text)
def logTrace(self, text):
self.debug(LIMIT_TRACE, text)
def logDebug(self, prio, text=""):
"""
it logs a message to the debugfile without setting the return code
DEBUG means a working step;
in contrast to INFO it logs a working step without any relevance to the test application
:param prio: optional int [-2..+2]; increases/decreases the required log level relative to the parametrized level
:param text: mandatory; if it is not set, the text is taken from prio
:return:
"""
self.debug(LIMIT_DEBUG, prio, text)
def log(self, prio, text):
""" eigentliche Schreibroutine: hierin wird debug-Level beruecksichtgigt"""
if (int(prio) <= int(self.level)) and (self.componente is None): # and self.logfile.closed == False:
self.logfile.write(text + "\n")
elif (int(prio) <= int(self.level)):
self.messages.append(text)
def logTrace(self, prio, text=""):
"""
it logs a message to the debugfile without setting the return code
TRACE means a working step with some relevant data;
in contrast to DEBUG it also logs some relevant controlling data
:param prio: optional int [-2..+2]; increases/decreases the required log level relative to the parametrized level
:param text: mandatory; if it is not set, the text is taken from prio
:return:
"""
self.debug(LIMIT_TRACE, prio, text)
def logXTrace(self, prio, text=""):
"""
it logs a message to the debugfile without setting the return code
XTRACE means a working step with a lot of data;
in contrast to TRACE it logs more than just the known relevant controlling data
:param prio: optional int [-2..+2]; increases/decreases the required log level relative to the parametrized level
:param text: mandatory; if it is not set, the text is taken from prio
:return:
"""
self.debug(LIMIT_XTRACE, prio, text)
def getLoggingArgs(self, mlevel, prio, text):
verify = self.getLogLevel("msg_tool")
if verify:
self.debugfile.write("getLoggingArgs: " + str(mlevel)+ ", "+self.format2Str(prio)+", "+self.format2Str(text))
out = {}
prefix = ""
if isinstance(mlevel, int):
out["mlevel"] = mlevel
else:
raise Exception("argument mlevel is not int "+str(mlevel))
if len(text) < 1:
out["mprio"] = 0
if isinstance(prio, int):
txt = str(prio)
elif isinstance(prio, dict):
txt = self.formatDict2Str(prio)
elif isinstance(prio, str):
txt = prio
elif isinstance(prio, list):
txt = self.formatList2Str(prio)
else:
txt = str(prio)
else:
if isinstance(prio, int):
out["mprio"] = prio
else:
out["mprio"] = 0
prefix = text
text = prio
if isinstance(text, dict):
txt = self.formatDict2Str(text)
elif isinstance(text, str):
txt = text
elif isinstance(text, list):
txt = self.formatList2Str(text)
else:
txt = str(text)
if len(prefix) > 1:
if ":" in prefix:
out["mtext"] = prefix.strip() + " " + txt.strip()
else:
self.debug(prio, self.getErrortyp(prio) + ": " + text)
out["mtext"] = prefix.strip() + ": " + txt.strip()
else:
out["mtext"] = txt
return out
def debug(self, prio, text):
def format2Str(self, elem):
if isinstance(elem, dict):
return self.formatDict2Str(elem)
elif isinstance(elem, str):
return elem
elif isinstance(elem, list):
return self.formatList2Str(elem)
else:
return str(elem)
def formatDict2Str(self, args):
txt = "{"
for k in args:
if isinstance(args[k], dict):
txt += k + ": " + self.formatDict2Str(args[k]) + ", "
elif isinstance(args[k], str):
txt += k + ": " + args[k] + ", "
elif isinstance(args[k], list):
txt += k + ": " + self.formatList2Str(args[k]) + ", "
else:
txt += k + ": " + str(args[k]) + ", "
return txt[0:-2] + "}"
def formatList2Str(self, args):
txt = "["
for k in args:
if isinstance(k, dict):
txt += self.formatDict2Str(k) + ", "
elif isinstance(k, str):
txt += k + ", "
elif isinstance(k, list):
txt += self.formatList2Str(k) + ", "
else:
txt += str(k) + ", "
return txt[0:-2] + "]"
def log(self, mlevel, prio, text):
args = self.getLoggingArgs(mlevel, prio, text)
""" eigentliche Schreibroutine: hierin wird debug-Level beruecksichtgigt"""
if (int(prio) < int(self.level)+1) : #and self.debugfile.closed == False:
self.debugfile.write(text + "\n")
if (args["mlevel"] + args["mprio"] > int(self.level)):
return
elif (self.compName is None): # and self.logfile.closed == False:
try:
self.logfile.write(args["mtext"] + "\n")
except:
pass
else:
self.messages.append(text)
def debug(self, mlevel, prio, text=""):
verify = self.getLogLevel("msg_tool")
args = self.getLoggingArgs(mlevel, prio, text)
if verify:
self.debugfile.write("m.debug "+self.format2Str(args)+" >? "+str(self.level)+"\n")
if (args["mlevel"] - args["mprio"] > int(self.level)):
return
if (args["mprio"] + 20) % 2 == 1:
print(args["mtext"])
try:
self.debugfile.write(args["mtext"] + "\n")
except:
raise Exception("debugfile closed: "+args["mtext"])
def resetLog(self):
self.messages = []
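
For orientation, here is a minimal standalone sketch (not part of the diff) of the gate the new Message.debug applies: a record is written only if mlevel - mprio <= the job level. The constants mirror the new LIMIT_* scheme (fatal=2 .. xtrace=16), and mprio is the optional [-2..+2] offset described in the docstrings above.

LIMITS = {"fatal": 2, "error": 4, "warn": 6, "msg": 8, "info": 10, "debug": 12, "trace": 14, "xtrace": 16}

def would_write(mlevel, mprio, job_level):
    # mirrors: if args["mlevel"] - args["mprio"] > int(self.level): return
    return mlevel - mprio <= job_level

assert would_write(LIMITS["info"], 0, LIMITS["info"])       # info passes at level info
assert not would_write(LIMITS["debug"], 0, LIMITS["info"])  # debug is suppressed at level info
assert would_write(LIMITS["debug"], 2, LIMITS["info"])      # prio +2 lifts debug through the gate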

basic/program.py (552 lines changed)

@@ -9,265 +9,240 @@
#import sys, getopt
import argparse
import copy
import subprocess
import time
import yaml
import os
from datetime import datetime
import basic.constants as B
import basic.message
import basic.componentHandling
import utils.date_tool
import utils.path_tool
import utils.file_tool
import utils.config_tool
import test.constants as T
jobdef = {
"webflask": {
"pardef": "",
"pfilesource": "",
"pfiletarget": "",
"basedir": "workbase",
"dirname": "workdir",
"logdir": "{job.par.debugs}/webflask/log_{time}.txt" },
"unit": {
"pardef": "",
"pfilesource": "",
"pfiletarget": "envparfile",
"basedir": "envbase",
"dirname": "envdir",
"logdir": "{job.par.envdir}/{log}/log_{time}.txt" },
"check_environment": {
"pardef": "",
"pfilesource": "",
"pfiletarget": "envparfile",
"basedir": "envbase",
"dirname": "envdir",
"logdir": "{job.par.envdir}/{log}/log_{time}.txt" },
"test_executer": {
"pardef": "",
"pfilesource": "tsparfile",
"pfiletarget": "tsparfile",
"basedir": "tsbase",
"dirname": "tsdir",
"logdir": "{job.par.tsdir}/{log}/log_{time}.txt" },
"init_testsuite": {
"pardef": "tsdir", # ,tdtyp,tdsrc,tdname",
"pfilesource": "envparfile",
"pfiletarget": "tsparfile",
"basedir": "tsbase",
"dirname": "tsdir",
"logdir": "{job.par.tsdir}/{log}/log_{tstime}.txt" },
"init_testcase": {
"pardef": "tcdir", # ",tdtyp,tdsrc,tdname",
"pfilesource": "envparfile",
"pfiletarget": "tcparfile",
"basedir": "tcbase",
"dirname": "tcdir",
"logdir": "{job.par.tcdir}/{log}/log_{tctime}.txt" },
"execute_testcase": {
"pardef": "tcdir", # ",tdtyp,tdsrc,tdname",
"pfilesource": "tcparfile",
"pfiletarget": "tcparfile",
"basedir": "tcbase",
"dirname": "tcdir",
"logdir": "{job.par.tcdir}/{log}/log_{tctime}.txt" },
"collect_testcase": {
"pardef": "tcdir", # ",tdtyp,tdsrc,tdname",
"pfilesource": "tcparfile",
"pfiletarget": "tcparfile",
"basedir": "tcbase",
"dirname": "tcdir",
"logdir": "{job.par.tcdir}/{log}/log_{tctime}.txt" },
"compare_testcase": {
"pardef": "tcdir", # ",tdtyp,tdsrc,tdname",
"pfilesource": "tcparfile",
"pfiletarget": "tcparfile",
"basedir": "tcbase",
"dirname": "tcdir",
"logdir": "{job.par.tcdir}/{log}/log_{tctime}.txt" },
"test_system": {
"pardef": "tcdir,tdtyp,tdsrc,tdname",
"pfilesource": "tsparfile",
"pfiletarget": "tsparfile",
"basedir": "tcbase",
"dirname": "tcdir",
"logdir": "{job.par.tcdir}/{log}/log_{tctime}.txt"},
"finish_testcase": {
"pardef": "tcdir",
"pfilesource": "tcparfile",
"pfiletarget": "tcparfile",
"basedir": "tcbase",
"dirname": "tcdir",
"logdir": "{job.par.tcdir}/{log}/log_{tctime}.txt"},
"finish_testsuite": {
"pardef": "tsdir",
"pfilesource": "tsparfile",
"pfiletarget": "tssarfile",
"basedir": "tsbase",
"dirname": "tsdir",
"logdir": "{job.par.tcdir}/{log}/log_{tctime}.txt"}
}
def setGlobal():
pass
import model.catalog
import tools.value_tool
import tools.date_tool
import tools.path_tool
import tools.file_tool
import tools.config_tool
import tools.path_const as P
import tools.job_tool
LIMIT_INFO = 16 #basic.message.LIMIT_INFO
LIMIT_DEBUG = 12 #basic.message.LIMIT_DEBUG
TOOL_NAME = "job_tool"
CTLG_NAME = "programs"
CTLG_PARDEF = "pardef"
CTLG_PARSOURCE = "pfilesource"
CTLG_PARTARGET = "pfiletarget"
CTLG_BASEDIR = "basedir"
CTLG_LOGPATH = "logpath"
CTLG_LOGLEVEL = "loglevel"
EXCP_CANT_POP = "cant pop this job from the instances"
DEFAULT_ARCHIV_DIR = T.DATA_PATH + "/lauf"
DEFAULT_GRAN = "ws"
DEFAULT_PRG = "webflask"
DEFAULT_APP = "WEBFLASK"
DEFAULT_ENV = "Workspace"
DEFAULT_MODE = "test"
DEFAULT_TIME = "2022-08-29_17-29-59"
def createJob(pprg="", pgran="", papp="", penv="", ptstamp="", pmode=""):
class SimpleJob:
"""
this creates a Job-Object with the main arguments.
:param pprg: program-name
:param pgran: tc|ts|
:param papp: application
:param penv: environment
:param ptstamp: timestamp - part of specific testfolder
:param pmode: if it is a productive or development execution
:return:
the simple job is only used for testing purposes
"""
if len(pprg) < 1:
prgname = DEFAULT_PRG
else:
prgname = pprg
if len(pgran) < 1:
gran = DEFAULT_GRAN
else:
gran = pgran
if len(papp) < 1:
app = DEFAULT_APP
else:
app = papp
if len(penv) < 1:
env = DEFAULT_ENV
else:
env = penv
if len(ptstamp) < 1:
tstamp = DEFAULT_TIME
else:
tstamp = ptstamp
if len(pmode) < 1:
mode = DEFAULT_MODE
else:
mode = pmode
if gran == "tc":
path = DEFAULT_ARCHIV_DIR + "/TC0001/" + tstamp
elif gran == "ts":
path = DEFAULT_ARCHIV_DIR + "/testlauf/TST001_" + tstamp
__logtime = "20200101_000000"
def __init__(self, program, username="", args=None):
self.program = program
self.username = username
path = tools.path_tool.getBasisConfigPath()
self.conf = getConfiguration(self, path)
self.jobid = str(100000)
try:
catalog = model.catalog.Catalog.getInstance()
self.programDef = catalog.getValue(self, CTLG_NAME, program, "")
except:
self.m = basic.message.TempMessage(self, "testtime")
pass
if args is not None:
if "par" in args:
self.par = Parameter(self, args["par"])
for k in args:
if k == "par":
continue
setattr(self, k, args[k])
else:
path = T.DATA_PATH + "/workspace/"
self.par = Parameter(self, None)
if not hasattr(self, "start"):
logTime = tools.date_tool.getActdate(tools.date_tool.F_LOG)
while logTime <= SimpleJob.__logtime:
time.sleep(1)
logTime = tools.date_tool.getActdate(tools.date_tool.F_LOG)
self.start = logTime
job = basic.program.Job(prgname)
# job.conf.confs[B.SUBJECT_PATH]["components"] = T.COMP_PATH
args = {"application": app, "environment": env, "modus": mode, gran + "time": tstamp,
gran + "dir": path,
"step": 1}
print(str(args))
# "usecase": "TST001", "tstime": "2022-03-17_17-28"}
job.par.setParameterArgs(job, args)
return job
def getDebugLevel(self, arg):
return 12
def debug(self, prio, text):
pass
def getParameter(self, parameter):
if hasattr(self.par, parameter) and getattr(self.par, parameter) is not None:
return getattr(self.par, parameter)
else:
val = tools.value_tool.compose_pattern(self, parameter, None)
if val is None:
self.m.logError("Parameter "+parameter+" nicht in job.par ")
return
setattr(self.par, parameter, val)
return val
class SimpleParameter():
def __init__(self, job, args=None):
self.program = job.program
class Job:
__instance = None
__instances = []
__jobid = 100000
__logtime = "20200101_000000"
#catalog = model.catalog.Catalog.getInstance()
def __init__ (self, program):
print ("################# init Job ## " + program + " #################")
def __init__ (self, program, username="", args=None):
"""
initializes the job object as a global structure for the current run
type of job:
a) (single) job for testing work [--args]
work-folder is the testcase-folder, logging into testcase-log
b) (single) job for reorganization of the testing area [--args]
working-folder is the workspace or the environment-folder, logging is there
c) webflask-job for the web-server --args
work-folder is the web-workspace,
if real jobs are created, they run as testing resp. reorganization jobs
d) unit-testing job --args!
work-folder is the debug-area
if real jobs are created for unit-testing, they run in unit mode like a unit-test
:param program:
:param args: optional with arguments otherwise the cli-parameter
"""
# postcondition
# - job.conf with basis config
# - job.par with parameter-args or cli-args
# - job.msg
# logTime must be unique, because log files and directories must be unique
logTime = tools.date_tool.getActdate(tools.date_tool.F_LOG)
while logTime <= Job.__logtime:
time.sleep(1)
logTime = tools.date_tool.getActdate(tools.date_tool.F_LOG)
self.start = logTime
self.m = basic.message.TempMessage(self, logTime)
Job.__jobid += 1
self.jobid = str(Job.__jobid)
if len(program) < 3:
print("FATAL: programname is missing")
exit(3)
self.program = program
Job.__instance = self
if Job.__instances is None:
Job.__instances = []
#Job.pushInstance(self)
par = Parameter(self, program)
self.par = par
print("prog-42 " + str(self.par.basedir))
conf = Configuration(self, program)
self.conf = conf
appl = utils.config_tool.getConfig(self, "basic", B.SUBJECT_APPS)
print(appl)
if appl is not None:
self.conf.confs[B.SUBJECT_APPS] = appl[B.SUBJECT_APPS]
print("prog-45 " + str(self.par.basedir))
dirpath = self.par.getDirParameter()
setGlobal()
if dirpath is not None:
utils.path_tool.extractPath(dirpath[0], dirpath[1])
if program == "unit": # no job will be started
self.start = datetime.now()
logTime = self.start.strftime("%Y%m%d_%H%M%S")
if len(username) < 2:
self.username = os.getlogin()
path = tools.path_tool.getBasisConfigPath()
self.conf = getConfiguration(self, path)
catalog = model.catalog.Catalog.getInstance()
print("program "+program)
self.programDef = catalog.getValue(self, CTLG_NAME, program, "")
try:
path = tools.config_tool.select_config_path(self, P.KEY_BASIC, B.BASIS_FILE)
self.conf = self.getConfiguration(path)
except:
print("FATAL: config-file could not be loaded")
exit(3)
if args is not None:
self.setParameter(args)
if isinstance(self.programDef, dict):
self.m = basic.message.Message(self, self.programDef[CTLG_LOGLEVEL], logTime, None)
else:
self.m = basic.message.Message(self, basic.message.LIMIT_DEBUG, logTime, None)
print("prog-50 " + str(self.par.basedir))
def getLogpath(self):
path = tools.path_tool.compose_path(self, self.programDef[CTLG_LOGPATH], None)
return path
def getConfiguration(self, path):
conf = {}
conf["configpath"] = []
if hasattr(self, "conf"):
conf = self.conf
conf["configpath"].append(path)
doc = tools.file_tool.read_file_dict(self, path, None, "basic")
if "basic" in doc:
for k, v in doc["basic"].items():
if k not in conf:
conf[k] = v
else:
for k, v in doc.items():
if k not in conf:
conf[k] = v
return conf
def setProgram(self, program):
self.program = program
basedir = jobdef[program]["basedir"]
basedir = self.programDef[CTLG_BASEDIR]
self.basedir = basedir
if (self.par is not None):
setattr(self.par, "program", program)
setattr(self.par, "basedir", basedir)
parstring = getattr(self.par, "parstring")
if (self.par is not None) and self.par in B.LIST_MAIN_PAR:
setattr(self.par, B.PAR_PROGRAM, program)
setattr(self.par, B.PAR_BASEDIR, basedir)
parstring = getattr(self.par, B.PAR_STRING)
parstring = parstring[parstring.find("--"):]
parstring = "python "+program+" "+parstring
setattr(self.par, "parstring", parstring)
if not hasattr(self.par, jobdef[program]["dirname"]):
setattr(self.par, jobdef[program]["dirname"],
utils.path_tool.composePattern(self, "{"+basedir+"}", None))
if not hasattr(self.par, self.programDef[B.PAR_DIRNAME]):
setattr(self.par, self.programDef[B.PAR_DIRNAME],
tools.path_tool.compose_path(self, "{"+basedir+"}", None))
self.par.setParameterLoaded(self)
def startJob(self):
self.start = datetime.now()
print("prog-68 " + str(self.par.basedir))
logTime = self.start.strftime("%Y%m%d_%H%M%S")
self.m = basic.message.Message(self, basic.message.LIMIT_DEBUG, logTime, None)
print("prog-68 " + str(self.m.rc))
tools.job_tool.startJobProcesses(self)
if self.programDef[CTLG_PARSOURCE] != "":
self.par.setParameterLoaded(self)
self.m.logInfo("# # # Start Job " + self.start.strftime("%d.%m.%Y %H:%M:%S") + " # # # ")
self.m.debug(basic.message.LIMIT_INFO, "# # # Start Job " + self.start.strftime("%d.%m.%Y %H:%M:%S") + " # # # ")
header1 = "# # # Start " + self.program + " # "
header1 += tools.date_tool.formatParsedDate(str(self.start), tools.date_tool.F_DE_TSTAMP) + " # # # "
self.m.logInfo(header1)
print(header1)
self.par.checkParameter(self)
self.m.logInfo(self.par.parstring)
def stopJob(self, reboot=0):
tools.job_tool.stopJobProcesses(self)
self.ende = datetime.now()
if self.programDef[CTLG_PARTARGET] != "":
self.dumpParameter()
print("stopJob " + str(self.m.messages) + ", " + str(self.m.debugfile))
self.m.logInfo("# # " + self.m.topmessage + " # # # ")
self.m.logInfo("# # # Stop Job " + self.start.strftime("%d.%m.%Y %H:%M:%S") + " # " + self.ende.strftime("%d.%m.%Y %H:%M:%S") + " # # # ")
self.m.debug(basic.message.LIMIT_INFO, "# # " + self.m.topmessage + " # # # ")
self.m.debug(basic.message.LIMIT_INFO, "# # # Stop Job " + self.start.strftime("%d.%m.%Y %H:%M:%S") + " # " + self.ende.strftime("%d.%m.%Y %H:%M:%S") + " # # # RC: " + str(self.m.getFinalRc()))
footer1 = "# # " + self.m.topmessage + " # # # "
footer2 = "# # # Stop " + self.program + " # "
footer2 += tools.date_tool.formatParsedDate(str(self.start), tools.date_tool.F_DE_TSTAMP)
footer2 += " # " + tools.date_tool.formatParsedDate(str(self.ende), tools.date_tool.F_DE_TSTAMP) + " # # # "
footer2 += " # # # RC: " + self.m.getFinalReturncode()
self.m.logDebug("Logpath: "+self.m.logpath)
self.m.logInfo(footer1)
self.m.logInfo(footer2)
self.m.closeMessage()
rc = self.m.getFinalRc()
print ("rc " + str(rc))
print("Logpath: "+self.m.logpath)
print(footer1)
print(footer2)
if "editor" in self.conf[B.TOPIC_PATH]:
subprocess.Popen([self.conf[B.TOPIC_PATH]["editor"], str(self.m.logpath)])
if reboot == 0:
exit(rc)
def dumpParameter(self):
parpath = utils.path_tool.composePath(self, jobdef[self.program]["pfiletarget"], None)
if len(self.programDef[CTLG_PARTARGET]) < 2:
return
parpath = tools.path_tool.compose_path(self, self.programDef[CTLG_PARTARGET], None)
output = {}
cconf = basic.componentHandling.getComponentDict()
output["par"] = self.par.__dict__
if len(cconf) < 1:
utils.file_tool.writeFileDict(self.m, self, parpath, output)
tools.file_tool.write_file_dict(self.m, self, parpath, output)
return
output[B.SUBJECT_COMPS] = {}
for c in cconf:
@@ -276,31 +251,41 @@ class Job:
if x not in cconf[c]:
continue
output[B.SUBJECT_COMPS][c][x] = cconf[c][x]
if x == B.SUBJECT_CONN and "passwd" in cconf[c][x]:
if x == B.TOPIC_CONN and "passwd" in cconf[c][x]:
cconf[B.SUBJECT_COMPS][c][x]["passwd"] = "xxxxx"
utils.file_tool.writeFileDict(self.m, self, parpath, output)
tools.file_tool.write_file_dict(self.m, self, parpath, output)
def loadParameter(self):
output = {}
if len(str(jobdef[self.program]["pfilesource"])) < 2:
if len(str(self.programDef[CTLG_PARSOURCE])) < 2:
return None
parpath = utils.path_tool.composePath(self, jobdef[self.program]["pfilesource"], None)
if not os.path.join(parpath):
parpath = tools.path_tool.compose_path(self, self.programDef[CTLG_PARSOURCE], None)
if parpath is None or not os.path.exists(parpath):
return None
doc = utils.file_tool.readFileDict(self, parpath, self.m)
doc = tools.file_tool.read_file_dict(self, parpath, None)
for k in doc.keys():
output[k] = copy.deepcopy(doc[k])
return output
def setParameter(self, args):
self.par = Parameter(self, args)
def getParameter(self, parameter):
if hasattr(self.par, parameter):
if hasattr(self.par, parameter) and getattr(self.par, parameter) is not None:
return getattr(self.par, parameter)
elif "xxxtime" in parameter:
neu = utils.date_tool.getActdate(utils.date_tool.F_DIR)
neu = tools.date_tool.getActdate(tools.date_tool.F_DIR)
# setattr(self.par, parameter, neu)
return neu
else:
val = tools.value_tool.compose_pattern(self, parameter, None)
if val is None:
self.m.logError("Parameter "+parameter+" nicht in job.par ")
return
setattr(self.par, parameter, val)
return val
def hasElement(self, parameter, elem):
@@ -314,7 +299,6 @@ class Job:
:return:
"""
if hasattr(self.par, parameter):
print (parameter + " in Parameter")
if getattr(self.par, parameter).find(elem) >= 0:
return True
return False
@@ -343,13 +327,14 @@ class Job:
def getMessageLevel(self, errtyp, elem):
if (not hasattr(self, "m")) or (self.m is None):
return basic.message.LIMIT_DEBUG
elif elem.find("tool") > 1:
if not hasattr(self.par, "tool") or getattr(self.par, "tool").find(elem) <= 0:
return int(self.m.CONST_ERRTYP[errtyp]) -1
elif elem.find("tool") > 1 and hasattr(self, "par"):
if not hasattr(self.par, "tool") or elem not in getattr(self.par, "tool"):
return 0
else:
return int(self.m.CONST_ERRTYP[errtyp])
return 2
else:
return int(self.m.CONST_ERRTYP[errtyp])
# TODO quickfix
return 0
def getInfoLevel(self, elem):
return self.getMessageLevel("info", elem)
def getDebugLevel(self, elem):
@@ -357,47 +342,65 @@ class Job:
def getTraceLevel(self, elem):
return self.getMessageLevel("trace", elem)
def debug(self, prio, text):
#print("job.debug "+str(prio)+" "+text)
if hasattr(self, "m"):
self.m.debug(prio, text)
def getConfiguration(job, path):
conf = {}
conf["configpath"] = []
if hasattr(job, "conf"):
conf = job.conf
conf["configpath"].append(path)
doc = tools.file_tool.read_file_dict(job, path, None, ttype="basic")
if "basic" in doc:
for k, v in doc["basic"].items():
if k not in conf:
conf[k] = v
else:
print(text)
for k, v in doc.items():
if k not in conf:
conf[k] = v
return conf
# ------------------------------------------------------------------------------------------------------------------
class Parameter:
print ("class Parameter")
def __init__ (self, job, program):
print ("# init Parameter for " + program)
self.program = program
self.basedir = "debugs"
self.setBasedir(program)
print (f"# Parameter initialisiert {self.program} mit basedir {self.basedir}")
if (program not in ["unit", "webflask"] ): self.setParameter(job)
def setBasedir(self, program):
if jobdef[program]:
self.basedir = jobdef[program]["basedir"]
if hasattr(self, jobdef[program]["dirname"]):
utils.path_tool.extractPath(self.basedir, getattr(self, jobdef[program]["dirname"]))
elif self.basedir == "workbase":
home = utils.path_tool.getHome()
dirpath = os.path.join(home, "data", "workspace")
setattr(self, jobdef[program]["dirname"], dirpath)
elif program != "unit":
dirpath = utils.path_tool.composePattern(None, "{"+jobdef[program]["basedir"]+"}", None)
setattr(self, jobdef[program]["dirname"], dirpath)
"""
parameters sourced either from the args list or from the args of the main program
content of the parameters:
+ single arguments
+ dirname of the work folder, either from the dir parameter or composed from single parameters
comparison with a parameter file from the work folder
"""
def __init__ (self, job, args=None):
self.program = job.program
if args is not None and isinstance(args, dict):
self.setParameterArgs(job, args)
else:
self.basedir = "debugs"
self.setParameter(job)
self.basedir = job.programDef[B.PAR_BASEDIR]
job.basedir = job.programDef[B.PAR_BASEDIR]
job.par = self
# extract the parameters from the directory resp. build the directory from the parameters
if not hasattr(self, job.programDef[B.PAR_DIRNAME]):
setattr(self, job.programDef[B.PAR_DIRNAME],
tools.value_tool.compose_pattern(job, self.basedir, None))
else:
tools.path_tool.extractPath(job, job.programDef[B.PAR_BASEDIR],
getattr(self, job.programDef[B.PAR_DIRNAME]))
# reconcile with previously saved parameters
if len(job.programDef["pfilesource"]) > 2:
self.setParameterLoaded(job)
def checkParameter(self, job):
print (f"Parameter initialisiert {self.program}")
pardef = jobdef[job.program]["pardef"]
for p in pardef.split(","):
print(p)
pardef = job.programDef[CTLG_PARDEF]
for p in pardef:
if p == "testelem":
continue
if len(p) > 1 and not hasattr(self, p):
job.m.setFatal("Parameter " + p + " is not set!")
raise Exception("Parameter {} is not set for {}!".format(p, self.program))
def setParameter(self, job):
"""
@@ -418,11 +421,10 @@ class Parameter:
12. funktion -- restricts processing to the parametrized functions
13. tool -- restricts logging/processing to the parametrized tools
"""
# args = str(sys.argv)
# print ("Job-Programm %s : " % args)
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--'+B.SUBJECT_APPS, required=True, action='store')
parser.add_argument('-e', '--'+B.PAR_ENV, required=True, action='store')
parser.add_argument('-v', '--'+B.PAR_VAR, required=False, action='store')
parser.add_argument('-r', '--release', action='store')
parser.add_argument('-ts', '--'+B.PAR_TSDIR, action='store')
parser.add_argument('-tc', '--'+B.PAR_TCDIR, action='store')
@@ -452,7 +454,6 @@ class Parameter:
def setParameterArgs(self, job, args):
print("setParArgs " + str(type(args)))
self.parstring = "python " + self.program
if "dict" in str(type(args)):
for k in args:
@@ -462,19 +463,15 @@ class Parameter:
if getattr(args, k) is not None:
self.setJobAttr(k , getattr(args, k))
dirpath = self.getDirParameter()
if dirpath is not None:
utils.path_tool.extractPath(job, dirpath[0], dirpath[1])
app = self.application
if self.application in job.conf.confs[B.SUBJECT_APPS]:
if B.ATTR_APPS_PROJECT in job.conf.confs[B.SUBJECT_APPS][self.application]:
setattr(self, B.ATTR_APPS_PROJECT, job.conf.confs[B.SUBJECT_APPS][self.application][B.ATTR_APPS_PROJECT])
proj = getattr(self, B.ATTR_APPS_PROJECT)
app2 = self.application
if hasattr(self, "application") and B.SUBJECT_APPS in job.conf \
and getattr(self, "application") in job.conf[B.SUBJECT_APPS]:
if B.SUBJECT_PROJECT in job.conf[B.SUBJECT_APPS][self.application]:
setattr(self, B.SUBJECT_PROJECT, job.conf[B.SUBJECT_APPS][self.application][B.SUBJECT_PROJECT])
proj = getattr(self, B.SUBJECT_PROJECT)
def setParameterLoaded(self, job):
#job = Job.getInstance()
print("setParLoaded " )
readedPar = job.loadParameter()
if readedPar is not None:
for k in readedPar["par"].keys():
@@ -492,44 +489,3 @@ class Parameter:
if len(a) == 2:
return self[a[0]][a[1]]
return
# ------------------------------------------------------------------------------------------------------------------
class Configuration:
def __init__ (self, job, program):
self.program = program
print (f"job initialisiert {self.program}")
path = utils.path_tool.getBasisConfigPath()
self.setConfiguration(job, path)
return
def setConfiguration(self, job, path):
self.confs = {}
doc = utils.file_tool.readFileDict(job, path, None)
self.confs["configpath"] = path
if "basic" in doc:
for i, v in doc["basic"].items():
self.confs[i] = v
else:
for i, v in doc.items():
self.confs[i] = v
def setConfig(self, path, val):
a = path.split(".")
if len(a) == 1:
self.confs[a[0]] = val
elif len(a) == 2:
self.confs[a[0]][a[1]] = val
elif len(a) == 3:
self.confs[a[0]][a[1]][a[2]] = val
def getPath(self, key):
return self.confs.get(B.SUBJECT_PATH).get(key)
def getJobConf(self, key):
a = key.split(":")
if len(a) == 1:
return self.confs[a[0]]
if len(a) == 2:
return self.confs[a[0]][a[1]]
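
The removed Configuration.setConfig above writes a value along a dotted path into nested dicts, hard-coded for up to three segments. A self-contained sketch of that pattern, generalized to any depth (the helper name is hypothetical, not from the repo):

def set_config(confs, path, val):
    # walk "a.b.c" into nested dicts, creating intermediate nodes as needed
    keys = path.split(".")
    node = confs
    for k in keys[:-1]:
        node = node.setdefault(k, {})
    node[keys[-1]] = val

confs = {}
set_config(confs, "paths.debugs", "/tmp/debug")
assert confs == {"paths": {"debugs": "/tmp/debug"}}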

basic/step.py (109 lines changed)

@@ -1,109 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
"""
this module implements the functionality of a test-step
which is defined in the test-specification
and is executed by any executer
there are 2 kinds of test-step
a) execute specific component in the job
b) execute specific test-entity in the test-suite-execution
"""
import basic.constants as B
import utils.data_const as D
import utils.i18n_tool
LIST_ARGS = [
"start", # for starting the specified main-program
"fct" # for calling the specified component-function
]
class Step:
comp = ""
refLine = "" # in a: references the data-line(s) to be executed
execStep = "" # in a,b: executes only if the step is set in the job
args = {}
"""
the class contains each attribute of a test-step
"""
def __init__(self):
self.comp = ""
self.refLine = ""
self.execStep = ""
self.args = {}
def getStepText(self, job):
text = self.comp+D.CSV_DELIMITER+str(self.execStep)+D.CSV_DELIMITER+self.refLine
for k in self.args:
text += D.CSV_DELIMITER+k+":"+self.args[k]
return text+"\n"
def parseOldStep(job, fields):
step = {}
step[B.DATA_NODE_COMP] = fields[D.STEP_COMP_I]
step[B.ATTR_EXEC_REF] = fields[D.STEP_EXECNR_I]
step[B.ATTR_DATA_REF] = fields[D.STEP_REFNR_I]
step[B.ATTR_STEP_ARGS] = {}
if D.STEP_ARGS_I == D.STEP_LIST_I:
args = ""
for i in range(D.STEP_ARGS_I, len(fields)):
if len(fields[i]) < 1:
continue
if fields[i][0:1] == "#":
continue
args += "," + fields[i]
args = args[1:]
else:
args = fields[D.STEP_ARGS_I]
a = args.split(",")
for arg in a:
print("arg " + arg)
b = arg.split(":")
if len(b) < 2:
raise Exception(D.EXCP_MALFORMAT + "" + str(fields))
step[B.ATTR_STEP_ARGS][b[0]] = b[1]
# data[B.DATA_NODE_STEPS].append(step)
return step
def parseStep(job, fields):
step = Step()
step.comp = fields[D.STEP_COMP_I]
step.execStep = fields[D.STEP_EXECNR_I]
step.refLine = fields[D.STEP_REFNR_I]
setattr(step, B.ATTR_DATA_REF, step.refLine)
if D.STEP_ARGS_I == D.STEP_LIST_I:
args = ""
for i in range(D.STEP_ARGS_I, len(fields)):
if len(fields[i]) < 1:
continue
if fields[i][0:1] == "#":
continue
args += "," + fields[i]
args = args[1:]
else:
args = fields[D.STEP_ARGS_I]
a = args.split(",")
for arg in a:
print("arg " + arg)
b = arg.split(":")
if len(b) < 2:
raise Exception(D.EXCP_MALFORMAT + "" + str(fields))
step.args[b[0]] = b[1]
if b[0] in LIST_ARGS:
setattr(step, b[0], b[1])
# data[B.DATA_NODE_STEPS].append(step)
return step
def getStepHeader(job):
text = "# "
text += utils.i18n_tool.I18n.getInstance(job).getText(f"{D.CSV_BLOCK_STEP=}", job)
text += ";"+utils.i18n_tool.I18n.getInstance(job).getText(f"{D.STEP_ATTR_COMP=}", job)
text += ";"+utils.i18n_tool.I18n.getInstance(job).getText(f"{D.STEP_ATTR_EXECNR=}", job)
text += ";"+utils.i18n_tool.I18n.getInstance(job).getText(f"{D.STEP_ATTR_REFNR=}", job)
text += ";"+utils.i18n_tool.I18n.getInstance(job).getText(f"{D.STEP_ATTR_ARGS=}", job)
return text + ";..;;;\n"
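
For illustration, a standalone sketch (not the repo function) of the "key:value,key:value" splitting that parseStep applies to the args column of a step line:

def parse_step_args(args):
    # mirrors the split(",") / split(":") loop in parseStep above
    out = {}
    for arg in args.split(","):
        if len(arg) < 1 or arg.startswith("#"):
            continue
        key, sep, value = arg.partition(":")
        if not sep:
            raise ValueError("malformed step argument: " + arg)
        out[key] = value
    return out

assert parse_step_args("start:init_testcase,fct:read") == {"start": "init_testcase", "fct": "read"}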

basic/testexecution.py (52 lines changed)

@@ -0,0 +1,52 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import basic.toolHandling
import utils.data_const as D
import basic.constants as B
import model.entity
class Testexecution(model.entity.Entity):
name = ""
description = "" # from testplan, testsuite, testcase
release = ""
path = ""
level = "" # testplan, testsuite, testcase
entities = {}
def __init__(self, job):
"""
to be initialized by readSpec
:param job:
"""
self.job = job
def get_schema(self):
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
sql = dbi.getCreateTable("testexecution")
sql += dbi.getSchemaAttribut("teid", "id")+","
sql += dbi.getSchemaAttribut("name", D.TYPE_STR)+","
sql += dbi.getSchemaAttribut(B.SUBJECT_REFERENCE, D.TYPE_TEXT)+","
sql += dbi.getSchemaAttribut("prelease", D.TYPE_STR)+","
sql += dbi.getSchemaAttribut("type", D.TYPE_STR)+","
sql += dbi.getSchemaAttribut("entity", D.TYPE_STR)+","
sql += dbi.getSchemaAttribut("path", D.TYPE_STRING)+","
sql += dbi.getSchemaAttribut("starttime", D.TYPE_TIME)+","
sql += dbi.getSchemaAttribut("finishtime", D.TYPE_TIME)+","
sql += dbi.getSchemaAttribut(B.NODE_ATTRIBUTES, D.TYPE_TEXT)+","
sql += self.getHistoryFields()
sql += ");\n"
sql += dbi.getSchemaIndex("testexecution", "release") + "\n"
sql += self.getHistoryIndex("testplan")
for attr in ["entity"]:
sql += dbi.getSchemaSubtable("te", [{"attr":attr, "atype": D.TYPE_STR},
{"attr":"type", "atype": D.TYPE_STR},
{"attr":"path", "atype": D.TYPE_STRING},
{"attr":B.NODE_ATTRIBUTES, "atype": D.TYPE_TEXT}])+"\n"
sql += dbi.getSchemaIndex(dbi.getSubTableName("te", attr),
dbi.getSubTableId(dbi.getSubTableName("te", attr), attr))+"\n"
return sql
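
get_schema builds its DDL purely by concatenating fragments from the db tool. A sketch with a hypothetical stand-in dbi; the real helpers come from basic.toolHandling.getDbTool and may render the types differently:

class FakeDbi:
    # hypothetical stand-in; only approximates the real DbFcts helpers
    def getCreateTable(self, table):
        return "CREATE TABLE " + table + " ("
    def getSchemaAttribut(self, name, atype):
        types = {"id": "INTEGER PRIMARY KEY", "str": "VARCHAR(100)", "text": "TEXT"}
        return name + " " + types.get(atype, "TEXT")

dbi = FakeDbi()
sql = dbi.getCreateTable("testexecution")
sql += dbi.getSchemaAttribut("teid", "id") + ","
sql += dbi.getSchemaAttribut("name", "str") + ","
sql += dbi.getSchemaAttribut("attributes", "text") + ");"
print(sql)  # CREATE TABLE testexecution (teid INTEGER PRIMARY KEY,name VARCHAR(100),attributes TEXT);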

basic/testkonzept.txt (69 lines changed)

@@ -0,0 +1,69 @@
The test is intended to demonstrate the quality of the application systematically (test coverage)
and to safeguard it in the long term (by means of regressions).
Roles - are described now, ok
Test objects
logical test case
test case from the management perspective, derived from the user-story acceptance criteria
These are reported on all levels, especially their status.
physical test case
test case from the specification and execution perspective
a concrete test case to be executed; the individual test steps must be specified/configured and logged.
Kinds of physical test cases:
* automated test case
All individual steps are specified in the git repo according to the automation framework. The test case is executed according to the specification.
* manual test case
All individual steps are specified in detail (in Jira-..). The test case is executed according to the specification.
* exploratory test case
The essential step sequences are specified in detail (in Jira-..). Execution can and should vary from the specification. The essential steps are logged.
Testing in the software process
@pre: component/unit tests by the developers
Q1: accept the delivery
* decrypt
* program: verify the artifacts via md5 hash
* push the delivery into the git repo
<< exit if the delivery cannot be read
Q2: static test
--> NFR maintainability
* check completeness >>> gitVerteiler
* code analysis >>> SonarQube
<< exit if essential programs are missing (misnamed ones also count as missing!)
<< warning on incompleteness
Q3: installability
--> NFR installability, portability
* Kubernetes container >>> JEE platform?
* run the DB scripts on Hive?
* Cloudera single-node machine >>> Linux machine
* DevOps installation ** describe test cases for this!
<< exit if not installable
Q4: system/smoke test
* UI controls
* dynamic smoke test (minimal test cases, mainly happy paths)
* minimal GUI test >>>
<< exit on technical blockers
Q5: regression/progression test
--> functional test, security test
* functional regression (comprehensive test cases, complete equivalence classes)
* extended test cases for new functions
* visibility, locking constellations >>>
<< exit on gross malfunctions
Q6: usability
--> NFR usability, performance, load
* manual usability, edu@ETU
<< exit if the usage is unusable
<< warnings

basic/text_const.py (2 lines changed)

@@ -1,5 +1,7 @@
# -----------------
EXP_PATH_MISSING = "path is missing {}"
EXP_CONFIG_MISSING = "config is missing {}"
EXP_KEY_MISSING = "key is missing {}"
EXP_KEY_DOESNT_EXIST = "key {} doesnt exist in domain {}"
LIST_EXP_TEXT = [EXP_KEY_MISSING, EXP_KEY_DOESNT_EXIST]

basic/toolHandling.py (79 lines changed)

@@ -6,10 +6,10 @@
# ---------------------------------------------------------------------------------------------------------
import importlib
import os
import basic.program
# import basic.program
import basic.constants as B
# -------------------------------------------------
import utils.config_tool
import tools.config_tool
def hasAttr(o, name):
@@ -17,36 +17,40 @@ def hasAttr(o, name):
if (name in o.keys()):
return True
elif (isinstance(o, list)):
print("hasAttr list "+str(type(o)))
print("hasAttr list " + str(type(o)))
elif hasattr(o, name):
return True
return False
def getAttr(o, name):
if (isinstance(o, dict)):
if (name in o.keys()):
return o[name]
elif (isinstance(o, list)):
print("getAttr c list "+str(type(o)))
print("getAttr c list " + str(type(o)))
elif hasattr(o, name):
return getattr(o, name)
"""
Toolmanager
"""
def getCompAttr(comp, topic, attr, table=""):
out = ""
print(topic+" "+attr+" "+str(comp))
if hasAttr(comp.conf[B.SUBJECT_CONN], topic) and hasAttr(comp.conf[B.SUBJECT_CONN][topic], attr):
return getAttr(comp.conf[B.SUBJECT_CONN][topic], attr)
if len(table) > 1 and hasAttr(comp.conf[B.SUBJECT_ARTS][topic], table) \
and hasAttr(comp.conf[B.SUBJECT_ARTS][topic][table], attr):
return getAttr(comp.conf[B.SUBJECT_ARTS][topic][table], attr)
if hasAttr(comp.conf[B.SUBJECT_ARTS], topic) and hasAttr(comp.conf[B.SUBJECT_ARTS][topic], attr):
print("attr "+attr+" vorhanden")
return getAttr(comp.conf[B.SUBJECT_ARTS][topic], attr)
raise LookupError(topic+"."+attr+" is not set in comp " + comp.name)
print(topic + " " + attr + " " + str(comp))
if hasAttr(comp.conf[B.TOPIC_CONN], topic) and hasAttr(comp.conf[B.TOPIC_CONN][topic], attr):
return getAttr(comp.conf[B.TOPIC_CONN][topic], attr)
if len(table) > 1 and hasAttr(comp.conf[B.SUBJECT_ARTIFACTS][topic], table) \
and hasAttr(comp.conf[B.SUBJECT_ARTIFACTS][topic][table], attr):
return getAttr(comp.conf[B.SUBJECT_ARTIFACTS][topic][table], attr)
if hasAttr(comp.conf[B.SUBJECT_ARTIFACTS], topic) and hasAttr(comp.conf[B.SUBJECT_ARTIFACTS][topic], attr):
print("attr " + attr + " vorhanden")
return getAttr(comp.conf[B.SUBJECT_ARTIFACTS][topic], attr)
raise LookupError(topic + "." + attr + " is not set in comp " + comp.name)
def getTool(technicType, comp, job):
if technicType == B.TOPIC_NODE_DB:
@@ -60,63 +64,68 @@ def getTool(technicType, comp, job):
# because different files can occur for one component
return getFileTool(job, comp, "")
# class ToolManager:
def getDbTool(job, comp, dbtype=""):
verify = int(job.getDebugLevel("db_tool"))
if len(dbtype) < 3:
dbtype = getCompAttr(comp, B.TOPIC_NODE_DB, B.ATTR_TYPE, "")
toolname = "db"+dbtype+"_tool"
filepath = os.path.join(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_PROGRAM], "utils", toolname+".py")
#comp.m.debug(verify, "toolname "+filepath)
toolname = "db" + dbtype + "_tool"
filepath = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_PROGRAM], "tools", toolname + ".py")
# comp.m.debug(verify, "toolname "+filepath)
if not os.path.exists(filepath):
raise FileNotFoundError("file for tool "+toolname+" does not exist "+filepath)
cmodul = importlib.import_module("utils."+toolname)
raise FileNotFoundError("dbi for tool " + toolname + " does not exist " + filepath)
cmodul = importlib.import_module("tools." + toolname)
class_ = getattr(cmodul, "DbFcts")
c = class_()
c.setComp(job, comp)
return c
def getCliTool(job, comp):
verify = int(job.getDebugLevel("db_tool"))
clitype = getCompAttr(comp, B.TOPIC_NODE_CLI, B.ATTR_TYPE, "")
toolname = "cli"+clitype+"_tool"
filepath = os.path.join(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_PROGRAM], "utils", toolname+".py")
#comp.m.debug(verify, "toolname "+filepath)
toolname = "cli" + clitype + "_tool"
filepath = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_PROGRAM], "tools", toolname + ".py")
# comp.m.debug(verify, "toolname "+filepath)
if not os.path.exists(filepath):
raise FileNotFoundError("file for tool "+toolname+" does not exist "+filepath)
cmodul = importlib.import_module("utils."+toolname)
raise FileNotFoundError("file for tool " + toolname + " does not exist " + filepath)
cmodul = importlib.import_module("tools." + toolname)
class_ = getattr(cmodul, "CliFcts")
c = class_()
c.setComp(job, comp)
return c
def getApiTool(job, comp):
verify = int(job.getDebugLevel("db_tool"))
apitype = getCompAttr(comp, B.TOPIC_NODE_API, B.ATTR_TYPE, "")
toolname = "api"+apitype+"_tool"
filepath = os.path.join(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_PROGRAM], "utils", toolname+".py")
#comp.m.debug(verify, "toolname "+filepath)
toolname = "api" + apitype + "_tool"
filepath = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_PROGRAM], "tools", toolname + ".py")
# comp.m.debug(verify, "toolname "+filepath)
if not os.path.exists(filepath):
raise FileNotFoundError("file for tool "+toolname+" does not exist "+filepath)
cmodul = importlib.import_module("utils."+toolname)
raise FileNotFoundError("file for tool " + toolname + " does not exist " + filepath)
cmodul = importlib.import_module("tools." + toolname)
class_ = getattr(cmodul, "ApiFcts")
c = class_()
c.setComp(job, comp)
return c
def getFileTool(job, comp, filenode=""):
verify = int(job.getDebugLevel("file_tool"))
if len(filenode) > 3 and "." in filenode and filenode[-1:] != ".":
filetype = utils.config_tool.getAttribute(comp, filenode, B.ATTR_ARTS_TYPE, job)
filetype = tools.config_tool.getAttribute(comp, filenode, B.ATTR_ARTS_TYPE, job)
elif len(filenode) > 2 and len(filenode) < 5:
filetype = filenode
else:
filetype = getCompAttr(comp, B.TOPIC_NODE_FILE, B.ATTR_TYPE, "")
toolname = "file"+filetype+"_tool"
filepath = os.path.join(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_PROGRAM], "utils", toolname+".py")
#comp.m.debug(verify, "toolname "+filepath)
toolname = "file" + filetype + "_fcts"
filepath = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_PROGRAM], "tools", toolname + ".py")
# comp.m.debug(verify, "toolname "+filepath)
if not os.path.exists(filepath):
raise FileNotFoundError("file for tool "+toolname+" does not exist "+filepath)
cmodul = importlib.import_module("utils."+toolname)
raise FileNotFoundError("file for tool " + toolname + " does not exist " + filepath)
cmodul = importlib.import_module("tools." + toolname)
class_ = getattr(cmodul, "FileFcts")
c = class_()
c.setComp(job, comp)
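
All four factories above share one pattern: derive a module name from a type attribute, import it with importlib, and instantiate a class of a fixed name. A sketch of that pattern; load_tool is a hypothetical helper, and the demonstration uses a stdlib module so the snippet runs outside the repo:

import importlib

def load_tool(package, prefix, ttype, class_name):
    # e.g. load_tool("tools", "db", "mysql", "DbFcts") would import tools.dbmysql_tool
    cmodul = importlib.import_module(package + "." + prefix + ttype + "_tool")
    return getattr(cmodul, class_name)()

# demonstrated with a stdlib module so the sketch runs standalone:
decoder = getattr(importlib.import_module("json"), "JSONDecoder")()
print(decoder.decode('{"ok": true}'))  # {'ok': True}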

basic/xxDATASTRUCTURE.yxml (321 lines changed)

@@ -0,0 +1,321 @@
application:
_header:
- _field
- type
- format
- index
_fields:
- apid
- name
- description
- reference
- attributes
- inscommit
- insauthor
- instime
- updcommit
- updauthor
- updtime
- actual
apid:
_field: apid
type: pk
name:
_field: name
type: str
index: I
description:
_field: description
type: string
reference:
_field: reference
type: str
attributes:
_field: attributes
type: string
insauthor:
_field: insauthor
type: str
inscommit:
_field: inscommit
type: str
instime:
_field: instime
type: time
updauthor:
_field: updauthor
type: str
updcommit:
_field: updcommit
type: str
updtime:
_field: updtime
type: time
actual:
_field: actual
type: int
index: I
ap_component:
_header:
- _field
- type
- format
- index
_fields:
- apcomid
- apid
- component
apcomid:
_field: apcomid
type: pk
apid:
_field: apid
type: int
index: I
component:
_field: component
type: str
index: I
ap_application:
_header:
- _field
- type
- format
- index
_fields:
- apappid
- apid
- application
apappid:
_field: apappid
type: pk
apid:
_field: apid
type: int
index: I
application:
_field: component
type: str
index: I
ap_project:
_header:
- _field
- type
- format
- index
_fields:
- approid
- apid
- project
- description
- reference
approid:
_field: apid
type: pk
apid:
_field: apid
type: int
index: I
project:
_field: project
type: str
index: I
description:
_field: description
type: string
reference:
_field: reference
type: str
environment:
_header:
- _field
- type
- format
- index
_fields:
- enid
- name
- description
- reference
- attributes
- inscommit
- insauthor
- instime
- updcommit
- updauthor
- updtime
- actual
enid:
_field: enid
type: pk
name:
_field: name
type: str
index: I
description:
_field: description
type: string
reference:
_field: reference
type: str
attributes:
_field: attributes
type: string
insauthor:
_field: insauthor
type: str
inscommit:
_field: inscommit
type: str
instime:
_field: instime
type: time
updauthor:
_field: updauthor
type: str
updcommit:
_field: updcommit
type: str
updtime:
_field: updtime
type: time
actual:
_field: actual
type: int
index: I
en_project:
_header:
- _field
- type
- format
- index
_fields:
- enproid
- enid
- project
enproid:
_field: enproid
type: pk
enid:
_field: enid
type: int
index: I
project:
_field: project
type: str
index: I
en_component:
_header:
- _field
- type
- format
- index
_fields:
- encomid
- enid
- component
- instance
- type
- ip
- port
- hostname
- dompath
- user
- password
- attributes
encomid:
_field: encomid
type: pk
enid:
_field: enid
index: I
type: int
component:
_field: component
index: I
type: str
instance:
_field: instance
type: int
type:
_field: type
type: str
ip:
_field: ip
type: str
port:
_field: port
type: str
hostname:
_field: hostname
type: str
dompath:
_field: dompath
type: str
user:
_field: user
type: str
password:
_field: password
type: str
attributes:
_field: attributes
type: string
component:
_header:
- _field
- type
- format
- index
_fields:
- coid
- name
- description
- reference
- attributes
- inscommit
- insauthor
- instime
- updcommit
- updauthor
- updtime
- actual
coid:
_field: apid
type: pk
name:
_field: name
type: str
index: I
description:
_field: description
type: string
reference:
_field: reference
type: str
attributes:
_field: attributes
type: string
insauthor:
_field: insauthor
type: str
inscommit:
_field: inscommit
type: str
instime:
_field: instime
type: time
updauthor:
_field: updauthor
type: str
updcommit:
_field: updcommit
type: str
updtime:
_field: updtime
type: time
actual:
_field: actual
type: int
index: I
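
Each top-level key of this data-structure file is a table with a _fields list plus per-field metadata. A sketch that walks such a block (assumes PyYAML is installed; the snippet is embedded as a string so it runs standalone):

import yaml  # PyYAML, assumed to be installed

SNIPPET = """
application:
  _fields: [apid, name, description]
  apid: {_field: apid, type: pk}
  name: {_field: name, type: str, index: I}
  description: {_field: description, type: string}
"""

model = yaml.safe_load(SNIPPET)
for table, spec in model.items():
    cols = [(f, spec[f]["type"]) for f in spec["_fields"]]
    print(table, cols)  # application [('apid', 'pk'), ('name', 'str'), ('description', 'string')]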

catalog/programs.csv (24 lines changed)

@@ -0,0 +1,24 @@
_type;ctlg;;;;;;;;;;;;;
_key;name;;;;;;;;;;;;;
table:programs;name;objtype;objname;time;env;app;variant;pardef;pfilesource;pfiletarget;dirname;basedir;loglevel;logpath
;test_executer;tp,ts,tc;m;m;m;m;o;"{""gran"":""args"",""application"":""args"",""environment"":""args"",""testelem"":""args"",""variant"":""args""}";;;{objtype}dir;{objtype}base;info;{job.par.wsdir}/{log}/log_{job.start}.txt
;init_testsuite;ts;m;o;m;m;o;"{""gran"":""testsuite"",""application"":""args"",""environment"":""args"",""testsuite"":""args"",""variant"":""args""}";envparfile;tsparfile;tsdir;tsbase;info;{job.par.tsdir}/{log}/{job.program}_{job.start}.txt
;execute_testsuite;ts;m;m;m;m;o;"{""gran"":""testsuite"",""application"":""args"",""environment"":""args"",""testsuite"":""args"",""tstime"":""args"",""variant"":""args""}";tsparfile;tsparfile;tsdir;tsbase;info;{job.par.tsdir}/{log}/{job.program}_{job.start}.txt
;collect_testsuite;ts;m;m;m;m;o;"{""gran"":""testsuite"",""application"":""args"",""environment"":""args"",""testsuite"":""args"",""tstime"":""args"",""variant"":""args""}";tsparfile;tsparfile;tsdir;tsbase;info;{job.par.tsdir}/{log}/{job.program}_{job.start}.txt
;finish_testsuite;ts;m;m;m;m;o;"{""gran"":""testsuite"",""application"":""args"",""environment"":""args"",""testsuite"":""args"",""tstime"":""args"",""variant"":""args""}";tsparfile;tsparfile;tsdir;tsbase;info;{job.par.tsdir}/{log}/{job.program}_{job.start}.txt
;unzip_testsuite;ts;m;m;m;m;o;"{""gran"":""testsuite"",""application"":""args"",""environment"":""args"",""testsuite"":""args"",""tstime"":""args"",""variant"":""args""}";;;;;info;{job.par.tsdir}/{log}/{job.program}_{job.start}.txt
;init_testcase;tc;m;o;m;m;o;"{""gran"":""testcase"",""application"":""args"",""environment"":""args"",""testcase"":""args"",""tctime"":""args"",""variant"":""args""}";tsparfile;tcparfile;tcdir;tcbase;info;{job.par.tcdir}/{log}/{job.program}_{job.start}.txt
;exec_testcase;tc;m;m;m;m;o;"{""gran"":""testcase"",""application"":""args"",""environment"":""args"",""testcase"":""args"",""tctime"":""args"",""variant"":""args""}";tcparfile;tcparfile;tcdir;tcbase;info;{job.par.tcdir}/{log}/{job.program}_{job.start}.txt
;collect_testcase;tc;m;m;m;m;o;"{""gran"":""testcase"",""application"":""args"",""environment"":""args"",""testcase"":""args"",""tctime"":""args"",""variant"":""args""}";tcparfile;tcparfile;tcdir;tcbase;info;{job.par.tcdir}/{log}/{job.program}_{job.start}.txt
;compare_testcase;tc;m;m;m;m;o;"{""gran"":""testcase"",""application"":""args"",""environment"":""args"",""testcase"":""args"",""tctime"":""args"",""variant"":""args""}";tcparfile;tcparfile;tcdir;tcbase;info;{job.par.tcdir}/{log}/{job.program}_{job.start}.txt
;check_environment;env;;;m;;o;"{""application"":""args"",""environment"":""args""}";;envparfile;envdir;envbase;info;{job.par.envdir}/{log}/{job.program}_{job.start}.txt
;check_specification;tp,ts,tc;o;;;;n;"{""gran"":""args"",""application"":""args"",""environment"":""args"",""testcase"":""args"",""tctime"":""args"",""variant"":""args""}";;;wsdir;wsbase;info;{job.par.wsdir}/{log}/{job.program}_{job.start}.txt
;check_configuration;ws;o;;;o;n;"{""application"":""args"",""environment"":""args""}";;;wsdir;wsbase;info;{job.par.wsdir}/{log}/{job.program}_{job.start}.txt
;make_workspace;ws;;;_;;n;"{""application"":""service"",""environment"":""Testserver""}";;;wsdir;wsbase;info;{job.par.wsdir}/{log}/{job.program}_{job.start}.txt
;unit_tester;ws;o;;d;;n;"{""application"":""service"",""environment"":""Testserver""}";;;wsdir;wsbase;info;{job.par.wsdir}/{log}/{job.program}_{job.start}.txt
;declare_expection;ts,tc;m;m;m;m;o;"{""application"":""service"",""environment"":""Testserver""}";;;wsdir;wsbase;info;{job.par.wsdir}/{log}/{job.program}_{job.start}.txt
;clean_workspace;ws;o ;;_;;n;"{""application"":""service"",""environment"":""Testserver""}";;;wsdir;wsbase;info;{job.par.wsdir}/{log}/{job.program}_{job.start}.txt
;start_dialog;ws;input;;input;input;input;"{""gran"":""service"",""application"":""select-app"",""environment"":""select-env""}";;;wsdir;wsbase;info;{job.par.wsdir}/{log}/{job.program}_{job.start}.txt
;web_start;ws;;;;;n;"{""application"":""service"",""environment"":""Testserver""}";;;wsdir;wsbase;warn;{job.par.wsdir}/{log}/{job.program}_{job.start}.txt
;unzip_result;tp,ts,tc;o;;;;n;"{""par"":{""application"":""service"",""environment"":""Testserver"",""variant"":""""}}";;;wsdir;wsbase;info;{wsbase}/{log}/{job.program}_{job.start}.txt
;sync_model;;;;;;;;;;wsdir;wsbase;info;{job.par.wsdir}/{log}/{job.program}_{job.start}.txt
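Every row of programs.csv configures one job program; the `pardef` column is embedded JSON and the `logpath` column is a brace-delimited template resolved at runtime. A minimal stdlib-only sketch of reading such a catalog (the function name is illustrative; the project itself reads it through its csv tooling):

import csv
import json

def read_catalog(path: str) -> dict:
    """parse a semicolon-separated catalog file like catalog/programs.csv"""
    programs = {}
    header = []
    with open(path, newline="", encoding="utf-8") as f:
        for row in csv.reader(f, delimiter=";"):
            if row[0] in ("_type", "_key"):
                continue                      # file-level metadata rows
            if row[0].startswith("table:"):
                header = row[1:]              # column names follow 'table:programs'
                continue
            entry = dict(zip(header, row[1:]))
            if entry.get("pardef"):
                entry["pardef"] = json.loads(entry["pardef"])
            programs[entry["name"]] = entry
    return programs

# e.g. read_catalog("catalog/programs.csv")["init_testcase"]["pardef"]["gran"] == "testcase"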

94
check_configuration.py

@ -0,0 +1,94 @@
# This is a sample Python script.
import os
import traceback
import sys
import yaml
import basic.program
import basic.constants as B
import basic.message
import tools.path_const as P
import tools.config_tool as config_tool
import tools.file_tool as file_tool
import model.entity
import model.factory
import model.table
PROGRAM_NAME = "check_configuration"
def startPyJob(job):
try:
job.m.logDebug("--- start " + PROGRAM_NAME + " ------>>>>")
components = job.par.component.split(",")
for c in components:
job.m.logInfo("------------------------------------------\ncheck component "+c)
checkComponent(job, c)
job.m.setMsg("Job " + PROGRAM_NAME + " fertig")
job.m.logDebug("<<<<<<<<----- " + PROGRAM_NAME + " ------")
except Exception as e:
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.setFatal(str(e))
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.logDebug("execpt "+traceback.format_exc())
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
def checkComponent(job, componentName):
"""
checks the configurations of the component
:param job:
:param componentName:
:return:
"""
import model.component
configPath = config_tool.getExistingPath(job, [os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_COMPS], componentName, "CONFIG")])
configTree = file_tool.read_file_dict(job, configPath, job.m)
for x in model.component.LIST_CP_SUBJECTS:
if "conf" not in configTree:
job.m.setError(componentName + ": root conf is not set: ")
break
if x not in configTree["conf"]:
job.m.setError(componentName + ": subject is not set: " + x)
else:
for c in configTree["conf"][x]:
if c == "none":
if len(configTree["conf"][x]) != 1:
job.m.setWarn("none is not the only subject in "+x)
continue
comps = model.component.select_components(job, None, None)
job.m.logInfo("Komponenten pruefen")
for c in configTree["conf"][model.component.CP_SUBJECT_COMPS]:
if c in ["none"]:
continue
if c not in comps:
job.m.setError(componentName + ": component " + c + " does not exist")
job.m.logInfo("- " + componentName + " uses component " + c)
job.m.logInfo("Steps pruefen")
for v in configTree["conf"][model.component.CP_SUBJECT_STEPS]:
if v == "none":
continue
job.m.logInfo("- "+componentName + " uses variant "+v)
job.m.logInfo("Tabellen pruefen")
tables = model.table.select_tables(job, None, None)
for t in configTree["conf"][model.component.CP_SUBJECT_TABLES]:
if t == "none":
continue
if t in tables:
job.m.logInfo("- "+componentName + " uses table " + t)
else:
job.m.setError(componentName + ": table " + t + " ist not defined.")
job.m.logInfo("Artefakte pruefen")
for a in configTree["conf"][model.component.CP_SUBJECT_ARTS]:
if t == "none":
continue
job.m.logInfo("- "+componentName + " uses artifact " + a)
if __name__ == '__main__':
job = basic.program.Job(PROGRAM_NAME)
print ("job "+str(job.__dict__))
job.startJob()
if job.m.isRc("fatal"):
job.stopJob()
# now in theory the program is runnable
startPyJob(job)
job.stopJob()
# See PyCharm help at https://www.jetbrains.com/help/pycharm/

14
check_environment.py

@ -5,10 +5,22 @@ import yaml # pip install pyyaml
import basic.program
import basic.componentHandling
import basic.message
import utils.tdata_tool
#import utils.tdata_tool
import traceback
PROGRAM_NAME = "check_environment"
def startPyJob(job):
try:
job.m.logDebug("--- start " + PROGRAM_NAME + " ------>>>>")
job.m.logDebug("<<<<<<<<----- " + PROGRAM_NAME + " ------")
except Exception as e:
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.setFatal(str(e))
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.logDebug("execpt "+traceback.format_exc())
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
if __name__ == '__main__':
x = basic.program.Job(PROGRAM_NAME)
print ("x "+str(x))

60
check_specification.py

@ -0,0 +1,60 @@
# This is a sample Python script.
import sys
# import jsonpickle # pip install jsonpickle
import yaml # pip install pyyaml
import basic.program
import basic.componentHandling
import basic.message
#import utils.tdata_tool
import traceback
PROGRAM_NAME = "check_specification"
def startPyJob(job):
try:
job.m.logDebug("--- start " + PROGRAM_NAME + " ------>>>>")
job.m.setMsg("Job " + PROGRAM_NAME + " fertig")
if hasattr(job.par, "testcase"):
testcase = getattr(job.par, "testcase")
print("Check testcase "+testcase)
elif hasattr(job.par, "testsuite"):
testsuite = getattr(job.par, "testsuite")
print("Check testsuite "+testsuite)
elif hasattr(job.par, "testplan"):
testplan = getattr(job.par, "testplan")
print("Check testplan "+testplan)
job.m.logDebug("<<<<<<<<----- " + PROGRAM_NAME + " ------")
except Exception as e:
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.setFatal(str(e))
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.logDebug("execpt "+traceback.format_exc())
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
def checkHead(job):
pass
if __name__ == '__main__':
x = basic.program.Job(PROGRAM_NAME)
print ("x "+str(x))
x.startJob()
x.m.logDebug(str(vars(x.par)) + "\n" + str(vars(x.conf)))
if x.m.isRc("fatal"):
x.stopJob()
exit(x.m.rc * (-1) + 3)
# now in theory the program is runnable
x.m.setMsg("# job initialized")
cm = basic.componentHandling.ComponentManager.getInstance(x)
print("cm "+str(cm))
cm.initComponents()
comps = cm.getComponents(x, PROGRAM_NAME)
x.m.setMsg("# Components initialized with these relevant components " + str(comps))
for c in comps:
comp = cm.getComponent(c)
print(str(comp))
comp.check_Instance()
x.m.merge(comp.m)
comp.confs["function"][PROGRAM_NAME] = comp.m.topmessage
x.stopJob()
# See PyCharm help at https://www.jetbrains.com/help/pycharm/

115
clean_workspace.py

@ -0,0 +1,115 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
"""
program to clean the workspace :
* remove old debug-files
"""
import os
import re
import shutil
import sys
import traceback
import basic.program
import basic.constants as B
import tools.date_tool as date_tool
import tools.path_tool as path_tool
import tools.job_tool as job_tool
LIMIT_DEBUG_FILES = -7
PROGRAM_NAME = "clean_workspace"
def startPyJob(job):
try:
job.m.logDebug("--- start " + PROGRAM_NAME + " ------>>>>")
# remove debug-files
removeDebugFiles(job)
# clean and archive log-files
cleanLogFiles(job)
job.m.setMsg("Job " + PROGRAM_NAME + " fertig")
job.m.logDebug("<<<<<<<<----- " + PROGRAM_NAME + " ------")
except Exception as e:
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.setFatal(str(e))
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.logDebug("execpt "+traceback.format_exc())
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
def removeDebugFiles(job):
"""
to remove debug-files in any relevant folder
:param job:
:return:
"""
job.m.logInfo("# # remove log-files # # #")
limit = date_tool.getActdate(date_tool.F_LOG, LIMIT_DEBUG_FILES)[0:8]
path = job.conf[B.TOPIC_PATH][B.ATTR_PATH_DEBUG]
cleanFolder(job, path, limit)
path = os.path.join(B.HOME_PATH, "temp")
cleanFolder(job, path, limit)
def cleanFolder(job, path, limit):
"""
remove all files in the folder with a log-date older than limit
:param job:
:param path:
:param limit:
:return:
"""
date_regex = r"(.*)_(\d{8})_\d{6}"
cntRm = 0
cntMv = 0
cntAll = 0
for f in os.listdir(path):
cntAll += 1
if re.match(date_regex, f):
res = re.search(date_regex, f)
fileType = str(res.group(1))
fileDate = str(res.group(2))
if fileType in ["debug", "log", "start_dialog"]:
if fileDate >= limit:
continue
job.m.logInfo("remove " + os.path.join(path, f))
os.remove(os.path.join(path, f))
cntRm += 1
else:
fileYear = fileDate[0:4]
actYear = date_tool.getActdate(date_tool.F_LOG)[0:4]
archivPath = os.path.join(path, fileYear)
if fileYear < actYear:
if not os.path.exists(archivPath):
os.mkdir(archivPath)
if not os.path.isdir(archivPath):
raise Exception("archiv-folder is not a directory: " + archivPath)
shutil.move(os.path.join(path, f), os.path.join(archivPath, f))
cntMv += 1
job.m.setMsg(str(cntRm) + " / " + str(cntAll) + " files removed in " + path)
job.m.setMsg(str(cntMv) + " / " + str(cntAll) + " files moved from " + path)
def cleanLogFiles(job):
"""
searches all log-folder in test-documents and remove the oldest log-files except the newest
:param job:
:return:
"""
job.m.logInfo("# # clean log-files # # #")
limit = date_tool.getActdate(date_tool.F_LOG, LIMIT_DEBUG_FILES)[0:8]
path = path_tool.compose_path(job, "{job.par.wsdir}/{log}", None)
cleanFolder(job, path, limit)
environments = job_tool.select_environment(job, "", "ALL")
for env in environments:
jobEnv = ""
if hasattr(job.par, "environment"):
jobEnv = getattr(job.par, "environment")
setattr(job.par, "environment", env)
path = path_tool.compose_path(job, "{envlog}", None)
cleanFolder(job, path, limit)
setattr(job.par, "environment", jobEnv)
pass
if __name__ == '__main__':
job = basic.program.Job(PROGRAM_NAME)
startPyJob(job)
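To make the cleanFolder rules concrete: with LIMIT_DEBUG_FILES = -7 the limit is the log-date of seven days ago, so on an assumed run date of 2024-05-08 a folder would be handled like this (file names are illustrative):

debug_20240401_120000.txt   -> removed (debug file older than the 20240501 limit)
log_20240507_093000.txt     -> kept (log file newer than the limit)
result_20230315_101500.tar  -> moved to ./2023/ (non-log file from a prior year)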

56
copy_appdummy.py

@ -0,0 +1,56 @@
# program to copy dummy-file as testcase-results
# -------------------------------------------------------------------------------------------------------------
"""
"""
import os
import shutil
import basic.program
import utils.path_tool
import utils.file_tool
import basic.constants as B
import utils.tdata_tool
import basic.componentHandling
import utils.path_const as P
import basic.message as message
PROGRAM_NAME = "copy_appdummy"
PROGRAM_DUMMY = "collect_testcase"
def startPyJob(job):
cm = basic.componentHandling.ComponentManager.getInstance(job, "init")
print("cm " + str(cm))
cm.initComponents()
comps = cm.getComponents(PROGRAM_DUMMY)
job.m.setMsg("# Components initialized with these relevant components " + str(comps))
for c in comps:
comp = cm.getComponent(c)
for cond in ["pre", "post"]:
tdatapath = utils.path_tool.composePattern(job, "{td"+cond+"exec}", comp)
envapppath = utils.path_tool.composePattern(job, "{tc"+cond+"cond}", comp)
if os.path.exists(tdatapath):
files = utils.file_tool.getFiles(job.m, job, tdatapath, r".+\.csv", None)
for f in files:
# shutil.copy()
print("cp " + os.path.join(tdatapath, f) + " " + os.path.join(envapppath, f))
utils.file_tool.mkPaths(job, os.path.join(envapppath, f), job.m)
shutil.copy(os.path.join(tdatapath, f), os.path.join(envapppath, f))
print(tdatapath)
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
print(PROGRAM_NAME)
x = basic.program.Job(PROGRAM_NAME)
x.startJob()
x.m.logDebug(str(vars(x.par)) + "\n" + str(vars(x.conf)))
if x.m.isRc("fatal"):
x.stopJob()
exit(x.m.rc * (-1) + 3)
startPyJob(x)
x.stopJob()
# See PyCharm help at https://www.jetbrains.com/help/pycharm/

15
execute_testcase.py

@ -5,16 +5,15 @@
# + create_request() : testspec --> tdata.step --> archiv.request
# + send_request() : archiv.request -- comp-config --> system.interface
# ---------------------------------------------------
import sys
import os
import basic.step
import traceback
import basic.program as program
import utils.tdata_tool
import utils.report_tool
import utils.path_tool
import utils.file_tool
import basic.componentHandling
import basic.message as message
# Press Shift+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.
@ -22,12 +21,14 @@ import basic.message as message
PROGRAM_NAME = "execute_testcase"
def startPyJob(job):
try:
cm = basic.componentHandling.ComponentManager.getInstance(job)
print("cm "+str(cm))
cm.initComponents()
comps = cm.getComponents(PROGRAM_NAME)
job.m.setMsg("# Components initialized with these relevant components " + str(comps))
tdata = utils.tdata_tool.getTestdata(job)
print(str(tdata))
if not "_steps" in tdata:
raise Exception("no steps to execute in testdata")
for (step) in tdata["_steps"]:
@ -37,6 +38,12 @@ def startPyJob(job):
job.m.merge(comp.m)
else:
job.m.setError(step.comp+" kann nicht aufgerufen werden!")
except Exception as e:
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.setFatal(str(e))
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.logDebug("execpt "+traceback.format_exc())
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
# Press the green button in the gutter to run the script.

0
features/environment.py

0
features/steps/steps.py

2
finish_testsuite.py

@ -27,7 +27,7 @@ def startPyJob(job):
text = report.reportTestsuite()
path = os.path.join(job.par.tsdir, "Result.html")
utils.file_tool.writeFileText(job.m, job, path, text)
archivFolder = job.conf.confs["paths"]["archiv"]
archivFolder = job.conf["paths"]["archiv"]
tsFolder = os.path.join(archivFolder, "Testlauf")
tarfile = utils.zip_tool.openNewTarFile(job, tsFolder, job.par.usecase+"_"+job.par.tstime+".tar.gz")
utils.zip_tool.appendFolderIntoTarFile(job, tsFolder, job.par.usecase+"_"+job.par.tstime, tarfile)

27
init_testcase.py

@ -6,38 +6,53 @@
# + load_testcase() : testspec --> tdata --> system.data
# + select_testcase() : system.data --> data --> archiv.result
# ---------------------------------------------------
import os
import traceback
import basic.program as program
import utils.tdata_tool
#import tools.tdata_tool as tdata_tool
import basic.componentHandling
import basic.constants as B
import utils.file_tool
import utils.path_tool
import utils.path_const as P
import tools.file_tool as file_tool
import tools.path_tool as path_tool
import tools.path_const as P
import basic.message as message
PROGRAM_NAME = "init_testcase"
def startPyJob(job):
try:
cm = basic.componentHandling.ComponentManager.getInstance(job)
cm.initComponents()
comps = cm.getComponents(PROGRAM_NAME)
job.m.setMsg("# Components initialized with these relevant components " + str(comps))
testdata = utils.tdata_tool.getTestdata(job)
testdata = "" # tdata_tool.getTestdata(job)
print("------------------------------------------------------------")
for c in comps:
comp = cm.getComponent(c)
comp.m.logInfo("------- "+comp.name+" ----------------------------------------")
try:
if job.hasFunction("reset_TData"):
comp.reset_TData(job, B.PAR_TESTCASE)
if job.hasFunction("load_TData"):
comp.load_TData(job, B.PAR_TESTCASE, testdata)
if job.hasFunction("read_TData"):
comp.read_TData(job, utils.path_tool.getKeyValue(job, P.KEY_PRECOND), B.PAR_TESTCASE)
comp.read_TData(job, path_tool.getKeyValue(job, P.KEY_PRECOND), B.PAR_TESTCASE)
except Exception as e:
txt = traceback.format_exc()
job.m.setFatal("Exception "+str(e)+"\n"+txt)
print("Exception " + str(e)+"\n"+txt)
job.stopJob(0)
comp.m.logInfo("------- "+comp.name+" ----------------------------------------")
job.m.merge(comp.m)
print(str(comp))
comp.conf["function"][PROGRAM_NAME] = comp.m.topmessage
except Exception as e:
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.setFatal(str(e))
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.logDebug("execpt "+traceback.format_exc())
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
# Press the green button in the gutter to run the script.
if __name__ == '__main__':

16
init_testsuite.py

@ -5,9 +5,16 @@ import yaml # pip install pyyaml
import basic.program
from basic.componentHandling import ComponentManager
import basic.message
import utils.tdata_tool
# import tools.tdata_tool as tdata_tool
PROGRAM_NAME = "init_testset"
def startPyJob(job):
#cm = basic.componentHandling.ComponentManager.getInstance(job)
#cm.initComponents()
#comps = cm.getComponents(PROGRAM_NAME)
comps = []
job.m.setMsg("# Components initialized with these relevant components " + str(comps))
#testdata = tdata_tool.getTestdata(job)
if __name__ == '__main__':
x = basic.program.Job(PROGRAM_NAME)
@ -19,11 +26,6 @@ if __name__ == '__main__':
print("fatal Error at begin")
x.stopJob()
exit(x.m.rc * (-1) + 3)
# now in theory the program is runnable
cm = ComponentManager(x)
cm.initComponents()
comps = cm.getComponents(PROGRAM_NAME)
print(" relevant components for this job: " + str(comps))
tdata = utils.tdata_tool.getTestdata(x)
startPyJob(x)
x.stopJob()
# See PyCharm help at https://www.jetbrains.com/help/pycharm/

319
install_workspace.py

@ -0,0 +1,319 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import json
import os
import datetime
import re
import subprocess
import traceback
import yaml
INSTALLED = False
try:
import basic.program
INSTALLED = True
except:
INSTALLED = False
PROGRAM_NAME = "install_workspace"
CONFIG_FORMAT = "yml"
BASIS_FORMAT = "json"
REPO_NAME = "_name"
REPO_URL = "url"
REPO_BRANCH = "_branch"
job = None
# -----------------------------------------------------------------------------------------
# minimal implementation of the program frame
class Logger:
"""
short version of the message object with the standard functions
* open_logs()
* close_logs()
* log_info()
* log_error()
"""
def __init__(self, job, level, logTime, comp):
self.openLog(job, logTime)
def openLog(self, job, logTime):
# job, level, logTime, component
home = getHome()
path = os.path.join(home, "log")
if not os.path.exists(path):
os.mkdir(path)
logpath = os.path.join(home, "log", job.program+"_"+logTime+".txt")
print("logpath "+logpath)
self.logfile = open(logpath, "w")
def logInfo(self, text):
self.logfile.write(text + "\n")
def logWarn(self, text):
self.logfile.write("WARN: "+text + "\n")
def setMsg(self, text):
self.logfile.write(text + "\n")
def logError(self, text):
self.logfile.write("ERROR:" + text + "\n")
print("ERROR:" + text)
def closeLog(self):
self.logfile.close()
class ActJob:
"""
short version of the job object with the standard functions
* start_job() starts the job with messaging
* set_parameter() sets parameters from args or from the call
* stop_job() stops the job with messaging
"""
def __init__(self, program):
self.program = program
self.start = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
self.jobid = 100000
self.conf = {}
self.par = {}
def startJob(self):
self.m = Logger(self, "info", self.start, None) # job, level, logTime, componente
text = "# # # Start Job " + self.start + " # # # "
self.m.logInfo(text)
print(text)
def stopJob(self):
self.ende = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
text = "# # # Stop Job " + self.start + " - " + self.ende + " # # # "
self.m.logInfo(text)
self.m.closeLog()
print(text)
def getDebugLevel(self, tool):
return 0
def debug(self, verify, text):
self.m.logInfo(text)
def setParameter(self, args):
for k in args:
setattr(self, k, args[k])
# -----------------------------------------------------------------------------------------
# standard control of the main processing
def startPyJob(job):
"""
controls the main processing; callable from the program itself or from job_tool
:param job:
:return:
"""
job.m.logInfo("startPyJob gestartet ")
try:
setParameter(job)
readConfig(job)
createFolders(job)
createGit(job)
createBasisConfig(job)
createDb(job)
except Exception as e:
job.m.logError("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.logError(str(e))
job.m.logError("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.logError("execpt "+traceback.format_exc())
job.m.logError("+++++++++++++++++++++++++++++++++++++++++++++")
# -----------------------------------------------------------------------------------------
# concrete processing routines
def setParameter(job):
job.m.logInfo("--- setze Parameter ")
def readConfig(job):
job.m.logInfo("--- suche config-Datei ")
args = {}
args["home"] = getHome()
configPath = ""
for p in os.listdir(args["home"]):
print(p)
path = os.path.join(args["home"], p)
if os.path.isfile(path) and "workspace" in p:
configPath = path
break
if len(configPath) < 1:
raise Exception("Keine Konfiguration gefunden in "+args["home"])
with open(configPath, 'r') as file:
doc = yaml.full_load(file)
file.close()
for k in doc:
args[k] = doc[k]
job.conf[k] = doc[k]
home = getHome()
for k in job.conf["paths"]:
job.conf["paths"][k] = os.path.join(home, job.conf["paths"][k])
job.setParameter(args)
def createFolders(job):
job.m.logInfo("--- erstelle Verzeichnisse ")
for p in job.paths:
path = os.path.join(job.home, job.paths[p])
createFolder(job, path)
def createFolder(job, path):
if not os.path.exists(path):
os.mkdir(path)
job.m.logInfo("Verzeichnis angelegt: "+ path)
elif not os.path.isdir(path):
job.m.logError("Verzeichnisname existiert und ist kein Verzeichnis "+ path)
else:
job.m.logInfo("Verzeichnis existiert: " + path)
# --------------------------------------------------------------------------------------
# git_tool
# --------------------------------------------------------------------------------------
def createGit(job):
job.m.logInfo("--- erstelle und aktualisiere git-Repos ")
repos = {}
local = {}
attr = {
REPO_NAME: "",
REPO_BRANCH: ""
}
# build the repo list with the attributes: name, branch, url
for r in job.repos:
if r in attr:
attr[r] = job.repos[r]
else:
repo = {}
for a in job.repos[r]:
repo[a] = job.repos[r][a]
repos[r] = repo
for k in attr:
a = k
for r in repos:
if a not in repos[r]:
repos[r][a] = attr[k]
for r in repos:
repo = repos[r]
path = os.path.join(job.home, job.paths[r])
if os.path.exists(path):
local[REPO_URL] = os.path.join(job.home, job.paths[r])
local[REPO_BRANCH] = repo[REPO_BRANCH]
local[REPO_NAME] = repo[REPO_NAME]
rpath = os.path.join(local[REPO_URL], ".git")
if os.path.exists(rpath):
job.m.logInfo("Repo existiert bereits "+r)
else:
job.m.logInfo("Repo erzeugen "+r)
initGit(job, local, repo)
updateLocal(job, local, repo)
else:
job.m.logError("Verzeichnis existiert nicht: " + path)
def initGit(job, local, repo, bare=False):
job.m.logInfo("--- initialisiere git-Repo "+str(repo)+","+str(local))
os.chdir(local[REPO_URL])
cmd = "git init "
if bare:
cmd += " --bare"
execCmd(job, cmd)
cmd = "git checkout " + local[REPO_BRANCH]
execCmd(job, cmd)
cmd = "git remote add " + repo[REPO_NAME] + " " + repo[REPO_URL]
execCmd(job, cmd)
os.chdir(job.home)
def execCmd(job, cmd):
job.m.logInfo(cmd)
text = ""
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
btext = process.communicate()[0]
text = btext.decode('utf-8')
job.m.logInfo(text)
return text
def checkoutLocal(job, local):
os.chdir(local[REPO_URL])
cmd = "git checkout " + local[REPO_BRANCH]
text = execCmd(job, cmd)
return text
def updateLocal(job, local, repo):
job.m.logInfo("--- aktualisiere git-Repo "+str(repo)+","+str(local))
text = checkoutLocal(job, local)
# if len(text) > 0 and re.match(r"[MA]\s\w+", text):
match = re.search(r"([DMA])\s(\S+)", text)
if match is not None:
os.chdir(job.home)
job.m.logError("ERROR: lokales Repo " + local[REPO_URL] + ", " + local[REPO_BRANCH] + " hat uncommited Aenderungen")
print("regex gefunden")
return
cmd = "git pull " + repo[REPO_NAME] + " " + repo[REPO_BRANCH]
text = execCmd(job, cmd)
job.m.logInfo(text)
os.chdir(job.home)
def updateRemote(job, local, repo):
job.m.logInfo("--- aktualisiere git-Repo "+str(repo)+","+str(local))
text = checkoutLocal(job, local)
cmd = "git push " + repo[REPO_NAME] + " " + repo[REPO_BRANCH]
text = execCmd(job, cmd)
os.chdir(job.home)
def createBasisConfig(job):
job.m.logInfo("--- erstelle Basis-Koniguration ")
config = {}
config["basic"] = {}
config["basic"]["paths"] = {}
config["basic"]["paths"]["home"] = job.home
for p in job.paths:
path = os.path.join(job.home, job.paths[p])
config["basic"]["paths"][p] = path
for p in ["temp", "config"]:
path = os.path.join(job.home, p)
createFolder(job, path)
config["basic"]["paths"][p] = path
if BASIS_FORMAT == "yml":
path = os.path.join(job.home, "config", "basis.json")
with open(path, 'w', encoding="utf-8") as file:
doc = yaml.dump(config, file)
file.write(doc)
file.close()
elif BASIS_FORMAT == "json":
path = os.path.join(job.home, "config", "basis.json")
with open(path, 'w', encoding="utf-8") as file:
doc = json.dumps(config, indent=4)
file.write(doc)
file.close()
def createDb(job):
if "db" in job.conf:
import basic.connection
import basic.Testserver
testserver = basic.Testserver.Testserver(job)
testserver.createAdminDBTables(job)
def getHome():
home = os.getcwd()
if home[-7:] == "program":
home = home[0:-8]
return home
# -----------------------------------------------------------------------------------------
# standard Python program call
# create and finish the job object
if __name__ == '__main__':
if INSTALLED:
#job = basic.program.Job(PROGRAM_NAME)
job = ActJob(PROGRAM_NAME)
else:
job = ActJob(PROGRAM_NAME)
job.startJob()
startPyJob(job)
job.stopJob()
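readConfig picks up the first YAML file in the home directory whose name contains "workspace" and copies its keys onto the job: paths drives createFolders, repos (with the "_name"/"_branch" defaults) drives createGit, and a db block triggers createDb. A minimal sketch of such a file, with placeholder repo name, branch, and URL:

# workspace.yml (illustrative values)
paths:
  program: program
  components: components
  testdata: testdata
repos:
  _name: origin
  _branch: main
  program:
    url: https://git.example.org/testtool/program.git
db:
  type: mysql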

280
job_dialog.py

@ -0,0 +1,280 @@
#!/usr/bin/python
# program to execute programs for a testcases or for a testsuite
# PARAM from INPUT: --granularity --application --environment --testcase/testsuite
# main functions
# + input_param() : cache-actjob --> user-input --> local-param
# + start_job() : local-param --> cache-actjob --> start-param
# ---------------------------------------------------
"""
"""
import os.path
import json
import re
import basic.program
import basic.constants as B
import tools.job_tool
import tools.file_tool
import tools.data_const as D
import tools.date_tool
import tools.path_tool
import tools.path_const as P
tempJob = {}
PROGRAM_NAME = "service"
DLG_TESTCASE = "Testfall"
DLG_TESTSUITE = "Testsuite"
DLG_COMPLETE = "Komplettausfuehrung"
LIST_DLG_GRAN = [DLG_TESTCASE, DLG_TESTSUITE]
DLG_START_QUESTION = "was soll getestet werden"
# DLG_TESTPLAN = "Testplan"
DLG_ENVIRONMENT = "Umgebung"
DLG_APPLICATION = "Anwendung"
DLG_REDO = "wiederholen"
DLG_CONTINUE = "fortsetzen"
DLG_DUMMY_STEP = "Dummy-Schritt"
DLG_NEWJOB = "neuer Job"
JOB_NR = {
DLG_TESTSUITE : {
"start": "init_testsuite",
"init_testsuite": {
"jobnr": "0" },
"execute_testsuite": {
"jobnr": "1"},
"collect_testsuite": {
"jobnr": "2"},
"compare_testsuite": {
"jobnr": "3"},
"finish_testsuite": {
"jobnr": "4"}
},
DLG_TESTCASE: {
"start": "init_testcase",
"init_testcase": {
"jobnr": "5" },
"execute_testcase": {
"jobnr": "6" },
"collect_testcase": {
"jobnr": "7" },
"copy_appdummy": {
"jobnr": "8" },
"compare_testcase": {
"jobnr": "9" },
},
"check_environment": {
"jobnr": "10" },
"test_executer": {
"jobnr": "11"},
}
JOB_LIST = [
"init_testsuite", # 0
"execute_testsuite", # 1
"collect_testsuite", # 2
"compare_testsuite", # 3
"finish_testsuite", # 4
"init_testcase", # 5
"execute_testcase", # 6
"collect_testcase", # 7
"copy_appdummy", # 8
"compare_testcase", # 9
"check_environment", # 10
"test_executer" # 11
]
appList = []
envList = []
entities = {}
entities[DLG_TESTCASE] = {}
entities[DLG_TESTSUITE] = {}
def readContext(job):
for k in job.conf[B.SUBJECT_APPS]:
appList.append(k)
path = job.conf[B.TOPIC_PATH][B.ATTR_PATH_ENV]
if os.path.exists(path):
for d in os.listdir(path):
print ("-- "+d)
if not os.path.isdir(os.path.join(path, d)):
continue
if d[0:1] == "_":
continue
envList.append(d)
path = job.conf[B.TOPIC_PATH][B.ATTR_PATH_TDATA]
if os.path.exists(path):
for d in os.listdir(path):
print("tdata path "+d)
if not os.path.isdir(os.path.join(path, d)):
print("continue a")
continue
if d[0:1] == "_":
print("continue b")
continue
specpath = os.path.join(path, d, D.DFILE_TESTCASE_NAME + ".csv")
readSpec(job, d, DLG_TESTCASE, specpath)
specpath = os.path.join(path, d, D.DFILE_TESTSUITE_NAME + ".csv")
readSpec(job, d, DLG_TESTSUITE, specpath)
def readSpec(job, testentity, testgran, specpath):
print("spec "+specpath)
if not os.path.isfile(specpath):
print("continue c")
return
text = tools.file_tool.read_file_text(job, specpath, job.m)
print("-----------\n"+text+"\n------------------")
if re.match(r".*?depricated;[jJyY]", text):
return
if re.match(r".*\nhead:application;", text):
print("## app gematcht")
res = re.search(r".*head:application;(.+)\n", text)
apps = res.group(1).replace(";", ",").split(",")
print("# "+str(apps))
for a in apps:
if len(a) < 1:
break
if a not in entities[testgran]:
entities[testgran][a] = []
print(a+" in "+testentity+" "+testgran+" -- "+str(entities))
entities[testgran][a].append(testentity)
def printProc(job, process):
print("--------------------------------------------------")
for k in process:
print("| {0:15s} : {1}".format(k, process[k]))
print("--------------------------------------------------")
def restartActualProcess(job):
"""
check if an actual process is open
:return:
"""
path = tools.path_tool.getActualJsonPath(job)
if os.path.exists(path):
actProc = tools.file_tool.read_file_dict(job, path, job.m)
print("restartActJob "+str(actProc))
printProc(job, actProc)
step = int(actProc["step"])
if actProc["program"] == "test_executer":
if step > 5:
dialogProcess(job)
else:
actProc["step"] = str(step+1)
tools.job_tool.start_child_process(job, actProc)
restartActualProcess(job)
selection = [DLG_NEWJOB, DLG_REDO]
nr = int(JOB_NR[actProc["gran"]][actProc["program"]]["jobnr"])
if (actProc["gran"] == DLG_TESTSUITE and nr < 4) or (actProc["gran"] == DLG_TESTCASE and nr < 9):
selection.append(DLG_CONTINUE)
if nr == 7:
selection.append(DLG_DUMMY_STEP)
choice = getChoice(job, selection, DLG_ENVIRONMENT)
print(choice)
if choice == DLG_REDO:
tools.job_tool.start_child_process(job, actProc)
restartActualProcess(job)
elif choice == DLG_DUMMY_STEP:
actProc["program"] = JOB_LIST[nr+1]
tools.job_tool.start_child_process(job, actProc)
restartActualProcess(job)
elif choice == DLG_CONTINUE:
if nr == 7:
nr = 9
else:
nr += 1
print (" act nr "+str(nr))
actProc["step"] = str(step + 1)
actProc["program"] = JOB_LIST[nr]
tools.job_tool.start_child_process(job, actProc)
elif choice == DLG_NEWJOB:
dialogProcess(job)
else:
dialogProcess(job)
def dialogProcess(job):
"""
dialog for selection and starting a process
:param job:
:return:
"""
process = {}
index = 0
print("create new process")
selection = list(LIST_DLG_GRAN)
if DLG_TESTCASE + " - " + DLG_COMPLETE not in selection:
selection.append(DLG_TESTCASE + " - " + DLG_COMPLETE)
if DLG_TESTSUITE + " - " + DLG_COMPLETE not in selection:
selection.append(DLG_TESTSUITE + " - " + DLG_COMPLETE)
choice = getChoice(job, selection, DLG_START_QUESTION)
if DLG_COMPLETE in choice:
process["gran"] = choice[0:-3-len(DLG_COMPLETE)]
process["program"] = "test_executer"
process["step"] = 1
else:
process["gran"] = choice
process["program"] = JOB_NR[process["gran"]]["start"]
process["step"] = 1
if len(appList) == 1:
process["app"] = appList[0]
else:
process["app"] = getChoice(job, appList, DLG_ENVIRONMENT)
#
if len(envList) == 1:
process["env"] = envList[0]
else:
process["env"] = getChoice(job, envList, DLG_ENVIRONMENT)
#
if len(entities[process["gran"]][process["app"]]) == 1:
process["entity"] = entities[process["gran"]][process["app"]][0]
else:
process["entity"] = getChoice(job, entities[process["gran"]][process["app"]], process["gran"])
print(str(process))
setattr(job.par, B.PAR_ENV, process["env"])
setattr(job.par, B.PAR_APP, process["app"])
if process["gran"] == DLG_TESTCASE:
setattr(job.par, B.PAR_TESTCASE, process["entity"])
setattr(job.par, B.PAR_TCTIME, tools.date_tool.getActdate(tools.date_tool.F_DIR))
path = tools.path_tool.composePattern(job, "{"+P.P_TCBASE+"}", None)
process[B.PAR_TCDIR] = path
elif process["gran"] == DLG_TESTSUITE:
setattr(job.par, B.PAR_TESTSUITE, process["entity"])
setattr(job.par, B.PAR_TSTIME, tools.date_tool.getActdate(tools.date_tool.F_DIR))
path = tools.path_tool.composePattern(job, "{"+P.P_TSBASE+"}", None)
process[B.PAR_TSDIR] = path
tools.job_tool.start_child_process(job, process)
restartActualProcess(job)
def getChoice(job, choiselist, description):
index = 0
print("+------------- "+description+" ----------")
print('| | {:2d} : {:60s}'.format(0, "exit"))
for k in choiselist:
index += 1
print('| | {:2d} : {:60s}'.format(index, k))
print("+-----------------------------------------------")
choice = input("Auswahl 1-" + str(index) + ": ")
if not choice.isnumeric():
print("FEHLER Fehleingabe "+choice)
return getChoice(job, choiselist, description)
elif int(choice) < 1:
exit(0)
elif int(choice) > index:
print("FEHLER Fehleingabe "+choice)
return getChoice(job, choiselist, description)
else:
return choiselist[int(choice) - 1]
if __name__ == '__main__':
job = basic.program.Job(PROGRAM_NAME, "", {})
readContext(job)
restartActualProcess(job)

140
md5Hash.py

@ -0,0 +1,140 @@
"""
This program traverses the given program directory and computes the md5 hash of every file.
If a file *md5Hash.txt exists next to this program, the values are compared against that file.
further features:
* turn it into an application, e.g. a jar
* callable via cli and dialog
* config for the standard inputs --path, --work
* --name with the naming rule release + name
* store a name list with: subdirectory, repo name and repo branch
* methods for accepting a delivery (unzip sub-zips, verify, git-push after successful verification)
* methods for creating a delivery
definition of *_md5protokoll.txt: file \t md5checksum \n
"""
import argparse
import datetime
import hashlib
import os
def openLog(args):
startTime = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
path = os.path.join(getattr(args, "work"), getattr(args, "name") + "_" + startTime + ".txt")
logfile = open(path, 'w', encoding="utf-8")
logfile.write("* * * * * * * * * * PROTOKOLLL MD5-Checksum-Pruefung * * * * * * * * * * * * * * *\n")
logfile.write("Name: " + getattr(args, "name") + "\n")
logfile.write("Path: " + getattr(args, "path") + "\n")
logfile.write("Dir : " + getattr(args, "dir") + "\n")
return logfile
def openResult(args, mode, suffix):
path = os.path.join(getattr(args, "work"), getattr(args, "name") + "_"+suffix+".txt")
if mode == "r" and not os.path.exists(path):
return None
resultfile = open(path, mode, encoding="utf-8")
return resultfile
def traverseDir(logfile, resultfile, path, rootpath):
logfile.write("traverse " + path + "\n")
for f in sorted(os.listdir(path)):
if f[:1] == ".":
continue
if f[:2] == "__":
continue
if os.path.isfile(os.path.join(path, f)):
fname = os.path.join(path, f)
lname = fname.replace(rootpath, "")
logfile.write(". " + lname + "\n")
resultfile.write(lname + "\t" + getMD5Hash(fname) + "\n")
elif os.path.isdir(os.path.join(path, f)):
traverseDir(logfile, resultfile, os.path.join(path, f), rootpath)
def getMD5Hash(path):
hash_md5 = hashlib.md5()
with open(path, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest()
def compareLists(logfile, args):
protokollfile = openResult(args, "r", "md5protokoll")
if protokollfile is None:
logfile.write("Kein Vergleich, da Protokolldatei fehlt! \n")
return
resultfile = openResult(args, "r", "md5result")
protLines = protokollfile.readlines()
protokollfile.close()
resultLines = resultfile.readlines()
resultfile.close()
p = 0
r = 0
error = False
while (True):
# print("p " + str(p) + " r " + str(r))
if len(protLines) > p:
protRow = protLines[p].replace("\r","").split("\t")
else:
protRow = None
if len(resultLines) > r:
resRow = resultLines[r].replace("\r","").split("\t")
else:
resRow = None
if protRow is None and resRow is None:
break
elif protRow is None and resRow is not None:
error = True
logfile.write("ERROR Result " + resRow[0] + ": ist ueberzaehlig\n")
r += 1
elif protRow is not None and resRow is not None and protRow[0] > resRow[0]:
error = True
logfile.write("ERROR Result " + resRow[0] + ": ist ueberzaehlig\n")
r += 1
elif resRow is None and protRow is not None:
error = True
logfile.write("ERROR Protokoll " + protRow[0] + ": ist ueberzaehlig\n")
p += 1
elif protRow is not None and resRow is not None and protRow[0] < resRow[0]:
error = True
logfile.write("ERROR Protokoll " + protRow[0] + ": ist ueberzaehlig\n")
p += 1
elif protRow is not None and resRow is not None and protRow[0] == resRow[0]:
if protRow[1] != resRow[1]:
error = True
logfile.write("ERROR "+protRow[0]+": md5Hash unterscheiden sich (" + protRow[1] + "!=" + resRow[1].strip() + ")\n")
r += 1
p += 1
if error:
logfile.write("\n+--------------------------------------------------------+\n")
logfile.write("| Fehler aufgetreten, die Dateien unterscheiden sich |\n")
logfile.write("+--------------------------------------------------------+\n")
else:
logfile.write("\nDateien unterscheiden sich nicht\n")
def readParameter():
"""
--dir the directory to traverse
--name name suffix for the program package under examination
--work working directory containing:
<name>_md5result.txt the result file that gets created
<name>_md5protokoll.txt the comparison file shipped with the delivery
<name>_YYYYMMDD_hhmmss.txt the log file
"""
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--path', required=True, action='store')
parser.add_argument('-d', '--dir', required=True, action='store')
parser.add_argument('-n', '--name', required=True, action='store')
parser.add_argument('-w', '--work', required=True, action='store')
args = parser.parse_args()
return args
if __name__ == '__main__':
args = readParameter()
logfile = openLog(args)
logfile.write("\n")
resultfile = openResult(args, "w", "md5result")
path = os.path.join(getattr(args, "path"))
traverseDir(logfile, resultfile, path, path)
resultfile.close()
logfile.write("\n")
compareLists(logfile, args)
logfile.close()
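A hypothetical invocation (all four arguments are required; paths and names are illustrative):

python md5Hash.py --path /opt/delivery/release1 --dir program --name release1 --work /tmp/md5work

This writes /tmp/md5work/release1_md5result.txt, logs to /tmp/md5work/release1_<timestamp>.txt, and compares against /tmp/md5work/release1_md5protokoll.txt if that file was shipped with the delivery.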

0
model/__init__.py

6
model/ap_applications.csv

@ -0,0 +1,6 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:ap_applications;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;apappid;pk;vchar(256);N;;;;;;
;apid;int;vchar(256);I;;;;;;
;appid;int;vchar(256);I;;;;;;

7
model/ap_components.csv

@ -0,0 +1,7 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:ap_components;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;apcomid;pk;int;N;;;;;;
;apid;int;int;I;;;;;;
;coid;int;int;I;;;;;;
;component;str;vchar(256);N;;;;;;

8
model/ap_projects.csv

@ -0,0 +1,8 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:ap_projects;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;pjid;pk;int;N;;;;;;
;apid;int;int;I;;;;;;
;project;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;

7
model/ap_usecases.csv

@ -0,0 +1,7 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:ap_usecases;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;apuseid;pk;int;N;;;;;;
;apid;int;int;I;;;;;;
;usid;int;int;I;;;;;;
;usecase;str;vchar(256);N;;;;;;

7
model/ap_variants.csv

@ -0,0 +1,7 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:ap_variants;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;apvarid;pk;int;N;;;;;;
;apid;int;int;I;;;;;;
;vaid;int;int;I;;;;;;
;variant;str;vchar(256);N;;;;;;

13
model/application.csv

@ -0,0 +1,13 @@
_type;ctlg;;;;;;;;;
_key;_field;;;;;;;;;
table:application;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;apid;pk;autoint;N;;;;;;
;name;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;project;str;vchar(256);N;;;;;;
;attributes;string;jlob;N;;;;;;
;applications;subtable;subtable;N;;;;;;
;components;subtable;subtable;N;;;;;;
;usecases;subtable;subtable;N;;;;;;
;variants;subtable;subtable;N;;;;;;

246
model/application.py

@ -0,0 +1,246 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.program
import basic.toolHandling
import basic.constants as B
import model.entity
import model.constants as M
import tools.data_const as D
import tools.path_const as P
import tools.config_tool
import tools.file_tool
import tools.git_tool
import tools.file_type
TABLE_NAMES = ["application", "ap_project", "ap_component"]
DEFAULT_SYNC = M.SYNC_FULL_GIT2DB
TABLE_NAME = B.SUBJECT_APP
""" system-name for this entity """
FIELD_ID = "apid"
FILE_EXTENSION = D.DFILE_TYPE_YML
UNIQUE_FIELDS = [D.FIELD_NAME]
""" unique business field as human identifer """
IDENTIFYER_FIELDS = [FIELD_ID]
""" unique technical field as technical identifer """
def searchProjects(job, appl):
"""
search all relevant projects from server-configuration
filtered by parameter --application , --project
:param job:
:return:
"""
projects = {}
if B.SUBJECT_PROJECTS in job.conf:
for k in job.conf[B.SUBJECT_PROJECTS]:
if k in B.LIST_SUBJECTS:
continue
if hasattr(job.par, B.PAR_PROJ) and k != getattr(job.par, B.PAR_PROJ):
continue
if hasattr(job.par, B.PAR_APP) \
and k not in appl[B.SUBJECT_APPS][getattr(job.par, B.PAR_APP)][B.SUBJECT_PROJECTS]:
continue
projects[k] = appl[B.SUBJECT_PROJECTS][k]
projects[k][B.SUBJECT_ENVIRONMENT] = []
else:
job.conf[B.SUBJECT_PROJECTS] = appl[B.SUBJECT_PROJECTS]
return projects
def select_applications(job, projectList):
"""
get all project which are configured for the workspace
with all environments where the application of the project are installed
:param job:
:return:
"""
appl = tools.config_tool.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS)
return searchApplications(job, projectList, appl)
def searchApplications(job, projectList, appl):
appList = {}
for proj in projectList:
if hasattr(job, "par") and hasattr(job.par, B.PAR_PROJ) and proj != getattr(job.par, B.PAR_PROJ):
continue
for app in appl[B.SUBJECT_APPS]:
if B.SUBJECT_PROJECT in appl[B.SUBJECT_APPS][app] and proj != appl[B.SUBJECT_APPS][app][B.SUBJECT_PROJECT]:
continue
appList[app] = appl[B.SUBJECT_APPS][app]
return appList
def syncEnitities(job):
"""
synchronize the configuration with the database
:param job:
:return:
"""
syncMethod = DEFAULT_SYNC
if syncMethod.count("-") < 2:
return
fileTime = model.entity.VAL_ZERO_TIME
dbTime = model.entity.VAL_ZERO_TIME
# get git-commit
if "git" in syncMethod:
apppath = tools.config_tool.select_config_path(job, P.KEY_BASIC, B.SUBJECT_APPS, "")
repopath = apppath[len(job.conf[B.TOPIC_PATH][B.ATTR_PATH_COMPS]) + 1:]
gitresult = tools.git_tool.gitLog(job, B.ATTR_PATH_COMPS, repopath, 1)
fileTime = gitresult[0]["date"]
print(str(gitresult))
if "db" in syncMethod:
if B.TOPIC_NODE_DB in job.conf:
dbi = basic.toolHandling.getDbTool(job, job.testserver, job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
data = dbi.selectRows(TABLE_NAMES[0], job)
print(str(data[B.DATA_NODE_DATA]))
if len(data[B.DATA_NODE_DATA]) > 0:
dbTime = data[B.DATA_NODE_DATA][0]["updtime"]
if fileTime == dbTime:
print("gleich")
elif fileTime < dbTime:
print("db vorne")
(appObjects, appDict) = selectEntities(job, dbi)
print(str(appDict))
applPath = tools.config_tool.select_config_path(job, P.KEY_BASIC, B.SUBJECT_APPS)
tools.file_tool.write_file_dict(job.m, job, applPath, appDict)
#
elif fileTime > dbTime:
print("git vorne")
applData = tools.config_tool.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS)
insertEntities(job, applData, dbTime, dbi)
def selectEntities(job, dbi):
appObjects = []
appDict = {}
appDict[B.SUBJECT_PROJECTS] = {}
appDict[B.SUBJECT_APPS] = {}
appData = dbi.selectRows(TABLE_NAMES[0], job)
projData = dbi.selectRows(TABLE_NAMES[1], job)
compData = dbi.selectRows(TABLE_NAMES[2], job)
for row in appData[B.DATA_NODE_DATA]:
ao = Application(job)
ao.setAppRow(row, "")
appDict[B.SUBJECT_APPS][ao.name] = {}
for f in job.testserver.conf[B.DATA_NODE_DDL][TABLE_NAMES[0]][B.DATA_NODE_HEADER]:
if f in model.entity.ENTITY_FIELDS:
continue
appDict[B.SUBJECT_APPS][ao.name][f] = getattr(ao, f)
apid = ao.apid
rows = [row for row in projData[B.DATA_NODE_DATA] if row["apid"] == apid]
ao.setProjRow(rows)
appDict[B.SUBJECT_APPS][ao.name][B.SUBJECT_PROJECTS] = []
for proj in getattr(ao, B.PAR_PROJ):
appDict[B.SUBJECT_APPS][ao.name][B.SUBJECT_PROJECTS].append(proj)
if proj in appDict[B.SUBJECT_PROJECTS]:
appDict[B.SUBJECT_PROJECTS][proj][B.SUBJECT_APPS].append(ao.name)
continue
appDict[B.SUBJECT_PROJECTS][proj] = {}
appDict[B.SUBJECT_PROJECTS][proj][B.SUBJECT_APPS] = []
appDict[B.SUBJECT_PROJECTS][proj][B.SUBJECT_APPS].append(ao.name)
aoproj = getattr(ao, "project")[proj]
for f in job.testserver.conf[B.DATA_NODE_DDL][TABLE_NAMES[1]][B.DATA_NODE_HEADER]:
if f in model.entity.ENTITY_FIELDS + ["approid", "apid"]:
continue
appDict[B.SUBJECT_PROJECTS][proj][f] = aoproj[f]
rows = [row for row in compData[B.DATA_NODE_DATA] if row["apid"] == apid]
ao.setCompRow(rows)
appDict[B.SUBJECT_APPS][ao.name][B.SUBJECT_COMPS] = []
for comp in getattr(ao, B.PAR_COMP):
appDict[B.SUBJECT_APPS][ao.name][B.SUBJECT_COMPS].append(comp)
appObjects.append(ao)
return appObjects, appDict
def insertEntities(job,applData, dbTime, dbi):
# insertRows
# get list of application
if dbTime != model.entity.VAL_ZERO_TIME:
for t in TABLE_NAMES:
dbi.deleteRows(job, t)
for app in applData[B.SUBJECT_APPS]:
ao = Application(job)
ao.read_entity(job, app)
ao.insertEntity(dbi)
class Application(model.entity.Entity):
""" table = "application"
job = None
name = ""
description = ""
reference = ""
components = {}
project = {}
"""
FIELD_ID = "apid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE, B.SUBJECT_PROJECT]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES]
LIST_SUBTABLES = [B.SUBJECT_APPS, B.SUBJECT_COMPS, B.SUBJECT_USECASES, B.SUBJECT_VARIANTS]
PREFIX_SUBTABLE = "ap"
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param opt. project: select-criteria if used and defined
:param opt. application: select-criteria if used and defined
:param opt. gran: granularity values testcase / testsuite / testplan
:param opt. args additional args
:return: list of entity-names
"""
config = self.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS,
tools.config_tool.get_plain_filename(job, ""), ttype=B.SUBJECT_APP)
conf = list(config[B.SUBJECT_APPS].keys())
outList = []
for k in conf:
if k[:1] != "_":
outList.append(k)
return outList
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
config = self.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS,
tools.config_tool.get_plain_filename(job, name), ttype=B.SUBJECT_APP)
return self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)
@staticmethod
def rebuild_data(job, data: dict) -> dict:
"""
gets the subtable-tag from filecsv and sets the subtables in order to workable entity-elements
:param job:
:param data:
:return:
"""
data = tools.file_type.popSubjectsNode(job, data)
# data = tools.file_type.popNameNode(job, data)
return data
def check_data(self, job, data: dict) -> dict:
"""
it checks the data for the specific form
:param job:
:param data:
:return:
"""
checkNodes = {}
checkNodes[tools.file_type.MUST_NODES] = [B.SUBJECT_COMPS]
checkNodes[tools.file_type.MUSTNT_NODES] = [B.DATA_NODE_DATA, B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
checkNodes[tools.file_type.OPT_NODES] = [B.SUBJECT_APPS, B.SUBJECT_VARIANTS, B.SUBJECT_USECASES]
for conf in data:
tools.file_type.check_nodes(job, data[conf], checkNodes)
return data

10
model/artifact.csv

@ -0,0 +1,10 @@
table:artifact;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;arid;pk;int;N;;;;;;
;name;str;vchar(256);I;;;;;;
;artype;str;vchar(256);I;;;;;;
;project;str;vchar(256);I;;;;;;
;component;str;vchar(256);I;;;;;;
;testcase;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;attributes;string;jlob;N;;;;;;

46
model/artifact.py

@@ -0,0 +1,46 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import basic.toolHandling
import tools.data_const as D
import basic.constants as B
import model.entity
import tools.config_tool
FIELD_ID = "arid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE,
B.SUBJECT_PROJECT, B.SUBJECT_COMP, B.SUBJECT_TESTCASE]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES]
LIST_SUBTABLES = []
class Artifact(model.entity.Entity):
FIELD_ID = "arid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, "artype", B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE,
B.SUBJECT_PROJECT, B.SUBJECT_COMP, B.SUBJECT_TESTCASE]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES]
LIST_SUBTABLES = []
name = ""
description = ""
prelease = ""
testsuites = {}
steps = []
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
# TODO: stub - the artifact configuration is not yet read from storage
config = {}
return self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)

117
model/catalog.py

@@ -0,0 +1,117 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.program
import basic.constants as B
import tools.path_const as P
import tools.data_const as D
import tools.config_tool
import tools.path_tool
import tools.file_tool
# import tools.tdata_tool
EXP_KEY_MISSING = "key is missing {}"
EXP_KEY_DOESNT_EXIST = "key doesnt exist in domain {}"
class Catalog:
__instance = None
"""
this class manages each defined key-value pair
the pairs are loaded from the path testdata/catalog:
* initially the csv-file catalog.csv
* on demand other csv-files in the path
"""
def __init__(self):
self.catalog = {}
Catalog.__instance = self
pass
@staticmethod
def getInstance():
if Catalog.__instance is None:
return Catalog()
return Catalog.__instance
def getValue(self, job, domain, key, subkey=""):
"""
this function gets the value for the domain and key
:param domain:
:param key:
:return:
"""
if not isinstance(domain, str) or len(domain) < 1:
raise Exception(EXP_KEY_MISSING, (domain, key))
if not isinstance(key, str) or len(key) < 1:
job.m.setError(EXP_KEY_MISSING+" ("+domain+", "+key+")")
return ""
if domain not in self.catalog:
self.readDomain(domain, job)
if key not in self.catalog[domain]:
job.m.setError(EXP_KEY_DOESNT_EXIST+" ("+domain+", "+key+")")
return ""
if len(subkey) > 0:
if subkey not in self.catalog[domain][key]:
job.m.setError(EXP_KEY_DOESNT_EXIST + " (" + domain + ", " + key + ", " + subkey + ")")
return ""
return self.catalog[domain][key][subkey].strip()
return self.catalog[domain][key]
def getKeys(self, domain, job):
"""
this function gets the keys of the domain
:param domain:
:return:
"""
if not isinstance(domain, str) or len(domain) < 1:
raise Exception(EXP_KEY_MISSING, (domain))
if domain not in self.catalog:
self.readDomain(domain, job)
if domain not in self.catalog:
return []
return list(self.catalog[domain].keys())
def readDomain(self, domain, job):
"""
this function reads the domain-entries
:param domain:
:return:
"""
if not isinstance(domain, str) or len(domain) < 1:
raise Exception(EXP_KEY_MISSING, (domain))
if domain in self.catalog:
return self.catalog[domain]
pathname = tools.config_tool.select_config_path(job, P.KEY_CATALOG, domain)
if pathname is None:
raise Exception(EXP_KEY_MISSING, (domain))
if hasattr(job, "m"):
msg = job.m
else:
msg = None
data = tools.file_tool.read_file_dict(job, pathname, msg, D.CSV_SPECTYPE_CTLG)
if hasattr(job, "m"):
job.m.debug(12, "domain " + domain + " read from " + pathname)
self.catalog[domain] = data[B.DATA_NODE_KEYS]
return data
def exportXSD(self, domain):
"""
this function exports the domain into xsd-declaration of simple types
:return:
"""
pass
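A short usage sketch for the catalog singleton (the domain name "severity" is purely illustrative; job must be a configured job object):

    import model.catalog

    def lookup_severity(job, key):
        catalog = model.catalog.Catalog.getInstance()
        # the first access loads the csv-file of the domain, later calls hit the cache
        if key in catalog.getKeys("severity", job):
            return catalog.getValue(job, "severity", key)
        return ""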

10
model/co_artifacts.csv

@@ -0,0 +1,10 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:co_artifacts;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;coartid;pk;int;N;;;;;;
;coid;int;integer;I;;;T:1;;;
;stid;int;integer;I;;;T:2;;;
;story;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;acceptcriteria;str;clob;N;;;;;;

7
model/co_components.csv

@@ -0,0 +1,7 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:en_components;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;encomid;pk;int;N;;;;;;
;enid;int;int;I;;;;;;
;coid;int;int;I;;;;;;
;component;str;vchar(256);N;;;;;;

10
model/co_datatables.csv

@@ -0,0 +1,10 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:co_datatables;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;codtbid;pk;int;N;;;;;;
;coid;int;integer;I;;;T:1;;;
;dtid;int;integer;I;;;T:2;;;
;datatable;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;acceptcriteria;str;clob;N;;;;;;

11
model/co_steps.csv

@@ -0,0 +1,11 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:co_steps;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;costpid;pk;int;N;;;;;;
;coid;int;integer;I;;;T:1;;;
;vaid;int;integer;I;;;T:2;;;
;stid;int;integer;I;;;T:2;;;
;step;str;vchar(256);I;;;;;;
;variant;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;

13
model/component.csv

@@ -0,0 +1,13 @@
_type;ctlg;;;;;;;;;
_key;_field;;;;;;;;;
table:component;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;coid;pk;int;N;;;;;;
;name;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;project;string;vchar(256);I;;;;;;
;reference;str;vchar(256);N;;;;;;
;attributes;string;jlob;N;;;;;;
;components;subtable;subtable;N;;;;;;
;datatables;subtable;subtable;N;;;;;;
;steps;subtable;subtable;N;;;;;;
;artifacts;subtable;subtable;N;;;;;;

146
model/component.py

@@ -0,0 +1,146 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.toolHandling
import basic.constants as B
import model.entity
import model.factory
import tools.data_const as D
import tools.path_const as P
import tools.config_tool as config_tool
import tools.file_tool as file_tool
import tools.git_tool
import tools.file_type
TABLE_NAMES = ["component", "co_step", "co_table", "co_artifact", "co_comps"]
DEFAULT_SYNC = model.entity.SYNC_FULL_GIT2DB
TABLE_NAME = "component"
""" system-name for this entity """
FIELD_ID = "coid"
CP_SUBJECT_COMPS = "components"
CP_SUBJECT_STEPS = "steps"
CP_SUBJECT_TABLES = "tables"
CP_SUBJECT_ARTS = B.SUBJECT_ARTIFACTS
LIST_CP_SUBJECTS = [CP_SUBJECT_COMPS, CP_SUBJECT_STEPS, CP_SUBJECT_TABLES, CP_SUBJECT_ARTS]
REL_ATTR_TYPE = "relationtyp"
REL_ATTR_FILE = "conffile"
REL_ATTR_FTYPE = "filetyp"
REL_ATTR_IP_PATTERN = "ippattern"
REL_ATTR_HOST_PATTERN = "hostpattern"
REL_ATTR_PORT_PATTERN = "portpattern"
REL_ATTR_URL_PATTERN = "urlpattern"
LIST_REL_ATTR = [REL_ATTR_TYPE, REL_ATTR_FILE, REL_ATTR_FTYPE,
REL_ATTR_IP_PATTERN, REL_ATTR_HOST_PATTERN, REL_ATTR_PORT_PATTERN, REL_ATTR_URL_PATTERN]
def select_components(job, project, application):
"""
gets all components which are configured in the workspace
:param job:
:return:
"""
outList = []
appl = tools.config_tool.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS)
path = job.conf[B.TOPIC_PATH][B.ATTR_PATH_COMPS]
for p in os.listdir(path):
if p in ["catalog", "config", "test", "tools"]:
continue
if p[0:1] in [".", "_"]:
continue
if not os.path.isdir(os.path.join(path, p)):
continue
outList.append(p)
return outList
class Component(model.entity.Entity):
FIELD_ID = "coid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE, B.SUBJECT_PROJECT]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES, B.DATA_NODE_TOPICS]
LIST_SUBTABLES = [B.SUBJECT_ARTIFACTS, B.SUBJECT_COMPS, B.SUBJECT_STEPS, B.SUBJECT_DATATABLES]
PREFIX_SUBTABLE = "co"
coid = 0
name = ""
description = ""
reference = ""
project = ""
application = ""
attributes = ""
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param project: opt. select-criteria if used and defined
:param application: opt. select-criteria if used and defined
:param gran: opt. granularity values testcase / testsuite / testplan
:param args: opt. additional args
:return: list of entity-names
"""
# forward the search
if application != "":
app = model.factory.getApplication()
return list(app.components.keys())
path = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_COMPS])
outList = self.getDirlist(job, path, "csv")
return outList
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
config = self.getConfig(job, P.KEY_COMP, tools.config_tool.get_plain_filename(job, name), "", ttype=B.SUBJECT_COMP)
return self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)
@staticmethod
def rebuild_data(job, data: dict) -> dict:
"""
gets the subtable-tag from filecsv and sets the subtables in order to workable entity-elements
:param job:
:param data:
:return:
"""
data = tools.file_type.popSubjectsNode(job, data)
# data = tools.file_type.popNameNode(job, data)
return data
def check_data(self, job, data: dict) -> dict:
"""
it checks the data for the specific form
:param job:
:param data:
:return:
"""
checkNodes = {}
checkNodes[tools.file_type.MUST_NODES] = [B.SUBJECT_COMPS]
checkNodes[tools.file_type.MUSTNT_NODES] = [B.DATA_NODE_DATA, B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
checkNodes[tools.file_type.OPT_NODES] = [B.SUBJECT_APPS, B.SUBJECT_VARIANTS, B.SUBJECT_USECASES]
for conf in data:
tools.file_type.check_nodes(job, data[conf], checkNodes)
return data
def write_entity(self, job, name):
return
def remove_entity(self, job, name):
return
def select_entity(self, job, name):
return
def update_entity(self, job, name):
return
def delete_entity(self, job, name):
return
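A usage sketch for the component listing (assumes a configured components path in job.conf; the output handling is illustrative):

    import model.component

    def print_components(job):
        # every directory below the components path that is not infrastructure
        for name in model.component.select_components(job, "", ""):
            comp = model.component.Component(job)
            entity = comp.read_entity(job, name)
            print(entity.getName())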

11
model/constants.py

@@ -0,0 +1,11 @@
import basic.constants as B
SYNC_FULL_GIT2DB = "full-git-db"
SYNC_HEAD_GIT2DB = "head-git-db"
SYNC_COPY_FILE2DB = "copy-file-db"
SYNC_ONLY_GIT = "only-git"
SYNC_ONLY_DB = "only-db"
STORAGE_DB = B.TOPIC_NODE_DB
STORAGE_FILE = B.TOPIC_NODE_FILE
LIST_ENTITY_SYNC = [SYNC_ONLY_GIT, SYNC_FULL_GIT2DB, SYNC_HEAD_GIT2DB, SYNC_COPY_FILE2DB, SYNC_ONLY_DB]

12
model/datatable.csv

@@ -0,0 +1,12 @@
_type;ctlg;;;;;;;;;
_key;_field;;;;;;;;;
table:datatable;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;dtid;pk;integer;N;;;T:1;;;
;name;str;vchar(128);I;;;F:3;;;
;component;str;vchar(128);I;;;F:3;;;
;dtdatabase;str;vchar(128);I;;;F:1;;;
;dtschema;str;vchar(128);I;;;F:2;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;story
;attributes;jlob;jlob;N;;;;;;
;fieldnames;jlob;jlob;N;;;;;;

58
model/datatable.py

@@ -0,0 +1,58 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.constants as B
import model.entity
import tools.path_const as P
import tools.data_const as D
import tools.config_tool
import tools.file_tool
import tools.git_tool
FIELD_ID = "dtid"
FIELD_NAME = "name"
FIELD_DESCRIPTION = B.SUBJECT_DESCRIPTION
FIELD_REFERENCE = B.SUBJECT_REFERENCE
FIELD_COMPONENT = B.SUBJECT_COMP
FIELD_ATTRIBUTES = B.NODE_ATTRIBUTES
FIELD_HEADER = ""
LIST_FIELDS = [FIELD_ID, FIELD_NAME, FIELD_DESCRIPTION, FIELD_REFERENCE, FIELD_COMPONENT]
LIST_NODES = [B.DATA_NODE_HEADER, B.DATA_NODE_DATA, B.DATA_NODE_FIELDS]
LIST_SUBTABLES = {}
class Datatable(model.entity.Entity):
FIELD_ID = "dtid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, "dtdatabase", "dtschema",
B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE, B.SUBJECT_COMP]
LIST_NODES = [B.DATA_NODE_HEADER, B.DATA_NODE_DATA, B.DATA_NODE_FIELDS, B.NODE_ATTRIBUTES, "fieldnames"]
LIST_SUBTABLES = []
dcid = 0
document = ""
description = ""
project = ""
reference = ""
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param project: opt. select-criteria if used and defined
:param application: opt. select-criteria if used and defined
:param gran: opt. granularity values testcase / testsuite / testplan
:param args: opt. additional args
:return: list of entity-names
"""
path = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_COMPS], P.KEY_CATALOG, P.VAL_TABLES)
outList = self.getDirlist(job, path, "csv")
return outList
def read_entity(self, job, name):
config = self.getConfig(job, P.KEY_CATALOG, name, tools.config_tool.get_plain_filename(job, name))
return self.setAttributes(job, config, name, LIST_FIELDS, LIST_NODES, LIST_SUBTABLES)

7
model/en_components.csv

@@ -0,0 +1,7 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:en_components;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;encomid;pk;int;N;;;;;;
;enid;int;int;I;;;;;;
;coid;int;int;I;;;;;;
;component;str;vchar(256);N;;;;;;

6
model/en_projects.csv

@@ -0,0 +1,6 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:en_projects;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;enproid;pk;int;N;;;;;;
;enid;int;vchar(256);I;;;;;;
;project;str;vchar(256);I;;;;;;

643
model/entity.py

@@ -0,0 +1,643 @@
import getpass
import os
import re
import basic.toolHandling
#import model.factory
# import model.entity
import tools.data_const as D
import tools.path_const as P
import basic.constants as B
import tools.config_tool
import tools.data_tool
import tools.date_tool
import tools.file_tool
ENTITY_NAME = "name"
ENTITY_ATTRIBUTES = B.NODE_ATTRIBUTES
ENTITY_INS_COMMIT = "inscommit"
ENTITY_INS_AUTHOR = "insauthor"
ENTITY_INS_TIME = "instime"
ENTITY_UPD_COMMIT = "updcommit"
ENTITY_UPD_AUTHOR = "updauthor"
ENTITY_UPD_TIME = "updtime"
ENTITY_ACTUAL = "actual"
VAL_ACTUAL = 1
VAL_ZERO_TIME = "2000-01-01_00-00-00"
ENTITY_FIELDS = [ENTITY_INS_COMMIT, ENTITY_INS_AUTHOR, ENTITY_INS_TIME,
ENTITY_UPD_COMMIT, ENTITY_UPD_AUTHOR, ENTITY_UPD_TIME, ENTITY_ACTUAL]
SYNC_FULL_GIT2DB = "full-git-db"
SYNC_HEAD_GIT2DB = "head-git-db"
SYNC_COPY_FILE2DB = "copy-file-db"
SYNC_ONLY_GIT = "only-git"
SYNC_ONLY_DB = "only-db"
STORAGE_DB = B.TOPIC_NODE_DB
STORAGE_FILE = B.TOPIC_NODE_FILE
LIST_ENTITY_SYNC = [SYNC_ONLY_GIT, SYNC_FULL_GIT2DB, SYNC_HEAD_GIT2DB, SYNC_COPY_FILE2DB, SYNC_ONLY_DB]
print("is importing module.entity")
def getEntityValue(job, field, gitcommit):
if field == ENTITY_INS_COMMIT:
return ""
if field == ENTITY_INS_AUTHOR:
return getpass.getuser()
if field == ENTITY_INS_TIME:
return tools.date_tool.getActdate(tools.date_tool.F_DIR)
if field == ENTITY_UPD_COMMIT:
return gitcommit["commit"]
if field == ENTITY_UPD_AUTHOR:
return gitcommit["author"]
if field == ENTITY_UPD_TIME:
return gitcommit["date"]
if field == ENTITY_ACTUAL:
return VAL_ACTUAL
class Entity:
""" system-name for this entity """
FIELD_ID = ""
LIST_FIELDS = []
""" list of object-attributes """
LIST_NODES = []
LIST_SUBTABLES = []
PREFIX_SUBTABLE = ""
def __init__(self, job, entityname: str = "", name: str = "", args: dict = {}):
import model.table
self.job = job
if entityname == "":
classname = str(self)
a = classname.split(".")
entityname = a[1]
entityname = tools.data_tool.getSingularKeyword(entityname)
self.entityname = entityname
if entityname not in ["", "table"]:
self.setDdlAttributes(job, entityname)
for f in self.ddls[entityname][model.table.LISTNAME_SUBTABLE]:
self.setDdlAttributes(job, self.PREFIX_SUBTABLE + "_" + tools.data_tool.getSingularKeyword(f))
if len(name) > 1:
self.getEntity(job, name, args)
def setDdlAttributes(self, job, entityname: str=""):
"""
:param job:
:param entityname:
:return:
"""
import model.table
self.ddls = {}
ddlargs = {model.table.TYPE_CONTEXT: B.ATTR_INST_TESTSERVER}
if entityname not in ["", B.SUBJECT_DATATABLES]:
table = model.table.Table(job)
table = table.read_entity(job, self.entityname, args=ddlargs)
self.ddls[entityname] = {}
self.ddls[entityname][model.table.LISTNAME_DDLNAMES] = getattr(table, model.table.LISTNAME_DDLNAMES)
self.ddls[entityname][model.table.LISTNAME_DDLFIELDS] = getattr(table, model.table.LISTNAME_DDLFIELDS)
listFields = []
listNodes = []
listSubtables = []
for f in self.ddls[entityname][model.table.LISTNAME_DDLNAMES]:
if self.ddls[entityname][model.table.LISTNAME_DDLFIELDS][f][D.DDL_FIELD] in B.LIST_SUBJECTS:
listSubtables.append(f)
elif self.ddls[entityname][model.table.LISTNAME_DDLFIELDS][f][D.DDL_FORMAT] in ["jlob"]:
listNodes.append(f)
elif self.ddls[entityname][model.table.LISTNAME_DDLFIELDS][f][D.DDL_FIELD] in table.LIST_ADMINFIELDS:
pass
else:
listFields.append(f)
self.ddls[entityname][model.table.LISTNAME_FIELDS] = listFields
self.ddls[entityname][model.table.LISTNAME_NODES] = listNodes
self.ddls[entityname][model.table.LISTNAME_SUBTABLE] = listSubtables
# check the lists ... hard-coded vs. configured
# TODO why hard-coded constants ??
for f in listFields:
if f not in self.LIST_FIELDS:
raise Exception(entityname + " " + str(self) + " a check list <-> LIST_FIELDS " + f)
for f in listNodes:
if f not in self.LIST_NODES:
raise Exception(entityname + " " + str(self) + " a check list <-> LIST_NODES " + f)
for f in listSubtables:
if f not in self.LIST_SUBTABLES:
raise Exception(entityname + " " + str(self) + " a check list <-> LIST_SUBTABLES " + f)
for f in self.LIST_FIELDS:
if f not in listFields:
raise Exception(entityname + " " + str(self) + " b check list <-> LIST_FIELDS " + f)
for f in self.LIST_NODES:
if f in B.LIST_DATA_NODE or f[:1] == "_":
continue
if f not in listNodes:
raise Exception(entityname + " " + str(self) + " b check list <-> LIST_NODES " + f)
for f in self.LIST_SUBTABLES:
if f not in listSubtables:
raise Exception(entityname + " " + str(self) + " b check list <-> LIST_SUBTABLES " + f)
def get_unique_names(self, job, storage = "", project = "", application = "", gran = "",
ttype: str = "", args: dict = {}) -> list:
"""
gets the entity-names from the defined storage - the field name must be a unique identifier
:param job:
:param storage: opt. values db / files - default files
:param project: opt. select-criteria if used and defined
:param application: opt. select-criteria if used and defined
:param gran: opt. granularity values testcase / testsuite / testplan
:param ttype: opt. type of the configuration
:param args: opt. additional args
:return: list of entity-names
"""
entityNames = []
if storage == STORAGE_DB:
entityNames = self.select_unique_names(job, project, application, gran, args)
elif storage == STORAGE_FILE:
entityNames = self.read_unique_names(job, project, application, gran, args)
else:
entityNames = self.read_unique_names(job, project, application, gran, args)
return [item for item in entityNames if item not in B.LIST_DATA_NODE]
def select_unique_names(self, job, project, application, gran, args):
"""
selects the entity-names from the database
:param job:
:param project: opt. select-criteria if used and defined
:param application: opt. select-criteria if used and defined
:param gran: opt. granularity values testcase / testsuite / testplan
:param args: opt. additional args
:return: list of entity-names
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def get_entities(self, job, storage="", project="", application="", gran="", ttype="", args={}):
"""
gets the entities from the defined storage
:param job:
:param storage: opt. values db / files - default files
:param project: opt. select-criteria if used and defined
:param application: opt. select-criteria if used and defined
:param gran: opt. granularity values testcase / testsuite / testplan
:param args: opt. additional args
:return: list of entity-names
"""
entities = []
entityNames = self.get_unique_names(job, storage=storage, project=project, application=application,
gran=gran, args=args, ttype=ttype)
for k in entityNames:
if storage == STORAGE_DB:
entity = self.select_entity(job, k)
elif storage == STORAGE_FILE:
print(" entity.read_e "+ k)
entity = self.read_entity(job, k)
else:
entity = self.read_entity(job, k)
entities.append(entity)
return entities
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param project: select-criteria if used and defined
:param application: select-criteria if used and defined
:param gran: granularity values testcase / testsuite / testplan
:param args: additional args
:return: list of entity-names
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def setDbAttributes(self, job, tables):
"""
set the db-attributes like connection and ddl
:param job:
:param tables: list of table-names
:return:
"""
setattr(self, "m", job.m)
config = {}
config[B.TOPIC_CONN] = job.conf[B.TOPIC_NODE_DB]
config[B.DATA_NODE_DDL] = {}
for t in tables:
ddl = tools.db_abstract.get_ddl(job, B.ATTR_INST_TESTSERVER, t)
config[B.DATA_NODE_DDL][t] = ddl
setattr(self, "conf", config)
def getEntity(self, job, name: str, args: dict={}):
if len(args) > 0:
self.set_entity(job, name, args)
elif B.TOPIC_NODE_DB in job.conf:
self.select_entity(job, name)
#self.read_entity(job, name)
else:
self.read_entity(job, name)
def set_entity(self, job, name: str, args: dict):
setattr(self, D.FIELD_NAME, name)
for k in self.LIST_FIELDS:
if k in args:
setattr(self, k, args[k])
for k in self.LIST_SUBTABLES:
if k in args:
setattr(self, k, args[k])
for k in self.LIST_NODES:
if k in args:
setattr(self, k, args[k])
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
@staticmethod
def rebuild_data(job, tdata: dict) -> dict:
"""
gets the subtable-tag from filecsv and sets the subtables in order to workable entity-elements
:param job:
:param tdata:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def check_data(self, job, tdata: dict) -> dict:
"""
it checks the data for the specific form
:param job:
:param tdata:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def select_entity(self, job, name):
"""
reads the entity from the database
it should return the same result as read_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def write_entity(self, job, name):
"""
writes the entity into the file-system
it is similar to update_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def insert_entity(self, job, name):
"""
inserts the entity into the database
it is similar to update_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def update_entity(self, job, name):
"""
writes the entity into the database
it is similar to insert_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def remove_entity(self, job, name):
"""
removes the entity from the file-system
it is similar to delete_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def removeEntity(self, job, name, storagepath, ext):
"""
removes the entity from the file-system
it is similar to delete_entity
:param job:
:param name: a single name, a list of names, or a dict whose keys are used as the names
:return:
"""
nameList = []
if isinstance(name, dict):
nameList = name.keys()
elif isinstance(name, list):
nameList = name
else:
nameList.append(name)
for name in nameList:
pathname = os.path.join(storagepath, name + "." + ext)
os.remove(pathname)
def delete_entity(self, job, name, table):
"""
deletes the entity from the database
it is similar to remove_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
""" 2023-05 """
@staticmethod
def getConfig(job, module: str, subject: str, name: str, ttype: str = D.CSV_SPECTYPE_DDL) -> dict:
"""
reads the configuration of the subject and selects the named part
:param job:
:param name:
:return:
"""
config = tools.config_tool.getConfig(job, module, subject, ttype=ttype)
if config is not None:
if subject not in config:
newConfig = {}
newConfig[subject] = {}
for k in config:
newConfig[subject][k] = config[k]
config = newConfig
pass
if len(name) == 0:
return config
elif name in config[subject]:
outConfig = {}
outConfig[name] = config[subject][name]
return outConfig
elif B.DATA_NODE_KEYS in config[subject] \
and name in config[subject][B.DATA_NODE_KEYS]:
# if csv-data is a catalog
outConfig = {}
outConfig[name] = config[subject][B.DATA_NODE_KEYS][name]
return outConfig
elif name == subject:
return config
raise Exception("keine Config zu "+name)
@staticmethod
def set_subtables(job, tdata: dict) -> dict:
"""
gets the subtable-tag from filecsv and sets the subtables in order to workable entity-elements
:param job:
:param tdata:
:return:
"""
raise Exception("not implemented ")
@staticmethod
def getDirlist(job, path, ext) -> list:
outList = []
for k in os.listdir(path):
if k[:1] in [".", "_"]:
continue
if k in [P.KEY_CATALOG, P.KEY_TOOL, P.VAL_CONFIG, P.VAL_TEST, P.VAL_TOOLS]:
continue
if ext == "":
if not os.path.isdir(os.path.join(path, k)):
continue
outList.append(k)
continue
else:
if not os.path.isfile(os.path.join(path, k)):
continue
if len(k) < len(ext):
continue
if ext != k[-len(ext):]:
continue
outList.append(k[:-len(ext)-1])
return outList
def setAttributes(self, job, config, rootname, fields, nodes, subjects):
"""
it sets the attributes of config into the entity-object
:param job:
:param config: dictionary of the read specification resp. configuration
:param rootname: rootname of config
:param fields: list of field-names, the model-const LIST_FIELDS
:param nodes: list of node-names, the model-const LIST_NODES
:param subjects: list of subtables-names, the model-const LIST_SUBTABLES
:return:
"""
""" 2023-05 """
import model.factory
verify = False
if job is not None:
self.job = job
if rootname not in config:
return self
for k in fields + nodes:
key = tools.data_tool.getExistKeyword(k, config[rootname])
if verify: print("setFields " + k + " / " + key)
if key in ["", D.FIELD_PROJECT]:
continue
if verify: print("setFields " + str(k) + " = " + str(config[rootname][key]))
if k in fields:
setattr(self, tools.data_tool.getSingularKeyword(k), tools.data_tool.getValueStr(config[rootname][key]))
elif k == "fieldnames":
setattr(self, tools.data_tool.getPluralKeyword(k), config[rootname][key])
else:
setattr(self, tools.data_tool.getSingularKeyword(k), config[rootname][key])
setattr(self, D.FIELD_NAME, rootname)
for k in subjects:
# tables: { person: { _header: [] , _data: {} } }
#
if k in [B.DATA_NODE_DATA, B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_ROW]:
continue
objects = {}
key = tools.data_tool.getExistKeyword(k, config[rootname])
if key == "":
continue
if not isinstance(config[rootname][key], dict):
continue
for o in config[rootname][key]:
if o in [B.DATA_NODE_DATA, B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_ROW, B.DATA_NODE_PATH]:
continue
args = {}
print("### " + k + " " + o + " " + str(config[rootname][key][o]))
if not isinstance(config[rootname][key][o], dict):
objects[k] = o
continue
for x in config[rootname][key][o]:
args[x] = config[rootname][key][o][x]
# args[k] = config[rootname][key][o]
if verify: print("setSubObject " + o + " = " + str(args[k]))
obj = model.factory.get_entity_object(self.job, entityname=k, name=o, args=args)
objects[obj.getIDName()] = obj
if verify: print("setSubtables " + k + " = " + str(objects))
setattr(self, k, objects)
topics = {}
key = tools.data_tool.getExistKeyword(B.DATA_NODE_TOPICS, config[rootname])
if key != "":
for k in B.LIST_TOPIC_NODES:
if k in config[rootname][key]:
topics[k] = config[rootname][key][k]
setattr(self, tools.data_tool.getPluralKeyword(B.DATA_NODE_TOPICS), topics)
return self
def getFieldList(self) -> list:
"""
returns a list of scalar attributes
:return: LIST_FIELDS
"""
return self.LIST_FIELDS
def getNodeList(self) -> list:
"""
returns a list of sub-nodes - which can be persisted in a clob-field
:return: LIST_NODES
"""
return self.LIST_NODES
def getSubtableList(self) -> list:
"""
returns a list of sub-tables
:return: LIST_SUBTABLES
"""
return self.LIST_SUBTABLES
def getPrefixSubtable(self) -> str:
"""
returns the prefix for sub-table names
:return: PREFIX_SUBTABLE
"""
return self.PREFIX_SUBTABLE
def getSubtableNames(self) -> list:
"""
returns the list of prefixed sub-table names
:return: list built from PREFIX_SUBTABLE and LIST_SUBTABLES
"""
out = []
for t in self.LIST_SUBTABLES:
out.append(self.PREFIX_SUBTABLE+"_"+t)
return out
def getName(self) -> str:
"""
returns the name - maybe build from other attributes
:return:
"""
return self.name
def getIDName(self) -> str:
"""
it returns the name as unique-id - maybe build from few attributes
:return:
"""
return self.name
def setSubtable(self, job, subtable, sublist):
# TODO: stub - not yet implemented
outDict = {}
for k in sublist:
pass
def getDbAttr(self, job):
out = {}
for attr in [B.ATTR_DB_HOST, B.ATTR_DB_USER, B.ATTR_DB_DATABASE, B.ATTR_DB_PASSWD]:
out[attr] = job.conf[B.TOPIC_NODE_DB][attr]
return out
def getDdl(self, job, ddl):
out = {}
for t in ddl:
out[t] = {}
for f in ddl[t]:
out[t][f] = {}
for a in ddl[t][f]:
print("entity-23 "+f+", "+a+" "+str(ddl))
out[t][f][a] = ddl[t][f][a]
out[t][f][D.DDL_FIELD] = f
out[t][B.DATA_NODE_HEADER] = list(ddl[t].keys())
return out
def createSchema(self, testserver):
if B.TOPIC_NODE_DB in self.job.conf:
dbi = basic.toolHandling.getDbTool(self.job, testserver, self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
sql = self.get_schema()
print(sql)
for s in sql.split(";\n"):
if len(s) < 3: continue
try:
# dbi.execStatement(s+";", self.job.conf[B.TOPIC_NODE_DB])
print("SQL executed: "+s)
except Exception as e:
raise Exception("Fehler bei createSchema "+s)
def getHistoryFields(self):
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
sql = dbi.getSchemaAttribut("inscommit", D.TYPE_STR)+","
sql += dbi.getSchemaAttribut("insauthor", D.TYPE_STR)+","
sql += dbi.getSchemaAttribut("instime", D.TYPE_TIME)+","
sql += dbi.getSchemaAttribut("updcommit", D.TYPE_STR)+","
sql += dbi.getSchemaAttribut("updauthor", D.TYPE_STR)+","
sql += dbi.getSchemaAttribut("updtime", D.TYPE_TIME)+","
sql += dbi.getSchemaAttribut("actual", D.TYPE_INT)
return sql
def selectHistoryFields(self):
if B.TOPIC_NODE_DB in self.job.conf:
dbi = basic.toolHandling.getDbTool(self.job, self.testserver, self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
dbi.selectRows  # TODO: incomplete - the arguments (table, job) are missing
def getHistoryIndex(self, table) -> str:
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
sql = dbi.getSchemaIndex(table, "actual") + "\n"
return sql
def get_schema(self, tableName, tableObject):
pass
# NOTE: this redefinition replaces the abstract insert_entity declared above
def insert_entity(self, job):
"""
inserts the entity into the database
it is similar to update_entity
:param job:
:param name:
:return:
"""
pass
def read_spec(job, testentity, testgran, specpath):
if not os.path.isfile(specpath):
return
text = tools.file_tool.read_file_text(job, specpath, job.m)
if re.match(r".*?depricated;[jJyY]", text):
return None
spec = {}
regex = re.compile(r".*\nhead:(.*?);(.+)")
for res in regex.finditer(text):
#res = re.search(r".*head:(.*?);(.+)\n", text)
key = res.group(1)
if key == B.SUBJECT_DESCRIPTION:
spec[B.SUBJECT_DESCRIPTION] = res.group(2).replace(";", "")
elif key in [B.SUBJECT_APPS, B.PAR_APP]:
apps = res.group(2).replace(";", ",").split(",")
spec[B.SUBJECT_APPS] = apps
else:
val = res.group(2).replace(";", "")
spec[key] = val
return spec
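Entity works as a template class: concrete models declare FIELD_ID, LIST_FIELDS, LIST_NODES and LIST_SUBTABLES and implement at least read_entity; listing, caching and attribute-mapping are inherited. A minimal hypothetical subclass (the module, subject and ttype names "basic", "samples" and "sample" are invented for illustration):

    import basic.constants as B
    import tools.data_const as D
    import model.entity

    class Sample(model.entity.Entity):
        # the declarative part is checked against the table-ddl in setDdlAttributes
        FIELD_ID = "smid"
        LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION]
        LIST_NODES = [B.NODE_ATTRIBUTES]
        LIST_SUBTABLES = []

        def read_entity(self, job, name):
            # load the configuration and map it onto the object-attributes
            config = self.getConfig(job, "basic", "samples", name, ttype="sample")
            return self.setAttributes(job, config, name, self.LIST_FIELDS,
                                      self.LIST_NODES, self.LIST_SUBTABLES)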

10
model/environment.csv

@@ -0,0 +1,10 @@
_type;ctlg;;;;;;;;;
_key;_field;;;;;;;;;
table:environment;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;enid;pk;int;N;;;;;;
;name;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;attributes;string;jlob;N;;;;;;
;components;subtable;subtable;N;;;;;;
;projects;subtable;subtable;N;;;;;;

122
model/environment.py

@@ -0,0 +1,122 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.constants as B
import model.entity
import tools.path_const as P
import tools.config_tool
import tools.file_tool
import tools.git_tool
import tools.data_const as D
import tools.file_type
TABLE_NAME = "environment"
""" system-name for this entity """
FIELD_ID = "enid"
FILE_EXTENSION = D.DFILE_TYPE_YML
UNIQUE_FIELDS = [D.FIELD_NAME]
""" unique business field as human identifer """
IDENTIFYER_FIELDS = [FIELD_ID]
""" unique technical field as technical identifer """
TABLE_NAMES = ["environment", "en_project", "en_component"]
DEFAULT_SYNC = model.entity.SYNC_FULL_GIT2DB
def select_environments(job, projectList):
"""
searches and returns the environments in which the applications of the project are declared as installed,
filtered by the parameter --environment
:param job:
:return:
"""
environments = {}
path = job.conf[B.TOPIC_PATH][B.ATTR_PATH_ENV]
if not os.path.exists(path):
raise Exception("Umgebungsverzeichnis existiert nicht "+path)
for envdir in os.listdir(path):
if not os.path.isdir(os.path.join(path, envdir)):
continue
if envdir[0:1] == "_":
continue
try:
pathname = tools.config_tool.select_config_path(job, P.KEY_TOOL, "conn", envdir)
doc = tools.file_tool.read_file_dict(job, pathname, job.m)
for proj in doc[B.SUBJECT_ENVIRONMENT][B.CONF_NODE_GENERAL][B.SUBJECT_PROJECTS]:
if proj in projectList:
environments[envdir] = doc[B.SUBJECT_ENVIRONMENT][B.CONF_NODE_GENERAL]
elif len(projectList) == 1 and projectList[0] == "ALL":
environments[envdir] = doc[B.SUBJECT_ENVIRONMENT][B.CONF_NODE_GENERAL]
except Exception:
continue
return environments
class Environment(model.entity.Entity):
FIELD_ID = "enid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE]
""" list of object-attributes """
LIST_SUBTABLES = [B.SUBJECT_COMPS, B.SUBJECT_PROJECTS]
LIST_NODES = [B.NODE_ATTRIBUTES]
PREFIX_SUBTABLE = "en"
name = ""
description = ""
reference = ""
attributes = ""
project = ""
component = ""
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param project: opt. select-criteria if used and defined
:param application: opt. select-criteria if used and defined
:param gran: opt. granularity values testcase / testsuite / testplan
:param args: opt. additional args
:return: list of entity-names
"""
path = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_ENV])
outList = self.getDirlist(job, path, "")
return outList
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
config = self.getConfig(job, P.KEY_ENV, name,
tools.config_tool.get_plain_filename(job, name), ttype=B.SUBJECT_ENVIRONMENT)
return self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)
@staticmethod
def rebuild_data(job, data: dict) -> dict:
"""
gets the subtable-tag from filecsv and sets the subtables in order to workable entity-elements
:param job:
:param data:
:return:
"""
data = tools.file_type.popSubjectsNode(job, data)
data = tools.file_type.popNameNode(job, data)
return data
def check_data(self, job, data: dict) -> dict:
"""
it checks the data for the specific form
:param job:
:param data:
:return:
"""
checkNodes = {}
checkNodes[tools.file_type.MUST_NODES] = [B.SUBJECT_COMPS]
checkNodes[tools.file_type.MUSTNT_NODES] = [B.DATA_NODE_DATA, B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
checkNodes[tools.file_type.OPT_NODES] = [B.SUBJECT_PROJECTS]
return tools.file_type.check_nodes(job, data, checkNodes)
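A sketch of filtering the environments by project, following the select_environments contract above (the special list ["ALL"] disables the filter; the helper name is invented):

    import model.environment

    def environments_for(job, projects=None):
        projects = projects or ["ALL"]
        envs = model.environment.select_environments(job, projects)
        return sorted(envs.keys())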

128
model/factory.py

@@ -0,0 +1,128 @@
import model.entity
import basic.constants as B
import basic.Testserver
class Magazin():
__instance = None
__entities = {}
@staticmethod
def getInstance():
if Magazin.__instance is None:
Magazin.__instance = Magazin()
return Magazin.__instance
@staticmethod
def setEntity(name, object):
if name not in Magazin.__entities:
Magazin.__entities[name] = object
return Magazin.__entities[name]
@staticmethod
def getEntity(name):
if name in Magazin.__entities:
return Magazin.__entities[name]
@staticmethod
def hasEntity(name):
if name in Magazin.__entities:
return True
return False
def get_entity_object(job, entityname: str, name: str="", args: dict={}):
if name == "" and len(args) == 0 and Magazin.hasEntity(entityname):
return Magazin.getEntity(entityname)
if entityname in [B.SUBJECT_STEPS, B.SUBJECT_STEP]:
entity = getStep(job, entityname, name, args)
elif entityname in [B.SUBJECT_STORIES, B.SUBJECT_STORY, "storys"]:
entity = getStory(job, entityname, name, args)
elif entityname in [B.SUBJECT_VARIANTS, B.SUBJECT_VARIANT]:
entity = getVariant(job, entityname, name, args)
elif entityname in [B.SUBJECT_DATATABLES, B.SUBJECT_DATATABLE]:
entity = getDatatable(job, entityname, name, args)
elif entityname in [B.SUBJECT_USECASES, B.SUBJECT_USECASE]:
entity = getUsecase(job, entityname, name, args)
elif entityname in [B.SUBJECT_PROJECTS, B.SUBJECT_PROJECT]:
entity = getProject(job, entityname, name, args)
elif entityname in [B.SUBJECT_APPS, B.SUBJECT_APP]:
entity = getApplication(job, entityname, name, args)
elif entityname in [B.SUBJECT_COMPS, B.SUBJECT_COMP]:
entity = getComponent(job, entityname, name, args)
elif entityname in [B.SUBJECT_ARTIFACTS, B.SUBJECT_ARTIFACT]:
entity = getArtifact(job, entityname, name, args)
elif entityname in [B.SUBJECT_TESTCASES, B.SUBJECT_TESTCASE]:
entity = getTestcase(job, entityname, name, args)
elif entityname in [B.SUBJECT_TESTSUITES, B.SUBJECT_TESTSUITE]:
entity = getTestsuite(job, entityname, name, args)
elif entityname in [B.SUBJECT_TESTPLANS, B.SUBJECT_TESTPLAN]:
entity = getTestplan(job, entityname, name, args)
elif entityname in [B.SUBJECT_USERS, B.SUBJECT_USER]:
entity = getUser(job, entityname, name, args)
elif entityname in [B.SUBJECT_REL, B.SUBJECT_RELS]:
entity = getRelease(job, entityname, name, args)
elif entityname in [B.SUBJECT_ENVIRONMENT, B.SUBJECT_ENVIRONMENTS]:
entity = getEnvironment(job, entityname, name, args)
else:
return None
if name == "" and len(args) == 0 and not Magazin.hasEntity(entityname):
return Magazin.setEntity(entityname, entity)
return entity
def getRelease(job=None, entityname: str="" , name: str="", args: dict={}):
import model.prelease
return model.prelease.Release(job, entityname, name, args)
def getEnvironment(job=None, entityname: str="" , name: str="", args: dict={}):
import model.environment
return model.environment.Environment(job, entityname, name, args)
def getArtifact(job=None, entityname: str="" , name: str="", args: dict={}):
import model.artifact
return model.artifact.Artifact(job, entityname, name, args)
def getApplication(job=None, entityname: str="" , name: str="", args: dict={}):
import model.application
return model.application.Application(job, entityname, name, args)
def getProject(job=None, entityname: str="" , name: str="", args: dict={}):
import model.project
return model.project.Project(job, entityname, name, args)
def getComponent(job=None, entityname: str="" , name: str="", args: dict={}):
import model.component
return model.component.Component(job, entityname, name, args)
def getTestplan(job=None, entityname: str="" , name: str="", args: dict={}):
import model.testplan
return model.testplan.Testplan(job, entityname, name, args)
def getTestsuite(job=None, entityname: str="" , name: str="", args: dict={}):
import model.testsuite
return model.testsuite.Testsuite(job, entityname, name, args)
def getTestcase(job=None, entityname: str="" , name: str="", args: dict={}):
import model.testcase
return model.testcase.Testcase(job, entityname, name, args)
def getStep(job=None, entityname: str="" , name: str="", args: dict={}):
import model.step
return model.step.Step(job, entityname, name, args)
def getStory(job=None, entityname: str="" , name: str="", args: dict={}):
import model.story
return model.story.Story(job, entityname, name, args)
def getUsecase(job=None, entityname: str="" , name: str="", args: dict={}):
import model.usecase
return model.usecase.Usecase(job, entityname, name, args)
def getUser(job=None, entityname: str="" , name: str="", args: dict={}):
import model.user
return model.user.User(job, entityname, name, args)
def getVariant(job=None, entityname: str="" , name: str="", args: dict={}):
import model.variant
return model.variant.Variant(job, entityname, name, args)
def getDatatable(job=None, entityname: str="" , name: str="", args: dict={}):
import model.datatable
return model.datatable.Datatable(job, entityname, name, args)
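get_entity_object is the central dispatcher: called without name and args it hands out a cached blank entity from the Magazin registry, otherwise it builds a fresh one. A usage sketch (the name TESTAPP is invented):

    import basic.constants as B
    import model.factory

    def demo(job):
        # the blank entity is created once and then served from the registry
        blank = model.factory.get_entity_object(job, B.SUBJECT_APP)
        again = model.factory.get_entity_object(job, B.SUBJECT_APP)
        assert blank is again
        # a named entity bypasses the registry and is created fresh
        return model.factory.get_entity_object(job, B.SUBJECT_APP, name="TESTAPP")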

12
model/job.csv

@@ -0,0 +1,12 @@
table:job;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;jbid;pk;integer;N;;;T:1;;;
;name;str;vchar(256);I;;;F:1;;;
;description;string;vchar(256);N;;;;;;
;project;string;vchar(256);I;;;;;;
;application;string;vchar(256);N;;;;;;
;prelease;str;vchar(256);I;;;;;;
;environment;str;vchar(256);N;;;;;;
;status;str;vchar(256);I;;;;;;
;attributes;jlob;clob;N;;;;;;
;starttime;time;datetime;N;;;;;;
;endtime;time;datetime;N;;;;;;

10
model/prelease.csv

@@ -0,0 +1,10 @@
table:prelease;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;rlid;pk;int;N;;;;;;
;name;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;project;string;vchar(256);I;;;;;;
;attributes;string;jlob;N;;;;;;
;applications;subtable;subtable;N;;;;;;
;stories;subtable;subtable;N;;;;;;

121
model/prelease.py

@@ -0,0 +1,121 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import basic.toolHandling
import basic.componentHandling
import basic.constants as B
import model.entity
import tools.path_const as P
import tools.data_const as D
import tools.config_tool
import tools.file_tool
import tools.git_tool
import tools.file_type
TABLE_NAME = "prelease"
""" system-name for this entity """
FIELD_ID = "rlid"
FIELD_PRELEASE = "prelease"
""" project-release"""
FIELD_APPRELEASE = "apprelease"
FILE_EXTENSION = D.DFILE_TYPE_CSV
UNIQUE_FIELDS = [D.FIELD_NAME]
""" unique business field as human identifer """
IDENTIFYER_FIELDS = [FIELD_ID]
""" unique technical field as technical identifer """
class Release(model.entity.Entity):
FIELD_ID = "rlid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE, B.SUBJECT_PROJECT]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES]
LIST_SUBTABLES = [B.SUBJECT_APPS, B.SUBJECT_STORIES]
PREFIX_SUBTABLE = "rl"
rlid = 0
name = ""
project = ""
application = ""
description = ""
attributes = ""
reference = ""
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param opt. project: select-criteria if used and defined
:param opt. application: select-criteria if used and defined
:param opt. gran: granularity values testcase / testsuite / testplan
:param opt. args additional args
:return: list of entity-names
"""
config = self.getConfig(job, P.KEY_CATALOG, B.SUBJECT_RELS, tools.config_tool.get_plain_filename(job, ""), D.CSV_SPECTYPE_CTLG)
outList = list(config[B.SUBJECT_RELS][B.DATA_NODE_KEYS].keys())
return outList
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
config = self.getConfig(job, P.KEY_CATALOG, B.SUBJECT_RELS,
tools.config_tool.get_plain_filename(job, name), ttype=B.SUBJECT_REL)
return self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)
def rebuild_data(self, job, data: dict) -> dict:
"""
gets the subtable-tag from filecsv and sets the subtables in order to workable entity-elements
:param job:
:param data:
:return:
"""
data = tools.file_type.popTablesNode(job, data)
data = tools.file_type.popSubjectsNode(job, data)
data = self.rebuildReleases(job, data)
return data
def rebuildReleases(self, job, data: dict) -> dict:
outdata = {}
for row in data[B.DATA_NODE_DATA]:
if FIELD_PRELEASE not in row:
continue
if row[FIELD_PRELEASE] in outdata:
general = outdata[row[FIELD_PRELEASE]]
else:
general = {}
general[B.SUBJECT_APPS] = {}
if ( FIELD_APPRELEASE not in row
or len(row[FIELD_APPRELEASE]) == 0
or row[FIELD_APPRELEASE] == row[FIELD_PRELEASE]):
for f in self.LIST_FIELDS:
if f in row:
general[f] = row[f]
if B.SUBJECT_APPS in row and len(row[B.SUBJECT_APPS]) > 0:
a = str(row[B.SUBJECT_APPS]).split(",")
for app in a:
o = {}
o["appname"] = app
o["apprelease"] = row[FIELD_APPRELEASE]
o["prelease"] = row[FIELD_PRELEASE]
general[B.SUBJECT_APPS][app] = o
outdata[row[FIELD_PRELEASE]] = general
return outdata
def check_data(self, job, data: dict) -> dict:
"""
it checks the data for the specific form
:param job:
:param data:
:return:
"""
checkNodes = {}
checkNodes[tools.file_type.MUST_NODES] = [] #[B.SUBJECT_APPS]
checkNodes[tools.file_type.MUSTNT_NODES] = [] # [B.DATA_NODE_DATA, B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
checkNodes[tools.file_type.OPT_NODES] = [B.SUBJECT_PROJECTS]
return tools.file_type.check_nodes(job, data, checkNodes)
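rebuildReleases pivots the flat csv rows into one dict per project-release with an applications-subtree. A worked example of the transformation (values invented; the node names assume B.DATA_NODE_DATA == "_data" and B.SUBJECT_APPS == "applications"):

    rows = {"_data": [{"rlid": 1, "name": "R1", "prelease": "R1",
                       "apprelease": "R1", "applications": "APP_A,APP_B"}]}
    # Release(job).rebuildReleases(job, rows) then yields, in essence:
    # {"R1": {"rlid": 1, "name": "R1",
    #         "applications": {
    #             "APP_A": {"appname": "APP_A", "apprelease": "R1", "prelease": "R1"},
    #             "APP_B": {"appname": "APP_B", "apprelease": "R1", "prelease": "R1"}}}}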

12
model/project.csv

@@ -0,0 +1,12 @@
table:project;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;prid;pk;int;N;;;;;;
;name;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;insauthor;str;vchar(256);N;;;;;;
;inscommit;str;vchar(256);N;;;;;;
;instime;time;vchar(256);N;;;;;;
;updauthor;str;vchar(256);N;;;;;;
;updcommit;str;vchar(256);N;;;;;;
;updtime;time;vchar(256);N;;;;;;
;actual;int;vchar(256);I;;;;;;

272
model/project.py

@@ -0,0 +1,272 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.toolHandling
import basic.constants as B
import model.entity
import tools.path_const as P
import tools.data_const as D
import tools.config_tool
import tools.file_tool
import tools.db_abstract
import tools.git_tool
import tools.file_type
TABLE_NAME = "project"
""" system-name for this entity """
FIELD_ID = "prid"
FIELD_NAME = "name"
FIELD_DESCRIPTION = B.SUBJECT_DESCRIPTION
FIELD_REFERENCE = B.SUBJECT_REFERENCE
LIST_FIELDS = [FIELD_ID, FIELD_NAME, FIELD_DESCRIPTION, FIELD_REFERENCE]
""" list of object-attributes """
LIST_NODES = []
LIST_SUBTABLES = {}
FILE_EXTENSION = D.DFILE_TYPE_YML
UNIQUE_FIELDS = [FIELD_NAME]
""" unique business field as human identifer """
IDENTIFYER_FIELDS = [FIELD_ID]
""" unique technical field as technical identifer """
class Project(model.entity.Entity):
FIELD_ID = "prid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE]
""" list of object-attributes """
LIST_NODES = []
LIST_SUBTABLES = []
prid = 0
name = ""
description = ""
reference = ""
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param project: opt. select-criteria if used and defined
:param application: opt. select-criteria if used and defined
:param gran: opt. granularity values testcase / testsuite / testplan
:param args: opt. additional args
:return: list of entity-names
"""
config = self.getConfig(job, B.SUBJECT_PROJECTS, "")
if B.SUBJECT_PROJECTS in config:
conf = list(config[B.SUBJECT_PROJECTS].keys())
else:
conf = config.keys()
outList = []
for k in conf:
if k[:1] != "_":
outList.append(k)
return outList
def select_unique_names(self, job, project, application, gran, args):
"""
selects the entity-names from the database
:param job:
:param project: opt. select-criteria if used and defined
:param application: opt. select-criteria if used and defined
:param gran: opt. granularity values testcase / testsuite / testplan
:param args: opt. additional args
:return: list of entity-names
"""
outList = []
self.setDbAttributes(job, [TABLE_NAME])
dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB]["type"])
data = dbi.selectRows(TABLE_NAME, job)
checkList = {}
for row in data[B.DATA_NODE_DATA]:
key = ""
for f in UNIQUE_FIELDS:
key += "_" + row[f]
if key in checkList:
continue
else:
checkList[key] = key
fields = []
for f in UNIQUE_FIELDS:
fields.append(row[f])
outList.append(fields)
return outList
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
print("name "+name)
config = self.getConfig(job, B.SUBJECT_PROJECTS, tools.config_tool.get_plain_filename(job, name))
for k in LIST_FIELDS:
if k not in config:
continue
setattr(self, k, config[k])
return self
def select_entity(self, job, name, row={}):
"""
reads the entity from the database
it should return the same result as read_entity
:param job:
:param name: unique field as string, unique fields as list
the unique-fields are defined in the class
:return: itself with filled object-attributes
"""
if row is None or len(row) == 0:
self.setDbAttributes(job, [TABLE_NAME])
dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB]["type"])
if type(name) is list:
names = name
elif type(name) is str:
names = [name]
condition = "where "
for v in names:
condition += " and " + ""
data = dbi.selectRows(TABLE_NAME, job, "where username = \'" + names[0] + "\'")
if len(data[B.DATA_NODE_DATA]) > 1:
raise Exception("single selection with more than one result: "+names[0])
elif len(data[B.DATA_NODE_DATA]) == 1:
row = data[B.DATA_NODE_DATA][0]
else:
raise Exception("no result for: "+names[0])
for k in LIST_FIELDS:
if k not in row:
continue
setattr(self, k, row[k])
return self
def write_entity(self, job, name):
"""
writes the entity into the file-system
it is similar to update_entity
:param job:
:param name:
:return:
"""
config = {}
config[model.project.TABLE_NAME] = {}
pathname = os.path.join(job.conf[B.TOPIC_PATH][P.ATTR_PATH_HOME], P.VAL_CONFIG,
P.VAL_USER, name + ".yml")
for k in LIST_FIELDS:
if getattr(self, k, "") == "" \
or k == FIELD_ID:
continue
config[model.project.TABLE_NAME][k] = getattr(self, k, "")
tools.file_tool.write_file_dict(job.m, job, pathname, config)
return self
def insert_entity(self, job, name="", table="", rows={}):
"""
inserts the entity into the database
it is similar to update_entity
:param job:
:param name:
:return:
"""
if table == "":
table = self.entityname
if len(self.ddls) == 0:
# TODO: this recursive call never terminates while ddls stays empty - probably setDdlAttributes was intended
self.insert_entity(job, name=name, table=self.entityname, rows=rows)
# self.setDbAttributes(job, [TABLE_NAME])
dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB]["type"])
condition = "where"
for f in UNIQUE_FIELDS:
# TODO other db-formats than string has to be implemented
condition += " and " + f + " = \'" + getattr(self, f, "") + "\'"
condition = condition.replace("where and", "where ")
data = dbi.selectRows(TABLE_NAME, job, condition)
if len(data[B.DATA_NODE_DATA]) > 0:
print("update statt insert")
return
if rows is None or len(rows) == 0:
rows = []
row = {}
for f in self.ddls[table]:
row[f] = getattr(self, f, "")
rows.append(row)
dbi.insertRows(job, table, rows)
def update_entity(self, job, name):
"""
writes the entity into the database
it is similar to insert_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def remove_entity(self, job, name):
"""
removes the entity from the file-system
it is similar to delete_entity
:param job:
:param name: a single name, a list of names, or a dict whose keys are used as the names
:return:
"""
self.removeEntity(job, name, os.path.join(job.conf[B.TOPIC_PATH][P.ATTR_PATH_HOME], P.VAL_CONFIG, P.VAL_USER), "yml")
def delete_entity(self, job, name, table):
"""
deletes the entity from the database
it is similar to remove_entity
:param job:
:param name:
:return:
"""
self.setDbAttributes(job, [TABLE_NAME])
dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB]["type"])
condition = "where"
for f in IDENTIFYER_FIELDS:
# TODO other db-formats than string has to be implemented
val = dbi.getDbValue(self.conf[B.DATA_NODE_DDL][table][f], getattr(self, f, ""))
condition += " and " + f + " = " + val + ""
condition = condition.replace("where and", "where ")
dbi.deleteRows(job, table, condition)
@staticmethod
def getConfig(job, subject, name):
"""
reads the configuration of the entity from the file-storage
:param job:
:param name:
:return:
"""
config = tools.config_tool.getConfig(job, P.KEY_BASIC, subject, ttype=B.SUBJECT_PROJECT)
if config is not None:
if len(name) == 0:
return config
elif subject in config and name in config[subject]:
return config[subject][name]
elif name in config:
return config[name]
raise Exception("keine Config zu "+name)
@staticmethod
def getCurrentUser(job):
# USERNAME is set on Windows, USER on unix-like systems
return os.environ.get("USERNAME") or os.environ.get("USER")
@staticmethod
def rebuild_data(job, data: dict) -> dict:
"""
gets the subtable-tag from filecsv and converts the subtables into workable entity-elements
:param job:
:param data:
:return:
"""
data = tools.file_type.popSubjectsNode(job, data)
data = tools.file_type.popNameNode(job, data)
return data
@staticmethod
def check_data(job, data: dict) -> dict:
checkNodes = {}
checkNodes[tools.file_type.MUST_NODES] = []
checkNodes[tools.file_type.MUSTNT_NODES] = [B.DATA_NODE_OPTION, B.DATA_NODE_DATA, B.DATA_NODE_FIELDS, B.DATA_NODE_HEADER]
checkNodes[tools.file_type.OPT_NODES] = [B.SUBJECT_PROJECTS, B.NODE_ATTRIBUTES]
return tools.file_type.check_nodes(job, data, checkNodes)
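check_data above only declares which top-level nodes are mandatory, forbidden or optional; the validation itself is delegated to tools.file_type.check_nodes. A minimal sketch of that assumed contract (the constant values are invented here, the real ones live in tools.file_type):

MUST_NODES = "must"
MUSTNT_NODES = "mustnt"
OPT_NODES = "optional"

def check_nodes(job, data: dict, checkNodes: dict) -> dict:
    # mandatory nodes have to be present
    for node in checkNodes.get(MUST_NODES, []):
        if node not in data:
            raise Exception("mandatory node is missing: " + node)
    # forbidden nodes must not be present
    for node in checkNodes.get(MUSTNT_NODES, []):
        if node in data:
            raise Exception("forbidden node is present: " + node)
    # optional nodes need no check - they may or may not be present
    return data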

11
model/rl_applications.csv

@@ -0,0 +1,11 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:rl_applications;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;rlappid;pk;vchar(256);N;;;;;;
;rlid;int;vchar(256);I;;;;;;
;appid;int;vchar(256);I;;;;;;
;appname;str;vchar(256);N;;;;;;
;apprelease;str;vchar(256);N;;;;;;
;appgenrelease;str;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
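This and the following model/*.csv files share one semicolon-separated DDL layout: optional meta-rows starting with an underscore (_type, _key), one table:<name> row carrying the column-names, then one row per field. A sketch of how such a file could be parsed; read_ddl_csv is a hypothetical helper, not part of the repository:

import csv

def read_ddl_csv(path: str) -> dict:
    """ hypothetical helper - parses a semicolon-separated ddl-csv into field-definitions """
    fields = {}
    header = []
    with open(path, newline="") as f:
        for row in csv.reader(f, delimiter=";"):
            if not row or row[0].startswith("_"):   # skip empty and meta-rows (_type, _key)
                continue
            if row[0].startswith("table:"):         # header-row carrying the column-names
                header = row[1:]
                continue
            fielddef = dict(zip(header, row[1:]))
            fields[fielddef["_field"]] = fielddef
    return fields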

10
model/rl_stories.csv

@@ -0,0 +1,10 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:rl_stories;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;rlstrid;pk;integer;N;;;T:1;;;
;rlid;int;integer;I;;;T:1;;;
;stid;int;integer;I;;;T:2;;;
;story;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;acceptcriteria;str;clob;N;;;;;;

8
model/st_preleases.csv

@@ -0,0 +1,8 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:st_preleases;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;strelid;pk;integer;N;;;;;;
;stid;int;integer;I;;;T:1;;;
;rlid;int;integer;I;;;T:2;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;

12
model/step.csv

@@ -0,0 +1,12 @@
table:step;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;spid;pk;int;N;;;;;;
;name;str;vchar(256);I;;;;;;
;sortnr;int;int;;;;;;;
;stepnr;int;int;;;;;;;
;dataref;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;component;str;vchar(256);N;;;;;;
;variant;str;vchar(256);N;;;;;;
;attributes;string;jlob;N;;;;;;
;topics;string;jlob;N;;;;;;

123
model/step.py

@@ -0,0 +1,123 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.toolHandling
import basic.componentHandling
import basic.constants as B
import model.entity
import tools.path_const as P
import tools.data_const as D
import tools.config_tool
import tools.file_tool
import tools.git_tool
TABLE_NAME = "step"
""" system-name for this entity """
FIELD_ID = "spid"
FIELD_NAME = "name"
FIELD_VARIANT = B.SUBJECT_VARIANT # variant
FIELD_COMPONENT = B.SUBJECT_COMP # component
FIELD_SORTNR = "sortnr" # testcase
FIELD_STEPNR = "stepnr" # testcase
FIELD_DATAREF = "dataref" # testcase
FIELD_ATTRIBUTES = B.NODE_ATTRIBUTES
LIST_FIELDS = [FIELD_ID, FIELD_NAME, FIELD_VARIANT, FIELD_COMPONENT, FIELD_SORTNR, FIELD_STEPNR, FIELD_DATAREF]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES, B.DATA_NODE_TOPICS]
LIST_SUBTABLES = []
PREFIX_SUBTABLE = ""
FILE_EXTENSION = D.DFILE_TYPE_YML
UNIQUE_FIELDS = [FIELD_NAME]
""" unique business field as human identifer """
IDENTIFYER_FIELDS = [FIELD_ID]
""" unique technical field as technical identifer """
class Step(model.entity.Entity):
"""
this class describes execution steps in a formal language = fields;
in the testcase-specification it describes the start-point of an action, where the business information is specified, like
* variant
* reference of the program-step, sorting-nr, reference to the testdata specified above
* entity which implements the action
* further business-arguments as json-string
the detailed steps (build the request, send the request, get the response, ..) are configured in the component,
in sub-steps of the specified step, where the technical information is configured, like
* topic and type which implement the detail-step
* further arguments inside the dict
"""
FIELD_ID = "spid"
FIELD_SORTNR = "sortnr" # testcase
FIELD_STEPNR = "stepnr" # testcase
FIELD_DATAREF = "dataref" # testcase
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_VARIANT, B.SUBJECT_COMP, FIELD_SORTNR, FIELD_STEPNR, FIELD_DATAREF,
B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES, B.DATA_NODE_TOPICS]
LIST_SUBTABLES = []
name = "" # variant
variant = ""
sortnr = 0
""" sorting, alternative step may have the same sortnr, then the correct step is selected by program-variant """
stepnr = 0
""" in order to filter the steps for specific program-executions """
dataref = ""
component = "" # the name of the component or of the parameter "testcases"
attributes = {}
topics = "" # is set by component-configuration
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param opt. project: select-criteria if used and defined
:param opt. application: select-criteria if used and defined
:param opt. gran: granularity values testcase / testsuite / testplan
:param opt. args additional args
:return: list of entity-names
"""
config = self.getConfig(job, P.KEY_BASIC, B.SUBJECT_VARIANTS,
tools.config_tool.get_plain_filename(job, ""), B.SUBJECT_STEP)
outList = list(config[B.SUBJECT_VARIANTS].keys())
return outList
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
config = self.getConfig(job, P.KEY_BASIC, B.SUBJECT_VARIANTS,
tools.config_tool.get_plain_filename(job, name), B.SUBJECT_STEP)
return self.setAttributes(job, config, name, LIST_FIELDS, LIST_NODES, LIST_SUBTABLES)
@staticmethod
def rebuild_data(job, data: dict) -> dict:
"""
gets the subtable-tag from filecsv and converts the subtables into workable entity-elements
:param job:
:param data:
:return:
"""
data = tools.file_type.popSubjectsNode(job, data)
# data = tools.file_type.popNameNode(job, data)
return data
@staticmethod
def check_data(job, data: dict) -> dict:
checkNodes = {}
checkNodes[tools.file_type.MUST_NODES] = []
checkNodes[tools.file_type.MUSTNT_NODES] = [B.DATA_NODE_OPTION, B.DATA_NODE_DATA, B.DATA_NODE_FIELDS, B.DATA_NODE_HEADER]
checkNodes[tools.file_type.OPT_NODES] = [B.SUBJECT_COMP, B.NODE_ATTRIBUTES]
return tools.file_type.check_nodes(job, data, checkNodes)
def getIDName(self):
"""
it returns the name as unique-id - maybe build from few attributes
:return:
"""
return "{:02d}_{}".format(int(self.sortnr), self.variant)

10
model/story.csv

@@ -0,0 +1,10 @@
_type;ctlg;;;;;;;;;
_key;_field;;;;;;;;;
table:story;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;stid;pk;int;N;;;;;;
;name;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;project;string;vchar(256);I;;;;;;
;attributes;string;jlob;N;;;;;;
;preleases;subtable;subtable;N;;;;;;

63
model/story.py

@@ -0,0 +1,63 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import basic.constants as B
import model.entity
import tools.path_const as P
import tools.data_const as D
import tools.config_tool
import tools.file_tool
import tools.git_tool
TABLE_NAME = "story"
""" system-name for this entity """
FIELD_ID = "stid"
""" list of object-attributes """
FILE_EXTENSION = D.DFILE_TYPE_CSV
UNIQUE_FIELDS = [D.FIELD_NAME]
""" unique business field as human identifer """
IDENTIFYER_FIELDS = [FIELD_ID]
""" unique technical field as technical identifer """
class Story(model.entity.Entity):
FIELD_ID = "stid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE, B.SUBJECT_PROJECT]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES]
LIST_SUBTABLES = [B.SUBJECT_RELS]
PREFIX_SUBTABLE = "st"
stid = 0
story = ""
project = ""
description = ""
reference = ""
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param opt. project: select-criteria if used and defined
:param opt. application: select-criteria if used and defined
:param opt. gran: granularity values testcase / testsuite / testplan
:param opt. args additional args
:return: list of entity-names
"""
config = self.getConfig(job, P.KEY_CATALOG, B.SUBJECT_STORIES,
tools.config_tool.get_plain_filename(job, ""), ttype=D.CSV_SPECTYPE_CTLG)
outList = list(config[B.SUBJECT_STORIES][B.DATA_NODE_KEYS].keys())
return outList
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
config = self.getConfig(job, P.KEY_CATALOG, B.SUBJECT_STORIES,
tools.config_tool.get_plain_filename(job, name), ttype=D.CSV_SPECTYPE_CTLG)
return self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)

18
model/table.csv

@@ -0,0 +1,18 @@
_type;ctlg;;;;;;;;;
_key;_field;;;;;;;;;
table:datatable;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;dtid;pk;integer;N;;;T:1;;;
;name;str;vchar(256);I;;;F:3;;;
;database;str;vchar(256);I;;;F:1;;;
;schema;str;vchar(256);I;;;F:2;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;story
;attributes;jlob;clob;N;;;;;;
;fieldnames;jlob;clob;N;;;;;;
;insauthor;str;vchar(256);N;;;;;;
;inscommit;str;vchar(256);N;;;;;;
;instime;time;datetime;N;;;;;;
;updauthor;str;vchar(256);N;;;;;;
;updcommit;str;vchar(256);N;;;;;;
;updtime;time;datetime;N;;;;;;
;actual;int;integer;I;;;;;;

346
model/table.py

@@ -0,0 +1,346 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.toolHandling
import basic.constants as B
import model.entity
import tools.path_const as P
import tools.data_const as D
import tools.config_tool
import tools.file_tool
import tools.git_tool
TYPE_ADMIN = "admin"
TYPE_CTLG = "catalog"
TYPE_CONTEXT = "context"
LISTNAME_DDLNAMES = "fieldnames"
LISTNAME_DDLFIELDS = "fielddef"
LISTNAME_FIELDS = "fieldlist"
LISTNAME_NODES = "nodelist"
LISTNAME_SUBTABLE = "subtables"
TABLE_NAME = "table"
""" system-name for this entity """
FIELD_ID = "tbid"
DEFAULT_FIELD = ""
DEFAULT_TYPE = "string"
DEFAULT_FORMAT = "vchar(256)"
DEFAULT_INDEX = "N"
DEFAULT_CONSTRAINT = "nullable"
DEFAULT_AGGREGAT = ""
DEFAULT_GENERIC = ""
DEFAULT_KEY = ""
DEFAULT_ACCEPTANCE = ""
DEFAULT_ALIAS = ""
DEFAULT_DESCRIPTION = ""
DEFAULTS = {
D.DDL_FIELD : DEFAULT_FIELD,
D.DDL_TYPE : DEFAULT_TYPE,
D.DDL_FORMAT : DEFAULT_FORMAT,
D.DDL_INDEX : DEFAULT_INDEX,
D.DDL_CONSTRAINT : DEFAULT_CONSTRAINT,
D.DDL_AGGREGAT : DEFAULT_AGGREGAT,
D.DDL_GENERIC : DEFAULT_GENERIC,
D.DDL_KEY : DEFAULT_KEY,
D.DDL_ACCEPTANCE : DEFAULT_ACCEPTANCE,
D.DDL_ALIAS : DEFAULT_ALIAS,
D.DDL_DESCRIPTION : DEFAULT_DESCRIPTION
}
def select_tables(job, project="", application="", component=""):
outList = []
appl = tools.config_tool.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS)
path = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_COMPS], "catalog", "tables")
for p in os.listdir(path):
if p[-4:] not in [".csv", ".yml", ".xml", "json"]:  # "json" matches the last four characters of ".json"
continue
table = p[:-4]
outList.append(table)
return outList
class Table(model.entity.Entity):
"""
table-object as part of a database
in contrast to datatable it is neither a concrete table in the automation-model
nor a concrete table in the system-model
it is an abstract super-class in order to relate to the database-management-system
"""
FIELD_ID = "tbid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE, B.SUBJECT_PROJECT]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES, "fielddef", "fieldnames"]
LIST_SUBTABLES = []
LIST_ADMINFIELDS = {
"insauthor": {
"_field": "insauthor",
"type": "str",
"format": "varchar(128)",
"index": "N",
"generic": "",
"aggregat": "",
"key": "",
"acceptance": "",
"alias": "",
"description": ""
},
"inscommit": {
"_field": "inscommit",
"type": "str",
"format": "varchar(1024)",
"index": "N",
"generic": "",
"aggregat": "",
"key": "",
"acceptance": "",
"alias": "",
"description": ""
},
"instime": {
"_field": "instime",
"type": "time",
"format": "time",
"index": "N",
"generic": "",
"aggregat": "",
"key": "",
"acceptance": "",
"alias": "",
"description": ""
},
"updauthor": {
"_field": "updauthor",
"type": "str",
"format": "varchar(128)",
"index": "N",
"generic": "",
"aggregat": "",
"key": "",
"acceptance": "",
"alias": "",
"description": ""
},
"updcommit": {
"_field": "updcommit",
"type": "str",
"format": "varchar(1024)",
"index": "N",
"generic": "",
"aggregat": "",
"key": "",
"acceptance": "",
"alias": "",
"description": ""
},
"updtime": {
"_field": "updtime",
"type": "time",
"format": "time",
"index": "N",
"generic": "",
"aggregat": "",
"key": "",
"acceptance": "",
"alias": "",
"description": ""
},
"actual": {
"_field": "actual",
"type": "int",
"format": "int",
"index": "N",
"generic": "",
"aggregat": "",
"key": "",
"acceptance": "",
"alias": "",
"description": ""
}
}
# project
# testcase
# artefact :
#
LIST_CATALOGFIELDS = {
"project": {
"_field": "project",
"type": "str",
"format": "varchar(128)",
"index": "I",
"generic": "",
"aggregat": "",
"key": "",
"acceptance": "",
"alias": "",
"description": ""
},
"application": {
"_field": "application",
"type": "str",
"format": "varchar(128)",
"index": "I",
"generic": "",
"aggregat": "",
"key": "",
"acceptance": "",
"alias": "",
"description": ""
},
"testcase": {
"_field": "testcase",
"type": "str",
"format": "varchar(128)",
"index": "I",
"generic": "",
"aggregat": "",
"key": "",
"acceptance": "",
"alias": "",
"description": ""
},
"artifact": {
"_field": "artifact",
"type": "str",
"format": "varchar(128)",
"index": "I",
"generic": "",
"aggregat": "",
"key": "",
"acceptance": "",
"alias": "",
"description": "result "
},
"refdate": {
"_field": "refdate",
"type": "time",
"format": "time",
"index": "N",
"generic": "",
"aggregat": "",
"key": "",
"acceptance": "",
"alias": "",
"description": ""
}
}
tbid = 0
name = ""
project = ""
fieldnames = []
fielddef = {}
def set_object(self, project, name):
self.project = project
self.name = name
def get_schema(self, tableName="", tableType=""):
"""
gets schema/ddl-informations in order to create the database
"""
sql = ""
sqlTable = ""
sqlSub = ""
dbi = basic.toolHandling.getDbTool(self.job, None, self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
sqlTable += dbi.getCreateTable(tableName)
tableId = ""
if tableType in [TYPE_CTLG]:
for f in self.LIST_CATALOGFIELDS.keys():
if f not in self.fieldnames:
self.fieldnames.append(f)
self.fielddef[f] = self.LIST_CATALOGFIELDS[f]
if tableType in [TYPE_ADMIN, TYPE_CTLG]:
for f in self.LIST_ADMINFIELDS.keys():
if f not in self.fieldnames:
self.fieldnames.append(f)
self.fielddef[f] = self.LIST_ADMINFIELDS[f]
for f in self.fieldnames:
if f[0:1] == "_":
continue
fo = self.fielddef[f]
if D.DDL_INDEX in fo and len(fo[D.DDL_INDEX]) > 0:
a = fo[D.DDL_INDEX].split(":")
if a[0] == "I":
sqlSub += dbi.getSchemaIndex(tableName, fo[D.DDL_FIELD]) + "\n"
elif a[0] == "S":
attrList = []
attr = {"attr":fo[D.DDL_FIELD], "atype": fo[D.DDL_TYPE]}
attrList.append(attr)
for i in range(2, len(a)):
if i % 2 == 1:
continue
if a[i] == "attr":
attr = {"attr":B.NODE_ATTRIBUTES, "atype": D.TYPE_TEXT}
elif i+1 < len(a):
attr = {"attr": a[i], "atype": a[i+1]}
attrList.append(attr)
sqlSub += dbi.getSchemaSubtable(a[1], attrList) + "\n"
sqlSub += dbi.getSchemaIndex(dbi.getSubtableName(a[1], fo[D.DDL_FIELD]), tableId) + "\n"
continue
if fo[D.DDL_TYPE] not in ["subtable"]:
sqlTable += dbi.getSchemaAttribut(fo[D.DDL_FIELD], fo[D.DDL_TYPE]) + ","
if fo[D.DDL_TYPE] == D.TYPE_PK:
tableId = fo[D.DDL_FIELD]
sql = sqlTable[0:-1]+");\n"+sqlSub
"""
# print(sql)
"""
return sql
def read_unique_names(self, job, project="", application="", gran= "", args={}, ttype: str="") -> list:
return []
# table is not a real entity
def read_entity(self, job, name: str, args: dict={}):
return self.read_ddl(job, name, args=args)
# table is not a real entity
def read_ddl(self, job: any, name: str, args: dict = {}):
"""
reads the ddl of the table depending on context
a) component: the ddl is read from specific or general component-folder
b) testcase: the ddl is read from general component-folder
c) testserver: the ddl is read from model-folder
:param job:
:param name:
:param args:
:return:
"""
config = {}
if "context" in args:
if args["context"] == "component":
ddl = tools.config_tool.getConfig(job, D.DDL_FILENAME, self.component.name, name, ttype=D.CSV_SPECTYPE_DDL)
elif args["context"] == "testdata":
ddl = tools.config_tool.getConfig(job, D.DDL_FILENAME, args["context"], name, ttype=D.CSV_SPECTYPE_DDL)
elif args["context"] == B.ATTR_INST_TESTSERVER:
ddl = tools.config_tool.getConfig(job, D.DDL_FILENAME, B.ATTR_INST_TESTSERVER, name, ttype=D.CSV_SPECTYPE_DDL)
if "_name" in ddl:
config[ddl["_name"]] = ddl
else:
config = ddl
return self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)
def select_entity(self, job, name):
"""
reads the entity from the database
it should get the same result as read_entity
:param job:
:param name:
:return:
"""
self.read_entity(job, name)
# raise Exception(B.EXCEPT_NOT_IMPLEMENT)
# def write_entity(self, job, name):
# table is not a real entity
# def update_entity(self, job, name):
# table is not a real entity
# def remove_entity(self, job, name):
# table is not a real entity
# def delete_entity(self, job, name):
# table is not a real entity
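A hedged driver sketch for get_schema: it assumes a framework job with a configured db-tool, that the entity takes the job-object in its constructor, and that the D.DDL_* constants map to the literal column-names used below. With tableType=TYPE_ADMIN the LIST_ADMINFIELDS are appended before the SQL is built:

# sketch only - job and the db-tool configuration come from the framework
tb = Table(job)
tb.fieldnames = ["dtid", "name"]
tb.fielddef = {
    "dtid": {"_field": "dtid", "type": "pk", "format": "integer", "index": "N"},
    "name": {"_field": "name", "type": "str", "format": "vchar(256)", "index": "I"},
}
sql = tb.get_schema(tableName="datatable", tableType=TYPE_ADMIN)
# expected shape: one CREATE TABLE statement over all fields (admin-fields included),
# followed by one index-statement for each field marked with index "I"
print(sql)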

8
model/tc_datatables.csv

@@ -0,0 +1,8 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:tc_datatables;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tcdtbid;pk;integer;N;;;;;;
;tcid;int;integer;I;;;T:1;;;
;dtid;int;integer;I;;;T:2;;;
;count;int;int;N;;;;;;
;refdate;date;date;N;;;;;;

8
model/tc_steps.csv

@@ -0,0 +1,8 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:tc_steps;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tcstpid;pk;integer;N;;;;;;
;tcid;int;integer;I;;;T:1;;;
;spid;int;integer;I;;;T:2;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;

10
model/tc_stories.csv

@@ -0,0 +1,10 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:tc_stories;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tcstrid;pk;integer;N;;;T:1;;;
;tcid;int;integer;I;;;T:1;;;
;stid;int;integer;I;;;T:2;;;
;story;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;acceptcriteria;str;clob;N;;;;;;

8
model/tc_usecases.csv

@@ -0,0 +1,8 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:tc_usecases;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tcucsid;pk;integer;N;;;;;;
;tcid;int;integer;I;;;T:1;;;
;ucid;int;integer;I;;;T:2;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;

8
model/tc_variants.csv

@@ -0,0 +1,8 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:tc_variants;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tcvarid;pk;integer;N;;;;;;
;tcid;int;integer;I;;;T:1;;;
;vaid;int;integer;I;;;T:2;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;

15
model/testcase.csv

@@ -0,0 +1,15 @@
_type;ctlg;;;;;;;;;
_key;_field;;;;;;;;;
table:testcase;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tcid;pk;integer;N;;;T:1;;;
;name;str;vchar(256);I;;;F:1;;;
;description;string;vchar(256);N;;;;;;
;project;string;vchar(256);I;;;;;;
;application;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;story
;attributes;jlob;jlob;N;;;;;;
;steps;subtable;subtable;N;;;;;;
;datatables;subtable;subtable;N;;;;;;
;stories;subtable;subtable;N;;;;;;
;usecases;subtable;subtable;N;;;;;;
;variants;subtable;subtable;N;;;;;;

268
model/testcase.py

@@ -0,0 +1,268 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import traceback
import basic.toolHandling
import tools.data_tool
import tools.job_const as J
import tools.data_const as D
import basic.constants as B
import model.entity
import tools.config_tool
import tools.job_tool
import tools.path_tool
import tools.path_const as P
import model.entity
import model.story
import model.datatable
import model.step
import model.factory
import tools.file_type
TABLE_NAMES = ["application", "ap_project", "ap_component"]
STORAGES = [model.entity.STORAGE_FILE, model.entity.STORAGE_DB]
""" used storage in priority sortage, so: if file then read_fcts / if db then select-fcts """
DEFAULT_SYNC = model.entity.SYNC_FULL_GIT2DB
TABLE_NAME = B.SUBJECT_APP
""" system-name for this entity """
FIELD_ID = "tcid"
LIST_SUB_DESCRIPT = [D.DATA_ATTR_USECASE_DESCR, D.DATA_ATTR_STORY_DESCR]
FILE_EXTENSION = D.DFILE_TYPE_YML
UNIQUE_FIELDS = [D.FIELD_NAME]
""" unique business field as human identifer """
IDENTIFYER_FIELDS = [FIELD_ID]
""" unique technical field as technical identifer """
class Testcase(model.entity.Entity):
"""
Generally this object can be stored as a file with data or in a database.
references:
application ->
story -> story
variant -> comp.step
subtables
steps -> comp.step
tables -> comp.table
"""
FIELD_ID = "tcid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_APP,
B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE, B.SUBJECT_PROJECT]
LIST_NODES = [B.NODE_ATTRIBUTES]
LIST_SUBTABLES = [B.SUBJECT_VARIANTS, B.SUBJECT_USECASES, B.SUBJECT_STORIES, B.SUBJECT_DATATABLES, B.SUBJECT_STEPS]
PREFIX_SUBTABLE = "tc"
tcid = ""
name = ""
description = ""
project = ""
reference = ""
attributes = ""
stories = {}
tables = {}
steps = {}
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param opt. project: select-criteria if used and defined
:param opt. application: select-criteria if used and defined
:param opt. gran: granularity values testcase / testsuite / testplan
:param opt. args additional args
:return: list of entity-names
"""
if project == "":
project = getattr(job.par, B.SUBJECT_PROJECT)
path = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_TDATA], project,
B.SUBJECT_TESTCASES)
outList = self.getDirlist(job, path, "")
return outList
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
# r = tools.config_tool.select_config_path(job, P.KEY_TESTCASE, "TC0001")
# ttype=testcase => split the test-spec into blocks and check each block in a separate call
config = self.getConfig(job, P.KEY_TESTCASE, name, tools.config_tool.get_plain_filename(job, name), B.SUBJECT_TESTCASE)
self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)
return self
@staticmethod
def rebuild_data(job, data: dict) -> dict:
"""
gets the subtable-tag from filecsv and converts the subtables into workable entity-elements
:param job:
:param data:
:return:
"""
data = tools.file_type.popSubjectsNode(job, data)
# data = tools.file_type.popNameNode(job, data)
data = Testcase.buildHeadAttributes(job, data, Testcase.LIST_FIELDS, Testcase.LIST_SUBTABLES)
return data
@staticmethod
def buildHeadAttributes(job, data: dict, fields: list, subtables: list) -> dict:
"""
workaround for _head:attr resp _option:attr
:param data:
:param fields:
:param subtables:
:return:
"""
outdata = {}
attributes = {}
subtable = {}
xsubtable = {}
try:
for k in data:
if k in [B.DATA_NODE_HEAD, B.DATA_NODE_OPTION]:
if k not in outdata:
outdata[k] = {}
xkeys = []
for l in data[k]:
a = l.split("-")
val = data[k][l]
if tools.data_tool.getPluralKeyword(a[0]) in B.LIST_SUBJECTS \
and tools.data_tool.getSingularKeyword(a[0]) in fields:
outdata[tools.data_tool.getSingularKeyword(a[0])] = tools.data_tool.splitDelimitedValues(val)[0]
elif tools.data_tool.getPluralKeyword(a[0]) in B.LIST_SUBJECTS \
and tools.data_tool.getPluralKeyword(a[0]) in subtables:
xsubtable = Testcase.setSubtable(job, xsubtable, l, val, outdata)
#outdata[tools.data_tool.getPluralKeyword(a[0])] = subtable
elif k == B.DATA_NODE_OPTION:
attributes[l] = data[k][l]
else:
outdata[k][l] = data[k][l]
elif (k == B.DATA_NODE_TABLES and B.SUBJECT_DATATABLES in subtables):
outdata[tools.data_tool.getPluralKeyword(B.SUBJECT_DATATABLES)] = data[k]
elif k in B.LIST_DATA_NODE and tools.data_tool.getPluralKeyword(k) in subtables:
outdata[tools.data_tool.getPluralKeyword(k)] = data[k]
elif k[:1] == "_":
a = k.split("-")
val = data[k]
if tools.data_tool.getPluralKeyword(a[0]) in B.LIST_SUBJECTS \
and tools.data_tool.getSingularKeyword(a[0]) in fields:
outdata[tools.data_tool.getSingularKeyword(a[0])] = tools.data_tool.splitDelimitedValues(val)[0]
elif tools.data_tool.getPluralKeyword(a[0]) in B.LIST_SUBJECTS \
and tools.data_tool.getPluralKeyword(a[0]) in subtables:
xsubtable = Testcase.setSubtable(job, xsubtable, k, val, outdata)
else:
outdata[k] = data[k]
if B.DATA_NODE_OPTION in outdata and len(outdata[B.DATA_NODE_OPTION]) == 0:
outdata.pop(B.DATA_NODE_OPTION)
if B.DATA_NODE_HEAD in outdata and len(outdata[B.DATA_NODE_HEAD]) == 0:
outdata.pop(B.DATA_NODE_HEAD)
outdata[B.NODE_ATTRIBUTES] = attributes
for k in xsubtable:
if k == "xkeys":
continue
outdata[k] = xsubtable[k]
except Exception as e:
print(traceback.format_exc())
return outdata
@staticmethod
def setSubtable(job, xsubtable, key: str, val: str, data: dict):
a = key.split("-")
if tools.data_tool.getPluralKeyword(a[0]) not in xsubtable:
xkeys = []
subtable = {}
for x in tools.data_tool.splitDelimitedValues(val):
if x == "" or x[:1] == "#": break
subtable[x] = {D.FIELD_NAME: x}
xkeys.append(x)
else:
subtable = xsubtable[tools.data_tool.getPluralKeyword(a[0])]
i = 0
vals = tools.data_tool.splitDelimitedValues(val)
xkeys = xsubtable["xkeys"]
for x in xkeys:
subtable[x][a[1]] = vals[i]
i += 1
xsubtable["xkeys"] = xkeys
xsubtable[tools.data_tool.getPluralKeyword(a[0])] = subtable
return xsubtable
@staticmethod
def check_data(job, data: dict) -> dict:
checkNodes = {}
checkNodes[tools.file_type.MUST_NODES] = [B.SUBJECT_APP, B.SUBJECT_DATATABLES, B.SUBJECT_STEPS, B.SUBJECT_VARIANTS,
B.NODE_ATTRIBUTES]
checkNodes[tools.file_type.MUSTNT_NODES] = [B.DATA_NODE_OPTION, B.DATA_NODE_DATA, B.DATA_NODE_FIELDS, B.DATA_NODE_HEADER]
checkNodes[tools.file_type.OPT_NODES] = [B.SUBJECT_USECASES, B.SUBJECT_STORIES]
return tools.file_type.check_nodes(job, data, checkNodes)
def select_entity(self, job, name):
"""
reads the entity from the database
it should get the same result as read_entity
:param job:
:param name:
:return:
"""
return self.read_entity(job, name)
def select_testcase(job, project, testcase):
"""
to select a concrete testcase
:param job:
:param project:
:param testcase:
:return:
"""
jobProj = None
if hasattr(job.par, B.PAR_PROJ):
jobProj = getattr(job.par, B.PAR_PROJ)
setattr(job.par, B.PAR_PROJ, project)
path = tools.path_tool.compose_path(job, P.P_TDROOT, None)
specpath = os.path.join(path, testcase, D.DFILE_TESTCASE_NAME + ".csv")
spec = model.entity.read_spec(job, testcase, J.GRAN_TS, specpath)
if jobProj is None:
delattr(job.par, B.PAR_PROJ)
else:
setattr(job.par, B.PAR_PROJ, jobProj)
print("select_testcase "+str(spec))
return spec
def select_testcases(job, projList, appList):
out = {}
jobProj = None
if hasattr(job.par, B.PAR_PROJ):
jobProj = getattr(job.par, B.PAR_PROJ)
for proj in projList:
setattr(job.par, B.PAR_PROJ, proj)
path = tools.path_tool.compose_path(job, P.P_TDROOT, None)
if os.path.exists(path):
for d in os.listdir(path):
if not os.path.isdir(os.path.join(path, d)):
continue
if d[0:1] == "_":
continue
specpath = os.path.join(path, d, D.DFILE_TESTCASE_NAME + ".csv")
spec = model.entity.read_spec(job, d, J.GRAN_TS, specpath)
if spec is None:
continue
out[d] = spec
out[d][B.SUBJECT_PROJECTS] = [proj]
if jobProj is None:
delattr(job.par, B.PAR_PROJ)
else:
setattr(job.par, B.PAR_PROJ, jobProj)
return out
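To make the head/option handling above concrete, a sketch of the mapping buildHeadAttributes performs; the input is invented, the node-names are assumed to come from basic.constants and job is a configured framework job:

# illustration only - input keys and values are invented
indata = {
    B.DATA_NODE_HEAD: {B.SUBJECT_APP: "bankapp"},   # head-field contained in LIST_FIELDS
    B.DATA_NODE_OPTION: {"priority": "high"},       # plain option-entry
}
outdata = Testcase.buildHeadAttributes(job, indata, Testcase.LIST_FIELDS, Testcase.LIST_SUBTABLES)
# expected: the head-field becomes an object-field, the option-entry is collected
# under the attributes-node, and the emptied head/option-nodes are popped:
#   outdata[B.SUBJECT_APP] == "bankapp"
#   outdata[B.NODE_ATTRIBUTES] == {"priority": "high"}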

13
model/testplan.csv

@@ -0,0 +1,13 @@
_type;ctlg;;;;;;;;;
_key;_field;;;;;;;;;
table:testplan;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tpid;pk;integer;N;;;T:1;;;
;name;str;vchar(256);I;;;F:1;;;
;description;string;vchar(256);N;;;;;;
;project;string;vchar(256);I;;;;;;
;prelease;string;vchar(256);I;;;;;;
;reference;str;vchar(256);N;;;;;;story
;attributes;jlob;jlob;N;;;;;;
;applications;subtable;subtable;N;;;;;;
;steps;subtable;subtable;N;;;;;;
;testsuites;subtable;subtable;N;;;;;;

77
model/testplan.py

@@ -0,0 +1,77 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import basic.toolHandling
import tools.data_const as D
import basic.constants as B
import tools.path_const as P
import model.entity
import tools.config_tool
FIELD_ID = "tpid"
FIELD_NAME = D.FIELD_NAME
FIELD_DESCRIPTION = B.SUBJECT_DESCRIPTION
FIELD_REFERENCE = B.SUBJECT_REFERENCE
FIELD_PROJECT = B.SUBJECT_PROJECT
FIELD_RELEASE = B.SUBJECT_REL
class Testplan(model.entity.Entity):
LIST_FIELDS = [FIELD_ID, FIELD_NAME, FIELD_DESCRIPTION, FIELD_REFERENCE, FIELD_PROJECT, FIELD_RELEASE]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES]
LIST_SUBTABLES = [B.SUBJECT_STEPS, B.SUBJECT_TESTSUITES, B.SUBJECT_APPS]
PREFIX_SUBTABLE = "tp"
name = ""
description = ""
prelease = ""
testsuites = {}
steps = []
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
config = self.getConfig(job, P.KEY_BASIC, B.SUBJECT_VARIANTS, tools.config_tool.get_plain_filename(job, name))
return self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)
def getFieldList(self):
"""
returns a list of scalar attributes
:return: LIST_FIELDS
"""
return self.LIST_FIELDS
def getNodeList(self):
"""
returns a list of sub-nodes - which can be persisted in a clob-field
:return: LIST_NODES
"""
return self.LIST_NODES
def getSubtableList(self):
"""
returns a list of sub-tables
:return: LIST_SUBTABLES
"""
return self.LIST_SUBTABLES
def getName(self):
"""
returns the name - maybe build from other attributes
:return:
"""
return self.name
def getIDName(self):
"""
it returns the name as unique-id - maybe build from few attributes
:return:
"""
return self.name

24
model/testreport.csv

@@ -0,0 +1,24 @@
_type;ctlg;;;;;;;;;
_key;_field;;;;;;;;;
table:testreport;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;trid;pk;integer;N;;;T:1;;;
;name;str;vchar(256);I;;;F:1;;;
;description;string;vchar(256);N;;;;;;
;project;string;vchar(256);I;;;;;;
;application;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;prelease;str;vchar(256);I;;;;;;
;environment;str;vchar(256);N;;;;;;
;testplan;str;vchar(256);N;;;;;;
;testsuite;str;vchar(256);N;;;;;;
;testcase;str;vchar(256);N;;;;;;
;artifact;str;vchar(256);N;;;;;;
;result;str;vchar(256);I;;;;;;
;attributes;jlob;clob;N;;;;;;
;insauthor;str;vchar(256);N;;;;;;
;inscommit;str;vchar(256);N;;;;;;
;instime;time;datetime;N;;;;;;
;updauthor;str;vchar(256);N;;;;;;
;updcommit;str;vchar(256);N;;;;;;
;updtime;time;datetime;N;;;;;;
;actual;int;integer;I;;;;;;

13
model/testsuite.csv

@@ -0,0 +1,13 @@
_type;ctlg;;;;;;;;;
_key;_field;;;;;;;;;
table:testsuite;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tsid;pk;integer;N;;;T:1;;;
;name;str;vchar(256);I;;;F:1;;;
;description;string;vchar(256);N;;;;;;
;project;string;vchar(256);I;;;;;;
;application;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;story
;attributes;jlob;jlob;N;;;;;;
;steps;subtable;subtable;N;;;;;;
;usecases;subtable;subtable;N;;;;;;
;testcases;subtable;subtable;N;;;;;;

186
model/testsuite.py

@@ -0,0 +1,186 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.toolHandling
import tools.job_const as J
import tools.data_const as D
import basic.constants as B
import model.entity
import tools.config_tool
import tools.job_tool
import tools.path_tool
import tools.path_const as P
import model.entity
import tools.file_type
TABLE_NAMES = ["application", "ap_project", "ap_component"]
STORAGES = [model.entity.STORAGE_FILE, model.entity.STORAGE_DB]
""" used storage in priority sortage, so: if file then read_fcts / if db then select-fcts """
DEFAULT_SYNC = model.entity.SYNC_FULL_GIT2DB
FIELD_ID = "tsid"
FIELD_NAME = D.FIELD_NAME
FIELD_DESCRIPTION = B.SUBJECT_DESCRIPTION
FIELD_REFERENCE = B.SUBJECT_REFERENCE
FIELD_PROJECT = B.SUBJECT_PROJECT
LIST_FIELDS = [FIELD_ID, FIELD_NAME, FIELD_DESCRIPTION, FIELD_REFERENCE, FIELD_PROJECT]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES]
SUB_USECASE = B.SUBJECT_USECASES
SUB_TESTCASES = B.SUBJECT_TESTCASES
SUB_STEPS = "steps"
LIST_SUBTABLES = [SUB_USECASE, SUB_STEPS, SUB_TESTCASES]
LIST_SUB_DESCRIPT = [D.DATA_ATTR_USECASE_DESCR]
FILE_EXTENSION = D.DFILE_TYPE_YML
UNIQUE_FIELDS = [FIELD_NAME]
""" unique business field as human identifer """
IDENTIFYER_FIELDS = [FIELD_ID]
""" unique technical field as technical identifer """
class Testsuite(model.entity.Entity):
FIELD_ID = "tsid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE, B.SUBJECT_PROJECT, B.SUBJECT_APP]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES]
LIST_SUBTABLES = [B.SUBJECT_USECASES, B.SUBJECT_STEPS, B.SUBJECT_TESTCASES]
PREFIX_SUBTABLE = "ts"
name = ""
description = ""
application = ""
usecase = []
testcases = {}
tables = {}
steps = []
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param opt. project: select-criteria if used and defined
:param opt. application: select-criteria if used and defined
:param opt. gran: granularity values testcase / testsuite / testplan
:param opt. args additional args
:return: list of entity-names
"""
path = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_TDATA], getattr(job.par, B.SUBJECT_PROJECT),
B.SUBJECT_TESTSUITES)
outList = self.getDirlist(job, path, "")
return outList
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
config = self.getConfig(job, P.KEY_TESTSUITE, tools.config_tool.get_plain_filename(job, name), "", ttype=B.SUBJECT_TESTSUITE)
return self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)
@staticmethod
def rebuild_data(job, data: dict) -> dict:
"""
gets the subtable-tag from filecsv and converts the subtables into workable entity-elements
:param job:
:param data:
:return:
"""
data = tools.file_type.popSubjectsNode(job, data)
# data = tools.file_type.popNameNode(job, data)
return data
def check_data(self, job, data: dict) -> dict:
"""
it checks the data for the specific form
:param job:
:param data:
:return:
"""
checkNodes = {}
checkNodes[tools.file_type.MUST_NODES] = [B.SUBJECT_TESTCASES]
checkNodes[tools.file_type.MUSTNT_NODES] = [B.DATA_NODE_DATA, B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
checkNodes[tools.file_type.OPT_NODES] = [B.SUBJECT_APPS, B.SUBJECT_USECASES]
tools.file_type.check_nodes(job, data, checkNodes)
return data
def old_read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
# r = tools.config_tool.select_config_path(job, P.KEY_TESTCASE, "TC0001")
setattr(self, FIELD_NAME, name)
config = self.getConfig(job, P.KEY_TESTSUITE, name, tools.config_tool.get_plain_filename(job, name))
#self.setAttributes(config, name, LIST_FIELDS, LIST_SUBTABLES)
for k in LIST_SUBTABLES:
if not hasattr(self, k):
continue
if "_"+k in config[name] and "_"+k+"-description" in LIST_SUB_DESCRIPT:
values = {}
if "_"+k+"-description" in config[name]:
for l in config[name]["_"+k]:
if l in config[name]["_"+k+"-description"]:
values[config[name]["_"+k][l]] = config[name]["_" + k + "-description"][l]
else:
values[config[name]["_"+k][l]] = ""
else:
for l in config[name]["_"+k]:
values[config[name]["_" + k][l]] = ""
setattr(self, k, values)
return self
def select_testsuite(job, project, testsuite):
jobProj = None
print("testsuite select: "+str(project)+" "+str(testsuite))
if hasattr(job.par, B.PAR_PROJ):
jobProj = getattr(job.par, B.PAR_PROJ)
setattr(job.par, B.PAR_PROJ, project)
path = tools.path_tool.compose_path(job, P.P_TDROOT, None)
specpath = os.path.join(path, testsuite, D.DFILE_TESTSUITE_NAME + ".csv")
spec = model.entity.read_spec(job, testsuite, J.GRAN_TS, specpath)
if jobProj is None:
delattr(job.par, B.PAR_PROJ)
else:
setattr(job.par, B.PAR_PROJ, jobProj)
return spec
def select_testsuites(job, projList, appList):
out = {}
jobProj = None
print("testsuite select: "+str(projList)+" "+str(appList))
if hasattr(job.par, B.PAR_PROJ):
jobProj = getattr(job.par, B.PAR_PROJ)
for proj in projList:
setattr(job.par, B.PAR_PROJ, proj)
path = tools.path_tool.compose_path(job, P.P_TDROOT, None)
if os.path.exists(path):
for d in os.listdir(path):
if not os.path.isdir(os.path.join(path, d)):
continue
if d[0:1] == "_":
continue
print(d)
specpath = os.path.join(path, d, D.DFILE_TESTSUITE_NAME + ".csv")
spec = model.entity.read_spec(job, d, J.GRAN_TS, specpath)
if spec is None:
continue
out[d] = spec
out[d][B.SUBJECT_PROJECTS] = [proj]
if jobProj is None:
delattr(job.par, B.PAR_PROJ)
else:
setattr(job.par, B.PAR_PROJ, jobProj)
return out
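A usage sketch for the module-level select-functions; a configured job-object from the framework is assumed:

# sketch only - job comes from the framework
suites = select_testsuites(job, ["PROJ-A"], [])
for name, spec in suites.items():
    print(name, spec[B.SUBJECT_PROJECTS])   # each spec carries the project it was found in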

98
model/topic.py

@@ -0,0 +1,98 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
"""
a spontaneous flyweight which is built from one of the following contexts.
step:
here as the technique to be applied
artefact:
here as the technique to be applied at init or collect
component:
as superclass in order to inherit missing attributes
environment:
as superclass in order to inherit missing attributes
in the respective technical parent-classes the attributes are collected and compressed at execution-time.
the configurations collected this way are passed to the respective function-class.
"""
import os
import basic.constants as B
import model.entity
import tools.path_const as P
import tools.data_tool
import tools.data_const as D
TABLE_NAME = tools.data_tool.getSingularKeyword(B.DATA_NODE_TOPICS)
""" system-name for this entity """
FIELD_ID = "toid"
FIELD_NAME = D.FIELD_NAME
LIST_FIELDS = [FIELD_ID, FIELD_NAME]
""" list of object-attributes """
LIST_NODES = []
LIST_SUBTABLES = {}
component = ""
environment = ""
topic = "" #
""" one of B.LIST_TOPIC_NODES """
type = "" #
""" one of the implemented Tools """
class Topic(model.entity.Entity):
name = ""
type = ""
attributes = ""
environment = ""
component = ""
def __init__(self, job, environment, component, name=""):
"""
to be initialized by readSpec
"""
self.job = job
self.environment = environment
self.component = component
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
config = self.getConfig(job, P.KEY_BASIC, B.SUBJECT_VARIANTS, "")
# TODO the attributes are not yet set from the config - compare read_entity in the other entities
def getFieldList(self):
"""
returns a list of scalar attributes
:return: LIST_FIELDS
"""
return LIST_FIELDS
def getNodeList(self):
"""
returns a list of sub-nodes - which can be persisted in a clob-field
:return: LIST_NODES
"""
return LIST_NODES
def getSubtableList(self):
"""
returns a list of sub-tables
:return: LIST_SUBTABLES
"""
return LIST_SUBTABLES
def getName(self):
"""
returns the name - maybe build from other attributes
:return:
"""
return self.name
def getIDName(self):
"""
it returns the name as unique-id - maybe build from few attributes
:return:
"""
return self.name
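The module-docstring above describes how attributes are collected in the parent-classes and compressed at execution-time; a sketch of that merge-idea, with the function and its names invented for illustration:

def collect_attributes(step_attrs: dict, comp_attrs: dict, env_attrs: dict) -> dict:
    # invented sketch of the described attribute-inheritance
    merged = dict(env_attrs)      # environment as outermost superclass
    merged.update(comp_attrs)     # the component overrides the environment
    merged.update(step_attrs)     # the concrete step wins
    return merged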

6
model/tp_applications.csv

@@ -0,0 +1,6 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:tp_applications;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tpappid;pk;vchar(256);N;;;;;;
;tpid;int;vchar(256);I;;;;;;
;appid;int;vchar(256);I;;;;;;

8
model/tp_steps.csv

@@ -0,0 +1,8 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:tp_steps;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tpstpid;pk;integer;N;;;;;;
;tpid;int;integer;I;;;T:1;;;
;spid;int;integer;I;;;T:2;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;

8
model/tp_testsuites.csv

@@ -0,0 +1,8 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:tp_testsuites;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tptsuid;pk;integer;N;;;;;;
;tpid;int;integer;I;;;T:1;;;
;tsid;int;integer;I;;;T:2;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;

9
model/tr_artifacts.csv

@@ -0,0 +1,9 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:tr_artifacts;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;trartid;pk;integer;N;;;;;;
;trid;int;integer;I;;;T:1;;;
;arid;int;integer;I;;;T:2;;;
;result;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;

8
model/ts_steps.csv

@@ -0,0 +1,8 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:ts_steps;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tsstpid;pk;integer;N;;;;;;
;tsid;int;integer;I;;;T:1;;;
;spid;int;integer;I;;;T:2;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;

8
model/ts_testcases.csv

@@ -0,0 +1,8 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:ts_testcases;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tstcsid;pk;integer;N;;;;;;
;tsid;int;integer;I;;;T:1;;;
;tcid;int;integer;I;;;T:2;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;

8
model/ts_usecases.csv

@@ -0,0 +1,8 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:ts_usecases;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;tsucsid;pk;integer;N;;;;;;
;tsid;int;integer;I;;;T:1;;;
;ucid;int;integer;I;;;T:2;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;

17
model/usecase.csv

@@ -0,0 +1,17 @@
_type;ctlg;;;;;;;;;
_key;_field;;;;;;;;;
table:usecase;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;ucid;pk;int;N;;;;;;
;name;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;project;string;vchar(256);I;;;;;;
;application;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;attributes;string;jlob;N;;;;;;
;insauthor;str;vchar(256);N;;;;;;
;inscommit;str;vchar(256);N;;;;;;
;instime;time;vchar(256);N;;;;;;
;updauthor;str;vchar(256);N;;;;;;
;updcommit;str;vchar(256);N;;;;;;
;updtime;time;vchar(256);N;;;;;;
;actual;int;vchar(256);I;;;;;;

63
model/usecase.py

@@ -0,0 +1,63 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import basic.constants as B
import model.entity
import tools.path_const as P
import tools.data_const as D
import tools.config_tool
import tools.file_tool
import tools.git_tool
import tools.file_type
TABLE_NAME = "uscase"
""" system-name for this entity """
FIELD_ID = "ucid"
FILE_EXTENSION = D.DFILE_TYPE_CSV
UNIQUE_FIELDS = [D.FIELD_NAME]
""" unique business field as human identifer """
IDENTIFYER_FIELDS = [FIELD_ID]
""" unique technical field as technical identifer """
class Usecase(model.entity.Entity):
FIELD_ID = "ucid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE, B.SUBJECT_PROJECT, B.SUBJECT_APP]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES]
LIST_SUBTABLES = []
ucid = 0
usecase = ""
project = ""
application = ""
description = ""
reference = ""
attributes = ""
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param opt. project: select-criteria if used and defined
:param opt. application: select-criteria if used and defined
:param opt. gran: granularity values testcase / testsuite / testplan
:param opt. args additional args
:return: list of entity-names
"""
config = self.getConfig(job, P.KEY_CATALOG, B.SUBJECT_USECASES,
tools.config_tool.get_plain_filename(job, ""), ttype=D.CSV_SPECTYPE_CTLG)
outList = list(config[B.SUBJECT_USECASES][B.DATA_NODE_KEYS].keys())
return outList
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
config = self.getConfig(job, P.KEY_CATALOG, B.SUBJECT_USECASES,
tools.config_tool.get_plain_filename(job, name), ttype=D.CSV_SPECTYPE_CTLG)
return self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)

12
model/user.csv

@@ -0,0 +1,12 @@
_type;ddl;;;;;;;;;
_key;_field;;;;;;;;;
table:user;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;id;pk;autoint;N;;;T:1;;;
;username;str;vchar(256);N;;;F:1;;;
;name;str;vchar(256);N;;;;;;
;famname;str;vchar(256);N;;;;;;
;email;string;vchar(256);N;;;;;;
;password;string;vchar(256);N;;;;;;
;project;string;vchar(256);N;;;;;;
;role;string;vchar(256);N;;;;;;
;attributes;string;jlob;N;;;;;;

297
model/user.py

@@ -0,0 +1,297 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.toolHandling
import basic.constants as B
import model.entity
import tools.path_const as P
import tools.data_const as D
import tools.config_tool
import tools.file_tool
import tools.db_abstract
import tools.git_tool
TABLE_NAME = "user"
""" system-name for this entity """
FIELD_ID = "id"
FIELD_USERNAME = "username"
FILE_EXTENSION = D.DFILE_TYPE_YML
UNIQUE_FIELDS = [FIELD_USERNAME]
""" unique business field as human identifer """
IDENTIFYER_FIELDS = [FIELD_ID]
""" unique technical field as technical identifer """
class User(model.entity.Entity):
FIELD_ID = "id"
FIELD_USERNAME = "username"
FIELD_NAME = "name"
FIELD_FAMNAME = "famname"
FIELD_EMAIL = "email"
FIELD_PASSWORD = "password"
FIELD_PROJECT = B.SUBJECT_PROJECT
FIELD_ROLE = "role"
LIST_FIELDS = [FIELD_ID, FIELD_ROLE, FIELD_PROJECT, FIELD_PASSWORD, FIELD_EMAIL, FIELD_FAMNAME, FIELD_NAME,
FIELD_USERNAME]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES]
LIST_SUBTABLES = {}
UNIQUE_FIELDS = [FIELD_USERNAME]
id = 0
username = ""
name = ""
famname = ""
email = ""
password = ""
project = ""
role = ""
attributes = ""
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param opt. project: select-criteria if used and defined
:param opt. application: select-criteria if used and defined
:param opt. gran: granularity values testcase / testsuite / testplan
:param opt. args additional args
:return: list of entity-names
"""
outList = []
path = os.path.join(job.conf[B.TOPIC_PATH][B.ATTR_PATH_HOME], P.VAL_CONFIG, "user")
for k in os.listdir(path):
filename = tools.config_tool.get_plain_filename(job, k)
if "default" == filename:
continue
outList.append(filename)
return outList
def select_unique_names(self, job, project, application, gran, args):
"""
reads the entity-names from file-storage
:param job:
:param project: optional select-criterion if used and defined
:param application: optional select-criterion if used and defined
:param gran: optional granularity value testcase / testsuite / testplan
:param args: optional additional arguments
:return: list of entity-names
"""
outList = []
self.setDbAttributes(job, [TABLE_NAME])
dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB]["type"])
data = dbi.selectRows(TABLE_NAME, job)
checkList = {}
for row in data[B.DATA_NODE_DATA]:
key = ""
for f in UNIQUE_FIELDS:
key += "_" + row[f]
if key in checkList:
continue
else:
checkList[key] = key
fields = []
for f in UNIQUE_FIELDS:
fields.append(row[f])
outList.append(fields)
return outList
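select_unique_names deduplicates the selected rows by concatenating all UNIQUE_FIELDS into one composite key and keeping only the first row per key. The same pattern, isolated into a stand-alone sketch:

# stand-alone sketch of the composite-key deduplication used above
def unique_rows(rows, unique_fields):
    seen = set()
    out = []
    for row in rows:
        key = "_".join(str(row[f]) for f in unique_fields)
        if key in seen:
            continue
        seen.add(key)
        out.append([row[f] for f in unique_fields])
    return out

assert unique_rows([{"username": "a"}, {"username": "a"}, {"username": "b"}],
                   ["username"]) == [["a"], ["b"]]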
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
print("name "+name)
config = model.user.User.getUserConfig(job, tools.config_tool.get_plain_filename(job, name))
for k in self.LIST_FIELDS:
if k not in config:
continue
setattr(self, k, config[k])
return self
@staticmethod
def rebuild_data(job, data: dict) -> dict:
"""
takes the subtable-tag from the csv-file and converts the subtables into workable entity-elements
:param job:
:param data:
:return:
"""
data = tools.file_type.popSubjectsNode(job, data)
data = tools.file_type.popNameNode(job, data)
outdata = {}
for k in data:
if k == "sysuser":
sysuser = User.getCurrentUser(job)
outdata[sysuser] = data[k]
outdata[sysuser][FIELD_USERNAME] = sysuser
else:
outdata[k] = data[k]
return outdata
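rebuild_data resolves the reserved key sysuser to the account that runs the job, so a checked-in user-file can contain an entry for "whoever executes this". Reduced to its core, the substitution works like this (a sketch; the USERNAME/USER fallback is an assumption):

import os

# sketch: replace the placeholder key "sysuser" by the actual os-user
def resolve_sysuser(data):
    current = os.environ.get("USERNAME") or os.environ.get("USER", "unknown")
    out = {}
    for k, v in data.items():
        if k == "sysuser":
            out[current] = dict(v, username=current)
        else:
            out[k] = v
    return out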
def check_data(self, job, data: dict) -> dict:
"""
it checks the data for the specific form
:param job:
:param data:
:return:
"""
import tools.file_type
checkNodes = {}
checkNodes[tools.file_type.MUST_NODES] = []
checkNodes[tools.file_type.MUSTNT_NODES] = [B.DATA_NODE_DATA, B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS] + B.LIST_SUBJECTS
checkNodes[tools.file_type.OPT_NODES] = []
return tools.file_type.check_nodes(job, data, checkNodes)
def select_entity(self, job, name, row={}):
"""
reads the entity from the database
it should return the same result as read_entity
:param job:
:param name: unique field as string, unique fields as list
the unique-fields are defined in the class
:return: itself with filled object-attributes
"""
if row is None or len(row) == 0:
self.setDbAttributes(job, [TABLE_NAME])
dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB]["type"])
if type(name) is list:
names = name
elif type(name) is str:
names = [name]
condition = "where"
for f, v in zip(UNIQUE_FIELDS, names):
# TODO other db-formats than string have to be implemented
condition += " and " + f + " = \'" + v + "\'"
condition = condition.replace("where and", "where")
data = dbi.selectRows(TABLE_NAME, job, condition)
if len(data[B.DATA_NODE_DATA]) > 1:
raise Exception("single selection with more than one result: "+names[0])
elif len(data[B.DATA_NODE_DATA]) == 1:
row = data[B.DATA_NODE_DATA][0]
else:
raise Exception("no result for: "+names[0])
for k in self.LIST_FIELDS:
if k not in row:
continue
setattr(self, k, row[k])
return self
def write_entity(self, job, name):
"""
writes the entity into the file-system
it is similar to update_entity
:param job:
:param name:
:return:
"""
config = {}
config[model.user.TABLE_NAME] = {}
pathname = os.path.join(job.conf[B.TOPIC_PATH][P.ATTR_PATH_HOME], P.VAL_CONFIG,
P.VAL_USER, name + ".yml")
for k in self.LIST_FIELDS:
if getattr(self, k, "") == "" \
or k == FIELD_ID:
continue
config[model.user.TABLE_NAME][k] = getattr(self, k, "")
tools.file_tool.write_file_dict(job.m, job, pathname, config)
return self
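write_entity stores every non-empty attribute except the technical id under the table-name and writes the dict as yaml into the user's config-directory. Serialized with plain pyyaml (which is listed in requirements.txt), the file for a user jdoe would look roughly like this; the exact layout produced by tools.file_tool.write_file_dict may differ:

import yaml  # pyyaml, listed in requirements.txt

# sketch: the structure written by write_entity, rendered by a plain safe_dump
config = {"user": {"username": "jdoe", "name": "John", "famname": "Doe",
                   "email": "jdoe@example.org"}}
print(yaml.safe_dump(config, default_flow_style=False))
# user:
#   email: jdoe@example.org
#   famname: Doe
#   name: John
#   username: jdoe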
def insert_entity(self, job, name, table="", rows={}):
"""
inserts the entity into the database
it is similar to update_entity
:param job:
:param name:
:return:
"""
if table == "": table = TABLE_NAME
self.setDbAttributes(job, [TABLE_NAME])
dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB]["type"])
condition = "where"
for f in UNIQUE_FIELDS:
# TODO other db-formats than string has to be implemented
condition += " and " + f + " = \'" + getattr(self, f, "") + "\'"
condition = condition.replace("where and", "where ")
data = dbi.selectRows(TABLE_NAME, job, condition)
if len(data[B.DATA_NODE_DATA]) > 0:
print("update statt insert")
return
if rows is None or len(rows) == 0:
insheader = dbi.getInsertFields(self.conf[B.DATA_NODE_DDL][table])
rows = []
row = {}
for f in insheader:
row[f] = getattr(self, f)
rows.append(row)
dbi.insertRows(job, table, rows)
def update_entity(self, job, name):
"""
writes the entity into the database
it is similar to write_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def remove_entity(self, job, name):
"""
removes the entity from the file-system
it is similar to delete_entity
:param job:
:param name: a single name, a list of names, or a dict whose keys are the names
:return:
"""
self.removeEntity(job, name, os.path.join(job.conf[B.TOPIC_PATH][P.ATTR_PATH_HOME], P.VAL_CONFIG, P.VAL_USER), "yml")
def delete_entity(self, job, name, table):
"""
deletes the entity from the database
it is similar to remove_entity
:param job:
:param name:
:return:
"""
self.setDbAttributes(job, [TABLE_NAME])
dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB]["type"])
condition = "where"
if B.DATA_NODE_DDLKEYS in self.conf[B.DATA_NODE_DDL][table]:
keys = self.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_DDLKEYS]
else:
keys = self.conf[B.DATA_NODE_DDL][table]
for f in IDENTIFYER_FIELDS:
# TODO other db-formats than string has to be implemented
val = dbi.getDbValue(keys[f], getattr(self, f, ""))
condition += " and " + f + " = " + val + ""
condition = condition.replace("where and", "where ")
dbi.deleteRows(job, table, condition)
@staticmethod
def getUserConfig(job, name):
"""
reads the user-configuration from the file-storage
it should return the same result as read_entity
:param job:
:param name:
:return:
"""
config = tools.config_tool.getConfig(job, P.KEY_USER, name, ttype=B.SUBJECT_USER)
if config is not None:
return config
if name == model.user.User.getCurrentUser(job):
config = tools.config_tool.getConfig(job, P.KEY_USER, "default", ttype=B.SUBJECT_USER)
if "user" in config:
config = config["user"]
if config is not None:
config["username"] = name
return config
raise Exception("keine Config zu "+name)
@staticmethod
def getCurrentUser(job):
return os.environ.get("USERNAME")
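Note that os.environ.get("USERNAME") is only set on Windows; on Linux the variable is USER. If portability is ever needed, a fallback via getpass would be one option (an assumption, not part of this change):

import getpass
import os

def get_current_user():
    # USERNAME is Windows-specific, USER the POSIX counterpart;
    # getpass.getuser() additionally consults the password database
    return os.environ.get("USERNAME") or os.environ.get("USER") or getpass.getuser()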

17
model/variant.csv

@ -0,0 +1,17 @@
_type;ctlg;;;;;;;;;
_key;_field;;;;;;;;;
table:variant;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;vrid;pk;int;N;;;;;;
;name;str;vchar(256);I;;;;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;
;project;str;vchar(256);N;;;;;;
;component;str;vchar(256);N;;;;;;
;attributes;string;jlob;N;;;;;;
;insauthor;str;vchar(256);N;;;;;;
;inscommit;str;vchar(256);N;;;;;;
;instime;time;vchar(256);N;;;;;;
;updauthor;str;vchar(256);N;;;;;;
;updcommit;str;vchar(256);N;;;;;;
;updtime;time;vchar(256);N;;;;;;
;actual;int;vchar(256);I;;;;;;

88
model/variant.py

@ -0,0 +1,88 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.toolHandling
import basic.componentHandling
import basic.constants as B
import model.entity
import tools.path_const as P
import tools.data_const as D
import tools.config_tool
import tools.file_tool
import tools.git_tool
TABLE_NAME = "variant"
""" system-name for this entity """
FIELD_ID = "vrid"
FILE_EXTENSION = D.DFILE_TYPE_YML
UNIQUE_FIELDS = [D.FIELD_NAME]
""" unique business field as human identifer """
IDENTIFYER_FIELDS = [FIELD_ID]
""" unique technical field as technical identifer """
class Variant(model.entity.Entity):
FIELD_ID = "vrid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE, B.SUBJECT_PROJECT, B.SUBJECT_COMP]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES]
LIST_SUBTABLES = []
""" list of object-attributes """
name = ""
description = ""
reference = ""
attributes = ""
project = ""
component = ""
def read_unique_names(self, job, project, application, gran, args, ttype: str=""):
"""
reads the entity-names from file-storage
:param job:
:param project: optional select-criterion if used and defined
:param application: optional select-criterion if used and defined
:param gran: optional granularity value testcase / testsuite / testplan
:param args: optional additional arguments
:return: list of entity-names
"""
config = self.getConfig(job, P.KEY_BASIC, B.SUBJECT_VARIANTS,
tools.config_tool.get_plain_filename(job, ""), B.SUBJECT_VARIANT)
outList = list(config[B.SUBJECT_VARIANTS].keys())
return outList
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
config = self.getConfig(job, P.KEY_BASIC, B.SUBJECT_VARIANTS,
tools.config_tool.get_plain_filename(job, name), B.SUBJECT_VARIANT)
return self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)
@staticmethod
def rebuild_data(job, data: dict) -> dict:
"""
takes the subtable-tag from the csv-file and converts the subtables into workable entity-elements
:param job:
:param data:
:return:
"""
data = tools.file_type.popSubjectsNode(job, data)
data = tools.file_type.popNameNode(job, data)
return data
@staticmethod
def check_data(job, data: dict) -> dict:
checkNodes = {}
checkNodes[tools.file_type.MUST_NODES] = []
checkNodes[tools.file_type.MUSTNT_NODES] = [B.DATA_NODE_OPTION, B.DATA_NODE_DATA, B.DATA_NODE_FIELDS, B.DATA_NODE_HEADER]
checkNodes[tools.file_type.OPT_NODES] = [B.SUBJECT_PROJECTS, B.NODE_ATTRIBUTES]
return tools.file_type.check_nodes(job, data, checkNodes)
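check_data in each entity-class only assembles three node-lists: nodes that must exist, nodes that must not exist, and optional nodes; tools.file_type.check_nodes then validates the data against them. A minimal sketch of that contract (an assumption; the real check_nodes may report through the job's message object instead of raising):

MUST_NODES = "must"
MUSTNT_NODES = "mustnt"
OPT_NODES = "optional"

# hypothetical sketch of the check_nodes contract used by the entity-classes
def check_nodes(data, checks):
    for node in checks.get(MUST_NODES, []):
        if node not in data:
            raise ValueError("missing mandatory node: " + node)
    for node in checks.get(MUSTNT_NODES, []):
        if node in data:
            raise ValueError("forbidden node present: " + node)
    # optional nodes need no check, they are listed for documentation
    return data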

2
requirements.txt

@ -1,6 +1,6 @@
pyyaml~=6.0
paramiko~=2.9.2
mysql-connector-python
cryptography~=36.0.1
pip~=21.3.1
MarkupSafe~=2.1.1

325
start_dialog.py

@ -1,153 +1,232 @@
#!/usr/bin/python
# program to execute programs for a testcases or for a testsuite
# PARAM from INPUT: --granularity --application --environment --testcase/testsuite
# main functions
# + input_param() : cache-actjob --> user-input --> local-param
# + start_job() : local-param --> cache-actjob --> start-param
# ---------------------------------------------------
#
#
# ----------------------------------------------------------
"""
This program is created for the business-test on the level of unit testing.
On this level there is an intensive test-set up to a carthesian product possible.
The test-specification is written in a yaml-config in the data-directory.
workflow:
1. generate test-cases with all inputs into a csv-file of the spec-yaml
2. run the csv-file and collect all results into a new result-csv-file with all inputs and outputs
3. check the result-csv-file and declare it as a target-results
OR instead 2 and 3 on test-first-strategy:
3. create manually the target-result with your inputs and the target system-outputs
4. run and compare the actual-result with the target-result
5. at the end you get an xls-sheet like your result-csv-file
but additionally with comparsion-result as detailed result and the only counts of passed and failed tests
as minimum-result which can be inserted into management-tools
"""
import os.path
import json
# Press the green button in the gutter to run the script.
import importlib
import os, glob
import io
import sys
import basic.program
import basic.constants as B
import utils.file_tool
tempJob = {}
PROGRAM_NAME = "unit"
JSON_FILE = "actualJob.json"
JOB_NR = {
"init_testsuite": {
"jobnr": "0" },
"execute_testsuite": {
"jobnr": "1"},
"collect_testsuite": {
"jobnr": "2"},
"compare_testsuite": {
"jobnr": "3"},
"finish_testsuite": {
"jobnr": "4"},
"init_testcase": {
"jobnr": "5" },
"execute_testcase": {
"jobnr": "6" },
"collect_testcase": {
"jobnr": "7" },
"compare_testcase": {
"jobnr": "8" },
"check_environment": {
"jobnr": "9" },
"test_executer": {
"jobnr": "10"},
}
JOB_LIST = [
"init_testsuite",
"execute_testsuite",
"collect_testsuite",
"compare_testsuite",
"finish_testsuite",
"init_testcase",
"execute_testcase",
"collect_testcase",
"compare_testcase",
"check_environment",
"test_executer"
]
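JOB_NR and JOB_LIST encode the fixed order of the pipeline-steps; "continue" in the restart-dialog below means looking up the job that follows the stored one. A sketch of that lookup, using the JOB_NR and JOB_LIST defined above:

# sketch: given the stored job-name, find the job that follows it;
# JOB_LIST is ordered by the jobnr stored in JOB_NR
def next_job(actual):
    jobnr = int(JOB_NR[actual]["jobnr"])
    return JOB_LIST[jobnr + 1]

assert next_job("init_testsuite") == "execute_testsuite"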
appList = []
envList = ["ENV01"]
testList = {"TEST": [
"TC0001", "TST001"
]}
def readContext(job):
for k in job.conf.confs["applications"]:
appList.append(k)
import tools.job_tool as job_tool
import model.catalog
import tools.job_const as J
import tools.step_tool
PROGRAM_NAME = "start_dialog"
JOB_PROC = "proc"
verbose = False
def startPyJob(job):
# for debugging: record the used tool on the job-parameters
setattr(job.par, "tool", "job_tool")
# check if an actual job is stored
childArgs = job_tool.read_child_args(job)
if childArgs is None:
childArgs = initDialog(job)
else:
childDialog(job)
pass
def restartActualProcess(job):
def getChoice(job, choiselist, description):
"""
check if an actual process is open
to choose one entry of the list or exit the program
:param job:
:param choiselist:
:param description:
:return:
"""
path = os.path.join(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_DEBUG], JSON_FILE)
if os.path.exists(path):
actProc = utils.file_tool.readFileDict(job, path)
jobNr = int(JOB_NR[actProc["job"]]["jobnr"])
question = "process "
choiceList = ""
if jobNr < 8 and jobNr != 4:
question += "continue (f) | "
choiceList += "f | "
question += "repeat (w) | new process (n) | exit (x)"
choiceList += "w | n | x ?"
print("+-----------------------------------------------")
for k in actProc:
print('| {:6s} : {:60s}'.format(k, actProc[k]))
print("+-----------------------------------------------")
print(question)
choice = input("selection " + choiceList)
choiceList = choiceList.replace(" | ", "")
if choice.lower() not in choiceList[:-1]:
print("ERROR : invalid selection")
elif choice.lower() == "x":
job.stopJob(0)
elif choice.lower() == "w":
startProcess(job, actProc)
elif choice.lower() == "f":
actProc["job"] = JOB_LIST[jobNr + 1]
startProcess(job, actProc)
elif choice.lower() == "n":
createProcess(job)
else:
print("unknown situation")
else:
createProcess(job)
def createProcess(job):
process = {}
index = 0
print("create new process")
process["app"] = getChoice(job, appList, "Anwendung")
#
if len(envList) == 1:
process["env"] = envList[0]
else:
process["env"] = getChoice(job, envList, "Umgebung")
#
process["tc"] = getChoice(job, testList[process["app"]], "Testfall")
print(str(process))
def getChoice(job, choiselist, description):
index = 0
if isinstance(choiselist, dict):
choiselist = list(choiselist.keys())
if len(choiselist) == 0:
raise Exception("Keine Auswahl zu: "+description)
if len(choiselist) == 1:
return choiselist[0]
print("+------------- "+description+" ----------")
print('| {:2d} : {:60s}'.format(0, "exit"))
for k in choiselist:
index += 1
print('| {:2d} : {:60s}'.format(index, k))
print("+-----------------------------------------------")
choice = input("Auswahl 1-" + str(index) + ": ")
if verbose: print("auswahl "+str(choice))
if not choice.isnumeric():
print("FEHLER Fehleingabe "+choice)
getChoice(job, choiselist, description)
elif int(choice) < 1:
job.stopJob(0)
exit(0)
elif int(choice) > index:
print("ERROR invalid input "+choice)
return getChoice(job, choiselist, description)
else:
if verbose: print("treffer "+str(choiselist))
return choiselist[int(choice) - 1]
def initDialog(job):
"""
dialog to initialize a child-process
:param job:
:return:
"""
# which process
args = {}
verify = job.getDebugLevel("job_tool")
if JOB_PROC not in args:
args[JOB_PROC] = getChoice(job, J.LIST_PROC, "which process to start")
args[B.PAR_GRAN] = ""
args[B.PAR_USER] = job_tool.getUser()
args[B.PAR_PROJ] = job_tool.getUserProject()
args[B.PAR_STEP] = ""
job.m.logTrace(verify, args[JOB_PROC])
# """
print("JOB_PROC "+args[JOB_PROC])
if args[JOB_PROC] == J.PROC_TP_EXECUTION:
args[B.PAR_GRAN] = B.PAR_TESTPLAN
args[B.PAR_STEP] = tools.step_tool.getNextStepID(job, 0, "", args[B.PAR_GRAN])
args[B.PAR_PROGRAM] = J.PROG_TEST_EXECUTER
elif args[JOB_PROC] in [J.PROC_TS_STEPWISE, J.PROC_TS_EXECUTION]:
args[B.PAR_GRAN] = B.PAR_TESTSUITE
args[B.PAR_STEP] = tools.step_tool.getNextStepID(job, 0, "", args[B.PAR_GRAN])
if args[JOB_PROC] == J.PROC_TS_EXECUTION:
args[B.PAR_PROGRAM] = J.PROG_TEST_EXECUTER
else:
args[B.PAR_PROGRAM] = J.PROG_TS_INIT
# elif args[JOB_PROC] in [J.PROC_TC_STEPWISE, J.PROC_TC_EXECUTION]:
elif args[JOB_PROC] == J.PROC_TC_EXECUTION:
print("JOB_PROC - 111 " + args[JOB_PROC])
args[B.PAR_GRAN] = B.PAR_TESTCASE
args[B.PAR_STEP] = tools.step_tool.getNextStepID(job, 0, "", args[B.PAR_GRAN])
args[B.PAR_PROGRAM] = J.PROG_TEST_EXECUTER
elif args[JOB_PROC] == J.PROC_TC_STEPWISE:
print("JOB_PROC - 116 " + args[JOB_PROC])
args[B.PAR_GRAN] = B.PAR_TESTCASE
args[B.PAR_STEP] = tools.step_tool.getNextStepID(job, 0, "", args[B.PAR_GRAN])
args[B.PAR_PROGRAM] = J.PROG_TC_INIT
elif args[JOB_PROC] == J.PROC_REDO_EXECUTION:
args[B.PAR_PROGRAM] = getChoice(job, J.LIST_TS_PROGS + J.LIST_TC_PROGS,
"Welches Programm starten")
elif args[JOB_PROC] == J.PROC_SINGLE_JOB:
args[B.PAR_PROGRAM] = getChoice(job, J.LIST_SERVICE_PROG, "which program to start")
else:
args[B.PAR_GRAN] = ""
# args[B.PAR_GRAN] = ""
# """2
catalog = model.catalog.Catalog.getInstance()
setattr(job.par, B.SUBJECT_PROJECT, "TESTPROJ")
programDef = catalog.getValue(job, basic.program.CTLG_NAME, args[B.PAR_PROGRAM], "")
job.m.logTrace(verify, "programdefinition "+str(programDef))
print("programdefinition "+str(programDef))
if verbose: print("programdefinition "+args[B.PAR_PROGRAM]+" "+str(programDef))
#for p in programDef[basic.program.CTLG_PARDEF]:
print(str(args))
for p in [B.PAR_PROJ, B.PAR_GRAN, B.PAR_APP, J.ARG_TESTELEM, B.PAR_COMP, B.PAR_ENV,
B.PAR_TESTPLAN, B.PAR_TESTSUITE, B.PAR_TESTCASE,
B.PAR_TSTIME, B.PAR_TCTIME, B.PAR_TPTIME, B.PAR_VAR]:
if p in args and len(args[p]) > 0:
continue
if p not in programDef[basic.program.CTLG_PARDEF]:
continue
if programDef[basic.program.CTLG_PARDEF][p] != "args":
args[p] = programDef[basic.program.CTLG_PARDEF][p]
continue
print("+ bearbeite "+p)
if programDef[basic.program.CTLG_PARDEF][p] == "args":
description = ""
job.m.logDebug(verify, "to select "+p)
if p == B.PAR_GRAN:
description = J.CHOICE_GRAN
choiceList = J.LIST_GRAN
elif p == B.PAR_APP:
description = J.CHOICE_APP
choiceList = job_tool.select_application(job, programDef, args[B.PAR_PROJ])
elif p == B.PAR_TESTPLAN or (p == J.ARG_TESTELEM and args[B.PAR_GRAN] in [J.GRAN_TP, B.PAR_TESTPLAN]):
description = J.CHOICE_TP
choiceList = job_tool.select_testplan(job, programDef, args)
p = B.PAR_TESTPLAN
if J.ARG_TESTELEM in programDef[basic.program.CTLG_PARDEF]: args[J.ARG_TESTELEM] = p
elif p == B.PAR_TESTSUITE or (p == J.ARG_TESTELEM and args[B.PAR_GRAN] in [J.GRAN_TS, B.PAR_TESTSUITE]):
description = J.CHOICE_TS
choiceList = job_tool.select_testsuite(job, programDef, args)
p = B.PAR_TESTSUITE
if J.ARG_TESTELEM in programDef[basic.program.CTLG_PARDEF]: args[J.ARG_TESTELEM] = p
elif p == B.PAR_TESTCASE or (p == J.ARG_TESTELEM and args[B.PAR_GRAN] in [J.GRAN_TC, B.PAR_TESTCASE]):
description = J.CHOICE_TC
choiceList = job_tool.select_testcase(job, programDef, args)
p = B.PAR_TESTCASE
if J.ARG_TESTELEM in programDef[basic.program.CTLG_PARDEF]: args[J.ARG_TESTELEM] = p
elif p == B.PAR_COMP:
description = J.CHOICE_ENV
choiceList = job_tool.select_components(job, programDef, args[B.PAR_PROJ], args[B.PAR_APP])
choiceAll = ",".join(choiceList)
choiceList.append(choiceAll)
elif p == B.PAR_ENV:
description = J.CHOICE_ENV
choiceList = job_tool.select_environment(job, programDef, args[B.PAR_PROJ])
elif p in [B.PAR_TESTPLAN, B.PAR_TESTSUITE, B.PAR_TESTCASE]:
if args[JOB_PROC] == J.PROC_REDO_EXECUTION:
description = J.CHOICE_ARCHIV
choiceList = job_tool.select_archiv(job, programDef, args[B.PAR_GRAN], args[B.PAR_APP])
else:
args[B.PAR_STEP] = "1"
description = J.CHOICE_SPEC
choiceList = job_tool.select_spec(job, programDef, args[B.PAR_GRAN], args)
elif p in [B.PAR_TSTIME, B.PAR_TCTIME] and args[JOB_PROC] in [J.PROC_REDO_EXECUTION]:
description = J.CHOICE_TIME
choiceList = job_tool.select_testtime(job, programDef, args[B.PAR_GRAN], args)
elif p == B.PAR_VAR:
description = J.CHOICE_VARIANT
choiceList = job_tool.select_variant(job, programDef, args[B.PAR_GRAN], args)
else:
continue
if choiceList is None:
job.m.logError(verify, "choiceList in None "+p)
args[p] = getChoice(job, choiceList, description)
job.m.logDebug(verify, args)
job_tool.write_child_args(job, args)
job_tool.start_child_process(job, args)
childDialog(job)
def startProcess(job, process):
print(str(process))
def childDialog(job):
"""
dialog to manage the child-process
:param job:
:return:
"""
args = job_tool.read_child_args(job)
print("+-----------------------------------------------")
for p in args:
if p in [JOB_PROC]:
continue
if len(args[p]) < 1:
continue
print('+ {:12s} : {:60s}'.format(p, str(args[p])))
print("+-----------------------------------------------")
initDialog(job)
if __name__ == '__main__':
job = basic.program.Job(PROGRAM_NAME)
readContext(job)
restartActualProcess(job)
# TODO to move to job_tool
args = {}
if isinstance(job.programDef[basic.program.CTLG_PARDEF], dict):
for p in job.programDef[basic.program.CTLG_PARDEF]:
args[p] = job.programDef[basic.program.CTLG_PARDEF][p]
job.setParameter(args)
job.startJob()
startPyJob(job)
job.stopJob(0)
