
entities from basic to model

refactor
Ulrich, 2 years ago
commit 693e3d9260
22 changed files:

1. __init__.py (0 changed lines)
2. check_configuration.py (65 changed lines)
3. check_environment.py (14 changed lines)
4. check_specification.py (50 changed lines)
5. clean_workspace.py (115 changed lines)
6. init_testcase.py (12 changed lines)
7. init_testsuite.py (15 changed lines)
8. install_workspace.py (267 changed lines)
9. job_dialog.py (280 changed lines)
10. make_workspace.py (28 changed lines)
11. model/Entity.py (143 changed lines)
12. model/application.py (18 changed lines)
13. model/catalog.py (4 changed lines)
14. model/component.py (83 changed lines)
15. model/entity.py (75 changed lines)
16. model/environment.py (2 changed lines)
17. model/step.py (32 changed lines)
18. model/testcase.py (30 changed lines)
19. model/testplan.py (2 changed lines)
20. model/testsuite.py (7 changed lines)
21. start_dialog.py (9 changed lines)
22. test_executer.py (78 changed lines)

__init__.py (0 changed lines)

check_configuration.py (65 changed lines)

@@ -0,0 +1,65 @@
# This is a sample Python script.
import os
import traceback
import sys
import yaml
import basic.program
import basic.constants as B
import basic.message
import tools.path_const as P
import tools.config_tool as config_tool
import tools.file_tool as file_tool
import model.component
PROGRAM_NAME = "check_configuration"
def startPyJob(job):
try:
job.m.logDebug("--- start " + PROGRAM_NAME + " ------>>>>")
components = job.par.component.split(",")
for c in components:
print("component "+c)
checkComponent(job, c)
job.m.setMsg("Job " + PROGRAM_NAME + " fertig")
job.m.logDebug("<<<<<<<<----- " + PROGRAM_NAME + " ------")
except Exception as e:
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.setFatal(str(e))
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.logDebug("execpt "+traceback.format_exc())
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
def checkComponent(job, componentName):
"""
checks the configurations of the component
:param job:
:param componentName:
:return:
"""
configPath = config_tool.getExistingPath(job, [os.path.join(job.conf[B.SUBJECT_PATH][B.ATTR_PATH_COMPS], componentName, "CONFIG")])
configTree = file_tool.read_file_dict(job, configPath, job.m)
for x in model.component.LIST_CP_SUBJECTS:
if "conf" not in configTree:
job.m.setError(componentName + ": root conf is not set: ")
break
if x not in configTree["conf"]:
job.m.setError(componentName + ": subject is not set: " + x)
else:
for c in configTree["conf"][x]:
if c == "none":
if len(configTree["conf"][x]) != 1:
job.m.setWarn("none is not the only subject in "+x)
continue
if __name__ == '__main__':
job = basic.program.Job(PROGRAM_NAME)
print ("job "+str(job.__dict__))
job.startJob()
if job.m.isRc("fatal"):
job.stopJob()
# now in theory the program is runnable
startPyJob(job)
job.stopJob()
# See PyCharm help at https://www.jetbrains.com/help/pycharm/
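The loop above checks every subject of model.component.LIST_CP_SUBJECTS against the component's CONFIG file. As an illustration (not part of this commit), a self-contained sketch of the structure it expects; the subject names components, steps and tables come from model/component.py further down, while "artifacts" is only an assumed value for B.SUBJECT_ARTS and the sample entries are invented.

import yaml

SAMPLE_CONFIG = """
conf:
  components:
    none: {}
  steps:
    none: {}
  tables:
    customer: {}
  artifacts:
    log: {}
"""

def check_subjects(configTree, subjects):
    # re-implements the rule from checkComponent() on a plain dict
    errors = []
    for x in subjects:
        if "conf" not in configTree:
            errors.append("root conf is not set")
            break
        if x not in configTree["conf"]:
            errors.append("subject is not set: " + x)
        else:
            for c in configTree["conf"][x]:
                if c == "none" and len(configTree["conf"][x]) != 1:
                    errors.append("none is not the only entry in " + x)
    return errors

if __name__ == "__main__":
    tree = yaml.safe_load(SAMPLE_CONFIG)
    print(check_subjects(tree, ["components", "steps", "tables", "artifacts"]))   # -> []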

check_environment.py (14 changed lines)

@@ -5,10 +5,22 @@ import yaml # pip install pyyaml
import basic.program
import basic.componentHandling
import basic.message
import utils.tdata_tool
#import utils.tdata_tool
import traceback
PROGRAM_NAME = "check_environment"
def startPyJob(job):
try:
job.m.logDebug("--- start " + PROGRAM_NAME + " ------>>>>")
job.m.logDebug("<<<<<<<<----- " + PROGRAM_NAME + " ------")
except Exception as e:
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.setFatal(str(e))
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.logDebug("execpt "+traceback.format_exc())
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
if __name__ == '__main__':
x = basic.program.Job(PROGRAM_NAME)
print ("x "+str(x))

check_specification.py (50 changed lines)

@@ -0,0 +1,50 @@
# This is a sample Python script.
import sys
# import jsonpickle # pip install jsonpickle
import yaml # pip install pyyaml
import basic.program
import basic.componentHandling
import basic.message
#import utils.tdata_tool
import traceback
PROGRAM_NAME = "check_specification"
def startPyJob(job):
try:
job.m.logDebug("--- start " + PROGRAM_NAME + " ------>>>>")
job.m.setMsg("Job " + PROGRAM_NAME + " fertig")
job.m.logDebug("<<<<<<<<----- " + PROGRAM_NAME + " ------")
except Exception as e:
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.setFatal(str(e))
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.logDebug("execpt "+traceback.format_exc())
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
def checkHead(job):
pass
if __name__ == '__main__':
x = basic.program.Job(PROGRAM_NAME)
print ("x "+str(x))
x.startJob()
x.m.logDebug(str(vars(x.par)) + "\n" + str(vars(x.conf)))
if x.m.isRc("fatal"):
x.stopJob()
exit(x.m.rc * (-1) + 3)
# now in theory the program is runnable
x.m.setMsg("# job initialized")
cm = basic.componentHandling.ComponentManager.getInstance(x)
print("cm "+str(cm))
cm.initComponents()
comps = cm.getComponents(x, PROGRAM_NAME)
x.m.setMsg("# Components initialized with these relevant components " + str(comps))
for c in comps:
comp = cm.getComponent(c)
print(str(comp))
comp.check_Instance()
x.m.merge(comp.m)
comp.confs["function"][PROGRAM_NAME] = comp.m.topmessage
x.stopJob()
# See PyCharm help at https://www.jetbrains.com/help/pycharm/

clean_workspace.py (115 changed lines)

@@ -0,0 +1,115 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
"""
program to clean the workspace :
* remove old debug-files
"""
import os
import re
import shutil
import sys
import traceback
import basic.program
import basic.constants as B
import tools.date_tool as date_tool
import tools.path_tool as path_tool
import tools.job_tool as job_tool
LIMIT_DEBUG_FILES = -7
PROGRAM_NAME = "clean_workspace"
def startPyJob(job):
try:
job.m.logDebug("--- start " + PROGRAM_NAME + " ------>>>>")
# remove debug-files
removeDebugFiles(job)
# clean and archive log-files
cleanLogFiles(job)
job.m.setMsg("Job " + PROGRAM_NAME + " fertig")
job.m.logDebug("<<<<<<<<----- " + PROGRAM_NAME + " ------")
except Exception as e:
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.setFatal(str(e))
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
job.m.logDebug("execpt "+traceback.format_exc())
job.m.logDebug("+++++++++++++++++++++++++++++++++++++++++++++")
def removeDebugFiles(job):
"""
to remove debug-files in any relevant folder
:param job:
:return:
"""
job.m.logInfo("# # remove log-files # # #")
limit = date_tool.getActdate(date_tool.F_LOG, LIMIT_DEBUG_FILES)[0:8]
path = job.conf[B.SUBJECT_PATH][B.ATTR_PATH_DEBUG]
cleanFolder(job, path, limit)
path = os.path.join(B.HOME_PATH, "temp")
cleanFolder(job, path, limit)
def cleanFolder(job, path, limit):
"""
remove all files in the folder with a log-date older than limit
:param job:
:param path:
:param limit:
:return:
"""
date_regex = r"(.*)_(\d{8})_\d{6}"
cntRm = 0
cntMv = 0
cntAll = 0
for f in os.listdir(path):
cntAll += 1
if re.match(date_regex, f):
res = re.search(date_regex, f)
fileType = str(res.group(1))
fileDate = str(res.group(2))
if fileType in ["debug", "log", "start_dialog"]:
if fileDate >= limit:
continue
job.m.logInfo("remove " + os.path.join(path, f))
os.remove(os.path.join(path, f))
cntRm += 1
else:
fileYear = fileDate[0:4]
actYear = date_tool.getActdate(date_tool.F_LOG)[0:4]
archivPath = os.path.join(path, fileYear)
if fileYear < actYear:
if not os.path.exists(archivPath):
os.mkdir(archivPath)
if not os.path.isdir(archivPath):
raise Exception("archiv-folder is not a directory: " + archivPath)
shutil.move(os.path.join(path, f), os.path.join(archivPath, f))
cntMv += 1
job.m.setMsg(str(cntRm) + " / " + str(cntAll) + " files removed in " + path)
job.m.setMsg(str(cntMv) + " / " + str(cntAll) + " files moved from " + path)
def cleanLogFiles(job):
"""
searches all log-folder in test-documents and remove the oldest log-files except the newest
:param job:
:return:
"""
job.m.logInfo("# # clean log-files # # #")
limit = date_tool.getActdate(date_tool.F_LOG, LIMIT_DEBUG_FILES)[0:8]
path = path_tool.compose_path(job, "{job.par.wsdir}/{log}", None)
cleanFolder(job, path, limit)
environments = job_tool.select_environment(job, "", "ALL")
for env in environments:
jobEnv = ""
if hasattr(job.par, "environment"):
jobEnv = getattr(job.par, "environment")
setattr(job.par, "environment", env)
path = path_tool.compose_path(job, "{envlog}", None)
cleanFolder(job, path, limit)
setattr(job.par, "environment", jobEnv)
pass
if __name__ == '__main__':
job = basic.program.Job(PROGRAM_NAME)
startPyJob(job)
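cleanFolder() relies on the <type>_<YYYYMMDD>_<HHMMSS> naming of the debug and log files, so a plain string comparison of the date part against the limit is sufficient because the date is zero-padded. A small standalone illustration (not part of this commit) with invented file names and an invented limit:

import re

date_regex = r"(.*)_(\d{8})_\d{6}"
limit = "20230101"   # stands in for date_tool.getActdate(date_tool.F_LOG, LIMIT_DEBUG_FILES)[0:8]

for name in ["debug_20221224_101500.txt", "log_20230405_090000.txt", "report_20211231_235959.txt"]:
    res = re.search(date_regex, name)
    fileType, fileDate = res.group(1), res.group(2)
    if fileType in ["debug", "log", "start_dialog"]:
        print(name, "->", "keep" if fileDate >= limit else "remove")
    else:
        print(name, "->", "archive under", fileDate[0:4])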

init_testcase.py (12 changed lines)

@@ -8,12 +8,12 @@
# ---------------------------------------------------import os
import traceback
import basic.program as program
import utils.tdata_tool
#import tools.tdata_tool as tdata_tool
import basic.componentHandling
import basic.constants as B
import utils.file_tool
import utils.path_tool
import utils.path_const as P
import tools.file_tool as file_tool
import tools.path_tool as path_tool
import tools.path_const as P
import basic.message as message
@@ -24,7 +24,7 @@ def startPyJob(job):
cm.initComponents()
comps = cm.getComponents(PROGRAM_NAME)
job.m.setMsg("# Components initialized with these relevant components " + str(comps))
testdata = utils.tdata_tool.getTestdata(job)
testdata = "" # tdata_tool.getTestdata(job)
print("------------------------------------------------------------")
for c in comps:
comp = cm.getComponent(c)
@@ -35,7 +35,7 @@ def startPyJob(job):
if job.hasFunction("load_TData"):
comp.load_TData(job, B.PAR_TESTCASE, testdata)
if job.hasFunction("read_TData"):
comp.read_TData(job, utils.path_tool.getKeyValue(job, P.KEY_PRECOND), B.PAR_TESTCASE)
comp.read_TData(job, path_tool.getKeyValue(job, P.KEY_PRECOND), B.PAR_TESTCASE)
except Exception as e:
txt = traceback.format_exc()
job.m.setFatal("Exception "+str(e)+"\n"+txt)

init_testsuite.py (15 changed lines)

@@ -5,9 +5,15 @@ import yaml # pip install pyyaml
import basic.program
from basic.componentHandling import ComponentManager
import basic.message
import utils.tdata_tool
# import tools.tdata_tool as tdata_tool
PROGRAM_NAME = "init_testset"
def startPyJob(job):
cm = basic.componentHandling.ComponentManager.getInstance(job)
cm.initComponents()
comps = cm.getComponents(PROGRAM_NAME)
job.m.setMsg("# Components initialized with these relevant components " + str(comps))
#testdata = tdata_tool.getTestdata(job)
if __name__ == '__main__':
x = basic.program.Job(PROGRAM_NAME)
@@ -19,11 +25,6 @@ if __name__ == '__main__':
print("fatal Error at begin")
x.stopJob()
exit(x.m.rc * (-1) + 3)
# now in theory the program is runnable
cm = ComponentManager(x)
cm.initComponents()
comps = cm.getComponents(PROGRAM_NAME)
print(" relevant components for this job: " + str(comps))
tdata = utils.tdata_tool.getTestdata(x)
startPyJob(x)
x.stopJob()
# See PyCharm help at https://www.jetbrains.com/help/pycharm/

install_workspace.py (267 changed lines)

@@ -0,0 +1,267 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import json
import os
import datetime
import subprocess
import yaml
INSTALLED = False
try:
import basic.program
INSTALLED = True
except:
INSTALLED = False
PROGRAM_NAME = "install_workspace"
CONFIG_FORMAT = "yml"
BASIS_FORMAT = "json"
job = None
# -----------------------------------------------------------------------------------------
# minimal implementation of the program framework
class Logger:
"""
short version of the message object with the standard functions
* openLog()
* closeLog()
* logInfo()
* logError()
"""
def __init__(self, job, level, logTime, comp):
self.openLog(job, logTime)
def openLog(self, job, logTime):
# job, level, logTime, componente
home = getHome()
path = os.path.join(home, "log")
if not os.path.exists(path):
os.mkdir(path)
logpath = os.path.join(home, "log", job.program+"_"+logTime+".txt")
print("logpath "+logpath)
self.logfile = open(logpath, "w")
def logInfo(self, text):
self.logfile.write(text + "\n")
def logError(self, text):
self.logfile.write("ERROR:" + text + "\n")
print("ERROR:" + text)
def closeLog(self):
self.logfile.close()
class ActJob:
"""
short version of the job object with the standard functions
* startJob() starts the job with messaging
* setParameter() sets the parameters from args or from the call
* stopJob() stops the job with messaging
"""
def __init__(self, program):
self.program = program
self.start = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
self.jobid = 100000
def startJob(self):
self.m = Logger(self, "info", self.start, None) # job, level, logTime, componente
text = "# # # Start Job " + self.start + " # # # "
self.m.logInfo(text)
print(text)
def stopJob(self):
self.ende = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
text = "# # # Stop Job " + self.start + " - " + self.ende + " # # # "
self.m.logInfo(text)
self.m.closeLog()
print(text)
def setParameter(self, args):
for k in args:
setattr(self, k, args[k])
# -----------------------------------------------------------------------------------------
# standard control of the main processing
def startPyJob(job):
"""
controls the main processing, callable from the program itself or from job_tool
:param job:
:return:
"""
job.m.logInfo("startPyJob gestertet ")
try:
setParameter(job)
readConfig(job)
createFolders(job)
createGit(job)
createBasisConfig(job)
except Exception as e:
job.m.logError(str(e))
# -----------------------------------------------------------------------------------------
# concrete processing routines
def setParameter(job):
job.m.logInfo("--- setze Parameter ")
def readConfig(job):
job.m.logInfo("--- suche config-Datei ")
args = {}
args["home"] = getHome()
configPath = ""
for p in os.listdir(args["home"]):
print(p)
path = os.path.join(args["home"], p)
if os.path.isfile(path) and "workspace" in p:
configPath = path
break
if len(configPath) < 1:
raise Exception("Keine Konfiguration gefunden in "+args["home"])
with open(configPath, 'r') as file:
doc = yaml.full_load(file)
file.close()
for k in doc:
args[k] = doc[k]
job.setParameter(args)
def createFolders(job):
job.m.logInfo("--- erstelle Verzeichnisse ")
for p in job.paths:
path = os.path.join(job.home, job.paths[p])
createFolder(job, path)
def createFolder(job, path):
if not os.path.exists(path):
os.mkdir(path)
job.m.logInfo("Verzeichnis angelegt: "+ path)
elif not os.path.isdir(path):
job.m.logError("Verzeichnisname existiert und ist kein Verzeichnis "+ path)
else:
job.m.logInfo("Verzeichnis existiert: " + path)
def createGit(job):
job.m.logInfo("--- erstelle und aktualisiere git-Repos ")
repos = {}
local = {}
attr = {
"_name": "",
"_branch": ""
}
# build the repo list with the attributes: name, branch, url
for r in job.repos:
if r in attr:
attr[r] = job.repos[r]
else:
repo = {}
for a in job.repos[r]:
repo[a] = job.repos[r][a]
repos[r] = repo
for k in attr:
a = k[1:]
for r in repos:
if a not in repos[r]:
repos[r][a] = attr[k]
for r in repos:
repo = repos[r]
path = os.path.join(job.home, job.paths[r])
if os.path.exists(path):
local["url"] = os.path.join(job.home, job.paths[r])
local["branch"] = repo["branch"]
rpath = os.path.join(local["url"], ".git")
if os.path.exists(rpath):
job.m.logInfo("Repo existiert bereits "+r)
else:
job.m.logInfo("Repo erzeugen "+r)
initGit(job, local, repo)
updateLocal(job, local, repo)
else:
job.m.logError("Verzeichnis existiert nicht: " + path)
def initGit(job, local, repo, bare=False):
job.m.logInfo("--- initialisiere git-Repo "+str(repo)+","+str(local))
os.chdir(local["url"])
cmd = "git init "
if bare:
cmd += " --bare"
execCmd(job, cmd)
cmd = "git checkout " + local["branch"]
execCmd(job, cmd)
cmd = "git remote add " + repo["name"] + " " + repo["url"]
execCmd(job, cmd)
os.chdir(job.home)
def execCmd(job, cmd):
job.m.logInfo(cmd)
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
btext = process.communicate()[0]
text = btext.decode('utf-8')
job.m.logInfo(text)
return text
def switchLocal(job, local):
os.chdir(local["url"])
cmd = "git checkout " + local["branch"]
execCmd(job, cmd)
def updateLocal(job, local, repo):
job.m.logInfo("--- aktualisiere git-Repo "+str(repo)+","+str(local))
switchLocal(job, local)
cmd = "git pull " + repo["name"] + " " + repo["branch"]
execCmd(job, cmd)
os.chdir(job.home)
def updateRemote(job, local, repo):
job.m.logInfo("--- aktualisiere git-Repo "+str(repo)+","+str(local))
switchLocal(job, local)
cmd = "git push " + repo["name"] + " " + repo["branch"]
execCmd(job, cmd)
os.chdir(job.home)
def createBasisConfig(job):
job.m.logInfo("--- erstelle Basis-Koniguration ")
config = {}
config["basic"] = {}
config["basic"]["paths"] = {}
config["basic"]["paths"]["home"] = job.home
for p in job.paths:
path = os.path.join(job.home, job.paths[p])
config["basic"]["paths"][p] = path
for p in ["temp", "config"]:
path = os.path.join(job.home, p)
createFolder(job, path)
config["basic"]["paths"][p] = path
if BASIS_FORMAT == "yml":
path = os.path.join(job.home, "config", "basis.json")
with open(path, 'w', encoding="utf-8") as file:
doc = yaml.dump(config, file)
file.write(doc)
file.close()
elif BASIS_FORMAT == "json":
path = os.path.join(job.home, "config", "basis.json")
with open(path, 'w', encoding="utf-8") as file:
doc = json.dumps(config, indent=4)
file.write(doc)
file.close()
def getHome():
home = os.getcwd()
if home[-7:] == "program":
home = home[0:-8]
return home
# -----------------------------------------------------------------------------------------
# standard Python entry point
# create the job object, run it and finish
if __name__ == '__main__':
if INSTALLED:
#job = basic.program.Job(PROGRAM_NAME)
job = ActJob(PROGRAM_NAME)
else:
job = ActJob(PROGRAM_NAME)
job.startJob()
startPyJob(job)
job.stopJob()
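readConfig() picks the first file in the workspace home whose name contains "workspace", and createGit() then merges the _name/_branch defaults into every repo entry. The following sketch (not part of this commit) shows a hypothetical workspace.yml and replays that merge; all folder names, URLs and branch names are invented.

import yaml

SAMPLE = """
paths:
  program: program
  components: components
  testdata: testdata
repos:
  _name: origin
  _branch: main
  program:
    url: https://example.org/git/program.git
  components:
    url: https://example.org/git/components.git
    branch: develop
"""

doc = yaml.safe_load(SAMPLE)
# the same default-merge as in createGit(): every repo inherits _name/_branch
# unless it defines its own value
attr = {k[1:]: v for k, v in doc["repos"].items() if k.startswith("_")}
repos = {r: dict(attr, **v) for r, v in doc["repos"].items() if not r.startswith("_")}
print(repos["components"])   # {'name': 'origin', 'branch': 'develop', 'url': '...components.git'}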

job_dialog.py (280 changed lines)

@@ -0,0 +1,280 @@
#!/usr/bin/python
# program to execute programs for a testcases or for a testsuite
# PARAM from INPUT: --granularity --application --environment --testcase/testsuite
# main functions
# + input_param() : cache-actjob --> user-input --> local-param
# + start_job() : local-param --> cache-actjob --> start-param
# ---------------------------------------------------
"""
"""
import os.path
import json
import re
import basic.program
import basic.constants as B
import tools.job_tool
import tools.file_tool
import tools.data_const as D
import tools.date_tool
import tools.path_tool
import tools.path_const as P
tempJob = {}
PROGRAM_NAME = "service"
DLG_TESTCASE = "Testfall"
DLG_TESTSUITE = "Testsuite"
DLG_COMPLETE = "Komplettausfuehrung"
LIST_DLG_GRAN = [DLG_TESTCASE, DLG_TESTSUITE]
DLG_START_QUESTION = "was soll getestet werden"
# DLG_TESTPLAN = "Testplan"
DLG_ENVIRONMENT = "Umgebung"
DLG_APPLICATION = "Anwendung"
DLG_REDO = "wiederholen"
DLG_CONTINUE = "fortsetzen"
DLG_DUMMY_STEP = "Dummy-Schritt"
DLG_NEWJOB = "neuer Job"
JOB_NR = {
DLG_TESTSUITE : {
"start": "init_testsuite",
"init_testsuite": {
"jobnr": "0" },
"execute_testsuite": {
"jobnr": "1"},
"collect_testsuite": {
"jobnr": "2"},
"compare_testsuite": {
"jobnr": "3"},
"finish_testsuite": {
"jobnr": "4"}
},
DLG_TESTCASE: {
"start": "init_testcase",
"init_testcase": {
"jobnr": "5" },
"execute_testcase": {
"jobnr": "6" },
"collect_testcase": {
"jobnr": "7" },
"copy_appdummy": {
"jobnr": "8" },
"compare_testcase": {
"jobnr": "9" },
},
"check_environment": {
"jobnr": "10" },
"test_executer": {
"jobnr": "11"},
}
JOB_LIST = [
"init_testsuite", # 0
"execute_testsuite", # 1
"collect_testsuite", # 2
"compare_testsuite", # 3
"finish_testsuite", # 4
"init_testcase", # 5
"execute_testcase", # 6
"collect_testcase", # 7
"copy_appdummy", # 8
"compare_testcase", # 9
"check_environment", # 10
"test_executer" # 11
]
appList = []
envList = []
entities = {}
entities[DLG_TESTCASE] = {}
entities[DLG_TESTSUITE] = {}
def readContext(job):
for k in job.conf[B.SUBJECT_APPS]:
appList.append(k)
path = job.conf[B.SUBJECT_PATH][B.ATTR_PATH_ENV]
if os.path.exists(path):
for d in os.listdir(path):
print ("-- "+d)
if not os.path.isdir(os.path.join(path, d)):
continue
if d[0:1] == "_":
continue
envList.append(d)
path = job.conf[B.SUBJECT_PATH][B.ATTR_PATH_TDATA]
if os.path.exists(path):
for d in os.listdir(path):
print("tdata path "+d)
if not os.path.isdir(os.path.join(path, d)):
print("continue a")
continue
if d[0:1] == "_":
print("continue b")
continue
specpath = os.path.join(path, d, D.DFILE_TESTCASE_NAME + ".csv")
readSpec(job, d, DLG_TESTCASE, specpath)
specpath = os.path.join(path, d, D.DFILE_TESTSUITE_NAME + ".csv")
readSpec(job, d, DLG_TESTSUITE, specpath)
def readSpec(job, testentity, testgran, specpath):
print("spec "+specpath)
if not os.path.isfile(specpath):
print("continue c")
return
text = tools.file_tool.read_file_text(job, specpath, job.m)
print("-----------\n"+text+"\n------------------")
if re.match(r".*?depricated;[jJyY]", text):
return
if re.match(r".*\nhead:application;", text):
print("## app gematcht")
res = re.search(r".*head:application;(.+)\n", text)
apps = res.group(1).replace(";", ",").split(",")
print("# "+str(apps))
for a in apps:
if len(a) < 1:
break
if a not in entities[testgran]:
entities[testgran][a] = []
print(a+" in "+testentity+" "+testgran+" -- "+str(entities))
entities[testgran][a].append(testentity)
def printProc(job, process):
print("--------------------------------------------------")
for k in process:
print("| {0:15s} : {1}".format(k, process[k]))
print("--------------------------------------------------")
def restartActualProcess(job):
"""
check if an actual process is open
:return:
"""
path = tools.path_tool.getActualJsonPath(job)
if os.path.exists(path):
actProc = tools.file_tool.read_file_dict(job, path, job.m)
print("restartActJob "+str(actProc))
printProc(job, actProc)
step = int(actProc["step"])
if actProc["program"] == "test_executer":
if step > 5:
dialogProcess(job)
else:
actProc["step"] = str(step+1)
tools.job_tool.start_child_process(job, actProc)
restartActualProcess(job)
selection = [DLG_NEWJOB, DLG_REDO]
nr = int(JOB_NR[actProc["gran"]][actProc["program"]]["jobnr"])
if (actProc["gran"] == DLG_TESTSUITE and nr < 4) or (actProc["gran"] == DLG_TESTCASE and nr < 9):
selection.append(DLG_CONTINUE)
if nr == 7:
selection.append(DLG_DUMMY_STEP)
choice = getChoice(job, selection, DLG_ENVIRONMENT)
print(choice)
if choice == DLG_REDO:
tools.job_tool.start_child_process(job, actProc)
restartActualProcess(job)
elif choice == DLG_DUMMY_STEP:
actProc["program"] = JOB_LIST[nr+1]
tools.job_tool.start_child_process(job, actProc)
restartActualProcess(job)
elif choice == DLG_CONTINUE:
if nr == 7:
nr = 9
else:
nr += 1
print (" act nr "+str(nr))
actProc["step"] = str(step + 1)
actProc["program"] = JOB_LIST[nr]
tools.job_tool.start_child_process(job, actProc)
elif choice == DLG_NEWJOB:
dialogProcess(job)
else:
dialogProcess(job)
def dialogProcess(job):
"""
dialog for selection and starting a process
:param job:
:return:
"""
process = {}
index = 0
print("create new process")
selection = list(LIST_DLG_GRAN)   # copy, so the module-level list is not changed by the appends below
if DLG_TESTCASE + " - " + DLG_COMPLETE not in selection:
selection.append(DLG_TESTCASE + " - " + DLG_COMPLETE)
if DLG_TESTSUITE + " - " + DLG_COMPLETE not in selection:
selection.append(DLG_TESTSUITE + " - " + DLG_COMPLETE)
choice = getChoice(job, selection, DLG_START_QUESTION)
if DLG_COMPLETE in choice:
process["gran"] = choice[0:-3-len(DLG_COMPLETE)]
process["program"] = "test_executer"
process["step"] = 1
else:
process["gran"] = choice
process["program"] = JOB_NR[process["gran"]]["start"]
process["step"] = 1
if len(appList) == 1:
process["app"] = appList[0]
else:
process["app"] = getChoice(job, appList, DLG_ENVIRONMENT)
#
if len(envList) == 1:
process["env"] = envList[0]
else:
process["env"] = getChoice(job, envList, DLG_ENVIRONMENT)
#
if len(entities[process["gran"]][process["app"]]) == 1:
process["entity"] = entities[process["gran"]][process["app"]][0]
else:
process["entity"] = getChoice(job, entities[process["gran"]][process["app"]], process["gran"])
print(str(process))
setattr(job.par, B.PAR_ENV, process["env"])
setattr(job.par, B.PAR_APP, process["app"])
if process["gran"] == DLG_TESTCASE:
setattr(job.par, B.PAR_TESTCASE, process["entity"])
setattr(job.par, B.PAR_TCTIME, tools.date_tool.getActdate(tools.date_tool.F_DIR))
path = tools.path_tool.composePattern(job, "{"+P.P_TCBASE+"}", None)
process[B.PAR_TCDIR] = path
elif process["gran"] == DLG_TESTSUITE:
setattr(job.par, B.PAR_TESTSUITE, process["entity"])
setattr(job.par, B.PAR_TSTIME, tools.date_tool.getActdate(tools.date_tool.F_DIR))
path = tools.path_tool.composePattern(job, "{"+P.P_TSBASE+"}", None)
process[B.PAR_TSDIR] = path
tools.job_tool.start_child_process(job, process)
restartActualProcess(job)
def getChoice(job, choiselist, description):
index = 0
print("+------------- "+description+" ----------")
print('| | {:2d} : {:60s}'.format(0, "exit"))
for k in choiselist:
index += 1
print('| | {:2d} : {:60s}'.format(index, k))
print("+-----------------------------------------------")
choice = input("Auswahl 1-" + str(index) + ": ")
if not choice.isnumeric():
print("FEHLER Fehleingabe "+choice)
return getChoice(job, choiselist, description)
elif int(choice) < 1:
exit(0)
elif int(choice) > index:
print("FEHLER Fehleingabe "+choice)
return getChoice(job, choiselist, description)
else:
return choiselist[int(choice) - 1]
if __name__ == '__main__':
job = basic.program.Job(PROGRAM_NAME, "", {})
readContext(job)
restartActualProcess(job)
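The DLG_CONTINUE branch above resumes an interrupted run by stepping to the next entry of JOB_LIST, skipping copy_appdummy. A minimal sketch (not part of this commit) of that lookup:

JOB_LIST = [
    "init_testsuite",    # 0
    "execute_testsuite", # 1
    "collect_testsuite", # 2
    "compare_testsuite", # 3
    "finish_testsuite",  # 4
    "init_testcase",     # 5
    "execute_testcase",  # 6
    "collect_testcase",  # 7
    "copy_appdummy",     # 8
    "compare_testcase",  # 9
    "check_environment", # 10
    "test_executer",     # 11
]

def next_program(nr):
    # mirrors the DLG_CONTINUE branch: after collect_testcase (7) jump straight to
    # compare_testcase (9), otherwise take the next list entry
    return JOB_LIST[9] if nr == 7 else JOB_LIST[nr + 1]

print(next_program(0))   # execute_testsuite
print(next_program(7))   # compare_testcase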

make_workspace.py (28 changed lines)

@@ -12,17 +12,17 @@
import os
import basic.program
import utils.path_tool
import utils.file_tool
import tools.path_tool
import tools.file_tool
import basic.constants as B
# check whether the path constants are specifically defined for this installation.
home = os.getcwd()
if os.path.exists(os.path.join(home, "components", "config", "path_const.py")):
import components.config.path_const as P
else:
import utils.path_const as P
import tools.path_const as P
# always import the keyword constants as well.
import utils.path_const as Q
import tools.path_const as Q
print("# ----------------------------------------------------------------------------------- ")
dirs = {}
@@ -32,7 +32,7 @@ dirs[Q.ATTR_PATH_COMPONENTS] = os.path.join(home, "components")
pval = [P.ATTR_PATH_ARCHIV, P.ATTR_PATH_ENV, P.ATTR_PATH_DEBUG, P.ATTR_PATH_TEMP]
qkey = [Q.ATTR_PATH_ARCHIV, Q.ATTR_PATH_ENV, Q.ATTR_PATH_DEBUG, Q.ATTR_PATH_TEMP]
dirkey = [Q.ATTR_PATH_PROGRAM, Q.ATTR_PATH_COMPONENTS, Q.ATTR_PATH_HOME, Q.VAL_BASE_DATA] + qkey
home = utils.path_tool.getHome()
home = tools.path_tool.getHome()
print("mkdirs in home " + home)
dirname = os.path.join(home, P.VAL_BASE_DATA)
dirs[Q.VAL_BASE_DATA] = dirname
@@ -90,23 +90,23 @@ print("please install requirements ")
job = basic.program.Job("unit")
print("\n# ----------------------------------------------------------------------------------- ")
import utils.git_tool
import tools.git_tool
for repo in ["program", "components", "testdata"]:
utils.git_tool.gitPull(job, repo)
tools.git_tool.gitPull(job, repo)
print("\n# ----------------------------------------------------------------------------------- ")
if "db" in job.conf.confs:
if "db" in job.conf:
import basic.connection
entity = basic.connection.Connection(job)
entity.createSchema()
import basic.testcase
entity = basic.testcase.Testcase(job)
import model.testcase
entity = model.testcase.Testcase(job)
entity.createSchema()
import basic.testsuite
entity = basic.testsuite.Testsuite(job)
import model.testsuite
entity = model.testsuite.Testsuite(job)
entity.createSchema()
import basic.testplan
entity = basic.testplan.Testplan(job)
import model.testplan
entity = model.testplan.Testplan(job)
entity.createSchema()
import basic.testexecution
entity = basic.testexecution.Testexecution(job)

model/Entity.py (143 changed lines)

@@ -0,0 +1,143 @@
import getpass
import os
import re
import basic.toolHandling
import tools.data_const as D
import basic.constants as B
import tools.date_tool
import tools.file_tool
class Entity:
def __init__(self, job):
self.job = job
self.table = ""
self.testserver = None
def get_schema(self):
"""
gets the schema/DDL information in order to create the database
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def select_entity(self, job, name):
"""
reads the entity from the database
it should return the same result as read_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def write_entity(self, job, name):
"""
writes the entity into the database
it is similar to update_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def update_entity(self, job, name):
"""
updates the entity in the database
it is similar to write_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def getDbAttr(self, job):
out = {}
for attr in [B.ATTR_DB_HOST, B.ATTR_DB_USER, B.ATTR_DB_DATABASE, B.ATTR_DB_PASSWD]:
out[attr] = job.conf[B.TOPIC_NODE_DB][attr]
return out
def getDdl(self, job, ddl):
out = {}
for t in ddl:
out[t] = {}
for f in ddl[t]:
out[t][f] = {}
for a in ddl[t][f]:
print("entity-23 "+f+", "+a+" "+str(ddl))
out[t][f][a] = ddl[t][f][a]
out[t][f][D.DDL_FNAME] = f
out[t][B.DATA_NODE_HEADER] = list(ddl[t].keys())
return out
def createSchema(self, testserver):
if B.TOPIC_NODE_DB in self.job.conf:
dbi = basic.toolHandling.getDbTool(self.job, testserver, self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
sql = self.get_schema()
print(sql)
for s in sql.split(";\n"):
if len(s) < 3: continue
try:
dbi.execStatement(s+";", self.job.conf[B.TOPIC_NODE_DB])
print("SQL executed: "+s)
except Exception as e:
raise Exception("Fehler bei createSchema "+s)
def getHistoryFields(self):
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
sql = dbi.getSchemaAttribut("inscommit", D.TYPE_STR)+","
sql += dbi.getSchemaAttribut("insauthor", D.TYPE_STR)+","
sql += dbi.getSchemaAttribut("instime", D.TYPE_TIME)+","
sql += dbi.getSchemaAttribut("updcommit", D.TYPE_STR)+","
sql += dbi.getSchemaAttribut("updauthor", D.TYPE_STR)+","
sql += dbi.getSchemaAttribut("updtime", D.TYPE_TIME)+","
sql += dbi.getSchemaAttribut("actual", D.TYPE_INT)
return sql
def selectHistoryFields(self):
if B.TOPIC_NODE_DB in self.job.conf:
dbi = basic.toolHandling.getDbTool(self.job, self.testserver, self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
dbi.selectRows
def getHistoryIndex(self, table):
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
sql = dbi.getSchemaIndex(table, "actual") + "\n"
return sql
def read_spec(job, testentity, testgran, specpath):
if not os.path.isfile(specpath):
return
text = tools.file_tool.read_file_text(job, specpath, job.m)
if re.match(r".*?depricated;[jJyY]", text):
return None
spec = {}
regex = re.compile(r".*\nhead:(.*?);(.+)")
for res in regex.finditer(text):
#res = re.search(r".*head:(.*?);(.+)\n", text)
key = res.group(1)
if key == B.SUBJECT_DESCRIPTION:
spec[B.SUBJECT_DESCRIPTION] = res.group(2).replace(";", "")
elif key in [B.SUBJECT_APPS, B.PAR_APP]:
apps = res.group(2).replace(";", ",").split(",")
spec[B.SUBJECT_APPS] = apps
else:
val = res.group(2).replace(";", "")
spec[key] = val
return spec
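read_spec() collects every head: line of a specification CSV into a dict and splits the application line into a list. A standalone illustration (not part of this commit); the literal keys "description" and "application" merely stand in for B.SUBJECT_DESCRIPTION and B.SUBJECT_APPS / B.PAR_APP, whose real values live in basic.constants, and the sample lines are invented.

import re

# in the real files the head: lines never start at offset 0, so the leading "\n"
# of the pattern is always satisfied
text = ("table:specification;;;\n"
        "head:description;login of a customer;;\n"
        "head:application;APP1;APP2\n"
        "head:story;S-100;;\n")

spec = {}
for res in re.compile(r".*\nhead:(.*?);(.+)").finditer(text):
    key, val = res.group(1), res.group(2)
    if key == "application":                      # assumed value of B.PAR_APP / B.SUBJECT_APPS
        spec[key] = val.replace(";", ",").split(",")
    else:
        spec[key] = val.replace(";", "")
print(spec)   # {'description': 'login of a customer', 'application': ['APP1', 'APP2'], 'story': 'S-100'}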

model/application.py (18 changed lines)

@@ -92,7 +92,7 @@ def select_applications(job, projectList):
def searchApplications(job, projectList, appl):
appList = {}
for proj in projectList:
if hasattr(job.par, B.PAR_PROJ) and proj != getattr(job.par, B.PAR_PROJ):
if hasattr(job, "par") and hasattr(job.par, B.PAR_PROJ) and proj != getattr(job.par, B.PAR_PROJ):
continue
for app in appl[B.SUBJECT_APPS]:
if B.ATTR_APPS_PROJECT in appl[B.SUBJECT_APPS][app] and proj != appl[B.SUBJECT_APPS][app][B.ATTR_APPS_PROJECT]:
@@ -141,7 +141,7 @@ def syncEnitities(job):
(appObjects, appDict) = selectEntities(job, dbi)
print(str(appDict))
applPath = tools.config_tool.select_config_path(job, P.KEY_BASIC, B.SUBJECT_APPS)
tools.file_tool.writeFileDict(job.m, job, applPath, appDict)
tools.file_tool.write_file_dict(job.m, job, applPath, appDict)
#
elif fileTime > dbTime:
print("git vorne")
@@ -197,7 +197,7 @@ def insertEntities(job,applData, dbTime, dbi):
dbi.deleteRows(t, job)
for app in applData[B.SUBJECT_APPS]:
ao = Application(job)
ao.readEntity(job, app)
ao.read_entity(job, app)
ao.insertEntity(dbi)
class Application(model.entity.Entity):
@@ -220,12 +220,12 @@ class Application(model.entity.Entity):
def getEntity(self, job, name):
if B.TOPIC_NODE_DB in job.conf:
self.selectEntity(job, name)
#self.readEntity(job, name)
self.select_entity(job, name)
#self.read_entity(job, name)
else:
self.readEntity(job, name)
self.read_entity(job, name)
def readEntity(self, job, app):
def read_entity(self, job, app):
apppath = tools.config_tool.select_config_path(job, P.KEY_BASIC, B.SUBJECT_APPS, "")
repopath = apppath[len(job.conf[B.SUBJECT_PATH][B.ATTR_PATH_COMPS]) + 1:]
gitresult = tools.git_tool.gitLog(job, B.ATTR_PATH_COMPS, repopath, 1)
@@ -295,7 +295,7 @@ class Application(model.entity.Entity):
rows.append(row)
return rows
def selectEntity(self, job, app):
def select_entity(self, job, app):
dbi = basic.toolHandling.getDbTool(job, job.testserver, job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
data = dbi.selectRows(TABLE_NAMES[0], job, "WHERE name = \'" + app +"\' AND actual = " + model.entity.ENTITY_ACTUAL)
# main object
@@ -335,7 +335,7 @@ class Application(model.entity.Entity):
def writeEntity(self):
pass
def getSchema(self):
def get_schema(self):
"""
ersetzt durch testserver.createDB
:return:

model/catalog.py (4 changed lines)

@@ -102,10 +102,10 @@ class Catalog:
msg = job.m
else:
msg = None
data = tools.file_tool.read_file_dict(job, pathname, msg)
data = tools.file_tool.read_file_dict(job, pathname, msg, D.CSV_SPECTYPE_CTLG)
if hasattr(job, "m"):
job.m.debug(12, "domain " + domain + " readed from " + pathname)
self.catalog[domain] = data[B.DATA_NODE_TABLES][domain][B.DATA_NODE_KEYS]
self.catalog[domain] = data[domain][B.DATA_NODE_KEYS]
return data

model/component.py (83 changed lines)

@@ -0,0 +1,83 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.toolHandling
import basic.constants as B
import model.entity
# from model.Entity import Entity
import tools.path_const as P
import tools.config_tool as config_tool
import tools.file_tool as file_tool
import tools.git_tool
TABLE_NAMES = ["component", "co_step", "co_table", "co_artifact", "co_comps"]
DEFAULT_SYNC = model.entity.SYNC_FULL_GIT2DB
CP_SUBJECT_COMPS = "components"
CP_SUBJECT_STEPS = "steps"
CP_SUBJECT_TABLES = "tables"
CP_SUBJECT_ARTS = B.SUBJECT_ARTS
LIST_CP_SUBJECTS = [CP_SUBJECT_COMPS, CP_SUBJECT_STEPS, CP_SUBJECT_TABLES, CP_SUBJECT_ARTS]
REL_ATTR_TYPE = "relationtyp"
REL_ATTR_FILE = "conffile"
REL_ATTR_FTYPE = "filetyp"
REL_ATTR_IP_PATTERN = "ippattern"
REL_ATTR_HOST_PATTERN = "hostpattern"
REL_ATTR_PORT_PATTERN = "portpattern"
REL_ATTR_URL_PATTERN = "urlpattern"
LIST_REL_ATTR = [REL_ATTR_TYPE, REL_ATTR_FILE, REL_ATTR_FTYPE,
REL_ATTR_IP_PATTERN, REL_ATTR_HOST_PATTERN, REL_ATTR_PORT_PATTERN, REL_ATTR_URL_PATTERN]
def select_components(job, project, application):
"""
get all components that are configured in the workspace,
i.e. the component folders below the components path
:param job:
:return:
"""
outList = []
appl = tools.config_tool.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS)
path = job.conf[B.SUBJECT_PATH][B.ATTR_PATH_COMPS]
for p in os.listdir(path):
if p in ["catalog", "config", "test", "tools"]:
continue
if p[0:1] in [".", "_"]:
continue
if not os.path.isdir(os.path.join(path, p)):
continue
outList.append(p)
return outList
class Component(model.entity.Entity):
def __init__(self, job, name=""):
pass
def read_entity(self, job, name):
configPath = tools.config_tool.select_config_path(job, P.KEY_COMP, name, "")
repoPath = os.path.join(job.conf[B.SUBJECT_PATH][B.ATTR_PATH_COMPS], name)
gitresult = tools.git_tool.gitLog(job, B.ATTR_PATH_COMPS, repoPath, 1)
configTree = file_tool.read_file_dict(job, configPath, job.m)
print("ok")
return
def write_entity(self, job, name):
return
def remove_entity(self, job, name):
return
def select_entity(self, job, name):
return
def update_entity(self, job, name):
return
def delete_entity(self, job, name):
return
def get_schema(self):
return
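select_components() treats every folder below the components path as a component, apart from a few reserved names and anything starting with "." or "_". A minimal sketch (not part of this commit) of that filter with invented folder names:

import os

def list_components(path):
    # the same filter as in select_components(): skip reserved folders, hidden or
    # underscore-prefixed entries and plain files
    out = []
    for p in sorted(os.listdir(path)):
        if p in ["catalog", "config", "test", "tools"]:
            continue
        if p[0:1] in [".", "_"]:
            continue
        if not os.path.isdir(os.path.join(path, p)):
            continue
        out.append(p)
    return out

# e.g. components/{catalog, config, testdb, testrest, _archiv} -> ["testdb", "testrest"]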

model/entity.py (75 changed lines)

@@ -51,6 +51,71 @@ class Entity:
self.table = ""
self.testserver = None
def get_schema(self):
"""
gets the schema/DDL information in order to create the database
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def read_entity(self, job, name):
"""
reads the entity from the file-system
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def select_entity(self, job, name):
"""
reads the entity from the database
it should return the same result as read_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def write_entity(self, job, name):
"""
writes the entity into the database
it is similar to update_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def update_entity(self, job, name):
"""
updates the entity in the database
it is similar to write_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def remove_entity(self, job, name):
"""
removes the entity from the file-system
it is similar to delete_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def delete_entity(self, job, name):
"""
deletes the entity from the database
it is similar to remove_entity
:param job:
:param name:
:return:
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def getDbAttr(self, job):
out = {}
for attr in [B.ATTR_DB_HOST, B.ATTR_DB_USER, B.ATTR_DB_DATABASE, B.ATTR_DB_PASSWD]:
@@ -75,7 +140,7 @@ class Entity:
dbi = basic.toolHandling.getDbTool(self.job, testserver, self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
sql = self.getSchema()
sql = self.get_schema()
print(sql)
for s in sql.split(";\n"):
if len(s) < 3: continue
@@ -85,8 +150,8 @@ class Entity:
except Exception as e:
raise Exception("Fehler bei createSchema "+s)
def getSchema(self):
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def getHistoryFields(self):
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
@@ -102,13 +167,13 @@ class Entity:
def selectHistoryFields(self):
if B.TOPIC_NODE_DB in self.job.conf:
dbi = basic.toolHandling.getDbTool(self.job, self.testserver, self.job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE])
dbi = basic.toolHandling.getDbTool(self.job, self.testserver, self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
dbi.selectRows
def getHistoryIndex(self, table):
dbtype = self.job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
sql = dbi.getSchemaIndex(table, "actual") + "\n"
return sql
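Component and Testcase in this commit show the intended division of labour: a concrete model class mainly has to provide read_entity() for the file system and get_schema() for createSchema(), while the remaining base methods may keep raising B.EXCEPT_NOT_IMPLEMENT until they are needed. A minimal sketch (not part of this commit) with an invented Story entity and an invented column list:

import model.entity

class Story(model.entity.Entity):
    def __init__(self, job, name=""):
        self.job = job
        self.table = "story"
        self.testserver = None
        self.name = name

    def read_entity(self, job, name):
        # a real implementation would read the configuration via tools.config_tool /
        # tools.file_tool, as Component.read_entity() does
        self.name = name
        return self

    def get_schema(self):
        # createSchema() splits the returned string on ";\n" and executes each statement
        return "CREATE TABLE story (id int, name varchar(250));\n"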

model/environment.py (2 changed lines)

@@ -35,6 +35,8 @@ def select_environments(job, projectList):
for proj in doc[B.SUBJECT_ENV][B.CONF_NODE_GENERAL][B.SUBJECT_PROJECTS]:
if proj in projectList:
environments[envdir] = doc[B.SUBJECT_ENV][B.CONF_NODE_GENERAL]
elif len(projectList) == 1 and projectList[0] == "ALL":
environments[envdir] = doc[B.SUBJECT_ENV][B.CONF_NODE_GENERAL]
except:
continue
return environments
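The added branch lets callers pass the single pseudo-project "ALL" (as clean_workspace.py does) to collect every environment regardless of its project list. A condensed sketch (not part of this commit) of the selection rule:

def matches(projectList, envProjects):
    # condensed form of the selection rule in select_environments()
    if len(projectList) == 1 and projectList[0] == "ALL":
        return True
    return any(p in projectList for p in envProjects)

print(matches(["ALL"], ["PROJ_A"]))       # True
print(matches(["PROJ_B"], ["PROJ_A"]))    # False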

model/step.py (32 changed lines)

@@ -0,0 +1,32 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.toolHandling
import basic.constants as B
import model.entity
import tools.path_const as P
import tools.config_tool
import tools.file_tool
import tools.git_tool
STEP_ATTR_NR = "nr"
""" unique number in test-specification / in comp the array-position """
STEP_ATTR_GROUP = "step"
""" step-number in test-specification / in comp the block itself """
STEP_ATTR_COMP = "comp"
""" comp-name which executes the step """
STEP_ATTR_FCT = "fct"
""" fct-name which executes the step in the component """
STEP_ATTR_TOOL_TYPE = "type"
""" interface of the tool, values cli, dbi """
STEP_ATTR_TOOL_NAME = "type"
""" name of the tool, values cli, dbi """
STEP_ATTR_ARGS = "args"
# step
# testsuite | testcase | component
#---------------------------------- # ---------------------------------- # ----------------------------------
# start programs # start function #
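Taken together, the attribute names above describe one step record. An invented example (not part of this commit), using plain literals instead of the constants:

sample_step = {
    "nr": 1,                 # STEP_ATTR_NR: unique number in the test specification
    "step": 1,               # STEP_ATTR_GROUP: block in the specification
    "comp": "testdb",        # STEP_ATTR_COMP: component that executes the step
    "fct": "execute_ddl",    # STEP_ATTR_FCT: function inside the component
    "type": "dbi",           # STEP_ATTR_TOOL_TYPE: interface of the tool (cli, dbi)
    "args": {"script": "customer.sql"},   # STEP_ATTR_ARGS
}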

model/testcase.py (30 changed lines)

@@ -6,6 +6,7 @@
# ---------------------------------------------------------------------------------------------------------
import os
import basic.toolHandling
import tools.job_const as J
import utils.data_const as D
import basic.constants as B
import model.entity
@@ -17,6 +18,16 @@ import model.entity
class Testcase(model.entity.Entity):
"""
Generally this object can be stored as a file with data or in a database.
references:
application ->
story -> story
variant -> comp.step
subtables
steps -> comp.step
tables -> comp.table
"""
name = ""
description = ""
application = ""
@@ -32,7 +43,7 @@ class Testcase(model.entity.Entity):
"""
self.job = job
def getSchema(self):
def get_schema(self):
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
sql = dbi.getCreateTable("testcase")
@@ -55,18 +66,26 @@ class Testcase(model.entity.Entity):
dbi.getSubTableId(dbi.getSubTableName("tc", attr), attr))+"\n"
return sql
def select_testcase(job, project, testsuite):
def select_testcase(job, project, testcase):
"""
to select a concrete testcase
:param job:
:param project:
:param testcase:
:return:
"""
jobProj = None
if hasattr(job.par, B.PAR_PROJ):
jobProj = getattr(job.par, B.PAR_PROJ)
setattr(job.par, B.PAR_PROJ, project)
path = tools.path_tool.compose_path(job, P.P_TDROOT, None)
specpath = os.path.join(path, testsuite, D.DFILE_TESTSUITE_NAME + ".csv")
spec = model.entity.read_spec(job, testsuite, tools.job_tool.GRAN_TS, specpath)
specpath = os.path.join(path, testcase, D.DFILE_TESTCASE_NAME + ".csv")
spec = model.entity.read_spec(job, testcase, J.GRAN_TS, specpath)
if jobProj is None:
delattr(job.par, B.PAR_PROJ)
else:
setattr(job.par, B.PAR_PROJ, jobProj)
print("select_testcase "+str(spec))
return spec
def select_testcases(job, projList, appList):
@@ -83,9 +102,8 @@ def select_testcases(job, projList, appList):
continue
if d[0:1] == "_":
continue
print(d)
specpath = os.path.join(path, d, D.DFILE_TESTCASE_NAME + ".csv")
spec = model.entity.read_spec(job, d, tools.job_tool.GRAN_TS, specpath)
spec = model.entity.read_spec(job, d, J.GRAN_TS, specpath)
if spec is None:
continue
out[d] = spec
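select_testcase() temporarily rebinds the project parameter on job.par while composing the path and restores the previous value afterwards. The same save/restore pattern in isolation (not part of this commit); ParStub and the literal key "project", which stands in for B.PAR_PROJ, are invented for the example.

class ParStub:
    pass

def with_project(par, project, fn):
    # set the attribute, run fn, then restore or remove it again
    saved = getattr(par, "project", None)
    setattr(par, "project", project)
    try:
        return fn()
    finally:
        if saved is None:
            delattr(par, "project")
        else:
            setattr(par, "project", saved)

par = ParStub()
print(with_project(par, "PROJ_A", lambda: par.project))   # PROJ_A
print(hasattr(par, "project"))                            # False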

model/testplan.py (2 changed lines)

@@ -23,7 +23,7 @@ class Testplan(model.entity.Entity):
"""
self.job = job
def getSchema(self):
def get_schema(self):
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
sql = dbi.getCreateTable("testplan")

model/testsuite.py (7 changed lines)

@@ -6,6 +6,7 @@
# ---------------------------------------------------------------------------------------------------------
import os
import basic.toolHandling
import tools.job_const as J
import utils.data_const as D
import basic.constants as B
import model.entity
@@ -32,7 +33,7 @@ class Testsuite(model.entity.Entity):
"""
self.job = job
def getSchema(self):
def get_schema(self):
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
sql = dbi.getCreateTable("testsuite")
@@ -63,7 +64,7 @@ def select_testsuite(job, project, testsuite):
setattr(job.par, B.PAR_PROJ, project)
path = tools.path_tool.compose_path(job, P.P_TDROOT, None)
specpath = os.path.join(path, testsuite, D.DFILE_TESTSUITE_NAME + ".csv")
spec = model.entity.read_spec(job, testsuite, tools.job_tool.GRAN_TS, specpath)
spec = model.entity.read_spec(job, testsuite, J.GRAN_TS, specpath)
if jobProj is None:
delattr(job.par, B.PAR_PROJ)
else:
@@ -87,7 +88,7 @@ def select_testsuites(job, projList, appList):
continue
print(d)
specpath = os.path.join(path, d, D.DFILE_TESTSUITE_NAME + ".csv")
spec = model.entity.read_spec(job, d, tools.job_tool.GRAN_TS, specpath)
spec = model.entity.read_spec(job, d, J.GRAN_TS, specpath)
if spec is None:
continue
out[d] = spec

start_dialog.py (9 changed lines)

@@ -1,4 +1,4 @@
#
#
#
# ----------------------------------------------------------
"""
@@ -84,7 +84,7 @@ def initDialog(job, args={}):
:return:
"""
# which process
verify = job.m.getLogLevel("job_tool")
verify = job.getDebugLevel("job_tool")
if "proc" not in args:
args["proc"] = getChoice(job, J.LIST_PROC, "Welchen Prozess starten")
args[J.MODEL_GRAN] = ""
@@ -156,6 +156,11 @@ def initDialog(job, args={}):
p = B.PAR_TESTCASE
else:
raise Exception("unknown testgranularity "+args[B.PAR_GRAN])
elif p == B.PAR_COMP:
description = J.CHOICE_ENV
choiceList = job_tool.select_components(job, programDef, args[B.PAR_PROJ], args[B.PAR_APP])
choiceAll = ",".join(choiceList)
choiceList.append(choiceAll)
elif p == B.PAR_ENV:
description = J.CHOICE_ENV
choiceList = job_tool.select_environment(job, programDef, args[B.PAR_PROJ])

test_executer.py (78 changed lines)

@@ -9,13 +9,15 @@ from datetime import datetime
import traceback
import basic.program
import basic.constants as B
import utils.tdata_tool
import init_testcase
import init_testsuite
import execute_testcase
import collect_testcase
import compare_testcase
import finish_testsuite
# import tools.tdata_tool as tdata_tool
import tools.date_tool as date_tool
import tools.job_tool as job_tool
# import init_testcase
# import init_testsuite
# import execute_testcase
# import collect_testcase
# import compare_testcase
# import finish_testsuite
PROGRAM_NAME = "test_executer"
myjob = None
@@ -26,65 +28,41 @@ def getTime():
time = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
return time
def startPy(pjob):
myjob = pjob
myjob.m.setMsg("# # # # # start executer # # # # # ")
tdata = utils.tdata_tool.getTestdata(myjob)
job = basic.program.Job("unit")
if not hasattr(myjob.par, B.PAR_STEP):
def startPyJob(job):
job.m.setMsg("# # # # # start executer # # # # # ")
tdata = {} #tdata_tool.getTestdata(job)
if not hasattr(job.par, B.PAR_STEP):
raise Exception("Parameter " + B.PAR_STEP + " is missing")
testcases = getattr(myjob.par, B.PAR_TESTCASE)
testcases = getattr(job.par, B.PAR_TESTCASE)
for step in tdata[B.DATA_NODE_STEPS]:
if int(step.exexStep) != int(getattr(myjob.par, "step")):
if int(step.exexStep) != int(getattr(job.par, "step")):
continue
for arg in step["args"]:
if arg == "start":
print("start "+str(step["args"][arg]))
if "testsuite" in step["args"][arg]:
jobargs = {B.PAR_APP: myjob.par.application, B.PAR_ENV: myjob.par.environment,
B.PAR_TESTSUITE: myjob.par.usecase, B.PAR_TSTIME: utils.date_tool.getActdate(utils.date_tool.F_DIR)}
#job.popInstance()
job = basic.program.Job("unit")
job.par.setParameterArgs(job, jobargs)
job.setProgram(step["args"][arg])
print("Job initialisiert "+job.program+" in Verzeichnis "+getattr(job.par, B.PAR_TSDIR))
myjob.m.logInfo("Job initialisiert "+job.program+" in Verzeichnis "+getattr(job.par, B.PAR_TSDIR))
dirname = getattr(job.par, B.PAR_TSDIR)
job.stopJob(1)
#job.popInstance()
#basic.program.Job.pushInstance(myjob)
print("ende")
job = myjob
if B.PAR_TESTSUITE in step["args"][arg]:
jobargs = {B.PAR_APP: job.par.application, B.PAR_ENV: job.par.environment,
B.PAR_TESTSUITE: job.par.usecase, B.PAR_TSTIME: date_tool.getActdate(date_tool.F_DIR)}
job_tool.start_child_process(job, jobargs)
elif B.PAR_TESTCASE in step["args"][arg]:
if step["comp"] in testcases:
jobargs = {B.PAR_APP: myjob.par.application, B.PAR_ENV: myjob.par.environment,
jobargs = {B.PAR_APP: job.par.application, B.PAR_ENV: job.par.environment,
B.PAR_TCDIR: testcases[step["comp"]] }
else:
jobargs = {B.PAR_APP: myjob.par.application, B.PAR_ENV: myjob.par.environment,
B.PAR_TESTCASE: step["comp"], B.PAR_TCTIME: utils.date_tool.getActdate(utils.date_tool.F_DIR)}
#job.popInstance()
job = basic.program.Job("unit")
job.par.setParameterArgs(job, jobargs)
job.setProgram(step["args"][arg])
print("Job initialisiert "+job.program+" in Verzeichnis "+getattr(job.par, B.PAR_TCDIR))
myjob.m.logInfo("Job initialisiert "+job.program+" in Verzeichnis "+getattr(job.par, B.PAR_TCDIR))
dirname = getattr(job.par, B.PAR_TCDIR)
testcases[step["comp"]] = dirname
job.stopJob(1)
#job.popInstance()
#basic.program.Job.pushInstance(myjob)
print("ende")
job = myjob
jobargs = {B.PAR_APP: job.par.application, B.PAR_ENV: job.par.environment,
B.PAR_TESTCASE: step["comp"], B.PAR_TCTIME: date_tool.getActdate(date_tool.F_DIR)}
job_tool.start_child_process(job, jobargs)
if arg == "report": # testsuite
#basic.program.Job.pushInstance(myjob)
pass
#basic.program.Job.pushInstance(myjob)
setattr(myjob.par, "testcases", testcases)
setattr(job.par, "testcases", testcases)
# myjob.stopJob(1)
def startStepProgram(step, job, jobargs):
"""def startStepProgram(step, job, jobargs):
myjob = basic.program.Job("unit") # meaning temp
myjob.par.setParameterArgs(job, jobargs)
myjob.setProgram(step.start)
@@ -113,7 +91,7 @@ def startStepProgram(step, job, jobargs):
finally:
myjob.stopJob(1)
#myjob.popInstance(myjob)
"""
if __name__ == '__main__':
print(PROGRAM_NAME)
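startPyJob() expects tdata[B.DATA_NODE_STEPS] to hold step objects that expose exexStep as an attribute but args and comp via item access. A small sketch (not part of this commit) of such objects with invented values; the attribute names are taken from the loop above.

class SampleStep:
    def __init__(self, exexStep, comp, args):
        self.exexStep = exexStep
        self.comp = comp
        self.args = args
    def __getitem__(self, key):
        # the loop mixes attribute access (step.exexStep) and item access (step["args"])
        return getattr(self, key)

steps = [
    SampleStep("1", "testrest", {"start": "init_testcase"}),
    SampleStep("2", "testrest", {"start": "execute_testcase"}),
]
for step in steps:
    if int(step.exexStep) != 1:
        continue
    print(step["comp"], "->", step["args"]["start"])   # testrest -> init_testcase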
