
Testserver incl. DB-mysql

refactor
Ulrich 2 years ago
parent commit 419715d629
17 changed files (changed lines per file):

  1. basic/DATASTRUCTURE.yml (214)
  2. basic/Testserver.py (87)
  3. basic/application.py (264)
  4. basic/constants.py (11)
  5. basic/entity.py (52)
  6. basic/message.py (5)
  7. basic/program.py (30)
  8. basic/step.py (6)
  9. test/test_09git.py (13)
  10. test/test_10testserver.py (91)
  11. utils/config_tool.py (18)
  12. utils/data_const.py (1)
  13. utils/date_tool.py (6)
  14. utils/db_abstract.py (23)
  15. utils/dbmysql_tool.py (55)
  16. utils/dbrel_tool.py (4)
  17. utils/git_tool.py (19)

basic/DATASTRUCTURE.yml (214 lines changed)

@@ -0,0 +1,214 @@
application:
_header:
- apid
- name
- description
- reference
- attributes
- inscommit
- insauthor
- instime
- updcommit
- updauthor
- updtime
- actual
apid:
field: apid
type: pk
name:
field: name
index: I
type: str
description:
field: description
type: string
reference:
field: reference
type: str
attributes:
field: attributes
type: string
insauthor:
field: insauthor
type: str
inscommit:
field: inscommit
type: str
instime:
field: instime
type: time
updauthor:
field: updauthor
type: str
updcommit:
field: updcommit
type: str
updtime:
field: updtime
type: time
actual:
field: actual
index: I
type: int
ap_component:
_header:
- apcomid
- apid
- component
apcomid:
field: apcomid
type: pk
apid:
field: apid
index: I
type: int
component:
field: component
index: I
type: str
ap_project:
_header:
- approid
- apid
- project
- description
- reference
approid:
field: approid
type: pk
apid:
field: apid
index: I
type: int
project:
field: project
index: I
type: str
description:
field: description
type: string
reference:
field: reference
type: str
component:
_header:
- coid
- name
- description
- reference
- attributes
- inscommit
- insauthor
- instime
- updcommit
- updauthor
- updtime
- actual
coid:
field: coid
type: pk
name:
field: name
index: I
type: str
description:
field: description
type: string
reference:
field: reference
type: str
attributes:
field: attributes
type: string
insauthor:
field: insauthor
type: str
inscommit:
field: inscommit
type: str
instime:
field: instime
type: time
updauthor:
field: updauthor
type: str
updcommit:
field: updcommit
type: str
updtime:
field: updtime
type: time
actual:
field: actual
index: I
type: int
connection:
_header:
- cnid
- environment
- component
- type
- ip
- port
- hostname
- dompath
- attributes
- inscommit
- insauthor
- instime
- updcommit
- updauthor
- updtime
- actual
cnid:
field: cnid
type: pk
environment:
field: environment
index: I
type: str
component:
field: component
index: I
type: string
type:
field: type
type: str
ip:
field: ip
type: str
port:
field: port
type: str
hostname:
field: hostname
type: str
dompath:
field: dompath
type: str
attributes:
field: attributes
type: string
insauthor:
field: insauthor
type: str
inscommit:
field: inscommit
type: str
instime:
field: instime
type: time
updauthor:
field: updauthor
type: str
updcommit:
field: updcommit
type: str
updtime:
field: updtime
type: time
actual:
field: actual
index: I
type: int
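For orientation: the DDL file above groups each table under its name, with a _header list giving the column order and one entry per field carrying field, type and an optional index marker. A minimal sketch (not part of this commit) of how such a structure could be loaded and walked in Python; the file path is a placeholder and yaml refers to PyYAML:

import yaml

with open("DATASTRUCTURE.yml") as fh:        # placeholder path
    ddl = yaml.safe_load(fh)

table = ddl["application"]
for field in table["_header"]:               # declared column order
    fo = table[field]                        # e.g. {"field": "name", "index": "I", "type": "str"}
    print(fo["field"], fo["type"], fo.get("index", ""))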

basic/Testserver.py (87 lines changed)

@@ -0,0 +1,87 @@
import basic.component
import basic.constants as B
import utils.config_tool
import utils.data_const as D
import utils.file_tool
COMP_NAME = "testserver"
COMP_TABLES = ["application", "ap_component", "ap_project"]
class Testserver(basic.component.Component):
def __init__(self, job):
print('init '+COMP_NAME)
self.m = job.m
self.conf = {}
if B.TOPIC_NODE_DB in job.conf.confs:
self.conf[B.SUBJECT_CONN] = {}
self.conf[B.SUBJECT_CONN][B.TOPIC_NODE_DB] = {}
for attr in B.LIST_DB_ATTR:
if attr in job.conf.confs[B.TOPIC_NODE_DB]:
self.conf[B.SUBJECT_CONN][B.TOPIC_NODE_DB][attr] = job.conf.confs[B.TOPIC_NODE_DB][attr]
if not B.DATA_NODE_DDL in self.conf:
self.conf[B.DATA_NODE_DDL] = {}
for table in COMP_TABLES:
if table in B.LIST_DB_ATTR:
continue
ddl = utils.config_tool.getConfig(job, D.DDL_FILENAME, COMP_NAME, table)
path = "/home/ulrich/workspace/Datest/temp/DATASTRUCTURE.yml"
utils.file_tool.writeFileDict(job.m, job, path, ddl)
if B.DATA_NODE_TABLES in ddl and table in ddl[B.DATA_NODE_TABLES]:
self.conf[B.DATA_NODE_DDL][table] = ddl[B.DATA_NODE_TABLES][table]
elif table in ddl:
self.conf[B.DATA_NODE_DDL][table] = ddl[table]
else:
self.conf[B.DATA_NODE_DDL][table] = ddl
def createDBTables(self, job):
if B.TOPIC_NODE_DB in job.conf.confs:
dbi = basic.toolHandling.getDbTool(job, self, job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
for t in COMP_TABLES:
sql = self.getDBSchema(job, dbi, t)
for s in sql.split(";\n"):
if len(s) < 3:
continue
try:
dbi.execStatement(s+";", job.conf.confs[B.TOPIC_NODE_DB])
print("SQL executed: "+s)
except Exception as e:
raise Exception("Fehler bei createSchema "+s)
pass
def getDBSchema(self, job, dbi, table):
sqlTable = ""
sqlSub = ""
sqlTable += dbi.getCreateTable(table)
tableId = ""
for f in self.conf[B.DATA_NODE_DDL][table]:
if f[0:1] == "_":
continue
fo = self.conf[B.DATA_NODE_DDL][table][f]
if D.DDL_INDEX in fo and len(fo[D.DDL_INDEX]) > 0:
a = fo[D.DDL_INDEX].split(":")
if a[0] == "I":
sqlSub += dbi.getSchemaIndex(table, fo[D.DDL_FNAME]) + "\n"
elif a[0] == "S":
attrList = []
attr = {"attr":fo[D.DDL_FNAME], "atype": fo[D.DDL_TYPE]}
attrList.append(attr)
for i in range(2, len(a)):
if i % 2 == 1:
continue
if a[i] == "attr":
attr = {"attr":"attributes", "atype": D.TYPE_TEXT}
elif i+1 < len(a):
attr = {"attr": a[i], "atype": a[i+1]}
attrList.append(attr)
sqlSub += dbi.getSchemaSubtable(a[1], attrList) + "\n"
sqlSub += dbi.getSchemaIndex(dbi.getSubTableName(a[1], fo[D.DDL_FNAME]), tableId)+"\n"
continue
sqlTable += dbi.getSchemaAttribut(fo[D.DDL_FNAME], fo[D.DDL_TYPE]) + ","
if fo[D.DDL_TYPE] == D.TYPE_PK:
tableId = fo[D.DDL_FNAME]
sql = sqlTable[0:-1]+");\n"+sqlSub
print(sql)
return sql
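A rough usage sketch of the new component (not part of the commit): with a DB section in the job configuration, createDBTables() generates and executes one CREATE TABLE plus the CREATE INDEX statements per entry in COMP_TABLES. The job object is assumed to come from basic.program / test.testtools as in the unit tests, and the SQL shown in the comments is only the approximate shape produced by the dbrel_tool schema helpers.

import basic.Testserver

testserver = basic.Testserver.Testserver(job)   # job as provided by test.testtools.getJob()
testserver.createDBTables(job)
# For the "application" table the generated SQL is roughly:
#   CREATE TABLE <database>.application (apid INTEGER PRIMARY KEY AUTO_INCREMENT, name varchar(50), ...);
#   CREATE INDEX idx_application_name ON <database>.application (name);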

basic/application.py (264 lines changed)

@@ -0,0 +1,264 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import utils.db_abstract
import basic.toolHandling
import utils.data_const as D
import basic.constants as B
import basic.entity
import utils.path_const as P
import utils.config_tool
import utils.file_tool
import utils.git_tool
TABLE_NAMES = ["application", "ap_project", "ap_component"]
def getProjects(job):
"""
get all projects which are configured for the workspace,
with all environments where the applications of the projects are installed
:param job:
:return:
"""
appl = utils.config_tool.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS)
return searchProjects(job, appl)
def searchProjects(job, appl):
"""
search all relevant projects in the server configuration,
filtered by the parameters --application and --project
:param job:
:return:
"""
projects = {}
if B.SUBJECT_PROJECTS in job.conf.confs:
for k in job.conf.confs[B.SUBJECT_PROJECTS]:
if k in B.LIST_SUBJECTS:
continue
if hasattr(job.par, B.PAR_PROJ) and k != getattr(job.par, B.PAR_PROJ):
continue
if hasattr(job.par, B.PAR_APP) \
and k not in appl[B.SUBJECT_APPS][getattr(job.par, B.PAR_APP)][B.SUBJECT_PROJECTS]:
continue
projects[k] = appl[B.SUBJECT_PROJECTS][k]
projects[k][B.SUBJECT_ENV] = []
else:
job.conf.confs[B.SUBJECT_PROJECTS] = appl[B.SUBJECT_PROJECTS]
return projects
def getEnvironments(job, projectList):
"""
searches the environments in which the applications of the projects are declared as installed,
filtered by the parameter --environment
:param job:
:return:
"""
projects = {}
path = job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_ENV]
if not os.path.exists(path):
raise Exception("environment directory does not exist "+path)
for envdir in os.listdir(path):
print ("-- "+envdir)
if not os.path.isdir(os.path.join(path, envdir)):
continue
if envdir[0:1] == "_":
continue
if hasattr(job.par, B.PAR_ENV) and envdir != getattr(job.par, B.PAR_ENV):
continue
for format in utils.config_tool.CONFIG_FORMAT:
pathname = os.path.join(job.conf.getPath(P.ATTR_PATH_ENV),
envdir, P.VAL_CONFIG, P.KEY_TOOL + "_conn." + format)
if os.path.exists(pathname):
break
if os.path.exists(pathname):
doc = utils.file_tool.readFileDict(job, pathname, job.m)
print(str(doc))
for proj in doc["env"]["general"][B.SUBJECT_PROJECTS]:
if proj in projectList:
projects[proj][B.SUBJECT_ENV].append(envdir)
return projects
def getApplications(job, projectList):
"""
get all applications which are configured for the given projects,
with all environments where the applications of the projects are installed
:param job:
:return:
"""
appl = utils.config_tool.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS)
return searchApplications(job, projectList, appl)
def searchApplications(job, projectList, appl):
appList = {}
for proj in projectList:
if hasattr(job.par, B.PAR_PROJ) and proj != getattr(job.par, B.PAR_PROJ):
continue
for app in appl[B.SUBJECT_PROJECTS][proj][B.SUBJECT_APPS]:
if hasattr(job.par, B.PAR_APP) and app != getattr(job.par, B.PAR_APP):
continue
appList[app] = appl[B.SUBJECT_APPS][app]
return appList
def syncApplications(job):
"""
synchronize the configuration with the database
:param job:
:return:
"""
# get git-commit
apppath = utils.config_tool.getConfigPath(job, P.KEY_BASIC, B.SUBJECT_APPS, "")
repopath = apppath[len(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_COMPS]) + 1:]
gitresult = utils.git_tool.gitLog(job, B.ATTR_PATH_COMPS, repopath, 1)
if B.TOPIC_NODE_DB in job.conf.confs:
dbi = basic.toolHandling.getDbTool(job, job.testserver, job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
data = dbi.selectRows(TABLE_NAMES[0], job)
print(str(gitresult))
print(str(data[B.DATA_NODE_DATA]))
if gitresult[0]["date"] == data[B.DATA_NODE_DATA][0]["updtime"]:
print("gleich")
if len(gitresult) > 0:
return
if len(data[B.DATA_NODE_DATA]) > 0:
for t in TABLE_NAMES:
dbi.deleteRows(t, job)
# insertRows
# get list of application
applData = utils.config_tool.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS)
for app in applData[B.SUBJECT_APPS]:
ao = Application(job)
ao.readEntity(job, app)
rows = ao.getApplicationRows(job)
apid = dbi.insertRows(TABLE_NAMES[0], rows, job)
rows = ao.getAppProjectRows(job, apid)
dbi.insertRows(TABLE_NAMES[1], rows, job)
rows = ao.getAppComponentRows(job, apid)
dbi.insertRows(TABLE_NAMES[2], rows, job)
class Application(basic.entity.Entity):
table = "application"
name = ""
description = ""
reference = ""
component = []
project = {}
def __init__(self, job, name=""):
"""
to be initialized by readSpec
:param job:
"""
self.job = job
if len(name) > 1:
self.getEntity(job, name)
def getEntity(self, job, name):
if B.TOPIC_NODE_DB in job.conf.confs:
self.selectEntity(job, name)
#self.readEntity(job, name)
else:
self.readEntity(job, name)
def readEntity(self, job, app):
apppath = utils.config_tool.getConfigPath(job, P.KEY_BASIC, B.SUBJECT_APPS, "")
repopath = apppath[len(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_COMPS]) + 1:]
gitresult = utils.git_tool.gitLog(job, B.ATTR_PATH_COMPS, repopath, 1)
applData = utils.config_tool.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS)
# main object
for f in job.testserver.conf[B.DATA_NODE_DDL][TABLE_NAMES[0]][B.DATA_NODE_HEADER]:
if f == basic.entity.ENTITY_NAME:
setattr(self, f, app)
elif f == basic.entity.ENTITY_ATTRIBUTES:
setattr(self, f, {})
elif f in applData[B.SUBJECT_APPS][app]:
setattr(self, f, applData[B.SUBJECT_APPS][app][f])
elif f in basic.entity.ENTITY_FIELDS:
setattr(self, f, basic.entity.getEntityValue(job, f, gitresult[0]))
else:
setattr(self, f, "xx")
project = {}
if applData[B.SUBJECT_APPS][app][B.SUBJECT_PROJECTS] is not None:
for proj in applData[B.SUBJECT_APPS][app][B.SUBJECT_PROJECTS]:
project[proj] = {}
for f in job.testserver.conf[B.DATA_NODE_DDL][TABLE_NAMES[1]][B.DATA_NODE_HEADER]:
if f == basic.entity.ENTITY_NAME:
project[proj][f] = proj
elif f == "project":
project[proj][f] = proj
elif f == basic.entity.ENTITY_ATTRIBUTES:
project[proj][f] = {}
elif f in applData[B.SUBJECT_PROJECTS][proj]:
project[proj][f] = applData[B.SUBJECT_PROJECTS][proj][f]
elif f in basic.entity.ENTITY_FIELDS:
project[proj][f] = basic.entity.getEntityValue(job, f, gitresult[0])
else:
project[proj][f] = "xx"
setattr(self, "project", project)
component = []
if applData[B.SUBJECT_APPS][app][B.SUBJECT_COMPS] is not None:
for comp in applData[B.SUBJECT_APPS][app][B.SUBJECT_COMPS]:
component.append(comp)
setattr(self, "component", component)
def getApplicationRows(self, job):
rows = []
row = {}
for f in job.testserver.conf[B.DATA_NODE_DDL][TABLE_NAMES[0]][B.DATA_NODE_HEADER]:
row[f] = getattr(self, f)
rows.append(row)
return rows
def getAppProjectRows(self, job, apid):
rows = []
for proj in self.project:
row = {}
for f in job.testserver.conf[B.DATA_NODE_DDL][TABLE_NAMES[1]][B.DATA_NODE_HEADER]:
if f == "apid":
row[f] = apid
elif f in self.project[proj]:
row[f] = self.project[proj][f]
rows.append(row)
return rows
def getAppComponentRows(self, job, apid):
rows = []
for comp in self.component:
row = {}
row["apid"] = apid
row["component"] = comp
rows.append(row)
return rows
def selectEntity(self, job, app):
dbi = basic.toolHandling.getDbTool(job, job.testserver, job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE])
data = dbi.selectRows(TABLE_NAMES[0], job, "WHERE name = \'"+app+"\' AND actual = "+basic.entity.ENTITY_ACTUAL)
# main object
for f in job.testserver.conf[B.DATA_NODE_DDL][TABLE_NAMES[0]][B.DATA_NODE_HEADER]:
if f == basic.entity.ENTITY_NAME:
setattr(self, f, app)
else:
setattr(self, f, str(data[B.DATA_NODE_DATA][0][f]))
apid = getattr(self, "apid")
data = dbi.selectRows(TABLE_NAMES[1], job, "WHERE apid = "+str(apid))
project = {}
for row in data[B.DATA_NODE_DATA]:
project[row["project"]] = row
setattr(self, "project", project)
data = dbi.selectRows(TABLE_NAMES[2], job, "WHERE apid = " + str(apid))
component = []
for row in data[B.DATA_NODE_DATA]:
component.append(row["component"])
setattr(self, "component", component)
def getSchema(self):
"""
replaced by testserver.createDB
:return:
"""
return ""

basic/constants.py (11 lines changed)

@@ -30,6 +30,8 @@ SVAL_NULL = "null"
# -------------------------------------------------------------
# parameter with arguments
PAR_PROJ = 'project'
""" definition of the project which will be tested """
PAR_APP = 'application'
""" definition of the application which will be tested """
PAR_ENV = 'environment'
@@ -210,6 +212,8 @@ ATTR_INST_SUBCOMP = SUBJECT_COMPS
#SUBJECT_FCT = "function" # | | | | x | main-programs
SUBJECT_ARTS = "artifact" # | | | | x | Component
"""
in this subject-node are each kind of result of any component with the structure:
* topic (db, cli, api, ...)
@@ -222,6 +226,8 @@ ATTR_ARTS_LOG = "log"
ATTR_ARTS_LOB = "lob"
ATTR_ARTS_FILE = "file"
#SUBJECT_DB = "databases" # | | | | # | db*_tools, match_tool
SUBJECT_PROJECTS = "projects"
SUBJECT_ENV = PAR_ENV
SUBJECT_CONN = "conn" # | | x | | | conn_tool, db*_tools, cli*_toold
ATTR_TYPE = "type" # | x | x | | x | conn_tool, toolHandling, db*_tools
@@ -233,8 +239,11 @@ ATTR_EXEC_REF = "_exec"
ATTR_DATA_REF = "_nr"
ATTR_DATA_COMP = "_comp"
SUBJECT_TOOL = "tool"
SUBJECT_DESCRIPTION = "description"
SUBJECT_REFERENCE = "reference"
SUBJECT_TOOL = "tool"
LIST_SUBJECTS = [SUBJECT_APPS, SUBJECT_ARTS, SUBJECT_CONN, SUBJECT_COMPS, SUBJECT_INST, SUBJECT_TOOL, SUBJECT_PROJECTS]
# -------------------------------------------------------------
# exception texts
EXP_NO_BASIS_FILE = "basis file cannot be found"

basic/entity.py (52 lines changed)

@@ -1,11 +1,46 @@
import getpass
import utils.db_abstract
import basic.toolHandling
import utils.data_const as D
import basic.constants as B
import utils.date_tool
ENTITY_NAME = "name"
ENTITY_ATTRIBUTES = "attributes"
ENTITY_INS_COMMIT = "inscommit"
ENTITY_INS_AUTHOR = "insauthor"
ENTITY_INS_TIME = "instime"
ENTITY_UPD_COMMIT = "updcommit"
ENTITY_UPD_AUTHOR = "updauthor"
ENTITY_UPD_TIME = "updtime"
ENTITY_ACTUAL = "actual"
VAL_ACTUAL = 1
ENTITY_FIELDS = [ENTITY_INS_COMMIT, ENTITY_INS_AUTHOR, ENTITY_INS_TIME,
ENTITY_UPD_COMMIT, ENTITY_UPD_AUTHOR, ENTITY_UPD_TIME, ENTITY_ACTUAL]
def getEntityValue(job, field, gitcommit):
if field == ENTITY_INS_COMMIT:
return ""
if field == ENTITY_INS_AUTHOR:
return getpass.getuser()
if field == ENTITY_INS_TIME:
return utils.date_tool.getActdate(utils.date_tool.F_DIR)
if field == ENTITY_UPD_COMMIT:
return gitcommit["commit"]
if field == ENTITY_UPD_AUTHOR:
return gitcommit["author"]
if field == ENTITY_UPD_TIME:
return gitcommit["date"]
if field == ENTITY_ACTUAL:
return VAL_ACTUAL
class Entity:
def __int__(self, job):
self.job = job
self.table = ""
self.testserver = None
def getDbAttr(self, job):
out = {}
@@ -26,16 +61,20 @@ class Entity:
out[t][B.DATA_NODE_HEADER] = list(ddl[t].keys())
return out
def createSchema(self):
def createSchema(self, testserver):
if B.TOPIC_NODE_DB in self.job.conf.confs:
dbi = basic.toolHandling.getDbTool(self.job, None, self.job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE])
dbi = basic.toolHandling.getDbTool(self.job, testserver, self.job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
sql = self.getSchema()
print(sql)
for s in sql.split(";\n"):
if len(s) < 3: continue
dbi.execStatement(self.job.conf.confs[B.TOPIC_NODE_DB], s+";")
try:
dbi.execStatement(s+";", self.job.conf.confs[B.TOPIC_NODE_DB])
print("SQL executed: "+s)
except Exception as e:
raise Exception("Fehler bei createSchema "+s)
def getSchema(self):
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
@@ -52,6 +91,13 @@ class Entity:
sql += dbi.getSchemaAttribut("actual", D.TYPE_INT)
return sql
def selectHistoryFields(self):
if B.TOPIC_NODE_DB in self.job.conf.confs:
dbi = basic.toolHandling.getDbTool(self.job, self.testserver, self.job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
dbi.selectRows
def getHistoryIndex(self, table):
dbtype = self.job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
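getEntityValue() resolves the audit columns either from the current user and time (the ins* fields) or from the git commit dict returned by utils.git_tool.gitLog (the upd* fields). An illustrative sketch, not part of the commit; the commit values are made up and job is assumed to be a configured Job object:

import basic.entity as E

gitcommit = {"commit": "419715d629", "author": "Ulrich", "date": "2022-08-01_10-00-00"}
for f in E.ENTITY_FIELDS:
    print(f, E.getEntityValue(job, f, gitcommit))
# insauthor -> current user, instime -> current timestamp (F_DIR format),
# updcommit/updauthor/updtime -> taken from the commit dict, actual -> 1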

basic/message.py (5 lines changed)

@@ -241,7 +241,10 @@ class Message:
def log(self, prio, text):
""" eigentliche Schreibroutine: hierin wird debug-Level beruecksichtgigt"""
if (int(prio) <= int(self.level)) and (self.componente is None): # and self.logfile.closed == False:
self.logfile.write(text + "\n")
try:
self.logfile.write(text + "\n")
except:
pass
elif (int(prio) <= int(self.level)):
self.messages.append(text)
else:

basic/program.py (30 lines changed)

@@ -14,7 +14,6 @@ import os
from datetime import datetime
import basic.constants as B
import basic.message
import basic.message
import basic.componentHandling
import utils.date_tool
import utils.path_tool
@@ -260,9 +259,15 @@ class Job:
except:
pass # the special path is not necessary
appl = utils.config_tool.getConfig(self, P.KEY_BASIC, B.SUBJECT_APPS)
print(appl)
if appl is not None:
self.conf.confs[B.SUBJECT_APPS] = appl[B.SUBJECT_APPS]
if B.SUBJECT_PROJECTS in self.conf.confs:
for k in self.conf.confs[B.SUBJECT_PROJECTS]:
if k not in appl[B.SUBJECT_PROJECTS]:
raise Exception("Workspace has project "+k+" which is not configured")
self.conf.confs[B.SUBJECT_PROJECTS][k] = appl[B.SUBJECT_PROJECTS][k]
else:
self.conf.confs[B.SUBJECT_PROJECTS] = appl[B.SUBJECT_PROJECTS]
par = Parameter(self, program, args)
self.par = par
logTime = utils.date_tool.getActdate(utils.date_tool.F_LOG)
@@ -322,22 +327,25 @@ class Job:
utils.job_tool.stopJobProcesses(self)
self.ende = datetime.now()
self.dumpParameter()
print("stopJob " + str(self.m.messages) + ", " + str(self.m.debugfile))
self.m.logInfo("# # " + self.m.topmessage + " # # # ")
footer = "# # # Stop Job " + utils.date_tool.formatParsedDate(str(self.start), utils.date_tool.F_LOG)
footer += " # " + utils.date_tool.formatParsedDate(str(self.ende), utils.date_tool.F_LOG) + " # # # "
self.m.logInfo(footer)
self.m.debug(basic.message.LIMIT_INFO, "# # " + self.m.topmessage + " # # # ")
self.m.debug(basic.message.LIMIT_INFO, footer + " # # # RC: " + str(self.m.getFinalRc()))
footer1 = "# # " + self.m.topmessage + " # # # "
footer2 = "# # # Stop Job " + utils.date_tool.formatParsedDate(str(self.start), utils.date_tool.F_DE_TSTAMP)
footer2 += " # " + utils.date_tool.formatParsedDate(str(self.ende), utils.date_tool.F_DE_TSTAMP) + " # # # "
footer2 += " # # # RC: " + str(self.m.getFinalRc())
self.m.logInfo(footer1)
self.m.logInfo(footer2)
self.m.debug(basic.message.LIMIT_INFO, footer1)
self.m.debug(basic.message.LIMIT_INFO, footer2)
self.m.closeMessage()
rc = self.m.getFinalRc()
print ("rc " + str(rc))
print(footer)
print(footer1)
print(footer2)
if reboot == 0:
exit(rc)
def dumpParameter(self):
if len(jobdef[self.program]["pfiletarget"]) < 2:
return
parpath = utils.path_tool.composePath(self, jobdef[self.program]["pfiletarget"], None)
output = {}
cconf = basic.componentHandling.getComponentDict()

basic/step.py (6 lines changed)

@@ -53,6 +53,7 @@ def parseStep(job, fields):
step.refLine = fields[D.STEP_REFNR_I]
step.variante = fields[D.STEP_VARIANT_I]
setattr(step, B.ATTR_DATA_REF, step.refLine)
i = 0
if D.STEP_ARGS_I == D.STEP_LIST_I:
args = ""
for i in range(D.STEP_ARGS_I, len(fields)):
@@ -63,16 +64,19 @@ def parseStep(job, fields):
args += "," + fields[i]
args = args[1:]
else:
i = D.STEP_ARGS_I
args = fields[D.STEP_ARGS_I]
a = args.split(",")
for arg in a:
print("arg " + arg)
b = arg.split(":")
if len(b) < 2:
raise Exception(D.EXCP_MALFORMAT + "" + str(fields))
raise Exception(D.EXCP_MALFORMAT + " in arg["+str(i)+ "] " + str(fields))
step.args[b[0]] = b[1]
if b[0] in LIST_ARGS:
setattr(step, b[0], b[1])
i += 1
# data[B.DATA_NODE_STEPS].append(step)
return step
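For reference, the argument cell parsed here is a comma-separated list of key:value pairs; each pair lands in step.args and, if the key is listed in LIST_ARGS, also as an attribute on the step. A minimal illustration with made-up values (the changed exception message now names the offending field index):

args = "comp:testA,variant:V1"                      # made-up example cell
parsed = dict(entry.split(":", 1) for entry in args.split(","))
# parsed == {"comp": "testA", "variant": "V1"}; an entry without ":" raises the
# EXCP_MALFORMAT exception, now including " in arg[<i>] " and the full field list.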

test/test_09git.py (13 lines changed)

@@ -28,7 +28,7 @@ class MyTestCase(unittest.TestCase):
if actfunction not in TEST_FUNCTIONS:
return
job = test.testtools.getJob()
utils.git_tool.runGit(job, B.ATTR_PATH_PROGRAM, "git status")
result = utils.git_tool.runGit(job, B.ATTR_PATH_PROGRAM, "git status")
MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)
def test_02status(self):
@@ -38,7 +38,7 @@ class MyTestCase(unittest.TestCase):
if actfunction not in TEST_FUNCTIONS:
return
job = test.testtools.getJob()
utils.git_tool.gitStatus(job, B.ATTR_PATH_PROGRAM)
result = utils.git_tool.gitStatus(job, B.ATTR_PATH_PROGRAM)
MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)
def test_03log(self):
@@ -48,7 +48,14 @@ class MyTestCase(unittest.TestCase):
if actfunction not in TEST_FUNCTIONS:
return
job = test.testtools.getJob()
utils.git_tool.gitLog(job, B.ATTR_PATH_COMPS)
result = utils.git_tool.gitLog(job, B.ATTR_PATH_COMPS)
self.assertGreaterEqual(10, len(result))
result = utils.git_tool.gitLog(job, B.ATTR_PATH_COMPS, cnt=1)
self.assertEqual(1, len(result))
apppath = utils.config_tool.getConfigPath(job, P.KEY_BASIC, B.SUBJECT_APPS, "")
repopath = apppath[len(job.conf.confs[B.SUBJECT_PATH][B.ATTR_PATH_COMPS])+1:]
result = utils.git_tool.gitLog(job, B.ATTR_PATH_COMPS, repopath, 1)
self.assertEqual(1, len(result))
MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)
def test_zzz(self):

test/test_10testserver.py (91 lines changed)

@@ -0,0 +1,91 @@
"""
unit-test
"""
import unittest
import inspect
import utils.gen_tool
import basic.program
import basic.Testserver
import test.testtools
import basic.application
import basic.constants as B
import utils.path_const as P
# the list of TEST_FUNCTIONS defines which functions will really be tested.
# if you minimize the list you can check a specific test function
TEST_FUNCTIONS = ["test_01createTestserver", "test_02getDBSchema", "test_11createDBTables", "test_11syncApplication"]
TEST_FUNCTIONS = ["test_02getDBSchema"]
# with this variable you can switch prints on and off
verbose = False
class MyTestCase(unittest.TestCase):
mymsg = "--------------------------------------------------------------"
def test_01createTestserver(self):
global mymsg
actfunction = str(inspect.currentframe().f_code.co_name)
cnttest = 0
if actfunction not in TEST_FUNCTIONS:
return
job = test.testtools.getJob()
testserver = basic.Testserver.Testserver(job)
self.assertIsNotNone(testserver)
cnttest += 1
if B.TOPIC_NODE_DB in job.conf.confs:
self.assertIn(B.TOPIC_NODE_DB, testserver.conf[B.SUBJECT_CONN])
self.assertIn(B.ATTR_DB_DATABASE, testserver.conf[B.SUBJECT_CONN][B.TOPIC_NODE_DB])
self.assertIn(B.DATA_NODE_DDL, testserver.conf)
self.assertIn("application", testserver.conf[B.DATA_NODE_DDL])
MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)
def test_02getDBSchema(self):
global mymsg
actfunction = str(inspect.currentframe().f_code.co_name)
cnttest = 0
if actfunction not in TEST_FUNCTIONS:
return
job = test.testtools.getJob()
testserver = basic.Testserver.Testserver(job)
if B.TOPIC_NODE_DB in job.conf.confs:
dbi = basic.toolHandling.getDbTool(job, self, job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
sql = testserver.getDBSchema(job, dbi, "application")
print(sql+"##")
lines = sql.split("\n")
self.assertEqual(4, len(lines))
self.assertIn("CREATE TABLE", lines[0])
self.assertIn("CREATE INDEX", lines[1])
sql = testserver.getDBSchema(job, dbi, "ap_project")
print(sql+"##")
lines = sql.split("\n")
self.assertEqual(4, len(lines))
self.assertIn("CREATE TABLE", lines[0])
self.assertIn("CREATE INDEX", lines[1])
sql = testserver.getDBSchema(job, dbi, "ap_component")
print(sql+"##")
lines = sql.split("\n")
self.assertEqual(4, len(lines))
self.assertIn("CREATE TABLE", lines[0])
self.assertIn("CREATE INDEX", lines[1])
def test_11createDBTables(self):
global mymsg
actfunction = str(inspect.currentframe().f_code.co_name)
cnttest = 0
if actfunction not in TEST_FUNCTIONS:
return
job = test.testtools.getJob()
testserver = basic.Testserver.Testserver(job)
testserver.createDBTables(job)
def test_zzz(self):
if verbose: print(MyTestCase.mymsg)
if __name__ == '__main__':
verbose = True
unittest.main()

utils/config_tool.py (18 lines changed)

@@ -24,6 +24,14 @@ import utils.path_const as P
COMP_FILES = [D.DDL_FILENAME]
CONFIG_FORMAT = [D.DFILE_TYPE_YML, D.DFILE_TYPE_JSON, D.DFILE_TYPE_CSV]
def getExistgetConfigPath(job, pathnames):
for p in pathnames:
for format in CONFIG_FORMAT:
pathname = p+"."+format
if os.path.exists(pathname):
return pathname
return None
def getConfigPath(job, modul, name, subname=""):
"""
gets the most specified configuration of different sources
@@ -90,6 +98,16 @@ def getConfigPath(job, modul, name, subname=""):
raise Exception(P.EXP_CONFIG_MISSING, modul+", "+name)
elif modul in COMP_FILES:
# for example DATASTRUCTURE or the table
pathnames = []
pathnames.append(os.path.join(job.conf.getPath(P.ATTR_PATH_COMPONENTS),
basic.componentHandling.getComponentFolder(name), modul))
pathnames.append(os.path.join(job.conf.getPath(P.ATTR_PATH_COMPONENTS),
basic.componentHandling.getComponentFolder(subname), modul))
pathnames.append(os.path.join(job.conf.getPath(P.ATTR_PATH_PROGRAM), P.VAL_BASIC, modul))
pathnames.append(os.path.join(job.conf.getPath(P.ATTR_PATH_PROGRAM), P.VAL_BASIC, subname))
configpath = getExistgetConfigPath(job, pathnames)
if configpath is not None:
return configpath
for format in CONFIG_FORMAT:
pathname = os.path.join(job.conf.getPath(P.ATTR_PATH_COMPONENTS),
basic.componentHandling.getComponentFolder(name), modul+"."+format)
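The new helper getExistgetConfigPath() encapsulates the lookup order: every candidate path stem is tried with every extension in CONFIG_FORMAT, and the first existing file wins. A standalone sketch of that idea, not part of the commit; names and formats are assumed from this diff:

import os

def first_existing(stems, formats=("yml", "json", "csv")):
    # mirrors getExistgetConfigPath: first stem+format combination that exists
    for stem in stems:
        for fmt in formats:
            pathname = stem + "." + fmt
            if os.path.exists(pathname):
                return pathname
    return None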

utils/data_const.py (1 line changed)

@@ -23,6 +23,7 @@ DDL_FNAME = "field"
DDL_ACCEPTANCE = "acceptance"
DDL_KEY = "key"
DDL_TYPE = "type"
DDL_INDEX = "index"
DFILE_TYPE_YML = "yml"
DFILE_TYPE_JSON = "json"

utils/date_tool.py (6 lines changed)

@@ -11,9 +11,11 @@ import utils.data_const as D
F_DIR = "%Y-%m-%d_%H-%M-%S"
F_DB_DATE = "%Y-%m-%d"
F_DB_TIME = "%Y-%m-%d %H:%M:%S"
F_DE = "%d.%m.%Y"
F_N8 = "%Y%m%d"
F_LOG = "%Y%m%d_%H%M%S"
F_DE_TSTAMP = "%d.%m.%Y %H:%M:%S"
MONTH_EN = ["jan", "feb", "mar", "apr", "may", "jun", "jul", "aug", "sep", "oct", "nov", "dec"]
MONTH_DE = ["jan", "feb", "mar", "apr", "mai", "jun", "jul", "aug", "sep", "okt", "nov", "dez"]
def getActdate(format):
@@ -34,7 +36,7 @@ def getFormatDatetupel(dtupel, format):
def formatParsedDate(instring, format):
dtupel = parseDate(instring)
print ("---------------"+str(dtupel))
#print ("---------------"+str(dtupel))
return getFormatDatetupel(dtupel, format)
def parseFormula(instring):
@@ -117,7 +119,7 @@ def parseDate(instring):
hour = 0
min = 0
sec = 0
print(instring)
#print(instring)
if instring[0:2] == "{(" and instring[-2:] == ")}":
return parseFormula(instring)
if re.match(r"\d{8}_\d{6}", instring):
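The new F_DE_TSTAMP constant is a German-style timestamp format; a tiny illustration (the date itself is made up):

import datetime
# "%d.%m.%Y %H:%M:%S" formats e.g. 2022-08-01 10:00:00 as "01.08.2022 10:00:00"
print(datetime.datetime(2022, 8, 1, 10, 0, 0).strftime("%d.%m.%Y %H:%M:%S"))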

utils/db_abstract.py (23 lines changed)

@@ -230,6 +230,10 @@ def formatDbVal(msg, val, dtyp):
if not isinstance(val, str):
msg.logError("field must be " + dtyp + ", " + str(val))
return utils.date_tool.getFormatDatetupel(utils.date_tool.parseDate(val), utils.date_tool.F_DB_DATE)
if dtyp == D.TYPE_TIME:
if not isinstance(val, str):
msg.logError("field must be " + dtyp + ", " + str(val))
return utils.date_tool.getFormatDatetupel(utils.date_tool.parseDate(val), utils.date_tool.F_DB_TIME)
if dtyp == D.TYPE_INT:
if not (isinstance(val, int) or re.match(r"^\d+$", val)):
msg.logError("field must be " + dtyp + ", " + str(val))
@@ -349,7 +353,7 @@ class DbFcts():
"""
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
def execStatement(self, statement):
def execStatement(self, statement, conn=None):
""" add-on-method to execute the statement
this method should only be called by the class itself """
raise Exception(B.EXCEPT_NOT_IMPLEMENT)
@ -370,10 +374,11 @@ class DbFcts():
def getOrder(self):
return ""
def getDbValue(self, fo, value):
value = str(formatDbField(self.comp, value, fo))
if len(value.strip()) == 0 and fo[D.DDL_FNULLABLE] == B.SVAL_YES:
return self.getDbNull()
def getDbValue(self, fo, pvalue):
value = str(formatDbField(self.comp, pvalue, fo))
if len(value.strip()) == 0:
if D.DDL_FNULLABLE not in fo or fo[D.DDL_FNULLABLE] == B.SVAL_YES:
return self.getDbNull()
if fo[D.DATA_NODE_TYPE] == D.TYPE_STRING or fo[D.DATA_NODE_TYPE] == D.TYPE_STR:
return value.strip()
elif fo[D.DATA_NODE_TYPE] == D.TYPE_INT:
@ -422,3 +427,11 @@ class DbFcts():
if attr in table:
return "idx_"+table
return "idx_"+table+"_"+attr
def getInsertFields(self, ddl):
header = []
for f in ddl[B.DATA_NODE_HEADER]:
if D.DDL_TYPE in ddl[f] and ddl[f][D.DDL_TYPE] == D.TYPE_PK:
continue
header.append(f)
return header
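getInsertFields() returns the DDL header without primary-key columns, so auto-increment ids are omitted from INSERT statements. A small illustration on a DDL fragment shaped like DATASTRUCTURE.yml; the fragment is made up, and B.DATA_NODE_HEADER is assumed to be "_header" and D.TYPE_PK to be "pk":

ddl = {
    "_header": ["apid", "name"],
    "apid": {"field": "apid", "type": "pk"},
    "name": {"field": "name", "type": "str"},
}
# getInsertFields(ddl) would then return ["name"], skipping the pk column.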

utils/dbmysql_tool.py (55 lines changed)

@@ -10,6 +10,7 @@ import utils.dbrel_tool
import mysql.connector
import basic.constants as B
import utils.data_const as D
import utils.date_tool
class DbFcts(utils.dbrel_tool.DbFcts):
"""
@@ -43,7 +44,10 @@ class DbFcts(utils.dbrel_tool.DbFcts):
r = {}
i = 0
for f in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
r[f] = x[i]
if self.comp.conf[B.DATA_NODE_DDL][table][f][D.DDL_TYPE] in [D.TYPE_TIME, D.TYPE_DATE]:
r[f] = utils.date_tool.getFormatdate(x[i], utils.date_tool.F_DIR)
else:
r[f] = str(x[i])
i += 1
tdata[B.DATA_NODE_DATA].append(r)
self.comp.m.logInfo(str(tdata))
@ -72,40 +76,44 @@ class DbFcts(utils.dbrel_tool.DbFcts):
"""
verify = -1+job.getDebugLevel("db_tool")
attr = self.getDbAttributes(B.SVAL_NULL)
insheader = self.getInsertFields(self.comp.conf[B.DATA_NODE_DDL][table])
if len(insheader) < len(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]):
lastid = 1
else:
lastid = 0
sql = "INSERT INTO "+attr[B.ATTR_DB_DATABASE]+"."+table
sql += " ( "+",".join(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]) + " ) "
sql += " VALUES ( "
for x in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
sql += "%s, "
sql = sql[0:-2] + " )"
sql += " ( "+",".join(insheader) + " ) "
sql += " VALUES "
self.comp.m.logInfo(sql)
values = []
for r in rows:
rowvalues = []
for h in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
if (self.comp.conf[B.DATA_NODE_DDL][table][h] == D.TYPE_PK):
continue
for h in insheader:
if (h in r):
rowvalues.append(self.getDbValue(self.comp.conf[B.DATA_NODE_DDL][table][h], r[h]))
rowvalues.append("\'"+self.getDbValue(self.comp.conf[B.DATA_NODE_DDL][table][h], r[h])+"\'")
else:
rowvalues.append(self.getDbValue(self.comp.conf[B.DATA_NODE_DDL][table][h], ""))
values.append( tuple(rowvalues))
rowvalues.append("\'"+self.getDbValue(self.comp.conf[B.DATA_NODE_DDL][table][h], "")+"\'")
sql += "("+",".join(rowvalues)+"), "
values.append(tuple(rowvalues))
sql = sql[0:-2]
self.comp.m.logInfo(str(values))
try:
connector = self.getConnector()
mycursor = connector.cursor()
mycursor.executemany(sql, values)
mycursor.execute(sql)
if lastid > 0:
lastid = mycursor.lastrowid
connector.commit()
except Exception as e:
self.comp.m.setError("")
return
return 0
self.comp.m.setMsg(str(len(values))+" rows inserted into "+table)
return lastid
def execStatement(self, statement):
def execStatement(self, statement, conn=None):
""" add-on-method to execute the statement
this method should only be called by the class itself """
connector = self.getConnector()
connector = self.getConnector(conn)
cursor = connector.cursor()
try:
cursor.execute(statement)
@ -117,15 +125,16 @@ class DbFcts(utils.dbrel_tool.DbFcts):
print("Statement executed "+statement)
self.comp.m.setMsg("Statement executed")
def getConnector(self):
def getConnector(self, conn=None):
""" add-on-method to get the connector
this method should only be called by the class itself """
job = self.job # basic.program.Job.getInstance()
attr = self.getDbAttributes(B.SVAL_NULL)
if conn is None:
conn = self.getDbAttributes(B.SVAL_NULL)
cnx = mysql.connector.connect(
host=attr[B.ATTR_DB_HOST],
user=attr[B.ATTR_DB_USER],
password=attr[B.ATTR_DB_PASSWD],
database=attr[B.ATTR_DB_DATABASE]
host=conn[B.ATTR_DB_HOST],
user=conn[B.ATTR_DB_USER],
password=conn[B.ATTR_DB_PASSWD],
database=conn[B.ATTR_DB_DATABASE]
)
return cnx
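The reworked insertRows() builds one multi-row INSERT over the non-pk columns, quotes every value, runs it with a single cursor.execute() and returns lastrowid when the pk was omitted, so callers can link ap_project / ap_component rows to the new apid. A condensed illustration of the string building; table, database and values are made up:

insheader = ["name", "description"]
rows = [{"name": "app1", "description": "first app"}, {"name": "app2"}]
sql = "INSERT INTO testdb.application ( " + ",".join(insheader) + " ) VALUES "
for r in rows:
    sql += "(" + ",".join("'" + str(r.get(h, "")) + "'" for h in insheader) + "), "
sql = sql[0:-2]
# -> INSERT INTO testdb.application ( name,description ) VALUES ('app1','first app'), ('app2','')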

utils/dbrel_tool.py (4 lines changed)

@@ -95,6 +95,8 @@ class DbFcts(utils.db_abstract.DbFcts):
def getSchemaAttribut(self, attr, atype):
if atype == "id":
return attr + " INTEGER PRIMARY KEY AUTO_INCREMENT"
elif atype == D.TYPE_PK:
return attr + " INTEGER PRIMARY KEY AUTO_INCREMENT"
elif atype == D.TYPE_STR:
return attr + " varchar(50)"
elif atype == D.TYPE_STRING:
@@ -141,7 +143,7 @@ class DbFcts(utils.db_abstract.DbFcts):
return mysql
@staticmethod
def execStatement(self, comp, conn, statement):
def execStatement(self, statement, conn=None):
""" add-on-method to execute the statement
this method should only be called by the class itself """
raise Exception(B.EXCEPT_NOT_IMPLEMENT)

utils/git_tool.py (19 lines changed)

@@ -21,6 +21,13 @@ COMMIT_DATE = "date"
COMMIT_COMMENT = "comment"
def runGit(job, repo, cmd):
"""
executes the command on the repository
:param job:
:param repo:
:param cmd:
:return:
"""
cdpath = ""
if os.path.isdir(repo):
cdpath = repo
@@ -37,6 +44,7 @@ def runGit(job, repo, cmd):
def gitStatus(job, repo):
text = runGit(job, repo, "git status")
return text
def gitLog(job, repo, arg="", cnt=DEFAULT_CNT_COMMITS):
"""
@@ -46,15 +54,17 @@ def gitLog(job, repo, arg="", cnt=DEFAULT_CNT_COMMITS):
:param cnt:
:return: [ {commit: ..., author: ..., date: ..., comment: ...} ]
"""
text = runGit(job, repo, "git log")
if len(arg) > 1:
arg = " -- "+arg
else:
arg = ""
text = runGit(job, repo, "git log --pretty=format:\"%H | %cn | %cd | %s\""+arg)
print(text)
text = runGit(job, repo, "git log -n "+str(cnt)+" --pretty=format:\"%H | %cn | %cd | %s\""+arg)
#print(text)
logs = []
i = 0
for l in text.split("\n"):
if i == cnt:
break
res = {}
a = l.split("|")
res[COMMIT_ID] = a[0].strip()
@ -63,8 +73,9 @@ def gitLog(job, repo, arg="", cnt=DEFAULT_CNT_COMMITS):
res[COMMIT_DATE] = utils.date_tool.getFormatDatetupel(cdate, utils.date_tool.F_DIR)
res[COMMIT_COMMENT] = a[3].strip()
logs.append(res)
i += 1
print(str(logs))
return logs
def gitCommits(job, repo, arg=""):
if len(arg) > 1:
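gitLog() now limits the log with -n <cnt> and a pretty format using " | " separators, then splits each line into commit, author, date (reformatted via utils.date_tool) and comment. A sketch of the per-line parsing, not part of the commit, with a made-up log line:

line = "419715d629... | Ulrich | Mon Aug 1 10:00:00 2022 +0200 | refactor"   # made-up
a = line.split("|")
entry = {
    "commit": a[0].strip(),
    "author": a[1].strip(),
    "date": a[2].strip(),      # gitLog converts this to the F_DIR format
    "comment": a[3].strip(),
}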
