
refactorings and bugfixes

commit 5d60dcc348 (branch: master)
Ulrich Carmesin, 2 years ago
Changed files:

  1. basic/componentHandling.py (14 lines changed)
  2. basic/constants.py (12 lines changed)
  3. basic/program.py (13 lines changed)
  4. basic/testexec.py (41 lines changed)
  5. test/test_toolhandling.py (2 lines changed)
  6. test/testtools.py (7 lines changed)
  7. utils/config/path.yml (44 lines changed)
  8. utils/config_tool.py (29 lines changed)
  9. utils/conn_tool.py (4 lines changed)
  10. utils/data_const.py (3 lines changed)
  11. utils/date_tool.py (16 lines changed)
  12. utils/file_abstract.py (31 lines changed)
  13. utils/file_tool.py (1 line changed)
  14. utils/path_tool.py (3 lines changed)

basic/componentHandling.py (14 lines changed)

@@ -23,6 +23,7 @@ import basic.component
 import importlib
 import copy
 import basic.constants as B
+import utils.data_const as D
 comps = {}
 PARAM_NOSUBNODE = [B.SUBJECT_ARTS, "components", "instance"]
@@ -86,6 +87,7 @@ class ComponentManager:
 return
 for k in job.conf.confs[B.SUBJECT_APPS].get(anw):
 job.m.logDebug("applicationscomponente -- " + k + ":")
+print("applicationscomponente -- " + k + ":")
 self.createComponent(k, 0, "")
@@ -192,17 +194,17 @@ class ComponentManager:
 c.conf[B.SUBJECT_CONN] = conns[i]
 c.init()
 if parContent is not None:
-print("createComponent 5 a " + name + " : " + str(parContent))
-if name in parContent["comps"]:
-for k in parContent["comps"][name].keys():
-c.conf[k] = parContent["comps"][name][k]
+print("createComponent 5 a " + compName + " : " + str(parContent))
+if B.SUBJECT_COMPS in parContent and compName in parContent[B.SUBJECT_COMPS]:
+for k in parContent[B.SUBJECT_COMPS][compName].keys():
+c.conf[k] = parContent[B.SUBJECT_COMPS][compName][k]
 if B.SUBJECT_ARTS in c.conf and B.TOPIC_NODE_DB in c.conf[B.SUBJECT_ARTS]:
 if not B.DATA_NODE_DDL in c.conf:
 c.conf[B.DATA_NODE_DDL] = {}
 for table in c.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB]:
-if table in ["type"]:
+if table in B.LIST_DB_ATTR:
 continue
-conf = utils.config_tool.getConfig("DATASTRUCTURE", c.name, table)
+conf = utils.config_tool.getConfig(D.DDL_FILENAME, compName, table)
 if B.DATA_NODE_TABLES in conf and table in conf[B.DATA_NODE_TABLES]:
 c.conf[B.DATA_NODE_DDL][table] = conf[B.DATA_NODE_TABLES][table]
 elif table in conf:
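
The reworked createComponent reads component overrides from the parameter content under the "components" key (B.SUBJECT_COMPS) instead of the old literal "comps". A minimal sketch of the structure the loop consumes, with invented component and connection values:

    # hypothetical content of a re-read PARAMETER_*.yml file
    parContent = {
        "components": {                        # B.SUBJECT_COMPS
            "testdb": {                        # component name (invented)
                "conn": {"hostname": "localhost"},
            }
        }
    }
    # every key below the component is copied into the component configuration,
    # e.g. c.conf["conn"] = parContent["components"]["testdb"]["conn"]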

basic/constants.py (12 lines changed)

@@ -102,6 +102,9 @@ DATA_NODE_PAR = "par"
 TOPIC_NODE_DB = "db"
 # testexec, db_abstr
 ATTR_DB_TYPE = "type"
+""" must attribute for the type of the database """
+ATTR_DB_RESET = "reset"
+""" optional attribute in order to use a different technical name for the db-table """
 ATTR_DB_PARTITION = "partitioned"
 """ optional attribute if table is partitioned
   - this keyword delimited by "+" will be replaced by partition-names which are parametrized """
@@ -111,7 +114,10 @@ ATTR_DB_SCHEMA = "schema"
 """ optional attribute for technical name of the schema """
 ATTR_DB_TABNAME = "tabname"
 """ optional attribute in order to use a different technical name for the db-table """
-LIST_DB_ATTR = [ATTR_DB_TYPE, ATTR_DB_PARTITION, ATTR_DB_DATABASE, ATTR_DB_SCHEMA, ATTR_DB_TABNAME]
+ATTR_DB_PRESTEP = "prestep"
+""" optional attribute to define a source-table for this table """
+LIST_DB_ATTR = [ATTR_DB_TYPE, ATTR_DB_RESET, ATTR_DB_PARTITION,
+    ATTR_DB_DATABASE, ATTR_DB_SCHEMA, ATTR_DB_TABNAME, ATTR_DB_PRESTEP]
 ATTR_DB_CONN_JAR = "conn_jar_name"
 """ optional attribute for connection-jar-file instead of connection by ip, port """
 ATTR_CONN_HOST = "hostname"
@@ -171,7 +177,8 @@ SUBJECT_INST = "instance" # | | | | x | CompHanldin
 ATTR_INST_CNT = "count" # | | | | x | CompHanlding
 ATTR_INST_SGL = "single"
 LIST_INST_ATTR = [ATTR_INST_CNT, ATTR_INST_SGL]
-ATTR_INST_SUBCOMP = "components"
+SUBJECT_COMPS = "components"
+ATTR_INST_SUBCOMP = SUBJECT_COMPS
 #SUBJECT_FCT = "function" # | | | | x | main-programs
 SUBJECT_ARTS = "artifact" # | | | | x | Component
@@ -195,6 +202,7 @@ RULE_ACCEPTANCE = "acceptance" # | | | | x | tools_match
 ATTR_STEP_ARGS = "args"
 ATTR_DATA_REF = "_nr"
+ATTR_DATA_COMP = "_comp"
 # -------------------------------------------------------------
 # exception texts
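
The new ATTR_DB_RESET and ATTR_DB_PRESTEP constants only pay off together with the extended LIST_DB_ATTR: wherever the code iterates over the db-node of an artifact, every key contained in LIST_DB_ATTR is treated as an attribute and every other key as a table. A hedged illustration as a Python dict, using only key literals that appear in this commit (component, table and values are invented):

    # sketch of a component's db-artifact node
    conf = {"artifact": {"db": {
        "type": "mysql",                # ATTR_DB_TYPE, mandatory
        "schema": "testschema",         # ATTR_DB_SCHEMA (invented value)
        "customer": {                   # a table, because "customer" is not in LIST_DB_ATTR
            "prestep": "crm:customer",  # ATTR_DB_PRESTEP: source "component:table"
        },
    }}}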

basic/program.py (13 lines changed)

@@ -245,13 +245,13 @@ class Job:
 if len(cconf) < 1:
 utils.file_tool.writeFileDict(self.m, parpath, output)
 return
-output["comps"] = {}
+output[B.SUBJECT_COMPS] = {}
 for c in cconf:
-output["comps"][c] = {}
+output[B.SUBJECT_COMPS][c] = {}
 for x in ["function", "conn"]:
-output["comps"][c][x] = cconf[c][x]
-if x == "conn" and "passwd" in cconf[c][x]:
-cconf["comps"][c][x]["passwd"] = "xxxxx"
+output[B.SUBJECT_COMPS][c][x] = cconf[c][x]
+if x == B.SUBJECT_CONN and "passwd" in cconf[c][x]:
+cconf[B.SUBJECT_COMPS][c][x]["passwd"] = "xxxxx"
 utils.file_tool.writeFileDict(self.m, parpath, output)
@@ -441,6 +441,9 @@ class Configuration:
 self.program = program
 print (f"job initialisiert {self.program}")
 if program == "unit":
+if (os.path.exists(utils.path_tool.rejoinPath("..", "..", "config", B.BASIS_FILE))):
+self.setConfiguration(utils.path_tool.rejoinPath("..", "..", "config", B.BASIS_FILE))
+return
 if (os.path.exists(utils.path_tool.rejoinPath("..", "config", B.BASIS_FILE))):
 self.setConfiguration(utils.path_tool.rejoinPath("..", "config", B.BASIS_FILE))
 return
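
With the switch to B.SUBJECT_COMPS the per-component block of the parameter file is written under "components", and passwords are masked before the dict is dumped. A hedged sketch of the resulting output dictionary (component name and values are invented):

    output = {
        "components": {                                     # B.SUBJECT_COMPS
            "testdb": {
                "function": ["reset_TData", "load_TData"],  # invented example value
                "conn": {"hostname": "localhost", "passwd": "xxxxx"},  # masked
            }
        }
    }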

basic/testexec.py (41 lines changed)

@@ -74,7 +74,7 @@ class Testexecuter():
 + datetime.now().strftime("%Y%m%d_%H%M%S")+" for " + str(self.name).upper())
 if B.ATTR_ARTS_LOG in self.conf[B.SUBJECT_ARTS]:
 self.m.logInfo("log rotate in "+ self.name)
-if "db" in self.conf[B.SUBJECT_ARTS]:
+if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS]:
 self.m.logInfo("delete content "+ self.name)
 dbi = basic.toolHandling.getDbTool(self)
 dbi.deleteTables()
@@ -85,7 +85,7 @@ class Testexecuter():
 self.m.setMsg("resetInstance for " + self.name + " is OK")
 self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() finished at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
-def load_TData(self, granularity, testdata):
+def load_TData(self, granularity, tdata):
 """
 the testdata will be loaded into the componend especially into databses
 or with import-functions of the component
@@ -96,13 +96,14 @@ class Testexecuter():
 job = basic.program.Job.getInstance()
 verify = -1+job.getDebugLevel(self.name)
 self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
-plainname = basic.componentHandling.getPlainCompname(self.name)
-if plainname in testdata:
-print("plainname in testdata "+plainname)
-if "db" in self.conf[B.SUBJECT_ARTS]:
-self.m.logInfo("delete content "+ self.name)
+if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS] and B.DATA_NODE_TABLES in tdata:
+for t in tdata[B.DATA_NODE_TABLES]:
+print (t)
+if utils.db_abstract.isCompTable(self, tdata, t):
+self.m.logInfo("insert content "+ self.name)
 dbi = basic.toolHandling.getDbTool(self)
-dbi.insertTables(testdata)
+dbi.insertTables(tdata)
+break
 self.m.setMsg("data loaded for " + self.name + " is OK")
 self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() finished at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
@@ -125,11 +126,17 @@ class Testexecuter():
 job = basic.program.Job.getInstance()
 verify = -1+job.getDebugLevel(self.name)
 self.m.debug(verify, "--- " + str(inspect.currentframe().f_code.co_name) + "() started at " + datetime.now().strftime("%Y%m%d_%H%M%S") + " for " + str(self.name).upper())
-if "db" in self.conf[B.SUBJECT_ARTS]:
+if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS]:
 self.m.logInfo("select db-content "+ self.name)
 dbi = basic.toolHandling.getDbTool(self)
-tdata = dbi.selectTables()
-utils.tdata_tool.writeCsvData("", tdata, self)
+data = dbi.selectTables(subdir)
+print("ppp")
+#data = {}
+for t in data[subdir]:
+data[B.DATA_NODE_TABLES] = {}
+data[B.DATA_NODE_TABLES][t] = data[subdir][t]
+utils.tdata_tool.writeCsvData(utils.path_tool.rejoinPath(
+utils.path_tool.composePattern("{tcresult}", self), subdir, t+".csv"), data, self)
 if B.ATTR_ARTS_LOB in self.conf[B.SUBJECT_ARTS]:
 self.m.logInfo("check lob if is deleted with flaskdb "+ self.name)
 self.m.setMsg("readInstance for " + self.name + " is OK")
@@ -305,15 +312,15 @@ class Testexecuter():
 cm = basic.componentHandling.ComponentManager.getInstance()
 data = {}
 matching = utils.match_tool.Matching()
-if "db" in self.conf[B.SUBJECT_ARTS]:
-for t in self.conf[B.SUBJECT_ARTS]["db"]:
-if t in ["type"]:
+if B.TOPIC_NODE_DB in self.conf[B.SUBJECT_ARTS]:
+for t in self.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB]:
+if t in B.LIST_DB_ATTR:
 continue
 # fill each data into matching-object
 for side in M.MATCH_SIDES:
 if side == M.MATCH_SIDE_PRESTEP:
-if "prestep" in self.conf[B.SUBJECT_ARTS]["db"][t]:
-a = self.conf[B.SUBJECT_ARTS]["db"][t]["prestep"].split(":")
+if B.ATTR_DB_PRESTEP in self.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB][t]:
+a = self.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB][t][B.ATTR_DB_PRESTEP].split(":")
 if a[0] != self.name:
 comp = cm.getComponent(a[0])
 else:
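
The prestep attribute referenced above is expected to carry a "component:table" string; the code splits it and, when the prefix names a foreign component, fetches that component from the ComponentManager. A short hedged sketch with invented names:

    prestep = "crm:customer"              # invented value of the prestep attribute
    comp_name, table = prestep.split(":")
    # comp_name "crm" differs from the own component, so the matcher fetches
    # that component via cm.getComponent(comp_name) in the code above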

test/test_toolhandling.py (2 lines changed)

@@ -15,7 +15,7 @@ class MyTestCase(unittest.TestCase):
 def test_toolhandling(self):
 job = basic.program.Job("unit")
-args = {"application": "TEST", "application": "ENV01", "modus": "unit", "loglevel": "debug", "tool": "config_tool",
+args = {B.PAR_APP: "TEST", B.PAR_ENV: "ENV01", "modus": "unit", "loglevel": "debug", "tool": "config_tool",
 "modus": "unit"}
 job.par.setParameterArgs(args)
 #t = basic.toolHandling.ToolManager()

test/testtools.py (7 lines changed)

@@ -7,7 +7,8 @@ import test.constants as T
 DEFAULT_GRAN = "tc"
 DEFAULT_APP = "TESTAPP"
 DEFAULT_ENV = "ENV01"
-DEFAULT_DIR = T.DATA_PATH + "/tdata"
+DEFAULT_DATA_DIR = T.DATA_PATH + "/tdata"
+DEFAULT_ARCHIV_DIR = T.DATA_PATH + "/lauf"
 DEFAULT_TIME = "2022-03-19_12-09-09"
 DEFAULT_MODE = "unit"
 gran = ""
@@ -68,9 +69,9 @@ def getJob(pgran="", papp="", penv="", ptstamp="", pmode=""):
 else:
 mode = pmode
 if gran == "tc":
-path = DEFAULT_DIR+"/TC0001/"+tstamp
+path = DEFAULT_ARCHIV_DIR + "/TC0001/" + tstamp
 elif gran == "ts":
-path = DEFAULT_DIR + "/testlauf/TST001_" + tstamp
+path = DEFAULT_ARCHIV_DIR + "/testlauf/TST001_" + tstamp
 job = basic.program.Job("unit")
 job.conf.confs[B.SUBJECT_PATH]["components"] = T.COMP_PATH
 args = {"application": app, "environment": env, "modus": mode, gran+"time": tstamp,
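
Because getJob now builds the archive paths from DEFAULT_ARCHIV_DIR, the unit-test jobs point at the run archive instead of the test-data tree. A short sketch of the resulting path for the defaults above:

    # gran "tc" with the default timestamp; T.DATA_PATH left symbolic
    path = DEFAULT_ARCHIV_DIR + "/TC0001/" + DEFAULT_TIME
    # -> T.DATA_PATH + "/lauf/TC0001/2022-03-19_12-09-09"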

utils/config/path.yml (new file, 44 lines)

@@ -0,0 +1,44 @@
+#
+pattern:
+  # Keywords
+  log: log
+  parfile: PARAMETER_{job.par.application}_{job.par.environment}.yml
+  precond: vorher
+  postcond: nachher
+  diff: diff_fach
+  prediff: diff_init
+  rundiff: diff_ablauf
+  result: Ergebnisse/{comp.name}
+  origin: original
+  parts: teilergebnisse
+  sumfile: xxx
+  backup: backup
+  reffile: Herkunft.txt
+  tc: testfall
+  ts: testlauf
+  debugname: debug
+  logname: log
+  debugs: "{job.conf.home}/test/log"
+  # environment
+  envbase: "{job.conf.environment}/{job.par.environment}"
+  envlog: "{envbase}/{log}"
+  envparfile: "{envbase}/{parfile}"
+  # testcase
+  tcbase: "{job.conf.archiv}/{job.par.testcase}/{job.par.tctime}"
+  tclog: "{tcbase}/{log}"
+  tcresult: "{tcbase}/{result}"
+  tcparfile: "{tcbase}/{parfile}"
+  tcdiff: "{tcresult}/{diff}"
+  tcprediff: "{tcresult}/{prediff}"
+  tcrundiff: "{tcresult}/{rundiff}"
+  tcprecond: "{tcresult}/{precond}"
+  tcpostcond: "{tcresult}/{postcond}"
+  # testset
+  tsbase: "{job.conf.archiv}/{ts}/{job.par.usecase}_{job.par.tstime}"
+  tslog: "{tsbase}/{log}"
+  tsparfile: "{tsbase}/{parfile}"
+  tssum: "{tsbase}/Ergebnis"
+  # expectation-result rs
+  xpbase: "{job.conf.expect}/{job.par.branch}"
+  xpresult: "{xpbase}/{result}"
+  xpbackup: "{xpbase}/{result}"
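
The new path.yml collects the patterns that utils.path_tool.composePattern resolves; a placeholder refers either to another key of the pattern block or to job attributes such as job.conf.* and job.par.*. A hedged resolution example for the testcase log directory (all concrete values are invented; comp is a component object as in the calls above):

    # tclog  = "{tcbase}/{log}"
    # tcbase = "{job.conf.archiv}/{job.par.testcase}/{job.par.tctime}"
    # with job.conf.archiv = "/home/tester/archiv", job.par.testcase = "TC0001",
    # job.par.tctime = "2022-03-19_12-09-09" and log = "log" this would yield
    path = utils.path_tool.composePattern("{tclog}", comp)
    # -> "/home/tester/archiv/TC0001/2022-03-19_12-09-09/log"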

utils/config_tool.py (29 lines changed)

@@ -72,8 +72,8 @@ def getConfigPath(modul, name, subname=""):
 if os.path.exists(pathname):
 return pathname
 for format in CONFIG_FORMAT:
-pathname = os.path.join(job.conf.getPath(P.ATTR_PATH_PROGRAM),
-P.VAL_COMPS, basic.componentHandling.getComponentFolder(name), "CONFIG." + format)
+pathname = os.path.join(job.conf.getPath(P.ATTR_PATH_COMPONENTS),
+basic.componentHandling.getComponentFolder(name), "CONFIG." + format)
 job.debug(verify, "5 " + pathname)
 if os.path.exists(pathname):
 return pathname
@@ -82,13 +82,13 @@ def getConfigPath(modul, name, subname=""):
 elif modul in COMP_FILES:
 # for example DATASTRUCURE or the table
 for format in CONFIG_FORMAT:
-pathname = os.path.join(job.conf.getPath(P.ATTR_PATH_PROGRAM), P.VAL_COMPS,
+pathname = os.path.join(job.conf.getPath(P.ATTR_PATH_COMPONENTS),
 basic.componentHandling.getComponentFolder(name), modul+"."+format)
 if os.path.exists(pathname):
 return pathname
 for format in CONFIG_FORMAT:
 if len(subname) > 1:
-pathname = os.path.join(job.conf.getPath(P.ATTR_PATH_PROGRAM), P.VAL_COMPS,
+pathname = os.path.join(job.conf.getPath(P.ATTR_PATH_COMPONENTS),
 basic.componentHandling.getComponentFolder(name), subname+"."+format)
 if os.path.exists(pathname):
 return pathname
@@ -184,6 +184,17 @@ def getConfig(modul, name, subname=""):
 if len(pathname) < 1:
 return confs
 doc = utils.file_tool.readFileDict(pathname, msg)
+if modul == D.DDL_FILENAME:
+# in csv the root is the subname
+# from the Dict-structure of DDL_FILENAME pick the substructure of the subname
+keys = list(doc.keys())
+if subname not in keys and len(keys) == 1:
+doc0 = doc[keys[0]]
+doc = doc0
+keys = list(doc.keys())
+if subname in keys:
+doc0 = doc[subname]
+doc = doc0
 for i, v in doc.items():
 confs[i] = v
 return confs
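
The new branch in getConfig unwraps DDL definitions whose root node is not yet the requested table, which happens when the structure comes from a csv file. A hedged sketch of the two shapes it accepts (table and field names invented):

    # shape 1: the root key is already the table (subname)
    doc = {"customer": {"id": {"type": "int"}, "name": {"type": "string"}}}
    # shape 2: a single wrapper key sits above the table
    doc = {"ddl": {"customer": {"id": {"type": "int"}}}}
    # in both cases getConfig(D.DDL_FILENAME, compName, "customer") continues
    # with the field dictionary of "customer" before copying it into confs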
@@ -197,6 +208,8 @@ def mergeConn(msg, conf, conn):
 :param conn:
 :return:
 """
+if B.SUBJECT_INST not in conf:
+conf[B.SUBJECT_INST] = {}
 for a in conn[B.SUBJECT_INST]:
 conf[B.SUBJECT_INST][a] = conn[B.SUBJECT_INST][a]
 for topic in [B.TOPIC_NODE_DB, B.TOPIC_NODE_CLI, B.TOPIC_NODE_API, B.TOPIC_NODE_FILE]:
@@ -210,20 +223,28 @@ def mergeConn(msg, conf, conn):
 list = B.LIST_API_ATTR
 if topic == B.TOPIC_NODE_FILE:
 list = B.LIST_FILE_ATTR
+print(" --- merge-conn " + topic + " " + str(list))
 for a in conf[B.SUBJECT_ARTS][topic]:
+if topic not in conn:
+continue
 if a in list:
 if a in conn[topic]:
 conf[B.SUBJECT_ARTS][topic][a] = conn[topic][a]
 else:
 for b in conf[B.SUBJECT_ARTS][topic][a]:
+print(" --- merge-conn b " + topic + " " + a+" "+b)
 if b not in list:
 msg.logError("not-topic-attribute in topic-connection: "+topic+", "+b)
 continue
+if a not in conn[topic]:
+continue
 if b in conn[topic][a]:
 conf[B.SUBJECT_ARTS][topic][a][b] = conn[topic][a][b]
 for a in list:
 if topic not in conn:
 break
+if topic not in conn:
+continue
 if a in conn[topic]:
 conf[B.SUBJECT_ARTS][topic][a] = conn[topic][a]
 return conf

utils/conn_tool.py (4 lines changed)

@@ -14,7 +14,7 @@ def getConnection(comp, nr):
 verify = job.getDebugLevel("conn_tool")
 conn = {}
 if job.conf.confs.get("tools").get("connsrc") == D.DFILE_TYPE_YML:
-conn = utils.config_tool.getConfig("tool", "conn")
+conn = utils.config_tool.getConfig("tool", B.SUBJECT_CONN)
 xtypes = None
 if ("types" in conn["env"][comp]):
 xtypes = conn["env"][comp]["types"]
@@ -52,7 +52,7 @@ def getConnections(comp):
 conn = {}
 conns = []
 if job.conf.confs.get("tools").get("connsrc") in [D.DFILE_TYPE_YML, D.DFILE_TYPE_JSON, D.DFILE_TYPE_CSV]:
-conn = utils.config_tool.getConfig("tool", "conn")
+conn = utils.config_tool.getConfig("tool", B.SUBJECT_CONN)
 if not comp in conn["env"]:
 job.m.setFatal("Conn-Tool: Comp not configured " + comp)
 elif job.conf.confs.get("tools").get("connsrc") == "flaskdb":

utils/data_const.py (3 lines changed)

@@ -12,6 +12,7 @@ TYPE_FLOAT = "float"
 TYPE_DOUBLE = "double"
 TYPE_DATE = "date"
 TYPE_TIME = "time"
+# fields in DDL
 DDL_FNULLABLE = "nullable"
 DDL_FNAME = "field"
@@ -54,5 +55,3 @@ DEFAULT_DB_PARTITION = "n"
 DEFAULT_DB_CONN_JAR = "n"
 """ attribute for connection-jar-file instead of connection by ip, port """
-ATTR_TABLE_DATE = "date"
-ATTR_TABLE_CNT = "count"

utils/date_tool.py (16 lines changed)

@@ -99,13 +99,14 @@ def parseDate(instring):
 sec = 0
 if instring[0:2] == "{(" and instring[-2:] == ")}":
 return parseFormula(instring)
-for d in ["_", " "]:
-if d in instring and instring.find(d) > 8:
-dstring = instring[0:instring.find(d)]
-tstring = instring[instring.find(d)+1:]
-dres = parseDate(dstring)
-tres = parseDate(tstring)
-return (dres[0], dres[1], dres[2], tres[3], tres[4], tres[5])
+if len(instring) > 8:
+for d in ["_", " "]:
+if d in instring and instring.find(d) > 8:
+dstring = instring[0:instring.find(d)]
+tstring = instring[instring.find(d)+1:]
+dres = parseDate(dstring)
+tres = parseDate(tstring)
+return (dres[0], dres[1], dres[2], tres[3], tres[4], tres[5])
 if re.match(r"\d{4}[-./]\d{2}[-./]\d{2}", instring):
 res = re.match(r"(\d{4})[-./](\d{2})[-./](\d{2})", instring)
 year = int(res.group(1))
@@ -129,3 +130,4 @@ def parseDate(instring):
 min = int(res.group(2))
 sec = int(res.group(3))
 return (year, mon, day, hour, min, sec)
+return (year, mon, day, hour, min, sec)
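
parseDate now guards the date/time split against short inputs and always falls through to a final six-part return. A hedged usage sketch (the exact tuple depends on the time formats the regular expressions cover):

    # a combined timestamp is split at "_" (or " ") and both halves are parsed
    res = utils.date_tool.parseDate("2022-03-19_12-09-09")
    # expected shape: (year, mon, day, hour, min, sec), here (2022, 3, 19, 12, 9, 9)
    # strings of eight characters or less skip the split and reach the new
    # fallback return at the end of the function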

utils/file_abstract.py (new file, 31 lines)

@@ -0,0 +1,31 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
+import basic.program
+import basic.catalog
+import utils.config_tool
+import basic.constants as B
+import utils.data_const as D
+import utils.date_tool
+
+class FileFcts():
+    """
+    this is an abstract class
+    """
+    def __init__(self):
+        pass
+
+    def parseText(self, text):
+        """
+        this function parses the text and translates it to dict
+        :param text:
+        :return:
+        """
+
+    def file2dict(self):
+        pass

utils/file_tool.py (1 line changed)

@@ -222,3 +222,4 @@ def writeFileDict(msg, path, dict, enc="utf-8"):
 doc = json.dumps(file, indent=4)
 file.write(doc)
 file.close()

utils/path_tool.py (3 lines changed)

@@ -43,7 +43,8 @@ def getKeyValue(key, comp=None):
 return utils.config_tool.getAttr(comp, key[5:])
 return ""
 elif 'env.' in key:
-#if key[4:]
+if key[4:] in comp.conf["conn"]:
+return comp.conf["conn"][key[4:]]
 pass
 elif (pt.pattern):
 return pt.pattern[key]
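
getKeyValue now resolves "env."-keys against the component's connection settings instead of ignoring them. A hedged usage sketch (comp is assumed to be an initialised component; the value is invented):

    comp.conf["conn"] = {"hostname": "localhost"}
    utils.path_tool.getKeyValue("env.hostname", comp)   # -> "localhost"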
