Browse Source

table-object in testserver

refactor
Ulrich 1 year ago
parent
commit
5936e88930
  1. 164
      basic/DATASTRUCTURE.yml
  2. 58
      basic/Testserver.py
  3. 19
      basic/constants.py
  4. 2
      model/Entity.py
  5. 3
      model/application.py
  6. 2
      model/component.py
  7. 10
      model/entity.py
  8. 183
      model/table.py
  9. 2
      model/testcase.py
  10. 2
      model/testplan.py
  11. 2
      model/testsuite.py
  12. 0
      test/test_27table.py
  13. 37
      test/test_90testserver.py
  14. 24
      tools/data_const.py
  15. 6
      tools/db_abstract.py
  16. 1
      tools/job_const.py
  17. 23
      tools/job_tool.py

164
basic/DATASTRUCTURE.yml

@ -1,5 +1,10 @@
application:
_header:
- _field
- type
- format
- index
_fields:
- apid
- name
- description
@ -13,101 +18,121 @@ application:
- updtime
- actual
apid:
field: apid
_field: apid
type: pk
name:
field: name
_field: name
type: str
index: I
description:
field: description
_field: description
type: string
reference:
field: reference
_field: reference
type: str
attributes:
field: attributes
_field: attributes
type: string
insauthor:
field: insauthor
_field: insauthor
type: str
inscommit:
field: inscommit
_field: inscommit
type: str
instime:
field: instime
_field: instime
type: time
updauthor:
field: updauthor
_field: updauthor
type: str
updcommit:
field: updcommit
_field: updcommit
type: str
updtime:
field: updtime
_field: updtime
type: time
actual:
field: actual
_field: actual
type: int
index: I
ap_component:
_header:
- _field
- type
- format
- index
_fields:
- apcomid
- apid
- component
apcomid:
field: apcomid
_field: apcomid
type: pk
apid:
field: apid
_field: apid
type: int
index: I
component:
field: component
_field: component
type: str
index: I
ap_application:
_header:
- _field
- type
- format
- index
_fields:
- apappid
- apid
- application
apappid:
field: apappid
_field: apappid
type: pk
apid:
field: apid
_field: apid
type: int
index: I
application:
field: application
_field: application
type: str
index: I
ap_project:
_header:
- _field
- type
- format
- index
_fields:
- approid
- apid
- project
- description
- reference
approid:
field: approid
_field: approid
type: pk
apid:
field: apid
_field: apid
type: int
index: I
project:
field: project
_field: project
type: str
index: I
description:
field: description
_field: description
type: string
reference:
field: reference
_field: reference
type: str
environment:
_header:
- _field
- type
- format
- index
_fields:
- enid
- name
- description
@ -121,61 +146,71 @@ environment:
- updtime
- actual
enid:
field: enid
_field: enid
type: pk
name:
field: name
_field: name
type: str
index: I
description:
field: description
_field: description
type: string
reference:
field: reference
_field: reference
type: str
attributes:
field: attributes
_field: attributes
type: string
insauthor:
field: insauthor
_field: insauthor
type: str
inscommit:
field: inscommit
_field: inscommit
type: str
instime:
field: instime
_field: instime
type: time
updauthor:
field: updauthor
_field: updauthor
type: str
updcommit:
field: updcommit
_field: updcommit
type: str
updtime:
field: updtime
_field: updtime
type: time
actual:
field: actual
_field: actual
type: int
index: I
en_project:
_header:
- _field
- type
- format
- index
_fields:
- enproid
- enid
- project
enproid:
field: enproid
_field: enproid
type: pk
enid:
field: enid
_field: enid
type: int
index: I
project:
field: project
_field: project
type: str
index: I
en_component:
_header:
- _field
- type
- format
- index
_fields:
- encomid
- enid
- component
@ -189,45 +224,50 @@ en_component:
- password
- attributes
encomid:
field: encomid
_field: encomid
type: pk
enid:
field: enid
_field: enid
index: I
type: int
component:
field: component
_field: component
index: I
type: str
instance:
field: instance
_field: instance
type: int
type:
field: type
_field: type
type: str
ip:
field: ip
_field: ip
type: str
port:
field: port
_field: port
type: str
hostname:
field: hostname
_field: hostname
type: str
dompath:
field: dompath
_field: dompath
type: str
user:
field: user
_field: user
type: str
password:
field: password
_field: password
type: str
attributes:
field: attributes
_field: attributes
type: string
component:
_header:
- _field
- type
- format
- index
_fields:
- coid
- name
- description
@ -241,41 +281,41 @@ component:
- updtime
- actual
coid:
field: coid
_field: coid
type: pk
name:
field: name
_field: name
type: str
index: I
description:
field: description
_field: description
type: string
reference:
field: reference
_field: reference
type: str
attributes:
field: attributes
_field: attributes
type: string
insauthor:
field: insauthor
_field: insauthor
type: str
inscommit:
field: inscommit
_field: inscommit
type: str
instime:
field: instime
_field: instime
type: time
updauthor:
field: updauthor
_field: updauthor
type: str
updcommit:
field: updcommit
_field: updcommit
type: str
updtime:
field: updtime
_field: updtime
type: time
actual:
field: actual
_field: actual
type: int
index: I

58
basic/Testserver.py

@ -1,14 +1,17 @@
import basic.component
import basic.constants as B
import basic.toolHandling
import tools.config_tool
import tools.data_const as D
import tools.file_tool
import model.table
COMP_NAME = "testserver"
COMP_TABLES = ["application", "ap_component", "ap_project", "ap_application",
"environment", "en_component", "en_project"]
class Testserver(basic.component.Component):
tables = {}
def __init__(self, job):
print('init '+COMP_NAME)
self.m = job.m
@ -25,13 +28,20 @@ class Testserver(basic.component.Component):
if table in B.LIST_DB_ATTR:
continue
ddl = tools.config_tool.getConfig(job, D.DDL_FILENAME, COMP_NAME, table)
tableDdl = model.table.Table(job, component=COMP_NAME, name=table)
self.tables[table] = tableDdl
tddl = {}
tddl[table] = {}
for k in ddl:
tddl[table][k] = ddl[k]
#for k in ddl:
for k in tableDdl.fielddef:
tddl[table][k] = tableDdl.fielddef[k]
tddl[table][B.DATA_NODE_HEADER] = D.LIST_DDL_ATTR
tddl[table][B.DATA_NODE_FIELDS] = tableDdl.fieldnames
tddl[D.DATA_ATTR_TBL] = table
path = "/home/ulrich/workspace/Datest/temp/"+table+".csv"
path = "/home/ulrich/workspace/Datest/temp/"+table+".yml"
tools.file_tool.write_file_dict(job.m, job, path, tddl)
path = "/home/ulrich/workspace/Datest/temp/"+table+".csv"
tools.file_tool.write_file_dict(job.m, job, path, tddl, ttype=D.CSV_SPECTYPE_DDL)
if B.DATA_NODE_TABLES in ddl and table in ddl[B.DATA_NODE_TABLES]:
self.conf[B.DATA_NODE_DDL][table] = ddl[B.DATA_NODE_TABLES][table]
elif table in ddl:
@ -44,8 +54,10 @@ class Testserver(basic.component.Component):
dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
for t in COMP_TABLES:
sql = self.getDBSchema(job, dbi, t)
for t in self.tables:
sql = self.tables[t].get_schema(tableName=t, tableObject=self.tables[t])
#for t in COMP_TABLES:
#sql = self.getDBSchema(job, dbi, t)
for s in sql.split(";\n"):
if len(s) < 3:
continue
@ -55,39 +67,3 @@ class Testserver(basic.component.Component):
except Exception as e:
raise Exception("Fehler bei createSchema "+s)
pass
def getDBSchema(self, job, dbi, table):
# Builds the SQL schema statements (CREATE TABLE, CREATE INDEX, subtables)
# for one table from the component's DDL configuration
# (self.conf[B.DATA_NODE_DDL][table]).
# :param job: active job context (not read inside this method)
# :param dbi: database tool delivering the dialect-specific SQL fragments
# :param table: name of the table to create
# :return: sql string; individual statements are separated by ";\n"
# NOTE(review): indentation was lost in this rendering; comments below mark
# the assumed nesting -- confirm against the repository file.
sqlTable = ""
sqlSub = ""
sqlTable += dbi.getCreateTable(table)
tableId = ""
for f in self.conf[B.DATA_NODE_DDL][table]:
# skip internal nodes such as _header / _fields -- they are no columns
if f[0:1] == "_":
continue
fo = self.conf[B.DATA_NODE_DDL][table][f]
if D.DDL_INDEX in fo and len(fo[D.DDL_INDEX]) > 0:
# index attribute is colon-separated: "I" = plain index,
# "S:<subtable>:<attr>:<type>..." = separate subtable
a = fo[D.DDL_INDEX].split(":")
if a[0] == "I":
sqlSub += dbi.getSchemaIndex(table, fo[D.DDL_FNAME]) + "\n"
elif a[0] == "S":
attrList = []
# the indexed column itself is always the first subtable attribute
attr = {"attr":fo[D.DDL_FNAME], "atype": fo[D.DDL_TYPE]}
attrList.append(attr)
for i in range(2, len(a)):
# attributes come in name/type pairs; skip the type positions
if i % 2 == 1:
continue
if a[i] == "attr":
# shorthand for a generic text attributes column
attr = {"attr":"attributes", "atype": D.TYPE_TEXT}
elif i+1 < len(a):
attr = {"attr": a[i], "atype": a[i+1]}
attrList.append(attr)
sqlSub += dbi.getSchemaSubtable(a[1], attrList) + "\n"
# NOTE(review): called getSubTableName here but getSubtableName in
# model/table.py get_schema -- verify which spelling the db-tool defines
sqlSub += dbi.getSchemaIndex(dbi.getSubTableName(a[1], fo[D.DDL_FNAME]), tableId)+"\n"
continue
sqlTable += dbi.getSchemaAttribut(fo[D.DDL_FNAME], fo[D.DDL_TYPE]) + ","
# remember the pk column; used as reference for subtable indexes
if fo[D.DDL_TYPE] == D.TYPE_PK:
tableId = fo[D.DDL_FNAME]
# drop trailing comma, close the CREATE TABLE, append index/subtable statements
sql = sqlTable[0:-1]+");\n"+sqlSub
print(sql)
return sql

19
basic/constants.py

@ -96,22 +96,23 @@ LIST_MAIN_PAR = [PAR_APP, PAR_ENV, PAR_VAR, PAR_REL, PAR_TCDIR, PAR_TSDIR]
CONF_NODE_GENERAL = "_general"
""" This constant defines a subnode of a table for the column-names """
DATA_NODE_HEADER = "_header"
""" This constant defines a subnode of a table for the column-names """
""" it defines a subnode of a table for the column-names """
DATA_NODE_FIELDS = "_fields"
""" This constant defines a subnode of a table for the column-names """
""" it defines a subnode of a table for the field-list, used for ddl """
DATA_NODE_DATA = "_data"
""" This constant defines a subnode of a table for the data which are set as key-value-pair with the column-names as key """
DATA_NODE_STEPS = "_steps"
""" This constant defines the main node in the testdata for the steps to execute """
""" it defines a subnode of a table for the data as key-value-pair, keys from header """
DATA_NODE_STEPS = "_step"
""" it defines the main node in the testdata for the steps to execute """
DATA_NODE_OPTION = "_option"
""" This constant defines main node in the testdata for testcase specific parameters """
DATA_NODE_TABLES = "_tables"
""" it defines main node in the testdata for testcase specific parameters """
DATA_NODE_ARGS = "_arguments"
""" it defines arguments as internal key-value-pairs by delimted with :, used in steps """
""" This constant defines the main node in the testdata for the steps to execute """
DATA_NODE_TABLES = "_tables"
DATA_NODE_KEYS = "_keys"
""" This constant defines the main node in the testdata for the steps to execute """
DATA_NODE_DDL = "ddl"
""" This constant defines the node for data scheme (DataDefinitionLanguage)
The fields are defined in data_const (D) """
DATA_NODE_DDL = "ddl"
DATA_NODE_COMP = "comp"
""" This constant defines """
DATA_NODE_PAR = "par"

2
model/Entity.py

@ -73,7 +73,7 @@ class Entity:
for a in ddl[t][f]:
print("entity-23 "+f+", "+a+" "+str(ddl))
out[t][f][a] = ddl[t][f][a]
out[t][f][D.DDL_FNAME] = f
out[t][f][D.DDL_FIELD] = f
out[t][B.DATA_NODE_HEADER] = list(ddl[t].keys())
return out

3
model/application.py

@ -214,7 +214,6 @@ class Application(model.entity.Entity):
:param job:
"""
self.job = job
if len(name) > 1:
self.getEntity(job, name)
@ -335,7 +334,7 @@ class Application(model.entity.Entity):
def writeEntity(self):
pass
def get_schema(self):
def get_schema(self, tableName="", tableObject=None):
"""
ersetzt durch testserver.createDB
:return:

2
model/component.py

@ -79,5 +79,5 @@ class Component(model.entity.Entity):
def delete_entity(self, job, name):
return
def get_schema(self):
def get_schema(self, tableName="", tableObject=None):
return

10
model/entity.py

@ -51,7 +51,13 @@ class Entity:
self.table = ""
self.testserver = None
def get_schema(self):
def getEntity(self, job, name):
if B.TOPIC_NODE_DB in job.conf:
self.select_entity(job, name)
#self.read_entity(job, name)
else:
self.read_entity(job, name)
def get_schema(self, tableName="", tableObject=None):
"""
gets schema/ddl-informations in order to create the database
"""
@ -131,7 +137,7 @@ class Entity:
for a in ddl[t][f]:
print("entity-23 "+f+", "+a+" "+str(ddl))
out[t][f][a] = ddl[t][f][a]
out[t][f][D.DDL_FNAME] = f
out[t][f][D.DDL_FIELD] = f
out[t][B.DATA_NODE_HEADER] = list(ddl[t].keys())
return out

183
model/table.py

@ -0,0 +1,183 @@
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import os
import basic.toolHandling
import basic.constants as B
import model.entity
import tools.path_const as P
import tools.data_const as D
import tools.config_tool
import tools.file_tool
import tools.git_tool
# Default value for each DDL attribute; applied in Table.read_entity when a
# field definition in the configuration omits the attribute.
DEFAULT_FIELD = ""
DEFAULT_TYPE = "string"
DEFAULT_FORMAT = "vchar(256)"
DEFAULT_INDEX = "N"
DEFAULT_CONSTRAINT = "nullable"
DEFAULT_AGGREGAT = ""
DEFAULT_GENERIC = ""
DEFAULT_KEY = ""
DEFAULT_ACCEPTANCE = ""
DEFAULT_ALIAS = ""
DEFAULT_DESCRIPTION = ""
# Maps each DDL attribute key (see tools.data_const.LIST_DDL_ATTR) to its default.
DEFAULTS = {
D.DDL_FIELD : DEFAULT_FIELD,
D.DDL_TYPE : DEFAULT_TYPE,
D.DDL_FORMAT : DEFAULT_FORMAT,
D.DDL_INDEX : DEFAULT_INDEX,
D.DDL_CONSTRAINT : DEFAULT_CONSTRAINT,
D.DDL_AGGREGAT : DEFAULT_AGGREGAT,
D.DDL_GENERIC : DEFAULT_GENERIC,
D.DDL_KEY : DEFAULT_KEY,
D.DDL_ACCEPTANCE : DEFAULT_ACCEPTANCE,
D.DDL_ALIAS : DEFAULT_ALIAS,
D.DDL_DESCRIPTION : DEFAULT_DESCRIPTION
}
def select_tables(job, project="", application="", component=""):
    """
    Collects the names of the tables configured in the component catalog.

    :param job: active job context, supplies the path configuration
    :param project: currently unused filter -- kept for interface compatibility
    :param application: currently unused filter
    :param component: currently unused filter
    :return: list of table names
    """
    outList = []
    # NOTE(review): result is unused -- possibly called for side effects; confirm
    appl = tools.config_tool.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS)
    path = job.conf[B.SUBJECT_PATH][B.ATTR_PATH_COMPS]["catalog"]["tables"]
    for p in os.listdir(path):
        # accept only the known table-definition file types; splitext handles
        # ".json" correctly (the former p[-4:] check matched "json" without a
        # dot and p[:-4] truncated "a.json" to "a.j")
        table, ext = os.path.splitext(p)
        if ext not in [".csv", ".yml", ".xml", ".json"]:
            continue
        if not os.path.isdir(os.path.join(path, table)):
            # NOTE(review): only names that also exist as a sibling directory
            # are returned -- confirm this filter is intended and not inverted
            continue
        outList.append(table)
    return outList
class Table(model.entity.Entity):
    """
    Model object for a single database table: loads the table's field
    definitions (DDL) from configuration and renders the SQL statements
    needed to create the table in the database.
    """
    # class-level defaults; real values are bound per instance in __init__
    project = ""
    application = ""
    component = ""
    name = ""

    def __init__(self, job, project="", application="", component="", name=""):
        """
        to be initialized by readSpec

        :param job: active job context
        :param project: optional project the table belongs to
        :param application: optional application the table belongs to
        :param component: optional component owning the table's DDL config
        :param name: table name; if set, the entity is loaded immediately
        """
        self.job = job
        # bind the containers per instance -- mutable class attributes would
        # be shared between all Table objects
        self.fieldnames = []
        self.fielddef = {}
        if len(project) > 1:
            self.project = project
        if len(application) > 1:
            self.application = application
        if len(component) > 1:
            self.component = component
        if len(name) > 1:
            self.getEntity(job, name)

    def get_schema(self, tableName="", tableObject=None):
        """
        gets schema/ddl-informations in order to create the database

        :param tableName: name of the table to create
        :param tableObject: unused here; part of the common get_schema interface
        :return: sql string with the CREATE TABLE statement followed by
            index/subtable statements, separated by ";\\n" resp. "\\n"
        """
        sql = ""
        sqlTable = ""
        sqlSub = ""
        dbi = basic.toolHandling.getDbTool(self.job, None, self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
        sqlTable += dbi.getCreateTable(tableName)
        tableId = ""
        for f in self.fieldnames:
            # internal nodes such as _header / _fields are no columns
            if f[0:1] == "_":
                continue
            fo = self.fielddef[f]
            if D.DDL_INDEX in fo and len(fo[D.DDL_INDEX]) > 0:
                # index attribute is colon-separated: "I" = plain index,
                # "S:<subtable>:<attr>:<type>..." = separate subtable
                a = fo[D.DDL_INDEX].split(":")
                if a[0] == "I":
                    sqlSub += dbi.getSchemaIndex(tableName, fo[D.DDL_FIELD]) + "\n"
                elif a[0] == "S":
                    attrList = []
                    # the indexed column itself is always the first attribute
                    attr = {"attr": fo[D.DDL_FIELD], "atype": fo[D.DDL_TYPE]}
                    attrList.append(attr)
                    for i in range(2, len(a)):
                        # attributes come in name/type pairs; skip type positions
                        if i % 2 == 1:
                            continue
                        if a[i] == "attr":
                            # shorthand for a generic text attributes column
                            attr = {"attr": "attributes", "atype": D.TYPE_TEXT}
                        elif i + 1 < len(a):
                            attr = {"attr": a[i], "atype": a[i + 1]}
                        attrList.append(attr)
                    sqlSub += dbi.getSchemaSubtable(a[1], attrList) + "\n"
                    sqlSub += dbi.getSchemaIndex(dbi.getSubtableName(a[1], fo[D.DDL_FIELD]), tableId) + "\n"
                    continue
            sqlTable += dbi.getSchemaAttribut(fo[D.DDL_FIELD], fo[D.DDL_TYPE]) + ","
            # remember the pk column; used as reference for subtable indexes
            if fo[D.DDL_TYPE] == D.TYPE_PK:
                tableId = fo[D.DDL_FIELD]
        # drop trailing comma, close the CREATE TABLE, append sub-statements
        sql = sqlTable[0:-1] + ");\n" + sqlSub
        return sql

    def read_entity(self, job, name):
        """
        reads the table's ddl from the configuration files

        :param job: active job context
        :param name: table name
        """
        ddl = tools.config_tool.getConfig(job, D.DDL_FILENAME, self.component, name)
        self.fieldnames = []
        fielddef = {}
        for f in ddl:
            if f in [B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS]:
                continue
            field = {}
            self.fieldnames.append(f)
            for k in D.LIST_DDL_ATTR:
                if k in ddl[f]:
                    field[k] = ddl[f][k]
                else:
                    # fill missing ddl attributes with their defaults
                    field[k] = DEFAULTS[k]
            fielddef[f] = field
        self.fielddef = fielddef

    def select_entity(self, job, name):
        """
        reads the entity from the database
        it should get the same result like read_entity

        :param job:
        :param name:
        :return:
        """
        # db storage not implemented yet -- fall back to the file configuration
        self.read_entity(job, name)
        # raise Exception(B.EXCEPT_NOT_IMPLEMENT)

    def write_entity(self, job, name):
        """
        writes the entity into the database
        it is similar to update_entity

        :param job:
        :param name:
        :return:
        """
        raise Exception(B.EXCEPT_NOT_IMPLEMENT)

    def update_entity(self, job, name):
        """
        updates the entity in the database
        it is similar to write_entity

        :param job:
        :param name:
        :return:
        """
        raise Exception(B.EXCEPT_NOT_IMPLEMENT)

    def remove_entity(self, job, name):
        """
        removes the entity from the file-system
        it is similar to delete_entity

        :param job:
        :param name:
        :return:
        """
        raise Exception(B.EXCEPT_NOT_IMPLEMENT)

    def delete_entity(self, job, name):
        """
        deletes the entity from the database
        it is similar to remove_entity

        :param job:
        :param name:
        :return:
        """
        raise Exception(B.EXCEPT_NOT_IMPLEMENT)

2
model/testcase.py

@ -43,7 +43,7 @@ class Testcase(model.entity.Entity):
"""
self.job = job
def get_schema(self):
def get_schema(self, tableName="", tableObject=None):
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
sql = dbi.getCreateTable("testcase")

2
model/testplan.py

@ -23,7 +23,7 @@ class Testplan(model.entity.Entity):
"""
self.job = job
def get_schema(self):
def get_schema(self, tableName="", tableObject=None):
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
sql = dbi.getCreateTable("testplan")

2
model/testsuite.py

@ -33,7 +33,7 @@ class Testsuite(model.entity.Entity):
"""
self.job = job
def get_schema(self):
def get_schema(self, tableName="", tableObject=None):
dbtype = self.job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE]
dbi = basic.toolHandling.getDbTool(self.job, None, dbtype)
sql = dbi.getCreateTable("testsuite")

0
test/test_27table.py

37
test/test_90testserver.py

@ -3,18 +3,16 @@ unit-test
"""
import unittest
import inspect
import utils.gen_tool
import basic.program
import basic.toolHandling
import basic.Testserver
import test.testtools
import basic.application
import basic.constants as B
import utils.path_const as P
# the list of TEST_FUNCTIONS defines which function will be really tested.
# if you minimize the list you can check the specific test-function
TEST_FUNCTIONS = ["test_01createTestserver", "test_02getDBSchema", "test_11createDBTables", "test_11syncApplication"]
#TEST_FUNCTIONS = ["test_02getDBSchema"]
#TEST_FUNCTIONS = ["test_01createTestserver", "test_02getDBSchema", "test_11createDBTables", "test_11syncApplication"]
TEST_FUNCTIONS = ["test_02getDBSchema"]
# with this variable you can switch prints on and off
verbose = False
@ -51,20 +49,31 @@ class MyTestCase(unittest.TestCase):
if B.TOPIC_NODE_DB in job.conf:
dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE])
else:
return "No DB in job-config"
sql = testserver.getDBSchema(job, dbi, "application")
job.conf[B.TOPIC_NODE_DB] = {}
job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE] = "rel"
job.conf[B.TOPIC_NODE_DB][B.ATTR_DB_DATABASE] = "testserver"
self.job = job
dbi = basic.toolHandling.getDbTool(job, self, "rel")
# return "No DB in job-config"
t = "application"
sql = testserver.tables[t].get_schema(tableName=t, tableObject=testserver.tables[t])
#sql = testserver.getDBSchema(job, dbi, "application")
print(sql+"##")
lines = sql.split("\n")
self.assertEqual(4, len(lines))
self.assertIn("CREATE TABLE", lines[0])
self.assertIn("CREATE INDEX", lines[1])
sql = testserver.getDBSchema(job, dbi, "ap_project")
t = "ap_project"
sql = testserver.tables[t].get_schema(tableName=t, tableObject=testserver.tables[t])
# sql = testserver.getDBSchema(job, dbi, "ap_project")
print(sql+"##")
lines = sql.split("\n")
self.assertEqual(4, len(lines))
self.assertIn("CREATE TABLE", lines[0])
self.assertIn("CREATE INDEX", lines[1])
sql = testserver.getDBSchema(job, dbi, "ap_component")
t = "ap_component"
sql = testserver.tables[t].get_schema(tableName=t, tableObject=testserver.tables[t])
#sql = testserver.getDBSchema(job, dbi, "ap_component")
print(sql+"##")
lines = sql.split("\n")
self.assertEqual(4, len(lines))
@ -79,8 +88,16 @@ class MyTestCase(unittest.TestCase):
if actfunction not in TEST_FUNCTIONS:
return
job = test.testtools.getJob()
if B.TOPIC_NODE_DB not in job.conf:
job.conf[B.TOPIC_NODE_DB] = {}
job.conf[B.TOPIC_NODE_DB][B.ATTR_TYPE] = "rel"
job.conf[B.TOPIC_NODE_DB][B.ATTR_DB_DATABASE] = "testserver"
self.job = job
testserver = basic.Testserver.Testserver(job)
testserver.createDBTables(job)
for t in testserver.tables:
sql = testserver.tables[t].get_schema(tableName=t, tableObject=testserver.tables[t])
print(sql)
#testserver.createDBTables(job)
def test_zzz(self):
if verbose: print(MyTestCase.mymsg)

24
tools/data_const.py

@ -18,12 +18,28 @@ TYPE_TIME = "time"
TYPE_PK = "pk"
# fields in DDL
DDL_FNULLABLE = "nullable"
DDL_FNAME = "field"
DDL_ACCEPTANCE = "acceptance"
DDL_KEY = "key"
# _field;comment;format;acceptance;generate;nullable
DDL_FIELD = "_field"
""" pk, str, int, """
DDL_TYPE = "type"
""" char(10), vchar(10), see format_tool """
DDL_FORMAT = "format"
""" used in create database """
DDL_INDEX = "index"
DDL_CONSTRAINT = "nullable"
""" aggregat-functinóns in sql """
DDL_AGGREGAT = "aggregat"
""" see generic_tool """
DDL_GENERIC = "generic"
""" compare-key """
DDL_KEY = "key"
""" see acceptance_tool """
DDL_ACCEPTANCE = "acceptance"
DDL_ALIAS = "alias"
DDL_DESCRIPTION = "description"
LIST_DDL_ATTR = [DDL_FIELD, DDL_TYPE, DDL_FORMAT, DDL_INDEX, DDL_GENERIC,
DDL_AGGREGAT, DDL_KEY, DDL_ACCEPTANCE, DDL_ALIAS, DDL_DESCRIPTION]
DFILE_TYPE_YML = "yml"
DFILE_TYPE_JSON = "json"

6
tools/db_abstract.py

@ -213,8 +213,8 @@ def formatDbRows(table, comp, rows):
def formatDbField(comp, val, field):
if val == B.SVAL_NULL:
if field[D.DDL_FNULLABLE] != B.SVAL_YES:
comp.m.logError("must-field is null "+ field[D.DDL_FNAME])
if field[D.DDL_CONSTRAINT] != B.SVAL_YES:
comp.m.logError("must-field is null " + field[D.DDL_FIELD])
return None
print("formatDbField "+str(comp))
print("formatDbField "+str(field)+" , "+str(val))
@ -383,7 +383,7 @@ class DbFcts():
def getDbValue(self, fo, pvalue):
value = str(formatDbField(self.comp, pvalue, fo))
if len(value.strip()) == 0:
if D.DDL_FNULLABLE not in fo or fo[D.DDL_FNULLABLE] == B.SVAL_YES:
if D.DDL_CONSTRAINT not in fo or fo[D.DDL_CONSTRAINT] == B.SVAL_YES:
return self.getDbNull()
if fo[D.DATA_NODE_TYPE] == D.TYPE_STRING or fo[D.DATA_NODE_TYPE] == D.TYPE_STR:
return value.strip()

1
tools/job_const.py

@ -45,6 +45,7 @@ MODEL_RELEASE = B.PAR_REL
LIST_MODEL = [MODEL_APP, MODEL_ENV, MODEL_COMP, MODEL_RELEASE]
CHOICE_GRAN = "Testgranularitaet auswaehlen"
CHOICE_APP = "Anwendung auswaehlen"
CHOICE_COMP = "Komponente auswaehlen"
CHOICE_ENV = "Umgebung auswaehlen"
CHOICE_SPEC = "Testspezifikation auswaehlen"
CHOICE_TP = "Testplan auswaehlen"

23
tools/job_tool.py

@ -51,7 +51,7 @@ import model.environment
import model.testplan
import model.testsuite
import model.testcase
import model.component
def hasModul(komp):
#job = Job.getInstance()
@ -102,7 +102,7 @@ def write_child_args(job, childArgs):
print("path "+path)
print("args "+str(childArgs))
tools.file_tool.writeFileDict(job.m, job, path, childArgs)
tools.file_tool.write_file_dict(job.m, job, path, childArgs)
def start_child_process(job, args):
"""
@ -116,12 +116,12 @@ def start_child_process(job, args):
job.m.logTrace(verifiy, args)
print("args "+str(args))
path = tools.path_tool.getActualJsonPath(job)
tools.file_tool.writeFileDict(job.m, job, path, args)
tools.file_tool.write_file_dict(job.m, job, path, args)
childArgs = {}
for p in args:
if p in B.LIST_MAIN_PAR:
childArgs[p] = args[p]
if p in [B.PAR_USER, B.PAR_STEP, B.PAR_PROJ]:
if p in [B.PAR_USER, B.PAR_STEP, B.PAR_PROJ, B.PAR_APP, B.PAR_COMP]:
childArgs[p] = args[p]
if p in [B.PAR_TESTCASE, B.PAR_TCTIME]:
childArgs[B.PAR_TCTIME] = date_tool.getActdate(date_tool.F_LOG)
@ -194,13 +194,24 @@ def select_application(job, programDef, project):
read the application of the project and select one of them
:return:
"""
verify = job.m.getLogLevel("job_tool")
job.m.logDebug(verify, "select_application "+project)
verify = job.getDebugLevel("job_tool")
job.debug(verify, "select_application "+project)
# the application are configured in comp/applications.yml which are optionally stored in the database
projList = [project]
apps = model.application.select_applications(job, projList)
return list(apps.keys())
def select_components(job, programDef, project, application):
"""
read the application of the project and select one of them
:return:
"""
verify = job.getDebugLevel("job_tool")
job.debug(verify, "select_components "+project)
# the application are configured in comp/applications.yml which are optionally stored in the database
comps = model.component.select_components(job, project, application)
return comps
def select_environment(job, programDef, project):
"""
read environments and select one of them

Loading…
Cancel
Save