Browse Source

refactoring: basic-model - app inclusive plus table-ddl

refactor
Ulrich 1 year ago
parent
commit
40bfeb694f
  1. 2
      basic/Testserver.py
  2. 8
      basic/constants.py
  3. 64
      model/application.py
  4. 5
      model/datatable.csv
  5. 6
      model/entity.py
  6. 97
      model/table.py
  7. 4
      model/user.py
  8. 26
      test/test_14table.py
  9. 4
      test/test_16project.py
  10. 6
      test/test_17release.py
  11. 8
      test/test_17story.py
  12. 6
      test/test_19usecase.py
  13. 6
      test/test_19variant.py
  14. 4
      test/test_20application.py
  15. 2
      test/test_21environment.py
  16. 8
      test/test_22component.py
  17. 2
      test/test_26testsuite.py
  18. 2
      test/test_27testcase.py
  19. 4
      test/test_28step.py
  20. 2
      test/test_29datatable.py
  21. 30
      tools/config_tool.py
  22. 2
      tools/db_abstract.py
  23. 4
      tools/dbmysql_tool.py
  24. 67
      tools/file_tool.py
  25. 30
      tools/file_type.py
  26. 3
      tools/filecsv_fcts.py

2
basic/Testserver.py

@ -147,7 +147,7 @@ class Testserver():
:return:
"""
table = model.table.Table(job)
table = table.read_entity(job, tablename, project)
table = table.read_entity(job, tablename)
sql = table.get_schema(project=project, tableName=tablename, tableObject=table) # [B.DATA_NODE_TABLES][m[:-4]]
job.m.logInfo(sql)
for s in sql.split(";\n"):

8
basic/constants.py

@ -323,6 +323,8 @@ DATA_NODE_HEADER = "_header"
""" it defines a subnode of a table for the field-list, used for ddl """
DATA_NODE_FIELDS = "_fields"
""" it defines a subnode of a table for the data as key-value-pair, keys from header """
DATA_NODE_DDLFIELDS = "_ddlfields"
""" it defines a subnode of a table for the data as key-value-pair, keys from header """
DATA_NODE_DATA = "_data"
""" it defines the main node in the testdata for the steps to execute """
DATA_NODE_STEPS = SUBJECT_STEPS # "_step" ## ?? SUBJECT
@ -336,6 +338,9 @@ DATA_NODE_TABLES = "_tables" # ?? SUBJECT
DATA_NODE_KEYS = "_keys"
""" This constant defines the node for data scheme (DataDefinitionLanguage)
The fields are defined in data_const (D) """
DATA_NODE_DDLKEYS = "_ddlkeys"
""" This constant defines the node for data scheme (DataDefinitionLanguage)
The fields are defined in data_const (D) """
DATA_NODE_DDL = "ddl" # ?? TOPIC
DATA_NODE_COMP = "comp" # ?? SUBJECT
""" This constant defines """
@ -344,8 +349,9 @@ DATA_NODE_CATALOG = "_catalog"
DATA_NODE_ROW = "_row"
DATA_NODE_SUBTABLES = "subtables" # ?? SUBJECT
DATA_NODE_TOPICS = "_topics"
DATA_NODE_PATH = "_path"
LIST_DATA_NODE = [DATA_NODE_HEADER, DATA_NODE_DATA, DATA_NODE_DDL, DATA_NODE_TOPICS,
DATA_NODE_TABLES, DATA_NODE_STEPS, DATA_NODE_OPTION, DATA_NODE_ROW]
DATA_NODE_TABLES, DATA_NODE_STEPS, DATA_NODE_OPTION, DATA_NODE_ROW, DATA_NODE_PATH]
# -- Parameter ------------------------------------------------
# ____ _

64
model/application.py

@ -198,19 +198,12 @@ class Application(model.entity.Entity):
project = {}
"""
FIELD_ID = "apid"
FIELD_NAME = D.FIELD_NAME
FIELD_DESCRIPTION = B.SUBJECT_DESCRIPTION
FIELD_REFERENCE = B.SUBJECT_REFERENCE
FIELD_PROJECT = B.SUBJECT_PROJECT
LIST_FIELDS = [FIELD_ID, FIELD_NAME, FIELD_DESCRIPTION, FIELD_REFERENCE, FIELD_PROJECT]
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE, B.SUBJECT_PROJECT]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES]
LIST_SUBTABLES = [B.SUBJECT_APPS, B.SUBJECT_COMPS, B.SUBJECT_RELS, B.SUBJECT_USECASES]
PREFIX_SUBTABLE = "ap"
def __init__(self, job):
self.job = job
def read_unique_names(self, job, project, application, gran, args):
"""
reads the entity-names from file-storage
@ -221,7 +214,8 @@ class Application(model.entity.Entity):
:param opt. args additional args
:return: list of entity-names
"""
config = self.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS, tools.config_tool.get_plain_filename(job, ""))
config = self.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS,
tools.config_tool.get_plain_filename(job, ""), ttype=B.SUBJECT_APP)
outList = list(config[B.SUBJECT_APPS].keys())
return outList
@ -232,40 +226,34 @@ class Application(model.entity.Entity):
:param name:
:return:
"""
config = self.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS, tools.config_tool.get_plain_filename(job, name))
return self.setAttributes(job, config, name, LIST_FIELDS, LIST_NODES, LIST_SUBTABLES)
def getFieldList(self):
"""
returns a list of scalar attributes
:return: LIST_FIELDS
"""
return LIST_FIELDS
config = self.getConfig(job, P.KEY_BASIC, B.SUBJECT_APPS,
tools.config_tool.get_plain_filename(job, name), ttype=B.SUBJECT_APP)
return self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)
def getNodeList(self):
@staticmethod
def rebuild_data(job, data: dict) -> dict:
"""
returns a list of sub-nodes - which can be persisted in a clob-field
:return: LIST_NODES
"""
return LIST_NODES
def getSubtableList(self):
"""
returns a list of sub-tables
:return: LIST_SUBTABLES
"""
return LIST_SUBTABLES
def getName(self):
"""
returns the name - maybe build from other attributes
gets the subtable-tag from filecsv and sets the subtables in order to workable entity-elements
:param job:
:param data:
:return:
"""
return self.name
data = tools.file_type.popSubjectsNode(job, data)
data = tools.file_type.popNameNode(job, data)
return data
def getIDName(self):
def check_data(self, job, data: dict) -> dict:
"""
it returns the name as unique-id - maybe build from few attributes
it checks the data for the specific form
:param job:
:param tdata:
:param ttype:
:return:
"""
return self.name
import tools.file_type
checkNodes = {}
checkNodes[tools.file_type.MUST_NODES] = []
checkNodes[tools.file_type.MUSTNT_NODES] = [B.DATA_NODE_DATA, B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS] + B.LIST_SUBJECTS
checkNodes[tools.file_type.OPT_NODES] = []
return tools.file_type.check_nodes(job, data, checkNodes)

5
model/datatable.csv

@ -2,10 +2,13 @@ _type;ctlg;;;;;;;;;
_key;_field;;;;;;;;;
table:datatable;_field;type;format;index;generic;aggregat;key;acceptance;alias;description
;dtid;pk;integer;N;;;T:1;;;
;name;str;vchar(256);I;;;F:1;;;
;name;str;vchar(256);I;;;F:3;;;
;database;str;vchar(256);I;;;F:1;;;
;schema;str;vchar(256);I;;;F:2;;;
;description;string;vchar(256);N;;;;;;
;reference;str;vchar(256);N;;;;;;story
;attributes;jlob;clob;N;;;;;;
;fieldnames;jlob;clob;N;;;;;;
;insauthor;str;vchar(256);N;;;;;;
;inscommit;str;vchar(256);N;;;;;;
;instime;time;datetime;N;;;;;;

1 _type ctlg
2 _key _field
3 table:datatable _field type format index generic aggregat key acceptance alias description
4 dtid pk integer N T:1
5 name str vchar(256) I F:1 F:3
6 database str vchar(256) I F:1
7 schema str vchar(256) I F:2
8 description string vchar(256) N
9 reference str vchar(256) N story
10 attributes jlob clob N
11 fieldnames jlob clob N
12 insauthor str vchar(256) N
13 inscommit str vchar(256) N
14 instime time datetime N

6
model/entity.py

@ -86,7 +86,7 @@ class Entity:
entityNames = self.read_unique_names(job, project, application, gran, args)
else:
entityNames = self.read_unique_names(job, project, application, gran, args)
return entityNames
return [item for item in entityNames if item not in B.LIST_DATA_NODE]
def get_entities(self, job, storage="", project="", application="", gran="", args={}):
"""
@ -364,6 +364,8 @@ class Entity:
if verify: print("setFields " + str(k) + " = " + str(config[rootname][key]))
if k in fields:
setattr(self, tools.data_tool.getSingularKeyword(k), tools.data_tool.getValueStr(config[rootname][key]))
elif k == "fieldnames":
setattr(self, tools.data_tool.getPluralKeyword(k), config[rootname][key])
else:
setattr(self, tools.data_tool.getSingularKeyword(k), config[rootname][key])
setattr(self, D.FIELD_NAME, rootname)
@ -377,7 +379,7 @@ class Entity:
if key == "":
continue
for o in config[rootname][key]:
if o in [B.DATA_NODE_DATA, B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_ROW]:
if o in [B.DATA_NODE_DATA, B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_ROW, B.DATA_NODE_PATH]:
continue
args = {}
args[k] = config[rootname][key][o]

97
model/table.py

@ -15,17 +15,6 @@ import tools.git_tool
TABLE_NAME = "table"
""" system-name for this entity """
FIELD_ID = "tbid"
FIELD_NAME = D.FIELD_NAME
FIELD_DESCRIPTION = B.SUBJECT_DESCRIPTION
FIELD_REFERENCE = B.SUBJECT_REFERENCE
FIELD_ATTRIBUTES = B.NODE_ATTRIBUTES
FIELD_PROJECT = B.SUBJECT_PROJECT
FIELD_APPLICATION = B.SUBJECT_APP
LIST_FIELDS = [FIELD_ID, FIELD_NAME, FIELD_DESCRIPTION, FIELD_REFERENCE]
""" list of object-attributes """
LIST_NODES = [B.DATA_NODE_HEADER, B.DATA_NODE_DATA, B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
LIST_SUBTABLES = {}
DEFAULT_FIELD = ""
DEFAULT_TYPE = "string"
@ -69,6 +58,12 @@ class Table(model.entity.Entity):
nor a concrete table in the system-model
it is an abstract super-class in order to relation to the database-management-system
"""
FIELD_ID = "tbid"
LIST_FIELDS = [FIELD_ID, D.FIELD_NAME, B.SUBJECT_DESCRIPTION, B.SUBJECT_REFERENCE, B.SUBJECT_PROJECT]
""" list of object-attributes """
LIST_NODES = [B.NODE_ATTRIBUTES, "fielddef", "fieldnames"]
LIST_SUBTABLES = []
tbid = 0
name = ""
project = ""
@ -121,70 +116,38 @@ class Table(model.entity.Entity):
"""
return sql
# def read_unique_names(self, job, project, application, gran, args):
def read_unique_names(self, job, project="", application="", gran= "", args={}) -> list:
return []
# table is not a real entity
# def read_entity(self, job, name, project=""):
def read_entity(self, job, name: str, args: dict={}):
return self.read_ddl(job, name, args=args)
# table is not a real entity
def getFieldList(self):
"""
returns a list of scalar attributes
:return: LIST_FIELDS
"""
return LIST_FIELDS
def getNodeList(self):
"""
returns a list of sub-nodes - which can be persisted in a clob-field
:return: LIST_NODES
"""
return LIST_NODES
def getSubtableList(self):
"""
returns a list of sub-tables
:return: LIST_SUBTABLES
"""
return LIST_SUBTABLES
def getName(self):
"""
returns the name - maybe build from other attributes
:return:
"""
return self.name
def getIDName(self):
def read_ddl(self, job, name, args: dict={}):
"""
it returns the name as unique-id - maybe build from few attributes
reads the ddl of the table depending on context
a) component: the ddl is read from specific or general component-folder
b) testcase: the ddl is read from general component-folder
c) testserver: the ddl is read from model-folder
:param job:
:param name:
:param context:
:return:
"""
return self.name
def read_ddl(self, job, name):
ddl = tools.config_tool.getConfig(job, D.DDL_FILENAME, self.component, name)
self.fieldnames = []
fielddef = {}
if B.DATA_NODE_TABLES in ddl and name in ddl[B.DATA_NODE_TABLES]:
ddl = ddl[B.DATA_NODE_TABLES][name]
for k in ddl[B.DATA_NODE_DATA]:
if not isinstance(k, dict) \
or D.DDL_FIELD not in k:
continue
ddl[k[D.DDL_FIELD]] = k
for f in ddl:
if f in [B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_DATA]:
continue
field = {}
self.fieldnames.append(f)
for k in D.LIST_DDL_ATTR:
if k in ddl[f]:
field[k] = ddl[f][k]
if "context" in args:
if args["context"] == "component":
ddl = tools.config_tool.getConfig(job, D.DDL_FILENAME, self.component.name, name, ttype=D.CSV_SPECTYPE_DDL)
elif args["context"] == "testdata":
ddl = tools.config_tool.getConfig(job, D.DDL_FILENAME, args["context"], name, ttype=D.CSV_SPECTYPE_DDL)
elif args["context"] == B.ATTR_INST_TESTSERVER:
ddl = tools.config_tool.getConfig(job, D.DDL_FILENAME, B.ATTR_INST_TESTSERVER, name, ttype=D.CSV_SPECTYPE_DDL)
config = {}
if "_name" in ddl:
config[ddl["_name"]] = ddl
else:
field[k] = DEFAULTS[k]
fielddef[f] = field
self.fielddef = fielddef
config = ddl
return self.setAttributes(job, config, name, self.LIST_FIELDS, self.LIST_NODES, self.LIST_SUBTABLES)
def select_entity(self, job, name):
"""

4
model/user.py

@ -259,8 +259,8 @@ class User(model.entity.Entity):
self.setDbAttributes(job, [TABLE_NAME])
dbi = basic.toolHandling.getDbTool(job, self, job.conf[B.TOPIC_NODE_DB]["type"])
condition = "where"
if B.DATA_NODE_KEYS in self.conf[B.DATA_NODE_DDL][table]:
keys = self.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_KEYS]
if B.DATA_NODE_DDLKEYS in self.conf[B.DATA_NODE_DDL][table]:
keys = self.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_DDLKEYS]
else:
keys = self.conf[B.DATA_NODE_DDL][table]
for f in IDENTIFYER_FIELDS:

26
test/test_14table.py

@ -11,13 +11,14 @@ import basic.constants as B
import test.constants as T
import model.table
import model.entity
import tools.data_const as D
HOME_PATH = test.constants.HOME_PATH
PYTHON_CMD = "python"
TEST_FUNCTIONS = ["test_10getEntityNames", "test_11getEntities", "test_12getEntity",
# "test_14insertEntity", # "test_13writeEntity",
"test_20getSchema"]
TEST_FUNCTIONS = []
# TEST_FUNCTIONS = []
PROGRAM_NAME = "clean_workspace"
class MyTestCase(unittest.TestCase):
@ -41,7 +42,7 @@ class MyTestCase(unittest.TestCase):
self.assertEqual(type(entityNames), list)
#entityNames = project.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
#self.assertEqual(type(entityNames), list)
def test_11getEntities(self):
global mymsg
@ -68,20 +69,23 @@ class MyTestCase(unittest.TestCase):
job = test.testtools.getJob()
table = model.table.Table(job)
name = "adress"
acttable = table.read_entity(job, name)
self.assertEqual(getattr(acttable, model.table.FIELD_NAME), name)
args = {}
args["context"] = "testdata"
acttable = table.read_entity(job, name, args=args)
self.assertEqual(getattr(acttable, D.FIELD_NAME), name)
self.assertRaises(Exception, table.read_entity, job, "xyzxyz")
#
table = model.table.Table(job)
args["context"] = B.ATTR_INST_TESTSERVER
name = B.SUBJECT_APP
acttable = table.read_entity(job, name, project=B.ATTR_INST_TESTSERVER)
self.assertEqual(getattr(acttable, model.table.FIELD_NAME), name)
acttable = table.read_entity(job, name, args=args)
self.assertEqual(getattr(acttable, D.FIELD_NAME), name)
self.assertRaises(Exception, table.read_entity, job, "xyzxyz")
#
table = model.table.Table(job)
name = "ap_application"
acttable = table.read_entity(job, name, project=B.ATTR_INST_TESTSERVER)
self.assertEqual(getattr(acttable, model.table.FIELD_NAME), name)
acttable = table.read_entity(job, name, args=args)
self.assertEqual(getattr(acttable, D.FIELD_NAME), name)
self.assertRaises(Exception, table.read_entity, job, "xyzxyz")
def test_20getSchema(self):
@ -94,16 +98,18 @@ class MyTestCase(unittest.TestCase):
job = test.testtools.getJob()
table = model.table.Table(job)
entityNames = table.read_unique_names(job, B.ATTR_INST_TESTSERVER, "", "", {})
args = {}
args["context"] = B.ATTR_INST_TESTSERVER
name = "tc_table"
print(name)
acttable = table.read_entity(job, name, project=B.ATTR_INST_TESTSERVER)
acttable = table.read_entity(job, name, args=args)
schema = acttable.get_schema(name)
print(schema)
self.assertEqual("CREATE TABLE", schema[:12])
self.assertIn(name, schema)
for name in entityNames:
print(name)
acttable = table.read_entity(job, name, project=B.ATTR_INST_TESTSERVER)
acttable = table.read_entity(job, name, args=args)
schema = acttable.get_schema(name)
print(schema)
self.assertEqual("CREATE TABLE", schema[:12])

4
test/test_16project.py

@ -35,7 +35,7 @@ class MyTestCase(unittest.TestCase):
entityNames = project.read_unique_names(job, "", "", "", {})
self.assertEqual(type(entityNames), list)
#entityNames = project.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
#self.assertEqual(type(entityNames), list)
def test_11getEntities(self):
global mymsg
@ -99,7 +99,7 @@ class MyTestCase(unittest.TestCase):
entityNames = collectInnerList(project.get_unique_names(job, storage=model.entity.STORAGE_DB))
self.assertIn(username, entityNames)
actUser = project.select_entity(job, username)
self.assertEquals(getattr(actUser, model.project.FIELD_USERNAME), username)
self.assertEqual(getattr(actUser, model.project.FIELD_USERNAME), username)
actUser.delete_entity(job, username, "project")
entityNames = collectInnerList(project.get_unique_names(job, storage=model.entity.STORAGE_DB))
self.assertNotIn(username, entityNames)

6
test/test_17release.py

@ -36,9 +36,9 @@ class MyTestCase(unittest.TestCase):
release = model.release.Release(job)
entityNames = []
entityNames = release.read_unique_names(job, "", "", "", {})
self.assertEquals(type(entityNames), list)
self.assertEqual(type(entityNames), list)
#entityNames = project.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
#self.assertEqual(type(entityNames), list)
def test_11getEntities(self):
global mymsg
@ -102,7 +102,7 @@ class MyTestCase(unittest.TestCase):
entityNames = collectInnerList(release.get_unique_names(job, storage=model.entity.STORAGE_DB))
self.assertIn(username, entityNames)
actUser = release.select_entity(job, username)
self.assertEquals(getattr(actUser, model.release.FIELD_NAME), username)
self.assertEqual(getattr(actUser, model.release.FIELD_NAME), username)
actUser.delete_entity(job, username, "release")
entityNames = collectInnerList(release.get_unique_names(job, storage=model.entity.STORAGE_DB))
self.assertNotIn(username, entityNames)

8
test/test_17story.py

@ -35,9 +35,9 @@ class MyTestCase(unittest.TestCase):
job = test.testtools.getJob()
story = model.story.Story(job)
entityNames = story.read_unique_names(job, "", "", "", {})
self.assertEquals(type(entityNames), list)
self.assertEqual(type(entityNames), list)
#entityNames = project.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
#self.assertEqual(type(entityNames), list)
def test_11getEntities(self):
global mymsg
@ -92,7 +92,7 @@ class MyTestCase(unittest.TestCase):
entityNames = story.get_unique_names(job, storage=model.entity.STORAGE_FILE)
self.assertIn(username, entityNames)
actUser = story.read_entity(job, username)
self.assertEquals(getattr(actUser, model.story.FIELD_NAME), username)
self.assertEqual(getattr(actUser, model.story.FIELD_NAME), username)
actUser.remove_entity(job, username)
entityNames = story.get_unique_names(job, storage=model.entity.STORAGE_FILE)
self.assertNotIn(username, entityNames)
@ -117,7 +117,7 @@ class MyTestCase(unittest.TestCase):
entityNames = collectInnerList(story.get_unique_names(job, storage=model.entity.STORAGE_DB))
self.assertIn(username, entityNames)
actUser = story.select_entity(job, username)
self.assertEquals(getattr(actUser, model.story.FIELD_NAME), username)
self.assertEqual(getattr(actUser, model.story.FIELD_NAME), username)
actUser.delete_entity(job, username, "story")
entityNames = collectInnerList(story.get_unique_names(job, storage=model.entity.STORAGE_DB))
self.assertNotIn(username, entityNames)

6
test/test_19usecase.py

@ -38,7 +38,7 @@ class MyTestCase(unittest.TestCase):
entityNames = usecase.read_unique_names(job, "", "", "", {})
self.assertEqual(type(entityNames), list)
#entityNames = project.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
#self.assertEqual(type(entityNames), list)
def test_11getEntities(self):
global mymsg
@ -93,7 +93,7 @@ class MyTestCase(unittest.TestCase):
entityNames = project.get_unique_names(job, storage=model.entity.STORAGE_FILE)
self.assertIn(username, entityNames)
actUser = project.read_entity(job, username)
self.assertEquals(getattr(actUser, model.project.FIELD_USERNAME), username)
self.assertEqual(getattr(actUser, model.project.FIELD_USERNAME), username)
actUser.remove_entity(job, username)
entityNames = project.get_unique_names(job, storage=model.entity.STORAGE_FILE)
self.assertNotIn(username, entityNames)
@ -118,7 +118,7 @@ class MyTestCase(unittest.TestCase):
entityNames = collectInnerList(project.get_unique_names(job, storage=model.entity.STORAGE_DB))
self.assertIn(username, entityNames)
actUser = project.select_entity(job, username)
self.assertEquals(getattr(actUser, model.project.FIELD_USERNAME), username)
self.assertEqual(getattr(actUser, model.project.FIELD_USERNAME), username)
actUser.delete_entity(job, username, "project")
entityNames = collectInnerList(project.get_unique_names(job, storage=model.entity.STORAGE_DB))
self.assertNotIn(username, entityNames)

6
test/test_19variant.py

@ -37,7 +37,7 @@ class MyTestCase(unittest.TestCase):
entityNames = variant.read_unique_names(job, "", "", "", {})
self.assertEqual(type(entityNames), list)
#entityNames = variant.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
#self.assertEqual(type(entityNames), list)
def test_11getEntities(self):
global mymsg
@ -92,7 +92,7 @@ class MyTestCase(unittest.TestCase):
entityNames = variant.get_unique_names(job, storage=model.entity.STORAGE_FILE)
self.assertIn(username, entityNames)
actUser = variant.read_entity(job, username)
self.assertEquals(getattr(actUser, model.variant.FIELD_NAME), username)
self.assertEqual(getattr(actUser, model.variant.FIELD_NAME), username)
actUser.remove_entity(job, username)
entityNames = variant.get_unique_names(job, storage=model.entity.STORAGE_FILE)
self.assertNotIn(username, entityNames)
@ -117,7 +117,7 @@ class MyTestCase(unittest.TestCase):
entityNames = collectInnerList(variant.get_unique_names(job, storage=model.entity.STORAGE_DB))
self.assertIn(username, entityNames)
actUser = variant.select_entity(job, username)
self.assertEquals(getattr(actUser, model.variant.FIELD_NAME), username)
self.assertEqual(getattr(actUser, model.variant.FIELD_NAME), username)
actUser.delete_entity(job, username, "variant")
entityNames = collectInnerList(variant.get_unique_names(job, storage=model.entity.STORAGE_DB))
self.assertNotIn(username, entityNames)

4
test/test_20application.py

@ -35,10 +35,10 @@ class MyTestCase(unittest.TestCase):
application = model.application.Application(job)
entityNames = []
entityNames = application.read_unique_names(job, "", "", "", {})
self.assertEquals(type(entityNames), list)
self.assertEqual(type(entityNames), list)
print("apps "+str(entityNames))
#entityNames = component.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
#self.assertEqual(type(entityNames), list)
def test_11getEntities(self):
global mymsg

2
test/test_21environment.py

@ -36,7 +36,7 @@ class MyTestCase(unittest.TestCase):
entityNames = environment.read_unique_names(job, "", "", "", {})
self.assertEqual(type(entityNames), list)
#entityNames = environment.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
#self.assertEqual(type(entityNames), list)
def test_11getEntities(self):
global mymsg

8
test/test_22component.py

@ -31,9 +31,9 @@ class MyTestCase(unittest.TestCase):
component = model.component.Component(job)
entityNames = []
entityNames = component.read_unique_names(job, "", "", "", {})
self.assertEquals(type(entityNames), list)
self.assertEqual(type(entityNames), list)
#entityNames = component.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
#self.assertEqual(type(entityNames), list)
def test_11getEntities(self):
global mymsg
@ -88,7 +88,7 @@ class MyTestCase(unittest.TestCase):
entityNames = component.get_unique_names(job, storage=model.entity.STORAGE_FILE)
self.assertIn(username, entityNames)
actUser = component.read_entity(job, username)
self.assertEquals(getattr(actUser, model.component.FIELD_USERNAME), username)
self.assertEqual(getattr(actUser, model.component.FIELD_USERNAME), username)
actUser.remove_entity(job, username)
entityNames = component.get_unique_names(job, storage=model.entity.STORAGE_FILE)
self.assertNotIn(username, entityNames)
@ -113,7 +113,7 @@ class MyTestCase(unittest.TestCase):
entityNames = collectInnerList(component.get_unique_names(job, storage=model.entity.STORAGE_DB))
self.assertIn(username, entityNames)
actUser = component.select_entity(job, username)
self.assertEquals(getattr(actUser, model.component.FIELD_USERNAME), username)
self.assertEqual(getattr(actUser, model.component.FIELD_USERNAME), username)
actUser.delete_entity(job, username, "component")
entityNames = collectInnerList(component.get_unique_names(job, storage=model.entity.STORAGE_DB))
self.assertNotIn(username, entityNames)

2
test/test_26testsuite.py

@ -51,7 +51,7 @@ class MyTestCase(unittest.TestCase):
entityNames = testsuite.read_unique_names(job, "", "", "", {})
self.assertEqual(type(entityNames), list)
#entityNames = project.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
#self.assertEqual(type(entityNames), list)
def test_11getEntities(self):
global mymsg

2
test/test_27testcase.py

@ -35,7 +35,7 @@ class MyTestCase(unittest.TestCase):
entityNames = testcase.read_unique_names(job, "", "", "", {})
self.assertEqual(type(entityNames), list)
#entityNames = project.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
#self.assertEqual(type(entityNames), list)
def test_11getEntities(self):
global mymsg

4
test/test_28step.py

@ -49,10 +49,10 @@ class MyTestCase(unittest.TestCase):
step = model.step.Step(job)
entityNames = []
entityNames = step.read_unique_names(job, "", "", "", {})
self.assertEquals(type(entityNames), list)
self.assertEqual(type(entityNames), list)
print("apps "+str(entityNames))
#entityNames = component.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
#self.assertEqual(type(entityNames), list)
def test_12getEntity(self):
global mymsg

2
test/test_29datatable.py

@ -35,7 +35,7 @@ class MyTestCase(unittest.TestCase):
entityNames = table.read_unique_names(job, "", "", "", {})
self.assertEqual(type(entityNames), list)
#entityNames = project.select_unique_names(job, "", "", "", {})
#self.assertEquals(type(entityNames), list)
#self.assertEqual(type(entityNames), list)
def test_11getEntities(self):
global mymsg

30
tools/config_tool.py

@ -7,6 +7,7 @@
# ---------------------------------------------------------------------------------------------------------
import sys
import basic.constants as B
import tools.value_tool
try:
import basic.program
@ -21,7 +22,7 @@ import basic.constants as B
import tools.data_const as D
import tools.path_const as P
COMP_FILES = [D.DDL_FILENAME]
TABLE_FILES = [D.DDL_FILENAME]
CONFIG_FORMAT = [D.DFILE_TYPE_YML, D.DFILE_TYPE_JSON, D.DFILE_TYPE_CSV]
def getExistingPath(job, pathnames):
@ -74,8 +75,8 @@ def select_config_path(job, modul, name, subname=""):
return pathname
if verify: job.debug(verify, "6 " + pathname)
raise Exception(P.EXP_CONFIG_MISSING, modul+", "+name)
elif modul in COMP_FILES:
return getCompPath(job, name, subname, modul)
elif modul in TABLE_FILES:
return getTablePath(job, name, subname, modul)
elif modul == P.KEY_BASIC:
return getBasicPath(job, name)
elif modul == P.KEY_TESTCASE:
@ -147,15 +148,34 @@ def getToolPath(job, name, subname):
return path
raise Exception(P.EXP_CONFIG_MISSING, envdir + ", " + name)
def getCompPath(job, name, subname, filename):
# for example DATASTRUCTURE or the table
def getTablePath(job, name, subname, filename):
"""
reads the ddl of the table depending on context (=name)
a) component: the ddl is read from specific or general component-folder
b) testcase: the ddl is read from general component-folder
c) testserver: the ddl is read from model-folder
:param job:
:param name: name of the context testserver/testdata or name of the component
:param subname: name of the table
:param filename: don't use
:return:
"""
pathnames = []
if name == B.ATTR_INST_TESTSERVER:
path = tools.value_tool.compose_string(job, job.conf[B.TOPIC_PATH][P.ATTR_PATH_MODEL], None)
pathnames.append(os.path.join(path, subname))
else:
path = tools.value_tool.compose_string(job, job.conf[B.TOPIC_PATH][P.ATTR_PATH_COMPONENTS], None)
pathnames.append(os.path.join(path, P.KEY_CATALOG, P.VAL_TABLES, subname))
pass
"""
pathnames.append(os.path.join(job.conf[B.TOPIC_PATH][P.ATTR_PATH_COMPONENTS],
basic.componentHandling.getComponentFolder(name), filename))
pathnames.append(os.path.join(job.conf[B.TOPIC_PATH][P.ATTR_PATH_COMPONENTS],
basic.componentHandling.getComponentFolder(subname), filename))
pathnames.append(os.path.join(job.conf[B.TOPIC_PATH][P.ATTR_PATH_PROGRAM], P.VAL_BASIC, filename))
pathnames.append(os.path.join(job.conf[B.TOPIC_PATH][P.ATTR_PATH_PROGRAM], P.VAL_BASIC, subname))
"""
configpath = getExistingPath(job, pathnames)
if configpath is not None:
return configpath

2
tools/db_abstract.py

@ -486,7 +486,7 @@ class DbFcts():
keys = ddl[B.DATA_NODE_KEYS]
else:
header = ddl[B.DATA_NODE_HEADER]
keys = ddl
keys = ddl[B.DATA_NODE_DDLKEYS]
for f in header:
#for f in ddl[B.DATA_NODE_HEADER]:
if D.DDL_TYPE in keys[f] and keys[f][D.DDL_TYPE] == D.TYPE_PK:

4
tools/dbmysql_tool.py

@ -48,7 +48,7 @@ class DbFcts(tools.dbrel_tool.DbFcts):
for x in myresult:
r = {}
i = 0
keys = self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_KEYS]
keys = self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_DDLKEYS]
for f in header:
if (keys[f][D.DDL_TYPE] in [D.TYPE_TIME, D.TYPE_DATE]):
r[f] = tools.date_tool.getFormatdate(x[i], tools.date_tool.F_DIR)
@ -95,7 +95,7 @@ class DbFcts(tools.dbrel_tool.DbFcts):
if B.DATA_NODE_KEYS in self.comp.conf[B.DATA_NODE_DDL][table]:
keys = self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_KEYS]
else:
keys = self.comp.conf[B.DATA_NODE_DDL][table]
keys = self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_DDLKEYS]
for r in rows:
rowvalues = []
for h in insheader:

67
tools/file_tool.py

@ -246,73 +246,6 @@ def read_file_dict(job, path: str, msg, ttype: str = D.DFILE_TYPE_CSV) -> dict:
doc["_path"] = path
# TODO !! refactor to file_type
return tools.file_type.check_tdata(job, doc, ttype)
#check_file_dict(job, doc, msg, ttype)
#return doc
def check_file_dict(job, config: dict, msg, ttype: str):
"""
check-routine for different kind of dictionary-types
:param job:
:param config:
:param msg:
:param ttype:
:return:
"""
MUST_NODES = []
MUSTNT_NODES = []
if ttype in [D.CSV_SPECTYPE_CTLG]:
MUST_NODES = [B.DATA_NODE_HEADER, B.DATA_NODE_KEYS]
MUSTNT_NODES = [B.DATA_NODE_DATA]
elif ttype in [D.CSV_SPECTYPE_DDL]:
MUST_NODES = [B.DATA_NODE_HEADER]
MUSTNT_NODES = [B.DATA_NODE_DATA, B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
elif ttype in [D.CSV_SPECTYPE_DATA]:
MUST_NODES = [B.DATA_NODE_HEADER, B.DATA_NODE_DATA]
MUSTNT_NODES = [B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
elif ttype in [D.CSV_SPECTYPE_CONF]:
MUST_NODES = [B.DATA_NODE_HEADER, B.DATA_NODE_DATA]
MUSTNT_NODES = [B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
elif ttype in [D.CSV_SPECTYPE_TREE]:
MUST_NODES = [B.DATA_NODE_HEADER, B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
MUSTNT_NODES = [B.DATA_NODE_DATA]
elif ttype in [D.CSV_SPECTYPE_COMP]:
MUST_NODES = [B.SUBJECT_ARTIFACTS, B.SUBJECT_STEPS, "functions", B.SUBJECT_DATATABLES]
MUSTNT_NODES = [B.DATA_NODE_DATA]
elif ttype+"s" in B.LIST_SUBJECTS:
print("subject-typ "+ttype+" "+config["_path"])
elif ttype in ["basic", "tool"]:
# tool : tool-specific nodes
#print("anderer bekannter Ttyp " + ttype + " " + config["_path"])
return
else:
print("anderer Ttyp "+ttype+" "+config["_path"])
checkNodes(job, config, MUST_NODES, MUSTNT_NODES)
def checkNodes(job, config, mustNodes, mustntNodes):
a = str(config["_path"]).split(os.path.sep)
b = a[-1].split(".")
path = config["_path"]
if b[0] in config:
config = config[b[0]]
try:
if len(config) == 2:
for x in B.LIST_SUBJECTS:
if x[:-1] in config:
config = config[x[:-1]]
break
except:
pass
for n in mustNodes:
if n not in config:
raise Exception("must-node doesnt exist "+n+" "+path)
for n in mustntNodes:
if n not in config:
continue
if len(config[n]) == 0:
job.m.logWarn("empty mustnt-node "+n+" "+path)
else:
raise Exception("must-node doesnt exist "+n+" "+path)

30
tools/file_type.py

@ -169,7 +169,8 @@ class DatatypeCatalog():
checkNodes[MUST_NODES] = [B.DATA_NODE_HEADER, B.DATA_NODE_KEYS]
checkNodes[MUSTNT_NODES] = [B.DATA_NODE_DATA]
checkNodes[OPT_NODES] = [B.DATA_NODE_FIELDS]
return check_nodes(job, data, checkNodes)
check_nodes(job, data, checkNodes)
return data
def popSubjectsNode(job, data: dict) -> dict:
if len(data) == 1:
@ -238,24 +239,32 @@ def checkCatalog(job, tdata: dict) -> dict:
class DatatypeDDL():
"""
structure:
* B.DATA_NODE_HEADER : list of ddl-attributes
* B.DATA_NODE_FIELDS : list of field-names
* B.DATA_NODE_KEYS : fields with attributes (header X fields)
* B.DATA_NODE_HEADER : list of field-names = column-names for table content
* B.DATA_NODE_DATA : list of rows = tabel-content
* B.DATA_NODE_DDLFIELDS : list of ddl-attributes - optional because it is B.DATA_NODE_KEYS.keys()
* B.DATA_NODE_DDLKEYS : fields with attributes (header X fields)
"""
@staticmethod
def rebuild_data(job, data: dict, tableAttr: dict) -> dict:
data = popTablesNode(job, data)
data = popNameNode(job, data)
data = buildKeys(job, data)
data["fieldnames"] = data[B.DATA_NODE_FIELDS]
data["fielddef"] = data[B.DATA_NODE_KEYS]
data[B.DATA_NODE_DDLFIELDS] = data[B.DATA_NODE_HEADER]
data[B.DATA_NODE_DDLKEYS] = data[B.DATA_NODE_KEYS]
data[B.DATA_NODE_HEADER] = data[B.DATA_NODE_FIELDS]
data.pop(B.DATA_NODE_KEYS)
data.pop(B.DATA_NODE_FIELDS)
data = addTableAttr(job, data, tableAttr)
return data
@staticmethod
def check_data(job, data: dict) -> dict:
checkNodes = {}
checkNodes[MUST_NODES] = [B.DATA_NODE_HEADER, B.DATA_NODE_KEYS]
checkNodes[MUSTNT_NODES] = [B.DATA_NODE_DATA]
checkNodes[OPT_NODES] = [B.DATA_NODE_FIELDS]
checkNodes[MUST_NODES] = [B.DATA_NODE_HEADER, B.DATA_NODE_DDLFIELDS, B.DATA_NODE_DDLKEYS]
checkNodes[MUSTNT_NODES] = []
checkNodes[OPT_NODES] = [B.DATA_NODE_DATA]
return check_nodes(job, data, checkNodes)
@ -280,13 +289,6 @@ def buildKeys(job, data: dict) -> dict:
def insertDDL(job, tdata, fields: list) -> dict:
return tdata
def checkDDL(job, tdata: dict) -> dict:
checkNodes = {}
checkNodes[MUST_NODES] = [B.DATA_NODE_HEADER]
checkNodes[MUSTNT_NODES] = [B.DATA_NODE_DATA, B.DATA_NODE_FIELDS, B.DATA_NODE_KEYS]
checkNodes[OPT_NODES] = []
return check_nodes(job, tdata, checkNodes)
def rebuildSpec(job, tdata: dict) -> dict:
return tdata

3
tools/filecsv_fcts.py

@ -137,7 +137,8 @@ class FileFcts(tools.file_abstract.FileFcts):
tableAttr = setTableAttribute(job, tableAttr, a[0], fields)
if ttype == "" and D.DATA_ATTR_TYPE in tableAttr:
ttype = tableAttr[D.DATA_ATTR_TYPE]
elif D.DATA_ATTR_TYPE in tableAttr and ttype != tableAttr[D.DATA_ATTR_TYPE]:
elif (D.DATA_ATTR_TYPE in tableAttr and ttype != tableAttr[D.DATA_ATTR_TYPE]
and ttype not in [D.CSV_SPECTYPE_DDL]):
msg.logWarn("System-Type " + ttype + " be overwrite by file-Type " + tableAttr[D.DATA_ATTR_TYPE])
ttype = tableAttr[D.DATA_ATTR_TYPE]
continue

Loading…
Cancel
Save