Data-Test-Executer Framework, specifically for testing data processing: data generation, system preparation, loading of test data, and a holistic, diversifying comparison of results.

import unittest
import inspect
import tools.filecsv_fcts
import basic.constants as B
import basic.toolHandling as toolHandling
import tools.data_const as D
import tools.path_const as P
import tools.config_tool
import test.testtools
import test.constants
import basic.program
import tools.path_tool
import tools.file_tool
import os
import basic.componentHandling
import tools.conn_tool
HOME_PATH = test.constants.HOME_PATH
DATA_PATH = test.constants.DATA_PATH
OS_SYSTEM = test.constants.OS_SYSTEM
"""
a) catalog: key(s) - values # meta-spec, meta-auto
b) head: key - value # spec-info
c) option: key - value # spec -> job.par
d) step: key=function - values # spec (tp, ts) -> comp.function
e) step: key=usecase - values # spec (tc) -> comp.steps
f) ddl-table: key=field - values=attributes # meta-spec, comp
g) data-table: array: field - values # spec.data, comp.artifacts
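
Illustrative sketch (an assumption for orientation, not taken from the parser itself): a spec
CSV combining several of the block kinds above might look like this:
    option:description;create 2 new contracts;;;
    step:1;testrest;2;1;function:xml-rest;action:new;;
    table:person;_nr;famname;name;birth;sex
    testrest:person;1;Brecht;Bert;10.02.98;m
The test functions below feed such line lists into parseCsv() and check the resulting dict.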
"""
# The list TEST_FUNCTIONS defines which functions are actually executed.
# If you shorten the list, you can run a single specific test function.
TEST_FUNCTIONS = [ "test_02isBlock", "test_03setSubtable", "test_06parseCsv",
                   "test_11ddl", "test_12catalog", "test_13getCsvSpec_tree", "test_14getCsvSpec_key",
                   "test_15getCsvSpec_conf", "test_16getCsvSpec_data"
                   ]
TEST_FUNCTIONS = ["test_03setSubtable"]  # currently restricted to a single test function
PROGRAM_NAME = "clean_workspace"
# with this variable you can switch prints on and off
verbose = False
class MyTestCase(unittest.TestCase):
    mymsg = "--------------------------------------------------------------"
    def test_03setSubtable(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        # job = basic.program.SimpleJob(PROGRAM_NAME)
        job = test.testtools.getWorkspaceJob(PROGRAM_NAME)
        f = toolHandling.getFileTool(job, None, "csv")
        # builds a subtable structure from successive header rows;
        # currently a smoke test without assertions
        subtable = {}
        subtable = f.setSubTable(job, subtable, "_stories", ["_stories", "S1", "S2", "", "", "", ""])
        subtable = f.setSubTable(job, subtable, "_stories-description", ["_stories-description", "todo 1", "todo 2", "", "", "", ""])
        subtable = f.setSubTable(job, subtable, "reference", ["reference", "ext 1", "ext 2", "", "", "", ""])
        pass
    def test_11ddl(self):
        global mymsg
        context = D.CSV_SPECTYPE_DDL
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        # job = basic.program.SimpleJob(PROGRAM_NAME)
        job = test.testtools.getWorkspaceJob(PROGRAM_NAME)
        f = toolHandling.getFileTool(job, None, "csv")
        fy = toolHandling.getFileTool(job, None, "yaml")
        csvText = "_type;"+context+";;;;;\n"
        csvText += "table:police;_field;comment;format;acceptance;generate;nullable\n"
        csvText += ";polid;;int;ignore;auto-id;n\n"
        csvText += ";polnr;;string;;auto-id;n\n"
        csvText += ";polvers;;int;;build-id;n\n"
        csvText += ";persid;;int;;ref-person;n\n"
        csvText += ";objid;;int;;ref-object;n\n"
        csvText += ";amount;;dec;;range;n\n"
        data = f.parseCsv(job.m, job, csvText.split("\n"), ttype="")
        self.assertIn("_type", data)
        self.assertIn("police", data)
        self.assertIn("polid", data["police"])
        self.assertIn("format", data["police"]["polid"])
        self.assertIn("int", data["police"]["objid"]["format"])
        csvText = "table:police;_field;comment;format;acceptance;generate;nullable\n"
        csvText += ";polid;;int;ignore;auto-id;n\n"
        csvText += ";polnr;;string;;auto-id;n\n"
        csvText += ";polvers;;int;;build-id;n\n"
        csvText += ";persid;;int;;ref-person;n\n"
        csvText += ";objid;;int;;ref-object;n\n"
        csvText += ";amount;;dec;;range;n\n"
        data = f.parseCsv(job.m, job, csvText.split("\n"), ttype=context)
        self.assertIn("_type", data)
        self.assertIn("police", data)
        self.assertIn("polid", data["police"])
        self.assertIn("format", data["police"]["polid"])
        self.assertIn("int", data["police"]["objid"]["format"])
        text = fy.dump_file(data)
        #print(str(data))
        #print(text)
        job.m.logInfo(csvText)
        job.m.logInfo("----------------------------------------")
        result = f.buildCsv(job.m, job, data, ttype="")
        self.assertRegex(result, r"_type;"+context)
        result = f.buildCsv(job.m, job, data, ttype=D.CSV_SPECTYPE_DDL)
        self.assertNotIn("_type", result)
        job.m.logInfo(result)
        job.m.logInfo("----------------------------------------")
        job.m.logInfo(text)
    def test_12catalog(self):
        global mymsg
        context = D.CSV_SPECTYPE_CTLG
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = basic.program.SimpleJob(PROGRAM_NAME)
        f = toolHandling.getFileTool(job, None, "csv")
        fy = toolHandling.getFileTool(job, None, "yaml")
        csvText = "_type;ctlg;;;;;;;;;;;;;\n"
        csvText += "_key;name;;;;;;;;;;;;;\n"
        csvText += "table:programs;name;objtype;objname;time;env;app;variant;pardef;pfilesource;pfiletarget;dirname;basedir;loglevel;logpath\n"
        csvText += ";test_executer;tp,ts,tc;m;m;m;m;o;\"{\"gran\": \"args\", \"application\": \"args\", \"environment\": \"args\", \"testelem\": \"args\", \"variant\": \"args\"}\";;;{objtype}dir;{objtype}base;info;{job.par.wsdir}/{log}/log_{job.start}.txt\n"
        csvText += ";init_testsuite;ts;m;o;m;m;o;\"{\"gran\": \"testsuite\", \"application\": \"args\", \"environment\": \"args\", \"testsuite\": \"args\", \"variant\": \"args\"}\";envparfile;tsparfile;tsdir;tsbase;info;{job.par.tsdir}/{log}/{job.program}_{job.start}.txt\n"
        data = f.parseCsv(job.m, job, csvText.split("\n"), ttype="")
        """self.assertIn("_type", data)
        self.assertIn("programs", data)
        self.assertIn("polid", data["police"])
        self.assertIn("format", data["police"]["polid"])
        self.assertIn("int", data["police"]["objid"]["format"])
        """
        text = fy.dump_file(data)
        #print(str(data))
        #print(text)
        logPath = os.path.join("/home/ulrich/workspace/testprojekt/temp/log_test.txt")
        logger = open(logPath, "w")
        logger.write(csvText)
        job.m.logInfo(csvText)
        job.m.logInfo("----------------------------------------")
        logger.write("----------------------------------------\n")
        #self.assertRegex(result, r"_type;"+context)
        result = f.buildCsv(None, job, data, ttype=context)
        #self.assertNotIn("_type", result)
        logger.write(result)
        logger.write("----------------------------------------\n")
        logger.write(text)
        #job.m.logInfo(result)
        #job.m.logInfo("----------------------------------------")
        #job.m.logInfo(text)
        logger.close()
        self.assertEqual(csvText, result)
    def test_01tdata(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = test.testtools.getWorkspaceJob(PROGRAM_NAME)
        setattr(job.par, "tdtyp", "dir")
        setattr(job.par, "tdsrc", "TC0001")
        setattr(job.par, "tdname", "testspec")
        filename = str(job.conf["paths"]["testdata"]) + "/" + getattr(job.par, "tdsrc") + "/" + getattr(job.par, "tdname") + ".csv"
        #tdata = f.readCsv(job.m, filename, None)
        #self.assertEqual(len(tdata["testa1"]), 3)
        setattr(job.par, "tdtyp", "dir")
        setattr(job.par, "tdsrc", "TST001")
        #tdata = f.getTestdata()
        #self.assertEqual(("steps" in tdata), True)
        MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)
    def test_02isBlock(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = test.testtools.getWorkspaceJob(PROGRAM_NAME)
        f = toolHandling.getFileTool(job, None, "csv")
        res = f.isBlock(job.m, job, "_variants", D.CSV_BLOCK_SUBTABLES, "status")
        self.assertEqual(True, res)
        res = f.isBlock(job.m, job, "_description", D.CSV_BLOCK_SUBTABLES, B.DATA_NODE_SUBTABLES)
        self.assertEqual(True, res)
        res = f.isBlock(job.m, job, "_type", D.CSV_BLOCK_ATTR, "status")
        self.assertEqual(True, res)  # with any attribute it is true - open for new attributes
        res = f.isBlock(job.m, job, "", D.CSV_BLOCK_ATTR, "status")
        self.assertEqual(False, res)  # without any attribute it is False
        res = f.isBlock(job.m, job, "_xyz", D.CSV_BLOCK_ATTR, "status")
        self.assertEqual(True, res)  # with any attribute it is true - open for new attributes
        res = f.isBlock(job.m, job, "head:name", D.CSV_BLOCK_OPTION, "status")
        self.assertEqual(False, res)
        res = f.isBlock(job.m, job, "option:name", D.CSV_BLOCK_OPTION, "status")
        self.assertEqual(True, res)
        res = f.isBlock(job.m, job, ":name", D.CSV_BLOCK_OPTION, "option")
        self.assertEqual(True, res)
        res = f.isBlock(job.m, job, "table:name", D.CSV_BLOCK_OPTION, "option")
        self.assertEqual(False, res)
    def test_16getCsvSpec_data(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = test.testtools.getWorkspaceJob(PROGRAM_NAME)
        # filename = os.path.join(job.conf["paths"]["testdata"], getattr(job.par, "tdsrc"), getattr(job.par, "tdname") + ".csv")
        """
        a) data : like a table with data-array of key-value-pairs
        a_0 is keyword [option, step, CSV_HEADER_START ]
        a_0 : { a_1 : { f_1 : v_1, .... } # option, step
        a_0 : { .. a_n : { _header : [ .. ], _data : [ rows... ] # table, node
        """
        tests = ["malformated", "comments", D.CSV_BLOCK_OPTION, D.CSV_BLOCK_STEP, B.DATA_NODE_TABLES, D.SUB_TABLES]
        tests = [D.SUB_TABLES]
        f = toolHandling.getFileTool(job, None, "csv")
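        # Illustration (an assumption for orientation, not asserted as a whole by any branch):
        # for the spec type "data", parseCsv presumably returns a dict roughly of the shape
        #   { D.DATA_ATTR_TYPE: "data",
        #     D.CSV_BLOCK_OPTION: { <name>: <value>, ... },
        #     B.DATA_NODE_STEPS: { B.DATA_NODE_HEADER: [...], B.DATA_NODE_DATA: [ <step-row>, ... ] },
        #     B.DATA_NODE_TABLES: { <comp>: { <table>: { B.DATA_NODE_HEADER: [...], B.DATA_NODE_DATA: [...] } } } }
        # Each branch below exercises one of these block kinds.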
        if "comments" in tests:
            specLines = [
                ";;;;;;",
                "#;;;;;;"
            ]
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_DATA)
            self.assertEqual(1, len(tdata))
            cnttest += 1
        if "malformated" in tests:
            malformat = "option;arg;;;;;"
            specLines = [
                "option:par;arg;;;;;",
                malformat,
                "#option:nopar;arg;;;;;",
                "#;;;;;;"
            ]
            self.assertRaises(Exception, f.parseCsv, (job.m, job, specLines, D.CSV_SPECTYPE_DATA))
            cnttest += 1
            malformat = "step;component;1;arg:val;;;;;"
            specLines = [
                "step:1;component;1;arg:val;;;",
                malformat
            ]
            # TODO: ordering is not evaluated yet
            # self.assertRaises(D.EXCP_MALFORMAT+malformat, f.parseCsvSpec, (job.m, specLines, D.CSV_SPECTYPE_DATA))
            malformat = "step:2;component;1;arg;;;;;"
            specLines = [
                "step:1;component;1;arg:val;;;",
                malformat
            ]
            self.assertRaises(Exception, f.parseCsv, (job.m, job, specLines, D.CSV_SPECTYPE_DATA))
            cnttest += 1
            specLines = [
                "option:par;arg;;;;;",
                "#option:nopar;arg;;;;;",
                "#;;;;;;"
            ]
        if D.SUB_TABLES in tests:
            specLines = [
                "_stories;something;;;;;",
                "_stories-description;something-one;;;;;",
                "#;;;;;;"
            ]
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_DATA)
            self.assertEqual(2, len(tdata))
            print(tdata)
            self.assertIn(D.CSV_BLOCK_OPTION, tdata)
            cnttest += 2
        if D.CSV_BLOCK_OPTION in tests:
            specLines = [
                "option:description;something;;;;;",
                "#;;;;;;"
            ]
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_DATA)
            self.assertEqual(2, len(tdata))
            print(tdata)
            self.assertIn(D.CSV_BLOCK_OPTION, tdata)
            cnttest += 2
        if D.CSV_BLOCK_STEP in tests:
            specLines = [
                # "step:1;testa;1;1;table:_lofts,action:import;;;;;",
                "step:header;_nr;variant;data;program;comp;args;;;;;;",
                ";1;testa;person:1;execute_testcase;testrest;action:import;;;;;",
                "#;;;;;;"
            ]
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_DATA)
            print(tdata)
            self.assertEqual(2, len(tdata))
            self.assertIn(B.DATA_NODE_STEPS, tdata)
            self.assertIsInstance(tdata[B.DATA_NODE_STEPS], dict)
            self.assertIsInstance(tdata[B.DATA_NODE_STEPS][B.DATA_NODE_DATA], list)
            cnttest += 3
            for step in tdata[B.DATA_NODE_STEPS][B.DATA_NODE_DATA]:
                print(step)
                self.assertIn(B.DATA_NODE_COMP, step)
                self.assertIn(B.DATA_NODE_ARGS, step)
                cnttest += 3
            specLines = [
                "step:header;_nr;variant;data;program;comp;args;;;;;;",
                ";1;testa;person:1;execute_testcase;testrest;action:import;var:xyz;;;;"
            ]
            tdata = {}
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_DATA)
            print(tdata)
            self.assertEqual(2, len(tdata))
            self.assertIn(B.DATA_NODE_STEPS, tdata)
            self.assertIsInstance(tdata[B.DATA_NODE_STEPS], dict)
            self.assertEqual(2, len(tdata[B.DATA_NODE_STEPS][B.DATA_NODE_DATA][0][B.DATA_NODE_ARGS]))
            cnttest += 3
            text = f.buildCsv(job.m, job, tdata)
            spez = "_type;data\n"+self.stripDelimiter(specLines)
            self.assertEqual(spez, text)
            print(text)
        if B.DATA_NODE_TABLES in tdata:
            specLines = [
                "table:testa:lofts;_nr;street;city;zip;state;beds;baths;sqft;type;price;latitude;longitude",
                "testa:lofts;1;stra;town;12345;usa;4;1;50;house;111;45;8",
                "#;;;;;;"
            ]
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_DATA)
            print(tdata)
            self.assertEqual(2, len(tdata))
            self.assertIn(B.DATA_NODE_TABLES, tdata)
            self.assertIsInstance(tdata[B.DATA_NODE_TABLES], dict)
            cnttest += 3
            for k in tdata[B.DATA_NODE_TABLES]["testa"]:
                table = tdata[B.DATA_NODE_TABLES]["testa"][k]
                self.assertIn(B.DATA_NODE_HEADER, table)
                self.assertIn(B.DATA_NODE_DATA, table)
                cnttest += 2
        if B.DATA_NODE_TABLES in tests:
            specLines = [
                "option:description;create 2 new contracts;;;;",
                "# ;;;;;",
                "# ;component;exec;_nr;action;args;;",
                "step:1;testrest;2;1;function:xml-rest;action:new;;",
                "step:2;testrest;3;1,2;function:json-rest;action:new;;",
                "# ;;;;;",
                "# testdate only here specified;expect:row 2 is inserted as precond;;;;",
                "_date;01.07.2022;;;;",
                "table:person;_nr;famname;name;birth;sex",
                "testrest:person;1;Brecht;Bert;10.02.98;m",
                "testrest:person,testcrmdb:person;2;Leon;Donna;28.09.42;f"
            ]
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_DATA)
            print(tdata)
        MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)
    def stripDelimiter(self, lines):
        """strips comment lines and trailing semicolons so that spec lines can be
        compared with the output of buildCsv"""
        out = ""
        for l in lines:
            if len(l) > 0 and l[0:1] == "#":
                continue
            while len(l) > 1 and l[-1:] == ";":
                l = l[:-1]
            out += "\n" + l
        if len(out) > 0:
            out = out[1:]
        return out
    def test_13getCsvSpec_tree(self):
        # TODO: tree structure for properties
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = test.testtools.getJob()
        """
        b) tree : as a tree - the rows must be unique identified by the first column
        a_0 is keyword in CSV_HEADER_START
        a_0 : { .. a_n : { _header : [ fields.. ], _data : { field : value }
        """
        specLines = [
            "_type;tree;;;;;",
            "table:usecae;usecase;executer;nr;subcase;must;args;",
            ";Meldung_einspielen;Mock;1;Meldung_aufbereiten;must:;;",
            ";Meldung_einspielen;Mock;2;Meldung_senden;must:;;",
            ";Meldung_einspielen;Mock;3;Batche_starten_stopen;must:;;",
            ";Meldung_aufbereiten;Mock;1;Daten_lesen;must:;;",
            ";Meldung_aufbereiten;Mock;2;Daten_mappen;must:;;",
            ";Meldung_aufbereiten;Mock;3;Anfrage_schreiben;must:;;",
            ";Meldung_senden;Mock;1;cli;must:;;",
            ";Batche_starten_stopen;Mock;1;api;must:;;"
        ]
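        # Sketch of the expected tree result for the spec above (an assumption derived from the
        # docstring; nothing is asserted yet, the test is still a stub):
        #   { "usecase": { "Meldung_einspielen": { B.DATA_NODE_HEADER: ["executer", "nr", "subcase", ...],
        #                                          B.DATA_NODE_DATA: { "executer": "Mock", ... } },
        #                  "Meldung_aufbereiten": { ... }, ... } }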
        job = test.testtools.getWorkspaceJob(PROGRAM_NAME)
        MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)
    def test_14getCsvSpec_key(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = test.testtools.getWorkspaceJob(PROGRAM_NAME)
        """
        c) keys : as a tree - the rows must be unique identified by the first column
        a_0 is keyword in CSV_HEADER_START
        a_1 ... a_n is key characterized by header-field like _fk* or _pk*
        a_0 : { .. a_n : { _keys : [ _fpk*.. ] , _header : [ fields.. ], _data : { pk_0 : { ... pk_n : { field : value }
        """
        tests = ["malformated", "comments", B.DATA_NODE_TABLES]
        # create the file tool once so that every branch can use it
        f = toolHandling.getFileTool(job, None, "csv")
        if "comments" in tests:
            specLines = [
                ";;;;;;",
                "#;;;;;;"
            ]
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_KEYS)
            self.assertEqual(1, len(tdata))
            cnttest += 1
        if "malformated" in tests:
            malformat = "table;key;;;;;"
            specLines = [
                malformat,
                "#;;;;;;"
            ]
            self.assertRaises(Exception, f.parseCsv, (job.m, job, specLines, D.CSV_SPECTYPE_KEYS))
            cnttest += 1
        if B.DATA_NODE_TABLES in tests:
            specLines = [
                "table:capital;key;val;;;;",
                ";athens;;;;;",
                ";berlin;;;;;",
                ";cairo;;;;;"
            ]
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_KEYS)
            print(str(tdata))
            self.assertEqual(2, len(tdata))
            self.assertEqual(1, len(tdata["_tables"]))
            self.assertEqual(3, len(tdata["_tables"]["capital"]))
            self.assertEqual(3, len(tdata["_tables"]["capital"]["_keys"]))
            cnttest += 4
            specLines = [
                "table:capital;key;val;;;;",
                ";athens;;;;;",
                ";berlin;;;;;",
                "table:country;key;val;;;;",
                ";greece;;;;;",
                ";germany;;;;;"
            ]
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_KEYS)
            #tdata = f.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_TREE)
            print(str(tdata))
            self.assertEqual(2, len(tdata))
            self.assertIn("capital", tdata["_tables"])
            self.assertEqual(2, len(tdata["_tables"]))
            self.assertEqual(3, len(tdata["_tables"]["country"]))
            self.assertEqual(2, len(tdata["_tables"]["country"]["_keys"]))
            cnttest += 4
        MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)
    def test_15getCsvSpec_conf(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = test.testtools.getWorkspaceJob(PROGRAM_NAME)
        f = toolHandling.getFileTool(job, None, "csv")
        """
        d) conf:
        _type : conf
        _header : [ field_0, ... ]
        { field_0 : { attr_0 : val_0, .. },
          field_1 : { ... }, ... }
        """
        specLinesA = [
            "table:lofts;_field;field;type;acceptance;key",
            "lofts;street;a;str;;T:1",
            ";city;b;str;;F:1",
            "#;;;;;;"
        ]
        specLinesB = [
            "_type;conf",
            "table:lofts;_field;field;type;acceptance;key",
            ";street;a;str;;T:1",
            ";city;b;str;;F:1",
            ""
        ]
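        # Sketch of the conf structure expected for specLinesA/specLinesB (an assumption based
        # on the docstring and the assertions below):
        #   { D.DATA_ATTR_TYPE: "conf",
        #     "lofts": { B.DATA_NODE_HEADER: ["_field", "field", "type", "acceptance", "key"],
        #                "street": { "field": "a", "type": "str", "key": "T:1" },
        #                "city": { "field": "b", "type": "str", "key": "F:1" } } }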
        tdata = f.parseCsv(job.m, job, specLinesA, D.CSV_SPECTYPE_CONF)
        self.assertEqual(2, len(tdata))
        self.assertEqual(D.CSV_SPECTYPE_CONF, tdata[D.DATA_ATTR_TYPE])
        self.assertIn("lofts", tdata)
        self.assertEqual("_field;field;type;acceptance;key", ";".join(tdata["lofts"][B.DATA_NODE_HEADER]))
        tdata = f.parseCsv(job.m, job, specLinesB, "")
        print(tdata)
        self.assertEqual(2, len(tdata))
        self.assertEqual(D.CSV_SPECTYPE_CONF, tdata[D.DATA_ATTR_TYPE])
        self.assertNotIn(B.DATA_NODE_TABLES, tdata)
        self.assertIn("lofts", tdata)
        self.assertEqual("_field;field;type;acceptance;key", ";".join(tdata["lofts"][B.DATA_NODE_HEADER]))
        cnttest += 3
        table = tdata["lofts"]
        self.assertIn(B.DATA_NODE_HEADER, table)
        self.assertNotIn(B.DATA_NODE_DATA, table)
        cnttest += 2
        returnLines = f.buildCsv(job.m, job, tdata, D.CSV_SPECTYPE_CONF)
        print("returnLines:")
        print(returnLines)
        self.assertEqual("\n".join(specLinesB), returnLines)
        MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)
    def test_06parseCsv(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = test.testtools.getWorkspaceJob(PROGRAM_NAME)
        f = toolHandling.getFileTool(job, None, "csv")
        cm = basic.componentHandling.ComponentManager.getInstance(job)
        componentName = "testcrmdb"
        confs = tools.config_tool.getConfig(job, "comp", componentName)
        conns = tools.conn_tool.getConnections(job, componentName)
        comp = cm.createInstance(componentName, None, confs, conns, 1)
        fileLines = [
            "table:person;_nr;famname;name;birth;sex",
            "testcrmdb:person;1;Brecht;Bert;10.02.98;m",
            "testcrmdb:person;2;Leon;Donna;28.09.42;f",
            "#;;;;;;"
        ]
        filename = tools.path_tool.rejoinPath(tools.path_tool.composePath(job, P.P_TCBASE, comp), "t_person.csv")
        tdata = f.parseCsv(comp.m, job, filename, fileLines, comp, aliasNode="")
        print(str(tdata))
        self.assertIn(B.DATA_NODE_TABLES, tdata)
        self.assertIn("person", tdata[B.DATA_NODE_TABLES])
        self.assertEqual(2, len(tdata[B.DATA_NODE_TABLES]["person"][B.DATA_NODE_DATA]))
        cnttest += 3
        fileLines = [
            "_date;27.06.2022",
            "_count;2",
            "table:person;_nr;famname;name;birth;sex",
            "testcrmdb:person;1;Brecht;Bert;10.02.98;m",
            "testcrmdb:person;2;Leon;Donna;28.09.42;f",
            "#;;;;;;"
        ]
        tdata = f.parseCsv(comp.m, job, filename, fileLines, comp, aliasNode="")
        self.assertIn(B.DATA_NODE_TABLES, tdata)
        self.assertIn("person", tdata[B.DATA_NODE_TABLES])
        self.assertEqual(2, len(tdata[B.DATA_NODE_TABLES]["person"][B.DATA_NODE_DATA]))
        cnttest += 3
        filename = tools.path_tool.rejoinPath(tools.path_tool.composePath(job, P.P_TCRESULT, comp), "person.csv")
        fileLines = [
            "_date;27.06.2022",
            "_count;2",
            "persid;famname;name;birth;sex",
            "1;Brecht;Bert;10.02.98;m",
            "2;Leon;Donna;28.09.42;f",
            "#;;;;;;"
        ]
        tdata = f.parseCsv(comp.m, job, filename, fileLines, comp, aliasNode="")
        self.assertIn(B.DATA_NODE_TABLES, tdata)
        self.assertIn("person", tdata[B.DATA_NODE_TABLES])
        self.assertEqual(2, len(tdata[B.DATA_NODE_TABLES]["person"][B.DATA_NODE_DATA]))
        cnttest += 3
        text = ""
        for k in tdata[B.DATA_NODE_TABLES]:
            print("---------\n"+str(tdata))
            text += f.buildCsvData(tdata[B.DATA_NODE_TABLES], k, comp, job)
            text += "\n"
        print(text)
        MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)
    def test_zzz(self):
        print(MyTestCase.mymsg)
if __name__ == '__main__':
    unittest.main()