Ulrich
2 years ago
19 changed files with 918 additions and 53 deletions
@@ -0,0 +1,519 @@
import unittest
import inspect
import tools.filecsv_fcts
import basic.constants as B
import basic.toolHandling as toolHandling
import basic.componentHandling
import tools.data_const as D
import tools.path_const as P
import tools.config_tool
import tools.conn_tool
import test.testtools
import test.constants
import basic.program
import tools.path_tool
import tools.file_tool
import os

HOME_PATH = test.constants.HOME_PATH
DATA_PATH = test.constants.DATA_PATH
OS_SYSTEM = test.constants.OS_SYSTEM
""" |
|||
a) catalog: key(s) - values # meta-spec, meta-auto |
|||
b) head: key - value # spec-info |
|||
c) option: key - value # spec -> job.par |
|||
d) step: key=function - values # spec (tp, ts) -> comp.function |
|||
e) step: key=usecase - values # spec (tc) -> comp.steps |
|||
f) ddl-table: key=field - vaulues=attributes # meta-spec, comp |
|||
g) data-table: array: field - values # spec.data, comp.artifacts |
|||
""" |

# the list of TEST_FUNCTIONS defines which functions will actually be tested;
# if you shorten the list, only the selected test functions are checked
TEST_FUNCTIONS = ["test_11ddl", "test_12catalog",
                  "test_02getCsvSpec_data", "test_03getCsvSpec_tree", "test_04getCsvSpec_key",
                  "test_05getCsvSpec_conf", "test_06parseCsv"]
TEST_FUNCTIONS = ["test_12catalog"]
TEST_FUNCTIONS = ["test_11ddl"]
PROGRAM_NAME = "clean_workspace"

# with this variable you can switch prints on and off
verbose = False

class MyTestCase(unittest.TestCase):
    mymsg = "--------------------------------------------------------------"


    def test_11ddl(self):
        global mymsg
        context = D.CSV_SPECTYPE_DDL
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        # job = basic.program.SimpleJob(PROGRAM_NAME)
        job = test.testtools.getWorkspaceJob(PROGRAM_NAME)
        f = toolHandling.getFileTool(job, None, "csv")
        fy = toolHandling.getFileTool(job, None, "yaml")
        csvText = "_type;"+context+";;;;;\n"
        csvText += "table:police;_field;comment;format;acceptance;generate;nullable\n"
        csvText += ";polid;;int;ignore;auto-id;n\n"
        csvText += ";polnr;;string;;auto-id;n\n"
        csvText += ";polvers;;int;;build-id;n\n"
        csvText += ";persid;;int;;ref-person;n\n"
        csvText += ";objid;;int;;ref-object;n\n"
        csvText += ";amount;;dec;;range;n\n"
        data = f.parseCsv(job.m, job, csvText.split("\n"), ttype="")
        self.assertIn("_type", data)
        self.assertIn("police", data)
        self.assertIn("polid", data["police"])
        self.assertIn("format", data["police"]["polid"])
        self.assertIn("int", data["police"]["objid"]["format"])
        csvText = "table:police;_field;comment;format;acceptance;generate;nullable\n"
        csvText += ";polid;;int;ignore;auto-id;n\n"
        csvText += ";polnr;;string;;auto-id;n\n"
        csvText += ";polvers;;int;;build-id;n\n"
        csvText += ";persid;;int;;ref-person;n\n"
        csvText += ";objid;;int;;ref-object;n\n"
        csvText += ";amount;;dec;;range;n\n"
        data = f.parseCsv(job.m, job, csvText.split("\n"), ttype=context)
        self.assertIn("_type", data)
        self.assertIn("police", data)
        self.assertIn("polid", data["police"])
        self.assertIn("format", data["police"]["polid"])
        self.assertIn("int", data["police"]["objid"]["format"])
        text = fy.dump_file(data)
        #print(str(data))
        #print(text)
        job.m.logInfo(csvText)
        job.m.logInfo("----------------------------------------")
        result = f.buildCsv(job.m, job, data, ttype="")
        self.assertRegex(result, r"_type;"+context)
        result = f.buildCsv(job.m, job, data, ttype=D.CSV_SPECTYPE_DDL)
        self.assertNotIn("_type", result)
        job.m.logInfo(result)
        job.m.logInfo("----------------------------------------")
        job.m.logInfo(text)

    def test_12catalog(self):
        global mymsg
        context = D.CSV_SPECTYPE_CTLG
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = basic.program.SimpleJob(PROGRAM_NAME)
        f = toolHandling.getFileTool(job, None, "csv")
        fy = toolHandling.getFileTool(job, None, "yaml")
        csvText = "_type;ctlg;;;;;;;;;;;;;\n"
        csvText += "_key;name;;;;;;;;;;;;;\n"
        csvText += "table:programs;name;objtype;objname;time;env;app;variant;pardef;pfilesource;pfiletarget;dirname;basedir;loglevel;logpath\n"
        csvText += ";test_executer;tp,ts,tc;m;m;m;m;o;\"{\"gran\": \"args\", \"application\": \"args\", \"environment\": \"args\", \"testelem\": \"args\", \"variant\": \"args\"}\";;;{objtype}dir;{objtype}base;info;{job.par.wsdir}/{log}/log_{job.start}.txt\n"
        csvText += ";init_testsuite;ts;m;o;m;m;o;\"{\"gran\": \"testsuite\", \"application\": \"args\", \"environment\": \"args\", \"testsuite\": \"args\", \"variant\": \"args\"}\";envparfile;tsparfile;tsdir;tsbase;info;{job.par.tsdir}/{log}/{job.program}_{job.start}.txt\n"
        data = f.parseCsv(job.m, job, csvText.split("\n"), ttype="")
        """self.assertIn("_type", data)
        self.assertIn("programs", data)
        self.assertIn("polid", data["police"])
        self.assertIn("format", data["police"]["polid"])
        self.assertIn("int", data["police"]["objid"]["format"])
        """
        text = fy.dump_file(data)
        #print(str(data))
        #print(text)
        logPath = os.path.join("/home/ulrich/workspace/testprojekt/temp/log_test.txt")
        logger = open(logPath, "w")
        logger.write(csvText)
        job.m.logInfo(csvText)
        job.m.logInfo("----------------------------------------")
        logger.write("----------------------------------------\n")
        #self.assertRegex(result, r"_type;"+context)
        result = f.buildCsv(None, job, data, ttype=context)
        #self.assertNotIn("_type", result)
        logger.write(result)
        logger.write("----------------------------------------\n")
        logger.write(text)
        #job.m.logInfo(result)
        #job.m.logInfo("----------------------------------------")
        #job.m.logInfo(text)
        logger.close()
        self.assertEqual(csvText, result)

    def test_01tdata(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = basic.program.SimpleJob(PROGRAM_NAME)
        setattr(job.par, "tdtyp", "dir")
        setattr(job.par, "tdsrc", "TC0001")
        setattr(job.par, "tdname", "testspec")
        filename = str(job.conf["paths"]["testdata"]) + "/" + getattr(job.par, "tdsrc") + "/" + getattr(job.par, "tdname") + ".csv"
        #tdata = f.readCsv(job.m, filename, None)
        #self.assertEqual(len(tdata["testa1"]), 3)
        setattr(job.par, "tdtyp", "dir")
        setattr(job.par, "tdsrc", "TST001")
        #tdata = f.getTestdata()
        #self.assertEqual(("steps" in tdata), True)
        MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)

    def test_02isBlock(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = basic.program.SimpleJob(PROGRAM_NAME)
        f = toolHandling.getFileTool(job, None, "csv")
        res = f.isBlock(job.m, job, "_type", D.CSV_BLOCK_ATTR, "status")
        self.assertEqual(True, res)
        res = f.isBlock(job.m, job, "", D.CSV_BLOCK_ATTR, "status")
        self.assertEqual(True, res)
        res = f.isBlock(job.m, job, "head:name", D.CSV_BLOCK_OPTION, "status")
        self.assertEqual(False, res)
        res = f.isBlock(job.m, job, "option:name", D.CSV_BLOCK_OPTION, "status")
        self.assertEqual(True, res)
        res = f.isBlock(job.m, job, ":name", D.CSV_BLOCK_OPTION, "option")
        self.assertEqual(True, res)
        res = f.isBlock(job.m, job, "table:name", D.CSV_BLOCK_OPTION, "option")
        self.assertEqual(False, res)


    def test_02getCsvSpec_data(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = test.testtools.getJob()
        # filename = os.path.join(job.conf["paths"]["testdata"], getattr(job.par, "tdsrc"), getattr(job.par, "tdname") + ".csv")
        """
        a) data : like a table with data-array of key-value-pairs
        a_0 is keyword [option, step, CSV_HEADER_START ]
        a_0 : { a_1 : { f_1 : v_1, .... } # option, step
        a_0 : { .. a_n : { _header : [ .. ], _data : [ rows... ] # table, node
        """
        tests = ["malformated", "comments", D.CSV_BLOCK_OPTION, D.CSV_BLOCK_STEP, B.DATA_NODE_TABLES]
        if "comments" in tests:
            specLines = [
                ";;;;;;",
                "#;;;;;;"
            ]
            f = toolHandling.getFileTool(job, None, "csv")
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_DATA)
            self.assertEqual(0, len(tdata))
            cnttest += 1
        if "malformated" in tests:
            malformat = "option;arg;;;;;"
            specLines = [
                "option:par;arg;;;;;",
                malformat,
                "#option:nopar;arg;;;;;",
                "#;;;;;;"
            ]
            self.assertRaises(Exception, f.parseCsv, job.m, job, specLines, D.CSV_SPECTYPE_DATA)
            cnttest += 1
            malformat = "step;component;1;arg:val;;;;;"
            specLines = [
                "step:1;component;1;arg:val;;;",
                malformat
            ]
            # TODO sorting is not evaluated yet
            # self.assertRaises(D.EXCP_MALFORMAT+malformat, f.parseCsvSpec, (job.m, specLines, D.CSV_SPECTYPE_DATA))
            malformat = "step:2;component;1;arg;;;;;"
            specLines = [
                "step:1;component;1;arg:val;;;",
                malformat
            ]
            self.assertRaises(Exception, f.parseCsv, job.m, job, specLines, D.CSV_SPECTYPE_DATA)
            cnttest += 1
        specLines = [
            "option:par;arg;;;;;",
            "#option:nopar;arg;;;;;",
            "#;;;;;;"
        ]
        if D.CSV_BLOCK_OPTION in tests:
            specLines = [
                "option:description;something;;;;;",
                "#;;;;;;"
            ]
            tdata = f.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_DATA, {}, job)
            self.assertEqual(1, len(tdata))
            print(tdata)
            self.assertIn(D.CSV_BLOCK_OPTION, tdata)
            cnttest += 2
        if D.CSV_BLOCK_STEP in tests:
            specLines = [
                "step:1;testa;1;1;table:_lofts,action:import;;;;;",
                "#;;;;;;"
            ]
            tdata = f.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_DATA, {}, job)
            print(tdata)
            self.assertEqual(1, len(tdata))
            self.assertIn(B.DATA_NODE_STEPS, tdata)
            self.assertIsInstance(tdata[B.DATA_NODE_STEPS], list)
            cnttest += 3
            for step in tdata[B.DATA_NODE_STEPS]:
                print(step)
                self.assertEqual(hasattr(step, B.DATA_NODE_COMP), True)
                # self.assertEqual(hasattr(step, B.ATTR_DATA_REF), True)
                self.assertEqual(hasattr(step, B.ATTR_STEP_ARGS), True)
                cnttest += 3
            specLines = [
                "step:1;testa;1;1;table:_lofts;action:export;;;;;",
                "#;;;;;;"
            ]
            tdata = {}
            tdata = f.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_DATA, {}, job)
            print(tdata)
            self.assertEqual(1, len(tdata))
            self.assertIn(B.DATA_NODE_STEPS, tdata)
            self.assertIsInstance(tdata[B.DATA_NODE_STEPS], list)
            self.assertEqual(2, len(tdata[B.DATA_NODE_STEPS][0].args))
            cnttest += 3
        if B.DATA_NODE_TABLES in tests:
            specLines = [
                "table:testa:lofts;_nr;street;city;zip;state;beds;baths;sqft;type;price;latitude;longitude",
                "testa:lofts;1;stra;town;12345;usa;4;1;50;house;111;45;8",
                "#;;;;;;"
            ]
            tdata = f.parseCsvSpec(job.m, specLines, B.DATA_NODE_TABLES, {}, job)
            print(tdata)
            self.assertEqual(1, len(tdata))
            self.assertIn(B.DATA_NODE_TABLES, tdata)
            self.assertIsInstance(tdata[B.DATA_NODE_TABLES], dict)
            cnttest += 3
            for k in tdata[B.DATA_NODE_TABLES]["testa"]:
                table = tdata[B.DATA_NODE_TABLES]["testa"][k]
                self.assertIn(B.DATA_NODE_HEADER, table)
                self.assertIn(B.DATA_NODE_DATA, table)
                cnttest += 2

        if B.DATA_NODE_TABLES in tests:
            specLines = [
                "option:description;create 2 new contracts;;;;",
                "# ;;;;;",
                "# ;component;exec;_nr;action;args;;",
                "step:1;testrest;2;1;function:xml-rest;action:new;;",
                "step:2;testrest;3;1,2;function:json-rest;action:new;;",
                "# ;;;;;",
                "# testdate only here specified;expect:row 2 is inserted as precond;;;;",
                "_date;01.07.2022;;;;",
                "table:person;_nr;famname;name;birth;sex",
                "testrest:person;1;Brecht;Bert;10.02.98;m",
                "testrest:person,testcrmdb:person;2;Leon;Donna;28.09.42;f"
            ]
            tdata = f.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_DATA, {}, job)
            print(tdata)
        MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)


    def test_03getCsvSpec_tree(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = test.testtools.getJob()
        """
        b) tree : as a tree - the rows must be uniquely identified by the first column
        a_0 is keyword in CSV_HEADER_START
        a_0 : { .. a_n : { _header : [ fields.. ], _data : { field : value }
        """
        specLines = [
            "_type;tree;;;;;",
            "table:usecae;usecase;executer;nr;subcase;must;args;",
            ";Meldung_einspielen;Mock;1;Meldung_aufbereiten;must:;;",
            ";Meldung_einspielen;Mock;2;Meldung_senden;must:;;",
            ";Meldung_einspielen;Mock;3;Batche_starten_stopen;must:;;",
            ";Meldung_aufbereiten;Mock;1;Daten_lesen;must:;;",
            ";Meldung_aufbereiten;Mock;2;Daten_mappen;must:;;",
            ";Meldung_aufbereiten;Mock;3;Anfrage_schreiben;must:;;",
            ";Meldung_senden;Mock;1;cli;must:;;",
            ";Batche_starten_stopen;Mock;1;api;must:;;"
        ]
        MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)


    def test_04getCsvSpec_key(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = test.testtools.getJob()
        """
        c) keys : as a tree - the rows must be uniquely identified by the first column
        a_0 is keyword in CSV_HEADER_START
        a_1 ... a_n is key characterized by header-field like _fk* or _pk*
        a_0 : { .. a_n : { _keys : [ _fpk*.. ] , _header : [ fields.. ], _data : { pk_0 : { ... pk_n : { field : value }
        """
        tests = ["malformated", "comments", B.DATA_NODE_TABLES]
        if "comments" in tests:
            specLines = [
                ";;;;;;",
                "#;;;;;;"
            ]
            f = toolHandling.getFileTool(job, None, "csv")
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_CONF)
            self.assertEqual(0, len(tdata))
            cnttest += 1
        if "malformated" in tests:
            malformat = "table;key;;;;;"
            specLines = [
                malformat,
                "#;;;;;;"
            ]
            self.assertRaises(Exception, f.parseCsv, job.m, job, specLines, D.CSV_SPECTYPE_KEYS)
            cnttest += 1
        if B.DATA_NODE_TABLES in tests:
            specLines = [
                "table:capital;key;val;;;;",
                ";athens;;;;;",
                ";berlin;;;;;",
                ";cairo;;;;;"
            ]
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_KEYS)
            print(str(tdata))
            self.assertEqual(1, len(tdata))
            self.assertEqual(1, len(tdata["_tables"]))
            self.assertEqual(4, len(tdata["_tables"]["capital"]))
            self.assertEqual(3, len(tdata["_tables"]["capital"]["_keys"]))
            cnttest += 4
            specLines = [
                "table:capital;key;val;;;;",
                ";athens;;;;;",
                ";berlin;;;;;",
                "table:country;key;val;;;;",
                ";greece;;;;;",
                ";germany;;;;;"
            ]
            tdata = f.parseCsv(job.m, job, specLines, D.CSV_SPECTYPE_KEYS)
            #tdata = f.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_TREE)
            print(str(tdata))
            self.assertEqual(1, len(tdata))
            self.assertIn("capital", tdata["_tables"])
            self.assertEqual(2, len(tdata["_tables"]))
            self.assertEqual(4, len(tdata["_tables"]["country"]))
            self.assertEqual(2, len(tdata["_tables"]["country"]["_keys"]))
            cnttest += 4
        MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)


    def test_05getCsvSpec_conf(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = basic.program.SimpleJob(PROGRAM_NAME)
        # job = test.testtools.getJob()
        f = toolHandling.getFileTool(job, None, "csv")
        """
        d) conf:
           _type : conf
           _header : [ field_0, ... ]
           { field_0 : { attr_0 : val_0, .. },
             field_1 : { ... }, ... }
        """
        specLinesA = [
            "table:lofts;_field;field;type;acceptance;key",
            "lofts;street;a;str;;T:1",
            ";city;b;str;;F:1",
            "#;;;;;;"
        ]
        specLinesB = [
            "_type;conf;;;;;;",
            "table:lofts;_field;field;type;acceptance;key",
            "lofts;street;a;str;;T:1",
            ";city;b;str;;F:1",
            "#;;;;;;"
        ]
        tdata = f.parseCsv(job.m, job, specLinesA, D.CSV_SPECTYPE_CONF)
        self.assertEqual(2, len(tdata))
        self.assertEqual(D.CSV_SPECTYPE_CONF, tdata[D.DATA_ATTR_TYPE])
        self.assertIn("lofts", tdata)
        self.assertEqual("_field;field;type;acceptance;key", ";".join(tdata["lofts"][B.DATA_NODE_HEADER]))
        tdata = f.parseCsv(job.m, job, specLinesB, "")
        print(tdata)
        self.assertEqual(2, len(tdata))
        self.assertEqual(D.CSV_SPECTYPE_CONF, tdata[D.DATA_ATTR_TYPE])
        self.assertNotIn(B.DATA_NODE_TABLES, tdata)
        self.assertIn("lofts", tdata)
        self.assertEqual("_field;field;type;acceptance;key", ";".join(tdata["lofts"][B.DATA_NODE_HEADER]))
        cnttest += 3
        table = tdata["lofts"]
        self.assertIn(B.DATA_NODE_HEADER, table)
        self.assertNotIn(B.DATA_NODE_DATA, table)
        cnttest += 2
        returnLines = f.buildCsv(job.m, job, tdata, D.CSV_SPECTYPE_CONF)
        print("returnLines:")
        print(returnLines)
        MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)


    def test_06parseCsv(self):
        global mymsg
        actfunction = str(inspect.currentframe().f_code.co_name)
        cnttest = 0
        if actfunction not in TEST_FUNCTIONS:
            return
        job = test.testtools.getJob()
        f = toolHandling.getFileTool(job, None, "csv")
        cm = basic.componentHandling.ComponentManager.getInstance(job)
        componentName = "testcrmdb"
        confs = tools.config_tool.getConfig(job, "comp", componentName)
        conns = tools.conn_tool.getConnections(job, componentName)
        comp = cm.createInstance(componentName, None, confs, conns, 1)
        fileLines = [
            "table:person;_nr;famname;name;birth;sex",
            "testcrmdb:person;1;Brecht;Bert;10.02.98;m",
            "testcrmdb:person;2;Leon;Donna;28.09.42;f",
            "#;;;;;;"
        ]
        filename = tools.path_tool.rejoinPath(tools.path_tool.composePath(job, P.P_TCBASE, comp), "t_person.csv")
        tdata = f.parseCsv(comp.m, job, filename, fileLines, comp, aliasNode="")
        print(str(tdata))
        self.assertIn(B.DATA_NODE_TABLES, tdata)
        self.assertIn("person", tdata[B.DATA_NODE_TABLES])
        self.assertEqual(2, len(tdata[B.DATA_NODE_TABLES]["person"][B.DATA_NODE_DATA]))
        cnttest += 3
        fileLines = [
            "_date;27.06.2022",
            "_count;2",
            "table:person;_nr;famname;name;birth;sex",
            "testcrmdb:person;1;Brecht;Bert;10.02.98;m",
            "testcrmdb:person;2;Leon;Donna;28.09.42;f",
            "#;;;;;;"
        ]
        tdata = f.parseCsv(comp.m, job, filename, fileLines, comp, aliasNode="")
        self.assertIn(B.DATA_NODE_TABLES, tdata)
        self.assertIn("person", tdata[B.DATA_NODE_TABLES])
        self.assertEqual(2, len(tdata[B.DATA_NODE_TABLES]["person"][B.DATA_NODE_DATA]))
        cnttest += 3
        filename = tools.path_tool.rejoinPath(tools.path_tool.composePath(job, P.P_TCRESULT, comp), "person.csv")
        fileLines = [
            "_date;27.06.2022",
            "_count;2",
            "persid;famname;name;birth;sex",
            "1;Brecht;Bert;10.02.98;m",
            "2;Leon;Donna;28.09.42;f",
            "#;;;;;;"
        ]
        tdata = f.parseCsv(comp.m, job, filename, fileLines, comp, aliasNode="")
        self.assertIn(B.DATA_NODE_TABLES, tdata)
        self.assertIn("person", tdata[B.DATA_NODE_TABLES])
        self.assertEqual(2, len(tdata[B.DATA_NODE_TABLES]["person"][B.DATA_NODE_DATA]))
        cnttest += 3
        text = ""
        for k in tdata[B.DATA_NODE_TABLES]:
            print("---------\n"+str(tdata))
            text += f.buildCsvData(tdata[B.DATA_NODE_TABLES], k, comp, job)
            text += "\n"
        print(text)
        MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)


    def test_zzz(self):
        print(MyTestCase.mymsg)


if __name__ == '__main__':
    unittest.main()
@@ -0,0 +1,35 @@
import json
import re

import yaml

import basic.program
import tools.file_abstract
import basic.constants as B
import tools.data_const as D
import tools.file_tool
from basic import toolHandling


class FileFcts(tools.file_abstract.FileFcts):

    def __init__(self):
        pass

    def load_file(self, path):
        """
        this function reads the yaml file and translates it to a dict
        :param path:
        :return:
        """
        with open(path, 'r', encoding="utf-8") as file:
            doc = yaml.full_load(file)
        return doc

    def dump_file(self, data, path=""):
        if path == "":
            return yaml.dump(data)
        with open(path, 'w', encoding="utf-8") as file:
            yaml.dump(data, file)
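
# Minimal usage sketch (assumed call pattern, mirroring how the test file in this
# changeset obtains file tools via toolHandling.getFileTool; the path is only an
# example value):
#
#   fy = toolHandling.getFileTool(job, None, "yaml")
#   text = fy.dump_file({"a": 1})           # path == "" -> returns a YAML string
#   fy.dump_file({"a": 1}, "/tmp/x.yaml")   # path given -> writes the YAML file
#   data = fy.load_file("/tmp/x.yaml")      # -> dict parsed from the YAML file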