import unittest import inspect import utils.tdata_tool as t import basic.constants as B import utils.data_const as D import utils.path_const as P import utils.config_tool import test.testtools import test.constants import basic.program import utils.path_tool import utils.file_tool import os HOME_PATH = test.constants.HOME_PATH DATA_PATH = test.constants.DATA_PATH OS_SYSTEM = test.constants.OS_SYSTEM # the list of TEST_FUNCTIONS defines which function will be really tested. # if you minimize the list you can check the specific test-function TEST_FUNCTIONS = ["test_01tdata", "test_02getCsvSpec_data", "test_03getCsvSpec_tree", "test_04getCsvSpec_key", "test_05getCsvSpec_conf", "test_06parseCsv"] # TEST_FUNCTIONS = ["test_getCsvSpec_data"] # with this variable you can switch prints on and off verbose = False class MyTestCase(unittest.TestCase): mymsg = "--------------------------------------------------------------" def test_01tdata(self): global mymsg actfunction = str(inspect.currentframe().f_code.co_name) cnttest = 0 if actfunction not in TEST_FUNCTIONS: return job = test.testtools.getJob() setattr(job.par, "tdtyp", "dir") setattr(job.par, "tdsrc", "TC0001") setattr(job.par, "tdname", "testspec") filename = str(job.conf.confs["paths"]["testdata"]) + "/" + getattr(job.par, "tdsrc") + "/" + getattr(job.par, "tdname") + ".csv" #tdata = t.readCsv(job.m, filename, None) #self.assertEqual(len(tdata["testa1"]), 3) setattr(job.par, "tdtyp", "dir") setattr(job.par, "tdsrc", "TST001") #tdata = t.getTestdata() #self.assertEqual(("steps" in tdata), True) MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest) def test_02getCsvSpec_data(self): global mymsg actfunction = str(inspect.currentframe().f_code.co_name) cnttest = 0 if actfunction not in TEST_FUNCTIONS: return job = test.testtools.getJob() # filename = os.path.join(job.conf.confs["paths"]["testdata"], getattr(job.par, "tdsrc"), getattr(job.par, "tdname") + ".csv") """ a) data : like a table with data-array of key-value-pairs a_0 is keyword [option, step, CSV_HEADER_START ] a_0 : { a_1 : { f_1 : v_1, .... } # option, step a_0 : { .. a_n : { _header : [ .. ], _data : [ rows... ] # table, node """ tests = ["malformated", "comments", D.CSV_BLOCK_OPTION, D.CSV_BLOCK_STEP, B.DATA_NODE_TABLES] if "comments" in tests: specLines = [ ";;;;;;", "#;;;;;;" ] tdata = t.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_DATA) self.assertEqual(0, len(tdata)) cnttest += 1 if "malformated" in tests: malformat = "option;arg;;;;;" specLines = [ "option:par;arg;;;;;", malformat, "#option:nopar;arg;;;;;", "#;;;;;;" ] self.assertRaises(Exception, t.parseCsvSpec, (job.m, specLines, D.CSV_SPECTYPE_DATA)) cnttest += 1 malformat = "step;component;1;arg:val;;;;;" specLines = [ "step:1;component;1;arg:val;;;", malformat ] # TODO sortierung nicht ausgwertet # self.assertRaises(D.EXCP_MALFORMAT+malformat, t.parseCsvSpec, (job.m, specLines, D.CSV_SPECTYPE_DATA)) malformat = "step:2;component;1;arg;;;;;" specLines = [ "step:1;component;1;arg:val;;;", malformat ] self.assertRaises(Exception, t.parseCsvSpec, (job.m, specLines, D.CSV_SPECTYPE_DATA)) cnttest += 1 specLines = [ "option:par;arg;;;;;", "#option:nopar;arg;;;;;", "#;;;;;;" ] if D.CSV_BLOCK_OPTION in tests: specLines = [ "option:description;something;;;;;", "#;;;;;;" ] tdata = t.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_DATA) self.assertEqual(1, len(tdata)) print(tdata) self.assertIn(D.CSV_BLOCK_OPTION, tdata) cnttest += 2 if D.CSV_BLOCK_STEP in tests: specLines = [ "step:1;testa;1;1;table:_lofts,action:import;;;;;", "#;;;;;;" ] tdata = t.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_DATA) print(tdata) self.assertEqual(1, len(tdata)) self.assertIn(B.DATA_NODE_STEPS, tdata) self.assertIsInstance(tdata[B.DATA_NODE_STEPS], list) cnttest += 3 for step in tdata[B.DATA_NODE_STEPS]: print(step) self.assertEqual(hasattr(step, B.DATA_NODE_COMP), True) # self.assertEqual(hasattr(step, B.ATTR_DATA_REF), True) self.assertEqual(hasattr(step, B.ATTR_STEP_ARGS), True) cnttest += 3 specLines = [ "step:1;testa;1;1;table:_lofts;action:export;;;;;", "#;;;;;;" ] tdata = {} tdata = t.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_DATA) print(tdata) self.assertEqual(1, len(tdata)) self.assertIn(B.DATA_NODE_STEPS, tdata) self.assertIsInstance(tdata[B.DATA_NODE_STEPS], list) self.assertEqual(2, len(tdata[B.DATA_NODE_STEPS][0].args)) cnttest += 3 if B.DATA_NODE_TABLES in tests: specLines = [ "table:testa:lofts;_nr;street;city;zip;state;beds;baths;sqft;type;price;latitude;longitude", "testa:lofts;1;stra;town;12345;usa;4;1;50;house;111;45;8", "#;;;;;;" ] tdata = t.parseCsvSpec(job.m, specLines, B.DATA_NODE_TABLES) print(tdata) self.assertEqual(1, len(tdata)) self.assertIn(B.DATA_NODE_TABLES, tdata) self.assertIsInstance(tdata[B.DATA_NODE_TABLES], dict) cnttest += 3 for k in tdata[B.DATA_NODE_TABLES]["testa"]: table = tdata[B.DATA_NODE_TABLES]["testa"][k] self.assertIn(B.DATA_NODE_HEADER, table) self.assertIn(B.DATA_NODE_DATA, table) cnttest += 2 if B.DATA_NODE_TABLES in tests: specLines = [ "option:description;create 2 new contracts;;;;", "# ;;;;;", "# ;component;exec;_nr;action;args;;", "step:1;testrest;2;1;function:xml-rest;action:new;;", "step:2;testrest;3;1,2;function:json-rest;action:new;;", "# ;;;;;", "# testdate only here specified;expect:row 2 is inserted as precond;;;;", "_date;01.07.2022;;;;", "table:person;_nr;famname;name;birth;sex", "testrest:person;1;Brecht;Bert;10.02.98;m", "testrest:person,testcrmdb:person;2;Leon;Donna;28.09.42;f" ] tdata = t.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_DATA) print(tdata) MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest) def test_03getCsvSpec_tree(self): global mymsg actfunction = str(inspect.currentframe().f_code.co_name) cnttest = 0 if actfunction not in TEST_FUNCTIONS: return job = test.testtools.getJob() """" b) tree : as a tree - the rows must be unique identified by the first column a_0 is keyword in CSV_HEADER_START a_0 : { .. a_n : { _header : [ fields.. ], _data : { field : value } """ MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest) def test_04getCsvSpec_key(self): global mymsg actfunction = str(inspect.currentframe().f_code.co_name) cnttest = 0 if actfunction not in TEST_FUNCTIONS: return job = test.testtools.getJob() """" c) keys : as a tree - the rows must be unique identified by the first column a_0 is keyword in CSV_HEADER_START a_1 ... a_n is key characterized by header-field like _fk* or _pk* a_0 : { .. a_n : { _keys : [ _fpk*.. ] , _header : [ fields.. ], _data : { pk_0 : { ... pk_n : { field : value } """ tests = ["malformated", "comments", B.DATA_NODE_TABLES] if "comments" in tests: specLines = [ ";;;;;;", "#;;;;;;" ] tdata = t.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_KEYS) self.assertEqual(0, len(tdata)) cnttest += 1 if "malformated" in tests: malformat = "table;key;;;;;" specLines = [ malformat, "#;;;;;;" ] self.assertRaises(Exception, t.parseCsvSpec, (job.m, specLines, D.CSV_SPECTYPE_KEYS)) cnttest += 1 if B.DATA_NODE_TABLES in tests: specLines = [ "table:capital;key;val;;;;", ";athens;;;;;", ";berlin;;;;;", ";cairo;;;;;" ] tdata = t.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_KEYS) print(str(tdata)) self.assertEqual(1, len(tdata)) self.assertEqual(1, len(tdata["_tables"])) self.assertEqual(3, len(tdata["_tables"]["capital"])) self.assertEqual(3, len(tdata["_tables"]["capital"]["_keys"])) cnttest += 4 specLines = [ "table:capital;key;val;;;;", ";athens;;;;;", ";berlin;;;;;", "table:country;key;val;;;;", ";greece;;;;;", ";germany;;;;;" ] tdata = t.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_KEYS) #tdata = t.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_TREE) print(str(tdata)) self.assertEqual(1, len(tdata)) self.assertIn("capital", tdata["_tables"]) self.assertEqual(2, len(tdata["_tables"])) self.assertEqual(3, len(tdata["_tables"]["country"])) self.assertEqual(2, len(tdata["_tables"]["country"]["_keys"])) cnttest += 4 MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest) def test_05getCsvSpec_conf(self): global mymsg actfunction = str(inspect.currentframe().f_code.co_name) cnttest = 0 if actfunction not in TEST_FUNCTIONS: return job = test.testtools.getJob() """" d) conf: _header : [ field_0, ... ] { field_0 : { attr_0 : val_0, .. }, field_1 : { ... }, ... } """ specLines = [ "table:lofts;_field;field;type;acceptance;key", "lofts;street;a;str;;T:1", ";city;b;str;;F:1", "#;;;;;;" ] tdata = t.parseCsvSpec(job.m, specLines, D.CSV_SPECTYPE_CONF) print(tdata) self.assertEqual(1, len(tdata)) self.assertNotIn(B.DATA_NODE_TABLES, tdata) self.assertIn("lofts", tdata) cnttest += 3 table = tdata["lofts"] self.assertIn(B.DATA_NODE_HEADER, table) self.assertNotIn(B.DATA_NODE_DATA, table) cnttest += 2 MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest) def test_06parseCsv(self): global mymsg actfunction = str(inspect.currentframe().f_code.co_name) cnttest = 0 if actfunction not in TEST_FUNCTIONS: return job = test.testtools.getJob() cm = basic.componentHandling.ComponentManager.getInstance("J") componentName = "testcrmdb" confs = utils.config_tool.getConfig("comp", componentName) conns = utils.conn_tool.getConnections(componentName) comp = cm.createInstance(componentName, None, confs, conns, 1) fileLines = [ "table:person;_nr;famname;name;birth;sex", "testcrmdb:person;1;Brecht;Bert;10.02.98;m", "testcrmdb:person;2;Leon;Donna;28.09.42;f", "#;;;;;;" ] filename = utils.path_tool.rejoinPath(utils.path_tool.composePath(P.P_TCBASE, comp), "t_person.csv") tdata = t.parseCsv(comp.m, filename, fileLines, comp, aliasNode="") print(str(tdata)) self.assertIn(B.DATA_NODE_TABLES, tdata) self.assertIn("person", tdata[B.DATA_NODE_TABLES]) self.assertEqual(2, len(tdata[B.DATA_NODE_TABLES]["person"][B.DATA_NODE_DATA])) cnttest += 3 fileLines = [ "_date;27.06.2022", "_count;2", "table:person;_nr;famname;name;birth;sex", "testcrmdb:person;1;Brecht;Bert;10.02.98;m", "testcrmdb:person;2;Leon;Donna;28.09.42;f", "#;;;;;;" ] tdata = t.parseCsv(comp.m, filename, fileLines, comp, aliasNode="") self.assertIn(B.DATA_NODE_TABLES, tdata) self.assertIn("person", tdata[B.DATA_NODE_TABLES]) self.assertEqual(2, len(tdata[B.DATA_NODE_TABLES]["person"][B.DATA_NODE_DATA])) cnttest += 3 filename = utils.path_tool.rejoinPath(utils.path_tool.composePath(P.P_TCRESULT, comp), "person.csv") fileLines = [ "_date;27.06.2022", "_count;2", "persid;famname;name;birth;sex", "1;Brecht;Bert;10.02.98;m", "2;Leon;Donna;28.09.42;f", "#;;;;;;" ] tdata = t.parseCsv(comp.m, filename, fileLines, comp, aliasNode="") self.assertIn(B.DATA_NODE_TABLES, tdata) self.assertIn("person", tdata[B.DATA_NODE_TABLES]) self.assertEqual(2, len(tdata[B.DATA_NODE_TABLES]["person"][B.DATA_NODE_DATA])) cnttest += 3 text = "" for k in tdata[B.DATA_NODE_TABLES]: text += t.buildCsvData(filename, tdata[B.DATA_NODE_TABLES][k], comp) text += "\n" print(text) MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest) def test_zzz(self): print(MyTestCase.mymsg) if __name__ == '__main__': unittest.main()