
some comments with constants

branch: master
Ulrich Carmesin, 3 years ago
commit 5caa3f52c7
15 changed files:

    basic/componentHandling.py   (11)
    basic/constants.py           (71)
    basic/message.py              (4)
    basic/program.py             (19)
    basic/toolHandling.py        (14)
    components/component.py       (8)
    test_run.py                   (0)
    utils/config_tool.py         (10)
    utils/db_abstract.py         (19)
    utils/dbcsv_tool.py          (21)
    utils/dbmysql_tool.py        (14)
    utils/dbspark_tool.py        (35)
    utils/match_tool.py          (10)
    utils/path_tool.py           (12)
    utils/tdata_tool.py          (18)

basic/componentHandling.py (11)

@@ -1,3 +1,9 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
 # managing the components
 # -----------------------------------------------------------------------------
 """
@@ -16,6 +22,7 @@ import basic.message
 import components.component
 import importlib
 import copy
+import basic.constants as B
 comps = {}
 PARAM_NOSUBNODE = ["artifact", "components", "instance"]
@@ -127,7 +134,7 @@ class ComponentManager:
 c = class_()
 c.name = name
 c.conf = confs["conf"]
-c.conf["conn"] = conns[0]
+c.conf[B.SUBJECT_CONN] = conns[0]
 c.m = basic.message.Message(basic.message.LIMIT_DEBUG, "logTime", name)
 c.init()
 print("createComponent 4 a " + componentName)
@@ -148,7 +155,7 @@ class ComponentManager:
 c = class_()
 c.name = name
 c.conf = confs["conf"]
-c.conf["conn"] = conns[0]
+c.conf[B.SUBJECT_CONN] = conns[0]
 c.m = basic.message.Message(basic.message.LIMIT_DEBUG, "logTime", name)
 c.init()
 print("createComponent 4 b " + componentName)

basic/constants.py (71)

@@ -1,17 +1,84 @@
-#
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
+"""
+The constants desribes the keywords of the main datastructures, these are
+* the configurations of
+  * basic-configuration for global variables in job - stored in external conf-folder
+  * comp-configuration for component-attributes - stored in internal component-folder in
+    + ddl-configuration of an entity of the component as attributes - stored in internal component-folder
+    + environment-config for connection-attributes of the component - stored in external environment-folder
+  * tool-configuration
+* test-specification with testdata - stored in external testdata-folder
+* the internal datastructure
+"""
 # the internal datastructure is a tree with this design:
 # root { : constant
 #  + option { : constant
 #  +
 #  + steps
 #  + comp { : variable component-name
 #  +  + substructure { : variable maybe scheme, table of a database-component
 #  +  +  + _header [ : constant
 #  -  fields : variable field-name
+DATA_NODE_HEADER = "_header"
+""" This constant defines a subnode of a table for the column-names """
 DATA_NODE_DATA = "_data"
+""" This constant defines a subnode of a table for the data which are set as key-value-pair with the column-names as key """
+DATA_NODE_STEPS = "_steps"
+""" This constant defines the main node in the testdata for the steps to execute """
+DATA_NODE_OPTION = "_option"
+""" This constant defines main node in the testdata for testcase specific parameters """
+DATA_NODE_DDL = "ddl"
+""" This constant defines """
 # the configuration of a component or tool
 # entity { : variable name of the group, basic, component-name or tool-name
 #  + subject { : variable subject-name - it correspondends to a tool
 #  +  + sub-subject { : variable subject-name - it correspondends to a tool
 #  +  attributes : constant of the tool in which the attribute ist implemented
-# the main subjects
+# the main subjects                  # prog basic envir tool comp testcase | main implentation module
+SUBJECT_PATH = "paths"               # | x |  | x |  | path_tool, config_tool
+""" This constant defines the subject in order to define paths of filesystem of any testuse """
+ATTR_PATH_MODE = "mode"
+""" This constant defines the home-folder in filesystem of test """
+ATTR_PATH_HOME = "home"
+""" This constant defines the home-folder in testing-filesystem """
+ATTR_PATH_DEBUG = "debugs"
+""" This constant defines the debug-folder in testing-filesystem """
+ATTR_PATH_ARCHIV = "archiv"
+""" This constant defines the archiv-folder in testing-filesystem """
+ATTR_PATH_SPECIAL = "results"
+""" This constant defines the debug-folder in testing-filesystem """
+ATTR_PATH_PROGRAM = "program"
+""" This constant defines the program-folder in testing-filesystem """
+ATTR_PATH_ENV = "environment"
+""" This constant defines the environment-folder in testing-filesystem, used for configs related to environments """
+ATTR_PATH_RELEASE = "release"
+""" This constant defines the release-folder in testing-filesystem, used for configs related to release """
+ATTR_PATH_TDATA = "testdata"
+""" This constant defines the testdata-folder in testing-filesystem with the testcase-specifications """
+ATTR_PATH_PATTN = "pattern"
+""" This constant defines the debug-folder in testing-filesystem """
+SUBJECT_APPS = "applications"        # | x |  |  |  | CompHandling
+SUBJECT_INST = "instance"            # |  |  |  | x | CompHanlding
+ATTR_COUNT = "count"                 # |  |  |  | x | CompHanlding
+SUBJECT_FCT = "function"             # |  |  |  | x | main-programs
+SUBJECT_ARTS = "artifact"            # |  |  |  | x | Component
+ATTR_ARTS_DB = "db"
+ATTR_ARTS_LOG = "log"
+ATTR_ARTS_LOB = "lob"
+SUBJECT_DB = "databases"             # |  |  |  | # | db*_tools, match_tool
+SUBJECT_CONN = "conn"                # |  | x |  |  | conn_tool, db*_tools, cli*_toold
+ATTR_CONN_DBTYPE = "dbtype"          # | x | x |  | x | conn_tool, toolHandling, db*_tools
+ATTR_CONN_CLITYPE = "clitype"        # | x | x |  | x | conn_tool, toolHandling, cli*_tools
+RULE_ACCEPTANCE = "acceptance"       # |  |  |  | x | tools_match
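
Note: the comment block above describes the internal data tree (root -> component -> substructure -> _header/_data). A rough, standalone illustration of that tree using the new node constants; the component name "testdb", the table "person" and the row values are made up for the example:

    DATA_NODE_HEADER = "_header"     # values taken from basic/constants.py
    DATA_NODE_DATA = "_data"

    tdata = {
        "testdb": {                       # comp : variable component-name
            "person": {                   # substructure, e.g. a table of a database-component
                DATA_NODE_HEADER: ["id", "name"],
                DATA_NODE_DATA: [{"id": 1, "name": "Alice"}],
            }
        }
    }
    print(tdata["testdb"]["person"][DATA_NODE_DATA][0]["name"])   # Alice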

basic/message.py (4)

@@ -58,8 +58,8 @@ class Message:
 self.componente = componente # dezantrales Logsystem
 #self.level = level # Vorgabe zu level zum Filtern, ob auszugebe
 job = basic.program.Job.getInstance()
-print(vars(job))
-print(globals())
+# print(vars(job))
+# print(globals())
 # exit(5)
 verify = LIMIT_DEBUG
 self.initErrorTyp()

basic/program.py (19)

@@ -1,11 +1,16 @@
-#!/usr/bin/python3
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
 # Template Batchrahmen
 #
 #import sys, getopt
 import argparse
 import copy
-import yaml
+import yaml, os
 from datetime import datetime
 import basic.message
@@ -198,7 +203,7 @@ class Job:
 def getTraceLevel(self, elem):
 return self.getMessageLevel("trace", elem)
 def debug(self, prio, text):
-print("job.debug "+str(prio)+" "+text)
+#print("job.debug "+str(prio)+" "+text)
 if hasattr(self, "m"):
 self.m.debug(prio, text)
 else:
@@ -318,6 +323,10 @@ class Configuration:
 self.program = program
 print (f"job initialisiert {self.program}")
 if program == "unit":
+if (os.path.exists('../conf/basis.yml')):
+self.setConfiguration('../conf/basis.yml')
+return
+elif (os.path.exists('conf/basis.yml')):
 self.setConfiguration('../conf/basis.yml')
 return
 self.setConfiguration('conf/basis.yml')
@@ -326,6 +335,10 @@ class Configuration:
 self.confs = {}
 with open(path, "r") as file:
 doc = yaml.full_load(file)
+if "basic" in doc:
+for i, v in doc["basic"].items():
+self.confs[i] = v
+else:
 for i, v in doc.items():
 self.confs[i] = v
 def setConfig(self, path, val):
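
Note: setConfiguration now accepts a basis.yml that either nests its entries under a top-level "basic" node or lists them directly. A small standalone sketch of that branching; the inline YAML document is a made-up example, not the project's real basis.yml:

    import yaml

    doc = yaml.full_load("""
    basic:
      paths:
        home: /home/tester
    """)

    confs = {}
    if "basic" in doc:                     # new layout: children of "basic" become the conf entries
        for i, v in doc["basic"].items():
            confs[i] = v
    else:                                  # old layout: the document itself holds the entries
        for i, v in doc.items():
            confs[i] = v
    print(confs["paths"]["home"])          # /home/tester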

basic/toolHandling.py (14)

@@ -1,6 +1,12 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
 import importlib, os
 import basic.program
+import basic.constants as B
 # -------------------------------------------------
 def hasAttr(o, name):
@@ -24,9 +30,9 @@ Toolmanager
 def getDbTool(comp):
 job = basic.program.Job.getInstance()
 verify = int(job.getDebugLevel("db_tool"))
-if not hasAttr(comp.conf["conn"], "dbtype"):
-if hasAttr(comp.conf["conn"], "types") and hasAttr(comp.conf["conn"]["types"], "dbtype"):
-dbtype = comp.conf["conn"]["types"]["dbtype"]
+if not hasAttr(comp.conf[B.SUBJECT_CONN], "dbtype"):
+if hasAttr(comp.conf[B.SUBJECT_CONN], "types") and hasAttr(comp.conf[B.SUBJECT_CONN]["types"], "dbtype"):
+dbtype = comp.conf[B.SUBJECT_CONN]["types"]["dbtype"]
 else:
 raise LookupError("dbtype is not set in comp " + comp.name)
 else:
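
Note: getDbTool looks the database type up under the connection node, falling back to a "types" subnode. A simplified standalone sketch of that lookup (plain dict checks instead of the project's hasAttr helper; the config values are invented):

    SUBJECT_CONN = "conn"                                   # mirrors basic.constants

    conf = {SUBJECT_CONN: {"types": {"dbtype": "mysql"}}}   # hypothetical component config

    conn = conf[SUBJECT_CONN]
    if "dbtype" in conn:
        dbtype = conn["dbtype"]
    elif "types" in conn and "dbtype" in conn["types"]:
        dbtype = conn["types"]["dbtype"]                    # fallback used in this example
    else:
        raise LookupError("dbtype is not set in comp")
    print(dbtype)   # mysql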

components/component.py (8)

@@ -1,3 +1,9 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
 # abstract class for instance components
 # ---------------------------------------------------------------------
 from datetime import datetime
@@ -23,7 +29,7 @@ class CompData:
 #class Component(components.sysmonitor.SystemMonitor, components.testexec.Testexecuter, components.report.Report,
 # components.maintain.Maintainer, components.catalog, threading.Thread):
-class Component():
+class Component(components.testexec.Testexecuter):
 """
 A component represents an application of the system-under-test or a data-artifact which is created from the system-under-test.
 As the representation it has to knowlegde of the url, which other components depends on this component.

test_run.py (0)

utils/config_tool.py (10)

@@ -1,6 +1,12 @@
-#
-# --------------------------------------------------------------
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
 import sys
+import basic.constants as B
 try:
 import basic.program
 except ImportError:

utils/db_abstract.py (19)

@@ -1,5 +1,12 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
 import basic.program
 import utils.config_tool
+import basic.constants as B
 import os
 class DbFcts():
@@ -19,7 +26,7 @@ DbFcts():
 statement written in sql """
 self.loadDdl()
 tdata = {}
-for t in self.comp.conf["ddl"]:
+for t in self.comp.conf[B.DATA_NODE_DDL]:
 tdata[t] = self.selectRows(t)
 return tdata
@@ -32,7 +39,7 @@
 """ method to delete rows from a database
 statement written in sql """
 self.loadDdl()
-for t in self.comp.conf["ddl"]:
+for t in self.comp.conf[B.DATA_NODE_DDL]:
 print("zu loeschende Tabelle "+t)
 self.deleteRows(t)
@@ -59,7 +66,7 @@
 for t in self.comp.conf["ddl"]:
 print("einzufuegende Tabelle "+t)
 if (t in tdata[plainname]):
-self.insertRows(t, tdata[plainname][t]["_data"])
+self.insertRows(t, tdata[plainname][t][B.DATA_NODE_DATA])
 def insertRows(self, rows):
 """ method to insert rows into a database
@@ -74,12 +81,12 @@
 def loadDdl(self):
 job = basic.program.Job.getInstance()
-if ("ddl" in self.comp.conf):
+if (B.DATA_NODE_DDL in self.comp.conf):
 return
 conf = utils.config_tool.getConfig("DATASTRUCTURE", self.comp.name)
-self.comp.conf["ddl"] = {}
+self.comp.conf[B.DATA_NODE_DDL] = {}
 for k in conf[self.comp.name]:
-self.comp.conf["ddl"][k] = conf[self.comp.name][k]
+self.comp.conf[B.DATA_NODE_DDL][k] = conf[self.comp.name][k]
 return conf[self.comp.name]
 def getWhere(self):
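
Note: loadDdl caches the component's data structure under comp.conf["ddl"] (now addressed via B.DATA_NODE_DDL), one entry per table, and the select/delete loops iterate that cache. A standalone sketch of that caching; the table name and the stand-in for the DATASTRUCTURE config are invented:

    DATA_NODE_DDL = "ddl"                                    # mirrors basic.constants

    comp_conf = {}
    datastructure = {"person": {"fields": ["id", "name"]}}   # stand-in for config_tool.getConfig output

    if DATA_NODE_DDL not in comp_conf:                       # load only once
        comp_conf[DATA_NODE_DDL] = {}
        for k in datastructure:
            comp_conf[DATA_NODE_DDL][k] = datastructure[k]

    for t in comp_conf[DATA_NODE_DDL]:                       # the select/delete loops iterate the cached tables
        print("table", t)                                    # table person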

utils/dbcsv_tool.py (21)

@@ -1,7 +1,12 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
 import basic.program
 import utils.config_tool
 import utils.db_abstract
-import mysql.connector
 class DbFcts(utils.db_abstract.DbFcts):
@@ -65,19 +70,7 @@ class DbFcts(utils.db_abstract.DbFcts):
 """ add-on-method to get the connector
 this method should only called by the class itself """
 job = basic.program.Job.getInstance()
-mydb = mysql.connector.connect(
-host = "localhost",
-user = "datest",
-password = "Advent!2021",
-database = "datest"
-)
-return mysql
-@staticmethod
-def execStatement(self, comp, conn, statement):
-""" add-on-method to execute the statement
-this method should only called by the class itself """
-raise Exception("method is not implemented")
+return ""

14
utils/dbmysql_tool.py

@ -1,8 +1,14 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------------------------------
# Author : Ulrich Carmesin
# Source : gitea.ucarmesin.de
# ---------------------------------------------------------------------------------------------------------
import basic.program import basic.program
import utils.config_tool import utils.config_tool
import utils.db_abstract import utils.db_abstract
import mysql.connector import mysql.connector
import basic.constants as B
class DbFcts(utils.db_abstract.DbFcts): class DbFcts(utils.db_abstract.DbFcts):
""" """
@ -23,9 +29,9 @@ class DbFcts(utils.db_abstract.DbFcts):
#mycursor = self.getConnector() #mycursor = self.getConnector()
#mycursor.execute(cmd) #mycursor.execute(cmd)
#myresult = mycursor.fetchall() #myresult = mycursor.fetchall()
tdata["_header"] = [] tdata[B.DATA_NODE_HEADER] = []
for f in self.comp.conf["ddl"][table]["fields"]: for f in self.comp.conf["ddl"][table][B.DATA_NODE_HEADER]:
tdata["_header"].append(f) tdata[B.DATA_NODE_HEADER].append(f)
myresult = [] myresult = []
for x in myresult: for x in myresult:
print(x) print(x)
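
Note: besides the constant rename, the loop now reads the field names from the "_header" subnode of the table's ddl instead of the "fields" key. A standalone sketch of the resulting header construction; the ddl content is invented:

    DATA_NODE_HEADER = "_header"                           # mirrors basic.constants

    ddl = {"person": {DATA_NODE_HEADER: ["id", "name"]}}   # hypothetical ddl of one table
    tdata = {DATA_NODE_HEADER: []}
    for f in ddl["person"][DATA_NODE_HEADER]:
        tdata[DATA_NODE_HEADER].append(f)
    print(tdata)   # {'_header': ['id', 'name']}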

utils/dbspark_tool.py (35)

@@ -1,11 +1,14 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
 import basic.program
 import utils.config_tool
 import utils.db_abstract
 import pyspark
-import basic.constants
-DATA_NODE_HEADER = basic.constants.DATA_NODE_HEADER
-DATA_NODE_DATA = basic.constants.DATA_NODE_DATA
+import basic.constants as B
 class DbFcts(utils.db_abstract.DbFcts):
 """
@@ -20,15 +23,15 @@ class DbFcts(utils.db_abstract.DbFcts):
 tdata = {}
 job = basic.program.Job.getInstance()
 verify = -1+job.getDebugLevel("db_tool")
-cmd = "SELECT "+self.comp.conf["ddl"][table]["fields"].join(",")\
-+" FROM "+table+""+self.getWhere()+""+self.getOrder()";"
+cmd = "SELECT "+",".join(self.comp.conf["ddl"][table]["fields"])
+cmd += " FROM "+table+""+self.getWhere()+""+self.getOrder()
 spark = self.getConnector()
 df = spark.sql(cmd)
 data = []
 for r in df:
 data.append(r)
-tdata[DATA_NODE_HEADER] = self.comp.conf["ddl"][table]["fields"]
-tdata[DATA_NODE_DATA] = data
+tdata[B.DATA_NODE_HEADER] = self.comp.conf["ddl"][table]["fields"]
+tdata[B.DATA_NODE_DATA] = data
 return tdata
 def deleteRows(self, table):
@@ -39,11 +42,6 @@ class DbFcts(utils.db_abstract.DbFcts):
 cmd = "DELETE FROM "+table+";"
 self.comp.m.logInfo(cmd)
-def updateRows(self, statement):
-""" method to delete rows from a database
-statement written in sql """
-raise Exception("method is not implemented")
 def insertRows(self, table, rows):
 """ method to insert rows into a database
 the rows will be interpreted by the ddl of the component
@@ -59,17 +57,12 @@ class DbFcts(utils.db_abstract.DbFcts):
 """ add-on-method to get the connector
 this method should only called by the class itself """
 job = basic.program.Job.getInstance()
-spark = pyspark.SparkSession.builder()\
-.master("local[1]")\
-.appName("SparkByExamples.com")\
+spark = pyspark.SparkSession\
+.builder\
+.appName("datest")\
 .getOrCreate()
 return spark
-@staticmethod
-def execStatement(self, comp, conn, statement):
-""" add-on-method to execute the statement
-this method should only called by the class itself """
-raise Exception("method is not implemented")
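
Note: the selectRows change also fixes a bug: the old code called .join on the field list (which raises AttributeError) and was missing a "+" before the final semicolon. A standalone sketch of the corrected statement building; table, fields and the where/order stand-ins are invented:

    fields = ["id", "name"]
    table = "person"
    where = " WHERE id > 0"     # stand-in for self.getWhere()
    order = " ORDER BY id"      # stand-in for self.getOrder()

    cmd = "SELECT " + ",".join(fields)          # join() is called on the separator, not the list
    cmd += " FROM " + table + where + order
    print(cmd)   # SELECT id,name FROM person WHERE id > 0 ORDER BY id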

utils/match_tool.py (10)

@@ -1,7 +1,13 @@
-#
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
 import json
 import utils.css_tool
 import basic.program
+import basic.constants as B
 # ------------------------------------------------------------
 """
@@ -366,7 +372,7 @@ def compareRows(matching, path):
 htmltext = "<p>Tabelle : "+table+"</p>"
 htmltext += "<table>"
 htmltext += "<tr><th></th>"
-for f in ddl["_header"]:
+for f in ddl[B.DATA_NODE_HEADER]:
 job.debug(verify, "ddl " + f + " ")
 header.append({ "field": f, "type": ddl[f]["type"], "acceptance": ddl[f]["acceptance"]})
 htmltext += "<th>"+f+"</th>"

utils/path_tool.py (12)

@@ -1,11 +1,15 @@
-# All functions related to the full path.
-# It implements the team conventions of the directory structure
-# -----------------------------------------------------------------------
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
 """ In diesem Modul werden alle Funktionen zusammengefasst zur Generierung und Ermittlung von pathsn """
 import sys
 import basic.program
 import utils.config_tool
 import re
+import basic.constants as B
 def getKeyValue(key):
@@ -17,7 +21,7 @@ def getKeyValue(key):
 neu = job.getParameter(key[8:])
 return neu
 elif 'job.conf' in key:
-neu = job.conf.confs["paths"][key[9:]]
+neu = job.conf.confs[B.SUBJECT_PATH][key[9:]]
 job.debug(verify, neu)
 return neu
 # return job.conf.confs["paths"][key[9:]]
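
Note: the 'job.conf' branch of getKeyValue strips the key prefix and looks the remainder up under the "paths" subject of the job configuration. A standalone sketch of that resolution; the key and the configuration values are invented:

    SUBJECT_PATH = "paths"                                  # mirrors basic.constants

    confs = {SUBJECT_PATH: {"home": "/home/tester", "testdata": "/home/tester/tdata"}}

    key = "job.conf.home"
    if 'job.conf' in key:
        neu = confs[SUBJECT_PATH][key[9:]]                  # key[9:] drops the "job.conf." prefix
    print(neu)   # /home/tester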

utils/tdata_tool.py (18)

@@ -1,5 +1,9 @@
-#
-# ---------------------------------------------------------------------
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# ---------------------------------------------------------------------------------------------------------
+# Author : Ulrich Carmesin
+# Source : gitea.ucarmesin.de
+# ---------------------------------------------------------------------------------------------------------
 """
 the issue of this tool is to transform extern data to the internal structure and the internal structure into extern data - i.e. mostly test-results.
 * * * * * * * *
@@ -111,7 +115,7 @@ def getCsvSpec(msg, filename, type):
 if (a[0] not in data):
 data[a[0]] = {}
 if len(a) == 1 and type == CSV_SPECTYPE_DATA:
-data[a[0]]["_data"] = []
+data[a[0]][B.DATA_NODE_DATA] = []
 # its a component
 if len(a) > 1 and a[1] not in data[a[0]]:
 data[a[0]][a[1]] = {}
@@ -120,7 +124,7 @@ def getCsvSpec(msg, filename, type):
 if len(a) > 2 and a[2] not in data[a[0]][a[1]]:
 data[a[0]][a[1]][a[2]] = {}
 if len(a) == 3 and type == CSV_SPECTYPE_DATA:
-data[a[0]][a[1]][a[1]]["_data"] = []
+data[a[0]][a[1]][a[1]][B.DATA_NODE_DATA] = []
 # fill data
 row = {}
 i = 1
@@ -128,13 +132,13 @@ def getCsvSpec(msg, filename, type):
 row[f] = fields[i]
 i += 1
 if len(a) == 1 and type == CSV_SPECTYPE_DATA:
-data[a[0]]["_data"].append(row)
+data[a[0]][B.DATA_NODE_DATA].append(row)
 elif len(a) == 1 and type == CSV_SPECTYPE_DATA:
 data[a[0]] = {f: row}
 elif len(a) == 2 and type == CSV_SPECTYPE_DATA:
-data[a[0]][a[1]]["_data"].append(row)
+data[a[0]][a[1]][B.DATA_NODE_DATA].append(row)
 elif len(a) == 1 and type == CSV_SPECTYPE_DATA:
-data[a[0]][a[1]]["_data"] = {f: row}
+data[a[0]][a[1]][B.DATA_NODE_DATA] = {f: row}
 elif len(a) == 3 and type == CSV_SPECTYPE_DATA:
 data[a[0]][a[1]][a[2]] = row
 elif len(a) == 1 and type == CSV_SPECTYPE_DATA:
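
Note: getCsvSpec appends the parsed CSV rows to a "_data" list keyed by the node name taken from the first column. A standalone sketch of the tree this produces for a one-level node; the node name and the row values are invented:

    DATA_NODE_DATA = "_data"                # mirrors basic.constants

    data = {}
    node = "testa"                          # a[0] after splitting the first CSV column
    row = {"id": "1", "name": "Alice"}      # column values zipped with the header fields

    if node not in data:
        data[node] = {}
    if DATA_NODE_DATA not in data[node]:
        data[node][DATA_NODE_DATA] = []
    data[node][DATA_NODE_DATA].append(row)
    print(data)   # {'testa': {'_data': [{'id': '1', 'name': 'Alice'}]}}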
