Data-Test-Executer Framework, specifically for testing data processing: data generation, system preparation, data loading, and a holistic, diversifying comparison.

#!/usr/bin/python
"""
constants used for API functions
"""
import basic.constants as B
DDL_FILENAME = "DATASTRUCTURE"
DATA_NODE_TYPE = "type"
TYPE_STRING = "string"
TYPE_STR = "str"
TYPE_TEXT = "text"
TYPE_INT = "int"
TYPE_FLOAT = "float"
TYPE_DOUBLE = "double"
TYPE_DATE = "date"
TYPE_TIME = "time"
TYPE_PK = "pk"
# fields in DDL
DDL_FNULLABLE = "nullable"
DDL_FNAME = "field"
DDL_ACCEPTANCE = "acceptance"
DDL_KEY = "key"
DDL_TYPE = "type"
DDL_INDEX = "index"
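# Illustrative only (assumption, not framework-defined): a single DDL row could combine
# the field keys above roughly like
#   {DDL_FNAME: "customer_id", DDL_TYPE: TYPE_INT, DDL_FNULLABLE: "n",
#    DDL_KEY: TYPE_PK, DDL_INDEX: 0, DDL_ACCEPTANCE: "ignore"}
# where the field name and all values are hypothetical placeholders.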
DFILE_TYPE_YML = "yml"
DFILE_TYPE_JSON = "json"
DFILE_TYPE_CSV = "csv"
DFILE_TYPE_XML = "xml"
DFILE_TESTCASE_NAME = "testspec"
DFILE_TESTSUITE_NAME = "testsuite"
DFILE_TABLE_PREFIX = "table_"
LIST_DFNAME_ATTR = [DFILE_TESTCASE_NAME, DFILE_TESTSUITE_NAME, DFILE_TABLE_PREFIX]
LIST_DFNAME_CONST = ["DFILE_TESTCASE_NAME", "DFILE_TESTSUITE_NAME", "DFILE_TABLE_PREFIX"]
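# Illustrative only (assumption): data-file names could be derived from these constants,
# e.g. DFILE_TESTCASE_NAME + "." + DFILE_TYPE_CSV -> "testspec.csv" for a test-case
# specification, or DFILE_TABLE_PREFIX + "customer" + "." + DFILE_TYPE_CSV
# -> "table_customer.csv" for a table file; the table name "customer" is hypothetical.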
DATA_SRC_DIR = "dir"
DATA_SRC_CSV = "csv"
DATA_ATTR_COUNT = "_count"
""" statistical information of data-count """
DATA_ATTR_DATE = "_date"
""" reference-date for computing the actual date in relation to specification or expectation """
DATA_ATTR_COMP = "_comp"
""" reference to using componente with their object """
DATA_ATTR_CHAR = "_char"
""" character of the data in order to delete it ión initialization """
DATA_ATTR_KEY = "_key"
""" key for a data-specification of a catalog-list - default: the first field is the key """
DATA_ATTR_ALIAS = "_alias"
DATA_ATTR_IDS = "_ids"
DATA_ATTR_REF = "_ref"
LIST_DATA_ATTR = [DATA_ATTR_COUNT, DATA_ATTR_DATE, DATA_ATTR_CHAR, DATA_ATTR_COMP,
DATA_ATTR_REF, DATA_ATTR_IDS, DATA_ATTR_ALIAS, DATA_ATTR_KEY]
LIST_ATTR_CONST = ["DATA_ATTR_COUNT", "DATA_ATTR_DATE", "DATA_ATTR_CHAR", "DATA_ATTR_COMP", "DATA_ATTR_ALIAS", "DATA_ATTR_KEY"]
HEAD_ATTR_DESCR = "decription"
HEAD_ATTR_TARGET = "target"
HEAD_ATTR_USECASE = "usecase"
HEAD_ATTR_UCID = "usecase-id"
HEAD_ATTR_STORY = "story"
HEAD_ATTR_STORYID = "storyid-id"
HEAD_ATTR_APPS = B.SUBJECT_APPS
HEAD_ATTR_DEPR = "deprecated"
LIST_HEAD_ATTR = [HEAD_ATTR_DESCR, HEAD_ATTR_TARGET, HEAD_ATTR_USECASE, HEAD_ATTR_UCID,
HEAD_ATTR_STORY, HEAD_ATTR_STORYID, HEAD_ATTR_APPS, HEAD_ATTR_DEPR]
LIST_HEAD_CONST = ["HEAD_ATTR_DESCR", "HEAD_ATTR_TARGET", "HEAD_ATTR_USECASE", "HEAD_ATTR_UCID",
"HEAD_ATTR_STORY", "HEAD_ATTR_STORYID", "HEAD_ATTR_APPS", "HEAD_ATTR_DEPR"]
CSV_HEADER_START = ["node", "table", "tabelle"]
CSV_DELIMITER = ";"
INTERNAL_DELIMITER = "||"
"""
internal structure of testdata
"""
CSV_SPECTYPE_DATA = "data"
CSV_SPECTYPE_TREE = "tree"
CSV_SPECTYPE_KEYS = "keys"
CSV_SPECTYPE_CONF = "conf"
CSV_NODETYPE_KEYS = "_keys"
CSV_BLOCK_HEAD = "_head"
CSV_BLOCK_OPTION = B.DATA_NODE_OPTION
CSV_BLOCK_STEP = B.DATA_NODE_STEPS
CSV_BLOCK_TABLES = B.DATA_NODE_TABLES
CSV_BLOCK_IMPORT = "_import"
LIST_CSV_BLOCKS = [CSV_BLOCK_HEAD, CSV_BLOCK_OPTION, CSV_BLOCK_STEP, CSV_BLOCK_TABLES, CSV_BLOCK_IMPORT]
LIST_BLOCK_CONST = ["CSV_BLOCK_HEAD", "CSV_BLOCK_OPTION", "CSV_BLOCK_STEP", "CSV_BLOCK_TABLES", "CSV_BLOCK_IMPORT"]
STEP_COMP_I = 1
STEP_EXECNR_I = 2
STEP_REFNR_I = 3
STEP_VARIANT_I = 4
STEP_ARGS_I = 5
STEP_LIST_I = 5
STEP_ATTR_COMP = "component"
STEP_ATTR_EXECNR = "exec-step"
STEP_ATTR_REFNR = "reference-nr"
STEP_ATTR_ARGS = "arguments"
LIST_STEP_ATTR = [STEP_ATTR_COMP, STEP_ATTR_EXECNR, STEP_ATTR_REFNR, STEP_ATTR_ARGS]
LIST_STEP_CONST = ["STEP_ATTR_COMP", "STEP_ATTR_EXECNR", "STEP_ATTR_REFNR", "STEP_ATTR_ARGS"]
EXCP_MALFORMAT = "malformed line: "
ATTR_SRC_TYPE = "tdtyp"
ATTR_SRC_DATA = "tdsrc"
ATTR_SRC_NAME = "tdname"
DEFAULT_DB_PARTITION = "n"
""" attribute if table is partitioned - partitions are parametrized """
DEFAULT_DB_CONN_JAR = "n"
""" attribute for connection-jar-file instead of connection by ip, port """