Data-Test-Executer Framework, specifically for testing data processing, with data generation, system preparation, data loading, and a holistic, diversifying comparison
#!/usr/bin/python
"""
constants used for the api functions
"""
import basic.constants as B
DDL_FILENAME = "DATASTRUCTURE"
DATA_NODE_TYPE = "type"
TYPE_STRING = "string"
TYPE_STR = "str"
TYPE_TEXT = "text"
TYPE_INT = "int"
TYPE_FLOAT = "float"
TYPE_DOUBLE = "double"
TYPE_DATE = "date"
TYPE_TIME = "time"
TYPE_PK = "pk"
TYPE_AUTOINT = "autoint"
FIELD_NAME = "name"
# fields in DDL
# _field;comment;format;acceptance;generate;nullable
DDL_FIELD = "_field"
""" pk, str, int, """
DDL_TYPE = "type"
""" char(10), vchar(10), see format_tool """
DDL_FORMAT = "format"
""" used in create database """
DDL_INDEX = "index"
DDL_CONSTRAINT = "nullable"
""" aggregat-functinóns in sql """
DDL_AGGREGAT = "aggregat"
""" see generic_tool """
DDL_GENERIC = "generic"
""" compare-key """
DDL_KEY = "key"
""" see acceptance_tool """
DDL_ACCEPTANCE = "acceptance"
DDL_ALIAS = "alias"
DDL_DESCRIPTION = B.SUBJECT_REFERENCE
LIST_DDL_ATTR = [DDL_FIELD, DDL_TYPE, DDL_FORMAT, DDL_INDEX, DDL_GENERIC,
DDL_AGGREGAT, DDL_KEY, DDL_ACCEPTANCE, DDL_ALIAS, DDL_DESCRIPTION]
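# Illustrative sketch (not part of the framework): a single field of the DDL
# could be held as a dict keyed by the constants above; the field name and all
# values here are assumptions, e.g. for a technical primary key:
#   ddl_row = {DDL_FIELD: "id", DDL_TYPE: TYPE_PK, DDL_FORMAT: "integer",
#              DDL_INDEX: "I", DDL_GENERIC: "", DDL_AGGREGAT: "",
#              DDL_KEY: "", DDL_ACCEPTANCE: "", DDL_ALIAS: "",
#              DDL_DESCRIPTION: "technical primary key"}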
DFILE_TYPE_YML = "yml"
DFILE_TYPE_JSON = "json"
DFILE_TYPE_CSV = "csv"
DFILE_TYPE_XML = "xml"
DFILE_TESTCASE_NAME = "testspec"
DFILE_TESTSUITE_NAME = "testsuite"
DFILE_TABLE_PREFIX = "table_"
LIST_DFNAME_ATTR = [DFILE_TESTCASE_NAME, DFILE_TESTSUITE_NAME, DFILE_TABLE_PREFIX]
LIST_DFNAME_CONST = ["DFILE_TESTCASE_NAME", "DFILE_TESTSUITE_NAME", "DFILE_TABLE_PREFIX"]
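# Illustrative sketch (assumption): these names could be combined into concrete
# data-file names, e.g. for a table "person":
#   DFILE_TABLE_PREFIX + "person" + "." + DFILE_TYPE_CSV   # -> "table_person.csv"
#   DFILE_TESTCASE_NAME + "." + DFILE_TYPE_YML             # -> "testspec.yml"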
DATA_SRC_DIR = "dir"
DATA_SRC_CSV = "csv"
DATA_ATTR_COUNT = "_count"
""" statistical information of data-count """
DATA_ATTR_DATE = "_date"
""" reference-date for computing the actual date in relation to specification or expectation """
DATA_ATTR_COMP = "_comp"
""" reference to using componente with their object """
DATA_ATTR_CHAR = "_char"
""" character of the data in order to delete it ión initialization """
DATA_ATTR_KEY = "_key"
""" key for a data-specification of a catalog-list - default: the first field is the key """
DATA_ATTR_ALIAS = "_alias"
DATA_ATTR_IDS = "_ids"
DATA_ATTR_REF = "_ref"
DATA_ATTR_TBL = "_table"
DATA_ATTR_TYPE = "_type"
DATA_ATTR_DLIM = "_dlim"
DATA_ATTR_NAME = "_"+FIELD_NAME
DATA_ATTR_DESCRIPTION = "_"+B.SUBJECT_DESCRIPTION
DATA_ATTR_REFERENCE = "_"+B.SUBJECT_REFERENCE
DATA_ATTR_PROJECT = "_"+B.SUBJECT_PROJECT
DATA_ATTR_USECASES = "_"+B.SUBJECT_USECASES
DATA_ATTR_USECASE_DESCR = DATA_ATTR_USECASES+"-description"
DATA_ATTR_STORIES = "_"+B.SUBJECT_STORIES
DATA_ATTR_STORY_DESCR = DATA_ATTR_STORIES+"-description"
""" name of the table - it can be overwrite from the environment-attribut tablename """
LIST_DATA_ATTR = [DATA_ATTR_TYPE, DATA_ATTR_COUNT, DATA_ATTR_DATE, DATA_ATTR_CHAR, DATA_ATTR_COMP,
DATA_ATTR_REF, DATA_ATTR_IDS, DATA_ATTR_ALIAS, DATA_ATTR_KEY, DATA_ATTR_TBL, DATA_ATTR_DLIM,
DATA_ATTR_NAME, DATA_ATTR_DESCRIPTION, DATA_ATTR_REFERENCE, DATA_ATTR_PROJECT,
DATA_ATTR_STORIES, DATA_ATTR_STORY_DESCR, DATA_ATTR_USECASES, DATA_ATTR_USECASE_DESCR]
LIST_ATTR_CONST = ["DATA_ATTR_COUNT", "DATA_ATTR_DATE", "DATA_ATTR_CHAR", "DATA_ATTR_COMP", "DATA_ATTR_ALIAS", "DATA_ATTR_KEY"]
LIST_ATTR_MULTI = [DATA_ATTR_USECASES, DATA_ATTR_STORIES, DATA_ATTR_STORY_DESCR]
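# Illustrative sketch (assumption): a table block inside a test-data file could
# carry these attributes as a mapping; the concrete values are invented here:
#   table_attributes = {DATA_ATTR_TYPE: "data", DATA_ATTR_COUNT: 100,
#                       DATA_ATTR_TBL: "person", DATA_ATTR_KEY: "id",
#                       DATA_ATTR_DLIM: ";"}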
# attributes in testcase-specification
HEAD_ATTR_DESCR = "decription"
HEAD_ATTR_TARGET = "target"
HEAD_ATTR_USECASE = "usecase"
HEAD_ATTR_UCID = "usecase-id"
HEAD_ATTR_STORY = "story"
HEAD_ATTR_STORYID = "story-id"
HEAD_ATTR_APP = B.SUBJECT_APP
HEAD_ATTR_APPS = B.SUBJECT_APPS
HEAD_ATTR_DEPR = "deprecated"
LIST_HEAD_ATTR = [HEAD_ATTR_DESCR, HEAD_ATTR_TARGET, HEAD_ATTR_USECASE, HEAD_ATTR_UCID,
HEAD_ATTR_STORY, HEAD_ATTR_STORYID, HEAD_ATTR_APP, HEAD_ATTR_APPS, HEAD_ATTR_DEPR]
LIST_HEAD_CONST = ["HEAD_ATTR_DESCR", "HEAD_ATTR_TARGET", "HEAD_ATTR_USECASE", "HEAD_ATTR_UCID",
"HEAD_ATTR_STORY", "HEAD_ATTR_STORYID", "HEAD_ATTR_APPS", "HEAD_ATTR_DEPR"]
# attributes in the option-block of a testcase-specification
OPT_ATTR_DESCR = "decription"
OPT_ATTR_TARGET = "target"
OPT_ATTR_USECASE = "usecase"
OPT_ATTR_UCID = "usecase-id"
OPT_ATTR_STORY = "story"
OPT_ATTR_STORYID = "story-id"
LIST_OPT_ATTR = [OPT_ATTR_DESCR, OPT_ATTR_TARGET, OPT_ATTR_USECASE, OPT_ATTR_UCID,
OPT_ATTR_STORY, OPT_ATTR_STORYID]
CSV_HEADER_START = ["node", "table", "tabelle"]
CSV_DELIMITER = ";"
INTERNAL_DELIMITER = "||"
"""
internal structure of testdata
"""
CSV_SPECTYPE_DATA = "data"
CSV_SPECTYPE_TREE = "tree"
CSV_SPECTYPE_KEYS = "keys"
CSV_SPECTYPE_CONF = "conf"
CSV_SPECTYPE_DDL = "ddl"
CSV_SPECTYPE_CTLG = "ctlg"
CSV_NODETYPE_KEYS = "_keys"
CSV_BLOCK_ATTR = "_attr"
CSV_BLOCK_HEAD = "_head"
CSV_BLOCK_OPTION = B.DATA_NODE_OPTION
CSV_BLOCK_STEP = B.DATA_NODE_STEPS
CSV_BLOCK_TABLES = "_table"
CSV_BLOCK_IMPORT = "_import"
LIST_CSV_BLOCKS = [CSV_BLOCK_ATTR, CSV_BLOCK_HEAD, CSV_BLOCK_OPTION, CSV_BLOCK_STEP, CSV_BLOCK_TABLES, CSV_BLOCK_IMPORT]
LIST_BLOCK_CONST = ["CSV_BLOCK_HEAD", "CSV_BLOCK_OPTION", "CSV_BLOCK_STEP", "CSV_BLOCK_TABLES", "CSV_BLOCK_IMPORT"]
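# Illustrative sketch (assumption): when reading a specification line, the first
# cell could be matched against these block markers, e.g.:
#   fields = line.split(CSV_DELIMITER)
#   if fields[0] in LIST_CSV_BLOCKS:
#       pass  # a new block of the corresponding kind starts here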
STEP_COMP_I = 1
STEP_EXECNR_I = 2
STEP_REFNR_I = 3
STEP_VARIANT_I = 4
STEP_ARGS_I = 5
STEP_LIST_I = 5
STEP_ATTR_COMP = "component"
STEP_ATTR_EXECNR = "exec-step"
STEP_ATTR_REFNR = "reference-nr"
STEP_ATTR_ARGS = "arguments"
LIST_STEP_ATTR = [STEP_ATTR_COMP, STEP_ATTR_EXECNR, STEP_ATTR_REFNR, STEP_ATTR_ARGS]
LIST_STEP_CONST = ["STEP_ATTR_COMP", "STEP_ATTR_EXECNR", "STEP_ATTR_REFNR", "STEP_ATTR_ARGS"]
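# Illustrative sketch (assumption): a step line could be split by CSV_DELIMITER
# and the cells addressed via the index constants above:
#   cells = step_line.split(CSV_DELIMITER)
#   component = cells[STEP_COMP_I]
#   exec_nr   = cells[STEP_EXECNR_I]
#   ref_nr    = cells[STEP_REFNR_I]
#   variant   = cells[STEP_VARIANT_I]
#   args      = cells[STEP_ARGS_I:]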
EXCP_MALFORMAT = "malformed line: "
ATTR_SRC_TYPE = "tdtyp"
ATTR_SRC_DATA = "tdsrc"
ATTR_SRC_NAME = "tdname"
DEFAULT_DB_PARTITION = "n"
""" attribute if table is partitioned - partitions are parametrized """
DEFAULT_DB_CONN_JAR = "n"
""" attribute for connection-jar-file instead of connection by ip, port """