diff --git a/basic/compexec.py b/basic/compexec.py
index c2b9777..89b3662 100644
--- a/basic/compexec.py
+++ b/basic/compexec.py
@@ -143,8 +143,6 @@ class Testexecuter():
         self.m.logInfo("select db-content "+ self.name)
         dbi = basic.toolHandling.getDbTool(job, self)
         data = dbi.selectTables(subdir, job)
-        print("ppp")
-        #data = {}
         for t in data[subdir]:
             data[B.DATA_NODE_TABLES] = {}
             data[B.DATA_NODE_TABLES][t] = data[subdir][t]
@@ -255,7 +253,7 @@ class Testexecuter():
         :return:
         """
         if not step.fct in self.conf[B.DATA_NODE_STEPS]:
-            raise Exception(self.m.getMessageText(T.EXP_KEY_DOESNT_EXIST, [step.fct, self.name]))
+            raise Exception(self.m.getMessageText(job, T.EXP_KEY_DOESNT_EXIST, [step.fct, self.name]))
         if step.fct in self.conf[B.DATA_NODE_STEPS]:
             for stepconf in self.conf[B.DATA_NODE_STEPS][step.fct]:
                 if stepconf[B.SUBJECT_TOOL] == B.TOPIC_NODE_FILE:
diff --git a/basic/constants.py b/basic/constants.py
index 5adfeaf..b3c6b8f 100644
--- a/basic/constants.py
+++ b/basic/constants.py
@@ -126,7 +126,14 @@ ATTR_DB_SCHEMA = "schema"
 """ optional attribute for technical name of the schema """
 ATTR_DB_TABNAME = "tabname"
 """ optional attribute in order to use a different technical name for the db-table """
-LIST_DB_ATTR = [ATTR_DB_PARTITION, ATTR_DB_DATABASE, ATTR_DB_SCHEMA, ATTR_DB_TABNAME, ATTR_ARTS_PRESTEP] + LIST_ARTS_ATTR
+ATTR_DB_USER = "user"
+""" optional attribute for the technical user of the db-connection """
+ATTR_DB_PASSWD = "password"
+""" optional attribute for the password of the db-connection """
+ATTR_DB_HOST = "hostname"
+""" optional attribute for the hostname of the db-connection """
+LIST_DB_ATTR = [ATTR_DB_PARTITION, ATTR_DB_DATABASE, ATTR_DB_SCHEMA, ATTR_DB_TABNAME, ATTR_ARTS_PRESTEP,
+                ATTR_DB_USER, ATTR_DB_PASSWD, ATTR_DB_HOST] + LIST_ARTS_ATTR
 TOPIC_NODE_CLI = "cli"
 LIST_CLI_ATTR = [] + LIST_ARTS_ATTR
 TOPIC_NODE_API = "api"
diff --git a/basic/entity.py b/basic/entity.py
index e0e7b31..b494af5 100644
--- a/basic/entity.py
+++ b/basic/entity.py
@@ -7,6 +7,25 @@ class Entity:
     def __int__(self, job):
         self.job = job
 
+    def getDbAttr(self, job):
+        out = {}
+        for attr in [B.ATTR_DB_HOST, B.ATTR_DB_USER, B.ATTR_DB_DATABASE, B.ATTR_DB_PASSWD]:
+            out[attr] = job.conf.confs[B.TOPIC_NODE_DB][attr]
+        return out
+
+    def getDdl(self, job, ddl):
+        out = {}
+        for t in ddl:
+            out[t] = {}
+            for f in ddl[t]:
+                out[t][f] = {}
+                for a in ddl[t][f]:
+                    print("entity-23 "+f+", "+a+" "+str(ddl))
+                    out[t][f][a] = ddl[t][f][a]
+                out[t][f][D.DDL_FNAME] = f
+            out[t][B.DATA_NODE_HEADER] = list(ddl[t].keys())
+        return out
+
     def createSchema(self):
         if B.TOPIC_NODE_DB in self.job.conf.confs:
             dbi = basic.toolHandling.getDbTool(self.job, None, self.job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE])
diff --git a/basic/program.py b/basic/program.py
index 07103d8..9f376cd 100644
--- a/basic/program.py
+++ b/basic/program.py
@@ -40,14 +40,14 @@ jobdef = {
         "dirname": "workdir",
         "loglevel": basic.message.LIMIT_INFO,
         "logpath": "{job.conf.data}/workspace/reorg_{job.start:H}.txt" },
-    "temp": {
+    "service": {
         "pardef": "",
         "pfilesource": "",
         "pfiletarget": "",
         "basedir": "workbase",
         "dirname": "workdir",
         "loglevel": basic.message.LIMIT_INFO,
-        "logpath": "{job.par.envdir}/{log}/log_{job.start:H}.txt" },
+        "logpath": "{job.conf.data}/workspace/service_{job.start:H}.txt" },
     "unit": {
         "pardef": "",
         "pfilesource": "",
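The new Entity.getDbAttr above pulls hostname, user, database and password straight out of the job configuration. For orientation, a minimal configuration block it could run against might look as follows; the key names come from basic/constants.py, while every value is purely illustrative:

    import basic.constants as B

    # Hypothetical content of job.conf.confs that Entity.getDbAttr and the
    # webflask views below assume; all values here are placeholders.
    confs = {
        B.TOPIC_NODE_DB: {
            B.ATTR_TYPE: "mysql",
            B.ATTR_DB_HOST: "localhost",
            B.ATTR_DB_USER: "datest",
            B.ATTR_DB_PASSWD: "datest-secret",
            B.ATTR_DB_DATABASE: "datest",
        }
    }
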
diff --git a/basic/user.py b/basic/user.py
index e24021a..7432d31 100644
--- a/basic/user.py
+++ b/basic/user.py
@@ -9,6 +9,20 @@ import utils.data_const as D
 import basic.constants as B
 import basic.entity
 
+ddl = {
+    "user": {
+        "id": {
+            D.DDL_TYPE: D.TYPE_PK,
+        },
+        "username": {
+            D.DDL_TYPE: D.TYPE_STR,
+        },
+        "password": {
+            D.DDL_TYPE: D.TYPE_STRING,
+        }
+    }
+}
+
 class User(basic.entity.Entity):
     username = ""
     password = ""
@@ -19,6 +33,10 @@ class User(basic.entity.Entity):
         :param job:
         """
         self.job = job
+        self.conf = {}
+        self.conf[B.SUBJECT_CONN] = self.getDbAttr(job)
+        self.conf[B.DATA_NODE_DDL] = self.getDdl(job, ddl)
+        self.m = job.m
 
     def getSchema(self):
         dbtype = self.job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE]
diff --git a/test/test_21tdata.py b/test/test_21tdata.py
index 3319f78..c76bebd 100644
--- a/test/test_21tdata.py
+++ b/test/test_21tdata.py
@@ -20,7 +20,7 @@ OS_SYSTEM = test.constants.OS_SYSTEM
 # if you minimize the list you can check the specific test-function
 TEST_FUNCTIONS = ["test_01tdata", "test_02getCsvSpec_data", "test_03getCsvSpec_tree", "test_04getCsvSpec_key",
                   "test_05getCsvSpec_conf", "test_06parseCsv"]
-# TEST_FUNCTIONS = ["test_getCsvSpec_data"]
+TEST_FUNCTIONS = ["test_05getCsvSpec_conf"]
 # with this variable you can switch prints on and off
 verbose = False
 
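Together with Entity.getDdl, the ddl block at the top of basic/user.py determines what User(job).conf carries. A sketch of the resulting structure, for illustration only (the fname entries are simply the field names, the header is the list of field keys):

    import basic.constants as B
    import utils.data_const as D

    # Structure that User.__init__ stores in self.conf[B.DATA_NODE_DDL],
    # as produced by Entity.getDdl from the ddl block in basic/user.py.
    user_ddl = {
        "user": {
            "id":       {D.DDL_TYPE: D.TYPE_PK,     D.DDL_FNAME: "id"},
            "username": {D.DDL_TYPE: D.TYPE_STR,    D.DDL_FNAME: "username"},
            "password": {D.DDL_TYPE: D.TYPE_STRING, D.DDL_FNAME: "password"},
            B.DATA_NODE_HEADER: ["id", "username", "password"],
        }
    }
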
diff --git a/test/test_31db.py b/test/test_31db.py
index e954bce..a4a8951 100644
--- a/test/test_31db.py
+++ b/test/test_31db.py
@@ -16,14 +16,15 @@ import utils.db_abstract
 import test.testtools
 import utils.config_tool
 import utils.data_const as D
-
+import basic.user
 HOME_PATH = test.constants.HOME_PATH
 conf = {}
 # here you can select single testfunction for developping the tests
 # "test_toolhandling", "test_parseSql" -> test of components
-TEST_FUNCTIONS = ["test_formatDbRows"]
-# TEST_FUNCTIONS = ["test_getTechnicalIDFields"]
+TEST_FUNCTIONS = ["test_formatDbRows", "test_10insertion", "test_11selection", "test_12deletion"]
+#TEST_FUNCTIONS = ["test_10insertion", "test_11selection"]
+TEST_FUNCTIONS = ["test_10insertion","test_12deletion"]
 
 class MyTestCase(unittest.TestCase):
     mymsg = "--------------------------------------------------------------"
@@ -123,6 +124,62 @@ class MyTestCase(unittest.TestCase):
         MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)
+
+    def test_10insertion(self):
+        global mymsg
+        actfunction = str(inspect.currentframe().f_code.co_name)
+        cnttest = 0
+        if actfunction not in TEST_FUNCTIONS:
+            return
+        job = basic.program.Job("webflask", {})
+        if B.TOPIC_NODE_DB in job.conf.confs:
+            dbtype = job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE]
+            userentity = basic.user.User(job)
+            dbi = basic.toolHandling.getDbTool(job, userentity, dbtype)
+            data = {}
+            data["user"] = {}
+            data["user"][B.DATA_NODE_HEADER] = userentity.conf[B.DATA_NODE_DDL]["user"][B.DATA_NODE_HEADER]
+            data["user"][B.DATA_NODE_DATA] = []
+            row = {"username": "alfons", "password": "alfons-secret"}
+            data["user"][B.DATA_NODE_DATA].append(row)
+            dbi.insertRows("user", data["user"][B.DATA_NODE_DATA], job)
+
+    def test_11selection(self):
+        global mymsg
+        actfunction = str(inspect.currentframe().f_code.co_name)
+        cnttest = 0
+        if actfunction not in TEST_FUNCTIONS:
+            return
+        job = basic.program.Job("webflask", {})
+        if B.TOPIC_NODE_DB in job.conf.confs:
+            dbtype = job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE]
+            userentity = basic.user.User(job)
+            dbi = basic.toolHandling.getDbTool(job, userentity, dbtype)
+            rows = dbi.selectRows("user", job)
+            print(str(rows))
+        MyTestCase.mymsg += "\n----- "+actfunction+" : "+str(cnttest)
+
+    def test_12deletion(self):
+        global mymsg
+        actfunction = str(inspect.currentframe().f_code.co_name)
+        cnttest = 0
+        if actfunction not in TEST_FUNCTIONS:
+            return
+        job = basic.program.Job("webflask", {})
+        if B.TOPIC_NODE_DB in job.conf.confs:
+            dbtype = job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE]
+            userentity = basic.user.User(job)
+            dbi = basic.toolHandling.getDbTool(job, userentity, dbtype)
+            rows = dbi.selectRows("user", job, "WHERE username = \'alfons\'")
+            cntpre = len(rows)
+            print(str(rows))
+            dbi.deleteRows("user", job, "WHERE username = \'alfons\'")
+            rows = dbi.selectRows("user", job, "WHERE username = \'alfons\'")
+            print(str(rows))
+            cntpost = len(rows)
+            self.assertGreaterEqual(cntpre, cntpost)
+
+
     def test_zzz(self):
         print(MyTestCase.mymsg)
diff --git a/test/testtools.py b/test/testtools.py
index 080e3ca..97a47a7 100644
--- a/test/testtools.py
+++ b/test/testtools.py
@@ -74,13 +74,12 @@ def getJob(pgran="", papp="", penv="", ptstamp="", pmode=""):
         path = DEFAULT_ARCHIV_DIR + "/TC0001/" + tstamp
     elif gran == "ts":
         path = DEFAULT_ARCHIV_DIR + "/testlauf/TST001_" + tstamp
-    job = basic.program.Job("unit")
     #job.conf.confs[B.SUBJECT_PATH]["components"] = T.COMP_PATH
     args = {"application": app, "environment": env, "modus": mode, gran+"time": tstamp,
             gran+"dir": path,
             "step": 2}
     # "usecase": "TST001", "tstime": "2022-03-17_17-28"}
-    job.par.setParameterArgs(job, args)
+    job = basic.program.Job("unit", args)
     return job
 
 
diff --git a/utils/data_const.py b/utils/data_const.py
index 6aeb720..2543755 100644
--- a/utils/data_const.py
+++ b/utils/data_const.py
@@ -15,6 +15,7 @@ TYPE_FLOAT = "float"
 TYPE_DOUBLE = "double"
 TYPE_DATE = "date"
 TYPE_TIME = "time"
+TYPE_PK = "pk"
 
 # fields in DDL
 DDL_FNULLABLE = "nullable"
diff --git a/utils/db_abstract.py b/utils/db_abstract.py
index df3e79b..08fab06 100644
--- a/utils/db_abstract.py
+++ b/utils/db_abstract.py
@@ -61,15 +61,18 @@ def getDbAttributes(comp, table):
         B.ATTR_DB_DATABASE: "",
         B.ATTR_DB_SCHEMA: "",
         B.ATTR_DB_TABNAME: "",
+        B.ATTR_DB_USER: "",
+        B.ATTR_DB_PASSWD: "",
+        B.ATTR_DB_HOST: "",
         B.ATTR_DB_PARTITION: D.DEFAULT_DB_PARTITION,
         B.ATTR_DB_CONN_JAR: D.DEFAULT_DB_CONN_JAR
     }
     for attr in out.keys():
-        if (table in comp.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB]) \
+        if (B.SUBJECT_ARTS in comp.conf and table in comp.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB]) \
                 and (attr in comp.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB][table]):
             out[attr] = comp.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB][table][attr]
             print("a " + attr + " " + out[attr])
-        elif (attr in comp.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB]):
+        elif (B.SUBJECT_ARTS in comp.conf and attr in comp.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB]):
             out[attr] = comp.conf[B.SUBJECT_ARTS][B.TOPIC_NODE_DB][attr]
             print("b " + attr + " " + out[attr])
         elif (B.TOPIC_NODE_DB in comp.conf[B.SUBJECT_CONN]) \
                 and (attr in comp.conf[B.SUBJECT_CONN][B.TOPIC_NODE_DB])
@@ -241,7 +244,7 @@ def formatDbVal(msg, val, dtyp):
     pass
 
 
-def isCompTable(comp, data, table):
+def isCompTable(comp, job, data, table):
     """ checks if the table in data relates to the component """
     print(str(data))
     return isCompRow(comp, data[B.DATA_NODE_TABLES][table])
@@ -334,7 +337,7 @@ class DbFcts():
         self.loadDdl(job)
         for t in tdata[B.DATA_NODE_TABLES]:
             print("einzufuegende Tabelle "+self.comp.name+" "+t)
-            if isCompTable(self.comp, tdata, t):
+            if isCompTable(self.comp, job, tdata, t):
                 self.insertRows(t, tdata[B.DATA_NODE_TABLES][t][B.DATA_NODE_DATA], job)
                 self.comp.m.logMsg("in Tabelle {} {} Zeilen eingefuegt".format(
                     t, len(tdata[B.DATA_NODE_TABLES][t][B.DATA_NODE_DATA])))
@@ -372,7 +375,7 @@ class DbFcts():
         if len(value.strip()) == 0 and fo[D.DDL_FNULLABLE] == B.SVAL_YES:
             return self.getDbNull()
         if fo[D.DATA_NODE_TYPE] == D.TYPE_STRING or fo[D.DATA_NODE_TYPE] == D.TYPE_STR:
-            return "'"+value.strip()+"'"
+            return value.strip()
         elif fo[D.DATA_NODE_TYPE] == D.TYPE_INT:
             return value.strip()
         elif fo[D.DATA_NODE_TYPE] == D.TYPE_DOUBLE:
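The change to getDbValue above (returning the bare value instead of a quoted SQL literal) belongs together with the rewritten insertRows in utils/dbmysql_tool.py below: the statement is now built with %s placeholders and the values are handed to the driver, which does the quoting and escaping itself. A standalone sketch of that pattern with mysql.connector, with placeholder connection data:

    import mysql.connector

    # Minimal parameterized insert as used by the reworked insertRows;
    # host, user, password and database are placeholders.
    cnx = mysql.connector.connect(host="localhost", user="datest",
                                  password="datest-secret", database="datest")
    cursor = cnx.cursor()
    sql = "INSERT INTO datest.user ( username, password ) VALUES ( %s, %s )"
    values = [("alfons", "alfons-secret")]
    cursor.executemany(sql, values)
    cnx.commit()
    cnx.close()
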
diff --git a/utils/dbmysql_tool.py b/utils/dbmysql_tool.py
index 36157b3..f598793 100644
--- a/utils/dbmysql_tool.py
+++ b/utils/dbmysql_tool.py
@@ -12,86 +12,120 @@ import basic.constants as B
 import utils.data_const as D
 
 class DbFcts(utils.dbrel_tool.DbFcts):
-    """
-    This interface defines each necessary connection to any kind of database.
-    The specific technique how to connect to the concrete DBMS has to be implemented in the specific tool.
-    """
+    """
+    This interface defines each necessary connection to any kind of database.
+    The specific technique how to connect to the concrete DBMS has to be implemented in the specific tool.
+    """
 
-    def __init__(self):
-        pass
+    def __init__(self):
+        pass
 
-    def selectRows(self, table, job):
-        """ method to select rows from a database
-        statement written in sql """
-        tdata = {}
-        verify = -1+job.getDebugLevel("db_tool")
-        cmd = "SELECT * FROM "+table+";"
-        #mycursor = self.getConnector()
-        #mycursor.execute(cmd)
-        #myresult = mycursor.fetchall()
-        tdata[B.DATA_NODE_HEADER] = []
-        for f in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
-            tdata[B.DATA_NODE_HEADER].append(f)
-        myresult = []
-        for x in myresult:
-            print(x)
-        self.comp.m.logInfo(cmd)
-        return tdata
+    def selectRows(self, table, job, where=""):
+        """ method to select rows from a database
+        statement written in sql """
+        tdata = {}
+        verify = -1+job.getDebugLevel("db_tool")
+        attr = self.getDbAttributes(B.SVAL_NULL)
+        sql = "SELECT * FROM "+attr[B.ATTR_DB_DATABASE]+"."+table
+        if len(where) > 3:
+            sql += " "+where
+        sql += ";"
+        self.comp.m.logInfo(sql)
+        connector = self.getConnector()
+        mycursor = connector.cursor()
+        mycursor.execute(sql)
+        myresult = mycursor.fetchall()
+        tdata[B.DATA_NODE_HEADER] = []
+        for f in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
+            tdata[B.DATA_NODE_HEADER].append(f)
+        tdata[B.DATA_NODE_DATA] = []
+        for x in myresult:
+            r = {}
+            i = 0
+            for f in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
+                r[f] = x[i]
+                i += 1
+            tdata[B.DATA_NODE_DATA].append(r)
+        self.comp.m.logInfo(str(tdata))
+        return tdata
 
-    def deleteRows(self, table, job):
-        """ method to delete rows from a database
-        statement written in sql """
-        verify = -1+job.getDebugLevel("db_tool")
-        cmd = "DELETE FROM "+table+";"
-        self.comp.m.logInfo(cmd)
+    def deleteRows(self, table, job, where=""):
+        """ method to delete rows from a database
+        statement written in sql """
+        verify = -1+job.getDebugLevel("db_tool")
+        attr = self.getDbAttributes(B.SVAL_NULL)
+        sql = "DELETE FROM "+attr[B.ATTR_DB_DATABASE]+"."+table
+        if len(where) > 3:
+            sql += " "+where
+        sql += ";"
+        self.comp.m.logInfo(sql)
+        self.execStatement(sql)
 
-    def updateRows(self, statement, job):
-        """ method to delete rows from a database
-        statement written in sql """
-        raise Exception(B.EXCEPT_NOT_IMPLEMENT)
+    def updateRows(self, statement, job):
+        """ method to update rows in a database
+        statement written in sql """
+        raise Exception(B.EXCEPT_NOT_IMPLEMENT)
+
+    def insertRows(self, table, rows, job):
+        """ method to insert rows into a database
+        the rows will be interpreted by the ddl of the component
+        """
+        verify = -1+job.getDebugLevel("db_tool")
+        attr = self.getDbAttributes(B.SVAL_NULL)
+        sql = "INSERT INTO "+attr[B.ATTR_DB_DATABASE]+"."+table
+        sql += " ( "+",".join(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]) + " ) "
+        sql += " VALUES ( "
+        for x in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
+            sql += "%s, "
+        sql = sql[0:-2] + " )"
+        self.comp.m.logInfo(sql)
+        values = []
+        for r in rows:
+            rowvalues = []
+            for h in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
+                if (self.comp.conf[B.DATA_NODE_DDL][table][h] == D.TYPE_PK):
+                    continue
+                if (h in r):
+                    rowvalues.append(self.getDbValue(self.comp.conf[B.DATA_NODE_DDL][table][h], r[h]))
+                else:
+                    rowvalues.append(self.getDbValue(self.comp.conf[B.DATA_NODE_DDL][table][h], ""))
+            values.append(tuple(rowvalues))
+        self.comp.m.logInfo(str(values))
+        try:
+            connector = self.getConnector()
+            mycursor = connector.cursor()
+            mycursor.executemany(sql, values)
+            connector.commit()
+        except Exception as e:
+            self.comp.m.setError("")
+            return
+        self.comp.m.setMsg(str(len(values))+" rows inserted into "+table)
 
-    def insertRows(self, table, rows, job):
-        """ method to insert rows into a database
-        the rows will be interpreted by the ddl of the component
-        """
-        verify = -1+job.getDebugLevel("db_tool")
-        cmd = "INSERT INTO "+table+";"
-        header = ""
-        for h in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
-            print(h)
-            header += ", "+h
-        cmd += " (" + header[1:]+" ) "
-        rowvalues = ""
-        for r in rows:
-            print("r-----------------")
-            print(r)
-            rowvalues = ""
-            cmd += "\n ( "
-            for h in self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_HEADER]:
-                print("h "+h)
-                if (h in r):
-                    rowvalues += ", "+self.getDbValue(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_DATA][h], r[h])
-                else:
-                    rowvalues += ", "+self.getDbValue(self.comp.conf[B.DATA_NODE_DDL][table][B.DATA_NODE_DATA][h], "")
-                print("rv " + rowvalues)
-            cmd += rowvalues[1:]+" ),"
-        cmd = cmd[0:-1]+";"
-        self.comp.m.logInfo(cmd)
+    def execStatement(self, statement):
+        """ add-on-method to execute the statement
+        this method should only be called by the class itself """
+        connector = self.getConnector()
+        cursor = connector.cursor()
+        try:
+            cursor.execute(statement)
+            connector.commit()
+        except Exception as e:
+            if "CREATE INDEX " in statement:
+                return
+            raise Exception("DB-Exception "+statement+"\n"+e.__str__())
+        print("Statement executed "+statement)
+        self.comp.m.setMsg("Statement executed")
 
-    def execStatement(self, dbconn, statement):
-        """ add-on-method to execute the statement
-        this method should only called by the class itself """
-        print("execStatement "+statement)
-        connector = mysql.connector.connect(
-            host=dbconn["host"],
-            user=dbconn["user"],
-            password=dbconn["passwd"]
-        )
-        cursor = connector.cursor()
-        try:
-            cursor.execute(statement)
-        except:
-            if "CREATE INDEX " in statement:
-                return
-            raise Exception("DB-Exception "+statement)
\ No newline at end of file
+    def getConnector(self):
+        """ add-on-method to get the connector
+        this method should only be called by the class itself """
+        job = self.job  # basic.program.Job.getInstance()
+        attr = self.getDbAttributes(B.SVAL_NULL)
+        cnx = mysql.connector.connect(
+            host=attr[B.ATTR_DB_HOST],
+            user=attr[B.ATTR_DB_USER],
+            password=attr[B.ATTR_DB_PASSWD],
+            database=attr[B.ATTR_DB_DATABASE]
+        )
+        return cnx
diff --git a/utils/tdata_tool.py b/utils/tdata_tool.py
index 5aa213b..f12538d 100644
--- a/utils/tdata_tool.py
+++ b/utils/tdata_tool.py
@@ -372,10 +372,14 @@ def splitFields(line, delimiter, job):
 
 def writeCsvData(filename, tdata, comp, job):
     text = ""
-    if B.DATA_NODE_TABLES in tdata:
-        for k in tdata[B.DATA_NODE_TABLES]:
-            text += buildCsvData(tdata, k, comp, job)
-            text += "\n"
+    data = tdata
+    for p in [B.DATA_NODE_TABLES, P.KEY_PRECOND, P.KEY_POSTCOND]:
+        if p in data:
+            print("data "+p)
+            data = data[p]
+    for k in data:
+        text += buildCsvData(data, k, comp, job)
+        text += "\n"
     utils.file_tool.writeFileText(comp.m, job, filename, text)
 
@@ -391,10 +395,6 @@ def buildCsvData(tdata, tableName, comp, job=None):
     for k in [D.DATA_ATTR_DATE, D.DATA_ATTR_COUNT]:
         if k in tdata:
             text += k+";"+str(tdata[k])+"\n"
-    x0 = "-------"+str(f"{B.DATA_NODE_TABLES=}")
-    x1 = "-------"+str(tableName)
-    x2 = str(utils.i18n_tool.I18n.getInstance(job).getText(f"{B.DATA_NODE_TABLES=}", job))
-    print(x0+" "+x1+" "+x2)
     if tableName in tdata:
         actdata = tdata[tableName]
     else:
diff --git a/webflask/auth.py b/webflask/auth.py
index 62191d5..597b7e4 100644
--- a/webflask/auth.py
+++ b/webflask/auth.py
@@ -1,12 +1,18 @@
 # https://flask.palletsprojects.com/en/2.0.x/tutorial/views/
 # --------------------------------------------------------------
 import functools
+import traceback
+import basic.user
+import basic.program
+import basic.constants as B
+import basic.toolHandling
 
 from flask import (
     Blueprint, flash, g, redirect, render_template, request, session, url_for
 )
 from werkzeug.security import check_password_hash, generate_password_hash
+import basic.program
 
 from webflask.db import get_db
 
 bp = Blueprint('auth', __name__, url_prefix='/auth')
@@ -16,21 +22,24 @@ def login():
     if request.method == 'POST':
         username = request.form['username']
         password = request.form['password']
-        db = get_db()
+        job = basic.program.Job("webflask", {})
+        session['datest_job'] = job
+        dbtype = job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE]
+        userentity = basic.user.User(job)
+        dbi = basic.toolHandling.getDbTool(job, userentity, dbtype)
         error = None
-        user = db.execute(
-            'SELECT * FROM user WHERE username = ?', (username,)
-        ).fetchone()
+        row = dbi.selectRows("user", job, "WHERE username = \'" + username + "\'")
 
-        if user is None:
+        if row is None or len(row[B.DATA_NODE_DATA]) == 0:
             error = 'Incorrect username.'
-        elif not check_password_hash(user['password'], password):
+        elif not check_password_hash(row[B.DATA_NODE_DATA][0]['password'], password):
             error = 'Incorrect password.'
 
         if error is None:
             session.clear()
-            session['user_id'] = user['id']
-            return redirect(url_for('index'))
+            session['user_id'] = row[B.DATA_NODE_DATA][0]['id']
+            return redirect(url_for('testcase.overview'))
+
         flash(error)
 
@@ -41,7 +50,10 @@ def register():
     if request.method == 'POST':
         username = request.form['username']
        password = request.form['password']
-        db = get_db()
+        job = basic.program.Job("webflask", {})
+        dbtype = job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE]
+        dbi = basic.toolHandling.getDbTool(job, None, "mysql")
+        # db = get_db()
         error = None
 
         if not username:
@@ -51,13 +63,13 @@ def register():
         if error is None:
             try:
-                db.execute(
-                    "INSERT INTO user (username, password) VALUES (?, ?)",
-                    (username, generate_password_hash(password)),
-                )
-                db.commit()
-            except db.IntegrityError:
-                error = f"User {username} is already registered."
+                sql = "INSERT INTO datest.user (username, password) "
+                sql += "VALUES ( '"+username+"', '"+generate_password_hash(password)+"');"
+                dbi.execStatement(job.conf.confs[B.TOPIC_NODE_DB], sql )
+                #db.commit()
+            except Exception as e:
+                error = str(e)
+                # error = f"User {username} is already registered."
             else:
                 return redirect(url_for("auth.login"))
 
@@ -72,9 +84,20 @@ def load_logged_in_user():
     if user_id is None:
         g.user = None
     else:
-        g.user = get_db().execute(
-            'SELECT * FROM user WHERE id = ?', (user_id,)
-        ).fetchone()
+        job = basic.program.Job("webflask", {})
+        dbtype = job.conf.confs[B.TOPIC_NODE_DB][B.ATTR_TYPE]
+        userentity = basic.user.User(job)
+        dbi = basic.toolHandling.getDbTool(job, userentity, dbtype)
+        error = None
+        sql = "WHERE id = " + str(user_id)
+        row = dbi.selectRows("user", job, sql)
+        if len(row[B.DATA_NODE_DATA]) > 0:
+            g.user = row[B.DATA_NODE_DATA][0]
+        else:
+            g.user = None
+        # get_db().execute(
+        #     'SELECT * FROM user WHERE id = ?', (user_id,)
+        # ).fetchone()
 
 @bp.route('/logout')
 def logout():
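The login and register views above store and verify only werkzeug's salted hash, never the plain password. A minimal round trip of the two helpers, independent of the database:

    from werkzeug.security import check_password_hash, generate_password_hash

    # The hash string is what ends up in the user table's password column;
    # check_password_hash re-derives the hash from the candidate and compares.
    stored_hash = generate_password_hash("alfons-secret")
    assert check_password_hash(stored_hash, "alfons-secret")
    assert not check_password_hash(stored_hash, "wrong-password")
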
diff --git a/webflask/db.py b/webflask/db.py
index a94e23c..1e51d9f 100644
--- a/webflask/db.py
+++ b/webflask/db.py
@@ -13,6 +13,10 @@ def init_db():
         db.executescript(f.read().decode('utf8'))
 
 def get_db():
+    """
+    read config and check the db-connection
+    :return:
+    """
     if 'db' not in g:
         g.db = sqlite3.connect(
             current_app.config['DATABASE'],