Migrated a lot of Admin stuff in AddSite,

DeleteSite
This commit is contained in:
Jadowyne Ulve 2025-08-02 18:30:47 -05:00
parent 69409f10fa
commit af6ee94ff1
66 changed files with 1469 additions and 118 deletions

View File

@@ -359,6 +359,10 @@ class SitePayload:
             self.default_primary_location
         )
+
+    def get_dictionary(self):
+        return self.__dict__
+
 #DONE
 @dataclass
 class RolePayload:

View File

@@ -8,7 +8,8 @@ import postsqldb, process, hashlib
 # APPLICATION IMPORTS
-from application.administration import administration_database
+from application.administration import administration_database, administration_processes
+from application import database_payloads

 admin_api = Blueprint('admin_api', __name__, template_folder="templates", static_folder="static")
@@ -18,57 +19,45 @@ admin_api = Blueprint('admin_api', __name__, template_folder="templates", static
 @admin_api.route('/')
 def admin_index():
     sites = [site[1] for site in main.get_sites(session['user']['sites'])]
-    return render_template("index.html",
-                           current_site=session['selected_site'],
-                           sites=sites)
+    return render_template("admin_index.html", current_site=session['selected_site'], sites=sites)

-# API ROUTES
-@admin_api.route('/admin/site/<id>')
+# Added to Database
+@admin_api.route('/site/<id>')
 @login_required
 def adminSites(id):
     if id == "new":
-        new_site = postsqldb.SitesTable.Payload(
-            "",
-            "",
-            session['user_id']
-        )
-        return render_template("admin/site.html", site=new_site.get_dictionary())
+        new_site_payload = database_payloads.SitePayload("", "", session['user_id'])
+        return render_template("site.html", site=new_site_payload.get_dictionary())
     else:
-        database_config = config()
-        with psycopg2.connect(**database_config) as conn:
-            site = postsqldb.SitesTable.select_tuple(conn, (id,))
-        return render_template('admin/site.html', site=site)
+        site = administration_database.selectSitesTuple((id,))
+        return render_template('site.html', site=site)

-@admin_api.route('/admin/role/<id>')
+# Added to database
+@admin_api.route('/role/<id>')
 @login_required
 def adminRoles(id):
-    database_config = config()
-    with psycopg2.connect(**database_config) as conn:
-        sites = postsqldb.SitesTable.selectTuples(conn)
+    sites = administration_database.selectSitesTuples()
     if id == "new":
-        new_role = postsqldb.RolesTable.Payload(
-            "",
-            "",
-            0
-        )
-        return render_template("admin/role.html", role=new_role.get_dictionary(), sites=sites)
+        new_role_payload = database_payloads.RolePayload("", "", 0)
+        return render_template("role.html", role=new_role_payload.get_dictionary(), sites=sites)
     else:
-        role = postsqldb.RolesTable.select_tuple(conn, (id,))
-        return render_template('admin/role.html', role=role, sites=sites)
+        role = administration_database.selectRolesTuple((id,))
+        return render_template('role.html', role=role, sites=sites)

-@admin_api.route('/admin/user/<id>')
+# Added to database
+@admin_api.route('/user/<id>')
 @login_required
 def adminUser(id):
-    database_config = config()
-    with psycopg2.connect(**database_config) as conn:
-        if id == "new":
-            new_user = postsqldb.LoginsTable.Payload("", "", "", "")
-            return render_template("admin/user.html", user=new_user.get_dictionary())
-        else:
-            user = administration_database.selectLoginsUser(int(id))
-            return render_template('admin/user.html', user=user)
+    if id == "new":
+        new_user_payload = database_payloads.LoginsPayload("", "", "", "")
+        return render_template("user.html", user=new_user_payload.get_dictionary())
+    else:
+        user = administration_database.selectLoginsTuple(int(id))
+        return render_template('user.html', user=user)

-@admin_api.route('/admin/getSites', methods=['GET'])
+# API ROUTES
+# add to database
+@admin_api.route('/api/getSites', methods=['GET'])
 @login_required
 def getSites():
     if request.method == "GET":
@@ -77,13 +66,12 @@ def getSites():
         page = int(request.args.get('page', 1))
         limit = int(request.args.get('limit', 10))
         offset = (page - 1) * limit
-        database_config = config()
-        with psycopg2.connect(**database_config) as conn:
-            records, count = postsqldb.SitesTable.paginateTuples(conn, (limit, offset))
+        records, count = administration_database.paginateSitesTuples((limit, offset))
         return jsonify({'sites': records, "end": math.ceil(count/limit), 'error':False, 'message': 'Sites Loaded Successfully!'})
     return jsonify({'sites': records, "end": math.ceil(count/limit), 'error':True, 'message': 'There was a problem loading Sites!'})

-@admin_api.route('/admin/getRoles', methods=['GET'])
+# Added to database
+@admin_api.route('/api/getRoles', methods=['GET'])
 @login_required
 def getRoles():
     if request.method == "GET":
@@ -92,13 +80,12 @@ def getRoles():
         page = int(request.args.get('page', 1))
         limit = int(request.args.get('limit', 10))
         offset = (page - 1) * limit
-        database_config = config()
-        with psycopg2.connect(**database_config) as conn:
-            records, count = postsqldb.RolesTable.paginate_tuples(conn, (limit, offset))
+        records, count = administration_database.paginateRolesTuples((limit, offset))
        return jsonify({'roles': records, "end": math.ceil(count/limit), 'error':False, 'message': 'Roles Loaded Successfully!'})
     return jsonify({'roles': records, "end": math.ceil(count/limit), 'error':True, 'message': 'There was a problem loading Roles!'})

-@admin_api.route('/admin/getLogins', methods=['GET'])
+# Added to Database
+@admin_api.route('/api/getLogins', methods=['GET'])
 @login_required
 def getLogins():
     if request.method == "GET":
@@ -107,65 +94,46 @@ def getLogins():
         page = int(request.args.get('page', 1))
         limit = int(request.args.get('limit', 10))
         offset = (page - 1) * limit
-        database_config = config()
-        with psycopg2.connect(**database_config) as conn:
-            records, count = postsqldb.LoginsTable.paginate_tuples(conn, (limit, offset))
+        records, count = administration_database.paginateLoginsTuples((limit, offset))
         return jsonify({'logins': records, "end": math.ceil(count/limit), 'error':False, 'message': 'logins Loaded Successfully!'})
     return jsonify({'logins': records, "end": math.ceil(count/limit), 'error':True, 'message': 'There was a problem loading logins!'})

-@admin_api.route('/admin/site/postDeleteSite', methods=["POST"])
+# Added to database and Processses.
+@admin_api.route('/api/site/postDeleteSite', methods=["POST"])
 def postDeleteSite():
     if request.method == "POST":
         site_id = request.get_json()['site_id']
-        database_config = config()
         user_id = session['user_id']
+        site = administration_database.selectSitesTuple((site_id,))
+        user = administration_database.selectLoginsTuple((user_id,))
+        if user['id'] != site['site_owner_id']:
+            return jsonify({'error': True, 'message': f"You must be the owner of this site to delete."})
         try:
-            with psycopg2.connect(**database_config) as conn:
-                user = postsqldb.LoginsTable.select_tuple(conn, (user_id,))
-                admin_user = (user['username'], user['password'], user['email'], user['row_type'])
-                site = postsqldb.SitesTable.select_tuple(conn, (site_id,))
-                site = postsqldb.SitesTable.Manager(
-                    site['site_name'],
-                    admin_user,
-                    site['default_zone'],
-                    site['default_primary_location'],
-                    site['site_description']
-                )
-                process.deleteSite(site_manager=site)
-        except Exception as error:
-            conn.rollback()
-            return jsonify({'error': True, 'message': error})
+            administration_processes.deleteSite(site, user)
+        except Exception as err:
+            print(err)
         return jsonify({'error': False, 'message': f""})
     return jsonify({'error': True, 'message': f""})
-@admin_api.route('/admin/site/postAddSite', methods=["POST"])
+# Added to Database and Processes
+@admin_api.route('/api/site/postAddSite', methods=["POST"])
 def postAddSite():
     if request.method == "POST":
         payload = request.get_json()['payload']
-        database_config = config()
         site_name = session['selected_site']
         user_id = session['user_id']
-        print(payload)
-        try:
-            with psycopg2.connect(**database_config) as conn:
-                user = postsqldb.LoginsTable.select_tuple(conn, (user_id,))
-                admin_user = (user['username'], user['password'], user['email'], user['row_type'])
-                site = postsqldb.SitesTable.Manager(
-                    payload['site_name'],
-                    admin_user,
-                    payload['default_zone'],
-                    payload['default_primary_location'],
-                    payload['site_description']
-                )
-                process.addSite(site_manager=site)
-        except Exception as error:
-            conn.rollback()
-            return jsonify({'error': True, 'message': error})
+        user = administration_database.selectLoginsTuple((user_id,))
+        payload['admin_user'] = (user['username'], user['password'], user['email'], user['row_type'])
+        administration_processes.addSite(payload)
         return jsonify({'error': False, 'message': f"Zone added to {site_name}."})
     return jsonify({'error': True, 'message': f"These was an error with adding this Zone to {site_name}."})

-@admin_api.route('/admin/site/postEditSite', methods=["POST"])
+@admin_api.route('/api/site/postEditSite', methods=["POST"])
 def postEditSite():
     if request.method == "POST":
         payload = request.get_json()['payload']
@@ -179,7 +147,7 @@ def postEditSite():
         return jsonify({'error': False, 'message': f"Site updated."})
     return jsonify({'error': True, 'message': f"These was an error with updating Site."})

-@admin_api.route('/admin/role/postAddRole', methods=["POST"])
+@admin_api.route('/api/role/postAddRole', methods=["POST"])
 def postAddRole():
     if request.method == "POST":
         payload = request.get_json()['payload']
@@ -200,7 +168,7 @@ def postAddRole():
         return jsonify({'error': False, 'message': f"Role added."})
     return jsonify({'error': True, 'message': f"These was an error with adding this Role."})

-@admin_api.route('/admin/role/postEditRole', methods=["POST"])
+@admin_api.route('/api/role/postEditRole', methods=["POST"])
 def postEditRole():
     if request.method == "POST":
         payload = request.get_json()['payload']
@@ -216,7 +184,7 @@ def postEditRole():
         return jsonify({'error': False, 'message': f"Role updated."})
     return jsonify({'error': True, 'message': f"These was an error with updating this Role."})

-@admin_api.route('/admin/user/postAddLogin', methods=["POST"])
+@admin_api.route('/api/user/postAddLogin', methods=["POST"])
 def postAddLogin():
     if request.method == "POST":
         payload = request.get_json()['payload']
@@ -237,7 +205,7 @@ def postAddLogin():
         return jsonify({'user': user, 'error': False, 'message': f"User added."})
     return jsonify({'user': user, 'error': True, 'message': f"These was an error with adding this User."})

-@admin_api.route('/admin/user/postEditLogin', methods=["POST"])
+@admin_api.route('/api/user/postEditLogin', methods=["POST"])
 def postEditLogin():
     if request.method == "POST":
         payload = request.get_json()['payload']
@@ -251,7 +219,7 @@ def postEditLogin():
         return jsonify({'error': False, 'message': f"User was Added Successfully."})
     return jsonify({'error': True, 'message': f"These was an error with adding this user."})

-@admin_api.route('/admin/user/postEditLoginPassword', methods=["POST"])
+@admin_api.route('/api/user/postEditLoginPassword', methods=["POST"])
 def postEditLoginPassword():
     if request.method == "POST":
         payload = request.get_json()['payload']
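
Not part of the commit, but a quick client-side sanity check of the relocated endpoints. A minimal sketch assuming the blueprint stays mounted under /admin (as the updated adminHandler.js fetches suggest), the app runs on localhost:5000, and a valid Flask session cookie is available; the response keys mirror the getSites route above.

import requests

resp = requests.get(
    "http://localhost:5000/admin/api/getSites",   # host/port are assumptions
    params={"page": 1, "limit": 10},
    cookies={"session": "<flask session cookie>"},  # issued at login
)
data = resp.json()
print(data["sites"], data["end"], data["message"])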

View File

@@ -1,18 +1,672 @@
-import postsqldb
+from application import postsqldb
 import psycopg2
 import config

-def selectLoginsUser(login_id):
-    database_config = config.config()
-    try:
-        with psycopg2.connect(**database_config) as conn:
-            with open("sql/SELECT/admin/selectLoginsUser.sql", "r") as file:
-                sql = file.read()
-            with conn.cursor() as cur:
-                cur.execute(sql, (login_id,))
-                user = cur.fetchone()
-                if user:
-                    user = postsqldb.tupleDictionaryFactory(cur.description, user)
-        return user
-    except Exception as error:
-        raise postsqldb.DatabaseError(error, login_id, sql)
+def getUser(conn, payload, convert=False):
+    """_summary_
+
+    Args:
+        conn (_type_): _description_
+        payload (tuple): (username, password)
+        convert (bool, optional): _description_. Defaults to False.
+
+    Raises:
+        DatabaseError: _description_
+
+    Returns:
+        _type_: _description_
+    """
+    user = ()
+    try:
+        with conn.cursor() as cur:
+            sql = f"SELECT * FROM logins WHERE username=%s;"
+            cur.execute(sql, (payload[0],))
+            rows = cur.fetchone()
+            if rows and rows[2] == payload[1] and convert:
+                user = tupleDictionaryFactory(cur.description, rows)
+            elif rows and rows[2] == payload[1] and not convert:
+                user = rows
+    except Exception as error:
+        raise DatabaseError(error, payload, sql)
+    return user
+
+def selectLoginsTuple(payload, convert=True, conn=None):
+    user = ()
+    self_conn = False
+    with open("application/administration/sql/selectLoginsUser.sql", "r") as file:
+        sql = file.read()
+    try:
+        if not conn:
+            database_config = config.config()
+            conn = psycopg2.connect(**database_config)
+            conn.autocommit = True
+            self_conn = True
+        with conn.cursor() as cur:
+            cur.execute(sql, payload)
+            rows = cur.fetchone()
+            if rows and convert:
+                user = postsqldb.tupleDictionaryFactory(cur.description, rows)
+            elif rows and not convert:
+                user = rows
+        if self_conn:
+            conn.close()
+        return user
+    except Exception as error:
+        raise postsqldb.DatabaseError(error, payload, sql)
def selectSitesTuple(payload, convert=True, conn=None):
record = []
self_conn = False
sql = f"SELECT * FROM sites WHERE id=%s;"
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchone()
if rows and convert:
record = postsqldb.tupleDictionaryFactory(cur.description, rows)
elif rows and not convert:
record = rows
if self_conn:
conn.close()
return record
except Exception as error:
raise postsqldb.DatabaseError(error, (), sql)
def selectSiteTupleByName(payload, convert=True, conn=None):
""" payload (tuple): (site_name,) """
site = ()
self_conn = False
select_site_sql = f"SELECT * FROM sites WHERE site_name = %s;"
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(select_site_sql, payload)
rows = cur.fetchone()
if rows and convert:
site = postsqldb.tupleDictionaryFactory(cur.description, rows)
elif rows and not convert:
site = rows
if self_conn:
conn.commit()
conn.close()
return site
except Exception as error:
raise postsqldb.DatabaseError(error, payload, select_site_sql)
def selectSitesTuples(convert=True, conn=None):
sites = []
self_conn = False
sql = f"SELECT * FROM sites;"
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql)
rows = cur.fetchall()
if rows and convert:
sites = [postsqldb.tupleDictionaryFactory(cur.description, row) for row in rows]
elif rows and not convert:
sites = rows
if self_conn:
conn.close()
return sites
except Exception as error:
raise postsqldb.DatabaseError(error, (), sql)
def selectRolesTuple(payload, convert=True, conn=None):
role = []
self_conn = False
sql = f"SELECT roles.*, row_to_json(sites.*) as site FROM roles LEFT JOIN sites ON sites.id = roles.site_id WHERE roles.id=%s;"
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchone()
if rows and convert:
role = postsqldb.tupleDictionaryFactory(cur.description, rows)
elif rows and not convert:
role = rows
if self_conn:
conn.close()
return role
except Exception as error:
raise postsqldb.DatabaseError(error, (), sql)
def selectRolesTupleBySite(payload, convert=True, conn=None):
""" payload (tuple): (site_id,) """
roles = ()
self_conn = False
select_roles_sql = f"SELECT * FROM roles WHERE site_id = %s;"
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(select_roles_sql, payload)
rows = cur.fetchall()
if rows and convert:
roles = [postsqldb.tupleDictionaryFactory(cur.description, role) for role in rows]
elif rows and not convert:
roles = rows
if self_conn:
conn.close()
return roles
except Exception as error:
raise postsqldb.DatabaseError(error, payload, select_roles_sql)
def paginateSitesTuples(payload, convert=True, conn=None):
""" payload (tuple): (limit, offset) """
recordsets = []
count = 0
self_conn = False
sql = f"SELECT * FROM sites LIMIT %s OFFSET %s;"
sql_count = f"SELECT COUNT(*) FROM sites;"
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchall()
if rows and convert:
recordsets = [postsqldb.tupleDictionaryFactory(cur.description, row) for row in rows]
elif rows and not convert:
recordsets = rows
cur.execute(sql_count)
count = cur.fetchone()[0]
if self_conn:
conn.close()
return recordsets, count
except Exception as error:
raise postsqldb.DatabaseError(error, (), sql)
def paginateRolesTuples(payload, convert=True, conn=None):
""" payload (tuple): (limit, offset) """
recordset = []
self_conn = False
sql = f"SELECT roles.*, row_to_json(sites.*) as site FROM roles LEFT JOIN sites ON sites.id = roles.site_id LIMIT %s OFFSET %s;"
sql_count = f"SELECT COUNT(*) FROM roles;"
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchall()
if rows and convert:
recordset = [postsqldb.tupleDictionaryFactory(cur.description, row) for row in rows]
elif rows and not convert:
recordset = rows
cur.execute(sql_count)
count = cur.fetchone()[0]
if self_conn:
conn.close()
return recordset, count
except Exception as error:
raise postsqldb.DatabaseError(error, payload, sql)
def paginateLoginsTuples(payload, convert=True, conn=None):
""" payload (tuple): (limit, offset) """
recordset = []
self_conn = False
sql = f"SELECT * FROM logins LIMIT %s OFFSET %s;"
sql_count = f"SELECT COUNT(*) FROM logins;"
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchall()
if rows and convert:
recordset = [postsqldb.tupleDictionaryFactory(cur.description, row) for row in rows]
elif rows and not convert:
recordset = rows
cur.execute(sql_count)
count = cur.fetchone()[0]
if self_conn:
conn.close()
return recordset, count
except Exception as error:
raise postsqldb.DatabaseError(error, payload, sql)
def insertSitesTuple(payload, convert=True, conn=None):
""" payload (tuple): (site_name[str], site_description[str], creation_date[timestamp], site_owner_id[int],
flags[dict], default_zone[str], default_auto_issue_location[str], default_primary_location[str]) """
site_tuple = ()
self_conn = False
with open(f"application/administration/sql/insertSitesTuple.sql", "r+") as file:
sql = file.read()
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchone()
if rows and convert:
site_tuple = postsqldb.tupleDictionaryFactory(cur.description, rows)
elif rows and not convert:
site_tuple = rows
if self_conn:
conn.commit()
conn.close()
return site_tuple
except Exception as error:
raise postsqldb.DatabaseError(error, payload, sql)
def insertRolesTuple(payload, convert=True, conn=None):
""" payload (tuple): (role_name[str], role_description[str], site_id[int], flags[jsonb]) """
role_tuple = ()
self_conn = False
with open(f"application/administration/sql/insertRolesTuple.sql", "r+") as file:
sql = file.read()
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchone()
if rows and convert:
role_tuple = postsqldb.tupleDictionaryFactory(cur.description, rows)
elif rows and not convert:
role_tuple = rows
if self_conn:
conn.commit()
conn.close()
return role_tuple
except Exception as error:
raise postsqldb.DatabaseError(error, payload, sql)
def insertZonesTuple(site, payload, convert=True, conn=None):
""" payload (tuple): (name[str],) """
zone = ()
self_conn = False
with open(f"application/administration/sql/insertZonesTuple.sql", "r+") as file:
sql = file.read().replace("%%site_name%%", site)
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchone()
if rows and convert:
zone = postsqldb.tupleDictionaryFactory(cur.description, rows)
elif rows and not convert:
zone = rows
if self_conn:
conn.commit()
conn.close()
return zone
except Exception as error:
raise postsqldb.DatabaseError(error, payload, sql)
def insertLocationsTuple(site, payload, convert=True, conn=None):
""" payload (tuple): (uuid[str], name[str], zone_id[int], items[jsonb]) """
location = ()
self_conn = False
with open(f"application/administration/sql/insertLocationsTuple.sql", "r+") as file:
sql = file.read().replace("%%site_name%%", site)
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchone()
if rows and convert:
location = postsqldb.tupleDictionaryFactory(cur.description, rows)
elif rows and not convert:
location = rows
if self_conn:
conn.commit()
conn.close()
return location
except Exception as error:
raise postsqldb.DatabaseError(error, payload, sql)
def insertVendorsTuple(site, payload, convert=True, conn=None):
""" payload (tuple): (vendor_name[str], vendor_address[str], creation_date[timestamp], created_by[int], phone_number[str]) """
vendor = ()
self_conn = False
with open(f"application/administration/sql/insertVendorsTuple.sql", "r+") as file:
sql = file.read().replace("%%site_name%%", site)
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchone()
if rows and convert:
vendor = postsqldb.tupleDictionaryFactory(cur.description, rows)
elif rows and not convert:
vendor = rows
if self_conn:
conn.commit()
conn.close()
return vendor
except Exception as error:
raise postsqldb.DatabaseError(error, payload, sql)
def insertBrandsTuple(site, payload, convert=True, conn=None):
""" payload (tuple): (brand_name[str], ) """
brand = ()
self_conn = False
with open(f"application/administration/sql/insertBrandsTuple.sql", "r+") as file:
sql = file.read().replace("%%site_name%%", site)
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchone()
if rows and convert:
brand = postsqldb.tupleDictionaryFactory(cur.description, rows)
elif rows and not convert:
brand = rows
if self_conn:
conn.commit()
conn.close()
return brand
except Exception as error:
raise postsqldb.DatabaseError(error, payload, sql)
def updateAddLoginSitesRoles(payload, convert=True, conn=None):
""" payload (tuple): (site_id, role_id, login_id) """
sql = f"UPDATE logins SET sites = sites || %s, site_roles = site_roles || %s WHERE id=%s RETURNING *;"
login = ()
self_conn = False
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchone()
if rows and convert:
login = postsqldb.tupleDictionaryFactory(cur.description, rows)
elif rows and not convert:
login = rows
if self_conn:
conn.commit()
conn.close()
return login
except Exception as error:
raise postsqldb.DatabaseError(error, payload, sql)
def updateSitesTuple(payload, convert=True, conn=None):
""" payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}} """
updated = ()
self_conn = False
set_clause, values = postsqldb.updateStringFactory(payload['update'])
values.append(payload['id'])
sql = f"UPDATE sites SET {set_clause} WHERE id=%s RETURNING *;"
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, values)
rows = cur.fetchone()
if rows and convert:
updated = postsqldb.tupleDictionaryFactory(cur.description, rows)
elif rows and not convert:
updated = rows
if self_conn:
conn.commit()
conn.close()
return updated
except Exception as error:
raise postsqldb.DatabaseError(error, payload, sql)
def updateUsersSites(payload, convert=True, conn=None):
""" payload: {'site_id',} """
self_conn = False
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
select_sql = f"SELECT logins.id FROM logins WHERE sites @> ARRAY[%s];"
with conn.cursor() as cur:
cur.execute(select_sql, (payload['site_id'], ))
user = tuple([row[0] for row in cur.fetchall()])
update_sql = f"UPDATE logins SET sites = array_remove(sites, %s) WHERE id = %s;"
with conn.cursor() as cur:
for user_id in user:
cur.execute(update_sql, (payload['site_id'], user_id))
if self_conn:
conn.commit()
conn.close()
except Exception as error:
raise error
def updateUsersRoles(payload, convert=True, conn=None):
""" payload: {'role_id',} """
self_conn = False
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
select_sql = f"SELECT logins.id FROM logins WHERE site_roles @> ARRAY[%s];"
with conn.cursor() as cur:
cur.execute(select_sql, (payload['role_id'], ))
users = tuple([row[0] for row in cur.fetchall()])
update_sql = f"UPDATE logins SET site_roles = array_remove(site_roles, %s) WHERE id = %s;"
with conn.cursor() as cur:
for user_id in users:
cur.execute(update_sql, (payload['role_id'], user_id))
if self_conn:
conn.commit()
conn.close()
except Exception as error:
raise error
def createTable(site, table, conn=None):
self_conn = False
with open(f"application/administration/sql/CREATE/{table}.sql", 'r') as file:
sql = file.read().replace("%%site_name%%", site)
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql)
if self_conn:
conn.commit()
conn.close()
except Exception as error:
raise postsqldb.DatabaseError(error, sql, table)
def dropTable(site, table, conn=None):
self_conn = False
with open(f"application/administration/sql/DROP/{table}.sql", 'r') as file:
sql = file.read().replace("%%site_name%%", site)
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql)
if self_conn:
conn.commit()
conn.close()
except Exception as error:
raise postsqldb.DatabaseError(error, sql, table)
def deleteSitesTuple(payload, convert=True, conn=None):
"""payload (tuple): (tuple_id, )"""
deleted = ()
self_conn = False
sql = f"WITH deleted_rows AS (DELETE FROM sites WHERE id IN ({','.join(['%s'] * len(payload))}) RETURNING *) SELECT * FROM deleted_rows;"
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchall()
if rows and convert:
deleted = [postsqldb.tupleDictionaryFactory(cur.description, r) for r in rows]
elif rows and not convert:
deleted = rows
if self_conn:
conn.commit()
conn.close()
return deleted
except Exception as error:
raise postsqldb.DatabaseError(error, payload, sql)
def deleteRolesTuple(payload, convert=True, conn=None):
"""payload (tuple): (tuple_id, )"""
deleted = ()
self_conn = False
sql = f"WITH deleted_rows AS (DELETE FROM roles WHERE id IN ({','.join(['%s'] * len(payload))}) RETURNING *) SELECT * FROM deleted_rows;"
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = True
self_conn = True
with conn.cursor() as cur:
cur.execute(sql, payload)
rows = cur.fetchall()
if rows and convert:
deleted = [postsqldb.tupleDictionaryFactory(cur.description, r) for r in rows]
elif rows and not convert:
deleted = rows
if self_conn:
conn.commit()
conn.close()
return deleted
except Exception as error:
raise postsqldb.DatabaseError(error, payload, sql)
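
The helpers above all repeat the same bookkeeping: open a connection when the caller did not pass one, remember that in self_conn, and close it at the end. A minimal sketch of how that pattern could be factored into a context manager; illustrative only, open_if_needed is a hypothetical name and not something this commit adds.

from contextlib import contextmanager
import psycopg2
import config

@contextmanager
def open_if_needed(conn=None, autocommit=True):
    # Yield the caller's connection untouched, or open one from config.config()
    # and close it when the block exits.
    if conn is not None:
        yield conn
        return
    new_conn = psycopg2.connect(**config.config())
    new_conn.autocommit = autocommit
    try:
        yield new_conn
    finally:
        new_conn.close()

# e.g. inside a helper:
#     with open_if_needed(conn) as c, c.cursor() as cur:
#         cur.execute(sql, payload)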

View File

@@ -0,0 +1,165 @@
import psycopg2
import datetime
import config
from application import postsqldb, database_payloads
from application.administration import administration_database
def dropSiteTables(conn, site_manager):
try:
for table in site_manager.drop_order:
administration_database.dropTable(site_manager.site_name, table, conn=conn)
with open("logs/process.log", "a+") as file:
file.write(f"{datetime.datetime.now()} --- INFO --- {table} DROPPED!\n")
except Exception as error:
raise error
def deleteSite(site, user, conn=None):
"""Uses a Site Manager to delete a site from the system.
Args:
site_manager (MyDataclasses.SiteManager):
Raises:
Exception:
"""
self_conn = False
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = False
self_conn = True
try:
admin_user = (user['username'], user['password'], user['email'], user['row_type'])
site_manager = database_payloads.SiteManager(
site['site_name'],
admin_user,
site['default_zone'],
site['default_primary_location'],
site['site_description']
)
roles = administration_database.selectRolesTupleBySite((site['id'],), conn=conn)
administration_database.deleteRolesTuple([role['id'] for role in roles], conn=conn)
dropSiteTables(conn, site_manager)
for role in roles:
administration_database.updateUsersRoles({'role_id': role['id']}, conn=conn)
administration_database.updateUsersSites({'site_id': site['id']}, conn=conn)
site = administration_database.deleteSitesTuple((site['id'], ), conn=conn)
if self_conn:
conn.commit()
conn.close()
except Exception as error:
with open("logs/process.log", "a+") as file:
file.write(f"{datetime.datetime.now()} --- ERROR --- {error}\n")
conn.rollback()
conn.close()
def addAdminUser(conn, site_manager, convert=True):
admin_user = ()
try:
sql = f"INSERT INTO logins (username, password, email, row_type) VALUES (%s, %s, %s, %s) ON CONFLICT (username) DO UPDATE SET username = excluded.username RETURNING *;"
with conn.cursor() as cur:
cur.execute(sql, site_manager.admin_user)
rows = cur.fetchone()
if rows and convert:
admin_user = postsqldb.tupleDictionaryFactory(cur.description, rows)
elif rows and not convert:
admin_user = rows
with open("logs/process.log", "a+") as file:
file.write(f"{datetime.datetime.now()} --- INFO --- Admin User Created!\n")
except Exception as error:
raise error
return admin_user
def setupSiteTables(conn, site_manager):
try:
for table in site_manager.create_order:
administration_database.createTable(site_manager.site_name, table, conn=conn)
with open("logs/process.log", "a+") as file:
file.write(f"{datetime.datetime.now()} --- INFO --- {table} Created!\n")
except Exception as error:
raise error
def addSite(payload, conn=None):
"""uses a Site Manager to add a site to the system
Args:
site_manager (MyDataclasses.SiteManager):
"""
self_conn = False
site_manager = database_payloads.SiteManager(
payload['site_name'],
payload['admin_user'],
payload['default_zone'],
payload['default_primary_location'],
payload['site_description']
)
try:
if not conn:
database_config = config.config()
conn = psycopg2.connect(**database_config)
conn.autocommit = False
self_conn = True
setupSiteTables(conn, site_manager)
admin_user = addAdminUser(conn, site_manager)
site = database_payloads.SitePayload(
site_name=site_manager.site_name,
site_description=site_manager.description,
site_owner_id=admin_user['id']
)
site = administration_database.insertSitesTuple(site.payload(), conn=conn)
role = database_payloads.RolePayload("Admin", f"Admin for {site['site_name']}", site['id'])
role = administration_database.insertRolesTuple(role.payload(), conn=conn)
admin_user = administration_database.updateAddLoginSitesRoles((site["id"], role["id"], admin_user["id"]), conn=conn)
default_zone = database_payloads.ZonesPayload(site_manager.default_zone)
default_zone = administration_database.insertZonesTuple(site["site_name"], default_zone.payload(), conn=conn)
uuid = f"{site_manager.default_zone}@{site_manager.default_location}"
default_location = database_payloads.LocationsPayload(uuid, site_manager.default_location, default_zone['id'])
default_location = administration_database.insertLocationsTuple(site['site_name'], default_location.payload(), conn=conn)
payload = {
'id': site['id'],
'update': {
'default_zone': default_zone['id'],
'default_auto_issue_location': default_location['id'],
'default_primary_location': default_location['id']
}
}
administration_database.updateSitesTuple(payload, conn=conn)
blank_vendor = database_payloads.VendorsPayload("None", admin_user['id'])
blank_brand = database_payloads.BrandsPayload("None")
blank_vendor = administration_database.insertVendorsTuple(site['site_name'], blank_vendor.payload(), conn=conn)
blank_brand = administration_database.insertBrandsTuple(site['site_name'], blank_brand.payload(), conn=conn)
if self_conn:
conn.commit()
conn.close()
except Exception as error:
with open("logs/process.log", "a+") as file:
file.write(f"{datetime.datetime.now()} --- ERROR --- {error}\n")
conn.rollback()
raise error
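
For reference, a hedged sketch of how the updated postAddSite route feeds this function: the payload keys below mirror the ones addSite reads, but the values are invented examples, not data from the commit.

from application.administration import administration_processes

payload = {
    'site_name': 'Demo Site',
    'site_description': 'Example site',
    'default_zone': 'Pantry',
    'default_primary_location': 'Shelf A',
    'admin_user': ('demo_admin', '<hashed password>', 'demo@example.com', 'user'),
}
try:
    administration_processes.addSite(payload)   # opens its own connection when conn is None
except Exception as err:
    print(err)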

View File

@@ -0,0 +1,4 @@
INSERT INTO %%site_name%%_brands
(name)
VALUES (%s)
RETURNING *;

View File

@@ -0,0 +1,4 @@
INSERT INTO %%site_name%%_locations
(uuid, name, zone_id)
VALUES (%s, %s, %s)
RETURNING *;

View File

@@ -0,0 +1,4 @@
INSERT INTO roles
(role_name, role_description, site_id, flags)
VALUES (%s, %s, %s, %s)
RETURNING *;

View File

@@ -0,0 +1,5 @@
INSERT INTO sites
(site_name, site_description, creation_date, site_owner_id, flags, default_zone,
default_auto_issue_location, default_primary_location)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
RETURNING *;
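
A minimal sketch of binding the eight parameters this statement expects, in the order the insertSitesTuple docstring lists them; the values are examples only, not code from the commit.

import datetime
import json
import psycopg2
import config

params = ("Demo Site", "Example site", datetime.datetime.now(), 1, json.dumps({}),
          "Pantry", "Shelf A", "Shelf A")
with psycopg2.connect(**config.config()) as conn, conn.cursor() as cur:
    with open("application/administration/sql/insertSitesTuple.sql", "r") as file:
        cur.execute(file.read(), params)
    print(cur.fetchone())   # the inserted sites row, courtesy of RETURNING *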

View File

@@ -0,0 +1,4 @@
INSERT INTO %%site_name%%_vendors
(vendor_name, vendor_address, creation_date, created_by, phone_number)
VALUES (%s, %s, %s, %s, %s)
RETURNING *;

View File

@@ -0,0 +1,4 @@
INSERT INTO %%site_name%%_zones
(name, description)
VALUES (%s, %s)
RETURNING *;

View File

@@ -0,0 +1,16 @@
WITH passed_id AS (SELECT %s AS passed_id),
cte_login AS (
SELECT logins.* FROM logins
WHERE logins.id = (SELECT passed_id FROM passed_id)
),
cte_roles AS (
SELECT roles.*,
row_to_json(sites.*) AS site
FROM roles
LEFT JOIN sites ON sites.id = roles.site_id
WHERE roles.id = ANY(SELECT unnest(site_roles) FROM cte_login)
)
SELECT login.*,
(SELECT COALESCE(array_agg(row_to_json(r)), '{}') FROM cte_roles r) AS site_roles
FROM cte_login login;
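
A hedged sketch of running this query the way selectLoginsTuple does, written as a hypothetical standalone helper (fetch_login_with_roles is not a name used in the commit): the single %s is a login id, and the one returned row carries the login columns plus an aggregated site_roles JSON array.

from application import postsqldb

def fetch_login_with_roles(conn, login_id):
    # Read the CTE above, execute it with the id, and convert the row to a dict.
    with open("application/administration/sql/selectLoginsUser.sql", "r") as file:
        sql = file.read()
    with conn.cursor() as cur:
        cur.execute(sql, (login_id,))
        row = cur.fetchone()
        return postsqldb.tupleDictionaryFactory(cur.description, row) if row else ()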

View File

@@ -26,7 +26,6 @@ document.addEventListener('DOMContentLoaded', async function() {
     await replenishRolesTable(roles)
     let logins = await fetchLogins()
-    console.log(logins)
     await updateLoginsPagination()
     await replenishLoginsTable(logins)
 })
@@ -47,7 +46,7 @@ var sites_current_page = 1
 var sites_end_page = 10
 var sites_limit = 25
 async function fetchSites(){
-    const url = new URL('/admin/getSites', window.location.origin)
+    const url = new URL('/admin/api/getSites', window.location.origin)
     url.searchParams.append('page', sites_current_page)
     url.searchParams.append('limit', sites_limit)
     const response = await fetch(url)
@@ -177,7 +176,7 @@ async function postDeleteSite(site_id, item_name){
     let valid = document.getElementById('delete_input')
     if(valid.value==item_name){
         valid.classList.remove('uk-form-danger')
-        const response = await fetch(`/admin/site/postDeleteSite`, {
+        const response = await fetch(`/admin/api/site/postDeleteSite`, {
             method: 'POST',
             headers: {
                 'Content-Type': 'application/json',
@@ -215,7 +214,7 @@ var roles_current_page = 1
 var roles_end_page = 10
 var roles_limit = 25
 async function fetchRoles(){
-    const url = new URL('/admin/getRoles', window.location.origin)
+    const url = new URL('/admin/api/getRoles', window.location.origin)
     url.searchParams.append('page', roles_current_page)
     url.searchParams.append('limit', roles_limit)
     const response = await fetch(url)
@@ -349,7 +348,7 @@ var logins_current_page = 1
 var logins_end_page = 10
 var logins_limit = 25
 async function fetchLogins(){
-    const url = new URL('/admin/getLogins', window.location.origin)
+    const url = new URL('/admin/api/getLogins', window.location.origin)
     url.searchParams.append('page', logins_current_page)
     url.searchParams.append('limit', logins_limit)
     const response = await fetch(url)

View File

@@ -277,5 +277,5 @@
     <script type="text/javascript" src="{{ ASSET_URL }}"></script>
     {% endassets %}
     <script>const session = {{session|tojson}}</script>
-    <script src="{{ url_for('static', filename='handlers/adminHandler.js') }}"></script>
+    <script src="{{ url_for('admin_api.static', filename='js/adminHandler.js') }}"></script>
 </html>

View File

@@ -207,7 +207,7 @@
         default_primary_location: document.getElementById('new_default_primary_location').value
     }
-    const response = await fetch(`/admin/site/postAddSite`, {
+    const response = await fetch(`/admin/api/site/postAddSite`, {
         method: 'POST',
         headers: {
             'Content-Type': 'application/json',
@@ -226,7 +226,7 @@
         site_description: document.getElementById('site_description').value}
     }
-    const response = await fetch(`/admin/site/postEditSite`, {
+    const response = await fetch(`/admin/api/site/postEditSite`, {
         method: 'POST',
         headers: {
             'Content-Type': 'application/json',

View File

@@ -359,6 +359,9 @@ class SitePayload:
             self.default_primary_location
         )
+
+    def get_dictionary(self):
+        return self.__dict__
 #DONE
 @dataclass
 class RolePayload:
@@ -375,6 +378,53 @@ class RolePayload:
             json.dumps(self.flags)
         )
def get_dictionary(self):
return self.__dict__
@dataclass
class LoginsPayload:
username:str
password:str
email: str
row_type: str
system_admin: bool = False
flags: dict = field(default_factory=dict)
favorites: dict = field(default_factory=dict)
unseen_pantry_items: list = field(default_factory=list)
unseen_groups: list = field(default_factory=list)
unseen_shopping_lists: list = field(default_factory=list)
unseen_recipes: list = field(default_factory=list)
seen_pantry_items: list = field(default_factory=list)
seen_groups: list = field(default_factory=list)
seen_shopping_lists: list = field(default_factory=list)
seen_recipes: list = field(default_factory=list)
sites: list = field(default_factory=list)
site_roles: list = field(default_factory=list)
def payload(self):
return (
self.username,
self.password,
self.email,
json.dumps(self.favorites),
lst2pgarr(self.unseen_pantry_items),
lst2pgarr(self.unseen_groups),
lst2pgarr(self.unseen_shopping_lists),
lst2pgarr(self.unseen_recipes),
lst2pgarr(self.seen_pantry_items),
lst2pgarr(self.seen_groups),
lst2pgarr(self.seen_shopping_lists),
lst2pgarr(self.seen_recipes),
lst2pgarr(self.sites),
lst2pgarr(self.site_roles),
self.system_admin,
json.dumps(self.flags),
self.row_type
)
def get_dictionary(self):
return self.__dict__
 @dataclass
 class ItemLocationPayload:
     part_id: int
@@ -424,6 +474,77 @@ class ConversionPayload:
             self.conv_factor
         )
@dataclass
class ZonesPayload:
name: str
description: str = ""
def __post_init__(self):
if not isinstance(self.name, str):
raise TypeError(f"Zone name should be of type str; not {type(self.name)}")
def payload(self):
return (
self.name,
self.description,
)
@dataclass
class LocationsPayload:
uuid: str
name: str
zone_id: int
def __post_init__(self):
if not isinstance(self.uuid, str):
raise TypeError(f"uuid must be of type str; not {type(self.uuid)}")
if not isinstance(self.name, str):
raise TypeError(f"Location name must be of type str; not {type(self.name)}")
if not isinstance(self.zone_id, int):
raise TypeError(f"zone_id must be of type str; not {type(self.zone_id)}")
def payload(self):
return (
self.uuid,
self.name,
self.zone_id
)
@dataclass
class VendorsPayload:
vendor_name: str
created_by: int
vendor_address: str = ""
creation_date: datetime.datetime = field(init=False)
phone_number: str = ""
def __post_init__(self):
if not isinstance(self.vendor_name, str):
raise TypeError(f"vendor_name should be of type str; not {type(self.vendor_name)}")
self.creation_date = datetime.datetime.now()
def payload(self):
return (
self.vendor_name,
self.vendor_address,
self.creation_date,
self.created_by,
self.phone_number
)
@dataclass
class BrandsPayload:
name: str
def __post_init__(self):
if not isinstance(self.name, str):
return TypeError(f"brand name should be of type str; not {type(self.name)}")
def payload(self):
return (
self.name,
)
 @dataclass
 class SiteManager:
     site_name: str
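
For reference, a hedged sketch of the tuples the new payload dataclasses produce; the values are invented and the dp alias is just for brevity.

from application import database_payloads as dp

zone = dp.ZonesPayload("Pantry")
print(zone.payload())        # ('Pantry', '')

location = dp.LocationsPayload("Pantry@Shelf A", "Shelf A", 1)
print(location.payload())    # ('Pantry@Shelf A', 'Shelf A', 1)

vendor = dp.VendorsPayload("None", created_by=1)
print(vendor.payload())      # (vendor_name, vendor_address, creation_date, created_by, phone_number)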

View File

@@ -1980,3 +1980,29 @@
 2025-08-02 10:08:28.632959 --- ERROR --- DatabaseError(message='duplicate key value violates unique constraint "test_logistics_info_barcode_key"DETAIL: Key (barcode)=(%041789001314%) already exists.',
 payload=('%041789001314%', 1, 1, 1, 1),
 sql='INSERT INTO test_logistics_info(barcode, primary_location, primary_zone, auto_issue_location, auto_issue_zone) VALUES (%s, %s, %s, %s, %s) RETURNING *;')
2025-08-02 18:17:38.668622 --- ERROR --- DatabaseError(message='table "testa_sku_prefix" does not exist',
payload=DROP TABLE testa_sku_prefix CASCADE;,
sql='sku_prefix')
2025-08-02 18:18:33.853991 --- ERROR --- DatabaseError(message='table "testa_sku_prefix" does not exist',
payload=DROP TABLE testa_sku_prefix CASCADE;,
sql='sku_prefix')
2025-08-02 18:21:15.792255 --- ERROR --- DatabaseError(message='table "tesc_sku_prefix" does not exist',
payload=DROP TABLE tesc_sku_prefix CASCADE;,
sql='sku_prefix')
2025-08-02 18:22:19.192251 --- ERROR --- DatabaseError(message='table "tesc_sku_prefix" does not exist',
payload=DROP TABLE tesc_sku_prefix CASCADE;,
sql='sku_prefix')
2025-08-02 18:22:55.826884 --- ERROR --- DatabaseError(message='table "tesc_sku_prefix" does not exist',
payload=DROP TABLE tesc_sku_prefix CASCADE;,
sql='sku_prefix')
2025-08-02 18:29:18.729209 --- ERROR --- DatabaseError(message='syntax error at or near "Site_cost_layers"LINE 1: CREATE TABLE IF NOT EXISTS Test Site_cost_layers ( ^',
payload=CREATE TABLE IF NOT EXISTS Test Site_cost_layers (
id SERIAL PRIMARY KEY,
aquisition_date TIMESTAMP NOT NULL,
quantity FLOAT8 NOT NULL,
cost FLOAT8 NOT NULL,
currency_type VARCHAR(16) NOT NULL,
expires TIMESTAMP,
vendor INTEGER DEFAULT 0
);,
sql='cost_layers')

logs/process.log (new file, 373 lines)
View File

@@ -0,0 +1,373 @@
2025-08-02 17:36:03.821628 --- ERROR --- name 'database' is not defined
2025-08-02 17:37:08.215424 --- ERROR --- name 'database' is not defined
2025-08-02 17:37:30.670854 --- ERROR --- name 'database' is not defined
2025-08-02 17:38:43.505273 --- ERROR --- name 'database' is not defined
2025-08-02 17:42:54.590907 --- INFO --- logins Created!
2025-08-02 17:42:54.598471 --- INFO --- sites Created!
2025-08-02 17:42:54.605416 --- INFO --- roles Created!
2025-08-02 17:42:54.611444 --- INFO --- units Created!
2025-08-02 17:42:54.627265 --- INFO --- cost_layers Created!
2025-08-02 17:42:54.635985 --- INFO --- linked_items Created!
2025-08-02 17:42:54.643245 --- INFO --- brands Created!
2025-08-02 17:42:54.651461 --- INFO --- food_info Created!
2025-08-02 17:42:54.660454 --- INFO --- item_info Created!
2025-08-02 17:42:54.668702 --- INFO --- zones Created!
2025-08-02 17:42:54.677999 --- INFO --- locations Created!
2025-08-02 17:42:54.686410 --- INFO --- logistics_info Created!
2025-08-02 17:42:54.694318 --- INFO --- transactions Created!
2025-08-02 17:42:54.704109 --- INFO --- item Created!
2025-08-02 17:42:54.712218 --- INFO --- vendors Created!
2025-08-02 17:42:54.721027 --- INFO --- groups Created!
2025-08-02 17:42:54.730414 --- INFO --- group_items Created!
2025-08-02 17:42:54.739860 --- INFO --- receipts Created!
2025-08-02 17:42:54.748305 --- INFO --- receipt_items Created!
2025-08-02 17:42:54.756246 --- INFO --- recipes Created!
2025-08-02 17:42:54.765949 --- INFO --- recipe_items Created!
2025-08-02 17:42:54.775736 --- INFO --- shopping_lists Created!
2025-08-02 17:42:54.785212 --- INFO --- shopping_list_items Created!
2025-08-02 17:42:54.795440 --- INFO --- item_locations Created!
2025-08-02 17:42:54.803953 --- INFO --- conversions Created!
2025-08-02 17:42:54.808022 --- INFO --- Admin User Created!
2025-08-02 17:43:14.105719 --- INFO --- logins Created!
2025-08-02 17:43:14.109485 --- INFO --- sites Created!
2025-08-02 17:43:14.113017 --- INFO --- roles Created!
2025-08-02 17:43:14.116541 --- INFO --- units Created!
2025-08-02 17:43:14.122976 --- INFO --- cost_layers Created!
2025-08-02 17:43:14.129403 --- INFO --- linked_items Created!
2025-08-02 17:43:14.134059 --- INFO --- brands Created!
2025-08-02 17:43:14.139917 --- INFO --- food_info Created!
2025-08-02 17:43:14.146866 --- INFO --- item_info Created!
2025-08-02 17:43:14.153027 --- INFO --- zones Created!
2025-08-02 17:43:14.158782 --- INFO --- locations Created!
2025-08-02 17:43:14.164872 --- INFO --- logistics_info Created!
2025-08-02 17:43:14.170766 --- INFO --- transactions Created!
2025-08-02 17:43:14.178190 --- INFO --- item Created!
2025-08-02 17:43:14.183717 --- INFO --- vendors Created!
2025-08-02 17:43:14.189851 --- INFO --- groups Created!
2025-08-02 17:43:14.197086 --- INFO --- group_items Created!
2025-08-02 17:43:14.203797 --- INFO --- receipts Created!
2025-08-02 17:43:14.209943 --- INFO --- receipt_items Created!
2025-08-02 17:43:14.215472 --- INFO --- recipes Created!
2025-08-02 17:43:14.222578 --- INFO --- recipe_items Created!
2025-08-02 17:43:14.229450 --- INFO --- shopping_lists Created!
2025-08-02 17:43:14.237020 --- INFO --- shopping_list_items Created!
2025-08-02 17:43:14.244526 --- INFO --- item_locations Created!
2025-08-02 17:43:14.250619 --- INFO --- conversions Created!
2025-08-02 17:43:14.254671 --- INFO --- Admin User Created!
2025-08-02 18:17:38.525725 --- INFO --- item_info DROPPED!
2025-08-02 18:17:38.534293 --- INFO --- items DROPPED!
2025-08-02 18:17:38.540958 --- INFO --- cost_layers DROPPED!
2025-08-02 18:17:38.547807 --- INFO --- linked_items DROPPED!
2025-08-02 18:17:38.554534 --- INFO --- transactions DROPPED!
2025-08-02 18:17:38.560954 --- INFO --- brands DROPPED!
2025-08-02 18:17:38.567781 --- INFO --- food_info DROPPED!
2025-08-02 18:17:38.574657 --- INFO --- logistics_info DROPPED!
2025-08-02 18:17:38.581696 --- INFO --- zones DROPPED!
2025-08-02 18:17:38.588647 --- INFO --- locations DROPPED!
2025-08-02 18:17:38.595646 --- INFO --- vendors DROPPED!
2025-08-02 18:17:38.602703 --- INFO --- group_items DROPPED!
2025-08-02 18:17:38.609603 --- INFO --- groups DROPPED!
2025-08-02 18:17:38.616542 --- INFO --- receipt_items DROPPED!
2025-08-02 18:17:38.623379 --- INFO --- receipts DROPPED!
2025-08-02 18:17:38.630435 --- INFO --- recipe_items DROPPED!
2025-08-02 18:17:38.637322 --- INFO --- recipes DROPPED!
2025-08-02 18:17:38.644400 --- INFO --- shopping_list_items DROPPED!
2025-08-02 18:17:38.651263 --- INFO --- shopping_lists DROPPED!
2025-08-02 18:17:38.658144 --- INFO --- item_locations DROPPED!
2025-08-02 18:17:38.664832 --- INFO --- conversions DROPPED!
2025-08-02 18:17:38.671963 --- ERROR --- DatabaseError(message='table "testa_sku_prefix" does not exist', payload=DROP TABLE testa_sku_prefix CASCADE;, sql='sku_prefix')
2025-08-02 18:18:33.762077 --- INFO --- item_info DROPPED!
2025-08-02 18:18:33.769451 --- INFO --- items DROPPED!
2025-08-02 18:18:33.773873 --- INFO --- cost_layers DROPPED!
2025-08-02 18:18:33.778329 --- INFO --- linked_items DROPPED!
2025-08-02 18:18:33.783414 --- INFO --- transactions DROPPED!
2025-08-02 18:18:33.787465 --- INFO --- brands DROPPED!
2025-08-02 18:18:33.791576 --- INFO --- food_info DROPPED!
2025-08-02 18:18:33.796560 --- INFO --- logistics_info DROPPED!
2025-08-02 18:18:33.801151 --- INFO --- zones DROPPED!
2025-08-02 18:18:33.805538 --- INFO --- locations DROPPED!
2025-08-02 18:18:33.809589 --- INFO --- vendors DROPPED!
2025-08-02 18:18:33.813623 --- INFO --- group_items DROPPED!
2025-08-02 18:18:33.817500 --- INFO --- groups DROPPED!
2025-08-02 18:18:33.821491 --- INFO --- receipt_items DROPPED!
2025-08-02 18:18:33.825404 --- INFO --- receipts DROPPED!
2025-08-02 18:18:33.829525 --- INFO --- recipe_items DROPPED!
2025-08-02 18:18:33.833380 --- INFO --- recipes DROPPED!
2025-08-02 18:18:33.837410 --- INFO --- shopping_list_items DROPPED!
2025-08-02 18:18:33.841363 --- INFO --- shopping_lists DROPPED!
2025-08-02 18:18:33.845368 --- INFO --- item_locations DROPPED!
2025-08-02 18:18:33.849277 --- INFO --- conversions DROPPED!
2025-08-02 18:18:33.857471 --- ERROR --- DatabaseError(message='table "testa_sku_prefix" does not exist', payload=DROP TABLE testa_sku_prefix CASCADE;, sql='sku_prefix')
2025-08-02 18:21:04.309258 --- INFO --- logins Created!
2025-08-02 18:21:04.318315 --- INFO --- sites Created!
2025-08-02 18:21:04.322035 --- INFO --- roles Created!
2025-08-02 18:21:04.325716 --- INFO --- units Created!
2025-08-02 18:21:04.332729 --- INFO --- cost_layers Created!
2025-08-02 18:21:04.340251 --- INFO --- linked_items Created!
2025-08-02 18:21:04.345274 --- INFO --- brands Created!
2025-08-02 18:21:04.351605 --- INFO --- food_info Created!
2025-08-02 18:21:04.358995 --- INFO --- item_info Created!
2025-08-02 18:21:04.366026 --- INFO --- zones Created!
2025-08-02 18:21:04.372607 --- INFO --- locations Created!
2025-08-02 18:21:04.379748 --- INFO --- logistics_info Created!
2025-08-02 18:21:04.386304 --- INFO --- transactions Created!
2025-08-02 18:21:04.394657 --- INFO --- item Created!
2025-08-02 18:21:04.400126 --- INFO --- vendors Created!
2025-08-02 18:21:04.407038 --- INFO --- groups Created!
2025-08-02 18:21:04.414685 --- INFO --- group_items Created!
2025-08-02 18:21:04.421843 --- INFO --- receipts Created!
2025-08-02 18:21:04.428601 --- INFO --- receipt_items Created!
2025-08-02 18:21:04.434706 --- INFO --- recipes Created!
2025-08-02 18:21:04.442442 --- INFO --- recipe_items Created!
2025-08-02 18:21:04.449400 --- INFO --- shopping_lists Created!
2025-08-02 18:21:04.457406 --- INFO --- shopping_list_items Created!
2025-08-02 18:21:04.465015 --- INFO --- item_locations Created!
2025-08-02 18:21:04.471077 --- INFO --- conversions Created!
2025-08-02 18:21:04.475386 --- INFO --- Admin User Created!
2025-08-02 18:21:15.697984 --- INFO --- item_info DROPPED!
2025-08-02 18:21:15.707335 --- INFO --- items DROPPED!
2025-08-02 18:21:15.711647 --- INFO --- cost_layers DROPPED!
2025-08-02 18:21:15.715884 --- INFO --- linked_items DROPPED!
2025-08-02 18:21:15.720256 --- INFO --- transactions DROPPED!
2025-08-02 18:21:15.724344 --- INFO --- brands DROPPED!
2025-08-02 18:21:15.728559 --- INFO --- food_info DROPPED!
2025-08-02 18:21:15.733749 --- INFO --- logistics_info DROPPED!
2025-08-02 18:21:15.738026 --- INFO --- zones DROPPED!
2025-08-02 18:21:15.742003 --- INFO --- locations DROPPED!
2025-08-02 18:21:15.746147 --- INFO --- vendors DROPPED!
2025-08-02 18:21:15.750367 --- INFO --- group_items DROPPED!
2025-08-02 18:21:15.754295 --- INFO --- groups DROPPED!
2025-08-02 18:21:15.758458 --- INFO --- receipt_items DROPPED!
2025-08-02 18:21:15.762403 --- INFO --- receipts DROPPED!
2025-08-02 18:21:15.766757 --- INFO --- recipe_items DROPPED!
2025-08-02 18:21:15.770751 --- INFO --- recipes DROPPED!
2025-08-02 18:21:15.775030 --- INFO --- shopping_list_items DROPPED!
2025-08-02 18:21:15.779021 --- INFO --- shopping_lists DROPPED!
2025-08-02 18:21:15.783189 --- INFO --- item_locations DROPPED!
2025-08-02 18:21:15.787297 --- INFO --- conversions DROPPED!
2025-08-02 18:21:15.795686 --- ERROR --- DatabaseError(message='table "tesc_sku_prefix" does not exist', payload=DROP TABLE tesc_sku_prefix CASCADE;, sql='sku_prefix')
2025-08-02 18:21:39.494823 --- INFO --- logins Created!
2025-08-02 18:21:39.501922 --- INFO --- sites Created!
2025-08-02 18:21:39.506715 --- INFO --- roles Created!
2025-08-02 18:21:39.510848 --- INFO --- units Created!
2025-08-02 18:21:39.517874 --- INFO --- cost_layers Created!
2025-08-02 18:21:39.524903 --- INFO --- linked_items Created!
2025-08-02 18:21:39.529872 --- INFO --- brands Created!
2025-08-02 18:21:39.536513 --- INFO --- food_info Created!
2025-08-02 18:21:39.543806 --- INFO --- item_info Created!
2025-08-02 18:21:39.551659 --- INFO --- zones Created!
2025-08-02 18:21:39.558627 --- INFO --- locations Created!
2025-08-02 18:21:39.566378 --- INFO --- logistics_info Created!
2025-08-02 18:21:39.573581 --- INFO --- transactions Created!
2025-08-02 18:21:39.582850 --- INFO --- item Created!
2025-08-02 18:21:39.586437 --- INFO --- vendors Created!
2025-08-02 18:21:39.593704 --- INFO --- groups Created!
2025-08-02 18:21:39.601914 --- INFO --- group_items Created!
2025-08-02 18:21:39.609446 --- INFO --- receipts Created!
2025-08-02 18:21:39.616274 --- INFO --- receipt_items Created!
2025-08-02 18:21:39.622822 --- INFO --- recipes Created!
2025-08-02 18:21:39.631045 --- INFO --- recipe_items Created!
2025-08-02 18:21:39.638724 --- INFO --- shopping_lists Created!
2025-08-02 18:21:39.646926 --- INFO --- shopping_list_items Created!
2025-08-02 18:21:39.654793 --- INFO --- item_locations Created!
2025-08-02 18:21:39.661064 --- INFO --- conversions Created!
2025-08-02 18:21:39.665273 --- INFO --- Admin User Created!
2025-08-02 18:22:19.097726 --- INFO --- item_info DROPPED!
2025-08-02 18:22:19.106429 --- INFO --- items DROPPED!
2025-08-02 18:22:19.110619 --- INFO --- cost_layers DROPPED!
2025-08-02 18:22:19.114650 --- INFO --- linked_items DROPPED!
2025-08-02 18:22:19.118889 --- INFO --- transactions DROPPED!
2025-08-02 18:22:19.122806 --- INFO --- brands DROPPED!
2025-08-02 18:22:19.126903 --- INFO --- food_info DROPPED!
2025-08-02 18:22:19.131615 --- INFO --- logistics_info DROPPED!
2025-08-02 18:22:19.135869 --- INFO --- zones DROPPED!
2025-08-02 18:22:19.139903 --- INFO --- locations DROPPED!
2025-08-02 18:22:19.144155 --- INFO --- vendors DROPPED!
2025-08-02 18:22:19.148457 --- INFO --- group_items DROPPED!
2025-08-02 18:22:19.152510 --- INFO --- groups DROPPED!
2025-08-02 18:22:19.156745 --- INFO --- receipt_items DROPPED!
2025-08-02 18:22:19.161235 --- INFO --- receipts DROPPED!
2025-08-02 18:22:19.166118 --- INFO --- recipe_items DROPPED!
2025-08-02 18:22:19.170322 --- INFO --- recipes DROPPED!
2025-08-02 18:22:19.174853 --- INFO --- shopping_list_items DROPPED!
2025-08-02 18:22:19.179479 --- INFO --- shopping_lists DROPPED!
2025-08-02 18:22:19.183063 --- INFO --- item_locations DROPPED!
2025-08-02 18:22:19.187254 --- INFO --- conversions DROPPED!
2025-08-02 18:22:19.195739 --- ERROR --- DatabaseError(message='table "tesc_sku_prefix" does not exist', payload=DROP TABLE tesc_sku_prefix CASCADE;, sql='sku_prefix')
2025-08-02 18:22:55.730401 --- INFO --- item_info DROPPED!
2025-08-02 18:22:55.739583 --- INFO --- items DROPPED!
2025-08-02 18:22:55.743911 --- INFO --- cost_layers DROPPED!
2025-08-02 18:22:55.748244 --- INFO --- linked_items DROPPED!
2025-08-02 18:22:55.752754 --- INFO --- transactions DROPPED!
2025-08-02 18:22:55.756804 --- INFO --- brands DROPPED!
2025-08-02 18:22:55.760914 --- INFO --- food_info DROPPED!
2025-08-02 18:22:55.765817 --- INFO --- logistics_info DROPPED!
2025-08-02 18:22:55.770411 --- INFO --- zones DROPPED!
2025-08-02 18:22:55.774706 --- INFO --- locations DROPPED!
2025-08-02 18:22:55.779111 --- INFO --- vendors DROPPED!
2025-08-02 18:22:55.783646 --- INFO --- group_items DROPPED!
2025-08-02 18:22:55.787896 --- INFO --- groups DROPPED!
2025-08-02 18:22:55.792318 --- INFO --- receipt_items DROPPED!
2025-08-02 18:22:55.796560 --- INFO --- receipts DROPPED!
2025-08-02 18:22:55.801016 --- INFO --- recipe_items DROPPED!
2025-08-02 18:22:55.805185 --- INFO --- recipes DROPPED!
2025-08-02 18:22:55.809664 --- INFO --- shopping_list_items DROPPED!
2025-08-02 18:22:55.813894 --- INFO --- shopping_lists DROPPED!
2025-08-02 18:22:55.818120 --- INFO --- item_locations DROPPED!
2025-08-02 18:22:55.822243 --- INFO --- conversions DROPPED!
2025-08-02 18:22:55.830589 --- ERROR --- DatabaseError(message='table "tesc_sku_prefix" does not exist', payload=DROP TABLE tesc_sku_prefix CASCADE;, sql='sku_prefix')
2025-08-02 18:24:27.267149 --- INFO --- logins Created!
2025-08-02 18:24:27.274683 --- INFO --- sites Created!
2025-08-02 18:24:27.279507 --- INFO --- roles Created!
2025-08-02 18:24:27.283605 --- INFO --- units Created!
2025-08-02 18:24:27.291352 --- INFO --- cost_layers Created!
2025-08-02 18:24:27.298690 --- INFO --- linked_items Created!
2025-08-02 18:24:27.304349 --- INFO --- brands Created!
2025-08-02 18:24:27.310778 --- INFO --- food_info Created!
2025-08-02 18:24:27.318536 --- INFO --- item_info Created!
2025-08-02 18:24:27.325567 --- INFO --- zones Created!
2025-08-02 18:24:27.332454 --- INFO --- locations Created!
2025-08-02 18:24:27.339921 --- INFO --- logistics_info Created!
2025-08-02 18:24:27.346686 --- INFO --- transactions Created!
2025-08-02 18:24:27.355421 --- INFO --- item Created!
2025-08-02 18:24:27.361561 --- INFO --- vendors Created!
2025-08-02 18:24:27.369016 --- INFO --- groups Created!
2025-08-02 18:24:27.377229 --- INFO --- group_items Created!
2025-08-02 18:24:27.384953 --- INFO --- receipts Created!
2025-08-02 18:24:27.391473 --- INFO --- receipt_items Created!
2025-08-02 18:24:27.397204 --- INFO --- recipes Created!
2025-08-02 18:24:27.404108 --- INFO --- recipe_items Created!
2025-08-02 18:24:27.410341 --- INFO --- shopping_lists Created!
2025-08-02 18:24:27.417144 --- INFO --- shopping_list_items Created!
2025-08-02 18:24:27.424318 --- INFO --- item_locations Created!
2025-08-02 18:24:27.430403 --- INFO --- conversions Created!
2025-08-02 18:24:27.437113 --- INFO --- sku_prefix Created!
2025-08-02 18:24:27.441214 --- INFO --- Admin User Created!
2025-08-02 18:24:48.552044 --- INFO --- item_info DROPPED!
2025-08-02 18:24:48.558982 --- INFO --- items DROPPED!
2025-08-02 18:24:48.563515 --- INFO --- cost_layers DROPPED!
2025-08-02 18:24:48.568029 --- INFO --- linked_items DROPPED!
2025-08-02 18:24:48.572525 --- INFO --- transactions DROPPED!
2025-08-02 18:24:48.576655 --- INFO --- brands DROPPED!
2025-08-02 18:24:48.580796 --- INFO --- food_info DROPPED!
2025-08-02 18:24:48.585973 --- INFO --- logistics_info DROPPED!
2025-08-02 18:24:48.590455 --- INFO --- zones DROPPED!
2025-08-02 18:24:48.594769 --- INFO --- locations DROPPED!
2025-08-02 18:24:48.599463 --- INFO --- vendors DROPPED!
2025-08-02 18:24:48.603895 --- INFO --- group_items DROPPED!
2025-08-02 18:24:48.608092 --- INFO --- groups DROPPED!
2025-08-02 18:24:48.612422 --- INFO --- receipt_items DROPPED!
2025-08-02 18:24:48.616603 --- INFO --- receipts DROPPED!
2025-08-02 18:24:48.621086 --- INFO --- recipe_items DROPPED!
2025-08-02 18:24:48.625294 --- INFO --- recipes DROPPED!
2025-08-02 18:24:48.629821 --- INFO --- shopping_list_items DROPPED!
2025-08-02 18:24:48.634125 --- INFO --- shopping_lists DROPPED!
2025-08-02 18:24:48.638433 --- INFO --- item_locations DROPPED!
2025-08-02 18:24:48.642509 --- INFO --- conversions DROPPED!
2025-08-02 18:24:48.646827 --- INFO --- sku_prefix DROPPED!
2025-08-02 18:26:20.489233 --- INFO --- item_info DROPPED!
2025-08-02 18:26:20.495014 --- INFO --- items DROPPED!
2025-08-02 18:26:20.499055 --- INFO --- cost_layers DROPPED!
2025-08-02 18:26:20.503107 --- INFO --- linked_items DROPPED!
2025-08-02 18:26:20.507497 --- INFO --- transactions DROPPED!
2025-08-02 18:26:20.511634 --- INFO --- brands DROPPED!
2025-08-02 18:26:20.515839 --- INFO --- food_info DROPPED!
2025-08-02 18:26:20.520888 --- INFO --- logistics_info DROPPED!
2025-08-02 18:26:20.525258 --- INFO --- zones DROPPED!
2025-08-02 18:26:20.529569 --- INFO --- locations DROPPED!
2025-08-02 18:26:20.534098 --- INFO --- vendors DROPPED!
2025-08-02 18:26:20.538631 --- INFO --- group_items DROPPED!
2025-08-02 18:26:20.542828 --- INFO --- groups DROPPED!
2025-08-02 18:26:20.547157 --- INFO --- receipt_items DROPPED!
2025-08-02 18:26:20.551313 --- INFO --- receipts DROPPED!
2025-08-02 18:26:20.555830 --- INFO --- recipe_items DROPPED!
2025-08-02 18:26:20.559942 --- INFO --- recipes DROPPED!
2025-08-02 18:26:20.564471 --- INFO --- shopping_list_items DROPPED!
2025-08-02 18:26:20.568722 --- INFO --- shopping_lists DROPPED!
2025-08-02 18:26:20.572906 --- INFO --- item_locations DROPPED!
2025-08-02 18:26:20.576984 --- INFO --- conversions DROPPED!
2025-08-02 18:26:20.580751 --- INFO --- sku_prefix DROPPED!
2025-08-02 18:26:50.901713 --- INFO --- item_info DROPPED!
2025-08-02 18:26:50.908050 --- INFO --- items DROPPED!
2025-08-02 18:26:50.912340 --- INFO --- cost_layers DROPPED!
2025-08-02 18:26:50.916637 --- INFO --- linked_items DROPPED!
2025-08-02 18:26:50.921024 --- INFO --- transactions DROPPED!
2025-08-02 18:26:50.925016 --- INFO --- brands DROPPED!
2025-08-02 18:26:50.929134 --- INFO --- food_info DROPPED!
2025-08-02 18:26:50.934046 --- INFO --- logistics_info DROPPED!
2025-08-02 18:26:50.938560 --- INFO --- zones DROPPED!
2025-08-02 18:26:50.942916 --- INFO --- locations DROPPED!
2025-08-02 18:26:50.947325 --- INFO --- vendors DROPPED!
2025-08-02 18:26:50.951778 --- INFO --- group_items DROPPED!
2025-08-02 18:26:50.956021 --- INFO --- groups DROPPED!
2025-08-02 18:26:50.960452 --- INFO --- receipt_items DROPPED!
2025-08-02 18:26:50.964739 --- INFO --- receipts DROPPED!
2025-08-02 18:26:50.969244 --- INFO --- recipe_items DROPPED!
2025-08-02 18:26:50.973415 --- INFO --- recipes DROPPED!
2025-08-02 18:26:50.978248 --- INFO --- shopping_list_items DROPPED!
2025-08-02 18:26:50.982442 --- INFO --- shopping_lists DROPPED!
2025-08-02 18:26:50.986611 --- INFO --- item_locations DROPPED!
2025-08-02 18:26:50.990758 --- INFO --- conversions DROPPED!
2025-08-02 18:26:50.994543 --- INFO --- sku_prefix DROPPED!
2025-08-02 18:29:18.712464 --- INFO --- logins Created!
2025-08-02 18:29:18.716965 --- INFO --- sites Created!
2025-08-02 18:29:18.720695 --- INFO --- roles Created!
2025-08-02 18:29:18.724324 --- INFO --- units Created!
2025-08-02 18:29:18.732922 --- ERROR --- DatabaseError(message='syntax error at or near "Site_cost_layers"
LINE 1: CREATE TABLE IF NOT EXISTS Test Site_cost_layers (
                                        ^', payload=CREATE TABLE IF NOT EXISTS Test Site_cost_layers (
    id SERIAL PRIMARY KEY,
    aquisition_date TIMESTAMP NOT NULL,
    quantity FLOAT8 NOT NULL,
    cost FLOAT8 NOT NULL,
    currency_type VARCHAR(16) NOT NULL,
    expires TIMESTAMP,
    vendor INTEGER DEFAULT 0
);, sql='cost_layers')
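This syntax error comes from splicing the site name directly into the table name: "Test Site" contains a space, so the generated DDL reads CREATE TABLE IF NOT EXISTS Test Site_cost_layers and PostgreSQL stops at the second identifier. The successful run at 18:29:41 suggests the prefix was corrected before retrying. A minimal sketch of normalizing a site name into a SQL-safe prefix, assuming the <prefix>_cost_layers convention seen in this log; the helper name is hypothetical:

```python
import re

def site_prefix(site_name: str) -> str:
    """Lowercase the site name and collapse non-alphanumeric runs into underscores."""
    prefix = re.sub(r"[^a-z0-9]+", "_", site_name.lower()).strip("_")
    if not prefix:
        raise ValueError("site name produces an empty table prefix")
    return prefix

# "Test Site" -> "test_site", so the DDL becomes
# CREATE TABLE IF NOT EXISTS test_site_cost_layers (...)
print(site_prefix("Test Site"))  # test_site
```

Alternatively, the raw name could be wrapped with psycopg2.sql.Identifier so the space is quoted, but a normalized lowercase prefix keeps the generated table names predictable across both the create and drop paths.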
2025-08-02 18:29:41.466678 --- INFO --- logins Created!
2025-08-02 18:29:41.470676 --- INFO --- sites Created!
2025-08-02 18:29:41.474490 --- INFO --- roles Created!
2025-08-02 18:29:41.478247 --- INFO --- units Created!
2025-08-02 18:29:41.485739 --- INFO --- cost_layers Created!
2025-08-02 18:29:41.493886 --- INFO --- linked_items Created!
2025-08-02 18:29:41.499122 --- INFO --- brands Created!
2025-08-02 18:29:41.505196 --- INFO --- food_info Created!
2025-08-02 18:29:41.512272 --- INFO --- item_info Created!
2025-08-02 18:29:41.518926 --- INFO --- zones Created!
2025-08-02 18:29:41.525031 --- INFO --- locations Created!
2025-08-02 18:29:41.531342 --- INFO --- logistics_info Created!
2025-08-02 18:29:41.537275 --- INFO --- transactions Created!
2025-08-02 18:29:41.544813 --- INFO --- item Created!
2025-08-02 18:29:41.550784 --- INFO --- vendors Created!
2025-08-02 18:29:41.557165 --- INFO --- groups Created!
2025-08-02 18:29:41.564073 --- INFO --- group_items Created!
2025-08-02 18:29:41.570683 --- INFO --- receipts Created!
2025-08-02 18:29:41.576683 --- INFO --- receipt_items Created!
2025-08-02 18:29:41.582667 --- INFO --- recipes Created!
2025-08-02 18:29:41.589472 --- INFO --- recipe_items Created!
2025-08-02 18:29:41.595473 --- INFO --- shopping_lists Created!
2025-08-02 18:29:41.602034 --- INFO --- shopping_list_items Created!
2025-08-02 18:29:41.608539 --- INFO --- item_locations Created!
2025-08-02 18:29:41.614052 --- INFO --- conversions Created!
2025-08-02 18:29:41.619896 --- INFO --- sku_prefix Created!
2025-08-02 18:29:41.623834 --- INFO --- Admin User Created!
2025-08-02 18:29:58.848852 --- INFO --- item_info DROPPED!
2025-08-02 18:29:58.855462 --- INFO --- items DROPPED!
2025-08-02 18:29:58.859968 --- INFO --- cost_layers DROPPED!
2025-08-02 18:29:58.864374 --- INFO --- linked_items DROPPED!
2025-08-02 18:29:58.869202 --- INFO --- transactions DROPPED!
2025-08-02 18:29:58.873266 --- INFO --- brands DROPPED!
2025-08-02 18:29:58.877586 --- INFO --- food_info DROPPED!
2025-08-02 18:29:58.882732 --- INFO --- logistics_info DROPPED!
2025-08-02 18:29:58.887276 --- INFO --- zones DROPPED!
2025-08-02 18:29:58.891650 --- INFO --- locations DROPPED!
2025-08-02 18:29:58.898845 --- INFO --- vendors DROPPED!
2025-08-02 18:29:58.903407 --- INFO --- group_items DROPPED!
2025-08-02 18:29:58.907658 --- INFO --- groups DROPPED!
2025-08-02 18:29:58.912101 --- INFO --- receipt_items DROPPED!
2025-08-02 18:29:58.916474 --- INFO --- receipts DROPPED!
2025-08-02 18:29:58.920960 --- INFO --- recipe_items DROPPED!
2025-08-02 18:29:58.925484 --- INFO --- recipes DROPPED!
2025-08-02 18:29:58.930078 --- INFO --- shopping_list_items DROPPED!
2025-08-02 18:29:58.934443 --- INFO --- shopping_lists DROPPED!
2025-08-02 18:29:58.939191 --- INFO --- item_locations DROPPED!
2025-08-02 18:29:58.943407 --- INFO --- conversions DROPPED!
2025-08-02 18:29:58.947995 --- INFO --- sku_prefix DROPPED!