Compare commits
No commits in common. "3e81734e14a265c1544c369ab361755f8fe710ec" and "6e08825ad242e216d22162d9944226f8c5e5e5b4" have entirely different histories.
3e81734e14 ... 6e08825ad2

.gitignore (vendored, 52 changes)
@@ -4,3 +4,55 @@ static/css/uikit-rtl.css
 static/css/uikit-rtl.min.css
 static/css/uikit.css
 static/css/uikit.min.css
+__pycache__/api_admin.cpython-312.pyc
+__pycache__/api_admin.cpython-313.pyc
+__pycache__/api.cpython-312.pyc
+__pycache__/api.cpython-313.pyc
+__pycache__/config.cpython-312.pyc
+__pycache__/config.cpython-313.pyc
+__pycache__/database_admin.cpython-312.pyc
+__pycache__/database_admin.cpython-313.pyc
+__pycache__/database.cpython-312.pyc
+__pycache__/database.cpython-313.pyc
+__pycache__/external_API.cpython-312.pyc
+__pycache__/external_API.cpython-313.pyc
+__pycache__/group_api.cpython-312.pyc
+__pycache__/group_api.cpython-313.pyc
+__pycache__/main.cpython-312.pyc
+__pycache__/main.cpython-313.pyc
+__pycache__/manage.cpython-312.pyc
+__pycache__/manage.cpython-313.pyc
+__pycache__/MyDataclasses.cpython-312.pyc
+__pycache__/MyDataclasses.cpython-313.pyc
+__pycache__/postsqldb.cpython-312.pyc
+__pycache__/postsqldb.cpython-313.pyc
+__pycache__/process.cpython-312.pyc
+__pycache__/process.cpython-313.pyc
+__pycache__/receipts_API.cpython-312.pyc
+__pycache__/receipts_API.cpython-313.pyc
+__pycache__/shopping_list_API.cpython-312.pyc
+__pycache__/shopping_list_API.cpython-313.pyc
+__pycache__/user_api.cpython-312.pyc
+__pycache__/user_api.cpython-313.pyc
+__pycache__/webpush.cpython-312.pyc
+__pycache__/webpush.cpython-313.pyc
+__pycache__/workshop_api.cpython-312.pyc
+__pycache__/workshop_api.cpython-313.pyc
+application/recipes/__pycache__/__init__.cpython-312.pyc
+application/recipes/__pycache__/__init__.cpython-313.pyc
+application/recipes/__pycache__/database_recipes.cpython-312.pyc
+application/recipes/__pycache__/database_recipes.cpython-313.pyc
+application/recipes/__pycache__/recipes_api.cpython-312.pyc
+application/recipes/__pycache__/recipes_api.cpython-313.pyc
+application/__pycache__/__init__.cpython-312.pyc
+application/__pycache__/__init__.cpython-313.pyc
+application/__pycache__/postsqldb.cpython-312.pyc
+application/__pycache__/postsqldb.cpython-313.pyc
+application/items/__pycache__/__init__.cpython-312.pyc
+application/items/__pycache__/__init__.cpython-313.pyc
+application/items/__pycache__/database_items.cpython-312.pyc
+application/items/__pycache__/database_items.cpython-313.pyc
+application/items/__pycache__/items_API.cpython-312.pyc
+application/items/__pycache__/items_API.cpython-313.pyc
+application/items/__pycache__/items_processes.cpython-312.pyc
+application/items/__pycache__/items_processes.cpython-313.pyc
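A side note on the hunk above, as an editorial suggestion only and not part of either commit: since every added entry is a compiled-bytecode cache file, two directory-level ignore patterns would cover all 52 paths at once.

__pycache__/
*.py[cod]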
@@ -359,10 +359,6 @@ class SitePayload:
             self.default_primary_location
         )
 
-    def get_dictionary(self):
-        return self.__dict__
-
-
 #DONE
 @dataclass
 class RolePayload:
README.md (18 changes)
@@ -1,17 +1,5 @@
-## PantryTrack
-
-This is currently a passion project that I started in order to learn and develop a few different things:
-- Barcode scanners and how they work
-- PostgreSQL databases
-- SQL, JavaScript, CSS, HTML
-- Database schema management and design
-
-PantryTrack is an inventory system: you add items to it, then use those items to build recipes and shopping lists.
-
-You can manually create receipts, and/or use a barcode scanner to set up a kiosk and scan everything you
-purchase into a receipt.
-
-You then edit and receive that receipt into the system.
-
-There is also a kiosk-like interface for a scan-in/scan-out system: as you use things,
-the system removes those items when you scan them.
+## PostgreSQL and Python learning
+
+I am attempting to understand how to connect to and execute commands against a remote PostgreSQL database through Python to update my systems.
+
+This is a test
Binary files not shown (19 files).
api.py (new file, 370 lines)
@@ -0,0 +1,370 @@
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
import psycopg2, math, json, datetime, main, copy, requests, process, database
from config import config, sites_config
from main import unfoldCostLayers

database_api = Blueprint('database_api', __name__)


@database_api.route("/changeSite", methods=["POST"])
def changeSite():
    if request.method == "POST":
        site = request.json['site']
        session['selected_site'] = site
        return jsonify({'error': False, 'message': 'Site Changed!'})


@database_api.route("/getGroups")
def paginate_groups():
    page = int(request.args.get('page', 1))
    limit = int(request.args.get('limit', 10))
    site_name = session['selected_site']
    offset = (page - 1) * limit

    groups = []
    count = 0

    database_config = config()
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                sql = f"SELECT * FROM {site_name}_groups LIMIT %s OFFSET %s;"
                count = f"SELECT COUNT(*) FROM {site_name}_groups"

                cur.execute(sql, (limit, offset))
                groups = cur.fetchall()
                cur.execute(count)
                count = cur.fetchone()[0]

                sql_item = f"SELECT {site_name}_items.barcode, {site_name}_items.item_name, {site_name}_logistics_info.quantity_on_hand FROM {site_name}_items LEFT JOIN {site_name}_logistics_info ON {site_name}_items.logistics_info_id = {site_name}_logistics_info.id WHERE {site_name}_items.id = %s;"
                new_groups = []
                for group in groups:
                    qty = 0
                    group = list(group)
                    items = []
                    print(group[3])
                    for item_id in group[3]:
                        cur.execute(sql_item, (item_id,))
                        item_row = list(cur.fetchone())
                        cur.execute(f"SELECT quantity_on_hand FROM {site_name}_item_locations WHERE part_id=%s;", (item_id, ))
                        item_locations = cur.fetchall()[0]
                        qty += float(sum(item_locations))
                        item_row[2] = sum(item_locations)
                        items.append(item_row)
                    group[3] = items
                    group.append(qty)
                    new_groups.append(group)
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)

    return jsonify({'groups': new_groups, "end": math.ceil(count/limit)})


@database_api.route("/getVendors")
def get_vendors():
    database_config = config()
    site_name = session['selected_site']
    vendors = []
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                sql = f"SELECT * FROM {site_name}_vendors;"
                cur.execute(sql)
                vendors = cur.fetchall()
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)

    return jsonify(vendors=vendors)


@database_api.route("/addGroup")
def addGroup():
    name = str(request.args.get('name', ""))
    description = str(request.args.get('description', ""))
    group_type = str(request.args.get('type', ""))
    site_name = session['selected_site']
    state = "FAILED"

    database_config = config()
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                sql = f"INSERT INTO {site_name}_groups (name, description, included_items, group_type) VALUES (%s, %s, %s, %s);"
                cur.execute(sql, (name, description, json.dumps({}), group_type))
                state = "SUCCESS"
                conn.commit()
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)
            conn.rollback()

    return jsonify({'state': state})


@database_api.route("/getGroup")
def get_group():
    id = int(request.args.get('id', 1))
    database_config = config()
    site_name = session['selected_site']

    group = []
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                sql = f"SELECT * FROM {site_name}_groups WHERE id=%s;"
                cur.execute(sql, (id, ))
                group = list(cur.fetchone())

                sql_item = f"SELECT {site_name}_items.id, {site_name}_items.barcode, {site_name}_items.item_name, {site_name}_logistics_info.quantity_on_hand FROM {site_name}_items LEFT JOIN {site_name}_logistics_info ON {site_name}_items.logistics_info_id = {site_name}_logistics_info.id WHERE {site_name}_items.id = %s;"
                qty = 0
                group = list(group)
                items = []
                print(group[3])
                for item_id in group[3]:
                    cur.execute(sql_item, (item_id,))
                    item_row = cur.fetchone()
                    qty += float(item_row[3])
                    items.append(item_row)
                group[3] = items
                group.append(qty)
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)

    return jsonify(group=group)


@database_api.route("/updateGroup", methods=["POST"])
def update_group():
    if request.method == "POST":
        site_name = session['selected_site']
        group_id = request.get_json()['id']
        items = request.get_json()['items']
        name = request.get_json()['name']
        description = request.get_json()['description']
        group_type = request.get_json()['group_type']
        data = (name, description, items, group_type, group_id)
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            try:
                with conn.cursor() as cur:
                    # Start by updating the group -> included_items with the up-to-date list
                    sql = f"UPDATE {site_name}_groups SET name = %s, description = %s, included_items = %s, group_type = %s WHERE id=%s;"
                    cur.execute(sql, data)

                    update_item_sql = f"UPDATE {site_name}_item_info SET groups = %s WHERE id = %s;"
                    select_item_sql = f"SELECT {site_name}_item_info.id, {site_name}_item_info.groups FROM {site_name}_items LEFT JOIN {site_name}_item_info ON {site_name}_items.item_info_id = {site_name}_item_info.id WHERE {site_name}_items.id = %s;"
                    # Now we fetch each item row one by one and check whether the group id is already inside its groups array
                    for item_id in items:
                        cur.execute(select_item_sql, (item_id, ))
                        item = cur.fetchone()
                        print(item)
                        item_groups: set = set(item[1])
                        # Condition check; add the group id if it doesn't exist.
                        if group_id not in item_groups:
                            item_groups.add(group_id)
                            cur.execute(update_item_sql, (list(item_groups), item[0]))

                    # Now we fetch all items that have the group id in their groups array
                    fetch_items_with_group = f"SELECT {site_name}_items.id, groups, {site_name}_item_info.id FROM {site_name}_item_info LEFT JOIN {site_name}_items ON {site_name}_items.item_info_id = {site_name}_item_info.id WHERE groups @> ARRAY[%s];"
                    cur.execute(fetch_items_with_group, (group_id, ))
                    group_items = cur.fetchall()
                    print(items)
                    # We then check each item id against the group's new included_items list to see whether the item should still be in there
                    for item_id, group, info_id in group_items:
                        # If it is not, we remove the group from the item's list and update the item
                        if item_id not in items:
                            groups: list = list(group)
                            groups.remove(group_id)
                            cur.execute(update_item_sql, (list(groups), info_id))

                    conn.commit()
            except (Exception, psycopg2.DatabaseError) as error:
                print(error)
                conn.rollback()

        return jsonify({"state": "SUCCESS"})
    return jsonify({"state": "FAILED"})


@database_api.route("/addList")
def addList():
    name = str(request.args.get('name', ""))
    description = str(request.args.get('description', ""))
    list_type = str(request.args.get('type', ""))
    site_name = session['selected_site']

    print(name, description, list_type)
    state = "FAILED"

    #if name or description or group_type == "":
    #    print("this is empty")
    #    return jsonify({'state': state})
    timestamp = datetime.datetime.now()
    data = (name, description, [], json.dumps({}), [], [], 0, timestamp, list_type)
    database_config = config()
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                sql = f"INSERT INTO {site_name}_shopping_lists (name, description, pantry_items, custom_items, recipes, groups, author, creation_date, type) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s);"
                cur.execute(sql, data)
                state = "SUCCESS"
                conn.commit()
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)
            conn.rollback()

    return jsonify({'state': state})


@database_api.route("/getLists")
def paginate_lists():
    page = int(request.args.get('page', 1))
    limit = int(request.args.get('limit', 10))
    site_name = session['selected_site']

    offset = (page - 1) * limit

    lists = []
    count = 0

    database_config = config()
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                sql = f"SELECT * FROM {site_name}_shopping_lists LIMIT %s OFFSET %s;"
                count = f"SELECT COUNT(*) FROM {site_name}_shopping_lists;"

                cur.execute(sql, (limit, offset))
                temp_lists = list(cur.fetchall())
                cur.execute(count)
                count = cur.fetchone()[0]

                for shopping_list in temp_lists:
                    shopping_list: list = list(shopping_list)
                    pantry_items = shopping_list[3]
                    custom_items = shopping_list[4]
                    list_length = len(custom_items)

                    sqlfile = open(f"sites/{site_name}/sql/unique/shopping_lists_safetystock_count.sql", "r+")
                    sql = "\n".join(sqlfile.readlines())
                    sqlfile.close()
                    print(sql)
                    if shopping_list[10] == 'calculated':
                        print(shopping_list[0])
                        cur.execute(sql, (shopping_list[0], ))
                        list_length += cur.fetchone()[0]
                    else:
                        list_length += len(pantry_items)

                    shopping_list.append(list_length)
                    lists.append(shopping_list)

        except (Exception, psycopg2.DatabaseError) as error:
            print(error)

    return jsonify({'lists': lists, 'end': math.ceil(count/limit)})


@database_api.route("/getListView")
def get_list_view():
    id = int(request.args.get('id', 1))
    site_name = session['selected_site']
    shopping_list = []
    database_config = config()
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                sql = f"SELECT * FROM {site_name}_shopping_lists WHERE id=%s;"
                cur.execute(sql, (id, ))
                shopping_list = list(cur.fetchone())

                if shopping_list[10] == "calculated":
                    sqlfile = open(f"sites/{site_name}/sql/unique/shopping_lists_safetystock.sql", "r+")
                    sql = "\n".join(sqlfile.readlines())
                    sqlfile.close()
                else:
                    sqlfile = open(f"sites/{site_name}/sql/unique/shopping_lists_safetystock_uncalculated.sql", "r+")
                    sql = "\n".join(sqlfile.readlines())
                    sqlfile.close()

                cur.execute(sql, (id, ))
                shopping_list[3] = list(cur.fetchall())
                print(shopping_list[4])

        except (Exception, psycopg2.DatabaseError) as error:
            print(error)

    return jsonify(shopping_list=shopping_list)


@database_api.route("/getList")
def get_list():
    id = int(request.args.get('id', 1))
    database_config = config()
    site_name = session['selected_site']
    shopping_list = []
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                sql = f"SELECT * FROM {site_name}_shopping_lists WHERE id=%s;"
                cur.execute(sql, (id, ))
                shopping_list = list(cur.fetchone())
                itemSQL = f"SELECT {site_name}_items.id, {site_name}_items.barcode, {site_name}_items.item_name, {site_name}_items.links, {site_name}_item_info.uom FROM {site_name}_items LEFT JOIN {site_name}_item_info ON {site_name}_items.item_info_id = {site_name}_item_info.id WHERE {site_name}_item_info.shopping_lists @> ARRAY[%s];"
                cur.execute(itemSQL, (id, ))
                shopping_list[3] = list(cur.fetchall())
                print(shopping_list)
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)

    return jsonify(shopping_list=shopping_list)


@database_api.route("/updateList", methods=["POST"])
def update_list():
    if request.method == "POST":
        site_name = session['selected_site']
        list_id = request.get_json()['id']
        items = request.get_json()['items']
        print(items)
        custom_items = request.get_json()['custom']
        name = request.get_json()['name']
        description = request.get_json()['description']
        list_type = request.get_json()['list_type']
        quantities = request.get_json()['quantities']
        data = (name, description, items, json.dumps(custom_items), list_type, json.dumps(quantities), list_id)
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            try:
                with conn.cursor() as cur:
                    # Start by updating the shopping list -> pantry_items with the up-to-date list
                    sql = f"UPDATE {site_name}_shopping_lists SET name = %s, description = %s, pantry_items = %s, custom_items = %s, type = %s, quantities = %s WHERE id=%s;"
                    cur.execute(sql, data)

                    update_item_sql = f"UPDATE {site_name}_item_info SET shopping_lists = %s WHERE id = %s;"
                    select_item_sql = f"SELECT {site_name}_item_info.id, {site_name}_item_info.shopping_lists FROM {site_name}_items LEFT JOIN {site_name}_item_info ON {site_name}_items.item_info_id = {site_name}_item_info.id WHERE {site_name}_items.id = %s;"
                    # Now we fetch each item row one by one and check whether the list id is already inside its shopping_lists array
                    for item_id in items:
                        cur.execute(select_item_sql, (item_id, ))
                        item = cur.fetchone()
                        print(item)
                        shopping_lists: set = set(item[1])
                        # Condition check; add the list id if it doesn't exist.
                        if list_id not in shopping_lists:
                            shopping_lists.add(list_id)
                            cur.execute(update_item_sql, (list(shopping_lists), item[0]))

                    # Now we fetch all items that have the list id in their shopping_lists array
                    fetch_items_with_list = f"SELECT {site_name}_items.id, {site_name}_item_info.shopping_lists, {site_name}_item_info.id FROM {site_name}_item_info LEFT JOIN {site_name}_items ON {site_name}_items.item_info_id = {site_name}_item_info.id WHERE {site_name}_item_info.shopping_lists @> ARRAY[%s];"
                    cur.execute(fetch_items_with_list, (list_id, ))
                    list_items = cur.fetchall()
                    print(items)
                    # We then check each item id against the list's new items to see whether the item should still be in there
                    for item_id, shopping_list, info_id in list_items:
                        # If it is not, we remove the list from the item's shopping_lists and update the item
                        if item_id not in items:
                            shopping_lists: list = list(shopping_list)
                            shopping_lists.remove(list_id)
                            cur.execute(update_item_sql, (list(shopping_lists), info_id))

                    conn.commit()
            except (Exception, psycopg2.DatabaseError) as error:
                print(error)
                conn.rollback()

        return jsonify({"state": "SUCCESS"})
    return jsonify({"state": "FAILED"})
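Every route above builds its connection from config(), which is imported from config.py but is not part of this compare. As a rough, hedged sketch only (the real module is not shown; the database.ini filename and the "postgresql" section name are assumptions), such a helper usually parses an INI file into psycopg2 connection keyword arguments:

# Hypothetical sketch of config.py; the actual file is not in this diff.
from configparser import ConfigParser

def config(filename="database.ini", section="postgresql"):
    # Read the INI file and return the named section as a dict of
    # psycopg2 connection kwargs (host, port, dbname, user, password).
    parser = ConfigParser()
    parser.read(filename)
    if not parser.has_section(section):
        raise KeyError(f"Section {section} not found in {filename}")
    return dict(parser.items(section))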
api_admin.py (new file, 263 lines)
@@ -0,0 +1,263 @@
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
import psycopg2, math, json, datetime, main, copy, requests
from config import config, sites_config
from main import unfoldCostLayers, get_sites, get_roles, create_site_secondary, getUser
from manage import create
from user_api import login_required
import postsqldb, process, hashlib, database_admin


admin_api = Blueprint('admin_api', __name__)


@admin_api.route('/admin')
def admin_index():
    sites = [site[1] for site in main.get_sites(session['user']['sites'])]
    return render_template("admin/index.html",
                           current_site=session['selected_site'],
                           sites=sites)


@admin_api.route('/admin/site/<id>')
@login_required
def adminSites(id):
    if id == "new":
        new_site = postsqldb.SitesTable.Payload(
            "",
            "",
            session['user_id']
        )
        return render_template("admin/site.html", site=new_site.get_dictionary())
    else:
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            site = postsqldb.SitesTable.select_tuple(conn, (id,))
        return render_template('admin/site.html', site=site)


@admin_api.route('/admin/role/<id>')
@login_required
def adminRoles(id):
    database_config = config()
    with psycopg2.connect(**database_config) as conn:
        sites = postsqldb.SitesTable.selectTuples(conn)
        if id == "new":
            new_role = postsqldb.RolesTable.Payload(
                "",
                "",
                0
            )
            return render_template("admin/role.html", role=new_role.get_dictionary(), sites=sites)
        else:
            role = postsqldb.RolesTable.select_tuple(conn, (id,))
            return render_template('admin/role.html', role=role, sites=sites)


@admin_api.route('/admin/user/<id>')
@login_required
def adminUser(id):
    database_config = config()
    with psycopg2.connect(**database_config) as conn:
        if id == "new":
            new_user = postsqldb.LoginsTable.Payload("", "", "", "")
            return render_template("admin/user.html", user=new_user.get_dictionary())
        else:
            user = database_admin.selectLoginsUser(int(id))
            return render_template('admin/user.html', user=user)


@admin_api.route('/admin/getSites', methods=['GET'])
@login_required
def getSites():
    if request.method == "GET":
        records = []
        count = 0
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 10))
        offset = (page - 1) * limit
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            records, count = postsqldb.SitesTable.paginateTuples(conn, (limit, offset))
        return jsonify({'sites': records, "end": math.ceil(count/limit), 'error':False, 'message': 'Sites Loaded Successfully!'})
    return jsonify({'sites': records, "end": math.ceil(count/limit), 'error':True, 'message': 'There was a problem loading Sites!'})


@admin_api.route('/admin/getRoles', methods=['GET'])
@login_required
def getRoles():
    if request.method == "GET":
        records = []
        count = 0
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 10))
        offset = (page - 1) * limit
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            records, count = postsqldb.RolesTable.paginate_tuples(conn, (limit, offset))
        return jsonify({'roles': records, "end": math.ceil(count/limit), 'error':False, 'message': 'Roles Loaded Successfully!'})
    return jsonify({'roles': records, "end": math.ceil(count/limit), 'error':True, 'message': 'There was a problem loading Roles!'})


@admin_api.route('/admin/getLogins', methods=['GET'])
@login_required
def getLogins():
    if request.method == "GET":
        records = []
        count = 0
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 10))
        offset = (page - 1) * limit
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            records, count = postsqldb.LoginsTable.paginate_tuples(conn, (limit, offset))
        return jsonify({'logins': records, "end": math.ceil(count/limit), 'error':False, 'message': 'logins Loaded Successfully!'})
    return jsonify({'logins': records, "end": math.ceil(count/limit), 'error':True, 'message': 'There was a problem loading logins!'})


@admin_api.route('/admin/site/postDeleteSite', methods=["POST"])
def postDeleteSite():
    if request.method == "POST":
        site_id = request.get_json()['site_id']
        database_config = config()
        user_id = session['user_id']
        try:
            with psycopg2.connect(**database_config) as conn:
                user = postsqldb.LoginsTable.select_tuple(conn, (user_id,))
                admin_user = (user['username'], user['password'], user['email'], user['row_type'])
                site = postsqldb.SitesTable.select_tuple(conn, (site_id,))
                site = postsqldb.SitesTable.Manager(
                    site['site_name'],
                    admin_user,
                    site['default_zone'],
                    site['default_primary_location'],
                    site['site_description']
                )
                process.deleteSite(site_manager=site)
        except Exception as error:
            conn.rollback()
            return jsonify({'error': True, 'message': error})
        return jsonify({'error': False, 'message': f""})
    return jsonify({'error': True, 'message': f""})


@admin_api.route('/admin/site/postAddSite', methods=["POST"])
def postAddSite():
    if request.method == "POST":
        payload = request.get_json()['payload']
        database_config = config()
        site_name = session['selected_site']
        user_id = session['user_id']
        print(payload)
        try:
            with psycopg2.connect(**database_config) as conn:
                user = postsqldb.LoginsTable.select_tuple(conn, (user_id,))
                admin_user = (user['username'], user['password'], user['email'], user['row_type'])
                site = postsqldb.SitesTable.Manager(
                    payload['site_name'],
                    admin_user,
                    payload['default_zone'],
                    payload['default_primary_location'],
                    payload['site_description']
                )
                process.addSite(site_manager=site)

        except Exception as error:
            conn.rollback()
            return jsonify({'error': True, 'message': error})
        return jsonify({'error': False, 'message': f"Zone added to {site_name}."})
    return jsonify({'error': True, 'message': f"These was an error with adding this Zone to {site_name}."})


@admin_api.route('/admin/site/postEditSite', methods=["POST"])
def postEditSite():
    if request.method == "POST":
        payload = request.get_json()['payload']
        database_config = config()
        try:
            with psycopg2.connect(**database_config) as conn:
                postsqldb.SitesTable.update_tuple(conn, payload)
        except Exception as error:
            conn.rollback()
            return jsonify({'error': True, 'message': error})
        return jsonify({'error': False, 'message': f"Site updated."})
    return jsonify({'error': True, 'message': f"These was an error with updating Site."})


@admin_api.route('/admin/role/postAddRole', methods=["POST"])
def postAddRole():
    if request.method == "POST":
        payload = request.get_json()['payload']
        database_config = config()
        print(payload)
        try:
            with psycopg2.connect(**database_config) as conn:
                role = postsqldb.RolesTable.Payload(
                    payload['role_name'],
                    payload['role_description'],
                    payload['site_id']
                )
                postsqldb.RolesTable.insert_tuple(conn, role.payload())

        except Exception as error:
            conn.rollback()
            return jsonify({'error': True, 'message': error})
        return jsonify({'error': False, 'message': f"Role added."})
    return jsonify({'error': True, 'message': f"These was an error with adding this Role."})


@admin_api.route('/admin/role/postEditRole', methods=["POST"])
def postEditRole():
    if request.method == "POST":
        payload = request.get_json()['payload']
        database_config = config()
        print(payload)
        try:
            with psycopg2.connect(**database_config) as conn:
                postsqldb.RolesTable.update_tuple(conn, payload)

        except Exception as error:
            conn.rollback()
            return jsonify({'error': True, 'message': error})
        return jsonify({'error': False, 'message': f"Role updated."})
    return jsonify({'error': True, 'message': f"These was an error with updating this Role."})


@admin_api.route('/admin/user/postAddLogin', methods=["POST"])
def postAddLogin():
    if request.method == "POST":
        payload = request.get_json()['payload']
        database_config = config()
        user = []
        try:
            with psycopg2.connect(**database_config) as conn:
                user = postsqldb.LoginsTable.Payload(
                    payload['username'],
                    hashlib.sha256(payload['password'].encode()).hexdigest(),
                    payload['email'],
                    payload['row_type']
                )
                user = postsqldb.LoginsTable.insert_tuple(conn, user.payload())
        except postsqldb.DatabaseError as error:
            conn.rollback()
            return jsonify({'user': user, 'error': True, 'message': error})
        return jsonify({'user': user, 'error': False, 'message': f"User added."})
    return jsonify({'user': user, 'error': True, 'message': f"These was an error with adding this User."})


@admin_api.route('/admin/user/postEditLogin', methods=["POST"])
def postEditLogin():
    if request.method == "POST":
        payload = request.get_json()['payload']
        database_config = config()
        try:
            with psycopg2.connect(**database_config) as conn:
                postsqldb.LoginsTable.update_tuple(conn, payload)
        except Exception as error:
            conn.rollback()
            return jsonify({'error': True, 'message': error})
        return jsonify({'error': False, 'message': f"User was Added Successfully."})
    return jsonify({'error': True, 'message': f"These was an error with adding this user."})


@admin_api.route('/admin/user/postEditLoginPassword', methods=["POST"])
def postEditLoginPassword():
    if request.method == "POST":
        payload = request.get_json()['payload']
        database_config = config()
        try:
            with psycopg2.connect(**database_config) as conn:
                user = postsqldb.LoginsTable.select_tuple(conn, (payload['id'],))
                if hashlib.sha256(payload['current_password'].encode()).hexdigest() != user['password']:
                    return jsonify({'error': True, 'message': "The provided current password is incorrect"})
                payload['update']['password'] = hashlib.sha256(payload['update']['password'].encode()).hexdigest()
                postsqldb.LoginsTable.update_tuple(conn, payload)
        except Exception as error:
            conn.rollback()
            return jsonify({'error': True, 'message': error})
        return jsonify({'error': False, 'message': f"Password was changed successfully."})
    return jsonify({'error': True, 'message': f"These was an error with updating this Users password."})
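For reference, the JSON shape these admin routes expect can be read directly off the handlers above. A small client-side sketch for the password-change endpoint follows; the base URL, id, and password values are placeholders rather than anything from the repository:

# Hypothetical client call; host/port and values are placeholders.
import requests

payload = {
    "id": 1,                               # logins row id
    "current_password": "old-secret",      # checked against the stored sha256 hash
    "update": {"password": "new-secret"},  # column/value pairs applied by update_tuple
}
resp = requests.post(
    "http://localhost:5000/admin/user/postEditLoginPassword",
    json={"payload": payload},
)
print(resp.json())  # {'error': False, 'message': 'Password was changed successfully.'} on success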
Binary files not shown (6 files).
Deleted file (103 lines)
@@ -1,103 +0,0 @@
import psycopg2

import config
from application import postsqldb

def washUserDictionary(user):
    return {
        'id': user['id'],
        'username': user['username'],
        'sites': user['sites'],
        'site_roles': user['site_roles'],
        'system_admin': user['system_admin'],
        'flags': user['flags'],
        'profile_pic_url': user['profile_pic_url'],
        'login_type': user['login_type']
    }

def selectLoginsTupleByID(payload, convert=True, conn=None):
    """ payload = (id,)"""
    self_conn = False
    user = ()
    sql = f"SELECT * FROM logins WHERE id=%s;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                user = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                user = rows

        if self_conn:
            conn.commit()
            conn.close()

        return user
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)

def selectUserByEmail(payload, convert=True, conn=None):
    """ payload = (email,)"""
    self_conn = False
    user = ()
    sql = f"SELECT * FROM logins WHERE email=%s;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                user = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                user = rows

        if self_conn:
            conn.commit()
            conn.close()

        return user
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)


def updateLoginsTuple(payload, convert=True, conn=None):
    """ payload = {'id': user_id, 'update': {...}}"""
    self_conn = False
    user = ()
    set_clause, values = postsqldb.updateStringFactory(payload['update'])
    values.append(payload['id'])
    sql = f"UPDATE logins SET {set_clause} WHERE id=%s RETURNING *;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, values)
            rows = cur.fetchone()
            if rows and convert:
                user = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                user = rows

        if self_conn:
            conn.commit()
            conn.close()

        return user
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)
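The deleted helpers above lean on two utilities from application.postsqldb that never appear in this compare: tupleDictionaryFactory and updateStringFactory. Their real definitions are unknown; judging only from how they are called here, minimal stand-ins would look roughly like this (an assumption, not the project's actual code):

# Assumed behaviour only; the real implementations live in application/postsqldb.py.
def tupleDictionaryFactory(description, row):
    # Map a psycopg2 cursor.description onto one fetched row -> {column_name: value}.
    return {col.name: value for col, value in zip(description, row)}

def updateStringFactory(update: dict):
    # Build "col1 = %s, col2 = %s" plus the matching value list for an UPDATE ... SET clause.
    set_clause = ", ".join(f"{column} = %s" for column in update)
    return set_clause, list(update.values())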
Binary files not shown (4 files).
Deleted file (222 lines)
@@ -1,222 +0,0 @@
# 3RD PARTY IMPORTS
from flask import (
    Blueprint, request, render_template, session, jsonify, redirect
)
import math
import hashlib

# APPLICATION IMPORTS
from application.access_module import access_api
from application.administration import administration_database, administration_processes
from application import database_payloads, postsqldb


admin_api = Blueprint('admin_api', __name__, template_folder="templates", static_folder="static")


# ROOT TEMPLATE ROUTES
@admin_api.route('/')
@access_api.login_required
def admin_index():
    sites = [site[1] for site in postsqldb.get_sites(session['user']['sites'])]
    return render_template("admin_index.html", current_site=session['selected_site'], sites=sites)

@admin_api.route('/site/<id>')
@access_api.login_required
def adminSites(id):
    if id == "new":
        new_site_payload = database_payloads.SitePayload("", "", session['user_id'])
        return render_template("site.html", site=new_site_payload.get_dictionary())
    else:
        site = administration_database.selectSitesTuple((id,))
        return render_template('site.html', site=site)

@admin_api.route('/role/<id>')
@access_api.login_required
def adminRoles(id):
    sites = administration_database.selectSitesTuples()
    if id == "new":
        new_role_payload = database_payloads.RolePayload("", "", 0)
        return render_template("role.html", role=new_role_payload.get_dictionary(), sites=sites)
    else:
        role = administration_database.selectRolesTuple((id,))
        return render_template('role.html', role=role, sites=sites)

@admin_api.route('/user/<id>')
@access_api.login_required
def adminUser(id):
    if id == "new":
        new_user_payload = database_payloads.LoginsPayload("", "", "", "")
        return render_template("user.html", user=new_user_payload.get_dictionary())
    else:
        user = administration_database.selectLoginsTuple((int(id),))
        return render_template('user.html', user=user)

@admin_api.route('/setup', methods=['GET', 'POST'])
def first_time_setup():
    if request.method == "POST":
        database_address = request.form['database_address']
        database_port = request.form['database_port']
        database_name = request.form['database_name']
        database_user = request.form['database_user']
        database_password = request.form['database_address']

        payload = {
            "site_name" : request.form['site_name'],
            "admin_user": (request.form['username'], hashlib.sha256(request.form['password'].encode()).hexdigest(), request.form['email']),
            "default_zone": request.form['site_default_zone'],
            "default_primary_location": request.form['site_default_location'],
            "site_description": request.form['site_description']
        }

        administration_processes.addSite(payload)

        return redirect("/login")

    return render_template("setup.html")

# API ROUTES
@admin_api.route('/api/getSites', methods=['GET'])
@access_api.login_required
def getSites():
    if request.method == "GET":
        records = []
        count = 0
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 10))
        offset = (page - 1) * limit
        records, count = administration_database.paginateSitesTuples((limit, offset))
        return jsonify({'sites': records, "end": math.ceil(count/limit), 'error':False, 'message': 'Sites Loaded Successfully!'})
    return jsonify({'sites': records, "end": math.ceil(count/limit), 'error':True, 'message': 'There was a problem loading Sites!'})

@admin_api.route('/api/getRoles', methods=['GET'])
@access_api.login_required
def getRoles():
    if request.method == "GET":
        records = []
        count = 0
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 10))
        offset = (page - 1) * limit
        records, count = administration_database.paginateRolesTuples((limit, offset))
        return jsonify({'roles': records, "end": math.ceil(count/limit), 'error':False, 'message': 'Roles Loaded Successfully!'})
    return jsonify({'roles': records, "end": math.ceil(count/limit), 'error':True, 'message': 'There was a problem loading Roles!'})

@admin_api.route('/api/getLogins', methods=['GET'])
@access_api.login_required
def getLogins():
    if request.method == "GET":
        records = []
        count = 0
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 10))
        offset = (page - 1) * limit
        records, count = administration_database.paginateLoginsTuples((limit, offset))
        return jsonify({'logins': records, "end": math.ceil(count/limit), 'error':False, 'message': 'logins Loaded Successfully!'})
    return jsonify({'logins': records, "end": math.ceil(count/limit), 'error':True, 'message': 'There was a problem loading logins!'})

@admin_api.route('/api/site/postDeleteSite', methods=["POST"])
@access_api.login_required
def postDeleteSite():
    if request.method == "POST":
        site_id = request.get_json()['site_id']
        user_id = session['user_id']
        site = administration_database.selectSitesTuple((site_id,))
        user = administration_database.selectLoginsTuple((user_id,))
        if user['id'] != site['site_owner_id']:
            return jsonify({'error': True, 'message': f"You must be the owner of this site to delete."})

        try:
            administration_processes.deleteSite(site, user)
        except Exception as err:
            print(err)

        return jsonify({'error': False, 'message': f""})
    return jsonify({'error': True, 'message': f""})

@admin_api.route('/api/site/postAddSite', methods=["POST"])
@access_api.login_required
def postAddSite():
    if request.method == "POST":
        payload = request.get_json()['payload']
        site_name = session['selected_site']
        user_id = session['user_id']
        user = administration_database.selectLoginsTuple((user_id,))
        payload['admin_user'] = (user['username'], user['password'], user['email'], user['row_type'])

        administration_processes.addSite(payload)

        return jsonify({'error': False, 'message': f"Zone added to {site_name}."})
    return jsonify({'error': True, 'message': f"These was an error with adding this Zone to {site_name}."})

@admin_api.route('/api/site/postEditSite', methods=["POST"])
@access_api.login_required
def postEditSite():
    if request.method == "POST":
        payload = request.get_json()['payload']
        administration_database.updateSitesTuple(payload)
        return jsonify({'error': False, 'message': f"Site updated."})
    return jsonify({'error': True, 'message': f"These was an error with updating Site."})

@admin_api.route('/api/role/postAddRole', methods=["POST"])
@access_api.login_required
def postAddRole():
    if request.method == "POST":
        payload = request.get_json()['payload']
        print(payload)
        role = database_payloads.RolePayload(
            payload['role_name'],
            payload['role_description'],
            payload['site_id']
        )
        administration_database.insertRolesTuple(role.payload())
        return jsonify({'error': False, 'message': f"Role added."})
    return jsonify({'error': True, 'message': f"These was an error with adding this Role."})

@admin_api.route('/api/role/postEditRole', methods=["POST"])
@access_api.login_required
def postEditRole():
    if request.method == "POST":
        payload = request.get_json()['payload']
        administration_database.updateRolesTuple(payload)
        return jsonify({'error': False, 'message': f"Role updated."})
    return jsonify({'error': True, 'message': f"These was an error with updating this Role."})

@admin_api.route('/api/user/postAddLogin', methods=["POST"])
@access_api.login_required
def postAddLogin():
    if request.method == "POST":
        payload = request.get_json()['payload']
        user = database_payloads.LoginsPayload(
            payload['username'],
            hashlib.sha256(payload['password'].encode()).hexdigest(),
            payload['email'],
            payload['row_type']
        )
        user = administration_database.insertLoginsTuple(user.payload())

        return jsonify({'user': user, 'error': False, 'message': f"User added."})
    return jsonify({'user': user, 'error': True, 'message': f"These was an error with adding this User."})

@admin_api.route('/api/user/postEditLogin', methods=["POST"])
@access_api.login_required
def postEditLogin():
    if request.method == "POST":
        payload = request.get_json()['payload']
        administration_database.updateLoginsTuple(payload)
        return jsonify({'error': False, 'message': f"User was Added Successfully."})
    return jsonify({'error': True, 'message': f"These was an error with adding this user."})

@admin_api.route('/api/user/postEditLoginPassword', methods=["POST"])
@access_api.login_required
def postEditLoginPassword():
    if request.method == "POST":
        payload = request.get_json()['payload']
        user = administration_database.selectLoginsTuple((payload['id'],))
        if hashlib.sha256(payload['current_password'].encode()).hexdigest() != user['password']:
            return jsonify({'error': True, 'message': "The provided current password is incorrect"})
        payload['update']['password'] = hashlib.sha256(payload['update']['password'].encode()).hexdigest()
        administration_database.updateLoginsTuple(payload)
        return jsonify({'error': False, 'message': f"Password was changed successfully."})
    return jsonify({'error': True, 'message': f"These was an error with updating this Users password."})
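This deleted module registered its routes under relative paths ('/', '/site/<id>', '/api/getSites', ...), which implies the blueprint was mounted with a URL prefix when the application was created. That registration code is not part of this compare; a typical wiring, sketched here with an assumed module path and prefix, would have looked something like:

# Hypothetical app wiring; the actual application factory is not shown in this compare.
from flask import Flask
from application.administration.administration_api import admin_api  # assumed module path

app = Flask(__name__)
app.register_blueprint(admin_api, url_prefix="/admin")  # '/site/<id>' -> '/admin/site/<id>'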
@ -1,767 +0,0 @@
|
|||||||
# 3RD PARTY IMPORTS
|
|
||||||
import psycopg2
|
|
||||||
|
|
||||||
# APPLICATION IMPORTS
|
|
||||||
from application import postsqldb
|
|
||||||
import config
|
|
||||||
|
|
||||||
def getUser(conn, payload, convert=False):
|
|
||||||
"""_summary_
|
|
||||||
|
|
||||||
Args:
|
|
||||||
conn (_type_): _description_
|
|
||||||
payload (tuple): (username, password)
|
|
||||||
convert (bool, optional): _description_. Defaults to False.
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
DatabaseError: _description_
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
_type_: _description_
|
|
||||||
"""
|
|
||||||
user = ()
|
|
||||||
try:
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
sql = f"SELECT * FROM logins WHERE username=%s;"
|
|
||||||
cur.execute(sql, (payload[0],))
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and rows[2] == payload[1] and convert:
|
|
||||||
user = tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and rows[2] == payload[1] and not convert:
|
|
||||||
user = rows
|
|
||||||
except Exception as error:
|
|
||||||
raise DatabaseError(error, payload, sql)
|
|
||||||
return user
|
|
||||||
|
|
||||||
def selectLoginsTuple(payload, convert=True, conn=None):
|
|
||||||
user = ()
|
|
||||||
self_conn = False
|
|
||||||
with open("application/administration/sql/selectLoginsUser.sql", "r") as file:
|
|
||||||
sql = file.read()
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
user = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
user = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return user
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def selectSitesTuple(payload, convert=True, conn=None):
|
|
||||||
record = []
|
|
||||||
self_conn = False
|
|
||||||
sql = f"SELECT * FROM sites WHERE id=%s;"
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
record = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
record = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return record
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, (), sql)
|
|
||||||
|
|
||||||
def selectSiteTupleByName(payload, convert=True, conn=None):
|
|
||||||
""" payload (tuple): (site_name,) """
|
|
||||||
site = ()
|
|
||||||
self_conn = False
|
|
||||||
select_site_sql = f"SELECT * FROM sites WHERE site_name = %s;"
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(select_site_sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
site = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
site = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return site
|
|
||||||
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, select_site_sql)
|
|
||||||
|
|
||||||
def selectSitesTuples(convert=True, conn=None):
|
|
||||||
sites = []
|
|
||||||
self_conn = False
|
|
||||||
sql = f"SELECT * FROM sites;"
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql)
|
|
||||||
rows = cur.fetchall()
|
|
||||||
if rows and convert:
|
|
||||||
sites = [postsqldb.tupleDictionaryFactory(cur.description, row) for row in rows]
|
|
||||||
elif rows and not convert:
|
|
||||||
sites = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return sites
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, (), sql)
|
|
||||||
|
|
||||||
def selectRolesTuple(payload, convert=True, conn=None):
|
|
||||||
role = []
|
|
||||||
self_conn = False
|
|
||||||
sql = f"SELECT roles.*, row_to_json(sites.*) as site FROM roles LEFT JOIN sites ON sites.id = roles.site_id WHERE roles.id=%s;"
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
role = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
role = rows
|
|
||||||
if self_conn:
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return role
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, (), sql)
|
|
||||||
|
|
||||||
def selectRolesTupleBySite(payload, convert=True, conn=None):
|
|
||||||
""" payload (tuple): (site_id,) """
|
|
||||||
roles = ()
|
|
||||||
self_conn = False
|
|
||||||
select_roles_sql = f"SELECT * FROM roles WHERE site_id = %s;"
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(select_roles_sql, payload)
|
|
||||||
rows = cur.fetchall()
|
|
||||||
if rows and convert:
|
|
||||||
roles = [postsqldb.tupleDictionaryFactory(cur.description, role) for role in rows]
|
|
||||||
elif rows and not convert:
|
|
||||||
roles = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return roles
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, select_roles_sql)
|
|
||||||
|
|
||||||
def paginateSitesTuples(payload, convert=True, conn=None):
    """ payload (tuple): (limit, offset) """
    recordsets = []
    count = 0
    self_conn = False
    sql = "SELECT * FROM sites LIMIT %s OFFSET %s;"
    sql_count = "SELECT COUNT(*) FROM sites;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchall()
            if rows and convert:
                recordsets = [postsqldb.tupleDictionaryFactory(cur.description, row) for row in rows]
            elif rows and not convert:
                recordsets = rows
            cur.execute(sql_count)
            count = cur.fetchone()[0]

        if self_conn:
            conn.close()

        return recordsets, count
    except Exception as error:
        raise postsqldb.DatabaseError(error, (), sql)

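# Usage sketch (illustrative, not part of the module): the (limit, offset) payload
# maps straight onto the LIMIT %s OFFSET %s placeholders above, and the second
# return value is the total row count for building pagination controls.
page_size, page = 25, 0
sites, total = paginateSitesTuples((page_size, page * page_size))
print(f"showing {len(sites)} of {total} sites")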
def paginateRolesTuples(payload, convert=True, conn=None):
|
|
||||||
""" payload (tuple): (limit, offset) """
|
|
||||||
recordset = []
|
|
||||||
self_conn = False
|
|
||||||
sql = f"SELECT roles.*, row_to_json(sites.*) as site FROM roles LEFT JOIN sites ON sites.id = roles.site_id LIMIT %s OFFSET %s;"
|
|
||||||
sql_count = f"SELECT COUNT(*) FROM roles;"
|
|
||||||
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchall()
|
|
||||||
if rows and convert:
|
|
||||||
recordset = [postsqldb.tupleDictionaryFactory(cur.description, row) for row in rows]
|
|
||||||
elif rows and not convert:
|
|
||||||
recordset = rows
|
|
||||||
cur.execute(sql_count)
|
|
||||||
count = cur.fetchone()[0]
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return recordset, count
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def paginateLoginsTuples(payload, convert=True, conn=None):
|
|
||||||
""" payload (tuple): (limit, offset) """
|
|
||||||
recordset = []
|
|
||||||
self_conn = False
|
|
||||||
sql = f"SELECT * FROM logins LIMIT %s OFFSET %s;"
|
|
||||||
sql_count = f"SELECT COUNT(*) FROM logins;"
|
|
||||||
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchall()
|
|
||||||
if rows and convert:
|
|
||||||
recordset = [postsqldb.tupleDictionaryFactory(cur.description, row) for row in rows]
|
|
||||||
elif rows and not convert:
|
|
||||||
recordset = rows
|
|
||||||
|
|
||||||
cur.execute(sql_count)
|
|
||||||
count = cur.fetchone()[0]
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return recordset, count
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def insertSitesTuple(payload, convert=True, conn=None):
    """ payload (tuple): (site_name[str], site_description[str], creation_date[timestamp], site_owner_id[int],
        flags[dict], default_zone[str], default_auto_issue_location[str], default_primary_location[str]) """
    site_tuple = ()
    self_conn = False
    with open("application/administration/sql/insertSitesTuple.sql", "r+") as file:
        sql = file.read()
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                site_tuple = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                site_tuple = rows

        if self_conn:
            conn.commit()
            conn.close()

        return site_tuple
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)

def insertRolesTuple(payload, convert=True, conn=None):
|
|
||||||
""" payload (tuple): (role_name[str], role_description[str], site_id[int], flags[jsonb]) """
|
|
||||||
role_tuple = ()
|
|
||||||
self_conn = False
|
|
||||||
with open(f"application/administration/sql/insertRolesTuple.sql", "r+") as file:
|
|
||||||
sql = file.read()
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
role_tuple = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
role_tuple = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return role_tuple
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def insertLoginsTuple(payload, convert=True, conn=None):
|
|
||||||
"""payload (tuple): (username, password, email, favorites, unseen_pantry_items, unseen_groups, unseen_shopping_lists,
|
|
||||||
unseen_recipes, seen_pantry_items, seen_groups, seen_shopping_lists, seen_recipes,
|
|
||||||
sites, site_roles, system_admin, flags, row_type)"""
|
|
||||||
login = ()
|
|
||||||
self_conn = False
|
|
||||||
with open(f"application/administration/sql/insertLoginsTupleFull.sql", "r+") as file:
|
|
||||||
sql = file.read()
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
login = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
login = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return login
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def insertZonesTuple(site, payload, convert=True, conn=None):
|
|
||||||
""" payload (tuple): (name[str],) """
|
|
||||||
zone = ()
|
|
||||||
self_conn = False
|
|
||||||
with open(f"application/administration/sql/insertZonesTuple.sql", "r+") as file:
|
|
||||||
sql = file.read().replace("%%site_name%%", site)
|
|
||||||
try:
|
|
||||||
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
zone = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
zone = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return zone
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def insertLocationsTuple(site, payload, convert=True, conn=None):
|
|
||||||
""" payload (tuple): (uuid[str], name[str], zone_id[int], items[jsonb]) """
|
|
||||||
location = ()
|
|
||||||
self_conn = False
|
|
||||||
with open(f"application/administration/sql/insertLocationsTuple.sql", "r+") as file:
|
|
||||||
sql = file.read().replace("%%site_name%%", site)
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
location = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
location = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return location
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def insertVendorsTuple(site, payload, convert=True, conn=None):
|
|
||||||
""" payload (tuple): (vendor_name[str], vendor_address[str], creation_date[timestamp], created_by[int], phone_number[str]) """
|
|
||||||
vendor = ()
|
|
||||||
self_conn = False
|
|
||||||
with open(f"application/administration/sql/insertVendorsTuple.sql", "r+") as file:
|
|
||||||
sql = file.read().replace("%%site_name%%", site)
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
vendor = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
vendor = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return vendor
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def insertBrandsTuple(site, payload, convert=True, conn=None):
|
|
||||||
""" payload (tuple): (brand_name[str], ) """
|
|
||||||
brand = ()
|
|
||||||
self_conn = False
|
|
||||||
with open(f"application/administration/sql/insertBrandsTuple.sql", "r+") as file:
|
|
||||||
sql = file.read().replace("%%site_name%%", site)
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
brand = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
brand = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return brand
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def updateAddLoginSitesRoles(payload, convert=True, conn=None):
    """ payload (tuple): (site_id, role_id, login_id) """
    sql = "UPDATE logins SET sites = sites || %s, site_roles = site_roles || %s WHERE id=%s RETURNING *;"
    login = ()
    self_conn = False
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                login = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                login = rows

        if self_conn:
            conn.commit()
            conn.close()

        return login
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)

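# Illustrative call (values invented): the first two payload entries are appended
# to the logins.sites and logins.site_roles Postgres arrays via the || operator,
# and the last entry selects which login row to update.
updated_login = updateAddLoginSitesRoles((3, 12, 7))   # give login 7 site 3 / role 12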
def updateSitesTuple(payload, convert=True, conn=None):
    """ payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to ...}} """
    updated = ()
    self_conn = False
    set_clause, values = postsqldb.updateStringFactory(payload['update'])
    values.append(payload['id'])
    sql = f"UPDATE sites SET {set_clause} WHERE id=%s RETURNING *;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, values)
            rows = cur.fetchone()
            if rows and convert:
                updated = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                updated = rows

        if self_conn:
            conn.commit()
            conn.close()

        return updated

    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)

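# Usage sketch (assumes postsqldb.updateStringFactory builds a "col = %s, ..." set
# clause plus the matching value list from the 'update' dict; values are invented):
payload = {
    'id': 3,
    'update': {
        'site_description': "Main household pantry",
        'default_zone': 5
    }
}
updated_site = updateSitesTuple(payload)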
def updateUsersSites(payload, convert=True, conn=None):
    """ payload (dict): {'site_id': site_id} """
    self_conn = False
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        select_sql = "SELECT logins.id FROM logins WHERE sites @> ARRAY[%s];"
        with conn.cursor() as cur:
            cur.execute(select_sql, (payload['site_id'], ))
            user_ids = tuple([row[0] for row in cur.fetchall()])

        update_sql = "UPDATE logins SET sites = array_remove(sites, %s) WHERE id = %s;"
        with conn.cursor() as cur:
            for user_id in user_ids:
                cur.execute(update_sql, (payload['site_id'], user_id))

        if self_conn:
            conn.commit()
            conn.close()

    except Exception as error:
        raise error

def updateUsersRoles(payload, convert=True, conn=None):
|
|
||||||
""" payload: {'role_id',} """
|
|
||||||
self_conn = False
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
select_sql = f"SELECT logins.id FROM logins WHERE site_roles @> ARRAY[%s];"
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(select_sql, (payload['role_id'], ))
|
|
||||||
users = tuple([row[0] for row in cur.fetchall()])
|
|
||||||
|
|
||||||
update_sql = f"UPDATE logins SET site_roles = array_remove(site_roles, %s) WHERE id = %s;"
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
for user_id in users:
|
|
||||||
cur.execute(update_sql, (payload['role_id'], user_id))
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
except Exception as error:
|
|
||||||
raise error
|
|
||||||
|
|
||||||
def updateRolesTuple(payload, convert=True, conn=None):
|
|
||||||
""" payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}"""
|
|
||||||
updated = ()
|
|
||||||
self_conn = False
|
|
||||||
set_clause, values = postsqldb.updateStringFactory(payload['update'])
|
|
||||||
values.append(payload['id'])
|
|
||||||
sql = f"UPDATE roles SET {set_clause} WHERE id=%s RETURNING *;"
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, values)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
updated = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
updated = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return updated
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def updateLoginsTuple(payload, convert=True, conn=None):
|
|
||||||
""" payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}} """
|
|
||||||
updated = ()
|
|
||||||
self_conn = False
|
|
||||||
set_clause, values = postsqldb.updateStringFactory(payload['update'])
|
|
||||||
values.append(payload['id'])
|
|
||||||
sql = f"UPDATE logins SET {set_clause} WHERE id=%s RETURNING *;"
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, values)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
updated = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
updated = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return updated
|
|
||||||
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def createTable(site, table, conn=None):
    self_conn = False
    with open(f"application/administration/sql/CREATE/{table}.sql", 'r') as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql)

        if self_conn:
            conn.commit()
            conn.close()

    except Exception as error:
        raise postsqldb.DatabaseError(error, sql, table)

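# Illustrative usage (site name invented): createTable reads a per-table SQL
# template and substitutes the %%site_name%% token, so each site gets its own
# prefixed tables such as pantry1_zones; dropTable below mirrors it with the
# DROP templates.
createTable("pantry1", "zones")   # runs application/administration/sql/CREATE/zones.sql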
def dropTable(site, table, conn=None):
|
|
||||||
self_conn = False
|
|
||||||
with open(f"application/administration/sql/DROP/{table}.sql", 'r') as file:
|
|
||||||
sql = file.read().replace("%%site_name%%", site)
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql)
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, sql, table)
|
|
||||||
|
|
||||||
def deleteSitesTuple(payload, convert=True, conn=None):
    """payload (tuple): (tuple_id, )"""
    deleted = ()
    self_conn = False
    sql = f"WITH deleted_rows AS (DELETE FROM sites WHERE id IN ({','.join(['%s'] * len(payload))}) RETURNING *) SELECT * FROM deleted_rows;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchall()
            if rows and convert:
                deleted = [postsqldb.tupleDictionaryFactory(cur.description, r) for r in rows]
            elif rows and not convert:
                deleted = rows

        if self_conn:
            conn.commit()
            conn.close()

        return deleted
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)

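# Brief sketch (ids invented): the IN (...) clause is built with one %s placeholder
# per id, so several sites can be removed in a single call; the CTE returns the
# deleted rows for logging or undo bookkeeping.
removed = deleteSitesTuple((4, 9))
for row in removed:
    print(row['site_name'], "deleted")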
def deleteRolesTuple(payload, convert=True, conn=None):
|
|
||||||
"""payload (tuple): (tuple_id, )"""
|
|
||||||
deleted = ()
|
|
||||||
self_conn = False
|
|
||||||
sql = f"WITH deleted_rows AS (DELETE FROM roles WHERE id IN ({','.join(['%s'] * len(payload))}) RETURNING *) SELECT * FROM deleted_rows;"
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchall()
|
|
||||||
if rows and convert:
|
|
||||||
deleted = [postsqldb.tupleDictionaryFactory(cur.description, r) for r in rows]
|
|
||||||
elif rows and not convert:
|
|
||||||
deleted = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return deleted
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
@ -1,167 +0,0 @@
|
|||||||
# 3RD PARTY IMPORTS
|
|
||||||
import psycopg2
|
|
||||||
import datetime
|
|
||||||
|
|
||||||
# APPLICATION IMPORTS
|
|
||||||
import config
|
|
||||||
from application import postsqldb, database_payloads
|
|
||||||
from application.administration import administration_database
|
|
||||||
|
|
||||||
def dropSiteTables(conn, site_manager):
|
|
||||||
try:
|
|
||||||
for table in site_manager.drop_order:
|
|
||||||
administration_database.dropTable(site_manager.site_name, table, conn=conn)
|
|
||||||
with open("logs/process.log", "a+") as file:
|
|
||||||
file.write(f"{datetime.datetime.now()} --- INFO --- {table} DROPPED!\n")
|
|
||||||
except Exception as error:
|
|
||||||
raise error
|
|
||||||
|
|
||||||
def deleteSite(site, user, conn=None):
|
|
||||||
"""Uses a Site Manager to delete a site from the system.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
site_manager (MyDataclasses.SiteManager):
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
Exception:
|
|
||||||
"""
|
|
||||||
self_conn = False
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = False
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
try:
|
|
||||||
admin_user = (user['username'], user['password'], user['email'], user['row_type'])
|
|
||||||
site_manager = database_payloads.SiteManager(
|
|
||||||
site['site_name'],
|
|
||||||
admin_user,
|
|
||||||
site['default_zone'],
|
|
||||||
site['default_primary_location'],
|
|
||||||
site['site_description']
|
|
||||||
)
|
|
||||||
|
|
||||||
roles = administration_database.selectRolesTupleBySite((site['id'],), conn=conn)
|
|
||||||
administration_database.deleteRolesTuple([role['id'] for role in roles], conn=conn)
|
|
||||||
|
|
||||||
dropSiteTables(conn, site_manager)
|
|
||||||
|
|
||||||
for role in roles:
|
|
||||||
administration_database.updateUsersRoles({'role_id': role['id']}, conn=conn)
|
|
||||||
|
|
||||||
administration_database.updateUsersSites({'site_id': site['id']}, conn=conn)
|
|
||||||
|
|
||||||
site = administration_database.deleteSitesTuple((site['id'], ), conn=conn)
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
except Exception as error:
|
|
||||||
with open("logs/process.log", "a+") as file:
|
|
||||||
file.write(f"{datetime.datetime.now()} --- ERROR --- {error}\n")
|
|
||||||
conn.rollback()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
def addAdminUser(conn, site_manager, convert=True):
|
|
||||||
admin_user = ()
|
|
||||||
try:
|
|
||||||
sql = f"INSERT INTO logins (username, password, email, row_type) VALUES (%s, %s, %s, %s) ON CONFLICT (username) DO UPDATE SET username = excluded.username RETURNING *;"
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, site_manager.admin_user)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
admin_user = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
admin_user = rows
|
|
||||||
with open("logs/process.log", "a+") as file:
|
|
||||||
file.write(f"{datetime.datetime.now()} --- INFO --- Admin User Created!\n")
|
|
||||||
except Exception as error:
|
|
||||||
raise error
|
|
||||||
return admin_user
|
|
||||||
|
|
||||||
def setupSiteTables(conn, site_manager):
|
|
||||||
try:
|
|
||||||
for table in site_manager.create_order:
|
|
||||||
administration_database.createTable(site_manager.site_name, table, conn=conn)
|
|
||||||
with open("logs/process.log", "a+") as file:
|
|
||||||
file.write(f"{datetime.datetime.now()} --- INFO --- {table} Created!\n")
|
|
||||||
except Exception as error:
|
|
||||||
raise error
|
|
||||||
|
|
||||||
def addSite(payload, conn=None):
|
|
||||||
"""uses a Site Manager to add a site to the system
|
|
||||||
|
|
||||||
Args:
|
|
||||||
site_manager (MyDataclasses.SiteManager):
|
|
||||||
"""
|
|
||||||
self_conn = False
|
|
||||||
site_manager = database_payloads.SiteManager(
|
|
||||||
payload['site_name'],
|
|
||||||
payload['admin_user'],
|
|
||||||
payload['default_zone'],
|
|
||||||
payload['default_primary_location'],
|
|
||||||
payload['site_description']
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = False
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
setupSiteTables(conn, site_manager)
|
|
||||||
|
|
||||||
admin_user = addAdminUser(conn, site_manager)
|
|
||||||
|
|
||||||
site = database_payloads.SitePayload(
|
|
||||||
site_name=site_manager.site_name,
|
|
||||||
site_description=site_manager.description,
|
|
||||||
site_owner_id=admin_user['id']
|
|
||||||
)
|
|
||||||
|
|
||||||
site = administration_database.insertSitesTuple(site.payload(), conn=conn)
|
|
||||||
|
|
||||||
role = database_payloads.RolePayload("Admin", f"Admin for {site['site_name']}", site['id'])
|
|
||||||
role = administration_database.insertRolesTuple(role.payload(), conn=conn)
|
|
||||||
|
|
||||||
admin_user = administration_database.updateAddLoginSitesRoles((site["id"], role["id"], admin_user["id"]), conn=conn)
|
|
||||||
|
|
||||||
default_zone = database_payloads.ZonesPayload(site_manager.default_zone)
|
|
||||||
default_zone = administration_database.insertZonesTuple(site["site_name"], default_zone.payload(), conn=conn)
|
|
||||||
uuid = f"{site_manager.default_zone}@{site_manager.default_location}"
|
|
||||||
|
|
||||||
default_location = database_payloads.LocationsPayload(uuid, site_manager.default_location, default_zone['id'])
|
|
||||||
default_location = administration_database.insertLocationsTuple(site['site_name'], default_location.payload(), conn=conn)
|
|
||||||
|
|
||||||
payload = {
|
|
||||||
'id': site['id'],
|
|
||||||
'update': {
|
|
||||||
'default_zone': default_zone['id'],
|
|
||||||
'default_auto_issue_location': default_location['id'],
|
|
||||||
'default_primary_location': default_location['id']
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
administration_database.updateSitesTuple(payload, conn=conn)
|
|
||||||
|
|
||||||
|
|
||||||
blank_vendor = database_payloads.VendorsPayload("None", admin_user['id'])
|
|
||||||
blank_brand = database_payloads.BrandsPayload("None")
|
|
||||||
|
|
||||||
blank_vendor = administration_database.insertVendorsTuple(site['site_name'], blank_vendor.payload(), conn=conn)
|
|
||||||
blank_brand = administration_database.insertBrandsTuple(site['site_name'], blank_brand.payload(), conn=conn)
|
|
||||||
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
except Exception as error:
|
|
||||||
with open("logs/process.log", "a+") as file:
|
|
||||||
file.write(f"{datetime.datetime.now()} --- ERROR --- {error}\n")
|
|
||||||
conn.rollback()
|
|
||||||
raise error
|
|
||||||
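# Hedged sketch of the payload addSite expects (all values invented); admin_user
# follows the (username, password, email, row_type) order used by addAdminUser.
new_site = {
    'site_name': "pantry1",
    'site_description': "Primary household site",
    'admin_user': ("admin", "hashed-password", "admin@example.com", "user"),
    'default_zone': "Kitchen",
    'default_primary_location': "Shelf A"
}
addSite(new_site)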
@ -1,4 +0,0 @@
|
|||||||
INSERT INTO %%site_name%%_brands
|
|
||||||
(name)
|
|
||||||
VALUES (%s)
|
|
||||||
RETURNING *;
|
|
||||||
@ -1,4 +0,0 @@
|
|||||||
INSERT INTO %%site_name%%_locations
|
|
||||||
(uuid, name, zone_id)
|
|
||||||
VALUES (%s, %s, %s)
|
|
||||||
RETURNING *;
|
|
||||||
@ -1,6 +0,0 @@
|
|||||||
INSERT INTO logins
|
|
||||||
(username, password, email, favorites, unseen_pantry_items, unseen_groups, unseen_shopping_lists,
|
|
||||||
unseen_recipes, seen_pantry_items, seen_groups, seen_shopping_lists, seen_recipes,
|
|
||||||
sites, site_roles, system_admin, flags, row_type)
|
|
||||||
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
|
|
||||||
RETURNING *;
|
|
||||||
@ -1,4 +0,0 @@
|
|||||||
INSERT INTO roles
|
|
||||||
(role_name, role_description, site_id, flags)
|
|
||||||
VALUES (%s, %s, %s, %s)
|
|
||||||
RETURNING *;
|
|
||||||
@ -1,5 +0,0 @@
|
|||||||
INSERT INTO sites
|
|
||||||
(site_name, site_description, creation_date, site_owner_id, flags, default_zone,
|
|
||||||
default_auto_issue_location, default_primary_location)
|
|
||||||
VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
|
|
||||||
RETURNING *;
|
|
||||||
@ -1,4 +0,0 @@
|
|||||||
INSERT INTO %%site_name%%_vendors
|
|
||||||
(vendor_name, vendor_address, creation_date, created_by, phone_number)
|
|
||||||
VALUES (%s, %s, %s, %s, %s)
|
|
||||||
RETURNING *;
|
|
||||||
@ -1,4 +0,0 @@
|
|||||||
INSERT INTO %%site_name%%_zones
|
|
||||||
(name, description)
|
|
||||||
VALUES (%s, %s)
|
|
||||||
RETURNING *;
|
|
||||||
@ -1,16 +0,0 @@
|
|||||||
WITH passed_id AS (SELECT %s AS passed_id),
|
|
||||||
cte_login AS (
|
|
||||||
SELECT logins.* FROM logins
|
|
||||||
WHERE logins.id = (SELECT passed_id FROM passed_id)
|
|
||||||
),
|
|
||||||
cte_roles AS (
|
|
||||||
SELECT roles.*,
|
|
||||||
row_to_json(sites.*) AS site
|
|
||||||
FROM roles
|
|
||||||
LEFT JOIN sites ON sites.id = roles.site_id
|
|
||||||
WHERE roles.id = ANY(SELECT unnest(site_roles) FROM cte_login)
|
|
||||||
)
|
|
||||||
|
|
||||||
SELECT login.*,
|
|
||||||
(SELECT COALESCE(array_agg(row_to_json(r)), '{}') FROM cte_roles r) AS site_roles
|
|
||||||
FROM cte_login login;
|
|
||||||
@ -1,610 +0,0 @@
|
|||||||
from dataclasses import dataclass, field
|
|
||||||
import json, datetime
|
|
||||||
from database import lst2pgarr
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class LogisticsInfoPayload:
|
|
||||||
barcode: str
|
|
||||||
primary_location: int
|
|
||||||
primary_zone: int
|
|
||||||
auto_issue_location: int
|
|
||||||
auto_issue_zone: int
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (self.barcode,
|
|
||||||
self.primary_location,
|
|
||||||
self.primary_zone,
|
|
||||||
self.auto_issue_location,
|
|
||||||
self.auto_issue_zone)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ItemInfoPayload:
|
|
||||||
barcode: str
|
|
||||||
packaging: str = ""
|
|
||||||
uom_quantity: float = 1.0
|
|
||||||
uom: int = 1
|
|
||||||
cost: float = 0.0
|
|
||||||
safety_stock: float = 0.0
|
|
||||||
lead_time_days: float = 0.0
|
|
||||||
ai_pick: bool = False
|
|
||||||
prefixes: list = field(default_factory=list)
|
|
||||||
|
|
||||||
def __post_init__(self):
|
|
||||||
if not isinstance(self.barcode, str):
|
|
||||||
raise TypeError(f"barcode must be of type str; not {type(self.barcode)}")
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.barcode,
|
|
||||||
self.packaging,
|
|
||||||
self.uom_quantity,
|
|
||||||
self.uom,
|
|
||||||
self.cost,
|
|
||||||
self.safety_stock,
|
|
||||||
self.lead_time_days,
|
|
||||||
self.ai_pick,
|
|
||||||
lst2pgarr(self.prefixes)
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class FoodInfoPayload:
|
|
||||||
food_groups: list = field(default_factory=list)
|
|
||||||
ingrediants: list = field(default_factory=list)
|
|
||||||
nutrients: dict = field(default_factory=dict)
|
|
||||||
expires: bool = False
|
|
||||||
default_expiration: float = 0.0
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
lst2pgarr(self.food_groups),
|
|
||||||
lst2pgarr(self.ingrediants),
|
|
||||||
json.dumps(self.nutrients),
|
|
||||||
self.expires,
|
|
||||||
self.default_expiration
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ItemsPayload:
|
|
||||||
barcode: str
|
|
||||||
item_name: str
|
|
||||||
item_info_id: int
|
|
||||||
logistics_info_id: int
|
|
||||||
food_info_id: int
|
|
||||||
brand: int = 0
|
|
||||||
description: str = ""
|
|
||||||
tags: list = field(default_factory=list)
|
|
||||||
links: dict = field(default_factory=dict)
|
|
||||||
row_type: str = ""
|
|
||||||
item_type: str = ""
|
|
||||||
search_string: str =""
|
|
||||||
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.barcode,
|
|
||||||
self.item_name,
|
|
||||||
self.brand,
|
|
||||||
self.description,
|
|
||||||
lst2pgarr(self.tags),
|
|
||||||
json.dumps(self.links),
|
|
||||||
self.item_info_id,
|
|
||||||
self.logistics_info_id,
|
|
||||||
self.food_info_id,
|
|
||||||
self.row_type,
|
|
||||||
self.item_type,
|
|
||||||
self.search_string
|
|
||||||
)
|
|
||||||
|
|
||||||
# done
|
|
||||||
@dataclass
|
|
||||||
class TransactionPayload:
|
|
||||||
timestamp: datetime.datetime
|
|
||||||
logistics_info_id: int
|
|
||||||
barcode: str
|
|
||||||
name: str
|
|
||||||
transaction_type: str
|
|
||||||
quantity: float
|
|
||||||
description: str
|
|
||||||
user_id: int
|
|
||||||
data: dict = field(default_factory=dict)
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.timestamp,
|
|
||||||
self.logistics_info_id,
|
|
||||||
self.barcode,
|
|
||||||
self.name,
|
|
||||||
self.transaction_type,
|
|
||||||
self.quantity,
|
|
||||||
self.description,
|
|
||||||
self.user_id,
|
|
||||||
json.dumps(self.data)
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class CostLayerPayload:
|
|
||||||
aquisition_date: datetime.datetime
|
|
||||||
quantity: float
|
|
||||||
cost: float
|
|
||||||
currency_type: str
|
|
||||||
vendor: int = 0
|
|
||||||
expires: datetime.datetime = None
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.aquisition_date,
|
|
||||||
self.quantity,
|
|
||||||
self.cost,
|
|
||||||
self.currency_type,
|
|
||||||
self.expires,
|
|
||||||
self.vendor
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ItemLinkPayload:
|
|
||||||
barcode: str
|
|
||||||
link: int
|
|
||||||
data: dict = field(default_factory=dict)
|
|
||||||
conv_factor: float = 1
|
|
||||||
|
|
||||||
def __post_init__(self):
|
|
||||||
if not isinstance(self.barcode, str):
|
|
||||||
raise TypeError(f"barcode must be of type str; not {type(self.barocde)}")
|
|
||||||
if not isinstance(self.link, int):
|
|
||||||
raise TypeError(f"link must be of type str; not {type(self.link)}")
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.barcode,
|
|
||||||
self.link,
|
|
||||||
json.dumps(self.data),
|
|
||||||
self.conv_factor
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class GroupPayload:
|
|
||||||
name: str
|
|
||||||
description: str
|
|
||||||
group_type: str = "plain"
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.name,
|
|
||||||
self.description,
|
|
||||||
self.group_type
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class GroupItemPayload:
|
|
||||||
uuid: str
|
|
||||||
gr_id: int
|
|
||||||
item_type: str
|
|
||||||
item_name:str
|
|
||||||
uom: str
|
|
||||||
qty: float = 0.0
|
|
||||||
item_id: int = None
|
|
||||||
links: dict = field(default_factory=dict)
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.uuid,
|
|
||||||
self.gr_id,
|
|
||||||
self.item_type,
|
|
||||||
self.item_name,
|
|
||||||
self.uom,
|
|
||||||
self.qty,
|
|
||||||
self.item_id,
|
|
||||||
json.dumps(self.links)
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class RecipeItemPayload:
|
|
||||||
uuid: str
|
|
||||||
rp_id: int
|
|
||||||
item_type: str
|
|
||||||
item_name:str
|
|
||||||
uom: str
|
|
||||||
qty: float = 0.0
|
|
||||||
item_id: int = None
|
|
||||||
links: dict = field(default_factory=dict)
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.uuid,
|
|
||||||
self.rp_id,
|
|
||||||
self.item_type,
|
|
||||||
self.item_name,
|
|
||||||
self.uom,
|
|
||||||
self.qty,
|
|
||||||
self.item_id,
|
|
||||||
json.dumps(self.links)
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class RecipePayload:
|
|
||||||
name: str
|
|
||||||
author: int
|
|
||||||
description: str
|
|
||||||
creation_date: datetime.datetime = field(init=False)
|
|
||||||
instructions: list = field(default_factory=list)
|
|
||||||
picture_path: str = ""
|
|
||||||
|
|
||||||
def __post_init__(self):
|
|
||||||
self.creation_date = datetime.datetime.now()
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.name,
|
|
||||||
self.author,
|
|
||||||
self.description,
|
|
||||||
self.creation_date,
|
|
||||||
lst2pgarr(self.instructions),
|
|
||||||
self.picture_path
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ReceiptItemPayload:
|
|
||||||
type: str
|
|
||||||
receipt_id: int
|
|
||||||
barcode: str
|
|
||||||
name: str
|
|
||||||
qty: float = 1.0
|
|
||||||
uom: str = "each"
|
|
||||||
data: dict = field(default_factory=dict)
|
|
||||||
status: str = "Unresolved"
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.type,
|
|
||||||
self.receipt_id,
|
|
||||||
self.barcode,
|
|
||||||
self.name,
|
|
||||||
self.qty,
|
|
||||||
self.uom,
|
|
||||||
json.dumps(self.data),
|
|
||||||
self.status
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ReceiptPayload:
|
|
||||||
receipt_id: str
|
|
||||||
receipt_status: str = "Unresolved"
|
|
||||||
date_submitted: datetime.datetime = field(init=False)
|
|
||||||
submitted_by: int = 0
|
|
||||||
vendor_id: int = 1
|
|
||||||
files: dict = field(default_factory=dict)
|
|
||||||
|
|
||||||
def __post_init__(self):
|
|
||||||
self.date_submitted = datetime.datetime.now()
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.receipt_id,
|
|
||||||
self.receipt_status,
|
|
||||||
self.date_submitted,
|
|
||||||
self.submitted_by,
|
|
||||||
self.vendor_id,
|
|
||||||
json.dumps(self.files)
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ShoppingListItemPayload:
|
|
||||||
uuid: str
|
|
||||||
sl_id: int
|
|
||||||
item_type: str
|
|
||||||
item_name: str
|
|
||||||
uom: str
|
|
||||||
qty: float
|
|
||||||
item_id: int = None
|
|
||||||
links: dict = field(default_factory=dict)
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.uuid,
|
|
||||||
self.sl_id,
|
|
||||||
self.item_type,
|
|
||||||
self.item_name,
|
|
||||||
self.uom,
|
|
||||||
self.qty,
|
|
||||||
self.item_id,
|
|
||||||
json.dumps(self.links)
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ShoppingListPayload:
|
|
||||||
name: str
|
|
||||||
description: str
|
|
||||||
author: int
|
|
||||||
type: str = "plain"
|
|
||||||
creation_date: datetime.datetime = field(init=False)
|
|
||||||
|
|
||||||
def __post_init__(self):
|
|
||||||
self.creation_date = datetime.datetime.now()
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.name,
|
|
||||||
self.description,
|
|
||||||
self.author,
|
|
||||||
self.creation_date,
|
|
||||||
self.type
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# DONE
|
|
||||||
@dataclass
|
|
||||||
class SitePayload:
|
|
||||||
site_name: str
|
|
||||||
site_description: str
|
|
||||||
site_owner_id: int
|
|
||||||
default_zone: str = None
|
|
||||||
default_auto_issue_location: str = None
|
|
||||||
default_primary_location: str = None
|
|
||||||
creation_date: datetime.datetime = field(init=False)
|
|
||||||
flags: dict = field(default_factory=dict)
|
|
||||||
|
|
||||||
def __post_init__(self):
|
|
||||||
self.creation_date = datetime.datetime.now()
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.site_name,
|
|
||||||
self.site_description,
|
|
||||||
self.creation_date,
|
|
||||||
self.site_owner_id,
|
|
||||||
json.dumps(self.flags),
|
|
||||||
self.default_zone,
|
|
||||||
self.default_auto_issue_location,
|
|
||||||
self.default_primary_location
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_dictionary(self):
|
|
||||||
return self.__dict__
|
|
||||||
|
|
||||||
#DONE
|
|
||||||
@dataclass
|
|
||||||
class RolePayload:
|
|
||||||
role_name:str
|
|
||||||
role_description:str
|
|
||||||
site_id: int
|
|
||||||
flags: dict = field(default_factory=dict)
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.role_name,
|
|
||||||
self.role_description,
|
|
||||||
self.site_id,
|
|
||||||
json.dumps(self.flags)
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_dictionary(self):
|
|
||||||
return self.__dict__
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class LoginsPayload:
|
|
||||||
username:str
|
|
||||||
password:str
|
|
||||||
email: str
|
|
||||||
row_type: str
|
|
||||||
system_admin: bool = False
|
|
||||||
flags: dict = field(default_factory=dict)
|
|
||||||
favorites: dict = field(default_factory=dict)
|
|
||||||
unseen_pantry_items: list = field(default_factory=list)
|
|
||||||
unseen_groups: list = field(default_factory=list)
|
|
||||||
unseen_shopping_lists: list = field(default_factory=list)
|
|
||||||
unseen_recipes: list = field(default_factory=list)
|
|
||||||
seen_pantry_items: list = field(default_factory=list)
|
|
||||||
seen_groups: list = field(default_factory=list)
|
|
||||||
seen_shopping_lists: list = field(default_factory=list)
|
|
||||||
seen_recipes: list = field(default_factory=list)
|
|
||||||
sites: list = field(default_factory=list)
|
|
||||||
site_roles: list = field(default_factory=list)
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.username,
|
|
||||||
self.password,
|
|
||||||
self.email,
|
|
||||||
json.dumps(self.favorites),
|
|
||||||
lst2pgarr(self.unseen_pantry_items),
|
|
||||||
lst2pgarr(self.unseen_groups),
|
|
||||||
lst2pgarr(self.unseen_shopping_lists),
|
|
||||||
lst2pgarr(self.unseen_recipes),
|
|
||||||
lst2pgarr(self.seen_pantry_items),
|
|
||||||
lst2pgarr(self.seen_groups),
|
|
||||||
lst2pgarr(self.seen_shopping_lists),
|
|
||||||
lst2pgarr(self.seen_recipes),
|
|
||||||
lst2pgarr(self.sites),
|
|
||||||
lst2pgarr(self.site_roles),
|
|
||||||
self.system_admin,
|
|
||||||
json.dumps(self.flags),
|
|
||||||
self.row_type
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_dictionary(self):
|
|
||||||
return self.__dict__
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ItemLocationPayload:
|
|
||||||
part_id: int
|
|
||||||
location_id: int
|
|
||||||
quantity_on_hand: float = 0.0
|
|
||||||
cost_layers: list = field(default_factory=list)
|
|
||||||
|
|
||||||
def __post_init__(self):
|
|
||||||
if not isinstance(self.part_id, int):
|
|
||||||
raise TypeError(f"part_id must be of type int; not {type(self.part_id)}")
|
|
||||||
if not isinstance(self.location_id, int):
|
|
||||||
raise TypeError(f"part_id must be of type int; not {type(self.part_id)}")
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.part_id,
|
|
||||||
self.location_id,
|
|
||||||
self.quantity_on_hand,
|
|
||||||
lst2pgarr(self.cost_layers)
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class SKUPrefixPayload:
|
|
||||||
__slots__ = ('uuid', 'name', 'description')
|
|
||||||
|
|
||||||
uuid: str
|
|
||||||
name: str
|
|
||||||
description: str
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.uuid,
|
|
||||||
self.name,
|
|
||||||
self.description
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ConversionPayload:
|
|
||||||
item_id: int
|
|
||||||
uom_id: int
|
|
||||||
conv_factor: float
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.item_id,
|
|
||||||
self.uom_id,
|
|
||||||
self.conv_factor
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ZonesPayload:
|
|
||||||
name: str
|
|
||||||
description: str = ""
|
|
||||||
|
|
||||||
def __post_init__(self):
|
|
||||||
if not isinstance(self.name, str):
|
|
||||||
raise TypeError(f"Zone name should be of type str; not {type(self.name)}")
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.name,
|
|
||||||
self.description,
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class LocationsPayload:
|
|
||||||
uuid: str
|
|
||||||
name: str
|
|
||||||
zone_id: int
|
|
||||||
|
|
||||||
def __post_init__(self):
|
|
||||||
if not isinstance(self.uuid, str):
|
|
||||||
raise TypeError(f"uuid must be of type str; not {type(self.uuid)}")
|
|
||||||
if not isinstance(self.name, str):
|
|
||||||
raise TypeError(f"Location name must be of type str; not {type(self.name)}")
|
|
||||||
if not isinstance(self.zone_id, int):
|
|
||||||
raise TypeError(f"zone_id must be of type str; not {type(self.zone_id)}")
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.uuid,
|
|
||||||
self.name,
|
|
||||||
self.zone_id
|
|
||||||
)
|
|
||||||
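# Construction sketch (values invented): the uuid follows the "zone@location"
# convention addSite uses, and the __post_init__ checks raise TypeError on bad input.
loc = LocationsPayload(uuid="Kitchen@Shelf A", name="Shelf A", zone_id=5)
insert_values = loc.payload()   # -> ("Kitchen@Shelf A", "Shelf A", 5)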
@dataclass
|
|
||||||
class VendorsPayload:
|
|
||||||
vendor_name: str
|
|
||||||
created_by: int
|
|
||||||
vendor_address: str = ""
|
|
||||||
creation_date: datetime.datetime = field(init=False)
|
|
||||||
phone_number: str = ""
|
|
||||||
|
|
||||||
def __post_init__(self):
|
|
||||||
if not isinstance(self.vendor_name, str):
|
|
||||||
raise TypeError(f"vendor_name should be of type str; not {type(self.vendor_name)}")
|
|
||||||
self.creation_date = datetime.datetime.now()
|
|
||||||
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.vendor_name,
|
|
||||||
self.vendor_address,
|
|
||||||
self.creation_date,
|
|
||||||
self.created_by,
|
|
||||||
self.phone_number
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class BrandsPayload:
|
|
||||||
name: str
|
|
||||||
|
|
||||||
def __post_init__(self):
|
|
||||||
if not isinstance(self.name, str):
|
|
||||||
return TypeError(f"brand name should be of type str; not {type(self.name)}")
|
|
||||||
|
|
||||||
def payload(self):
|
|
||||||
return (
|
|
||||||
self.name,
|
|
||||||
)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class SiteManager:
|
|
||||||
site_name: str
|
|
||||||
admin_user: tuple
|
|
||||||
default_zone: int
|
|
||||||
default_location: int
|
|
||||||
description: str
|
|
||||||
create_order: list = field(init=False)
|
|
||||||
drop_order: list = field(init=False)
|
|
||||||
|
|
||||||
def __post_init__(self):
|
|
||||||
self.create_order = [
|
|
||||||
"logins",
|
|
||||||
"sites",
|
|
||||||
"roles",
|
|
||||||
"units",
|
|
||||||
"cost_layers",
|
|
||||||
"linked_items",
|
|
||||||
"brands",
|
|
||||||
"food_info",
|
|
||||||
"item_info",
|
|
||||||
"zones",
|
|
||||||
"locations",
|
|
||||||
"logistics_info",
|
|
||||||
"transactions",
|
|
||||||
"item",
|
|
||||||
"vendors",
|
|
||||||
"groups",
|
|
||||||
"group_items",
|
|
||||||
"receipts",
|
|
||||||
"receipt_items",
|
|
||||||
"recipes",
|
|
||||||
"recipe_items",
|
|
||||||
"shopping_lists",
|
|
||||||
"shopping_list_items",
|
|
||||||
"item_locations",
|
|
||||||
"conversions",
|
|
||||||
"sku_prefix"
|
|
||||||
]
|
|
||||||
self.drop_order = [
|
|
||||||
"item_info",
|
|
||||||
"items",
|
|
||||||
"cost_layers",
|
|
||||||
"linked_items",
|
|
||||||
"transactions",
|
|
||||||
"brands",
|
|
||||||
"food_info",
|
|
||||||
"logistics_info",
|
|
||||||
"zones",
|
|
||||||
"locations",
|
|
||||||
"vendors",
|
|
||||||
"group_items",
|
|
||||||
"groups",
|
|
||||||
"receipt_items",
|
|
||||||
"receipts",
|
|
||||||
"recipe_items",
|
|
||||||
"recipes",
|
|
||||||
"shopping_list_items",
|
|
||||||
"shopping_lists",
|
|
||||||
"item_locations",
|
|
||||||
"conversions",
|
|
||||||
"sku_prefix"
|
|
||||||
]
|
|
||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,14 +1,10 @@
|
|||||||
# 3RD PARTY IMPORTS
|
from application import postsqldb
|
||||||
|
import config
|
||||||
import psycopg2
|
import psycopg2
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
# APPLICATION IMPORTS
|
|
||||||
from application import postsqldb
|
|
||||||
import config
|
|
||||||
|
|
||||||
|
|
||||||
def getTransactions(site:str, payload: tuple, convert:bool=True):
|
def getTransactions(site:str, payload: tuple, convert:bool=True):
|
||||||
""" payload (tuple): (logistics_id, limit, offset) """
|
|
||||||
database_config = config.config()
|
database_config = config.config()
|
||||||
sql = f"SELECT * FROM {site}_transactions WHERE logistics_info_id=%s LIMIT %s OFFSET %s;"
|
sql = f"SELECT * FROM {site}_transactions WHERE logistics_info_id=%s LIMIT %s OFFSET %s;"
|
||||||
sql_count = f"SELECT COUNT(*) FROM {site}_transactions WHERE logistics_info_id=%s;"
|
sql_count = f"SELECT COUNT(*) FROM {site}_transactions WHERE logistics_info_id=%s;"
|
||||||
@ -186,128 +182,6 @@ def getLocation(site:str, payload:tuple, convert:bool=True):
|
|||||||
except Exception as error:
|
except Exception as error:
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
raise postsqldb.DatabaseError(error, payload, sql)
|
||||||
|
|
||||||
def getZone(site:str, payload:tuple, convert:bool=True):
|
|
||||||
selected = ()
|
|
||||||
database_config = config.config()
|
|
||||||
sql = f"SELECT * FROM {site}_zones WHERE id=%s;"
|
|
||||||
try:
|
|
||||||
with psycopg2.connect(**database_config) as conn:
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
selected = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
selected = rows
|
|
||||||
return selected
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def getItemLocations(site, payload, convert=True, conn=None):
|
|
||||||
locations = []
|
|
||||||
count = 0
|
|
||||||
self_conn = False
|
|
||||||
with open(f"application/items/sql/getItemLocations.sql", "r+") as file:
|
|
||||||
sql = file.read().replace("%%site_name%%", site)
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchall()
|
|
||||||
if rows and convert:
|
|
||||||
locations = [postsqldb.tupleDictionaryFactory(cur.description, row) for row in rows]
|
|
||||||
if rows and not convert:
|
|
||||||
locations = rows
|
|
||||||
|
|
||||||
cur.execute(f"SELECT COUNT(*) FROM {site}_item_locations WHERE part_id=%s;", (payload[0],))
|
|
||||||
count = cur.fetchone()[0]
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return locations, count
|
|
||||||
|
|
||||||
except (Exception, psycopg2.DatabaseError) as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def getItemInfoTuple(site:str, payload:tuple, convert=True):
|
|
||||||
""" payload (_type_): (item_info_id,) """
|
|
||||||
selected = ()
|
|
||||||
database_config = config.config()
|
|
||||||
sql = f"SELECT * FROM {site}_item_info WHERE id=%s;"
|
|
||||||
try:
|
|
||||||
with psycopg2.connect(**database_config) as conn:
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
selected = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
selected = rows
|
|
||||||
return selected
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
def selectItemLocationsTuple(site_name, payload, convert=True):
|
|
||||||
""" payload (tuple): [item_id, location_id] """
|
|
||||||
item_locations = ()
|
|
||||||
database_config = config.config()
|
|
||||||
select_item_location_sql = f"SELECT * FROM {site_name}_item_locations WHERE part_id = %s AND location_id = %s;"
|
|
||||||
try:
|
|
||||||
with psycopg2.connect(**database_config) as conn:
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(select_item_location_sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
item_locations = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
item_locations = rows
|
|
||||||
return item_locations
|
|
||||||
except Exception as error:
|
|
||||||
return error
|
|
||||||
|
|
||||||
def selectCostLayersTuple(site_name, payload, convert=True):
|
|
||||||
""" payload (tuple): (item_locations_id, ) """
|
|
||||||
cost_layers = ()
|
|
||||||
database_config = config.config()
|
|
||||||
select_cost_layers_sql = f"SELECT cl.* FROM {site_name}_item_locations il JOIN {site_name}_cost_layers cl ON cl.id = ANY(il.cost_layers) where il.id=%s;"
|
|
||||||
try:
|
|
||||||
with psycopg2.connect(**database_config) as conn:
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(select_cost_layers_sql, payload)
|
|
||||||
rows = cur.fetchall()
|
|
||||||
if rows and convert:
|
|
||||||
cost_layers = [postsqldb.tupleDictionaryFactory(cur.description, layer) for layer in rows]
|
|
||||||
elif rows and not convert:
|
|
||||||
cost_layers = rows
|
|
||||||
return cost_layers
|
|
||||||
except Exception as error:
|
|
||||||
return error
|
|
||||||
|
|
||||||
def selectSiteTuple(payload, convert=True):
|
|
||||||
""" payload (tuple): (site_name,) """
|
|
||||||
site = ()
|
|
||||||
database_config = config.config()
|
|
||||||
select_site_sql = f"SELECT * FROM sites WHERE site_name = %s;"
|
|
||||||
try:
|
|
||||||
with psycopg2.connect(**database_config) as conn:
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(select_site_sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
site = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
site = rows
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, select_site_sql)
|
|
||||||
return site
|
|
||||||
|
|
||||||
def paginateZonesBySku(site: str, payload: tuple, convert=True):
|
def paginateZonesBySku(site: str, payload: tuple, convert=True):
|
||||||
database_config = config.config()
|
database_config = config.config()
|
||||||
zones, count = (), 0
|
zones, count = (), 0
|
||||||
@ -391,394 +265,12 @@ def paginateBrands(site:str, payload:tuple, convert:bool=True):
|
|||||||
except Exception as error:
|
except Exception as error:
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
raise postsqldb.DatabaseError(error, payload, sql)
|
||||||
|
|
||||||
def insertCostLayersTuple(site, payload, convert=True, conn=None):
|
|
||||||
cost_layer = ()
|
|
||||||
self_conn = False
|
|
||||||
|
|
||||||
with open(f"application/items/sql/insertCostLayersTuple.sql", "r+") as file:
|
|
||||||
sql = file.read().replace("%%site_name%%", site)
|
|
||||||
try:
|
|
||||||
if not conn:
|
|
||||||
database_config = config.config()
|
|
||||||
conn = psycopg2.connect(**database_config)
|
|
||||||
conn.autocommit = True
|
|
||||||
self_conn = True
|
|
||||||
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(sql, payload)
|
|
||||||
rows = cur.fetchone()
|
|
||||||
if rows and convert:
|
|
||||||
cost_layer = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
|
||||||
elif rows and not convert:
|
|
||||||
cost_layer = rows
|
|
||||||
|
|
||||||
if self_conn:
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return cost_layer
|
|
||||||
except Exception as error:
|
|
||||||
raise postsqldb.DatabaseError(error, payload, sql)
|
|
||||||
|
|
||||||
-def insertItemLocationsTuple(site, payload, convert=True, conn=None):
-    """ payload (tuple): (part_id[int], location_id[int], quantity_on_hand[float], cost_layers[lst2pgarr]) """
-    location = ()
-    self_conn = False
-    database_config = config.config()
-    with open(f"application/items/sql/insertItemLocationsTuple.sql", "r+") as file:
-        sql = file.read().replace("%%site_name%%", site)
-    try:
-        if not conn:
-            database_config = config.config()
-            conn = psycopg2.connect(**database_config)
-            conn.autocommit = True
-            self_conn = True
-
-        with conn.cursor() as cur:
-            cur.execute(sql, payload)
-            rows = cur.fetchone()
-            if rows and convert:
-                location = postsqldb.tupleDictionaryFactory(cur.description, rows)
-            elif rows and not convert:
-                location = rows
-
-        if self_conn:
-            conn.commit()
-            conn.close()
-
-        return location
-    except Exception as error:
-        raise postsqldb.DatabaseError(error, payload, sql)

-def insertLogisticsInfoTuple(site, payload, convert=True, conn=None):
-    """ payload (tuple): (barcode[str], primary_location[str], auto_issue_location[str], dynamic_locations[jsonb],
-                          location_data[jsonb], quantity_on_hand[float]) """
-    logistics_info = ()
-    self_conn = False
-
-    with open(f"application/items/sql/insertLogisticsInfoTuple.sql", "r+") as file:
-        sql = file.read().replace("%%site_name%%", site)
-    try:
-        if not conn:
-            database_config = config.config()
-            conn = psycopg2.connect(**database_config)
-            conn.autocommit = True
-            self_conn = True
-
-        with conn.cursor() as cur:
-            cur.execute(sql, payload)
-            rows = cur.fetchone()
-            if rows and convert:
-                logistics_info = postsqldb.tupleDictionaryFactory(cur.description, rows)
-            elif rows and not convert:
-                logistics_info = rows
-
-        if self_conn:
-            conn.commit()
-            conn.close()
-
-        return logistics_info
-
-    except Exception as error:
-        raise postsqldb.DatabaseError(error, payload, sql)

-def insertItemInfoTuple(site, payload, convert=True, conn=None):
-    """ payload (tuple): (barcode[str], linked_items[lst2pgarr], shopping_lists[lst2pgarr], recipes[lst2pgarr], groups[lst2pgarr],
-                          packaging[str], uom[str], cost[float], safety_stock[float], lead_time_days[float], ai_pick[bool]) """
-    item_info = ()
-    self_conn = False
-    with open(f"application/items/sql/insertItemInfoTuple.sql", "r+") as file:
-        sql = file.read().replace("%%site_name%%", site)
-    try:
-        if not conn:
-            database_config = config.config()
-            conn = psycopg2.connect(**database_config)
-            conn.autocommit = True
-            self_conn = True
-
-        with conn.cursor() as cur:
-            cur.execute(sql, payload)
-            rows = cur.fetchone()
-            if rows and convert:
-                item_info = postsqldb.tupleDictionaryFactory(cur.description, rows)
-            elif rows and not convert:
-                item_info = rows
-        if self_conn:
-            conn.commit()
-            conn.close()
-
-        return item_info
-    except Exception as error:
-        raise postsqldb.DatabaseError(error, payload, sql)

-def insertFoodInfoTuple(site, payload, convert=True, conn=None):
-    """ payload (tuple): (ingrediants[lst2pgarr], food_groups[lst2pgarr], nutrients[jsonstr], expires[bool]) """
-    food_info = ()
-    self_conn = False
-    with open(f"application/items/sql/insertFoodInfoTuple.sql", "r+") as file:
-        sql = file.read().replace("%%site_name%%", site)
-    try:
-        if not conn:
-            database_config = config.config()
-            conn = psycopg2.connect(**database_config)
-            conn.autocommit = True
-            self_conn = True
-
-        with conn.cursor() as cur:
-            cur.execute(sql, payload)
-            rows = cur.fetchone()
-            if rows and convert:
-                food_info = postsqldb.tupleDictionaryFactory(cur.description, rows)
-            elif rows and not convert:
-                food_info = rows
-
-        if self_conn:
-            conn.commit()
-            conn.close()
-
-        return food_info
-    except Exception as error:
-        raise postsqldb.DatabaseError(error, payload, sql)

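The `lst2pgarr` and `jsonstr` tags in the docstring suggest that lists are converted to Postgres array literals and that nutrients are serialized to a JSON string before the insert. A sketch of assembling such a payload; `postsqldb.lst2pgarr` is an assumed helper name (not shown in this diff) and the values are made up:

import json
from application.items import database_items
import application.postsqldb as postsqldb

nutrients = json.dumps({"calories": 250, "protein_g": 9})      # jsonstr field
payload = (
    postsqldb.lst2pgarr(["flour", "water", "yeast"]),          # ingrediants -- assumed helper
    postsqldb.lst2pgarr(["grains"]),                           # food_groups
    nutrients,
    True,                                                      # expires
)
food_info = database_items.insertFoodInfoTuple("demo_site", payload)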
-def insertItemTuple(site, payload, convert=True, conn=None):
-    """ payload (tuple): (barcode[str], item_name[str], brand[int], description[str],
-                          tags[lst2pgarr], links[jsonb], item_info_id[int], logistics_info_id[int],
-                          food_info_id[int], row_type[str], item_type[str], search_string[str]) """
-    item = ()
-    self_conn = False
-    with open(f"application/items/sql/insertItemTuple.sql", "r+") as file:
-        sql = file.read().replace("%%site_name%%", site)
-    try:
-        if not conn:
-            database_config = config.config()
-            conn = psycopg2.connect(**database_config)
-            conn.autocommit = True
-            self_conn = True
-
-        with conn.cursor() as cur:
-            cur.execute(sql, payload)
-            rows = cur.fetchone()
-            if rows and convert:
-                item = postsqldb.tupleDictionaryFactory(cur.description, rows)
-            elif rows and not convert:
-                item = rows
-
-        if self_conn:
-            conn.commit()
-            conn.close()
-
-        return item
-    except Exception as error:
-        raise postsqldb.DatabaseError(error, payload, sql)

-def insertSKUPrefixtuple(site:str, payload:tuple, convert=True, conn=None):
-    """ payload (tuple): (name[str],) """
-    prefix = ()
-    self_conn = False
-    with open(f"application/items/sql/insertSKUPrefixTuple.sql", "r+") as file:
-        sql = file.read().replace("%%site_name%%", site)
-    try:
-        if not conn:
-            database_config = config.config()
-            conn = psycopg2.connect(**database_config)
-            conn.autocommit = True
-            self_conn = True
-
-        with conn.cursor() as cur:
-            cur.execute(sql, payload)
-            rows = cur.fetchone()
-            if rows and convert:
-                prefix = postsqldb.tupleDictionaryFactory(cur.description, rows)
-            elif rows and not convert:
-                prefix = rows
-
-        if self_conn:
-            conn.commit()
-            conn.close()
-
-        return prefix
-    except Exception as error:
-        raise postsqldb.DatabaseError(error, payload, sql)

-def insertConversionTuple(site: str, payload: list, convert=True, conn=None):
-    """ payload (tuple): (item_id, uom_id, conversion_factor) """
-    record = ()
-    self_conn = False
-    with open(f"sql/INSERT/insertConversionsTuple.sql", "r+") as file:
-        sql = file.read().replace("%%site_name%%", site)
-    try:
-        if not conn:
-            database_config = config.config()
-            conn = psycopg2.connect(**database_config)
-            conn.autocommit = True
-            self_conn = True
-
-        with conn.cursor() as cur:
-            cur.execute(sql, payload)
-            rows = cur.fetchone()
-            if rows and convert:
-                record = postsqldb.tupleDictionaryFactory(cur.description, rows)
-            elif rows and not convert:
-                record = rows
-
-        if self_conn:
-            conn.commit()
-            conn.close()
-
-        return record
-
-    except Exception as error:
-        raise postsqldb.DatabaseError(error, payload, sql)

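A short usage sketch for the conversion insert above; the site name and ids are placeholders, and it assumes the helper is importable from `application.items.database_items` as elsewhere in this diff:

from application.items import database_items

# (item_id, uom_id, conversion_factor) per the docstring above -- illustrative values.
payload = (42, 3, 12.0)   # e.g. twelve eaches per case
record = database_items.insertConversionTuple("demo_site", payload)
print(record)             # dict when convert=True, raw tuple otherwise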
-def postDeleteCostLayer(site_name, payload, convert=True, conn=None):
-    """ payload (tuple): (tuple_id, ...) ids of the cost layer rows to delete """
-    deleted = ()
-    self_conn = False
-    sql = f"WITH deleted_rows AS (DELETE FROM {site_name}_cost_layers WHERE id IN ({','.join(['%s'] * len(payload))}) RETURNING *) SELECT * FROM deleted_rows;"
-
-    try:
-        if not conn:
-            database_config = config.config()
-            conn = psycopg2.connect(**database_config)
-            conn.autocommit = False
-            self_conn = True
-
-        with conn.cursor() as cur:
-            cur.execute(sql, payload)
-            rows = cur.fetchall()
-            if rows and convert:
-                deleted = [postsqldb.tupleDictionaryFactory(cur.description, r) for r in rows]
-            elif rows and not convert:
-                deleted = rows
-
-        if self_conn:
-            conn.commit()
-            conn.close()
-
-        return deleted
-    except Exception as error:
-        raise postsqldb.DatabaseError(error, payload, sql)

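The `WHERE id IN (...)` clause above is built from the payload length, so the number of `%s` placeholders always matches the number of ids being deleted. A quick illustration (the site name is made up):

payload = (7, 9, 13)
placeholders = ",".join(["%s"] * len(payload))
sql = (f"WITH deleted_rows AS (DELETE FROM demo_site_cost_layers "
       f"WHERE id IN ({placeholders}) RETURNING *) SELECT * FROM deleted_rows;")
# -> "... WHERE id IN (%s,%s,%s) ..."; psycopg2 then substitutes 7, 9 and 13 safely.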
-def deleteConversionTuple(site_name: str, payload: tuple, convert=True, conn=None):
-    """ payload (tuple): (tuple_id,...) """
-    deleted = ()
-    self_conn = False
-    sql = f"WITH deleted_rows AS (DELETE FROM {site_name}_conversions WHERE id IN ({','.join(['%s'] * len(payload))}) RETURNING *) SELECT * FROM deleted_rows;"
-    try:
-        if not conn:
-            database_config = config.config()
-            conn = psycopg2.connect(**database_config)
-            conn.autocommit = False
-            self_conn = True
-
-        with conn.cursor() as cur:
-            cur.execute(sql, payload)
-            rows = cur.fetchone()
-            if rows and convert:
-                deleted = postsqldb.tupleDictionaryFactory(cur.description, rows)
-            elif rows and not convert:
-                deleted = rows
-
-        if self_conn:
-            conn.commit()
-            conn.close()
-
-        return deleted
-
-    except Exception as error:
-        raise postsqldb.DatabaseError(error, payload, sql)

-def updateConversionTuple(site:str, payload: dict, convert=True, conn=None):
-    """ payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}} """
-    updated = ()
-    self_conn = False
-    set_clause, values = postsqldb.updateStringFactory(payload['update'])
-    values.append(payload['id'])
-    sql = f"UPDATE {site}_conversions SET {set_clause} WHERE id=%s RETURNING *;"
-    try:
-        if not conn:
-            database_config = config.config()
-            conn = psycopg2.connect(**database_config)
-            conn.autocommit = False
-            self_conn = True
-
-        with conn.cursor() as cur:
-            cur.execute(sql, values)
-            rows = cur.fetchone()
-            if rows and convert:
-                updated = postsqldb.tupleDictionaryFactory(cur.description, rows)
-            elif rows and not convert:
-                updated = rows
-
-        if self_conn:
-            conn.commit()
-            conn.close()
-
-        return updated
-    except Exception as error:
-        raise postsqldb.DatabaseError(error, payload, sql)

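`postsqldb.updateStringFactory` itself is not shown in this diff; from the way it is used here it presumably turns the `update` mapping into a `SET` clause plus an ordered value list, with the row id appended last to feed the `WHERE id=%s` placeholder. A hedged sketch of that assumption:

def update_string_factory(update: dict):
    # Assumed behaviour: {'conversion_factor': 6.0, 'uom_id': 2}
    # -> ("conversion_factor=%s, uom_id=%s", [6.0, 2])
    set_clause = ", ".join(f"{column}=%s" for column in update)
    return set_clause, list(update.values())

set_clause, values = update_string_factory({"conversion_factor": 6.0})
values.append(15)   # row id, consumed by WHERE id=%s
sql = f"UPDATE demo_site_conversions SET {set_clause} WHERE id=%s RETURNING *;"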
-def updateItemInfoTuple(site:str, payload: dict, convert=True, conn=None):
-    """ payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}} """
-    updated = ()
-    self_conn = False
-    set_clause, values = postsqldb.updateStringFactory(payload['update'])
-    values.append(payload['id'])
-    sql = f"UPDATE {site}_item_info SET {set_clause} WHERE id=%s RETURNING *;"
-    try:
-        if not conn:
-            database_config = config.config()
-            conn = psycopg2.connect(**database_config)
-            conn.autocommit = False
-            self_conn = True
-
-        with conn.cursor() as cur:
-            cur.execute(sql, values)
-            rows = cur.fetchone()
-            if rows and convert:
-                updated = postsqldb.tupleDictionaryFactory(cur.description, rows)
-            elif rows and not convert:
-                updated = rows
-
-        if self_conn:
-            conn.commit()
-            conn.close()
-
-        return updated
-
-    except Exception as error:
-        raise postsqldb.DatabaseError(error, payload, sql)

-def postUpdateItemLocation(site: str, payload: tuple, conn=None):
-    item_location = ()
-    self_conn = False
-    with open(f"sql/updateItemLocation.sql", "r+") as file:
-        sql = file.read().replace("%%site_name%%", site)
-    try:
-        if not conn:
-            database_config = config.config()
-            conn = psycopg2.connect(**database_config)
-            conn.autocommit = False
-            self_conn = True
-
-        with conn.cursor() as cur:
-            cur.execute(sql, payload)
-            rows = cur.fetchone()
-            if rows:
-                item_location = rows
-
-        if self_conn:
-            conn.commit()
-            conn.close()
-
-        return item_location
-    except Exception as error:
-        return error

-# TODO: This should be in the item's process module
 def postUpdateItem(site:str, payload:dict):
-    """ payload (dict): STRICT FORMAT
+    """ POST and update to an item
+
+    Args:
+        site (str): name of the site the item exists in.
+        payload (dict): STRICT FORMAT
         {id: item_id, data: SEE BELOW, user_id: updater}

         data is complex structure
@@ -867,9 +359,13 @@ def postUpdateItem(site:str, payload:dict):
     except Exception as error:
         raise postsqldb.DatabaseError(error, payload, "MULTICALL!")

-# TODO: This should be in the item's process module
 def postUpdateItemLink(site: str, payload: dict):
-    """ payload (dict): {id, update, old_conv_factor, user_id} """
+    """ POST update to ItemLink
+
+    Args:
+        site (str): _description_
+        payload (dict): {id, update, old_conv_factor, user_id}
+    """
     def postUpdateData(conn, table, payload, convert=True):
         updated = ()
         set_clause, values = postsqldb.updateStringFactory(payload['update'])
@@ -925,7 +421,21 @@ def postUpdateItemLink(site: str, payload: dict):
         postAddTransaction(conn, site, transaction.payload())

 def postUpdateCostLayer(site, payload, convert=True, conn=None):
-    """ payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}} """
+    """_summary_
+
+    Args:
+        conn (_T_connector@connect): Postgresql Connector
+        site (str):
+        table (str):
+        payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}}
+        convert (bool, optional): determines if to return tuple as dictionary. Defaults to False.
+
+    Raises:
+        DatabaseError:
+
+    Returns:
+        tuple or dict: updated tuple
+    """
     updated = ()
     self_conn = False

@@ -955,6 +465,174 @@ def postUpdateCostLayer(site, payload, convert=True, conn=None):
     except Exception as error:
         raise postsqldb.DatabaseError(error, payload, sql)

+def insertCostLayersTuple(site, payload, convert=True, conn=None):
+    cost_layer = ()
+    self_conn = False
+
+    with open(f"application/items/sql/insertCostLayersTuple.sql", "r+") as file:
+        sql = file.read().replace("%%site_name%%", site)
+    try:
+        if not conn:
+            database_config = config.config()
+            conn = psycopg2.connect(**database_config)
+            conn.autocommit = True
+            self_conn = True
+
+        with conn.cursor() as cur:
+            cur.execute(sql, payload)
+            rows = cur.fetchone()
+            if rows and convert:
+                cost_layer = postsqldb.tupleDictionaryFactory(cur.description, rows)
+            elif rows and not convert:
+                cost_layer = rows
+
+        if self_conn:
+            conn.commit()
+            conn.close()
+
+        return cost_layer
+    except Exception as error:
+        raise postsqldb.DatabaseError(error, payload, sql)

def insertItemLocationsTuple(conn, site, payload, convert=True):
|
||||||
|
location = ()
|
||||||
|
database_config = config.config()
|
||||||
|
with open(f"application/items/sql/insertItemLocationsTuple.sql", "r+") as file:
|
||||||
|
sql = file.read().replace("%%site_name%%", site)
|
||||||
|
try:
|
||||||
|
conn = psycopg2.connect(**database_config)
|
||||||
|
conn.autocommit = False
|
||||||
|
with conn.cursor() as cur:
|
||||||
|
cur.execute(sql, payload)
|
||||||
|
rows = cur.fetchone()
|
||||||
|
if rows and convert:
|
||||||
|
location = postsqldb.tupleDictionaryFactory(cur.description, rows)
|
||||||
|
elif rows and not convert:
|
||||||
|
location = rows
|
||||||
|
return location, conn
|
||||||
|
except Exception as error:
|
||||||
|
raise postsqldb.DatabaseError(error, payload, sql)
|
||||||
|
|
||||||
+def selectItemLocationsTuple(site_name, payload, convert=True):
+    """select a single tuple from ItemLocations table for site_name
+
+    Args:
+        conn (_T_connector@connect):
+        site_name (str):
+        payload (tuple): [item_id, location_id]
+        convert (bool): defaults to False, used to determine return of tuple/dict
+
+    Returns:
+        tuple: the row that was returned from the table
+    """
+    item_locations = ()
+    database_config = config.config()
+    select_item_location_sql = f"SELECT * FROM {site_name}_item_locations WHERE part_id = %s AND location_id = %s;"
+    try:
+        with psycopg2.connect(**database_config) as conn:
+            with conn.cursor() as cur:
+                cur.execute(select_item_location_sql, payload)
+                rows = cur.fetchone()
+                if rows and convert:
+                    item_locations = postsqldb.tupleDictionaryFactory(cur.description, rows)
+                elif rows and not convert:
+                    item_locations = rows
+        return item_locations
+    except Exception as error:
+        return error

+def selectCostLayersTuple(site_name, payload, convert=True):
+    """select a single or series of cost layers from the database for site_name
+
+    Args:
+        conn (_T_connector@connect):
+        site_name (str):
+        payload (tuple): (item_locations_id, )
+        convert (bool): defaults to False, used for determining return as tuple/dict
+
+    Returns:
+        list: list of tuples/dict from the cost_layers table for site_name
+    """
+    cost_layers = ()
+    database_config = config.config()
+    select_cost_layers_sql = f"SELECT cl.* FROM {site_name}_item_locations il JOIN {site_name}_cost_layers cl ON cl.id = ANY(il.cost_layers) where il.id=%s;"
+    try:
+        with psycopg2.connect(**database_config) as conn:
+            with conn.cursor() as cur:
+                cur.execute(select_cost_layers_sql, payload)
+                rows = cur.fetchall()
+                if rows and convert:
+                    cost_layers = rows
+                    cost_layers = [postsqldb.tupleDictionaryFactory(cur.description, layer) for layer in rows]
+                elif rows and not convert:
+                    cost_layers = rows
+        return cost_layers
+    except Exception as error:
+        return error

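The query above relies on `cost_layers` being an integer-array column on the item-locations table, so `cl.id = ANY(il.cost_layers)` expands the array into the join. A hedged illustration with made-up table shapes:

# Assumed shapes, not the real schema:
#   demo_site_item_locations(id, part_id, location_id, cost_layers int[])
#   demo_site_cost_layers(id, cost, quantity)
# If the location row with id 1 has cost_layers = {4,7}, the join matches the
# cost-layer rows whose id is 4 or 7 and returns both.
sql = ("SELECT cl.* FROM demo_site_item_locations il "
       "JOIN demo_site_cost_layers cl ON cl.id = ANY(il.cost_layers) "
       "WHERE il.id = %s;")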
+def postDeleteCostLayer(site_name, payload, convert=True, conn=None):
+    """
+    payload (tuple): (tuple_id, ...) ids of the cost layer rows to delete
+
+    Raises:
+        DatabaseError:
+
+    Returns:
+        tuple or dict: deleted tuple
+    """
+    deleted = ()
+    self_conn = False
+    sql = f"WITH deleted_rows AS (DELETE FROM {site_name}_cost_layers WHERE id IN ({','.join(['%s'] * len(payload))}) RETURNING *) SELECT * FROM deleted_rows;"
+
+    try:
+        if not conn:
+            database_config = config.config()
+            conn = psycopg2.connect(**database_config)
+            conn.autocommit = False
+            self_conn = True
+
+        with conn.cursor() as cur:
+            cur.execute(sql, payload)
+            rows = cur.fetchall()
+            if rows and convert:
+                deleted = [postsqldb.tupleDictionaryFactory(cur.description, r) for r in rows]
+            elif rows and not convert:
+                deleted = rows
+
+        if self_conn:
+            conn.commit()
+            conn.close()
+
+        return deleted
+    except Exception as error:
+        raise postsqldb.DatabaseError(error, payload, sql)

+def postUpdateItemLocation(site, payload, conn=None):
+    item_location = ()
+    self_conn = False
+    with open(f"sql/updateItemLocation.sql", "r+") as file:
+        sql = file.read().replace("%%site_name%%", site)
+    try:
+        if not conn:
+            database_config = config.config()
+            conn = psycopg2.connect(**database_config)
+            conn.autocommit = False
+            self_conn = True
+
+        with conn.cursor() as cur:
+            cur.execute(sql, payload)
+            rows = cur.fetchone()
+            if rows:
+                item_location = rows
+
+        if self_conn:
+            conn.commit()
+            conn.close()
+
+        return item_location
+    except Exception as error:
+        return error

 def postAddTransaction(site, payload, convert=False, conn=None):
     transaction = ()
     self_conn = False
@@ -982,8 +660,22 @@ def postAddTransaction(site, payload, convert=False, conn=None):
     except Exception as error:
         raise postsqldb.DatabaseError(error, payload, sql)

 def postInsertItemLink(site, payload, convert=True, conn=None):
-    """ payload (tuple): (barcode[str], link[int], data[jsonb], conv_factor[float]) """
+    """insert payload into itemlinks table of site
+
+    Args:
+        conn (_T_connector@connect): Postgresql Connector
+        site (str):
+        payload (tuple): (barcode[str], link[int], data[jsonb], conv_factor[float])
+        convert (bool, optional): Determines if to return tuple as dictionary. Defaults to False.
+
+    Raises:
+        DatabaseError:
+
+    Returns:
+        tuple or dict: inserted tuple
+    """
     link = ()
     self_conn = False

@@ -1013,7 +705,21 @@ def postInsertItemLink(site, payload, convert=True, conn=None):
         raise postsqldb.DatabaseError(error, payload, sql)

 def postUpdateItemByID(site, payload, convert=True, conn=None):
-    """ payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}} """
+    """ high level update of an item specific data, none of its relationships
+
+    Args:
+        conn (_T_connector@connect): Postgresql Connector
+        site (str):
+        table (str):
+        payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}}
+        convert (bool, optional): determines if to return tuple as dictionary. Defaults to False.
+
+    Raises:
+        DatabaseError:
+
+    Returns:
+        tuple or dict: updated tuple
+    """
     updated = ()
     self_conn = False
     set_clause, values = postsqldb.updateStringFactory(payload['update'])

@@ -1,70 +1,29 @@
-# 3RD PARTY IMPORTS
-from flask import (
-    Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
-)
-import psycopg2
-import math
-
-# APPLICATION IMPORTS
-from config import config
-from application.access_module import access_api
+from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
+import psycopg2, math, json, datetime, main, copy, requests, process, database, pprint, MyDataclasses
+from config import config, sites_config
+from main import unfoldCostLayers
+from user_api import login_required
 import application.postsqldb as db
 from application.items import database_items
 from application.items import items_processes
-import application.database_payloads as dbPayloads
-
-items_api = Blueprint('items_api', __name__, template_folder="templates", static_folder="static")
+
+items_api = Blueprint('items_api', __name__)

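Neither side of the diff shows where this blueprint is registered. Presumably the application factory does something like the following sketch; the module path and the `url_prefix` are assumptions, and a prefix like this would explain why one side can drop `/item` from its route paths:

from flask import Flask
from application.items.items_API import items_api   # hypothetical module path

app = Flask(__name__)
app.register_blueprint(items_api, url_prefix="/items")   # assumed prefix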
-def update_session_user():
-    database_config = config()
-    with psycopg2.connect(**database_config) as conn:
-        user = db.LoginsTable.get_washed_tuple(conn, (session['user_id'],))
-        session['user'] = user
-
-# ROOT TEMPLATE ROUTES
-@items_api.route("/")
-@access_api.login_required
-def items():
-    update_session_user()
-    sites = [site[1] for site in db.get_sites(session['user']['sites'])]
-    return render_template("index.html",
-                           current_site=session['selected_site'],
-                           sites=sites)
-
-@items_api.route("/<id>")
-@access_api.login_required
-def item(id):
-    sites = [site[1] for site in db.get_sites(session['user']['sites'])]
-    database_config = config()
-    with psycopg2.connect(**database_config) as conn:
-        units = db.UnitsTable.getAll(conn)
-    return render_template("item_new.html", id=id, units=units, current_site=session['selected_site'], sites=sites)
-
-@items_api.route("/transaction")
-@access_api.login_required
-def transaction():
-    sites = [site[1] for site in db.get_sites(session['user']['sites'])]
-    database_config = config()
-    with psycopg2.connect(**database_config) as conn:
-        units = db.UnitsTable.getAll(conn)
-    return render_template("transaction.html", units=units, current_site=session['selected_site'], sites=sites, proto={'referrer': request.referrer})
-
-@items_api.route("/transactions/<id>")
-@access_api.login_required
-def transactions(id):
-    sites = [site[1] for site in db.get_sites(session['user']['sites'])]
-    return render_template("transactions.html", id=id, current_site=session['selected_site'], sites=sites)
-
-@items_api.route("/<parent_id>/itemLink/<id>")
-@access_api.login_required
+@items_api.route("/item/<parent_id>/itemLink/<id>")
+@login_required
 def itemLink(parent_id, id):
-    sites = [site[1] for site in db.get_sites(session['user']['sites'])]
+    sites = [site[1] for site in main.get_sites(session['user']['sites'])]
-    return render_template("itemlink.html", current_site=session['selected_site'], sites=sites, proto={'referrer': request.referrer}, id=id)
+    return render_template("items/itemlink.html", current_site=session['selected_site'], sites=sites, proto={'referrer': request.referrer}, id=id)

# API CALLS
|
@items_api.route("/item/getTransactions", methods=["GET"])
|
||||||
@items_api.route("/getTransactions", methods=["GET"])
|
@login_required
|
||||||
@access_api.login_required
|
|
||||||
def getTransactions():
|
def getTransactions():
|
||||||
|
""" GET a subquery of transactions by passing a logistics_info_id, limit, and page
|
||||||
|
---
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: transactions received successfully.
|
||||||
|
"""
|
||||||
if request.method == "GET":
|
if request.method == "GET":
|
||||||
recordset = []
|
recordset = []
|
||||||
count = 0
|
count = 0
|
||||||
@ -77,9 +36,24 @@ def getTransactions():
|
|||||||
return jsonify({"transactions": recordset, "end": math.ceil(count/limit), "error": False, "message": ""})
|
return jsonify({"transactions": recordset, "end": math.ceil(count/limit), "error": False, "message": ""})
|
||||||
return jsonify({"transactions": recordset, "end": math.ceil(count/limit), "error": True, "message": f"method {request.method} is not allowed."})
|
return jsonify({"transactions": recordset, "end": math.ceil(count/limit), "error": True, "message": f"method {request.method} is not allowed."})
|
||||||
|
|
||||||
@items_api.route("/getTransaction", methods=["GET"])
|
@items_api.route("/item/getTransaction", methods=["GET"])
|
||||||
@access_api.login_required
|
@login_required
|
||||||
def getTransaction():
|
def getTransaction():
|
||||||
|
""" GET a transaction from the system by passing an ID
|
||||||
|
---
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: id
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 1
|
||||||
|
required: true
|
||||||
|
description: The transaction.id
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Transaction Object received successfully.
|
||||||
|
"""
|
||||||
transaction = ()
|
transaction = ()
|
||||||
if request.method == "GET":
|
if request.method == "GET":
|
||||||
id = int(request.args.get('id', 1))
|
id = int(request.args.get('id', 1))
|
||||||
@ -88,9 +62,23 @@ def getTransaction():
|
|||||||
return jsonify({"transaction": transaction, "error": False, "message": ""})
|
return jsonify({"transaction": transaction, "error": False, "message": ""})
|
||||||
return jsonify({"transaction": transaction, "error": True, "message": f"method {request.method} is not allowed."})
|
return jsonify({"transaction": transaction, "error": True, "message": f"method {request.method} is not allowed."})
|
||||||
|
|
||||||
@items_api.route("/getItem", methods=["GET"])
|
@items_api.route("/item/getItem", methods=["GET"])
|
||||||
@access_api.login_required
|
@login_required
|
||||||
def get_item():
|
def get_item():
|
||||||
|
""" GET item from system by passing its ID
|
||||||
|
---
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: id
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 1
|
||||||
|
description: item.id
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Item.id received successfully!
|
||||||
|
"""
|
||||||
if request.method == "GET":
|
if request.method == "GET":
|
||||||
id = int(request.args.get('id', 1))
|
id = int(request.args.get('id', 1))
|
||||||
site_name = session['selected_site']
|
site_name = session['selected_site']
|
||||||
@ -99,9 +87,47 @@ def get_item():
|
|||||||
return jsonify({'item': item, 'error': False, 'message': ''})
|
return jsonify({'item': item, 'error': False, 'message': ''})
|
||||||
return jsonify({'item': item, 'error': True, 'message': f'method {request.method} not allowed.'})
|
return jsonify({'item': item, 'error': True, 'message': f'method {request.method} not allowed.'})
|
||||||
|
|
||||||
@items_api.route("/getItemsWithQOH", methods=['GET'])
|
@items_api.route("/item/getItemsWithQOH", methods=['GET'])
|
||||||
@access_api.login_required
|
@login_required
|
||||||
def pagninate_items():
|
def pagninate_items():
|
||||||
|
""" GET items from the system by passing a page, limit, search_string, sort, and order
|
||||||
|
---
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: page
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
default: 1
|
||||||
|
description: page number for offset
|
||||||
|
- in: query
|
||||||
|
name: limit
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
default: 50
|
||||||
|
description: number of records to grab
|
||||||
|
- in: query
|
||||||
|
name: search_string
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
description: string to look for in column search_string
|
||||||
|
- in: query
|
||||||
|
name: sort
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
description: items table column to sort by
|
||||||
|
- in: query
|
||||||
|
name: order
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
enum: ['ASC', 'DESC']
|
||||||
|
default: 'ASC'
|
||||||
|
description: Order to sort items table sort parameter by
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Items received successfully.
|
||||||
|
"""
|
||||||
items = []
|
items = []
|
||||||
count = 0
|
count = 0
|
||||||
if request.method == "GET":
|
if request.method == "GET":
|
||||||
@ -122,9 +148,34 @@ def pagninate_items():
|
|||||||
return jsonify({'items': items, "end": math.ceil(count/limit), 'error':False, 'message': 'Items Loaded Successfully!'})
|
return jsonify({'items': items, "end": math.ceil(count/limit), 'error':False, 'message': 'Items Loaded Successfully!'})
|
||||||
return jsonify({'items': items, "end": math.ceil(count/limit), 'error':True, 'message': 'There was a problem loading the items!'})
|
return jsonify({'items': items, "end": math.ceil(count/limit), 'error':True, 'message': 'There was a problem loading the items!'})
|
||||||
|
|
||||||
@items_api.route('/getModalItems', methods=["GET"])
|
@items_api.route('/item/getModalItems', methods=["GET"])
|
||||||
@access_api.login_required
|
@login_required
|
||||||
def getModalItems():
|
def getModalItems():
|
||||||
|
""" GET items from the system by passing a page, limit, search_string. For select modals
|
||||||
|
---
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: page
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
default: 1
|
||||||
|
description: page number for offset
|
||||||
|
- in: query
|
||||||
|
name: limit
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
default: 25
|
||||||
|
description: number of records to grab
|
||||||
|
- in: query
|
||||||
|
name: search_string
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
description: string to look for in column search_string
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Items received successfully.
|
||||||
|
"""
|
||||||
recordset, count = tuple(), 0
|
recordset, count = tuple(), 0
|
||||||
if request.method == "GET":
|
if request.method == "GET":
|
||||||
page = int(request.args.get('page', 1))
|
page = int(request.args.get('page', 1))
|
||||||
@ -137,9 +188,30 @@ def getModalItems():
|
|||||||
return jsonify({"items":recordset, "end":math.ceil(count/limit), "error":False, "message":"items fetched succesfully!"})
|
return jsonify({"items":recordset, "end":math.ceil(count/limit), "error":False, "message":"items fetched succesfully!"})
|
||||||
return jsonify({"items":recordset, "end":math.ceil(count/limit), "error":True, "message": f"method {request.method} is not allowed."})
|
return jsonify({"items":recordset, "end":math.ceil(count/limit), "error":True, "message": f"method {request.method} is not allowed."})
|
||||||
|
|
||||||
@items_api.route('/getPrefixes', methods=["GET"])
|
@items_api.route('/item/getPrefixes', methods=["GET"])
|
||||||
@access_api.login_required
|
@login_required
|
||||||
def getModalPrefixes():
|
def getModalPrefixes():
|
||||||
|
""" GET prefixes from the system by passing page and limit.
|
||||||
|
---
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: page
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 1
|
||||||
|
description: page of the database records
|
||||||
|
- in: query
|
||||||
|
name: limit
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 10
|
||||||
|
description: number of database records to GET
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Prefixes received from the system successfully!
|
||||||
|
"""
|
||||||
recordset = []
|
recordset = []
|
||||||
count = 0
|
count = 0
|
||||||
if request.method == "GET":
|
if request.method == "GET":
|
||||||
@ -152,9 +224,37 @@ def getModalPrefixes():
|
|||||||
return jsonify({"prefixes":recordset, "end":math.ceil(count/limit), "error":False, "message":"items fetched succesfully!"})
|
return jsonify({"prefixes":recordset, "end":math.ceil(count/limit), "error":False, "message":"items fetched succesfully!"})
|
||||||
return jsonify({"prefixes":recordset, "end":math.ceil(count/limit), "error":True, "message":f"method {request.method} is not allowed!"})
|
return jsonify({"prefixes":recordset, "end":math.ceil(count/limit), "error":True, "message":f"method {request.method} is not allowed!"})
|
||||||
|
|
||||||
@items_api.route('/getZonesBySku', methods=["GET"])
|
@items_api.route('/item/getZonesBySku', methods=["GET"])
|
||||||
@access_api.login_required
|
@login_required
|
||||||
def getZonesbySku():
|
def getZonesbySku():
|
||||||
|
""" GET zones by sku by passing page, limit, item_id
|
||||||
|
---
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: page
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 1
|
||||||
|
description: page of the records to GET
|
||||||
|
- in: query
|
||||||
|
name: limit
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 10
|
||||||
|
description: number of records to grab from the system
|
||||||
|
- in: query
|
||||||
|
name: item_id
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 1
|
||||||
|
description: item_id to pull zones for
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Zones received successfully.
|
||||||
|
"""
|
||||||
zones, count = [], 0
|
zones, count = [], 0
|
||||||
if request.method == "GET":
|
if request.method == "GET":
|
||||||
page = int(request.args.get('page', 1))
|
page = int(request.args.get('page', 1))
|
||||||
@ -166,9 +266,44 @@ def getZonesbySku():
|
|||||||
return jsonify({'zones': zones, 'endpage': math.ceil(count/limit), 'error':False, 'message': f''})
|
return jsonify({'zones': zones, 'endpage': math.ceil(count/limit), 'error':False, 'message': f''})
|
||||||
return jsonify({'zones': zones, 'endpage': math.ceil(count/limit), 'error':False, 'message': f'method {request.method} not allowed.'})
|
return jsonify({'zones': zones, 'endpage': math.ceil(count/limit), 'error':False, 'message': f'method {request.method} not allowed.'})
|
||||||
|
|
||||||
@items_api.route('/getLocationsBySkuZone', methods=['GET'])
|
@items_api.route('/item/getLocationsBySkuZone', methods=['GET'])
|
||||||
@access_api.login_required
|
@login_required
|
||||||
def getLocationsBySkuZone():
|
def getLocationsBySkuZone():
|
||||||
|
""" GET locations by sku by passing page, limit, item_id, zone_id
|
||||||
|
---
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: page
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 1
|
||||||
|
description: page of the records to GET
|
||||||
|
- in: query
|
||||||
|
name: limit
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 10
|
||||||
|
description: number of records to grab from the system
|
||||||
|
- in: query
|
||||||
|
name: item_id
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 1
|
||||||
|
description: item_id to pull locations for zone_id
|
||||||
|
- in: query
|
||||||
|
name: zone_id
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 1
|
||||||
|
description: zone_id to pull locations for item_id
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Locations received successfully.
|
||||||
|
"""
|
||||||
locations, count = [], 0
|
locations, count = [], 0
|
||||||
if request.method == "GET":
|
if request.method == "GET":
|
||||||
zone_id = int(request.args.get('zone_id', 1))
|
zone_id = int(request.args.get('zone_id', 1))
|
||||||
@ -181,9 +316,30 @@ def getLocationsBySkuZone():
|
|||||||
return jsonify({'locations': locations, 'endpage': math.ceil(count/limit), 'error': False, 'message': f''})
|
return jsonify({'locations': locations, 'endpage': math.ceil(count/limit), 'error': False, 'message': f''})
|
||||||
return jsonify({'locations': locations, 'endpage': math.ceil(count/limit), 'error': True, 'message': f'method {request.method} is not allowed.'})
|
return jsonify({'locations': locations, 'endpage': math.ceil(count/limit), 'error': True, 'message': f'method {request.method} is not allowed.'})
|
||||||
|
|
||||||
@items_api.route('/getBrands', methods=['GET'])
|
@items_api.route('/item/getBrands', methods=['GET'])
|
||||||
@access_api.login_required
|
@login_required
|
||||||
def getBrands():
|
def getBrands():
|
||||||
|
""" GET brands from the system by passing page, limit
|
||||||
|
---
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: page
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 1
|
||||||
|
description: page of the records to GET
|
||||||
|
- in: query
|
||||||
|
name: limit
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 10
|
||||||
|
description: number of records to grab from the system
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Brands received successfully.
|
||||||
|
"""
|
||||||
brands, count = [], 0
|
brands, count = [], 0
|
||||||
if request.method == "GET":
|
if request.method == "GET":
|
||||||
page = int(request.args.get('page', 1))
|
page = int(request.args.get('page', 1))
|
||||||
@ -194,9 +350,27 @@ def getBrands():
|
|||||||
return jsonify({'brands': brands, 'endpage': math.ceil(count/limit), 'error': False, 'message': f''})
|
return jsonify({'brands': brands, 'endpage': math.ceil(count/limit), 'error': False, 'message': f''})
|
||||||
return jsonify({'brands': brands, 'endpage': math.ceil(count/limit), 'error': True, 'message': f'method {request.method} is not allowed.'})
|
return jsonify({'brands': brands, 'endpage': math.ceil(count/limit), 'error': True, 'message': f'method {request.method} is not allowed.'})
|
||||||
|
|
||||||
@items_api.route('/updateItem', methods=['POST'])
|
|
||||||
@access_api.login_required
|
@items_api.route('/item/updateItem', methods=['POST'])
|
||||||
|
@login_required
|
||||||
def updateItem():
|
def updateItem():
|
||||||
|
""" POST update to item in the system by passing item_id, data
|
||||||
|
---
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: item_id
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 1
|
||||||
|
description: item_id that the POST targets
|
||||||
|
- in: header
|
||||||
|
name: data
|
||||||
|
description: data to update in system
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: item updated successfully.
|
||||||
|
"""
|
||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
id = request.get_json()['id']
|
id = request.get_json()['id']
|
||||||
data = request.get_json()['data']
|
data = request.get_json()['data']
|
||||||
@ -205,9 +379,42 @@ def updateItem():
|
|||||||
return jsonify({'error': False, 'message': f'Item was updated successfully!'})
|
return jsonify({'error': False, 'message': f'Item was updated successfully!'})
|
||||||
return jsonify({'error': True, 'message': f'method {request.method} is not allowed!'})
|
return jsonify({'error': True, 'message': f'method {request.method} is not allowed!'})
|
||||||
|
|
||||||
@items_api.route('/updateItemLink', methods=['POST'])
|
@items_api.route('/item/updateItemLink', methods=['POST'])
|
||||||
@access_api.login_required
|
@login_required
|
||||||
def updateItemLink():
|
def updateItemLink():
|
||||||
|
""" UPDATE item link by passing id, conv_factor, barcode, old_conv
|
||||||
|
---
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: id
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 1
|
||||||
|
required: true
|
||||||
|
description: Id of item link to update
|
||||||
|
- in: query
|
||||||
|
name: conv_factor
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
required: true
|
||||||
|
description: new conversion factor of item_link id
|
||||||
|
- in: query
|
||||||
|
name: barcode
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: barcode of item_link id
|
||||||
|
- in: query
|
||||||
|
name: old_conv
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
required: true
|
||||||
|
description: old conversion factor of item_link id
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Item Link updated successfully.
|
||||||
|
"""
|
||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
id = request.get_json()['id']
|
id = request.get_json()['id']
|
||||||
conv_factor = request.get_json()['conv_factor']
|
conv_factor = request.get_json()['conv_factor']
|
||||||
@ -219,9 +426,31 @@ def updateItemLink():
|
|||||||
return jsonify({'error':False, 'message': "Linked Item was updated successfully"})
|
return jsonify({'error':False, 'message': "Linked Item was updated successfully"})
|
||||||
return jsonify({'error': True, 'message': f"method {request.method} not allowed."})
|
return jsonify({'error': True, 'message': f"method {request.method} not allowed."})
|
||||||
|
|
||||||
@items_api.route('/getPossibleLocations', methods=["GET"])
|
|
||||||
@access_api.login_required
|
@items_api.route('/item/getPossibleLocations', methods=["GET"])
|
||||||
|
@login_required
|
||||||
def getPossibleLocations():
|
def getPossibleLocations():
|
||||||
|
""" GET locations with zones by passing a page and limit
|
||||||
|
---
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: page
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 1
|
||||||
|
description: page in the records to GET
|
||||||
|
- in: query
|
||||||
|
name: limit
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
minimum: 1
|
||||||
|
default: 1
|
||||||
|
description: number of records to GET
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Locations GET successful.
|
||||||
|
"""
|
||||||
locations, count = (), 0
|
locations, count = (), 0
|
||||||
if request.method == "GET":
|
if request.method == "GET":
|
||||||
page = int(request.args.get('page', 1))
|
page = int(request.args.get('page', 1))
|
||||||
@ -232,9 +461,23 @@ def getPossibleLocations():
|
|||||||
return jsonify({'locations': locations, 'end':math.ceil(count/limit), 'error':False, 'message': f'Locations received successfully!'})
|
return jsonify({'locations': locations, 'end':math.ceil(count/limit), 'error':False, 'message': f'Locations received successfully!'})
|
||||||
return jsonify({'locations': locations, 'end':math.ceil(count/limit), 'error':True, 'message': f'method {request.method} not allowed.'})
|
return jsonify({'locations': locations, 'end':math.ceil(count/limit), 'error':True, 'message': f'method {request.method} not allowed.'})
|
||||||
|
|
||||||
@items_api.route('/getLinkedItem', methods=["GET"])
|
@items_api.route('/item/getLinkedItem', methods=["GET"])
|
||||||
@access_api.login_required
|
@login_required
|
||||||
def getLinkedItem():
|
def getLinkedItem():
|
||||||
|
""" GET itemlink from system by passing an ID
|
||||||
|
---
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: id
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
default: 1
|
||||||
|
required: true
|
||||||
|
description: item link to get from the system
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Item Link GET successful.
|
||||||
|
"""
|
||||||
linked_item = {}
|
linked_item = {}
|
||||||
if request.method == "GET":
|
if request.method == "GET":
|
||||||
id = int(request.args.get('id', 1))
|
id = int(request.args.get('id', 1))
|
||||||
@ -243,9 +486,37 @@ def getLinkedItem():
|
|||||||
return jsonify({'linked_item': linked_item, 'error': False, 'message': 'Linked Item added!!'})
|
return jsonify({'linked_item': linked_item, 'error': False, 'message': 'Linked Item added!!'})
|
||||||
return jsonify({'linked_item': linked_item, 'error': True, 'message': f'method {request.method} not allowed'})
|
return jsonify({'linked_item': linked_item, 'error': True, 'message': f'method {request.method} not allowed'})
|
||||||
|
|
||||||
@items_api.route('/addLinkedItem', methods=["POST"])
|
@items_api.route('/item/addLinkedItem', methods=["POST"])
|
||||||
@access_api.login_required
|
@login_required
|
||||||
def addLinkedItem():
|
def addLinkedItem():
|
||||||
|
""" POST a link between items by passing a parent_id, a child_id, conv_factor
|
||||||
|
---
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: parent_id
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
default: 1
|
||||||
|
required: true
|
||||||
|
description: id to linked list item
|
||||||
|
- in: query
|
||||||
|
name: child_id
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
default: 1
|
||||||
|
required: true
|
||||||
|
description: id to item to be linked to list.
|
||||||
|
- in: query
|
||||||
|
name: conv_factor
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
default: 1
|
||||||
|
required: true
|
||||||
|
description: integer factor between child id to parent id.
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Items linked successfully.
|
||||||
|
"""
|
||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
parent_id = request.get_json()['parent_id']
|
parent_id = request.get_json()['parent_id']
|
||||||
child_id = request.get_json()['child_id']
|
child_id = request.get_json()['child_id']
|
||||||
@ -263,8 +534,7 @@ def addLinkedItem():
|
|||||||
return jsonify({'error': False, 'message': 'Linked Item added!!'})
|
return jsonify({'error': False, 'message': 'Linked Item added!!'})
|
||||||
return jsonify({'error': True, 'message': 'These was an error with adding to the linked list!'})
|
return jsonify({'error': True, 'message': 'These was an error with adding to the linked list!'})
|
||||||
|
|
||||||
@items_api.route('/addBlankItem', methods=["POST"])
|
@items_api.route('/items/addBlankItem', methods=["POST"])
|
||||||
@access_api.login_required
|
|
||||||
def addBlankItem():
|
def addBlankItem():
|
||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
data = {
|
data = {
|
||||||
@ -272,141 +542,147 @@ def addBlankItem():
|
|||||||
'name': request.get_json()['name'],
|
'name': request.get_json()['name'],
|
||||||
'subtype': request.get_json()['subtype']
|
'subtype': request.get_json()['subtype']
|
||||||
}
|
}
|
||||||
|
pprint.pprint(data)
|
||||||
|
database_config = config()
|
||||||
site_name = session['selected_site']
|
site_name = session['selected_site']
|
||||||
user_id = session['user_id']
|
user_id = session['user_id']
|
||||||
|
try:
|
||||||
items_processes.postNewBlankItem(site_name, user_id, data)
|
with psycopg2.connect(**database_config) as conn:
|
||||||
|
process.postNewBlankItem(conn, site_name, user_id, data)
|
||||||
|
except Exception as error:
|
||||||
|
conn.rollback()
|
||||||
|
return jsonify({'error': True, 'message': error})
|
||||||
return jsonify({'error': False, 'message': 'Item added!!'})
|
return jsonify({'error': False, 'message': 'Item added!!'})
|
||||||
    return jsonify({'error': True, 'message': 'There was an error adding this Item!'})
|
|
||||||
@items_api.route('/addSKUPrefix', methods=["POST"])
|
@items_api.route('/items/addSKUPrefix', methods=["POST"])
|
||||||
@access_api.login_required
|
|
||||||
def addSKUPrefix():
|
def addSKUPrefix():
|
||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
|
database_config = config()
|
||||||
site_name = session['selected_site']
|
site_name = session['selected_site']
|
||||||
prefix = dbPayloads.SKUPrefixPayload(
|
try:
|
||||||
|
with psycopg2.connect(**database_config) as conn:
|
||||||
|
prefix = db.SKUPrefixTable.Payload(
|
||||||
request.get_json()['uuid'],
|
request.get_json()['uuid'],
|
||||||
request.get_json()['name'],
|
request.get_json()['name'],
|
||||||
request.get_json()['description']
|
request.get_json()['description']
|
||||||
)
|
)
|
||||||
database_items.insertSKUPrefixtuple(site_name, prefix.payload())
|
db.SKUPrefixTable.insert_tuple(conn, site_name, prefix.payload())
|
||||||
|
except Exception as error:
|
||||||
|
conn.rollback()
|
||||||
|
return jsonify({'error': True, 'message': error})
|
||||||
return jsonify({'error': False, 'message': 'Prefix added!!'})
|
return jsonify({'error': False, 'message': 'Prefix added!!'})
|
||||||
    return jsonify({'error': True, 'message': 'There was an error adding this Prefix!'})
|
|
||||||
@items_api.route('/addConversion', methods=['POST'])
|
@items_api.route('/item/addConversion', methods=['POST'])
|
||||||
@access_api.login_required
|
|
||||||
def addConversion():
|
def addConversion():
|
||||||
|
|
||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
item_id = request.get_json()['parent_id']
|
item_id = request.get_json()['parent_id']
|
||||||
uom_id = request.get_json()['uom_id']
|
uom_id = request.get_json()['uom_id']
|
||||||
conv_factor = request.get_json()['conv_factor']
|
conv_factor = request.get_json()['conv_factor']
|
||||||
site_name = session['selected_site']
|
|
||||||
|
|
||||||
conversion = dbPayloads.ConversionPayload(
|
database_config = config()
|
||||||
|
site_name = session['selected_site']
|
||||||
|
with psycopg2.connect(**database_config) as conn:
|
||||||
|
conversion = db.ConversionsTable.Payload(
|
||||||
item_id, uom_id, conv_factor
|
item_id, uom_id, conv_factor
|
||||||
)
|
)
|
||||||
|
db.ConversionsTable.insert_tuple(conn, site_name, conversion.payload())
|
||||||
database_items.insertConversionTuple(site_name, conversion.payload())
|
|
||||||
|
|
||||||
return jsonify(error=False, message="Conversion was added successfully")
|
return jsonify(error=False, message="Conversion was added successfully")
|
||||||
return jsonify(error=True, message="Unable to save this conversion, ERROR!")
|
return jsonify(error=True, message="Unable to save this conversion, ERROR!")
|
||||||
|
|
||||||
@items_api.route('/deleteConversion', methods=['POST'])
|
@items_api.route('/item/deleteConversion', methods=['POST'])
|
||||||
@access_api.login_required
|
|
||||||
def deleteConversion():
|
def deleteConversion():
|
||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
conversion_id = request.get_json()['conversion_id']
|
conversion_id = request.get_json()['conversion_id']
|
||||||
|
print(conversion_id)
|
||||||
|
database_config = config()
|
||||||
site_name = session['selected_site']
|
site_name = session['selected_site']
|
||||||
database_items.deleteConversionTuple(site_name, (conversion_id,))
|
with psycopg2.connect(**database_config) as conn:
|
||||||
|
db.ConversionsTable.delete_item_tuple(conn, site_name, (conversion_id,))
|
||||||
|
|
||||||
return jsonify(error=False, message="Conversion was deleted successfully")
|
return jsonify(error=False, message="Conversion was deleted successfully")
|
||||||
return jsonify(error=True, message="Unable to delete this conversion, ERROR!")
|
return jsonify(error=True, message="Unable to delete this conversion, ERROR!")
|
||||||
|
|
||||||
@items_api.route('/updateConversion', methods=['POST'])
|
@items_api.route('/item/updateConversion', methods=['POST'])
|
||||||
@access_api.login_required
|
|
||||||
def updateConversion():
|
def updateConversion():
|
||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
conversion_id = request.get_json()['conversion_id']
|
conversion_id = request.get_json()['conversion_id']
|
||||||
update_dictionary = request.get_json()['update']
|
update_dictionary = request.get_json()['update']
|
||||||
|
|
||||||
|
database_config = config()
|
||||||
site_name = session['selected_site']
|
site_name = session['selected_site']
|
||||||
database_items.updateConversionTuple(site_name, {'id': conversion_id, 'update': update_dictionary})
|
with psycopg2.connect(**database_config) as conn:
|
||||||
|
db.ConversionsTable.update_item_tuple(conn, site_name, {'id': conversion_id, 'update': update_dictionary})
|
||||||
return jsonify(error=False, message="Conversion was updated successfully")
|
return jsonify(error=False, message="Conversion was updated successfully")
|
||||||
return jsonify(error=True, message="Unable to save this conversion, ERROR!")
|
return jsonify(error=True, message="Unable to save this conversion, ERROR!")
|
||||||
|
|
||||||
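For reference, a minimal client-side sketch of what the conversion endpoints above expect. The JSON keys come straight from the handlers; the host and the blueprint's mount point are assumptions, not taken from this diff.

import requests

BASE = "http://localhost:5000"  # assumption: local dev server with the items blueprint at the app root

# /item/addConversion reads parent_id, uom_id and conv_factor from the JSON body
requests.post(f"{BASE}/item/addConversion",
              json={"parent_id": 42, "uom_id": 3, "conv_factor": 12.0})

# /item/updateConversion reads the conversion id plus a partial update dictionary
requests.post(f"{BASE}/item/updateConversion",
              json={"conversion_id": 7, "update": {"conv_factor": 6.0}})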

-@items_api.route('/addPrefix', methods=['POST'])
-@access_api.login_required
+@items_api.route('/item/addPrefix', methods=['POST'])
def addPrefix():
    if request.method == "POST":
        item_info_id = request.get_json()['parent_id']
        prefix_id = request.get_json()['prefix_id']
+       print(item_info_id)
+       print(prefix_id)
+       database_config = config()
        site_name = session['selected_site']
-       prefixes = database_items.getItemInfoTuple(site_name, (item_info_id,))['prefixes']
-       prefixes.append(prefix_id)
-       database_items.updateItemInfoTuple(site_name, {'id': item_info_id, 'update':{'prefixes': prefixes}})
+       with psycopg2.connect(**database_config) as conn:
+           prefixes = db.ItemInfoTable.select_tuple(conn, site_name, (item_info_id,))['prefixes']
+           print(prefixes)
+           prefixes.append(prefix_id)
+           db.ItemInfoTable.update_tuple(conn, site_name, {'id': item_info_id, 'update':{'prefixes': prefixes}})
        return jsonify(error=False, message="Prefix was added successfully")
    return jsonify(error=True, message="Unable to save this prefix, ERROR!")


-@items_api.route('/deletePrefix', methods=['POST'])
-@access_api.login_required
+@items_api.route('/item/deletePrefix', methods=['POST'])
def deletePrefix():
    if request.method == "POST":
        item_info_id = request.get_json()['item_info_id']
        prefix_id = request.get_json()['prefix_id']

+       database_config = config()
        site_name = session['selected_site']
-       prefixes = database_items.getItemInfoTuple(site_name, (item_info_id,))['prefixes']
-       prefixes.remove(prefix_id)
-       database_items.updateItemInfoTuple(site_name, {'id': item_info_id, 'update':{'prefixes': prefixes}})
+       with psycopg2.connect(**database_config) as conn:
+           prefixes = db.ItemInfoTable.select_tuple(conn, site_name, (item_info_id,))['prefixes']
+           prefixes.remove(prefix_id)
+           db.ItemInfoTable.update_tuple(conn, site_name, {'id': item_info_id, 'update':{'prefixes': prefixes}})
        return jsonify(error=False, message="Prefix was deleted successfully")
    return jsonify(error=True, message="Unable to delete this prefix, ERROR!")

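Both prefix routes are the same read-modify-write on the item_info prefixes array; a standalone restatement of that step (the function name and shapes here are illustrative only, not part of the project's API):

def build_prefix_update(prefixes: list, prefix_id: int, add: bool) -> dict:
    # Work on a copy so the fetched row is left untouched.
    updated = list(prefixes)
    if add:
        updated.append(prefix_id)
    elif prefix_id in updated:
        updated.remove(prefix_id)
    # Matches the {'id': ..., 'update': {...}} shape passed to update_tuple above.
    return {'prefixes': updated}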

-@items_api.route('/refreshSearchString', methods=['POST'])
-@access_api.login_required
+@items_api.route('/item/refreshSearchString', methods=['POST'])
def refreshSearchString():
    if request.method == "POST":
        item_id = request.get_json()['item_id']

+       database_config = config()
        site_name = session['selected_site']
-       item = database_items.getItemAllByID(site_name, (item_id,))
-       search_string = items_processes.createSearchStringFromItem(item)
-       database_items.postUpdateItemByID(site_name, {'id': item_id, 'update':{'search_string': search_string}})
+       with psycopg2.connect(**database_config) as conn:
+           item = db.ItemTable.getItemAllByID(conn, site_name, (item_id,))
+           parameters = [f"id::{item['id']}", f"barcode::{item['barcode']}", f"name::{item['item_name']}", f"brand::{item['brand']['name']}",
+                         f"expires::{item['food_info']['expires']}", f"row_type::{item['row_type']}", f"item_type::{item['item_type']}"]
+
+           for prefix in item['item_info']['prefixes']:
+               parameters.append(f"prefix::{prefix['name']}")
+
+           search_string = "&&".join(parameters)
+           db.ItemTable.update_tuple(conn, site_name, {'id': item_id, 'update':{'search_string': search_string}})

        return jsonify(error=False, message="Search String was updated successfully")
    return jsonify(error=True, message="Unable to update this search string, ERROR!")

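The search string the route builds is a flat key::value list joined with &&; a quick illustration of the resulting format, with made-up field values:

# Illustrative only -- the field values are invented, the format comes from the route above.
item = {
    'id': 12, 'barcode': '012345678905', 'item_name': 'Rolled Oats',
    'brand': {'name': 'Acme'}, 'row_type': 'single', 'item_type': 'food',
    'food_info': {'expires': True},
    'item_info': {'prefixes': [{'name': 'PANTRY'}]},
}
parameters = [f"id::{item['id']}", f"barcode::{item['barcode']}", f"name::{item['item_name']}",
              f"brand::{item['brand']['name']}", f"expires::{item['food_info']['expires']}",
              f"row_type::{item['row_type']}", f"item_type::{item['item_type']}"]
parameters += [f"prefix::{p['name']}" for p in item['item_info']['prefixes']]
print("&&".join(parameters))
# id::12&&barcode::012345678905&&name::Rolled Oats&&brand::Acme&&expires::True&&row_type::single&&item_type::food&&prefix::PANTRY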

-@items_api.route('/postNewItemLocation', methods=['POST'])
-@access_api.login_required
+@items_api.route('/item/postNewItemLocation', methods=['POST'])
def postNewItemLocation():
    if request.method == "POST":
        item_id = request.get_json()['item_id']
        location_id = request.get_json()['location_id']
+       database_config = config()
        site_name = session['selected_site']
-       item_location = dbPayloads.ItemLocationPayload(item_id, location_id)
-       database_items.insertItemLocationsTuple(site_name, item_location.payload())
+       with psycopg2.connect(**database_config) as conn:
+           item_location = db.ItemLocationsTable.Payload(
+               item_id,
+               location_id
+           )
+           db.ItemLocationsTable.insert_tuple(conn, site_name, item_location.payload())
        return jsonify(error=False, message="Location was added successfully")
    return jsonify(error=True, message="Unable to save this location, ERROR!")

@items_api.route("/getItemLocations", methods=["GET"])
|
|
||||||
@access_api.login_required
|
|
||||||
def getItemLocations():
|
|
||||||
recordset = []
|
|
||||||
count = 0
|
|
||||||
if request.method == "GET":
|
|
||||||
item_id = int(request.args.get('id', 1))
|
|
||||||
page = int(request.args.get('page', 1))
|
|
||||||
limit = int(request.args.get('limit', 10))
|
|
||||||
site_name = session['selected_site']
|
|
||||||
offset = (page - 1) * limit
|
|
||||||
recordset, count = database_items.getItemLocations(site_name, (item_id, limit, offset))
|
|
||||||
return jsonify({"locations":recordset, "end":math.ceil(count/limit), "error":False, "message":"item fetched succesfully!"})
|
|
||||||
return jsonify({"locations":recordset, "end": math.ceil(count/limit), "error":True, "message":"There was an error with this GET statement"})
|
|
||||||
|
|
||||||
@items_api.route('/postTransaction', methods=["POST"])
|
|
||||||
@access_api.login_required
|
|
||||||
def post_transaction():
|
|
||||||
if request.method == "POST":
|
|
||||||
result = items_processes.postAdjustment(
|
|
||||||
site_name=session['selected_site'],
|
|
||||||
user_id=session['user_id'],
|
|
||||||
data=dict(request.json)
|
|
||||||
)
|
|
||||||
return jsonify(result)
|
|
||||||
return jsonify({"error":True, "message":"There was an error with this POST statement"})
|
|
||||||
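The removed getItemLocations route pages its results with a plain limit/offset scheme; here is the arithmetic it relies on, shown with made-up numbers:

import math

# Illustrative numbers only: 23 matching locations, 10 rows per page.
count, limit = 23, 10
for page in (1, 2, 3):
    offset = (page - 1) * limit           # rows to skip: 0, 10, 20
    last_page = math.ceil(count / limit)  # the "end" value returned to the client: 3
    print(page, offset, last_page)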
@@ -1,111 +1,9 @@
-# 3RD PARTY IMPORTS
-import datetime
-import psycopg2
-import json
-
-# APPLICATION IMPORTS
from application.items import database_items
import application.postsqldb as db
-import application.database_payloads as dbPayloads
import config

+import datetime
+import psycopg2
-"""
-items_processes.py handles more higher order workflows that a single database call would not be able to accomplish
-or when more complex logics are needed.
-"""
-
-
-def postNewBlankItem(site_name: str, user_id: int, data: dict, conn=None):
-    """ data = {'barcode', 'name', 'subtype'}"""
-    self_conn = False
-    if not conn:
-        database_config = config.config()
-        conn = psycopg2.connect(**database_config)
-        conn.autocommit = False
-        self_conn = True
-
-    site = database_items.selectSiteTuple((site_name,))
-    default_zone = database_items.getZone(site_name,(site['default_zone'], ))
-    default_location = database_items.getLocation(site_name, (site['default_primary_location'],))
-    uuid = f"{default_zone['name']}@{default_location['name']}"
-
-    # create logistics info
-    logistics_info = dbPayloads.LogisticsInfoPayload(
-        barcode=data['barcode'],
-        primary_location=site['default_primary_location'],
-        primary_zone=site['default_zone'],
-        auto_issue_location=site['default_auto_issue_location'],
-        auto_issue_zone=site['default_zone']
-    )
-
-    # create item info
-    item_info = dbPayloads.ItemInfoPayload(data['barcode'])
-
-    # create Food Info
-    food_info = dbPayloads.FoodInfoPayload()
-
-    logistics_info_id = 0
-    item_info_id = 0
-    food_info_id = 0
-    brand_id = 1
-
-    logistics_info = database_items.insertLogisticsInfoTuple(site_name, logistics_info.payload(), conn=conn)
-    item_info = database_items.insertItemInfoTuple(site_name, item_info.payload(), conn=conn)
-    food_info = database_items.insertFoodInfoTuple(site_name, food_info.payload(), conn=conn)
-
-    name = data['name']
-    name = name.replace("'", "@&apostraphe&")
-    description = ""
-    tags = db.lst2pgarr([])
-    links = json.dumps({})
-    search_string = f"&&{data['barcode']}&&{name}&&"
-
-    item = dbPayloads.ItemsPayload(
-        data['barcode'],
-        data['name'],
-        item_info['id'],
-        logistics_info['id'],
-        food_info['id'],
-        brand=brand_id,
-        row_type="single",
-        item_type=data['subtype'],
-        search_string=search_string
-    )
-
-    item = database_items.insertItemTuple(site_name, item.payload(), conn=conn)
-
-    with conn.cursor() as cur:
-        cur.execute(f"SELECT id FROM {site_name}_locations WHERE uuid=%s;", (uuid, ))
-        location_id = cur.fetchone()[0]
-
-    dbPayloads.ItemLocationPayload
-    item_location = dbPayloads.ItemLocationPayload(item['id'], location_id)
-    database_items.insertItemLocationsTuple(site_name, item_location.payload(), conn=conn)
-
-    creation_tuple = dbPayloads.TransactionPayload(
-        datetime.datetime.now(),
-        logistics_info['id'],
-        item['barcode'],
-        item['item_name'],
-        "SYSTEM",
-        0.0,
-        "Item added to the System!",
-        user_id,
-        {'location': uuid}
-    )
-
-    database_items.postAddTransaction(site_name, creation_tuple.payload(), conn=conn)
-
-    if self_conn:
-        conn.commit()
-        conn.close()
-        return False
-
-    return conn

def postLinkedItem(site, payload):
    """
@@ -269,13 +167,3 @@ def postAdjustment(site_name, user_id, data: dict, conn=None):
        return False

    return conn

-def createSearchStringFromItem(item: dict):
-    parameters = [f"id::{item['id']}", f"barcode::{item['barcode']}", f"name::{item['item_name']}", f"brand::{item['brand']['name']}",
-                  f"expires::{item['food_info']['expires']}", f"row_type::{item['row_type']}", f"item_type::{item['item_type']}"]
-
-    for prefix in item['item_info']['prefixes']:
-        parameters.append(f"prefix::{prefix['name']}")
-
-    search_string = "&&".join(parameters)
-    return search_string
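Most helpers in this changeset share the optional-connection idiom seen in postNewBlankItem: accept conn=None, and only open, commit and close a connection when the caller did not supply one, so several calls can share a single transaction. A stripped-down sketch of that idiom, independent of the project's tables and with a try/finally added for safety:

import psycopg2

def with_optional_conn(database_config: dict, conn=None):
    """Illustrative only: mirrors the self_conn pattern used throughout these modules."""
    self_conn = False
    if not conn:
        conn = psycopg2.connect(**database_config)  # caller gave us nothing, so we own the connection
        conn.autocommit = False
        self_conn = True
    try:
        with conn.cursor() as cur:
            cur.execute("SELECT 1;")
    finally:
        if self_conn:            # only commit/close what we opened ourselves
            conn.commit()
            conn.close()
    return None if self_conn else conn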
@@ -1,5 +0,0 @@
SELECT * FROM %%site_name%%_item_locations
LEFT JOIN %%site_name%%_locations ON %%site_name%%_locations.id = %%site_name%%_item_locations.location_id
WHERE part_id = %s
LIMIT %s
OFFSET %s;

@@ -1,4 +0,0 @@
INSERT INTO %%site_name%%_food_info
(ingrediants, food_groups, nutrients, expires, default_expiration)
VALUES (%s, %s, %s, %s, %s)
RETURNING *;

@@ -1,4 +0,0 @@
INSERT INTO %%site_name%%_item_info
(barcode, packaging, uom_quantity, uom, cost, safety_stock, lead_time_days, ai_pick, prefixes)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
RETURNING *;

@@ -1,4 +0,0 @@
INSERT INTO %%site_name%%_item_locations
(part_id, location_id, quantity_on_hand, cost_layers)
VALUES (%s, %s, %s, %s)
RETURNING *;

@@ -1,5 +0,0 @@
INSERT INTO %%site_name%%_items
(barcode, item_name, brand, description, tags, links, item_info_id, logistics_info_id,
 food_info_id, row_type, item_type, search_string)
VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
RETURNING *;

@@ -1,4 +0,0 @@
INSERT INTO %%site_name%%_logistics_info
(barcode, primary_location, primary_zone, auto_issue_location, auto_issue_zone)
VALUES (%s, %s, %s, %s, %s)
RETURNING *;

@@ -1,4 +0,0 @@
INSERT INTO %%site_name%%_sku_prefix
(uuid, name, description)
VALUES (%s, %s, %s)
RETURNING *;
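These per-site SQL files are plain-text templates; elsewhere in this diff they are loaded with the %%site_name%% token substituted before execution while the %s placeholders are left for psycopg2. A minimal sketch of that loading step (the file path and payload below are illustrative):

import psycopg2

def load_site_sql(path: str, site: str) -> str:
    # Same substitution the poe database helpers use further down in this diff.
    with open(path, "r") as file:
        return file.read().replace("%%site_name%%", site)

# e.g. (illustrative path, site and values)
# sql = load_site_sql("application/items/sql/insertSKUPrefixTuple.sql", "pantry")
# with psycopg2.connect(**database_config) as conn, conn.cursor() as cur:
#     cur.execute(sql, ("UUID-1", "PANTRY", "Dry goods"))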
@ -1,254 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html lang="en" dir="ltr" id="main_html">
|
|
||||||
<head>
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" charset="utf-8" />
|
|
||||||
<title id="title"></title>
|
|
||||||
<!-- Material Icons -->
|
|
||||||
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet" />
|
|
||||||
<!-- Material Symbols - Outlined Set -->
|
|
||||||
<link href="https://fonts.googleapis.com/css2?family=Material+Symbols+Outlined" rel="stylesheet" />
|
|
||||||
<!-- Material Symbols - Rounded Set -->
|
|
||||||
<link href="https://fonts.googleapis.com/css2?family=Material+Symbols+Rounded" rel="stylesheet" />
|
|
||||||
<!-- Material Symbols - Sharp Set -->
|
|
||||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/uikit.min.css') }}"/>
|
|
||||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/pantry.css') }}"/>
|
|
||||||
|
|
||||||
<link id="dark-mode" rel="stylesheet" href="{{ url_for('static', filename='css/dark-mode.css') }}" disabled/>
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
</head>
|
|
||||||
<style>
|
|
||||||
.custom_row:hover{
|
|
||||||
background-color: rgb(230, 230, 230) !important;
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
<body>
|
|
||||||
<nav class="uk-navbar-container">
|
|
||||||
<div class="uk-container uk-container-expand">
|
|
||||||
<div class="uk-navbar uk-navbar-primary">
|
|
||||||
<!-- Application Navigation-->
|
|
||||||
<div class="uk-navbar-left">
|
|
||||||
<ul class="uk-navbar-nav">
|
|
||||||
<li>
|
|
||||||
<a href>Apps</a>
|
|
||||||
<div class="uk-navbar-dropdown" uk-drop="mode: click; multi:false">
|
|
||||||
<ul class="uk-nav uk-navbar-dropdown-nav">
|
|
||||||
<li><a href="/recipes">Recipes</a></li>
|
|
||||||
<li><a href="/shopping-lists">Shopping Lists</a></li>
|
|
||||||
<li class="uk-nav-header">Logistics</li>
|
|
||||||
<li><a href="/items">Items</a></li>
|
|
||||||
<li><a href="/items/transaction">Transaction</a></li>
|
|
||||||
<li><a href="/receipts">Receipts</a></li>
|
|
||||||
<li class="uk-nav-header">Points of Ease</li>
|
|
||||||
<li><a href="/poe/scanner">Transaction Scanner</a></li>
|
|
||||||
<li><a href="/poe/receipts">Receipts Scanner</a></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
|
|
||||||
</div>
|
|
||||||
<!-- Breadcrumbs Navigation -->
|
|
||||||
<div class="uk-navbar-center uk-visible@m">
|
|
||||||
<ul class="uk-breadcrumb uk-margin-remove">
|
|
||||||
<li style="cursor: pointer;"><span><strong>{{current_site}}</strong></span>
|
|
||||||
<div uk-dropdown="mode: hover">
|
|
||||||
<ul class="uk-nav uk-dropdown-nav">
|
|
||||||
<li class="uk-nav-header">Select Site</li>
|
|
||||||
<li class="uk-nav-divider"></li>
|
|
||||||
{% for site in sites %}
|
|
||||||
{% if site == current_site %}
|
|
||||||
<li><a class="uk-disabled" href="#">{{site}}</a></li>
|
|
||||||
{% else %}
|
|
||||||
<li><a onclick="changeSite('{{site}}')">{{site}}</a></li>
|
|
||||||
{% endif %}
|
|
||||||
{% endfor %}
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li style="cursor: default; user-select: none;" class="uk-disabled"><span>Logistics</span></li>
|
|
||||||
<li class="uk-disabled"><span>Manual Transaction Entry</span></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<!-- Profile/Management Navigation-->
|
|
||||||
<div class="uk-navbar-right">
|
|
||||||
<ul class="uk-navbar-nav">
|
|
||||||
<li>
|
|
||||||
<a href="#">
|
|
||||||
<img src="{{session['user']['profile_pic_url']}}" alt="Profile Picture" class="profile-pic uk-visible@m" style="width: 40px; height: 40px; border-radius: 50%; margin-right: 5px;">
|
|
||||||
{{username}}
|
|
||||||
</a>
|
|
||||||
<div class="uk-navbar-dropdown" uk-drop="mode: click; multi:false">
|
|
||||||
<ul class="uk-nav uk-navbar-dropdown-nav">
|
|
||||||
<li><a href="/profile">Profile</a></li>
|
|
||||||
<li><a onclick="toggleDarkMode()">Dark Mode</a></li>
|
|
||||||
<li><a href="/site-management">Site Management</a></li>
|
|
||||||
<li><a href="/administration">System Management</a></li>
|
|
||||||
<li><a href="/access/logout">Logout</a></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</nav>
|
|
||||||
<div class="uk-container uk-section">
|
|
||||||
|
|
||||||
|
|
||||||
<div class="uk-width-1-1 uk-visible@m">
|
|
||||||
<h5 class="uk-heading-small uk-text-center">Enter Transaction Receipt</h5>
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1 uk-visible@m">
|
|
||||||
<hr class="uk-divider-icon">
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<ul uk-tab>
|
|
||||||
<li><a href="#">Manual Transaction</a></li>
|
|
||||||
</ul>
|
|
||||||
|
|
||||||
<div class="uk-switcher">
|
|
||||||
<div class="uk-grid-small" uk-grid>
|
|
||||||
<div class="uk-width-1-1">
|
|
||||||
<div uk-grid>
|
|
||||||
<div class="uk-width-1-1 uk-child-width-expand@s uk-grid-small uk-flex uk-flex-bottom uk-margin" uk-grid>
|
|
||||||
<div class="uk-width-1-3@m">
|
|
||||||
<label class="uk-form-label" for="database_id">Database ID</label>
|
|
||||||
<input id="database_id" class="uk-input uk-disabled uk-flex uk-flex-bottom" type="text">
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-2-3@m uk-flex uk-flex-bottom">
|
|
||||||
<button onclick="openItemsModal('itemsPage')" class="uk-button uk-button-default uk-flex uk-flex-middle uk-margin-remove-left" uk-tooltip="Select an item from the system by its database id."><span class="material-symbols-outlined" style="padding-right: 5px;">event_list</span> Item Lookup</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1">
|
|
||||||
<label class="uk-form-label" for="barcode">Barcode</label>
|
|
||||||
<input class="uk-input uk-width-1-2@m uk-disabled" id="barcode" type="text" placeholder=" " maxlength="20">
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1 uk-margin-top">
|
|
||||||
<label class="uk-form-label" for="name">Item Name</label>
|
|
||||||
<input class="uk-input uk-disabled" id="name" type="text" placeholder=" " maxlength="20">
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1 uk-margin-top" uk-grid>
|
|
||||||
<div class="uk-width-1-2">
|
|
||||||
<label class="uk-form-label" for="QOH">Quantity on Hand</label>
|
|
||||||
<input class="uk-input uk-disabled" id="QOH" type="text" placeholder=" " maxlength="20">
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-2">
|
|
||||||
<label class="uk-form-label" for="UOM">Unit of Measure</label>
|
|
||||||
<input class="uk-input uk-disabled" id="UOM" type="text" placeholder=" " maxlength="20">
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1" uk-grid>
|
|
||||||
<div class="uk-width-expand">
|
|
||||||
<label for="trans_type">Transaction Type</label>
|
|
||||||
<select onchange="setTransactionTypeAdjustments()" class="uk-select" id="trans_type" aria-label="Select">
|
|
||||||
<option value="0" disabled selected>Choose your option</option>
|
|
||||||
<option value="Adjust In">Adjust In</option>
|
|
||||||
<option value="Adjust Out">Adjust Out</option>
|
|
||||||
</select>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1 uk-flex" uk-grid>
|
|
||||||
<div class="uk-width-1-1 uk-child-width-expand@s uk-grid-small uk-flex uk-flex-bottom uk-margin" uk-grid>
|
|
||||||
<div class="uk-width-1-3@m">
|
|
||||||
<label class="uk-form-label" for="zone">Zone</label>
|
|
||||||
<input id="zone" class="uk-input uk-disabled uk-flex uk-flex-bottom" type="text">
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-2-3@m uk-flex uk-flex-bottom">
|
|
||||||
<a id="itemLocations" href="#itemLocationsModal" class="uk-button uk-button-default uk-flex uk-flex-middle uk-disabled" uk-tooltip="See all the item's locations and their QOH" uk-toggle><span class="material-symbols-outlined" style="padding-right: 5px;">event_list</span> Item Locations</a>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1 uk-flex" uk-grid>
|
|
||||||
<div class="uk-width-1-1 uk-child-width-expand@s uk-grid-small uk-flex uk-flex-bottom uk-margin" uk-grid>
|
|
||||||
<div class="uk-width-1-3@m">
|
|
||||||
<label class="uk-form-label" for="location">Locations</label>
|
|
||||||
<input id="location" class="uk-input uk-disabled uk-flex uk-flex-bottom" type="text">
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1 uk-flex" uk-grid>
|
|
||||||
<div class="uk-width-1-4@m">
|
|
||||||
<label class="uk-form-label" for="transaction_quantity">Quantity</label>
|
|
||||||
<input class="uk-input" id="transaction_quantity" type="text" placeholder=" " maxlength="20" value="0.00">
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-4@m">
|
|
||||||
<label class="uk-form-label" for="transaction_cost">SKU cost</label>
|
|
||||||
<input class="uk-input" id="transaction_cost" type="text" placeholder=" " maxlength="20" value="0.00">
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1">
|
|
||||||
<label class="uk-form-label" for="transaction_description">Description</label>
|
|
||||||
<input class="uk-input" id="transaction_description" type="text" placeholder=" " maxlength="128" value="">
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1">
|
|
||||||
<hr class="uk-divider-icon">
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1 uk-flex uk-flex-right">
|
|
||||||
<button onclick="submitTransaction()" class="uk-button uk-button-primary">Submit</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<!-- Modals -->
|
|
||||||
<!-- This is the modal USED FOR MANUAL TRANSACTION-->
|
|
||||||
<div id="itemsModal" uk-modal>
|
|
||||||
<div id="itemsModalInner" class="uk-modal-dialog uk-modal-body " uk-overflow-auto>
|
|
||||||
<h2 class="uk-modal-title">Select Item</h2>
|
|
||||||
<p>Select an Item from the system...</p>
|
|
||||||
<div id="searchItemsForm" onkeydown="searchTable(event, 'items', 'itemsPage')" class="uk-search uk-search-default uk-align-center">
|
|
||||||
<input id="searchItemsInput" class="uk-border-pill uk-search-input" type="search" placeholder="" aria-label="">
|
|
||||||
<span class="uk-search-icon-flip" uk-search-icon></span>
|
|
||||||
</div>
|
|
||||||
<nav aria-label="Pagination">
|
|
||||||
<ul id="itemsPage" class="uk-pagination uk-flex-center" uk-margin>
|
|
||||||
<li><a href="#"><span uk-pagination-previous></span></a></li>
|
|
||||||
<li><a href="#">1</a></li>
|
|
||||||
<li class="uk-disabled"><span>…</span></li>
|
|
||||||
<li><a href="#">5</a></li>
|
|
||||||
<li><a href="#">6</a></li>
|
|
||||||
<li class="uk-active"><span aria-current="page">7</span></li>
|
|
||||||
<li><a href="#">8</a></li>
|
|
||||||
<li><a href="#"><span uk-pagination-next></span></a></li>
|
|
||||||
</ul>
|
|
||||||
</nav>
|
|
||||||
<table class="uk-table uk-table-striped uk-table-hover">
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th>ID</th>
|
|
||||||
<th>Barcode</th>
|
|
||||||
<th>Name</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody id="itemsTableBody">
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<!-- Item Locations Modal USED FOR MANUAL TRANSACTION-->
|
|
||||||
<div id="itemLocationsModal" uk-modal>
|
|
||||||
<div id="itemLocationsModalInner" class="uk-modal-dialog uk-modal-body " uk-overflow-auto>
|
|
||||||
<h2 class="uk-modal-title">Item Locations</h2>
|
|
||||||
<table class="uk-table uk-table-striped uk-table-hover">
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th>Zone</th>
|
|
||||||
<th>Location</th>
|
|
||||||
<th>QOH</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody id="itemLocationTableBody">
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
{% assets "js_all" %}
|
|
||||||
<script type="text/javascript" src="{{ ASSET_URL }}"></script>
|
|
||||||
{% endassets %}
|
|
||||||
<script type="text/javascript" src="{{ url_for('items_api.static', filename='transactionHandler.js') }}"></script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
@ -1,142 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html lang="en" dir="ltr">
|
|
||||||
<head>
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" charset="utf-8" />
|
|
||||||
<title id="title"></title>
|
|
||||||
|
|
||||||
|
|
||||||
<!-- Material Icons -->
|
|
||||||
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet" />
|
|
||||||
<!-- Material Symbols - Outlined Set -->
|
|
||||||
<link href="https://fonts.googleapis.com/css2?family=Material+Symbols+Outlined" rel="stylesheet" />
|
|
||||||
<!-- Material Symbols - Rounded Set -->
|
|
||||||
<link href="https://fonts.googleapis.com/css2?family=Material+Symbols+Rounded" rel="stylesheet" />
|
|
||||||
<!-- Material Symbols - Sharp Set -->
|
|
||||||
<link href="https://fonts.googleapis.com/css2?family=Material+Symbols+Sharp" rel="stylesheet" />
|
|
||||||
|
|
||||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/uikit.min.css') }}"/>
|
|
||||||
|
|
||||||
<script src="{{ url_for('static', filename='js/uikit.min.js') }}"></script>
|
|
||||||
<script src="{{ url_for('static', filename='js/uikit-icons.min.js') }}"></script>
|
|
||||||
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<nav class="uk-navbar-container">
|
|
||||||
<div class="uk-container uk-container-expand">
|
|
||||||
<div class="uk-navbar uk-navbar-primary">
|
|
||||||
<!-- Application Navigation-->
|
|
||||||
<div class="uk-navbar-left">
|
|
||||||
<ul class="uk-navbar-nav">
|
|
||||||
<li>
|
|
||||||
<a href>Apps</a>
|
|
||||||
<div class="uk-navbar-dropdown" uk-drop="mode: click; multi:false">
|
|
||||||
<ul class="uk-nav uk-navbar-dropdown-nav">
|
|
||||||
<li><a href="/recipes">Recipes</a></li>
|
|
||||||
<li><a href="/shopping-lists">Shopping Lists</a></li>
|
|
||||||
<li class="uk-nav-header">Logistics</li>
|
|
||||||
<li><a href="/items">Items</a></li>
|
|
||||||
<li><a href="/items/transaction">Transaction</a></li>
|
|
||||||
<li><a href="/receipts">Receipts</a></li>
|
|
||||||
<li class="uk-nav-header">Points of Ease</li>
|
|
||||||
<li><a href="/poe/scanner">Transaction Scanner</a></li>
|
|
||||||
<li><a href="/poe/receipts">Receipts Scanner</a></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
|
|
||||||
</div>
|
|
||||||
<!-- Breadcrumbs Navigation -->
|
|
||||||
<div class="uk-navbar-center uk-visible@m">
|
|
||||||
<ul class="uk-breadcrumb uk-margin-remove">
|
|
||||||
<li class="uk-disabled" style="cursor: pointer;"><span><strong>{{current_site}}</strong></span>
|
|
||||||
<div uk-dropdown="mode: hover">
|
|
||||||
<ul class="uk-nav uk-dropdown-nav">
|
|
||||||
<li class="uk-nav-header">Select Site</li>
|
|
||||||
<li class="uk-nav-divider"></li>
|
|
||||||
{% for site in sites %}
|
|
||||||
{% if site == current_site %}
|
|
||||||
<li><a class="uk-disabled" href="#">{{site}}</a></li>
|
|
||||||
{% else %}
|
|
||||||
<li><a onclick="changeSite('{{site}}')">{{site}}</a></li>
|
|
||||||
{% endif %}
|
|
||||||
{% endfor %}
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li style="cursor: default; user-select: none;" class="uk-disabled"><span>Logistics</span></li>
|
|
||||||
<li class="uk-disabled"><span>Items</span></li>
|
|
||||||
<li class="uk-disabled"><span>Transactions</span></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<!-- Profile/Management Navigation-->
|
|
||||||
<div class="uk-navbar-right">
|
|
||||||
<ul class="uk-navbar-nav">
|
|
||||||
<li>
|
|
||||||
<a href="#">
|
|
||||||
<img src="{{session['user']['profile_pic_url']}}" alt="Profile Picture" class="profile-pic uk-visible@m" style="width: 40px; height: 40px; border-radius: 50%; margin-right: 5px;">
|
|
||||||
{{username}}
|
|
||||||
</a>
|
|
||||||
<div class="uk-navbar-dropdown" uk-drop="mode: click; multi:false">
|
|
||||||
<ul class="uk-nav uk-navbar-dropdown-nav">
|
|
||||||
<li><a href="/profile">Profile</a></li>
|
|
||||||
<li><a onclick="toggleDarkMode()">Dark Mode</a></li>
|
|
||||||
<li><a href="/site-management">Site Management</a></li>
|
|
||||||
<li><a href="/administration">System Management</a></li>
|
|
||||||
<li><a href="/access/logout">Logout</a></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</nav>
|
|
||||||
<div class="uk-container">
|
|
||||||
<div class="uk-section">
|
|
||||||
<nav aria-label="Pagination">
|
|
||||||
<ul id="paginationElement" class="uk-pagination uk-flex-center" uk-margin>
|
|
||||||
<!-- populated by javascript updatePaginationElement -->
|
|
||||||
</ul>
|
|
||||||
</nav>
|
|
||||||
<table id="transactionsTable" class="uk-table uk-table-striped uk-table-hover">
|
|
||||||
<!-- populated by javascript -->
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th>Timestamp</th>
|
|
||||||
<th>Barcode</th>
|
|
||||||
<th>Name</th>
|
|
||||||
<th>Transaction Type</th>
|
|
||||||
<th>Quantity</th>
|
|
||||||
<th>Description</th>
|
|
||||||
<th>User ID</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
|
|
||||||
<tbody id="transactionsTableBody"></tbody>
|
|
||||||
</table>
|
|
||||||
<a href="#" uk-totop uk-scroll></a>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="transactionModal" uk-modal>
|
|
||||||
<div class="uk-modal-dialog uk-modal-body">
|
|
||||||
<h2 class="uk-modal-title">Transaction Receipt</h2>
|
|
||||||
<p id="trans_barcode">...</p>
|
|
||||||
<p id="trans_database_id">...</p>
|
|
||||||
<p id="trans_timestamp">...</p>
|
|
||||||
<p id="trans_name">...</p>
|
|
||||||
<p id="trans_type">...</p>
|
|
||||||
<p id="trans_qty">...</p>
|
|
||||||
<p id="trans_description">...</p>
|
|
||||||
<p id="trans_user">...</p>
|
|
||||||
<table id="dataTable" class="uk-table uk-table-striped">
|
|
||||||
<!-- populated by javascript -->
|
|
||||||
<thead><tr><th>Key</th><th>Value</th></tr></thead>
|
|
||||||
<tbody id="receiptTableBody"></tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</body>
|
|
||||||
<script src="{{ url_for('items_api.static', filename='transactionsHandler.js') }}"></script>
|
|
||||||
<script>const item_id = {{id|tojson}}</script>
|
|
||||||
</html>
|
|
||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,70 +0,0 @@
# 3RD PARTY IMPORTS
from flask import (
    Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
)
import psycopg2

# APPLICATION IMPORTS
from config import config
from application.access_module import access_api
from application.poe import poe_processes, poe_database
from application import postsqldb


point_of_ease = Blueprint('poe', __name__, template_folder="templates", static_folder="static")


@point_of_ease.route('/scanner', methods=["GET"])
@access_api.login_required
def scannerEndpoint():
    sites = [site[1] for site in postsqldb.get_sites(session['user']['sites'])]
    return render_template('scanner.html', current_site=session['selected_site'],
                           sites=sites)

@point_of_ease.route('/receipts', methods=["GET"])
@access_api.login_required
def receiptsEndpoint():
    sites = [site[1] for site in postsqldb.get_sites(session['user']['sites'])]
    database_config = config()
    with psycopg2.connect(**database_config) as conn:
        units = postsqldb.UnitsTable.getAll(conn)
    return render_template('receipts.html', current_site=session['selected_site'],
                           sites=sites, units=units)

@point_of_ease.route('/getItem/barcode', methods=["GET"])
@access_api.login_required
def getItemBarcode():
    record = {}
    if request.method == "GET":
        item_barcode = f"%{str(request.args.get('barcode', 1))}%"
        site_name = session['selected_site']
        record = poe_database.selectItemAllByBarcode(site_name, (item_barcode,))
        print(record)
        if record == {} or record == ():
            return jsonify({"item":None, "error":True, "message":"Item either does not exist or there was a larger problem!"})
        else:
            return jsonify({"item":record, "error":False, "message":"item fetched succesfully!"})
    return jsonify({"item":record, "error":True, "message":"There was an error with this GET statement"})

@point_of_ease.route('/postTransaction', methods=["POST"])
@access_api.login_required
def post_transaction():
    if request.method == "POST":
        result = poe_processes.postTransaction(
            site_name=session['selected_site'],
            user_id=session['user_id'],
            data=dict(request.json)
        )
        return jsonify(result)
    return jsonify({"error":True, "message":"There was an error with this POST statement"})

@point_of_ease.route('/postReceipt', methods=["POST"])
@access_api.login_required
def post_receipt():
    if request.method == "POST":
        site_name = session['selected_site']
        user_id = session['user_id']
        data= {'items': request.json['items']}
        status = poe_processes.post_receipt(site_name, user_id, data)
        return jsonify(status)
    return jsonify({"error":True, "message":"There was an error with this POST statement"})
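For context, a Blueprint such as point_of_ease only becomes reachable once it is registered on the application. A minimal sketch of that wiring; the create_app factory and the /poe prefix are assumptions (the prefix merely matches the /poe/scanner and /poe/receipts links in the navigation templates), not something this diff shows:

from flask import Flask
from application.poe import point_of_ease  # assumption: package layout as used in the imports above

def create_app():
    app = Flask(__name__)
    app.register_blueprint(point_of_ease, url_prefix="/poe")
    return app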
@@ -1,419 +0,0 @@
# 3RD PARTY IMPORTS
import psycopg2

# APPLICATION IMPORTS
import config
from application import postsqldb

def request_receipt_id(conn, site_name):
    next_receipt_id = None
    sql = f"SELECT receipt_id FROM {site_name}_receipts ORDER BY id DESC LIMIT 1;"
    try:
        database_config = config.config()
        with psycopg2.connect(**database_config) as conn:
            with conn.cursor() as cur:
                cur.execute(sql)
                next_receipt_id = cur.fetchone()
                if next_receipt_id == None:
                    next_receipt_id = "00000001"
                else:
                    next_receipt_id = next_receipt_id[0]
                    next_receipt_id = int(next_receipt_id.split("-")[1]) + 1
                    y = str(next_receipt_id)
                    len_str = len(y)
                    x = "".join(["0" for _ in range(8 - len_str)])
                    next_receipt_id = x + y
    except (Exception, psycopg2.DatabaseError) as error:
        raise postsqldb.DatabaseError(error, payload=(), sql=sql)

    return next_receipt_id

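The receipt id is the previous id's numeric suffix plus one, left-padded to eight digits; a tiny illustration of that padding (the SIR- prefix is added by post_receipt further down, and zfill here stands in for the manual join above):

def next_receipt_suffix(last_receipt_id):
    # Illustrative restatement of the logic above.
    if last_receipt_id is None:
        return "00000001"
    return str(int(last_receipt_id.split("-")[1]) + 1).zfill(8)

print(next_receipt_suffix(None))            # 00000001
print(next_receipt_suffix("SIR-00000041"))  # 00000042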
def selectItemLocationsTuple(site_name, payload, convert=True):
    item_locations = ()
    database_config = config.config()
    select_item_location_sql = f"SELECT * FROM {site_name}_item_locations WHERE part_id = %s AND location_id = %s;"
    try:
        with psycopg2.connect(**database_config) as conn:
            with conn.cursor() as cur:
                cur.execute(select_item_location_sql, payload)
                rows = cur.fetchone()
                if rows and convert:
                    item_locations = postsqldb.tupleDictionaryFactory(cur.description, rows)
                elif rows and not convert:
                    item_locations = rows
        return item_locations
    except Exception as error:
        return error

def selectCostLayersTuple(site_name, payload, convert=True):
    cost_layers = ()
    database_config = config.config()
    select_cost_layers_sql = f"SELECT cl.* FROM {site_name}_item_locations il JOIN {site_name}_cost_layers cl ON cl.id = ANY(il.cost_layers) where il.id=%s;"
    try:
        with psycopg2.connect(**database_config) as conn:
            with conn.cursor() as cur:
                cur.execute(select_cost_layers_sql, payload)
                rows = cur.fetchall()
                if rows and convert:
                    cost_layers = rows
                    cost_layers = [postsqldb.tupleDictionaryFactory(cur.description, layer) for layer in rows]
                elif rows and not convert:
                    cost_layers = rows
        return cost_layers
    except Exception as error:
        return error

def selectLocationsTuple(site, payload, convert=True, conn=None):
    selected = ()
    self_conn = False
    sql = f"SELECT * FROM {site}_locations WHERE id=%s;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                selected = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                selected = rows

        if self_conn:
            conn.commit()
            conn.close()

        return selected
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)

def selectItemLocationsTuple(site_name, payload, convert=True, conn=None):
    item_locations = ()
    self_conn = False
    select_item_location_sql = f"SELECT * FROM {site_name}_item_locations WHERE part_id = %s AND location_id = %s;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(select_item_location_sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                item_locations = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                item_locations = rows

        if self_conn:
            conn.commit()
            conn.close()

        return item_locations

    except Exception as error:
        return error

def selectLinkedItemByBarcode(site, payload, convert=True, conn=None):
    item = ()
    self_conn = False
    sql = f"SELECT * FROM {site}_itemlinks WHERE barcode=%s;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                item = postsqldb.tupleDictionaryFactory(cur.description, rows)
            if rows and not convert:
                item = rows

        if self_conn:
            conn.commit()
            conn.close()

        return item
    except (Exception, psycopg2.DatabaseError) as error:
        raise postsqldb.DatabaseError(error, payload, sql)

def selectItemAllByID(site, payload, convert=True, conn=None):
    item = ()
    self_conn = False

    with open(f"application/poe/sql/getItemAllByID.sql", "r+") as file:
        getItemAllByID_sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(getItemAllByID_sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                item = postsqldb.tupleDictionaryFactory(cur.description, rows)
            if rows and not convert:
                item = rows

        if self_conn:
            conn.commit()
            conn.close()

        return item
    except (Exception, psycopg2.DatabaseError) as error:
        raise postsqldb.DatabaseError(error, payload, getItemAllByID_sql)

def selectItemAllByBarcode(site, payload, convert=True, conn=None):
    item = ()
    self_conn = False

    if convert:
        item = {}

    if not conn:
        database_config = config.config()
        conn = psycopg2.connect(**database_config)
        conn.autocommit = True
        self_conn = True

    linked_item = selectLinkedItemByBarcode(site, (payload[0],))

    if len(linked_item) > 1:
        item = selectItemAllByID(site, payload=(linked_item['link'], ), convert=convert)
        item['item_info']['uom_quantity'] = linked_item['conv_factor']
        if self_conn:
            conn.close()
        return item
    else:
        with open(f"application/poe/sql/getItemAllByBarcode.sql", "r+") as file:
            getItemAllByBarcode_sql = file.read().replace("%%site_name%%", site)
        try:

            with conn.cursor() as cur:
                cur.execute(getItemAllByBarcode_sql, payload)
                rows = cur.fetchone()
                if rows and convert:
                    item = postsqldb.tupleDictionaryFactory(cur.description, rows)
                if rows and not convert:
                    item = rows

            if self_conn:
                conn.close()
            return item
        except (Exception, psycopg2.DatabaseError) as error:
            raise postsqldb.DatabaseError(error, payload, getItemAllByBarcode_sql)

def insertCostLayersTuple(site, payload, convert=True, conn=None):
    cost_layer = ()
    self_conn = False

    with open(f"application/poe/sql/insertCostLayersTuple.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                cost_layer = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                cost_layer = rows

        if self_conn:
            conn.commit()
            conn.close()

        return cost_layer
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)

def insertTransactionsTuple(site, payload, convert=True, conn=None):
    # payload (tuple): (timestamp[timestamp], logistics_info_id[int], barcode[str], name[str],
    transaction = ()
    self_conn = False
    with open(f"application/poe/sql/insertTransactionsTuple.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                transaction = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                transaction = rows

        if self_conn:
            conn.commit()
            conn.close()

    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)
    return transaction

def insertReceiptsTuple(site, payload, convert=True, conn=None):
    receipt = ()
    self_conn = False
    with open(f"application/poe/sql/insertReceiptsTuple.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                receipt = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                receipt = rows

        if self_conn:
            conn.commit()
            conn.close()

        return receipt

    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)

def insertReceiptItemsTuple(site, payload, convert=True, conn=None):
    receipt_item = ()
    self_conn = False

    with open(f"application/poe/sql/insertReceiptItemsTuple.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                receipt_item = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                receipt_item = rows

        if self_conn:
            conn.commit()
            conn.close()

        return receipt_item

    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)

def updateCostLayersTuple(site, payload, convert=True, conn=None):
    cost_layer = ()
    self_conn = False

    set_clause, values = postsqldb.updateStringFactory(payload['update'])
    values.append(payload['id'])
    sql = f"UPDATE {site}_cost_layers SET {set_clause} WHERE id=%s RETURNING *;"

    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, values)
            rows = cur.fetchone()
            if rows and convert:
                cost_layer = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                cost_layer = rows

        if self_conn:
            conn.commit()
            conn.close()

        return cost_layer
    except Exception as error:
        return error

def updateItemLocation(site, payload, convert=True, conn=None):
    item_location = ()
    self_conn = False

    with open(f"application/poe/sql/updateItemLocation.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                item_location = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                item_location = rows

        if self_conn:
            conn.commit()
            conn.close()

        return item_location
    except Exception as error:
        return error

def deleteCostLayersTuple(site, payload, convert=True, conn=None):
    deleted = ()
    self_conn = False
    sql = f"WITH deleted_rows AS (DELETE FROM {site}_cost_layers WHERE id IN ({','.join(['%s'] * len(payload))}) RETURNING *) SELECT * FROM deleted_rows;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchall()
            if rows and convert:
                deleted = [postsqldb.tupleDictionaryFactory(cur.description, r) for r in rows]
            elif rows and not convert:
                deleted = rows

        if self_conn:
            conn.commit()
            conn.close()

        return deleted
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)
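Every helper above funnels cursor rows through postsqldb.tupleDictionaryFactory. Its implementation is not part of this diff, but from the call sites it behaves like a column-name-to-value zip over cur.description; a hedged stand-in for experimenting outside the app:

def tuple_dictionary_factory(description, row):
    # Assumption: approximates what the call sites above need -- pair each column name from the
    # cursor description (first element of each entry) with the matching value in the row tuple.
    return {col[0]: value for col, value in zip(description, row)}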
@ -1,136 +0,0 @@
|
|||||||
# 3RD PARTY IMPORTS
|
|
||||||
import datetime
|
|
||||||
import psycopg2
|
|
||||||
|
|
||||||
# APPLICATION IMPORTS
|
|
||||||
from application import postsqldb, database_payloads
|
|
||||||
from application.poe import poe_database
|
|
||||||
import config
|
|
||||||
|
|
||||||
""" This module will hold all the multilayerd/complex process used in the
|
|
||||||
point of ease module. """
|
|
||||||
|
|
||||||
|
|
||||||
def postTransaction(site_name, user_id, data: dict, conn=None):
|
|
||||||
""" dict_keys(['item_id', 'logistics_info_id', 'barcode', 'item_name', 'transaction_type',
|
|
||||||
    'quantity', 'description', 'cost', 'vendor', 'expires', 'location_id'])"""

    def quantityFactory(quantity_on_hand: float, quantity: float, transaction_type: str):
        if transaction_type == "Adjust In":
            quantity_on_hand += quantity
            return quantity_on_hand
        if transaction_type == "Adjust Out":
            quantity_on_hand -= quantity
            return quantity_on_hand
        raise Exception("The transaction type is wrong!")

    self_conn = False
    if not conn:
        database_config = config.config()
        conn = psycopg2.connect(**database_config)
        conn.autocommit = False
        self_conn = True

    transaction_time = datetime.datetime.now()

    cost_layer = postsqldb.CostLayerPayload(
        aquisition_date=transaction_time,
        quantity=float(data['quantity']),
        cost=float(data['cost']),
        currency_type="USD",
        vendor=int(data['vendor']),
        expires=data['expires']
    )
    transaction = postsqldb.TransactionPayload(
        timestamp=transaction_time,
        logistics_info_id=int(data['logistics_info_id']),
        barcode=data['barcode'],
        name=data['item_name'],
        transaction_type=data['transaction_type'],
        quantity=float(data['quantity']),
        description=data['description'],
        user_id=user_id,
    )

    location = poe_database.selectItemLocationsTuple(site_name, payload=(data['item_id'], data['location_id']), conn=conn)
    cost_layers: list = location['cost_layers']
    if data['transaction_type'] == "Adjust In":
        cost_layer = poe_database.insertCostLayersTuple(site_name, cost_layer.payload(), conn=conn)
        cost_layers.append(cost_layer['id'])

    if data['transaction_type'] == "Adjust Out":
        if float(location['quantity_on_hand']) < float(data['quantity']):
            return {"error": True, "message": "The quantity on hand in the chosen location is not enough to satisfy your transaction!"}
        cost_layers = poe_database.selectCostLayersTuple(site_name, payload=(location['id'], ))

        new_cost_layers = []
        qty = float(data['quantity'])
        for layer in cost_layers:
            if qty == 0.0:
                new_cost_layers.append(layer['id'])
            elif qty >= float(layer['quantity']):
                qty -= float(layer['quantity'])
                layer['quantity'] = 0.0
            else:
                layer['quantity'] -= qty
                new_cost_layers.append(layer['id'])
                poe_database.updateCostLayersTuple(site_name, {'id': layer['id'], 'update': {'quantity': layer['quantity']}}, conn=conn)
                qty = 0.0

            if layer['quantity'] == 0.0:
                poe_database.deleteCostLayersTuple(site_name, (layer['id'],), conn=conn)

        cost_layers = new_cost_layers

    quantity_on_hand = quantityFactory(float(location['quantity_on_hand']), data['quantity'], data['transaction_type'])

    updated_item_location_payload = (cost_layers, quantity_on_hand, data['item_id'], data['location_id'])
    poe_database.updateItemLocation(site_name, updated_item_location_payload, conn=conn)

    site_location = poe_database.selectLocationsTuple(site_name, (location['location_id'], ), conn=conn)

    transaction.data = {'location': site_location['uuid']}

    poe_database.insertTransactionsTuple(site_name, transaction.payload(), conn=conn)

    if self_conn:
        conn.commit()
        conn.close()

    return {"error": False, "message": "Transaction Successful!"}


def post_receipt(site_name, user_id, data: dict, conn=None):
    # data = {'items': items}
    self_conn = False
    items = data['items']
    if not conn:
        database_config = config.config()
        conn = psycopg2.connect(**database_config)
        conn.autocommit = False
        self_conn = True

    receipt_id = poe_database.request_receipt_id(conn, site_name)
    receipt_id = f"SIR-{receipt_id}"
    receipt = database_payloads.ReceiptPayload(
        receipt_id=receipt_id,
        submitted_by=user_id
    )
    receipt = poe_database.insertReceiptsTuple(site_name, receipt.payload(), conn=conn)

    for item in items:
        receipt_item = database_payloads.ReceiptItemPayload(
            type=item['type'],
            receipt_id=receipt['id'],
            barcode=item['item']['barcode'],
            name=item['item']['item_name'],
            qty=item['item']['qty'],
            uom=item['item']['uom'],
            data=item['item']['data']
        )
        poe_database.insertReceiptItemsTuple(site_name, receipt_item.payload(), conn=conn)

    if self_conn:
        conn.commit()
        conn.close()

    return {"error": False, "message": "Transaction Complete!"}
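For reference, a minimal standalone sketch of the FIFO cost-layer draw-down performed by the Adjust Out branch above. The draw_down helper and the layer values are invented for illustration; the real code walks database rows through poe_database.

# Illustrative only: same consumption order as the Adjust Out loop above,
# applied to in-memory dicts instead of cost-layer rows.
def draw_down(cost_layers, qty):
    kept = []                                  # ids of layers that still hold quantity
    for layer in cost_layers:
        if qty == 0.0:
            kept.append(layer['id'])           # untouched layer
        elif qty >= layer['quantity']:
            qty -= layer['quantity']           # layer fully consumed, drop it
            layer['quantity'] = 0.0
        else:
            layer['quantity'] -= qty           # layer partially consumed, keep it
            kept.append(layer['id'])
            qty = 0.0
    return kept

layers = [{'id': 1, 'quantity': 2.0}, {'id': 2, 'quantity': 5.0}]
print(draw_down(layers, 3.0))  # [2]: layer 1 is exhausted, layer 2 keeps 4.0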
@@ -1,75 +0,0 @@
WITH passed_id AS (SELECT id AS passed_id FROM %%site_name%%_items WHERE barcode=%s),
logistics_id AS (SELECT logistics_info_id FROM %%site_name%%_items WHERE id=(SELECT passed_id FROM passed_id)),
info_id AS (SELECT item_info_id FROM %%site_name%%_items WHERE id=(SELECT passed_id FROM passed_id)),
cte_item_info AS (
    SELECT
        %%site_name%%_item_info.*,
        row_to_json(units.*) as uom
    FROM %%site_name%%_item_info
    LEFT JOIN units ON %%site_name%%_item_info.uom = units.id
    WHERE %%site_name%%_item_info.id = (SELECT item_info_id FROM info_id)
),
cte_groups AS (
    SELECT
        %%site_name%%_groups.*,
        %%site_name%%_group_items.uuid,
        %%site_name%%_group_items.item_type,
        %%site_name%%_group_items.qty
    FROM %%site_name%%_groups
    JOIN %%site_name%%_group_items ON %%site_name%%_groups.id = %%site_name%%_group_items.gr_id
    WHERE %%site_name%%_group_items.item_id = (SELECT passed_id FROM passed_id)
),
cte_shopping_lists AS (
    SELECT
        %%site_name%%_shopping_lists.*,
        %%site_name%%_shopping_list_items.uuid,
        %%site_name%%_shopping_list_items.item_type,
        %%site_name%%_shopping_list_items.qty
    FROM %%site_name%%_shopping_lists
    JOIN %%site_name%%_shopping_list_items ON %%site_name%%_shopping_lists.id = %%site_name%%_shopping_list_items.sl_id
    WHERE %%site_name%%_shopping_list_items.item_id = (SELECT passed_id FROM passed_id)
),
cte_itemlinks AS (
    SELECT * FROM %%site_name%%_itemlinks WHERE link=(SELECT passed_id FROM passed_id)
),
cte_item_locations AS (
    SELECT * FROM %%site_name%%_item_locations
    LEFT JOIN %%site_name%%_locations ON %%site_name%%_locations.id = %%site_name%%_item_locations.location_id
    WHERE part_id = (SELECT passed_id FROM passed_id)
),
cte_logistics_info AS (
    SELECT
        li.*,
        row_to_json(pl) AS primary_location,
        row_to_json(ail) AS auto_issue_location,
        row_to_json(pz) AS primary_zone,
        row_to_json(aiz) AS auto_issue_zone
    FROM %%site_name%%_logistics_info AS li
    LEFT JOIN %%site_name%%_locations AS pl ON li.primary_location = pl.id
    LEFT JOIN %%site_name%%_locations AS ail ON li.auto_issue_location = ail.id
    LEFT JOIN %%site_name%%_zones AS pz ON li.primary_zone = pz.id
    LEFT JOIN %%site_name%%_zones AS aiz ON li.auto_issue_zone = aiz.id
    WHERE li.id=(SELECT logistics_info_id FROM logistics_id)
)

SELECT
    (SELECT passed_id FROM passed_id) AS passed_id,
    %%site_name%%_items.*,
    (SELECT COALESCE(row_to_json(logis), '{}') FROM cte_logistics_info logis) AS logistics_info,
    row_to_json(%%site_name%%_food_info.*) as food_info,
    row_to_json(%%site_name%%_brands.*) as brand,
    (SELECT COALESCE(row_to_json(ii), '{}') FROM cte_item_info ii) AS item_info,
    (SELECT COALESCE(array_agg(row_to_json(g)), '{}') FROM cte_groups g) AS item_groups,
    (SELECT COALESCE(array_agg(row_to_json(sl)), '{}') FROM cte_shopping_lists sl) AS item_shopping_lists,
    (SELECT COALESCE(array_agg(row_to_json(il)), '{}') FROM cte_itemlinks il) AS linked_items,
    (SELECT COALESCE(array_agg(row_to_json(ils)), '{}') FROM cte_item_locations ils) AS item_locations
FROM %%site_name%%_items
LEFT JOIN %%site_name%%_item_info ON %%site_name%%_items.item_info_id = %%site_name%%_item_info.id
LEFT JOIN %%site_name%%_food_info ON %%site_name%%_items.food_info_id = %%site_name%%_food_info.id
LEFT JOIN %%site_name%%_brands ON %%site_name%%_items.brand = %%site_name%%_brands.id
LEFT JOIN units ON %%site_name%%_item_info.uom = units.id
LEFT JOIN cte_groups ON %%site_name%%_items.id = cte_groups.id
LEFT JOIN cte_shopping_lists ON %%site_name%%_items.id = cte_shopping_lists.id
WHERE %%site_name%%_items.id=(SELECT passed_id FROM passed_id)
GROUP BY
    %%site_name%%_items.id, %%site_name%%_item_info.id, %%site_name%%_food_info.id, %%site_name%%_brands.id;
@@ -1,86 +0,0 @@
WITH passed_id AS (SELECT %s AS passed_id),
logistics_id AS (SELECT logistics_info_id FROM %%site_name%%_items WHERE id=(SELECT passed_id FROM passed_id)),
info_id AS (SELECT item_info_id FROM %%site_name%%_items WHERE id=(SELECT passed_id FROM passed_id)),
cte_conversions AS (
    SELECT
        %%site_name%%_conversions.id as conv_id,
        %%site_name%%_conversions.conv_factor as conv_factor,
        units.* as uom
    FROM %%site_name%%_conversions
    LEFT JOIN units ON %%site_name%%_conversions.uom_id = units.id
    WHERE %%site_name%%_conversions.item_id = (SELECT passed_id FROM passed_id)
),
cte_item_info AS (
    SELECT
        %%site_name%%_item_info.*,
        row_to_json(units.*) as uom,
        COALESCE((SELECT json_agg(convs) FROM cte_conversions convs), '[]'::json) AS conversions,
        COALESCE((SELECT json_agg(p.*) FROM %%site_name%%_sku_prefix as p WHERE p.id = ANY(%%site_name%%_item_info.prefixes)), '[]'::json) as prefixes
    FROM %%site_name%%_item_info
    LEFT JOIN units ON %%site_name%%_item_info.uom = units.id
    WHERE %%site_name%%_item_info.id = (SELECT item_info_id FROM info_id)
),
cte_groups AS (
    SELECT
        %%site_name%%_groups.*,
        %%site_name%%_group_items.uuid,
        %%site_name%%_group_items.item_type,
        %%site_name%%_group_items.qty
    FROM %%site_name%%_groups
    JOIN %%site_name%%_group_items ON %%site_name%%_groups.id = %%site_name%%_group_items.gr_id
    WHERE %%site_name%%_group_items.item_id = (SELECT passed_id FROM passed_id)
),
cte_shopping_lists AS (
    SELECT
        %%site_name%%_shopping_lists.*,
        %%site_name%%_shopping_list_items.uuid,
        %%site_name%%_shopping_list_items.item_type,
        %%site_name%%_shopping_list_items.qty
    FROM %%site_name%%_shopping_lists
    JOIN %%site_name%%_shopping_list_items ON %%site_name%%_shopping_lists.id = %%site_name%%_shopping_list_items.sl_id
    WHERE %%site_name%%_shopping_list_items.item_id = (SELECT passed_id FROM passed_id)
),
cte_itemlinks AS (
    SELECT * FROM %%site_name%%_itemlinks WHERE link=(SELECT passed_id FROM passed_id)
),
cte_item_locations AS (
    SELECT * FROM %%site_name%%_item_locations
    LEFT JOIN %%site_name%%_locations ON %%site_name%%_locations.id = %%site_name%%_item_locations.location_id
    WHERE part_id = (SELECT passed_id FROM passed_id)
),
cte_logistics_info AS (
    SELECT
        li.*,
        row_to_json(pl) AS primary_location,
        row_to_json(ail) AS auto_issue_location,
        row_to_json(pz) AS primary_zone,
        row_to_json(aiz) AS auto_issue_zone
    FROM %%site_name%%_logistics_info AS li
    LEFT JOIN %%site_name%%_locations AS pl ON li.primary_location = pl.id
    LEFT JOIN %%site_name%%_locations AS ail ON li.auto_issue_location = ail.id
    LEFT JOIN %%site_name%%_zones AS pz ON li.primary_zone = pz.id
    LEFT JOIN %%site_name%%_zones AS aiz ON li.auto_issue_zone = aiz.id
    WHERE li.id=(SELECT logistics_info_id FROM logistics_id)
)

SELECT
    (SELECT passed_id FROM passed_id) AS passed_id,
    %%site_name%%_items.*,
    (SELECT COALESCE(row_to_json(logis), '{}') FROM cte_logistics_info logis) AS logistics_info,
    row_to_json(%%site_name%%_food_info.*) as food_info,
    row_to_json(%%site_name%%_brands.*) as brand,
    (SELECT COALESCE(row_to_json(ii), '{}') FROM cte_item_info ii) AS item_info,
    (SELECT COALESCE(array_agg(row_to_json(g)), '{}') FROM cte_groups g) AS item_groups,
    (SELECT COALESCE(array_agg(row_to_json(sl)), '{}') FROM cte_shopping_lists sl) AS item_shopping_lists,
    (SELECT COALESCE(array_agg(row_to_json(il)), '{}') FROM cte_itemlinks il) AS linked_items,
    (SELECT COALESCE(array_agg(row_to_json(ils)), '{}') FROM cte_item_locations ils) AS item_locations
FROM %%site_name%%_items
LEFT JOIN %%site_name%%_item_info ON %%site_name%%_items.item_info_id = %%site_name%%_item_info.id
LEFT JOIN %%site_name%%_food_info ON %%site_name%%_items.food_info_id = %%site_name%%_food_info.id
LEFT JOIN %%site_name%%_brands ON %%site_name%%_items.brand = %%site_name%%_brands.id
LEFT JOIN units ON %%site_name%%_item_info.uom = units.id
LEFT JOIN cte_groups ON %%site_name%%_items.id = cte_groups.id
LEFT JOIN cte_shopping_lists ON %%site_name%%_items.id = cte_shopping_lists.id
WHERE %%site_name%%_items.id=(SELECT passed_id FROM passed_id)
GROUP BY
    %%site_name%%_items.id, %%site_name%%_item_info.id, %%site_name%%_food_info.id, %%site_name%%_brands.id;
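Both item queries above fold the related rows into JSON via row_to_json/array_agg, so one fetch returns a fully nested item record. A sketch of how a caller might run the id-based variant with psycopg2; the substitute_site and fetch_item helpers are assumptions for illustration, only the %%site_name%% placeholder convention comes from the queries themselves.

import psycopg2
import psycopg2.extras

def substitute_site(sql: str, site_name: str) -> str:
    # Assumption: the %%site_name%% placeholder is swapped in before execution,
    # leaving the single %s parameter for psycopg2 to bind.
    return sql.replace("%%site_name%%", site_name)

def fetch_item(conn, site_name: str, item_id: int, sql_template: str):
    sql = substitute_site(sql_template, site_name)
    with conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cur:
        cur.execute(sql, (item_id,))
        row = cur.fetchone()
    # row['item_info'], row['logistics_info'], row['item_locations'], etc.
    # arrive already aggregated as JSON by the query.
    return row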
@@ -1,4 +0,0 @@
INSERT INTO %%site_name%%_cost_layers
    (aquisition_date, quantity, cost, currency_type, expires, vendor)
VALUES (%s, %s, %s, %s, %s, %s)
RETURNING *;
@@ -1,4 +0,0 @@
INSERT INTO %%site_name%%_receipt_items
    (type, receipt_id, barcode, name, qty, uom, data, status)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
RETURNING *;
@@ -1,4 +0,0 @@
INSERT INTO %%site_name%%_receipts
    (receipt_id, receipt_status, date_submitted, submitted_by, vendor_id, files)
VALUES (%s, %s, %s, %s, %s, %s)
RETURNING *;
@@ -1,5 +0,0 @@
INSERT INTO %%site_name%%_transactions
    (timestamp, logistics_info_id, barcode, name, transaction_type,
     quantity, description, user_id, data)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
RETURNING *;
@@ -1,4 +0,0 @@
UPDATE %%site_name%%_item_locations
SET cost_layers = %s, quantity_on_hand = %s
WHERE part_id=%s AND location_id=%s
RETURNING *;
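These statements use positional %s placeholders and RETURNING *, so the *Payload dataclasses can be handed over as plain tuples and the freshly written row read back in the same round trip. A sketch, reusing the assumed substitute_site helper from the earlier example, of how the cost-layer insert might be executed; the values are hypothetical and only follow the column order shown above.

import datetime

# Hypothetical values in the column order of the cost-layer INSERT above.
cost_layer_values = (
    datetime.datetime.now(),  # aquisition_date
    4.0,                      # quantity
    2.59,                     # cost
    "USD",                    # currency_type
    None,                     # expires
    1,                        # vendor
)

def insert_cost_layer(conn, site_name, sql_template, values):
    with conn.cursor() as cur:
        cur.execute(substitute_site(sql_template, site_name), values)
        return cur.fetchone()  # RETURNING * hands back the new row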
@ -1,241 +0,0 @@
|
|||||||
var pagination_current = 1;
|
|
||||||
var search_string = '';
|
|
||||||
var defaqult_limit = 2;
|
|
||||||
var pagination_end = 1;
|
|
||||||
var item;
|
|
||||||
|
|
||||||
async function changeSite(site){
|
|
||||||
console.log(site)
|
|
||||||
const response = await fetch(`/changeSite`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
body: JSON.stringify({
|
|
||||||
site: site,
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
data = await response.json();
|
|
||||||
transaction_status = "success"
|
|
||||||
if (data.error){
|
|
||||||
transaction_status = "danger"
|
|
||||||
}
|
|
||||||
|
|
||||||
UIkit.notification({
|
|
||||||
message: data.message,
|
|
||||||
status: transaction_status,
|
|
||||||
pos: 'top-right',
|
|
||||||
timeout: 5000
|
|
||||||
});
|
|
||||||
location.reload(true)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
async function getItemBarcode(barcode) {
|
|
||||||
console.log(`selected item: ${barcode}`)
|
|
||||||
const url = new URL('/poe/getItem/barcode', window.location.origin);
|
|
||||||
url.searchParams.append('barcode', barcode);
|
|
||||||
const response = await fetch(url);
|
|
||||||
data = await response.json();
|
|
||||||
return data;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function submitScanReceipt(items) {
|
|
||||||
const response = await fetch(`/poe/postReceipt`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
body: JSON.stringify({
|
|
||||||
items: items
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
data = await response.json();
|
|
||||||
transaction_status = "success"
|
|
||||||
if (data.error){
|
|
||||||
transaction_status = "danger"
|
|
||||||
}
|
|
||||||
|
|
||||||
UIkit.notification({
|
|
||||||
message: data.message,
|
|
||||||
status: transaction_status,
|
|
||||||
pos: 'top-right',
|
|
||||||
timeout: 5000
|
|
||||||
});
|
|
||||||
|
|
||||||
return data.error
|
|
||||||
}
|
|
||||||
|
|
||||||
var openedReceipt = false
|
|
||||||
async function startReceipt() {
|
|
||||||
openedReceipt = true
|
|
||||||
document.getElementById('barcode-input').classList.remove('uk-disabled')
|
|
||||||
document.getElementById('barcode-table').classList.remove('uk-disabled')
|
|
||||||
|
|
||||||
document.getElementById('receiptStart').classList.add('uk-disabled')
|
|
||||||
document.getElementById('receiptComplete').classList.remove('uk-disabled')
|
|
||||||
document.getElementById('receiptClose').classList.remove('uk-disabled')
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
async function completeReceipt() {
|
|
||||||
openedReceipt = false
|
|
||||||
document.getElementById('barcode-input').classList.add('uk-disabled')
|
|
||||||
document.getElementById('barcode-table').classList.add('uk-disabled')
|
|
||||||
|
|
||||||
document.getElementById('receiptStart').classList.remove('uk-disabled')
|
|
||||||
document.getElementById('receiptComplete').classList.add('uk-disabled')
|
|
||||||
document.getElementById('receiptClose').classList.add('uk-disabled')
|
|
||||||
|
|
||||||
await submitScanReceipt(scannedReceiptItems)
|
|
||||||
let scanReceiptTableBody = document.getElementById("scanReceiptTableBody")
|
|
||||||
scanReceiptTableBody.innerHTML = ""
|
|
||||||
|
|
||||||
scannedReceiptItems = Array()
|
|
||||||
}
|
|
||||||
|
|
||||||
async function closeReceipt(){
|
|
||||||
openedReceipt = false
|
|
||||||
document.getElementById('barcode-input').classList.add('uk-disabled')
|
|
||||||
document.getElementById('barcode-table').classList.add('uk-disabled')
|
|
||||||
|
|
||||||
document.getElementById('receiptStart').classList.remove('uk-disabled')
|
|
||||||
document.getElementById('receiptComplete').classList.add('uk-disabled')
|
|
||||||
document.getElementById('receiptClose').classList.add('uk-disabled')
|
|
||||||
|
|
||||||
let scanReceiptTableBody = document.getElementById("scanReceiptTableBody")
|
|
||||||
scanReceiptTableBody.innerHTML = ""
|
|
||||||
|
|
||||||
scannedReceiptItems = Array()
|
|
||||||
}
|
|
||||||
|
|
||||||
var scannedReceiptItems = Array();
|
|
||||||
async function addToReceipt(event) {
|
|
||||||
if (event.key == "Enter"){
|
|
||||||
let barcode = document.getElementById('barcode-scan-receipt').value
|
|
||||||
let data = await getItemBarcode(barcode)
|
|
||||||
let scannedItem = data.item
|
|
||||||
if(scannedItem){
|
|
||||||
let expires = scannedItem.food_info.expires
|
|
||||||
if(scannedItem.food_info.expires){
|
|
||||||
let today = new Date();
|
|
||||||
today.setDate(today.getDate() + Number(scannedItem.food_info.default_expiration))
|
|
||||||
expires = today.toISOString().split('T')[0];
|
|
||||||
}
|
|
||||||
scannedReceiptItems.push({item: {
|
|
||||||
barcode: scannedItem.barcode,
|
|
||||||
item_name: scannedItem.item_name,
|
|
||||||
qty: scannedItem.item_info.uom_quantity,
|
|
||||||
uom: scannedItem.item_info.uom.id,
|
|
||||||
data: {cost: scannedItem.item_info.cost, expires: expires}
|
|
||||||
}, type: 'sku'})
|
|
||||||
document.getElementById('barcode-scan-receipt').value = ""
|
|
||||||
} else {
|
|
||||||
scannedReceiptItems.push({item: {
|
|
||||||
barcode: `%${barcode}%`,
|
|
||||||
item_name: "unknown",
|
|
||||||
qty: 1,
|
|
||||||
uom: 1,
|
|
||||||
data: {'cost': 0.00, 'expires': false}
|
|
||||||
}, type: 'new sku'})
|
|
||||||
document.getElementById('barcode-scan-receipt').value = ""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
await replenishScannedReceiptTable(scannedReceiptItems)
|
|
||||||
}
|
|
||||||
|
|
||||||
async function replenishScannedReceiptTable(items) {
|
|
||||||
let scanReceiptTableBody = document.getElementById("scanReceiptTableBody")
|
|
||||||
scanReceiptTableBody.innerHTML = ""
|
|
||||||
|
|
||||||
for(let i = 0; i < items.length; i++){
|
|
||||||
let tableRow = document.createElement('tr')
|
|
||||||
|
|
||||||
let typeCell = document.createElement('td')
|
|
||||||
typeCell.innerHTML = items[i].type
|
|
||||||
let barcodeCell = document.createElement('td')
|
|
||||||
barcodeCell.innerHTML = items[i].item.barcode
|
|
||||||
let nameCell = document.createElement('td')
|
|
||||||
nameCell.innerHTML = items[i].item.item_name
|
|
||||||
|
|
||||||
let operationsCell = document.createElement('td')
|
|
||||||
|
|
||||||
let editOp = document.createElement('a')
|
|
||||||
editOp.style = "margin-right: 5px;"
|
|
||||||
editOp.setAttribute('class', 'uk-button uk-button-small uk-button-default')
|
|
||||||
editOp.setAttribute('uk-icon', 'icon: pencil')
|
|
||||||
editOp.onclick = async function () {
|
|
||||||
await openLineEditModal(i, items[i])
|
|
||||||
}
|
|
||||||
|
|
||||||
let deleteOp = document.createElement('a')
|
|
||||||
deleteOp.setAttribute('class', 'uk-button uk-button-small uk-button-default')
|
|
||||||
deleteOp.setAttribute('uk-icon', 'icon: trash')
|
|
||||||
deleteOp.onclick = async function() {
|
|
||||||
scannedReceiptItems.splice(i, 1)
|
|
||||||
await replenishScannedReceiptTable(scannedReceiptItems)
|
|
||||||
}
|
|
||||||
|
|
||||||
operationsCell.append(editOp, deleteOp)
|
|
||||||
|
|
||||||
operationsCell.classList.add("uk-flex")
|
|
||||||
operationsCell.classList.add("uk-flex-right")
|
|
||||||
|
|
||||||
tableRow.append(typeCell, barcodeCell, nameCell, operationsCell)
|
|
||||||
scanReceiptTableBody.append(tableRow)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function openLineEditModal(ind, line_data) {
|
|
||||||
console.log(line_data)
|
|
||||||
document.getElementById('lineName').value = line_data.item.item_name
|
|
||||||
document.getElementById('lineQty').value = line_data.item.qty
|
|
||||||
document.getElementById('lineUOM').value = line_data.item.uom
|
|
||||||
document.getElementById('lineCost').value = line_data.item.data.cost
|
|
||||||
document.getElementById('lineExpires').value = line_data.item.data.expires
|
|
||||||
if(line_data.type === 'sku'){
|
|
||||||
document.getElementById('lineUOM').classList.add('uk-disabled')
|
|
||||||
} else {
|
|
||||||
document.getElementById('lineUOM').classList.remove('uk-disabled')
|
|
||||||
}
|
|
||||||
|
|
||||||
if(!line_data.item.data.expires){
|
|
||||||
document.getElementById('lineExpires').classList.add('uk-disabled')
|
|
||||||
} else {
|
|
||||||
document.getElementById('lineExpires').classList.remove('uk-disabled')
|
|
||||||
}
|
|
||||||
|
|
||||||
document.getElementById('saveLineButton').onclick = async function() {
|
|
||||||
line_data.item.item_name = document.getElementById('lineName').value
|
|
||||||
line_data.item.qty = document.getElementById('lineQty').value
|
|
||||||
line_data.item.uom = document.getElementById('lineUOM').value
|
|
||||||
line_data.item.data.cost = document.getElementById('lineCost').value
|
|
||||||
if(line_data.item.data.expires){
|
|
||||||
line_data.item.data.expires = document.getElementById('lineExpires').value
|
|
||||||
}
|
|
||||||
|
|
||||||
scannedReceiptItems[ind] = line_data
|
|
||||||
UIkit.modal(document.getElementById("lineEditModal")).hide();
|
|
||||||
await replenishScannedReceiptTable(scannedReceiptItems)
|
|
||||||
}
|
|
||||||
|
|
||||||
UIkit.modal(document.getElementById("lineEditModal")).show();
|
|
||||||
}
|
|
||||||
|
|
||||||
var mode = false
|
|
||||||
async function toggleDarkMode() {
|
|
||||||
let darkMode = document.getElementById("dark-mode");
|
|
||||||
darkMode.disabled = !darkMode.disabled;
|
|
||||||
mode = !mode;
|
|
||||||
if(mode){
|
|
||||||
document.getElementById('modeToggle').innerHTML = "light_mode"
|
|
||||||
document.getElementById('main_html').classList.add('uk-light')
|
|
||||||
} else {
|
|
||||||
document.getElementById('modeToggle').innerHTML = "dark_mode"
|
|
||||||
document.getElementById('main_html').classList.remove('uk-light')
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
@ -1,165 +0,0 @@
|
|||||||
async function changeSite(site){
|
|
||||||
console.log(site)
|
|
||||||
const response = await fetch(`/changeSite`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
body: JSON.stringify({
|
|
||||||
site: site,
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
data = await response.json();
|
|
||||||
transaction_status = "success"
|
|
||||||
if (data.error){
|
|
||||||
transaction_status = "danger"
|
|
||||||
}
|
|
||||||
|
|
||||||
UIkit.notification({
|
|
||||||
message: data.message,
|
|
||||||
status: transaction_status,
|
|
||||||
pos: 'top-right',
|
|
||||||
timeout: 5000
|
|
||||||
});
|
|
||||||
location.reload(true)
|
|
||||||
}
|
|
||||||
|
|
||||||
var scannedItems = Array();
|
|
||||||
const queueLimit = 49; // 49 should be default
|
|
||||||
|
|
||||||
async function addToQueue(event) {
|
|
||||||
if (event.key == "Enter"){
|
|
||||||
let data = await getItemBarcode(document.getElementById('barcode-scan').value)
|
|
||||||
let scannedItem = data.item
|
|
||||||
if(data.error){
|
|
||||||
UIkit.notification({
|
|
||||||
message: data.message,
|
|
||||||
status: "danger",
|
|
||||||
pos: 'top-right',
|
|
||||||
timeout: 5000
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if(scannedItems.length > queueLimit){
|
|
||||||
scannedItems.shift()
|
|
||||||
}
|
|
||||||
if(!Array.isArray(scannedItem) && !data.error){
|
|
||||||
let status = await submitScanTransaction(scannedItem)
|
|
||||||
scannedItems.push({'item': scannedItem, 'type': `${document.getElementById('scan_trans_type').value}`, 'error': status})
|
|
||||||
document.getElementById('barcode-scan').value = ""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
await replenishScanTable()
|
|
||||||
}
|
|
||||||
|
|
||||||
async function getItemBarcode(barcode) {
|
|
||||||
console.log(`selected item: ${barcode}`)
|
|
||||||
const url = new URL('/poe/getItem/barcode', window.location.origin);
|
|
||||||
url.searchParams.append('barcode', barcode);
|
|
||||||
const response = await fetch(url);
|
|
||||||
data = await response.json();
|
|
||||||
return data;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function submitScanTransaction(scannedItem) {
|
|
||||||
/// I need to find the location that matches the items auto issue location id
|
|
||||||
|
|
||||||
let trans_type = document.getElementById('scan_trans_type').value
|
|
||||||
let scan_transaction_item_location_id = 0
|
|
||||||
let comparator = 0
|
|
||||||
|
|
||||||
if (trans_type === "Adjust In"){
|
|
||||||
comparator = scannedItem.logistics_info.primary_location.id
|
|
||||||
} else if (trans_type === "Adjust Out"){
|
|
||||||
comparator = scannedItem.logistics_info.auto_issue_location.id
|
|
||||||
}
|
|
||||||
|
|
||||||
for (let i = 0; i < scannedItem.item_locations.length; i++){
|
|
||||||
if (scannedItem.item_locations[i].location_id === comparator){
|
|
||||||
scan_transaction_item_location_id = scannedItem.item_locations[i].id
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const response = await fetch(`/poe/postTransaction`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
body: JSON.stringify({
|
|
||||||
item_id: scannedItem.id,
|
|
||||||
logistics_info_id: scannedItem.logistics_info_id,
|
|
||||||
barcode: scannedItem.barcode,
|
|
||||||
item_name: scannedItem.item_name,
|
|
||||||
transaction_type: document.getElementById('scan_trans_type').value,
|
|
||||||
quantity: scannedItem.item_info.uom_quantity,
|
|
||||||
description: "",
|
|
||||||
cost: parseFloat(scannedItem.item_info.cost),
|
|
||||||
vendor: 0,
|
|
||||||
expires: null,
|
|
||||||
location_id: scan_transaction_item_location_id
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
data = await response.json();
|
|
||||||
transaction_status = "success"
|
|
||||||
if (data.error){
|
|
||||||
transaction_status = "danger"
|
|
||||||
}
|
|
||||||
|
|
||||||
UIkit.notification({
|
|
||||||
message: data.message,
|
|
||||||
status: transaction_status,
|
|
||||||
pos: 'top-right',
|
|
||||||
timeout: 5000
|
|
||||||
});
|
|
||||||
|
|
||||||
return data.error
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
async function replenishScanTable() {
|
|
||||||
let scanTableBody = document.getElementById("scanTableBody")
|
|
||||||
scanTableBody.innerHTML = ""
|
|
||||||
|
|
||||||
let reversedScannedItems = scannedItems.slice().reverse()
|
|
||||||
|
|
||||||
for(let i = 0; i < reversedScannedItems.length; i++){
|
|
||||||
let tableRow = document.createElement('tr')
|
|
||||||
|
|
||||||
let icon = `<span uk-icon="check"></span>`
|
|
||||||
if(reversedScannedItems[i].error){
|
|
||||||
icon = `<span uk-icon="warning"></span>`
|
|
||||||
}
|
|
||||||
|
|
||||||
let statusCell = document.createElement('td')
|
|
||||||
statusCell.innerHTML = icon
|
|
||||||
let barcodeCell = document.createElement('td')
|
|
||||||
barcodeCell.innerHTML = reversedScannedItems[i].item.barcode
|
|
||||||
let nameCell = document.createElement('td')
|
|
||||||
nameCell.innerHTML = reversedScannedItems[i].item.item_name
|
|
||||||
let typeCell = document.createElement('td')
|
|
||||||
typeCell.innerHTML = reversedScannedItems[i].type
|
|
||||||
let locationCell = document.createElement('td')
|
|
||||||
if (reversedScannedItems[i].type === "Adjust In"){
|
|
||||||
locationCell.innerHTML = reversedScannedItems[i].item.logistics_info.primary_location.uuid
|
|
||||||
} else {
|
|
||||||
locationCell.innerHTML = reversedScannedItems[i].item.logistics_info.auto_issue_location.uuid
|
|
||||||
}
|
|
||||||
|
|
||||||
tableRow.append(statusCell, barcodeCell, nameCell, typeCell, locationCell)
|
|
||||||
scanTableBody.append(tableRow)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var mode = false
|
|
||||||
async function toggleDarkMode() {
|
|
||||||
let darkMode = document.getElementById("dark-mode");
|
|
||||||
darkMode.disabled = !darkMode.disabled;
|
|
||||||
mode = !mode;
|
|
||||||
if(mode){
|
|
||||||
document.getElementById('modeToggle').innerHTML = "light_mode"
|
|
||||||
document.getElementById('main_html').classList.add('uk-light')
|
|
||||||
} else {
|
|
||||||
document.getElementById('modeToggle').innerHTML = "dark_mode"
|
|
||||||
document.getElementById('main_html').classList.remove('uk-light')
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
@ -1,193 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html lang="en" dir="ltr" id="main_html">
|
|
||||||
<head>
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" charset="utf-8" />
|
|
||||||
<title id="title"></title>
|
|
||||||
<!-- Material Icons -->
|
|
||||||
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet" />
|
|
||||||
<!-- Material Symbols - Outlined Set -->
|
|
||||||
<link href="https://fonts.googleapis.com/css2?family=Material+Symbols+Outlined" rel="stylesheet" />
|
|
||||||
<!-- Material Symbols - Rounded Set -->
|
|
||||||
<link href="https://fonts.googleapis.com/css2?family=Material+Symbols+Rounded" rel="stylesheet" />
|
|
||||||
<!-- Material Symbols - Sharp Set -->
|
|
||||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/uikit.min.css') }}"/>
|
|
||||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/pantry.css') }}"/>
|
|
||||||
|
|
||||||
<link id="dark-mode" rel="stylesheet" href="{{ url_for('static', filename='css/dark-mode.css') }}" disabled/>
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
</head>
|
|
||||||
<style>
|
|
||||||
.custom_row:hover{
|
|
||||||
background-color: rgb(230, 230, 230) !important;
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
<body>
|
|
||||||
<nav class="uk-navbar-container">
|
|
||||||
<div class="uk-container uk-container-expand">
|
|
||||||
<div class="uk-navbar uk-navbar-primary">
|
|
||||||
<!-- Application Navigation-->
|
|
||||||
<div class="uk-navbar-left">
|
|
||||||
<ul class="uk-navbar-nav">
|
|
||||||
<li>
|
|
||||||
<a href>Apps</a>
|
|
||||||
<div class="uk-navbar-dropdown" uk-drop="mode: click; multi:false">
|
|
||||||
<ul class="uk-nav uk-navbar-dropdown-nav">
|
|
||||||
<li><a href="/recipes">Recipes</a></li>
|
|
||||||
<li><a href="/shopping-lists">Shopping Lists</a></li>
|
|
||||||
<li class="uk-nav-header">Logistics</li>
|
|
||||||
<li><a href="/items">Items</a></li>
|
|
||||||
<li><a href="/items/transaction">Transaction</a></li>
|
|
||||||
<li><a href="/receipts">Receipts</a></li>
|
|
||||||
<li class="uk-nav-header">Points of Ease</li>
|
|
||||||
<li><a href="/poe/scanner">Transaction Scanner</a></li>
|
|
||||||
<li class="uk-active"><a href="/poe/receipts">Receipts Scanner</a></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
|
|
||||||
</div>
|
|
||||||
<!-- Breadcrumbs Navigation -->
|
|
||||||
<div class="uk-navbar-center uk-visible@m">
|
|
||||||
<ul class="uk-breadcrumb uk-margin-remove">
|
|
||||||
<li style="cursor: pointer;"><span><strong>{{current_site}}</strong></span>
|
|
||||||
<div uk-dropdown="mode: hover">
|
|
||||||
<ul class="uk-nav uk-dropdown-nav">
|
|
||||||
<li class="uk-nav-header">Select Site</li>
|
|
||||||
<li class="uk-nav-divider"></li>
|
|
||||||
{% for site in sites %}
|
|
||||||
{% if site == current_site %}
|
|
||||||
<li><a class="uk-disabled" href="#">{{site}}</a></li>
|
|
||||||
{% else %}
|
|
||||||
<li><a onclick="changeSite('{{site}}')">{{site}}</a></li>
|
|
||||||
{% endif %}
|
|
||||||
{% endfor %}
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li style="cursor: default; user-select: none;" class="uk-disabled"><span>Point of Ease</span></li>
|
|
||||||
<li class="uk-disabled"><span>Scan to Receipt</span></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<!-- Profile/Management Navigation-->
|
|
||||||
<div class="uk-navbar-right">
|
|
||||||
<ul class="uk-navbar-nav">
|
|
||||||
<li>
|
|
||||||
<a href="#">
|
|
||||||
<img src="{{session['user']['profile_pic_url']}}" alt="Profile Picture" class="profile-pic uk-visible@m" style="width: 40px; height: 40px; border-radius: 50%; margin-right: 5px;">
|
|
||||||
{{username}}
|
|
||||||
</a>
|
|
||||||
<div class="uk-navbar-dropdown" uk-drop="mode: click; multi:false">
|
|
||||||
<ul class="uk-nav uk-navbar-dropdown-nav">
|
|
||||||
<li><a href="/profile">Profile</a></li>
|
|
||||||
<li><a onclick="toggleDarkMode()">Dark Mode</a></li>
|
|
||||||
<li><a href="/site-management">Site Management</a></li>
|
|
||||||
<li><a href="/administration">System Management</a></li>
|
|
||||||
<li><a href="/access/logout">Logout</a></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</nav>
|
|
||||||
<div class="uk-container uk-section">
|
|
||||||
<div class="uk-grid-small" uk-grid>
|
|
||||||
<div class="uk-width-1-1">
|
|
||||||
<p class="uk-text-meta">Using this method of entering receipts does so by adding each barcode to a list and once the receipt has been built the
|
|
||||||
the system will then add the receipt to the system. Its important that you have the Barcode input focused and use a scanner that places the
|
|
||||||
characters into the field before it finishes up with a press of the ENTER key.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1" uk-grid>
|
|
||||||
<div>
|
|
||||||
<button id="receiptStart" onclick="startReceipt()" class="uk-button uk-button-default">Start Receipt</button>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<button id="receiptComplete" onclick="completeReceipt()" class="uk-button uk-button-default uk-disabled">Complete Receipt</button>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<button id="receiptClose" onclick="closeReceipt()" class="uk-button uk-button-default uk-disabled">Cancel Receipt</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1">
|
|
||||||
<hr class="uk-divider-icon">
|
|
||||||
</div>
|
|
||||||
<div id="barcode-input" class="uk-width-1-1 uk-flex uk-flex-left uk-disabled" uk-grid>
|
|
||||||
<div class="uk-width-1-3@m">
|
|
||||||
<label class="uk-form-label" for="barcode-scan-receipt">Barcode</label>
|
|
||||||
<input onkeydown="addToReceipt(event)" id="barcode-scan-receipt" class="uk-input uk-flex uk-flex-bottom" type="text">
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="barcode-table" class="uk-width-1-1 uk-disabled">
|
|
||||||
<table class="uk-table uk-table-striped uk-table-hover">
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th class="uk-table-shrink">Type</th>
|
|
||||||
<th class="uk-table-shrink">Barcode</th>
|
|
||||||
<th>Name</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody id="scanReceiptTableBody">
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<!-- Line Edit Modal USED FOR RECEIPTS-->
|
|
||||||
<div id="lineEditModal" uk-modal>
|
|
||||||
<div class="uk-modal-dialog uk-modal-body">
|
|
||||||
<h2 class="uk-modal-title">Edit Line...</h2>
|
|
||||||
<p class="uk-text-meta">Edit any fields here for the selected Line and then save them.</p>
|
|
||||||
<table class="uk-table uk-table-responsive uk-table-striped">
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th></th>
|
|
||||||
<th></th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td>QTY</td>
|
|
||||||
<td><input id="lineName" class="uk-input" type="text"></td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td>QTY</td>
|
|
||||||
<td><input id="lineQty" class="uk-input" type="text"></td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td>UOM</td>
|
|
||||||
<td>
|
|
||||||
<select id="lineUOM" class="uk-select" type="text">
|
|
||||||
{% for unit in units %}
|
|
||||||
<option value="{{unit['id']}}">{{unit['fullname']}}</option>
|
|
||||||
{% endfor %}
|
|
||||||
</select>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td>COST</td>
|
|
||||||
<td><input id="lineCost" class="uk-input" type="text"></td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td>EXPIRES</td>
|
|
||||||
<td><input id="lineExpires" class="uk-input" type="date"></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
<p class="uk-text-right">
|
|
||||||
<button class="uk-button uk-button-default uk-modal-close" type="button">Cancel</button>
|
|
||||||
<button id="saveLineButton" class="uk-button uk-button-primary" type="button">Save</button>
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</body>
|
|
||||||
{% assets "js_all" %}
|
|
||||||
<script type="text/javascript" src="{{ ASSET_URL }}"></script>
|
|
||||||
{% endassets %}
|
|
||||||
<script src="{{ url_for('poe.static', filename='js/receiptsHandler.js') }}"></script>
|
|
||||||
</html>
|
|
||||||
@ -1,139 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html lang="en" dir="ltr" id="main_html">
|
|
||||||
<head>
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" charset="utf-8" />
|
|
||||||
<title id="title"></title>
|
|
||||||
<!-- Material Icons -->
|
|
||||||
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet" />
|
|
||||||
<!-- Material Symbols - Outlined Set -->
|
|
||||||
<link href="https://fonts.googleapis.com/css2?family=Material+Symbols+Outlined" rel="stylesheet" />
|
|
||||||
<!-- Material Symbols - Rounded Set -->
|
|
||||||
<link href="https://fonts.googleapis.com/css2?family=Material+Symbols+Rounded" rel="stylesheet" />
|
|
||||||
<!-- Material Symbols - Sharp Set -->
|
|
||||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/uikit.min.css') }}"/>
|
|
||||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/pantry.css') }}"/>
|
|
||||||
|
|
||||||
<link id="dark-mode" rel="stylesheet" href="{{ url_for('static', filename='css/dark-mode.css') }}" disabled/>
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
</head>
|
|
||||||
<style>
|
|
||||||
.custom_row:hover{
|
|
||||||
background-color: rgb(230, 230, 230) !important;
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
<body>
|
|
||||||
<nav class="uk-navbar-container">
|
|
||||||
<div class="uk-container uk-container-expand">
|
|
||||||
<div class="uk-navbar uk-navbar-primary">
|
|
||||||
<!-- Application Navigation-->
|
|
||||||
<div class="uk-navbar-left">
|
|
||||||
<ul class="uk-navbar-nav">
|
|
||||||
<li>
|
|
||||||
<a href>Apps</a>
|
|
||||||
<div class="uk-navbar-dropdown" uk-drop="mode: click; multi:false">
|
|
||||||
<ul class="uk-nav uk-navbar-dropdown-nav">
|
|
||||||
<li><a href="/recipes">Recipes</a></li>
|
|
||||||
<li><a href="/shopping-lists">Shopping Lists</a></li>
|
|
||||||
<li class="uk-nav-header">Logistics</li>
|
|
||||||
<li><a href="/items">Items</a></li>
|
|
||||||
<li><a href="/items/transaction">Transaction</a></li>
|
|
||||||
<li><a href="/receipts">Receipts</a></li>
|
|
||||||
<li class="uk-nav-header">Points of Ease</li>
|
|
||||||
<li class="uk-active"><a href="/poe/scanner">Transaction Scanner</a></li>
|
|
||||||
<li><a href="/poe/receipts">Receipts Scanner</a></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
|
|
||||||
</div>
|
|
||||||
<!-- Breadcrumbs Navigation -->
|
|
||||||
<div class="uk-navbar-center uk-visible@m">
|
|
||||||
<ul class="uk-breadcrumb uk-margin-remove">
|
|
||||||
<li style="cursor: pointer;"><span><strong>{{current_site}}</strong></span>
|
|
||||||
<div uk-dropdown="mode: hover">
|
|
||||||
<ul class="uk-nav uk-dropdown-nav">
|
|
||||||
<li class="uk-nav-header">Select Site</li>
|
|
||||||
<li class="uk-nav-divider"></li>
|
|
||||||
{% for site in sites %}
|
|
||||||
{% if site == current_site %}
|
|
||||||
<li><a class="uk-disabled" href="#">{{site}}</a></li>
|
|
||||||
{% else %}
|
|
||||||
<li><a onclick="changeSite('{{site}}')">{{site}}</a></li>
|
|
||||||
{% endif %}
|
|
||||||
{% endfor %}
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li style="cursor: default; user-select: none;" class="uk-disabled"><span>Point of Ease</span></li>
|
|
||||||
<li class="uk-disabled"><span>Scan to Transaction</span></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<!-- Profile/Management Navigation-->
|
|
||||||
<div class="uk-navbar-right">
|
|
||||||
<ul class="uk-navbar-nav">
|
|
||||||
<li>
|
|
||||||
<a href="#">
|
|
||||||
<img src="{{session['user']['profile_pic_url']}}" alt="Profile Picture" class="profile-pic uk-visible@m" style="width: 40px; height: 40px; border-radius: 50%; margin-right: 5px;">
|
|
||||||
{{username}}
|
|
||||||
</a>
|
|
||||||
<div class="uk-navbar-dropdown" uk-drop="mode: click; multi:false">
|
|
||||||
<ul class="uk-nav uk-navbar-dropdown-nav">
|
|
||||||
<li><a href="/profile">Profile</a></li>
|
|
||||||
<li><a onclick="toggleDarkMode()">Dark Mode</a></li>
|
|
||||||
<li><a href="/site-management">Site Management</a></li>
|
|
||||||
<li><a href="/administration">System Management</a></li>
|
|
||||||
<li><a href="/access/logout">Logout</a></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</nav>
|
|
||||||
<div class="uk-container uk-section">
|
|
||||||
<div class="uk-grid-small" uk-grid>
|
|
||||||
<div class="uk-width-1-1 uk-visible@m">
|
|
||||||
<p class="uk-text-meta">Using this method of entering transaction receipts does so by transacting a single UOM of the barcode scanned. Its important that you have the
|
|
||||||
Barcode input focused and use a scanner that places the characters into the field before it finishes up with a press of the ENTER key.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1 uk-flex uk-flex-left" uk-grid>
|
|
||||||
<div class="uk-width-1-3@m">
|
|
||||||
<label class="uk-form-label" for="barcode-scan">Barcode</label>
|
|
||||||
<input onkeydown="addToQueue(event)" on id="barcode-scan" class="uk-input uk-flex uk-flex-bottom" type="text">
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-3@m uk-flex uk-flex-bottom uk-flex-right@m">
|
|
||||||
<select id="scan_trans_type" class="uk-select" aria-label="Select">
|
|
||||||
<option value="Adjust Out">Adjust Out</option>
|
|
||||||
<option value="Adjust In">Adjust In</option>
|
|
||||||
</select>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="uk-width-1-1">
|
|
||||||
<table class="uk-table uk-table-striped uk-table-hover">
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th>Status</th>
|
|
||||||
<th>Barcode</th>
|
|
||||||
<th>Name</th>
|
|
||||||
<th>Type</th>
|
|
||||||
<th>Location</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody id="scanTableBody">
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</body>
|
|
||||||
{% assets "js_all" %}
|
|
||||||
<script type="text/javascript" src="{{ ASSET_URL }}"></script>
|
|
||||||
{% endassets %}
|
|
||||||
<script src="{{ url_for('poe.static', filename='js/transactionHandler.js') }}"></script>
|
|
||||||
</html>
|
|
||||||
@@ -4,7 +4,6 @@ import psycopg2.extras
 from dataclasses import dataclass, field
 import random
 import string
-import config
 
 class DatabaseError(Exception):
     def __init__(self, message, payload=[], sql=""):
@@ -15,7 +14,7 @@ class DatabaseError(Exception):
         self.log_error()
 
     def log_error(self):
-        with open("logs/database.log", "a+") as file:
+        with open("database.log", "a+") as file:
             file.write("\n")
             file.write(f"{datetime.datetime.now()} --- ERROR --- DatabaseError(message='{self.message}',\n")
             file.write(f"{" "*41}payload={self.payload},\n")
@@ -45,47 +44,6 @@ def getUUID(n):
     random_string = ''.join(random.choices(string.ascii_letters + string.digits, k=n))
     return random_string
 
-def get_sites(sites=[]):
-    database_config = config.config()
-    with psycopg2.connect(**database_config) as conn:
-        try:
-            with conn.cursor() as cur:
-                site_rows = []
-                for each in sites:
-                    cur.execute(f"SELECT * FROM sites WHERE id=%s;", (each, ))
-                    site_rows.append(cur.fetchone())
-                return site_rows
-        except (Exception, psycopg2.DatabaseError) as error:
-            print(error)
-            conn.rollback()
-            return False
-
-
-def get_units_of_measure(convert=True, conn=None):
-    records = ()
-    self_conn = False
-    sql = f"SELECT * FROM units;"
-    try:
-        if not conn:
-            database_config = config.config()
-            conn = psycopg2.connect(**database_config)
-            conn.autocommit = True
-            self_conn = True
-
-        with conn.cursor() as cur:
-            cur.execute(sql)
-            rows = cur.fetchall()
-            if rows and convert:
-                records = [tupleDictionaryFactory(cur.description, row) for row in rows]
-            elif rows and not convert:
-                records = rows
-
-            if self_conn:
-                conn.close()
-            return records
-    except Exception as error:
-        raise DatabaseError(error, "", sql)
-
 class ConversionsTable:
     @dataclass
     class Payload:
@@ -2451,64 +2409,3 @@ class ItemLinkPayload:
             json.dumps(self.data),
             self.conv_factor
         )
-
-@dataclass
-class LogisticsInfoPayload:
-    barcode: str
-    primary_location: int
-    primary_zone: int
-    auto_issue_location: int
-    auto_issue_zone: int
-
-    def payload(self):
-        return (self.barcode,
-                self.primary_location,
-                self.primary_zone,
-                self.auto_issue_location,
-                self.auto_issue_zone)
-
-@dataclass
-class ItemInfoPayload:
-    barcode: str
-    packaging: str = ""
-    uom_quantity: float = 1.0
-    uom: int = 1
-    cost: float = 0.0
-    safety_stock: float = 0.0
-    lead_time_days: float = 0.0
-    ai_pick: bool = False
-    prefixes: list = field(default_factory=list)
-
-    def __post_init__(self):
-        if not isinstance(self.barcode, str):
-            raise TypeError(f"barcode must be of type str; not {type(self.barcode)}")
-
-    def payload(self):
-        return (
-            self.barcode,
-            self.packaging,
-            self.uom_quantity,
-            self.uom,
-            self.cost,
-            self.safety_stock,
-            self.lead_time_days,
-            self.ai_pick,
-            lst2pgarr(self.prefixes)
-        )
-
-@dataclass
-class FoodInfoPayload:
-    food_groups: list = field(default_factory=list)
-    ingrediants: list = field(default_factory=list)
-    nutrients: dict = field(default_factory=dict)
-    expires: bool = False
-    default_expiration: float = 0.0
-
-    def payload(self):
-        return (
-            lst2pgarr(self.food_groups),
-            lst2pgarr(self.ingrediants),
-            json.dumps(self.nutrients),
-            self.expires,
-            self.default_expiration
-        )
@ -1,280 +0,0 @@
|
|||||||
# 3RD PARTY IMPORTS
|
|
||||||
from flask import (
|
|
||||||
Blueprint, request, render_template, session, jsonify, current_app, send_from_directory
|
|
||||||
)
|
|
||||||
import math
|
|
||||||
import postsqldb
|
|
||||||
import mimetypes
|
|
||||||
import os
|
|
||||||
|
|
||||||
# APPLICATION IMPORTS
|
|
||||||
import webpush
|
|
||||||
from application.access_module import access_api
|
|
||||||
from application import postsqldb, database_payloads
|
|
||||||
from application.receipts import receipts_processes, receipts_database
|
|
||||||
|
|
||||||
|
|
||||||
receipt_api = Blueprint('receipt_api', __name__, template_folder='templates', static_folder='static')
|
|
||||||
|
|
||||||
|
|
||||||
# ROOT TEMPLATE ROUTES
|
|
||||||
@receipt_api.route("/")
|
|
||||||
@access_api.login_required
|
|
||||||
def receipts():
|
|
||||||
sites = [site[1] for site in postsqldb.get_sites(session['user']['sites'])]
|
|
||||||
return render_template("receipts_index.html", current_site=session['selected_site'], sites=sites)
|
|
||||||
|
|
||||||
@receipt_api.route("/<id>")
|
|
||||||
@access_api.login_required
|
|
||||||
def receipt(id):
|
|
||||||
sites = [site[1] for site in postsqldb.get_sites(session['user']['sites'])]
|
|
||||||
units = postsqldb.get_units_of_measure()
|
|
||||||
return render_template("receipt.html", id=id, current_site=session['selected_site'], sites=sites, units=units)
|
|
||||||
|
|
||||||
|
|
||||||
# API ROUTES
|
|
||||||
@receipt_api.route('/api/getItems', methods=["GET"])
|
|
||||||
@access_api.login_required
|
|
||||||
def getItems():
|
|
||||||
recordset = []
|
|
||||||
count = {'count': 0}
|
|
||||||
if request.method == "GET":
|
|
||||||
page = int(request.args.get('page', 1))
|
|
||||||
limit = int(request.args.get('limit', 10))
|
|
||||||
site_name = session['selected_site']
|
|
||||||
offset = (page - 1) * limit
|
|
||||||
sort_order = "ID ASC"
|
|
||||||
payload = ("%%", limit, offset, sort_order)
|
|
||||||
recordset, count = receipts_database.getItemsWithQOH(site_name, payload)
|
|
||||||
return jsonify({"items":recordset, "end":math.ceil(count['count']/limit), "error":False, "message":"items fetched succesfully!"})
|
|
||||||
return jsonify({"items":recordset, "end":math.ceil(count['count']/limit), "error":True, "message":"There was an error with this GET statement"})
|
|
||||||
|
|
||||||
@receipt_api.route('/api/getVendors', methods=["GET"])
|
|
||||||
@access_api.login_required
|
|
||||||
def getVendors():
|
|
||||||
recordset = []
|
|
||||||
count = 0
|
|
||||||
if request.method == "GET":
|
|
||||||
page = int(request.args.get('page', 1))
|
|
||||||
limit = int(request.args.get('limit', 10))
|
|
||||||
site_name = session['selected_site']
|
|
||||||
offset = (page - 1) * limit
|
|
||||||
recordset, count = receipts_database.paginateVendorsTuples(site_name, payload=(limit, offset))
|
|
||||||
return jsonify({"vendors":recordset, "end":math.ceil(count/limit), "error":False, "message":"items fetched succesfully!"})
|
|
||||||
return jsonify({"vendors":recordset, "end":math.ceil(count/limit), "error":True, "message":"There was an error with this GET statement"})
|
|
||||||
|
|
||||||
@receipt_api.route('/api/getLinkedLists', methods=["GET"])
|
|
||||||
@access_api.login_required
|
|
||||||
def getLinkedLists():
|
|
||||||
recordset = []
|
|
||||||
count = 0
|
|
||||||
if request.method == "GET":
|
|
||||||
page = int(request.args.get('page', 1))
|
|
||||||
limit = int(request.args.get('limit', 10))
|
|
||||||
site_name = session['selected_site']
|
|
||||||
offset = (page - 1) * limit
|
|
||||||
recordset, count = receipts_database.paginateLinkedLists(site_name, payload=(limit, offset))
|
|
||||||
return jsonify({"items":recordset, "end":math.ceil(count/limit), "error":False, "message":"items fetched succesfully!"})
|
|
||||||
return jsonify({"items":recordset, "end":math.ceil(count/limit), "error":True, "message":"There was an error with this GET statement"})
|
|
||||||
|
|
||||||
@receipt_api.route('/api/getReceipts', methods=["GET"])
|
|
||||||
@access_api.login_required
|
|
||||||
def getReceipts():
|
|
||||||
recordset = []
|
|
||||||
if request.method == "GET":
|
|
||||||
page = int(request.args.get('page', 1))
|
|
||||||
limit = int(request.args.get('limit', 50))
|
|
||||||
offset = (page - 1) * limit
|
|
||||||
site_name = session['selected_site']
|
|
||||||
recordset, count = receipts_database.paginateReceiptsTuples(site_name, payload=(limit, offset))
|
|
||||||
return jsonify({'receipts':recordset, "end": math.ceil(count/limit), 'error': False, "message": "Get Receipts Successful!"})
|
|
||||||
return jsonify({'receipts': recordset, "end": math.ceil(count/limit), 'error': True, "message": "Something went wrong while getting receipts!"})
|
|
||||||
|
|
||||||
@receipt_api.route('/api/getReceipt', methods=["GET"])
|
|
||||||
@access_api.login_required
|
|
||||||
def getReceipt():
|
|
||||||
receipt = []
|
|
||||||
if request.method == "GET":
|
|
||||||
receipt_id = int(request.args.get('id', 1))
|
|
||||||
site_name = session['selected_site']
|
|
||||||
receipt = receipts_database.getReceiptByID(site_name, (receipt_id, ))
|
|
||||||
return jsonify({'receipt': receipt, 'error': False, "message": "Get Receipts Successful!"})
|
|
||||||
return jsonify({'receipt': receipt, 'error': True, "message": "Something went wrong while getting receipts!"})
|
|
||||||
|
|
||||||
@receipt_api.route('/api/addReceipt', methods=["POST", "GET"])
|
|
||||||
@access_api.login_required
|
|
||||||
def addReceipt():
|
|
||||||
if request.method == "GET":
|
|
||||||
user_id = session['user_id']
|
|
||||||
site_name = session['selected_site']
|
|
||||||
receipt = database_payloads.ReceiptPayload(
|
|
||||||
receipt_id=f"PR-{receipts_database.requestNextReceiptID(site_name)}",
|
|
||||||
submitted_by=user_id
|
|
||||||
)
|
|
||||||
receipts_database.insertReceiptsTuple(site_name, receipt.payload())
|
|
||||||
return jsonify({'error': False, "message": "Receipt Added Successful!"})
|
|
||||||
return jsonify({'error': True, "message": "Something went wrong while adding receipt!"})
|
|
||||||
|
|
||||||
@receipt_api.route('/api/addSKULine', methods=["POST"])
|
|
||||||
@access_api.login_required
|
|
||||||
def addSKULine():
|
|
||||||
if request.method == "POST":
|
|
||||||
item_id = int(request.get_json()['item_id'])
|
|
||||||
receipt_id = int(request.get_json()['receipt_id'])
|
|
||||||
|
|
||||||
site_name = session['selected_site']
|
|
||||||
item = receipts_database.getItemAllByID(site_name, (item_id, ))
|
|
||||||
data = {
|
|
||||||
'cost': item['item_info']['cost'],
|
|
||||||
'expires': item['food_info']['expires']
|
|
||||||
}
|
|
||||||
receipt_item = database_payloads.ReceiptItemPayload(
|
|
||||||
type="sku",
|
|
||||||
receipt_id=receipt_id,
|
|
||||||
barcode=item['barcode'],
|
|
||||||
name=item['item_name'],
|
|
||||||
qty=item['item_info']['uom_quantity'],
|
|
||||||
uom=item['item_info']['uom']['id'],
|
|
||||||
data=data
|
|
||||||
)
|
|
||||||
receipts_database.insertReceiptItemsTuple(site_name, receipt_item.payload())
|
|
||||||
return jsonify({'error': False, "message": "Line added Succesfully"})
|
|
||||||
return jsonify({'error': True, "message": "Something went wrong while add SKU line!"})
|
|
||||||
|
|
||||||
@receipt_api.route('/api/deleteLine', methods=["POST"])
@access_api.login_required
def deleteLine():
    if request.method == "POST":
        line_id = int(request.get_json()['line_id'])
        site_name = session['selected_site']
        receipts_database.deleteReceiptItemsTuple(site_name, (line_id, ))
        return jsonify({'error': False, "message": "Line Deleted Successfully"})
    return jsonify({'error': True, "message": "Something went wrong while deleting line!"})

@receipt_api.route('/api/denyLine', methods=["POST"])
@access_api.login_required
def denyLine():
    if request.method == "POST":
        line_id = int(request.get_json()['line_id'])
        site_name = session['selected_site']
        receipts_database.updateReceiptItemsTuple(site_name, {'id': line_id, 'update': {'status': 'Denied'}})
        return jsonify({'error': False, "message": "Line Denied Successfully"})
    return jsonify({'error': True, "message": "Something went wrong while denying line!"})

@receipt_api.route('/api/saveLine', methods=["POST"])
@access_api.login_required
def saveLine():
    if request.method == "POST":
        line_id = int(request.get_json()['line_id'])
        payload = request.get_json()['payload']
        site_name = session['selected_site']
        receipt_item = receipts_database.selectReceiptItemsTuple(site_name, (line_id, ))
        if 'api_data' in receipt_item['data'].keys():
            payload['data']['api_data'] = receipt_item['data']['api_data']
        receipts_database.updateReceiptItemsTuple(site_name, {'id': line_id, 'update': payload})
        return jsonify({'error': False, "message": "Line Saved Successfully"})
    return jsonify({'error': True, "message": "Something went wrong while saving line!"})
@receipt_api.route('/api/postLinkedItem', methods=["POST"])
@access_api.login_required
def postLinkedItem():
    if request.method == "POST":
        receipt_item_id = int(request.get_json()['receipt_item_id'])
        link_list_id = int(request.get_json()['link_list_id'])
        conv_factor = float(request.get_json()['conv_factor'])

        site_name = session['selected_site']
        user_id = session['user_id']

        payload = {
            'receipt_item_id': receipt_item_id,
            'linked_list_id': link_list_id,
            'conv_factor': conv_factor
        }

        receipts_processes.linkItem(site_name, user_id, payload)

        return jsonify({'error': False, "message": "Item Linked Successfully"})
    return jsonify({'error': True, "message": "Something went wrong while linking the item!"})

@receipt_api.route('/api/resolveLine', methods=["POST"])
@access_api.login_required
def resolveLine():
    if request.method == "POST":
        line_id = int(request.get_json()['line_id'])
        site_name = session['selected_site']
        user_id = session['user_id']
        payload = {'line_id': line_id}
        receipts_processes.postLine(site_name, user_id, payload)
        return jsonify({'error': False, "message": "Line Resolved Successfully"})
    return jsonify({'error': True, "message": "Something went wrong while resolving line!"})

@receipt_api.route('/api/postVendorUpdate', methods=["POST"])
@access_api.login_required
def postVendorUpdate():
    if request.method == "POST":
        receipt_id = int(request.get_json()['receipt_id'])
        vendor_id = int(request.get_json()['vendor_id'])
        site_name = session['selected_site']
        receipts_database.updateReceiptsTuple(site_name, {'id': receipt_id, 'update': {'vendor_id': vendor_id}})
        return jsonify({'error': False, "message": "Vendor Updated Successfully"})
    return jsonify({'error': True, "message": "Something went wrong while updating the vendor!"})

@receipt_api.route('/api/resolveReceipt', methods=["POST"])
@access_api.login_required
def resolveReceipt():
    if request.method == "POST":
        receipt_id = int(request.get_json()['receipt_id'])
        site_name = session['selected_site']
        user = session['user']
        receipt = receipts_database.updateReceiptsTuple(site_name, {'id': receipt_id, 'update': {'receipt_status': 'Resolved'}})
        webpush.push_ntfy(title=f"Receipt '{receipt['receipt_id']}' Resolved", body=f"Receipt {receipt['receipt_id']} was completed by {user['username']}.")
        return jsonify({'error': False, "message": "Receipt Resolved Successfully"})
    return jsonify({'error': True, "message": "Something went wrong while resolving the receipt!"})
@receipt_api.route('/api/uploadfile/<receipt_id>', methods=["POST"])
@access_api.login_required
def uploadFile(receipt_id):
    file = request.files['file']
    safe_name = file.filename.replace(" ", "_")
    file_path = current_app.config['FILES_FOLDER'] + f"/receipts/{safe_name}"
    file.save(file_path)
    file_type, _ = mimetypes.guess_type(file.filename)
    preview_image = ""
    if file_type == "application/pdf":
        output_path = "static/files/receipts/previews/"
        preview_image = receipts_processes.create_pdf_preview(file_path, output_path)

    file_size = os.path.getsize(file_path)
    site_name = session['selected_site']
    username = session['user']['username']
    receipt_files = receipts_database.selectReceiptsTuple(site_name, (receipt_id, ))['files']
    receipt_files[safe_name] = {'file_path': safe_name, 'file_type': file_type, 'file_size': file_size, 'uploaded_by': username, 'preview_image': preview_image}
    receipts_database.updateReceiptsTuple(site_name, {'id': receipt_id, 'update': {'files': receipt_files}})
    return jsonify({})

@receipt_api.route('/api/getFile/<file_name>')
@access_api.login_required
def getFile(file_name):
    path_ = current_app.config['FILES_FOLDER'] + "/receipts"
    print(path_)
    return send_from_directory(path_, file_name)

@receipt_api.route('/api/checkAPI', methods=["POST"])
@access_api.login_required
def checkAPI():
    if request.method == "POST":
        line_id = int(request.get_json()['line_id'])
        barcode = request.get_json()['barcode']
        site_name = session['selected_site']
        api_response, api_data = receipts_processes.get_open_facts(barcode)
        if api_response:
            receipt_item = receipts_database.selectReceiptItemsTuple(site_name, (line_id, ))
            item_data = receipt_item['data']
            item_data['api_data'] = api_data
            payload = {'id': line_id, 'update': {'type': 'api', 'data': item_data, 'name': api_data['product_name']}}
            receipts_database.updateReceiptItemsTuple(site_name, payload)
            return jsonify({'error': False, "message": "Line Updated From API Successfully"})
        else:
            return jsonify({'error': True, "message": "Item not found in Open Food Facts!"})
    return jsonify({'error': True, "message": "Something went wrong while checking the API!"})
@ -1,724 +0,0 @@
# 3RD PARTY IMPORTS
import psycopg2
import psycopg2.extras

# APPLICATION IMPORTS
import config
from application import postsqldb

def requestNextReceiptID(site_name, conn=None):
    """Gets the next receipt_id; currently returns an 8-digit, zero-padded number.

    Args:
        site_name (str): site to get the next id for

    Returns:
        str: the next zero-padded receipt_id
    """
    next_receipt_id = None
    self_conn = False
    sql = f"SELECT receipt_id FROM {site_name}_receipts ORDER BY id DESC LIMIT 1;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql)
            next_receipt_id = cur.fetchone()
            if next_receipt_id is None:
                next_receipt_id = "00000001"
            else:
                next_receipt_id = next_receipt_id[0]
                next_receipt_id = int(next_receipt_id.split("-")[1]) + 1
                y = str(next_receipt_id)
                len_str = len(y)
                x = "".join(["0" for _ in range(8 - len_str)])
                next_receipt_id = x + y

        if self_conn:
            conn.commit()
            conn.close()

        return next_receipt_id
    except (Exception, psycopg2.DatabaseError) as error:
        raise postsqldb.DatabaseError(error, payload=(), sql=sql)
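# Illustrative only (not part of the original module): a quick sketch of the id sequence
# requestNextReceiptID produces, assuming the stored receipt_id values look like "PR-00000012"
# (the "PR-" prefix is added by the addReceipt route, not by this function).
#
# last_stored = "PR-00000012"
# next_numeric = int(last_stored.split("-")[1]) + 1    # -> 13
# next_receipt_id = str(next_numeric).zfill(8)         # -> "00000013"
# # str.zfill(8) is equivalent to the manual zero-padding loop above.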
def getItemsWithQOH(site, payload, convert=True, conn=None):
    recordset = []
    count = 0
    self_conn = False
    with open(f"application/receipts/sql/getItemsWithQOH.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site).replace("%%sort_order%%", payload[3])

    payload = list(payload)
    payload.pop(3)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        if convert:
            with conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cur:
                cur.execute(sql, payload)
                recordset = cur.fetchall()
                recordset = [dict(record) for record in recordset]
                cur.execute(f"SELECT COUNT(*) FROM {site}_items WHERE search_string LIKE '%%' || %s || '%%';", (payload[0], ))
                count = cur.fetchone()
        else:
            with conn.cursor() as cur:
                cur.execute(sql, payload)
                recordset = cur.fetchall()
                cur.execute(f"SELECT COUNT(*) FROM {site}_items WHERE search_string LIKE '%%' || %s || '%%';", (payload[0], ))
                count = cur.fetchone()

        if self_conn:
            conn.close()

        return recordset, count
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)


def getLinkedItemByBarcode(site, payload, convert=True, conn=None):
    item = ()
    self_conn = False
    sql = f"SELECT * FROM {site}_itemlinks WHERE barcode=%s;"
    if convert:
        item = {}
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                item = postsqldb.tupleDictionaryFactory(cur.description, rows)
            if rows and not convert:
                item = rows

        if self_conn:
            conn.close()

        return item
    except (Exception, psycopg2.DatabaseError) as error:
        raise postsqldb.DatabaseError(error, payload, sql)


def getItemAllByBarcode(site, payload, convert=True, conn=None):
    item = ()
    self_conn = False

    if not conn:
        database_config = config.config()
        conn = psycopg2.connect(**database_config)
        conn.autocommit = True
        self_conn = True

    if convert:
        item = {}

    linked_item = getLinkedItemByBarcode(site, (payload[0],), conn=conn)

    if len(linked_item) > 1:
        item = getItemAllByID(site, payload=(linked_item['link'], ), convert=convert, conn=conn)
        item['item_info']['uom_quantity'] = linked_item['conv_factor']

        if self_conn:
            conn.close()

        return item
    else:
        with open(f"application/receipts/sql/getItemAllByBarcode.sql", "r+") as file:
            getItemAllByBarcode_sql = file.read().replace("%%site_name%%", site)
        try:
            with conn.cursor() as cur:
                cur.execute(getItemAllByBarcode_sql, payload)
                rows = cur.fetchone()
                if rows and convert:
                    item = postsqldb.tupleDictionaryFactory(cur.description, rows)
                if rows and not convert:
                    item = rows

            if self_conn:
                conn.close()

            return item
        except (Exception, psycopg2.DatabaseError) as error:
            raise postsqldb.DatabaseError(error, payload, getItemAllByBarcode_sql)


def getItemAllByID(site, payload, convert=True, conn=None):
    item = ()
    self_conn = False

    with open(f"application/receipts/sql/getItemAllByID.sql", "r+") as file:
        getItemAllByID_sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(getItemAllByID_sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                item = postsqldb.tupleDictionaryFactory(cur.description, rows)
            if rows and not convert:
                item = rows

        if self_conn:
            conn.close()

        return item
    except (Exception, psycopg2.DatabaseError) as error:
        raise postsqldb.DatabaseError(error, payload, getItemAllByID_sql)


def getReceiptByID(site, payload, convert=True, conn=None):
    receipt = []
    self_conn = False
    with open(f"application/receipts/sql/getReceiptByID.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            row = cur.fetchone()
            if row and convert:
                receipt = postsqldb.tupleDictionaryFactory(cur.description, row)
            if row and not convert:
                receipt = row

        if self_conn:
            conn.close()

        return receipt
    except (Exception, psycopg2.DatabaseError) as error:
        raise postsqldb.DatabaseError(error, payload, sql)
def paginateReceiptsTuples(site, payload, convert=True, conn=None):
    """payload=(limit, offset)"""
    receipts = []
    count = 0
    self_conn = False
    with open(f"application/receipts/sql/getReceipts.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchall()
            if rows and convert:
                receipts = [postsqldb.tupleDictionaryFactory(cur.description, row) for row in rows]
            if rows and not convert:
                receipts = rows

            cur.execute(f"SELECT COUNT(*) FROM {site}_receipts;")
            count = cur.fetchone()[0]

        if self_conn:
            conn.commit()
            conn.close()

        return receipts, count

    except (Exception, psycopg2.DatabaseError) as error:
        raise postsqldb.DatabaseError(error, payload, sql)
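# Illustrative only (not part of the original module): how a caller might turn a 1-based page
# number into the (limit, offset) payload this function expects. The site name and page size
# below are assumptions for the example, not values taken from the application.
#
# page, page_size = 3, 25
# receipts, total = paginateReceiptsTuples("home", (page_size, (page - 1) * page_size))
# total_pages = (total + page_size - 1) // page_size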
def paginateVendorsTuples(site, payload, convert=True, conn=None):
    """payload (tuple): (limit, offset)"""
    recordset = ()
    count = 0
    self_conn = False
    sql = f"SELECT * FROM {site}_vendors LIMIT %s OFFSET %s;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchall()
            if rows and convert:
                recordset = [postsqldb.tupleDictionaryFactory(cur.description, row) for row in rows]
            elif rows and not convert:
                recordset = rows

            cur.execute(f"SELECT COUNT(*) FROM {site}_vendors;")
            count = cur.fetchone()[0]

        if self_conn:
            conn.close()

        return recordset, count
    except Exception as error:
        raise postsqldb.DatabaseError(error, (), sql)


def paginateLinkedLists(site, payload, convert=True, conn=None):
    records = []
    count = 0
    self_conn = False
    sql = f"SELECT * FROM {site}_items WHERE row_type = 'list' LIMIT %s OFFSET %s;"
    sql_count = f"SELECT COUNT(*) FROM {site}_items WHERE row_type = 'list' LIMIT %s OFFSET %s;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchall()
            if rows and convert:
                records = [postsqldb.tupleDictionaryFactory(cur.description, row) for row in rows]
            if rows and not convert:
                records = rows

            cur.execute(sql_count, payload)
            count = cur.fetchone()[0]

        if self_conn:
            conn.close()

        return records, count
    except (Exception, psycopg2.DatabaseError) as error:
        raise postsqldb.DatabaseError(error, payload, sql)


def selectItemLocationsTuple(site_name, payload, convert=True, conn=None):
    """payload (tuple): [item_id, location_id]"""
    item_locations = ()
    self_conn = False
    select_item_location_sql = f"SELECT * FROM {site_name}_item_locations WHERE part_id = %s AND location_id = %s;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(select_item_location_sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                item_locations = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                item_locations = rows

        if self_conn:
            conn.close()

        return item_locations
    except Exception as error:
        return error


def selectLocationsTuple(site, payload, convert=True, conn=None):
    selected = ()
    self_conn = False
    sql = f"SELECT * FROM {site}_locations WHERE id=%s;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                selected = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                selected = rows

        if self_conn:
            conn.close()

        return selected

    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)


def selectReceiptsTuple(site, payload, convert=True, conn=None):
    selected = ()
    self_conn = False
    sql = f"SELECT * FROM {site}_receipts WHERE id=%s;"

    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                selected = postsqldb.tupleDictionaryFactory(cur.description, rows)
            if rows and not convert:
                selected = rows

        if self_conn:
            conn.close()

        return selected
    except (Exception, psycopg2.DatabaseError) as error:
        raise postsqldb.DatabaseError(error, payload, sql)


def selectReceiptItemsTuple(site, payload, convert=True, conn=None):
    selected = ()
    self_conn = False
    sql = f"SELECT * FROM {site}_receipt_items WHERE id=%s;"

    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                selected = postsqldb.tupleDictionaryFactory(cur.description, rows)
            if rows and not convert:
                selected = rows

        if self_conn:
            conn.close()

        return selected
    except (Exception, psycopg2.DatabaseError) as error:
        raise postsqldb.DatabaseError(error, payload, sql)


def deleteReceiptItemsTuple(site, payload, convert=True, conn=None):
    deleted = ()
    self_conn = False
    sql = f"WITH deleted_rows AS (DELETE FROM {site}_receipt_items WHERE id IN ({','.join(['%s'] * len(payload))}) RETURNING *) SELECT * FROM deleted_rows;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchall()
            if rows and convert:
                deleted = [postsqldb.tupleDictionaryFactory(cur.description, r) for r in rows]
            elif rows and not convert:
                deleted = rows

        if self_conn:
            conn.commit()
            conn.close()

        return deleted
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)
def insertTransactionsTuple(site, payload, convert=True, conn=None):
    """
    payload (tuple): (timestamp[timestamp], logistics_info_id[int], barcode[str], name[str],
                      transaction_type[str], quantity[float], description[str], user_id[int], data[jsonb])
    """
    transaction = ()
    self_conn = False
    with open(f"application/receipts/sql/insertTransactionsTuple.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                transaction = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                transaction = rows

        if self_conn:
            conn.commit()
            conn.close()

        return transaction

    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)
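# Illustrative only (not part of the original module): the shape of the tuple this function
# expects, following the docstring above. In the application the tuple is normally produced by
# database_payloads.TransactionPayload(...).payload() rather than written by hand; the site
# name, literal values, and the json-encoding of the data field are assumptions for the example.
#
# import datetime, json
# example_payload = (
#     datetime.datetime.now(),   # timestamp
#     12,                        # logistics_info_id
#     "0123456789012",           # barcode
#     "Example Item",            # name
#     "Adjust In",               # transaction_type
#     2.0,                       # quantity
#     "PR-00000001",             # description
#     1,                         # user_id
#     json.dumps({}),            # data (jsonb)
# )
# insertTransactionsTuple("home", example_payload)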
def insertItemLinksTuple(site, payload, convert=True, conn=None):
    """payload (tuple): (barcode[str], link[int], data[jsonb], conv_factor[float])"""
    link = ()
    self_conn = False
    with open(f"application/receipts/sql/insertItemLinksTuple.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                link = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                link = rows

        if self_conn:
            conn.commit()
            conn.close()

        return link
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)


def insertCostLayersTuple(site, payload, convert=True, conn=None):
    """payload (tuple): (aquisition_date[timestamp], quantity[float], cost[float], currency_type[str], expires[timestamp/None], vendor[int])"""
    cost_layer = ()
    self_conn = False

    with open(f"application/receipts/sql/insertCostLayersTuple.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                cost_layer = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                cost_layer = rows

        if self_conn:
            conn.commit()
            conn.close()

        return cost_layer

    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)


def insertReceiptItemsTuple(site, payload, convert=True, conn=None):
    receipt_item = ()
    self_conn = False
    with open(f"application/receipts/sql/insertReceiptItemsTuple.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                receipt_item = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                receipt_item = rows

        if self_conn:
            conn.commit()
            conn.close()

        return receipt_item
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)


def insertReceiptsTuple(site, payload, convert=True, conn=None):
    receipt = ()
    self_conn = False
    with open(f"application/receipts/sql/insertReceiptsTuple.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                receipt = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                receipt = rows

        if self_conn:
            conn.commit()
            conn.close()

        return receipt
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)
def updateItemsTuple(site, payload, convert=True, conn=None):
    """payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}}"""
    updated = ()
    self_conn = False
    set_clause, values = postsqldb.updateStringFactory(payload['update'])
    values.append(payload['id'])
    sql = f"UPDATE {site}_items SET {set_clause} WHERE id=%s RETURNING *;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, values)
            rows = cur.fetchone()
            if rows and convert:
                updated = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                updated = rows

        if self_conn:
            conn.commit()
            conn.close()

        return updated

    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)


def updateItemLocation(site, payload, convert=True, conn=None):
    item_location = ()
    self_conn = False
    with open(f"application/receipts/sql/updateItemLocation.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                item_location = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                item_location = rows

        if self_conn:
            conn.commit()
            conn.close()

        return item_location
    except Exception as error:
        return error
def updateReceiptsTuple(site, payload, convert=True, conn=None):
    """payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}}"""
    updated = ()
    self_conn = False
    set_clause, values = postsqldb.updateStringFactory(payload['update'])
    values.append(payload['id'])
    sql = f"UPDATE {site}_receipts SET {set_clause} WHERE id=%s RETURNING *;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, values)
            rows = cur.fetchone()
            if rows and convert:
                updated = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                updated = rows

        if self_conn:
            conn.commit()
            conn.close()

        return updated
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)
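# Illustrative only (not part of the original module): the payload shape the update*Tuple
# helpers expect. The column names below are taken from calls elsewhere in this diff; the site
# name and row id are made up for the example.
#
# updateReceiptsTuple("home", {
#     'id': 7,
#     'update': {'receipt_status': 'Resolved', 'vendor_id': 3},
# })
# # postsqldb.updateStringFactory presumably expands the 'update' dict into the SET clause,
# # and the row id is appended as the final WHERE parameter.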
def updateReceiptItemsTuple(site, payload, convert=True, conn=None):
    """Updates a single receipt_items row.

    Args:
        conn (psycopg2 connection, optional): Postgresql connector; one is opened if not passed.
        site (str): site prefix for the table name
        payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}}
        convert (bool, optional): determines whether to return the tuple as a dictionary. Defaults to True.

    Raises:
        DatabaseError:

    Returns:
        tuple or dict: updated tuple
    """
    updated = ()
    self_conn = False

    set_clause, values = postsqldb.updateStringFactory(payload['update'])
    values.append(payload['id'])
    sql = f"UPDATE {site}_receipt_items SET {set_clause} WHERE id=%s RETURNING *;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True

        with conn.cursor() as cur:
            cur.execute(sql, values)
            rows = cur.fetchone()
            if rows and convert:
                updated = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                updated = rows

        if self_conn:
            conn.commit()
            conn.close()

        return updated

    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)
@ -1,204 +0,0 @@
# 3RD PARTY IMPORTS
import pymupdf
import os
import PIL.Image
import openfoodfacts
import psycopg2
import datetime

# APPLICATION IMPORTS
from application.receipts import receipts_database
from application import database_payloads
from application.items.items_processes import postNewBlankItem
import config


def create_pdf_preview(pdf_path, output_path, size=(600, 400)):
    pdf = pymupdf.open(pdf_path)
    page = pdf[0]
    file_name = os.path.basename(pdf_path).replace('.pdf', "")
    pix = page.get_pixmap()
    img = PIL.Image.frombytes("RGB", (pix.width, pix.height), pix.samples)
    output_path = output_path + file_name + '.jpg'
    img.thumbnail(size)
    img.save(output_path)
    return file_name + '.jpg'
def linkItem(site, user_id, data, conn=None):
    """Higher-level process that posts a new item into the system (when needed),
    links it to another item, and updates the receipt_item with the linked item data.

    Args:
        site (str): site prefix for the tables to work against
        user_id (int): id of the user performing the link
        data (dict): {'receipt_item_id', 'linked_list_id', 'conv_factor'}
        conn (psycopg2 connection, optional): Passed Connector. Defaults to None.

    Returns:
        False if the function opened (and committed/closed) its own connection, otherwise the passed connection.
    """
    self_conn = False
    if not conn:
        database_config = config.config()
        conn = psycopg2.connect(**database_config)
        conn.autocommit = False
        self_conn = True

    # Select receipt item
    receipt_item = receipts_database.selectReceiptItemsTuple(site, (data['receipt_item_id'],), conn=conn)
    # Select linked item
    linked_list = receipts_database.getItemAllByID(site, (data['linked_list_id'],), conn=conn)

    if receipt_item['type'] == 'api':
        new_item_data = {
            'barcode': receipt_item['barcode'],
            'name': receipt_item['name'],
            'subtype': 'FOOD'
        }
        postNewBlankItem(site, user_id, new_item_data, conn=conn)

    name = receipt_item['name']
    if receipt_item['name'] == "unknown":
        name = linked_list['item_name']
    if receipt_item['type'] == "new sku":
        new_item_data = {
            'barcode': receipt_item['barcode'],
            'name': name,
            'subtype': 'FOOD'
        }
        postNewBlankItem(site, user_id, new_item_data, conn=conn)

    new_item = receipts_database.getItemAllByBarcode(site, (receipt_item['barcode'], ), conn=conn)
    new_item = receipts_database.updateItemsTuple(site, {'id': new_item['id'], 'update': {'row_type': 'link'}}, conn=conn)

    item_link = database_payloads.ItemLinkPayload(
        new_item['barcode'],
        linked_list['id'],
        new_item,
        data['conv_factor']
    )

    receipts_database.insertItemLinksTuple(site, item_link.payload(), conn=conn)

    payload = {
        'id': receipt_item['id'],
        'update': {
            'barcode': linked_list['barcode'],
            'name': linked_list['item_name'],
            'uom': linked_list['item_info']['uom']['id'],
            'qty': float(receipt_item['qty'] * data['conv_factor']),
            'type': 'sku'
        }
    }

    receipts_database.updateReceiptItemsTuple(site, payload, conn=conn)

    if self_conn:
        conn.commit()
        conn.close()
        return False

    return conn
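# Illustrative only (not part of the original module): how the /api/postLinkedItem route drives
# linkItem. The site name, ids, and conversion factor are made up for the example.
#
# linkItem("home", user_id=1, data={
#     'receipt_item_id': 15,   # receipt_items.id of the scanned line
#     'linked_list_id': 4,     # items.id of the list item it should map to
#     'conv_factor': 0.5,      # receipt quantities are multiplied by this when updating the line
# })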
def postLine(site, user_id, data, conn=None):
    self_conn = False
    if not conn:
        database_config = config.config()
        conn = psycopg2.connect(**database_config)
        conn.autocommit = False
        self_conn = True
    transaction_time = datetime.datetime.now()
    receipt_item = receipts_database.selectReceiptItemsTuple(site, (data['line_id'],), conn=conn)
    receipt = receipts_database.getReceiptByID(site, (receipt_item['receipt_id'], ), conn=conn)
    conv_factor = 1.0
    if receipt_item['data']['expires'] is not False:
        expiration = datetime.datetime.strptime(receipt_item['data']['expires'], "%Y-%m-%d")
    else:
        expiration = None

    if receipt_item['type'] == 'sku':
        linked_item = receipts_database.getLinkedItemByBarcode(site, (receipt_item['barcode'], ), conn=conn)
        if len(linked_item) > 1:
            conv_factor = linked_item['conv_factor']
            receipt_item['data']['linked_child'] = linked_item['barcode']

    if receipt_item['type'] == 'api':
        new_item_data = {
            'barcode': receipt_item['barcode'],
            'name': receipt_item['name'],
            'subtype': 'FOOD'
        }
        postNewBlankItem(site, user_id, new_item_data, conn=conn)

    if receipt_item['type'] == "new sku":
        new_item_data = {
            'barcode': receipt_item['barcode'],
            'name': receipt_item['name'],
            'subtype': 'FOOD'
        }
        postNewBlankItem(site, user_id, new_item_data, conn=conn)

    item = receipts_database.getItemAllByBarcode(site, (receipt_item['barcode'], ), conn=conn)

    location = receipts_database.selectItemLocationsTuple(site, (item['id'], item['logistics_info']['primary_location']['id']), conn=conn)
    cost_layers: list = location['cost_layers']

    receipt_item['data']['location'] = item['logistics_info']['primary_location']['uuid']

    transaction = database_payloads.TransactionPayload(
        timestamp=transaction_time,
        logistics_info_id=item['logistics_info_id'],
        barcode=item['barcode'],
        name=item['item_name'],
        transaction_type="Adjust In",
        quantity=(float(receipt_item['qty']) * conv_factor),
        description=f"{receipt['receipt_id']}",
        user_id=user_id,
        data=receipt_item['data']
    )

    cost_layer = database_payloads.CostLayerPayload(
        aquisition_date=transaction_time,
        quantity=float(receipt_item['qty']),
        cost=float(receipt_item['data']['cost']),
        currency_type="USD",
        vendor=receipt['vendor_id'],
        expires=expiration
    )

    cost_layer = receipts_database.insertCostLayersTuple(site, cost_layer.payload(), conn=conn)
    cost_layers.append(cost_layer['id'])

    quantity_on_hand = float(location['quantity_on_hand']) + float(receipt_item['qty'])

    updated_item_location_payload = (cost_layers, quantity_on_hand, item['id'], item['logistics_info']['primary_location']['id'])
    receipts_database.updateItemLocation(site, updated_item_location_payload, conn=conn)

    site_location = receipts_database.selectLocationsTuple(site, (location['location_id'], ), conn=conn)

    receipt_item['data']['location'] = site_location['uuid']
    receipts_database.insertTransactionsTuple(site, transaction.payload(), conn=conn)

    receipts_database.updateReceiptItemsTuple(site, {'id': receipt_item['id'], 'update': {'status': "Resolved"}}, conn=conn)

    if self_conn:
        conn.commit()
        conn.close()
        return False

    return conn
# OPEN FOOD FACTS API INTEGRATION
open_food_api = openfoodfacts.API(user_agent="MyAwesomeApp/1.0")
open_food_enabled = True


def get_open_facts(barcode):
    if open_food_enabled:
        barcode: str = barcode.replace('%', "")
        data = open_food_api.product.get(barcode)
        if data is not None:
            return True, data
    return False, {}
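# Illustrative only (not part of the original module): a minimal sketch of how the checkAPI
# route uses this helper. The barcode value is made up, and the exact shape of the returned
# product data depends on the openfoodfacts package and the Open Food Facts API; checkAPI
# reads 'product_name' from it, so that key is shown here as an assumption.
#
# found, product = get_open_facts("3017620422003")
# if found:
#     print(product.get("product_name"))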
@ -1,75 +0,0 @@
WITH passed_id AS (SELECT id AS passed_id FROM %%site_name%%_items WHERE barcode=%s),
logistics_id AS (SELECT logistics_info_id FROM %%site_name%%_items WHERE id=(SELECT passed_id FROM passed_id)),
info_id AS (SELECT item_info_id FROM %%site_name%%_items WHERE id=(SELECT passed_id FROM passed_id)),
cte_item_info AS (
    SELECT
        %%site_name%%_item_info.*,
        row_to_json(units.*) as uom
    FROM %%site_name%%_item_info
    LEFT JOIN units ON %%site_name%%_item_info.uom = units.id
    WHERE %%site_name%%_item_info.id = (SELECT item_info_id FROM info_id)
),
cte_groups AS (
    SELECT
        %%site_name%%_groups.*,
        %%site_name%%_group_items.uuid,
        %%site_name%%_group_items.item_type,
        %%site_name%%_group_items.qty
    FROM %%site_name%%_groups
    JOIN %%site_name%%_group_items ON %%site_name%%_groups.id = %%site_name%%_group_items.gr_id
    WHERE %%site_name%%_group_items.item_id = (SELECT passed_id FROM passed_id)
),
cte_shopping_lists AS (
    SELECT
        %%site_name%%_shopping_lists.*,
        %%site_name%%_shopping_list_items.uuid,
        %%site_name%%_shopping_list_items.item_type,
        %%site_name%%_shopping_list_items.qty
    FROM %%site_name%%_shopping_lists
    JOIN %%site_name%%_shopping_list_items ON %%site_name%%_shopping_lists.id = %%site_name%%_shopping_list_items.sl_id
    WHERE %%site_name%%_shopping_list_items.item_id = (SELECT passed_id FROM passed_id)
),
cte_itemlinks AS (
    SELECT * FROM %%site_name%%_itemlinks WHERE link=(SELECT passed_id FROM passed_id)
),
cte_item_locations AS (
    SELECT * FROM %%site_name%%_item_locations
    LEFT JOIN %%site_name%%_locations ON %%site_name%%_locations.id = %%site_name%%_item_locations.location_id
    WHERE part_id = (SELECT passed_id FROM passed_id)
),
cte_logistics_info AS (
    SELECT
        li.*,
        row_to_json(pl) AS primary_location,
        row_to_json(ail) AS auto_issue_location,
        row_to_json(pz) AS primary_zone,
        row_to_json(aiz) AS auto_issue_zone
    FROM %%site_name%%_logistics_info AS li
    LEFT JOIN %%site_name%%_locations AS pl ON li.primary_location = pl.id
    LEFT JOIN %%site_name%%_locations AS ail ON li.auto_issue_location = ail.id
    LEFT JOIN %%site_name%%_zones AS pz ON li.primary_zone = pz.id
    LEFT JOIN %%site_name%%_zones AS aiz ON li.auto_issue_zone = aiz.id
    WHERE li.id=(SELECT logistics_info_id FROM logistics_id)
)

SELECT
    (SELECT passed_id FROM passed_id) AS passed_id,
    %%site_name%%_items.*,
    (SELECT COALESCE(row_to_json(logis), '{}') FROM cte_logistics_info logis) AS logistics_info,
    row_to_json(%%site_name%%_food_info.*) as food_info,
    row_to_json(%%site_name%%_brands.*) as brand,
    (SELECT COALESCE(row_to_json(ii), '{}') FROM cte_item_info ii) AS item_info,
    (SELECT COALESCE(array_agg(row_to_json(g)), '{}') FROM cte_groups g) AS item_groups,
    (SELECT COALESCE(array_agg(row_to_json(sl)), '{}') FROM cte_shopping_lists sl) AS item_shopping_lists,
    (SELECT COALESCE(array_agg(row_to_json(il)), '{}') FROM cte_itemlinks il) AS linked_items,
    (SELECT COALESCE(array_agg(row_to_json(ils)), '{}') FROM cte_item_locations ils) AS item_locations
FROM %%site_name%%_items
LEFT JOIN %%site_name%%_item_info ON %%site_name%%_items.item_info_id = %%site_name%%_item_info.id
LEFT JOIN %%site_name%%_food_info ON %%site_name%%_items.food_info_id = %%site_name%%_food_info.id
LEFT JOIN %%site_name%%_brands ON %%site_name%%_items.brand = %%site_name%%_brands.id
LEFT JOIN units ON %%site_name%%_item_info.uom = units.id
LEFT JOIN cte_groups ON %%site_name%%_items.id = cte_groups.id
LEFT JOIN cte_shopping_lists ON %%site_name%%_items.id = cte_shopping_lists.id
WHERE %%site_name%%_items.id=(SELECT passed_id FROM passed_id)
GROUP BY
    %%site_name%%_items.id, %%site_name%%_item_info.id, %%site_name%%_food_info.id, %%site_name%%_brands.id;
@ -1,86 +0,0 @@
WITH passed_id AS (SELECT %s AS passed_id),
logistics_id AS (SELECT logistics_info_id FROM %%site_name%%_items WHERE id=(SELECT passed_id FROM passed_id)),
info_id AS (SELECT item_info_id FROM %%site_name%%_items WHERE id=(SELECT passed_id FROM passed_id)),
cte_conversions AS (
    SELECT
        %%site_name%%_conversions.id as conv_id,
        %%site_name%%_conversions.conv_factor as conv_factor,
        units.* as uom
    FROM %%site_name%%_conversions
    LEFT JOIN units ON %%site_name%%_conversions.uom_id = units.id
    WHERE %%site_name%%_conversions.item_id = (SELECT passed_id FROM passed_id)
),
cte_item_info AS (
    SELECT
        %%site_name%%_item_info.*,
        row_to_json(units.*) as uom,
        COALESCE((SELECT json_agg(convs) FROM cte_conversions convs), '[]'::json) AS conversions,
        COALESCE((SELECT json_agg(p.*) FROM %%site_name%%_sku_prefix as p WHERE p.id = ANY(%%site_name%%_item_info.prefixes)), '[]'::json) as prefixes
    FROM %%site_name%%_item_info
    LEFT JOIN units ON %%site_name%%_item_info.uom = units.id
    WHERE %%site_name%%_item_info.id = (SELECT item_info_id FROM info_id)
),
cte_groups AS (
    SELECT
        %%site_name%%_groups.*,
        %%site_name%%_group_items.uuid,
        %%site_name%%_group_items.item_type,
        %%site_name%%_group_items.qty
    FROM %%site_name%%_groups
    JOIN %%site_name%%_group_items ON %%site_name%%_groups.id = %%site_name%%_group_items.gr_id
    WHERE %%site_name%%_group_items.item_id = (SELECT passed_id FROM passed_id)
),
cte_shopping_lists AS (
    SELECT
        %%site_name%%_shopping_lists.*,
        %%site_name%%_shopping_list_items.uuid,
        %%site_name%%_shopping_list_items.item_type,
        %%site_name%%_shopping_list_items.qty
    FROM %%site_name%%_shopping_lists
    JOIN %%site_name%%_shopping_list_items ON %%site_name%%_shopping_lists.id = %%site_name%%_shopping_list_items.sl_id
    WHERE %%site_name%%_shopping_list_items.item_id = (SELECT passed_id FROM passed_id)
),
cte_itemlinks AS (
    SELECT * FROM %%site_name%%_itemlinks WHERE link=(SELECT passed_id FROM passed_id)
),
cte_item_locations AS (
    SELECT * FROM %%site_name%%_item_locations
    LEFT JOIN %%site_name%%_locations ON %%site_name%%_locations.id = %%site_name%%_item_locations.location_id
    WHERE part_id = (SELECT passed_id FROM passed_id)
),
cte_logistics_info AS (
    SELECT
        li.*,
        row_to_json(pl) AS primary_location,
        row_to_json(ail) AS auto_issue_location,
        row_to_json(pz) AS primary_zone,
        row_to_json(aiz) AS auto_issue_zone
    FROM %%site_name%%_logistics_info AS li
    LEFT JOIN %%site_name%%_locations AS pl ON li.primary_location = pl.id
    LEFT JOIN %%site_name%%_locations AS ail ON li.auto_issue_location = ail.id
    LEFT JOIN %%site_name%%_zones AS pz ON li.primary_zone = pz.id
    LEFT JOIN %%site_name%%_zones AS aiz ON li.auto_issue_zone = aiz.id
    WHERE li.id=(SELECT logistics_info_id FROM logistics_id)
)

SELECT
    (SELECT passed_id FROM passed_id) AS passed_id,
    %%site_name%%_items.*,
    (SELECT COALESCE(row_to_json(logis), '{}') FROM cte_logistics_info logis) AS logistics_info,
    row_to_json(%%site_name%%_food_info.*) as food_info,
    row_to_json(%%site_name%%_brands.*) as brand,
    (SELECT COALESCE(row_to_json(ii), '{}') FROM cte_item_info ii) AS item_info,
    (SELECT COALESCE(array_agg(row_to_json(g)), '{}') FROM cte_groups g) AS item_groups,
    (SELECT COALESCE(array_agg(row_to_json(sl)), '{}') FROM cte_shopping_lists sl) AS item_shopping_lists,
    (SELECT COALESCE(array_agg(row_to_json(il)), '{}') FROM cte_itemlinks il) AS linked_items,
    (SELECT COALESCE(array_agg(row_to_json(ils)), '{}') FROM cte_item_locations ils) AS item_locations
FROM %%site_name%%_items
LEFT JOIN %%site_name%%_item_info ON %%site_name%%_items.item_info_id = %%site_name%%_item_info.id
LEFT JOIN %%site_name%%_food_info ON %%site_name%%_items.food_info_id = %%site_name%%_food_info.id
LEFT JOIN %%site_name%%_brands ON %%site_name%%_items.brand = %%site_name%%_brands.id
LEFT JOIN units ON %%site_name%%_item_info.uom = units.id
LEFT JOIN cte_groups ON %%site_name%%_items.id = cte_groups.id
LEFT JOIN cte_shopping_lists ON %%site_name%%_items.id = cte_shopping_lists.id
WHERE %%site_name%%_items.id=(SELECT passed_id FROM passed_id)
GROUP BY
    %%site_name%%_items.id, %%site_name%%_item_info.id, %%site_name%%_food_info.id, %%site_name%%_brands.id;
@ -1,18 +0,0 @@
WITH sum_cte AS (
    SELECT mi.id, SUM(mil.quantity_on_hand)::FLOAT8 AS total_sum
    FROM %%site_name%%_item_locations mil
    JOIN %%site_name%%_items mi ON mil.part_id = mi.id
    GROUP BY mi.id
)

SELECT %%site_name%%_items.*,
    row_to_json(%%site_name%%_item_info.*) as item_info,
    sum_cte.total_sum as total_qoh,
    (SELECT COALESCE(row_to_json(u), '{}') FROM units as u WHERE u.id=%%site_name%%_item_info.uom) as uom
FROM %%site_name%%_items
LEFT JOIN sum_cte ON %%site_name%%_items.id = sum_cte.id
LEFT JOIN %%site_name%%_item_info ON %%site_name%%_items.item_info_id = %%site_name%%_item_info.id
WHERE %%site_name%%_items.search_string LIKE '%%' || %s || '%%'
ORDER BY %%sort_order%%
LIMIT %s OFFSET %s;
@ -1,17 +0,0 @@
WITH passed_id AS (SELECT %s AS passed_id),
cte_receipt_items AS (
    SELECT items.*,
        (SELECT COALESCE(row_to_json(un), '{}') FROM units un WHERE un.id = items.uom LIMIT 1) AS uom
    FROM %%site_name%%_receipt_items items
    WHERE items.receipt_id = (SELECT passed_id FROM passed_id)
)

SELECT (SELECT passed_id FROM passed_id) AS passed_id,
    %%site_name%%_receipts.*,
    logins.username as submitted_by,
    (SELECT COALESCE(array_agg(row_to_json(ris)), '{}') FROM cte_receipt_items ris) AS receipt_items,
    row_to_json(%%site_name%%_vendors.*) as vendor
FROM %%site_name%%_receipts
JOIN logins ON %%site_name%%_receipts.submitted_by = logins.id
LEFT JOIN %%site_name%%_vendors ON %%site_name%%_receipts.vendor_id = %%site_name%%_vendors.id
WHERE %%site_name%%_receipts.id=(SELECT passed_id FROM passed_id)
@ -1,7 +0,0 @@
SELECT %%site_name%%_receipts.*,
    logins.username as submitted_by
FROM %%site_name%%_receipts
JOIN logins ON %%site_name%%_receipts.submitted_by = logins.id
ORDER BY %%site_name%%_receipts.id DESC
LIMIT %s
OFFSET %s;
Some files were not shown because too many files have changed in this diff.