Migrated the linkitem receipt process to new schema

This commit is contained in:
parent 9931a8232f
commit c251ad4977
@@ -39,7 +39,8 @@ def getItemBarcode():
     item_barcode = f"%{str(request.args.get('barcode', 1))}%"
     site_name = session['selected_site']
     record = poe_database.selectItemAllByBarcode(site_name, (item_barcode,))
-    if record == {}:
+    print(record)
+    if record == {} or record == ():
         return jsonify({"item":None, "error":True, "message":"Item either does not exist or there was a larger problem!"})
     else:
         return jsonify({"item":record, "error":False, "message":"item fetched succesfully!"})
@@ -174,19 +174,28 @@ def selectItemAllByID(site, payload, convert=True, conn=None):
 def selectItemAllByBarcode(site, payload, convert=True, conn=None):
     item = ()
     self_conn = False
+
+    if convert:
+        item = {}
+
+    if not conn:
+        database_config = config.config()
+        conn = psycopg2.connect(**database_config)
+        conn.autocommit = True
+        self_conn = True
+
     linked_item = selectLinkedItemByBarcode(site, (payload[0],))
 
     if len(linked_item) > 1:
         item = selectItemAllByID(site, payload=(linked_item['link'], ), convert=convert)
         item['item_info']['uom_quantity'] = linked_item['conv_factor']
+        if self_conn:
+            conn.close()
+        return item
     else:
         with open(f"application/poe/sql/getItemAllByBarcode.sql", "r+") as file:
             getItemAllByBarcode_sql = file.read().replace("%%site_name%%", site)
         try:
-            if not conn:
-                database_config = config.config()
-                conn = psycopg2.connect(**database_config)
-                conn.autocommit = True
-                self_conn = True
-
             with conn.cursor() as cur:
                 cur.execute(getItemAllByBarcode_sql, payload)
@@ -197,12 +206,11 @@ def selectItemAllByBarcode(site, payload, convert=True, conn=None):
                 item = rows
 
             if self_conn:
-                conn.commit()
                 conn.close()
+            return item
         except (Exception, psycopg2.DatabaseError) as error:
             raise postsqldb.DatabaseError(error, payload, getItemAllByBarcode_sql)
-    return item
 
 def insertCostLayersTuple(site, payload, convert=True, conn=None):
     cost_layer = ()
@@ -2,7 +2,6 @@ from flask import Blueprint, request, render_template, redirect, session, url_fo
 import psycopg2, math, datetime, process, database, MyDataclasses
 from config import config
 from user_api import login_required
-import openfoodfacts
 import postsqldb
 import mimetypes, os
 import webpush
@@ -60,6 +59,7 @@ def getVendors():
         return jsonify({"vendors":recordset, "end":math.ceil(count/limit), "error":False, "message":"items fetched succesfully!"})
     return jsonify({"vendors":recordset, "end":math.ceil(count/limit), "error":True, "message":"There was an error with this GET statement"})
 
+# Added to Database
 @receipt_api.route('/api/getLinkedLists', methods=["GET"])
 def getLinkedLists():
     recordset = []
@@ -171,6 +171,7 @@ def saveLine():
         return jsonify({'error': False, "message": "Line Saved Succesfully"})
     return jsonify({'error': True, "message": "Something went wrong while saving line!"})
 
+# Added to Process and database!
 @receipt_api.route('/api/postLinkedItem', methods=["POST"])
 def postLinkedItem():
     if request.method == "POST":
@@ -180,52 +181,14 @@ def postLinkedItem():
 
         site_name = session['selected_site']
         user_id = session['user_id']
-        database_config = config()
-        with psycopg2.connect(**database_config) as conn:
-            receipt_item = postsqldb.ReceiptTable.select_item_tuple(conn, site_name, (receipt_item_id,))
-            # get link list item
-            linked_list = postsqldb.ItemTable.getItemAllByID(conn, site_name, (link_list_id, ))
-            # add item to database
-            if receipt_item['type'] == 'api':
-
-                data = {
-                    'barcode': receipt_item['barcode'],
-                    'name': receipt_item['name'],
-                    'subtype': 'FOOD'
-                }
-                process.postNewBlankItem(conn, site_name, user_id, data)
-
-            name = receipt_item['name']
-            if receipt_item['name'] == "unknown":
-                name = linked_list['item_name']
-            if receipt_item['type'] == "new sku":
-                data = {
-                    'barcode': receipt_item['barcode'],
-                    'name': name,
-                    'subtype': 'FOOD'
-                }
-                process.postNewBlankItem(conn, site_name, user_id, data)
-
-            new_item = postsqldb.ItemTable.getItemAllByBarcode(conn, site_name, (receipt_item['barcode'], ))
-            new_item = postsqldb.ItemTable.update_tuple(conn, site_name, {'id': new_item['id'], 'update':{'row_type': 'link'}})
-
-            # add item to link list
-            item_link = postsqldb.ItemLinksTable.Payload(
-                new_item['barcode'],
-                linked_list['id'],
-                new_item,
-                conv_factor
-            )
-            postsqldb.ItemLinksTable.insert_tuple(conn, site_name, item_link.payload())
-            # update line item with link list name and item_link with link list id
-            payload = {'id': receipt_item['id'], 'update': {
-                'barcode': linked_list['barcode'],
-                'name': linked_list['item_name'],
-                'uom': linked_list['item_info']['uom']['id'],
-                'qty': float(receipt_item['qty']*conv_factor),
-                'type': 'sku'
-            }}
-            postsqldb.ReceiptTable.update_receipt_item(conn, site_name, payload)
-
+        payload = {
+            'receipt_item_id': receipt_item_id,
+            'linked_list_id': link_list_id,
+            'conv_factor': conv_factor
+        }
+        receipts_processes.linkItem(site_name, user_id, payload)
         return jsonify({'error': False, "message": "Line Saved Succesfully"})
     return jsonify({'error': True, "message": "Something went wrong while saving line!"})
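Note: the rewritten route only packs the already-parsed request values into a payload dict and delegates to receipts_processes.linkItem. A minimal client sketch, assuming the three values are read from the POSTed JSON under the same names the route uses (hypothetical host and example values):

    import requests

    body = {
        "receipt_item_id": 12,   # receipt line to be linked
        "link_list_id": 4,       # id of the existing item it links to
        "conv_factor": 6.0       # receipt units per stock unit
    }
    resp = requests.post("http://localhost:5000/api/postLinkedItem", json=body)
    print(resp.json())           # {'error': False, 'message': 'Line Saved Succesfully'}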
@@ -343,6 +306,7 @@ def resolveReceipt():
         return jsonify({'error': False, "message": "Line Saved Succesfully"})
     return jsonify({'error': True, "message": "Something went wrong while saving line!"})
 
+# added to database
 @receipt_api.route('/api/uploadfile/<receipt_id>', methods=["POST"])
 def uploadFile(receipt_id):
     file = request.files['file']
@@ -355,14 +319,11 @@ def uploadFile(receipt_id):
     preview_image = receipts_processes.create_pdf_preview(file_path, output_path)
 
     file_size = os.path.getsize(file_path)
-    database_config = config()
     site_name = session['selected_site']
     username = session['user']['username']
-    with psycopg2.connect(**database_config) as conn:
-        files = postsqldb.ReceiptTable.select_tuple(conn, site_name, (receipt_id, ))['files']
-        files[file.filename.replace(" ", "_")] = {'file_path': file.filename.replace(" ", "_"), 'file_type': file_type, 'file_size': file_size, 'uploaded_by': username, 'preview_image': preview_image}
-        postsqldb.ReceiptTable.update_receipt(conn, site_name, {'id': receipt_id, 'update': {'files': files}})
+    receipt_files = receipts_database.selectReceiptsTuple(site_name, (receipt_id, ))['files']
+    receipt_files[file.filename.replace(" ", "_")] = {'file_path': file.filename.replace(" ", "_"), 'file_type': file_type, 'file_size': file_size, 'uploaded_by': username, 'preview_image': preview_image}
+    receipts_database.updateReceiptsTuple(site_name, {'id': receipt_id, 'update': {'files': receipt_files}})
 
     return jsonify({})
 
 # Does not need to be added to Database
@@ -372,28 +333,22 @@ def getFile(file_name):
     print(path_)
     return send_from_directory(path_, file_name)
 
+# Added to database
 @receipt_api.route('/api/checkAPI', methods=["POST"])
 def checkAPI():
     if request.method == "POST":
         line_id = int(request.get_json()['line_id'])
         barcode = request.get_json()['barcode']
         site_name = session['selected_site']
-        database_config = config()
-        with psycopg2.connect(**database_config) as conn:
-            print(barcode, line_id)
-            api_response, api_data = receipts_processes.get_open_facts(barcode)
-            if api_response:
-                receipt_item = database.__selectTuple(conn, site_name, f"{site_name}_receipt_items", (line_id, ), convert=True)
-                item_data = receipt_item['data']
-                item_data['api_data'] = api_data
-                database.__updateTuple(conn, site_name, f"{site_name}_receipt_items",
-                    {'id': line_id, 'update': {
-                        'type': 'api',
-                        'data': item_data,
-                        'name': api_data['product_name']
-                    }})
-                return jsonify({'error': False, "message": "Line updated for API, Succesfully"})
-            else:
-                return jsonify({'error': True, "message": "Item not in WorldFoodFacts!"})
+        api_response, api_data = receipts_processes.get_open_facts(barcode)
+        if api_response:
+            receipt_item = receipts_database.selectReceiptItemsTuple(site_name, (line_id, ))
+            item_data = receipt_item['data']
+            item_data['api_data'] = api_data
+            payload = {'id': line_id, 'update': {'type': 'api','data': item_data,'name': api_data['product_name']}}
+            receipts_database.updateReceiptItemsTuple(site_name, payload)
+            return jsonify({'error': False, "message": "Line updated for API, Succesfully"})
+        else:
+            return jsonify({'error': True, "message": "Item not in WorldFoodFacts!"})
         return jsonify({'error': False, "message": "Line Saved Succesfully"})
     return jsonify({'error': True, "message": "Something went wrong while saving line!"})
@@ -80,6 +80,76 @@ def getItemsWithQOH(site, payload, convert=True, conn=None):
     except Exception as error:
         raise postsqldb.DatabaseError(error, payload, sql)
 
+def getLinkedItemByBarcode(site, payload, convert=True, conn=None):
+    item = ()
+    self_conn = False
+    sql = f"SELECT * FROM {site}_itemlinks WHERE barcode=%s;"
+    if convert:
+        item = {}
+    try:
+        if not conn:
+            database_config = config.config()
+            conn = psycopg2.connect(**database_config)
+            conn.autocommit = True
+            self_conn = True
+
+        with conn.cursor() as cur:
+            cur.execute(sql, payload)
+            rows = cur.fetchone()
+            if rows and convert:
+                item = postsqldb.tupleDictionaryFactory(cur.description, rows)
+            if rows and not convert:
+                item = rows
+
+        if self_conn:
+            conn.close()
+
+        return item
+    except (Exception, psycopg2.DatabaseError) as error:
+        raise postsqldb.DatabaseError(error, payload, sql)
+
+def getItemAllByBarcode(site, payload, convert=True, conn=None):
+    item = ()
+    self_conn = False
+
+    if not conn:
+        database_config = config.config()
+        conn = psycopg2.connect(**database_config)
+        conn.autocommit = True
+        self_conn = True
+
+    if convert:
+        item = {}
+
+    linked_item = getLinkedItemByBarcode(site, (payload[0],), conn=conn)
+
+    if len(linked_item) > 1:
+        item = getItemAllByID(site, payload=(linked_item['link'], ), convert=convert, conn=conn)
+        item['item_info']['uom_quantity'] = linked_item['conv_factor']
+
+        if self_conn:
+            conn.close()
+
+        return item
+    else:
+        with open(f"application/receipts/sql/getItemAllByBarcode.sql", "r+") as file:
+            getItemAllByBarcode_sql = file.read().replace("%%site_name%%", site)
+        try:
+            with conn.cursor() as cur:
+                cur.execute(getItemAllByBarcode_sql, payload)
+                rows = cur.fetchone()
+                if rows and convert:
+                    item = postsqldb.tupleDictionaryFactory(cur.description, rows)
+                if rows and not convert:
+                    item = rows
+
+            if self_conn:
+                conn.close()
+
+            return item
+        except (Exception, psycopg2.DatabaseError) as error:
+            raise postsqldb.DatabaseError(error, payload, getItemAllByBarcode_sql)
+
 def getItemAllByID(site, payload, convert=True, conn=None):
     item = ()
     self_conn = False
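Note: when convert=True both new helpers hand each fetched row to postsqldb.tupleDictionaryFactory along with cursor.description. A minimal sketch of what such a factory is assumed to do (the real implementation lives in postsqldb and may differ):

    def tupleDictionaryFactory(description, row):
        # description is psycopg2's cursor.description; entry [0] is the column name.
        # Pairs each column name with the corresponding value of one fetched row.
        return {column[0]: value for column, value in zip(description, row)}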
@@ -231,6 +301,33 @@ def paginateLinkedLists(site, payload, convert=True, conn=None):
     except (Exception, psycopg2.DatabaseError) as error:
         raise postsqldb.DatabaseError(error, payload, sql)
 
+def selectReceiptsTuple(site, payload, convert=True, conn=None):
+    selected = ()
+    self_conn = False
+    sql = f"SELECT * FROM {site}_receipts WHERE id=%s;"
+
+    try:
+        if not conn:
+            database_config = config.config()
+            conn = psycopg2.connect(**database_config)
+            conn.autocommit = True
+            self_conn = True
+
+        with conn.cursor() as cur:
+            cur.execute(sql, payload)
+            rows = cur.fetchone()
+            if rows and convert:
+                selected = postsqldb.tupleDictionaryFactory(cur.description, rows)
+            if rows and not convert:
+                selected = rows
+
+        if self_conn:
+            conn.close()
+
+        return selected
+    except (Exception, psycopg2.DatabaseError) as error:
+        raise postsqldb.DatabaseError(error, payload, sql)
+
 def selectReceiptItemsTuple(site, payload, convert=True, conn=None):
     selected = ()
     self_conn = False
@@ -285,6 +382,36 @@ def deleteReceiptItemsTuple(site, payload, convert=True, conn=None):
     except Exception as error:
         raise postsqldb.DatabaseError(error, payload, sql)
 
+def insertItemLinksTuple(site, payload, convert=True, conn=None):
+    """payload (tuple): (barcode[str], link[int], data[jsonb], conv_factor[float]) """
+    link = ()
+    self_conn = False
+    with open(f"application/receipts/sql/insertItemLinksTuple.sql", "r+") as file:
+        sql = file.read().replace("%%site_name%%", site)
+    try:
+        if not conn:
+            database_config = config.config()
+            conn = psycopg2.connect(**database_config)
+            conn.autocommit = True
+            self_conn = True
+
+        with conn.cursor() as cur:
+            cur.execute(sql, payload)
+            rows = cur.fetchone()
+            if rows and convert:
+                link = postsqldb.tupleDictionaryFactory(cur.description, rows)
+            elif rows and not convert:
+                link = rows
+
+        if self_conn:
+            conn.commit()
+            conn.close()
+
+        return link
+    except Exception as error:
+        raise postsqldb.DatabaseError(error, payload, sql)
+
+
 def insertReceiptItemsTuple(site, payload, convert=True, conn=None):
     receipt_item = ()
     self_conn = False
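Note: insertItemLinksTuple takes the positional tuple documented in its docstring. A hedged usage sketch; wrapping the jsonb data column with psycopg2's Json adapter is an assumption here (a plain dict can trigger the "can't adapt type 'dict'" error seen in the log further down), and database_payloads.ItemLinkPayload.payload() may already handle that serialization:

    from psycopg2.extras import Json

    # (barcode[str], link[int], data[jsonb], conv_factor[float])
    payload = ("%041789001314%", 42, Json({"source": "receipt"}), 6.0)
    link_row = insertItemLinksTuple("test", payload)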
@@ -341,6 +468,68 @@ def insertReceiptsTuple(site, payload, convert=True, conn=None):
     except Exception as error:
         raise postsqldb.DatabaseError(error, payload, sql)
 
+def updateItemsTuple(site, payload, convert=True, conn=None):
+    """payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}}"""
+    updated = ()
+    self_conn = False
+    set_clause, values = postsqldb.updateStringFactory(payload['update'])
+    values.append(payload['id'])
+    sql = f"UPDATE {site}_items SET {set_clause} WHERE id=%s RETURNING *;"
+    try:
+        if not conn:
+            database_config = config.config()
+            conn = psycopg2.connect(**database_config)
+            conn.autocommit = True
+            self_conn = True
+
+        with conn.cursor() as cur:
+            cur.execute(sql, values)
+            rows = cur.fetchone()
+            if rows and convert:
+                updated = postsqldb.tupleDictionaryFactory(cur.description, rows)
+            elif rows and not convert:
+                updated = rows
+
+        if self_conn:
+            conn.commit()
+            conn.close()
+
+        return updated
+
+    except Exception as error:
+        raise postsqldb.DatabaseError(error, payload, sql)
+
+def updateReceiptsTuple(site, payload, convert=True, conn=None):
+    """payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}}"""
+    updated = ()
+    self_conn = False
+    set_clause, values = postsqldb.updateStringFactory(payload['update'])
+    values.append(payload['id'])
+    sql = f"UPDATE {site}_receipts SET {set_clause} WHERE id=%s RETURNING *;"
+    try:
+
+        if not conn:
+            database_config = config.config()
+            conn = psycopg2.connect(**database_config)
+            conn.autocommit = True
+            self_conn = True
+
+        with conn.cursor() as cur:
+            cur.execute(sql, values)
+            rows = cur.fetchone()
+            if rows and convert:
+                updated = postsqldb.tupleDictionaryFactory(cur.description, rows)
+            elif rows and not convert:
+                updated = rows
+
+        if self_conn:
+            conn.commit()
+            conn.close()
+
+        return updated
+    except Exception as error:
+        raise postsqldb.DatabaseError(error, payload, sql)
+
 def updateReceiptItemsTuple(site, payload, convert=True, conn=None):
     """_summary_
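Note: both update helpers rely on postsqldb.updateStringFactory to turn the 'update' dict into a SET clause plus an ordered value list, with the row id appended last so it binds to the trailing WHERE id=%s. A rough sketch of the assumed contract (the real helper lives in postsqldb and may differ):

    def updateStringFactory(update):
        # {'row_type': 'link', 'name': 'Tostitos'} ->
        #   set_clause = "row_type=%s, name=%s"
        #   values     = ['link', 'Tostitos']
        set_clause = ", ".join(f"{column}=%s" for column in update)
        return set_clause, list(update.values())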
@@ -2,6 +2,12 @@ import pymupdf
 import os
 import PIL
 import openfoodfacts
+import psycopg2
+
+from application.receipts import receipts_database
+from application import database_payloads
+from application.items.items_processes import postNewBlankItem
+import config
 
 def create_pdf_preview(pdf_path, output_path, size=(600, 400)):
     pdf = pymupdf.open(pdf_path)
@@ -14,6 +20,82 @@ def create_pdf_preview(pdf_path, output_path, size=(600, 400)):
     img.save(output_path)
     return file_name + '.jpg'
 
+def linkItem(site, user_id, data, conn=None):
+    """ this is a higher level function used to process a new item into the system,
+    link it to another item, and update the receipt_item to the new linked item data.
+
+    Args:
+        site (_type_): _description_
+        user_id (_type_): _description_
+        data (_type_): {'receipt_item_id', 'linked_list_id', 'conv_factor'}
+        conn (_type_, optional): Passed Connector. Defaults to None.
+
+    Returns:
+        _type_: _description_
+    """
+    self_conn = False
+    if not conn:
+        database_config = config.config()
+        conn = psycopg2.connect(**database_config)
+        conn.autocommit = False
+        self_conn = True
+
+    # Select receipt item
+    receipt_item = receipts_database.selectReceiptItemsTuple(site, (data['receipt_item_id'],), conn=conn)
+    # select linked item
+    linked_list = receipts_database.getItemAllByID(site, (data['linked_list_id'],), conn=conn)
+
+    if receipt_item['type'] == 'api':
+        new_item_data = {
+            'barcode': receipt_item['barcode'],
+            'name': receipt_item['name'],
+            'subtype': 'FOOD'
+        }
+        postNewBlankItem(site, user_id, new_item_data, conn=conn)
+
+    name = receipt_item['name']
+    if receipt_item['name'] == "unknown":
+        name = linked_list['item_name']
+    if receipt_item['type'] == "new sku":
+        new_item_data = {
+            'barcode': receipt_item['barcode'],
+            'name': name,
+            'subtype': 'FOOD'
+        }
+        postNewBlankItem(site, user_id, new_item_data, conn=conn)
+
+    new_item = receipts_database.getItemAllByBarcode(site, (receipt_item['barcode'], ), conn=conn)
+    new_item = receipts_database.updateItemsTuple(site, {'id': new_item['id'], 'update':{'row_type': 'link'}}, conn=conn)
+
+    item_link = database_payloads.ItemLinkPayload(
+        new_item['barcode'],
+        linked_list['id'],
+        new_item,
+        data['conv_factor']
+    )
+
+    receipts_database.insertItemLinksTuple(site, item_link.payload(), conn=conn)
+
+    payload = {
+        'id': receipt_item['id'],
+        'update': {
+            'barcode': linked_list['barcode'],
+            'name': linked_list['item_name'],
+            'uom': linked_list['item_info']['uom']['id'],
+            'qty': float(receipt_item['qty']*data['conv_factor']),
+            'type': 'sku'
+        }
+    }
+
+    receipts_database.updateReceiptItemsTuple(site, payload, conn=conn)
+
+    if self_conn:
+        conn.commit()
+        conn.close()
+        return False
+
+    return conn
+
 # OPEN FOOD FACTS API INTEGRATION
 open_food_api = openfoodfacts.API(user_agent="MyAwesomeApp/1.0")
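Note: linkItem either manages its own transaction (no conn passed: it connects with autocommit off, commits, closes, and returns False) or runs against a caller-supplied connection and returns it still open so the caller decides when to commit. A hedged caller sketch with example values:

    import psycopg2
    import config
    from application.receipts import receipts_processes

    data = {'receipt_item_id': 12, 'linked_list_id': 4, 'conv_factor': 6.0}

    # Self-managed: linkItem opens, commits, and closes its own connection.
    receipts_processes.linkItem('test', user_id=1, data=data)

    # Caller-managed: the link becomes part of a larger transaction.
    conn = psycopg2.connect(**config.config())
    try:
        receipts_processes.linkItem('test', user_id=1, data=data, conn=conn)
        conn.commit()
    finally:
        conn.close()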
application/receipts/sql/getItemAllByBarcode.sql  (new file, 75 lines)
@@ -0,0 +1,75 @@
+WITH passed_id AS (SELECT id AS passed_id FROM %%site_name%%_items WHERE barcode=%s),
+logistics_id AS (SELECT logistics_info_id FROM %%site_name%%_items WHERE id=(SELECT passed_id FROM passed_id)),
+info_id AS (SELECT item_info_id FROM %%site_name%%_items WHERE id=(SELECT passed_id FROM passed_id)),
+cte_item_info AS (
+    SELECT
+        %%site_name%%_item_info.*,
+        row_to_json(units.*) as uom
+    FROM %%site_name%%_item_info
+    LEFT JOIN units ON %%site_name%%_item_info.uom = units.id
+    WHERE %%site_name%%_item_info.id = (SELECT item_info_id FROM info_id)
+),
+cte_groups AS (
+    SELECT
+        %%site_name%%_groups.*,
+        %%site_name%%_group_items.uuid,
+        %%site_name%%_group_items.item_type,
+        %%site_name%%_group_items.qty
+    FROM %%site_name%%_groups
+    JOIN %%site_name%%_group_items ON %%site_name%%_groups.id = %%site_name%%_group_items.gr_id
+    WHERE %%site_name%%_group_items.item_id = (SELECT passed_id FROM passed_id)
+),
+cte_shopping_lists AS (
+    SELECT
+        %%site_name%%_shopping_lists.*,
+        %%site_name%%_shopping_list_items.uuid,
+        %%site_name%%_shopping_list_items.item_type,
+        %%site_name%%_shopping_list_items.qty
+    FROM %%site_name%%_shopping_lists
+    JOIN %%site_name%%_shopping_list_items ON %%site_name%%_shopping_lists.id = %%site_name%%_shopping_list_items.sl_id
+    WHERE %%site_name%%_shopping_list_items.item_id = (SELECT passed_id FROM passed_id)
+),
+cte_itemlinks AS (
+    SELECT * FROM %%site_name%%_itemlinks WHERE link=(SELECT passed_id FROM passed_id)
+),
+cte_item_locations AS (
+    SELECT * FROM %%site_name%%_item_locations
+    LEFT JOIN %%site_name%%_locations ON %%site_name%%_locations.id = %%site_name%%_item_locations.location_id
+    WHERE part_id = (SELECT passed_id FROM passed_id)
+),
+cte_logistics_info AS (
+    SELECT
+        li.*,
+        row_to_json(pl) AS primary_location,
+        row_to_json(ail) AS auto_issue_location,
+        row_to_json(pz) AS primary_zone,
+        row_to_json(aiz) AS auto_issue_zone
+    FROM %%site_name%%_logistics_info AS li
+    LEFT JOIN %%site_name%%_locations AS pl ON li.primary_location = pl.id
+    LEFT JOIN %%site_name%%_locations AS ail ON li.auto_issue_location = ail.id
+    LEFT JOIN %%site_name%%_zones AS pz ON li.primary_zone = pz.id
+    LEFT JOIN %%site_name%%_zones AS aiz ON li.auto_issue_zone = aiz.id
+    WHERE li.id=(SELECT logistics_info_id FROM logistics_id)
+)
+
+SELECT
+    (SELECT passed_id FROM passed_id) AS passed_id,
+    %%site_name%%_items.*,
+    (SELECT COALESCE(row_to_json(logis), '{}') FROM cte_logistics_info logis) AS logistics_info,
+    row_to_json(%%site_name%%_food_info.*) as food_info,
+    row_to_json(%%site_name%%_brands.*) as brand,
+    (SELECT COALESCE(row_to_json(ii), '{}') FROM cte_item_info ii) AS item_info,
+    (SELECT COALESCE(array_agg(row_to_json(g)), '{}') FROM cte_groups g) AS item_groups,
+    (SELECT COALESCE(array_agg(row_to_json(sl)), '{}') FROM cte_shopping_lists sl) AS item_shopping_lists,
+    (SELECT COALESCE(array_agg(row_to_json(il)), '{}') FROM cte_itemlinks il) AS linked_items,
+    (SELECT COALESCE(array_agg(row_to_json(ils)), '{}') FROM cte_item_locations ils) AS item_locations
+FROM %%site_name%%_items
+LEFT JOIN %%site_name%%_item_info ON %%site_name%%_items.item_info_id = %%site_name%%_item_info.id
+LEFT JOIN %%site_name%%_food_info ON %%site_name%%_items.food_info_id = %%site_name%%_food_info.id
+LEFT JOIN %%site_name%%_brands ON %%site_name%%_items.brand = %%site_name%%_brands.id
+LEFT JOIN units ON %%site_name%%_item_info.uom = units.id
+LEFT JOIN cte_groups ON %%site_name%%_items.id = cte_groups.id
+LEFT JOIN cte_shopping_lists ON %%site_name%%_items.id = cte_shopping_lists.id
+WHERE %%site_name%%_items.id=(SELECT passed_id FROM passed_id)
+GROUP BY
+    %%site_name%%_items.id, %%site_name%%_item_info.id, %%site_name%%_food_info.id, %%site_name%%_brands.id;
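Note: the %%site_name%% tokens are plain-text placeholders, not SQL parameters; receipts_database.getItemAllByBarcode substitutes them before execution and binds only the barcode through psycopg2. A condensed sketch of that flow, assuming site "test":

    with open("application/receipts/sql/getItemAllByBarcode.sql") as f:
        sql = f.read().replace("%%site_name%%", "test")   # tables become test_items, test_itemlinks, ...
    with conn.cursor() as cur:
        cur.execute(sql, ("%041789001314%",))             # the single %s receives the barcode pattern
        row = cur.fetchone()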
application/receipts/sql/insertItemLinksTuple.sql  (new file, 4 lines)
@@ -0,0 +1,4 @@
+INSERT INTO %%site_name%%_itemlinks
+    (barcode, link, data, conv_factor)
+VALUES (%s, %s, %s, %s)
+RETURNING *;
@@ -1971,3 +1971,12 @@
 2025-07-12 15:45:15.171378 --- ERROR --- DatabaseError(message='can't adapt type 'dict'',
 payload=('sku', 26, '%028400517829%', 'Tostitos', 1, {'id': 1, 'plural': 'pinches', 'single': ' pinch', 'fullname': ' Pinch', 'description': ' Less than 1/8 teaspoon.'}, '{"cost": 0, "expires": false}', 'Unresolved'),
 sql='INSERT INTO test_receipt_items(type, receipt_id, barcode, name, qty, uom, data, status) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) RETURNING *;')
+2025-08-02 09:05:32.081714 --- ERROR --- DatabaseError(message='operator does not exist: integer = textLINE 6: WHERE items.receipt_id = (SELECT passed_id FROM ... ^HINT: No operator matches the given name and argument types. You might need to add explicit type casts.',
+payload=('25',),
+sql='WITH passed_id AS (SELECT %s AS passed_id), cte_receipt_items AS ( SELECT items.* , (SELECT COALESCE(row_to_json(un), '{}') FROM units un WHERE un.id = items.uom LIMIT 1) AS uom FROM test_receipt_items items WHERE items.receipt_id = (SELECT passed_id FROM passed_id) )SELECT (SELECT passed_id FROM passed_id) AS passed_id, test_receipts.*, logins.username as submitted_by, (SELECT COALESCE(array_agg(row_to_json(ris)), '{}') FROM cte_receipt_items ris) AS receipt_items, row_to_json(test_vendors.*) as vendorFROM test_receiptsJOIN logins ON test_receipts.submitted_by = logins.idLEFT JOIN test_vendors ON test_receipts.vendor_id = test_vendors.id WHERE test_receipts.id=(SELECT passed_id FROM passed_id)')
+2025-08-02 09:06:55.422936 --- ERROR --- DatabaseError(message='operator does not exist: integer = textLINE 6: WHERE items.receipt_id = (SELECT passed_id FROM ... ^HINT: No operator matches the given name and argument types. You might need to add explicit type casts.',
+payload=('25',),
+sql='WITH passed_id AS (SELECT %s AS passed_id), cte_receipt_items AS ( SELECT items.* , (SELECT COALESCE(row_to_json(un), '{}') FROM units un WHERE un.id = items.uom LIMIT 1) AS uom FROM test_receipt_items items WHERE items.receipt_id = (SELECT passed_id FROM passed_id) )SELECT (SELECT passed_id FROM passed_id) AS passed_id, test_receipts.*, logins.username as submitted_by, (SELECT COALESCE(array_agg(row_to_json(ris)), '{}') FROM cte_receipt_items ris) AS receipt_items, row_to_json(test_vendors.*) as vendorFROM test_receiptsJOIN logins ON test_receipts.submitted_by = logins.idLEFT JOIN test_vendors ON test_receipts.vendor_id = test_vendors.id WHERE test_receipts.id=(SELECT passed_id FROM passed_id)')
+2025-08-02 10:08:28.632959 --- ERROR --- DatabaseError(message='duplicate key value violates unique constraint "test_logistics_info_barcode_key"DETAIL: Key (barcode)=(%041789001314%) already exists.',
+payload=('%041789001314%', 1, 1, 1, 1),
+sql='INSERT INTO test_logistics_info(barcode, primary_location, primary_zone, auto_issue_location, auto_issue_zone) VALUES (%s, %s, %s, %s, %s) RETURNING *;')
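Note: the two "operator does not exist: integer = text" entries come from binding the receipt id as the string '25' into a query that compares it against an integer column; the hint about explicit casts can be satisfied either with a cast in the SQL or by coercing the value before it enters the payload tuple, e.g. (an assumed call-site fix):

    payload = (int(receipt_id),)   # rather than (receipt_id,) when receipt_id arrives as the string '25'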
(binary file not shown; 2.3 MiB)
static/files/receipts/chibi_character_silver_eyes.png  (new binary file, not shown; 3.1 MiB)