Began Migration of Receipts Module to new Schema

This commit is contained in:
Jadowyne Ulve 2025-07-12 15:49:23 -05:00
parent f127680164
commit ee2bdda226
30 changed files with 406 additions and 1114 deletions
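The route changes recorded below move the receipts endpoints from ad-hoc paths such as /receipts/getItems and /receipt/getVendors to /api/... routes declared inside the blueprint, and the front-end handlers switch from /external/... and /receipts/... to /items/... and /receipts/api/.... That only works if the blueprints are mounted under URL prefixes when the app is assembled. A minimal registration sketch, assuming a conventional Flask app factory; the module paths and prefix strings are inferred from the new client-side URLs and are not shown in this commit:

# Hypothetical wiring; module paths and prefixes are assumptions.
from flask import Flask
from application.receipts.receipts import receipt_api   # assumed module path
from application.items.items import items_api           # assumed module path

def create_app():
    app = Flask(__name__)
    # receipt_api now declares its own template_folder/static_folder, so the
    # application only has to supply the mount point.
    app.register_blueprint(receipt_api, url_prefix="/receipts")
    app.register_blueprint(items_api, url_prefix="/items")
    return app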

api.py

@ -14,54 +14,6 @@ def changeSite():
session['selected_site'] = site
return jsonify({'error': False, 'message': 'Site Changed!'})
@database_api.route("/getGroups")
def paginate_groups():
page = int(request.args.get('page', 1))
limit = int(request.args.get('limit', 10))
site_name = session['selected_site']
offset = (page - 1) * limit
groups = []
count = 0
database_config = config()
with psycopg2.connect(**database_config) as conn:
try:
with conn.cursor() as cur:
sql = f"SELECT * FROM {site_name}_groups LIMIT %s OFFSET %s;"
count = f"SELECT COUNT(*) FROM {site_name}_groups"
cur.execute(sql, (limit, offset))
groups = cur.fetchall()
cur.execute(count)
count = cur.fetchone()[0]
sql_item = f"SELECT {site_name}_items.barcode, {site_name}_items.item_name, {site_name}_logistics_info.quantity_on_hand FROM {site_name}_items LEFT JOIN {site_name}_logistics_info ON {site_name}_items.logistics_info_id = {site_name}_logistics_info.id WHERE {site_name}_items.id = %s; "
new_groups = []
for group in groups:
qty = 0
group = list(group)
items = []
print(group[3])
for item_id in group[3]:
cur.execute(sql_item, (item_id,))
item_row = list(cur.fetchone())
cur.execute(f"SELECT quantity_on_hand FROM {site_name}_item_locations WHERE part_id=%s;", (item_id, ))
item_locations = cur.fetchall()[0]
qty += float(sum(item_locations))
item_row[2] = sum(item_locations)
items.append(item_row)
group[3] = items
group.append(qty)
new_groups.append(group)
except (Exception, psycopg2.DatabaseError) as error:
print(error)
return jsonify({'groups': new_groups, "end": math.ceil(count/limit)})
@database_api.route("/getVendors") @database_api.route("/getVendors")
def get_vendors(): def get_vendors():
database_config = config() database_config = config()
@ -76,297 +28,4 @@ def get_vendors():
except (Exception, psycopg2.DatabaseError) as error:
print(error)
return jsonify(vendors=vendors)
@database_api.route("/addGroup")
def addGroup():
name = str(request.args.get('name', ""))
description = str(request.args.get('description', ""))
group_type = str(request.args.get('type', ""))
site_name = session['selected_site']
state = "FAILED"
database_config = config()
with psycopg2.connect(**database_config) as conn:
try:
with conn.cursor() as cur:
sql = f"INSERT INTO {site_name}_groups (name, description, included_items, group_type) VALUES (%s, %s, %s, %s);"
cur.execute(sql, (name, description, json.dumps({}), group_type))
state = "SUCCESS"
conn.commit()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
conn.rollback()
return jsonify({'state': state})
@database_api.route("/getGroup")
def get_group():
id = int(request.args.get('id', 1))
database_config = config()
site_name = session['selected_site']
group = []
with psycopg2.connect(**database_config) as conn:
try:
with conn.cursor() as cur:
sql = f"SELECT * FROM {site_name}_groups WHERE id=%s;"
cur.execute(sql, (id, ))
group = list(cur.fetchone())
sql_item = f"SELECT {site_name}_items.id, {site_name}_items.barcode, {site_name}_items.item_name, {site_name}_logistics_info.quantity_on_hand FROM {site_name}_items LEFT JOIN {site_name}_logistics_info ON {site_name}_items.logistics_info_id = {site_name}_logistics_info.id WHERE {site_name}_items.id = %s;"
qty = 0
group = list(group)
items = []
print(group[3])
for item_id in group[3]:
cur.execute(sql_item, (item_id,))
item_row = cur.fetchone()
qty += float(item_row[3])
items.append(item_row)
group[3] = items
group.append(qty)
except (Exception, psycopg2.DatabaseError) as error:
print(error)
return jsonify(group=group)
@database_api.route("/updateGroup", methods=["POST"])
def update_group():
if request.method == "POST":
site_name = session['selected_site']
group_id = request.get_json()['id']
items = request.get_json()['items']
name = request.get_json()['name']
description = request.get_json()['description']
group_type = request.get_json()['group_type']
data = (name, description, items, group_type, group_id)
database_config = config()
with psycopg2.connect(**database_config) as conn:
try:
with conn.cursor() as cur:
# Start by updating the group -> included items with the up to date list
sql = f"UPDATE {site_name}_groups SET name = %s, description = %s, included_items = %s, group_type = %s WHERE id=%s;"
cur.execute(sql, data)
update_item_sql = f"UPDATE {site_name}_item_info SET groups = %s WHERE id = %s;"
select_item_sql = f"SELECT {site_name}_item_info.id, {site_name}_item_info.groups FROM {site_name}_items LEFT JOIN {site_name}_item_info ON {site_name}_items.item_info_id = {site_name}_item_info.id WHERE {site_name}_items.id = %s;"
# Now we will fetch each item row one by one and check if the group id is already inside of its groups array
for item_id in items:
cur.execute(select_item_sql, (item_id, ))
item = cur.fetchone()
print(item)
item_groups: set = set(item[1])
# Condition check, adds it if it doesnt exist.
if group_id not in item_groups:
item_groups.add(group_id)
cur.execute(update_item_sql, (list(item_groups), item[0]))
# Now we fetch all items that have the group id in its groups array
fetch_items_with_group = f"SELECT {site_name}_items.id, groups, {site_name}_item_info.id FROM {site_name}_item_info LEFT JOIN {site_name}_items ON {site_name}_items.item_info_id = {site_name}_item_info.id WHERE groups @> ARRAY[%s];"
cur.execute(fetch_items_with_group, (group_id, ))
group_items = cur.fetchall()
print(items)
# We will then check each item id against the groups new included_items list to see if the item should be in there
for item_id, group, info_id in group_items:
# If it is not we remove the group form the items list and update the item
if item_id not in items:
groups: list = list(group)
groups.remove(group_id)
cur.execute(update_item_sql, (list(groups), info_id))
conn.commit()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
conn.rollback()
return jsonify({"state": "SUCCESS"})
return jsonify({"state": "FAILED"})
@database_api.route("/addList")
def addList():
name = str(request.args.get('name', ""))
description = str(request.args.get('description', ""))
list_type = str(request.args.get('type', ""))
site_name = session['selected_site']
print(name, description, list_type)
state = "FAILED"
#if name or description or group_type == "":
# print("this is empty")
# return jsonify({'state': state})
timestamp = datetime.datetime.now()
data = (name, description, [], json.dumps({}), [], [], 0, timestamp, list_type)
database_config = config()
with psycopg2.connect(**database_config) as conn:
try:
with conn.cursor() as cur:
sql = f"INSERT INTO {site_name}_shopping_lists (name, description, pantry_items, custom_items, recipes, groups, author, creation_date, type) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s);"
cur.execute(sql, data)
state = "SUCCESS"
conn.commit()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
conn.rollback()
return jsonify({'state': state})
@database_api.route("/getLists")
def paginate_lists():
page = int(request.args.get('page', 1))
limit = int(request.args.get('limit', 10))
site_name = session['selected_site']
offset = (page - 1) * limit
lists = []
count = 0
database_config = config()
with psycopg2.connect(**database_config) as conn:
try:
with conn.cursor() as cur:
sql = f"SELECT * FROM {site_name}_shopping_lists LIMIT %s OFFSET %s;"
count = f"SELECT COUNT(*) FROM {site_name}_shopping_lists;"
cur.execute(sql, (limit, offset))
temp_lists = list(cur.fetchall())
cur.execute(count)
count = cur.fetchone()[0]
for shopping_list in temp_lists:
shopping_list: list = list(shopping_list)
pantry_items = shopping_list[3]
custom_items = shopping_list[4]
list_length = len(custom_items)
sqlfile = open(f"sites/{site_name}/sql/unique/shopping_lists_safetystock_count.sql", "r+")
sql = "\n".join(sqlfile.readlines())
sqlfile.close()
print(sql)
if shopping_list[10] == 'calculated':
print(shopping_list[0])
cur.execute(sql, (shopping_list[0], ))
list_length += cur.fetchone()[0]
else:
list_length += len(pantry_items)
shopping_list.append(list_length)
lists.append(shopping_list)
except (Exception, psycopg2.DatabaseError) as error:
print(error)
return jsonify({'lists': lists, 'end': math.ceil(count/limit)})
@database_api.route("/getListView")
def get_list_view():
id = int(request.args.get('id', 1))
site_name = session['selected_site']
shopping_list = []
database_config = config()
with psycopg2.connect(**database_config) as conn:
try:
with conn.cursor() as cur:
sql = f"SELECT * FROM {site_name}_shopping_lists WHERE id=%s;"
cur.execute(sql, (id, ))
shopping_list = list(cur.fetchone())
if shopping_list[10] == "calculated":
sqlfile = open(f"sites/{site_name}/sql/unique/shopping_lists_safetystock.sql", "r+")
sql = "\n".join(sqlfile.readlines())
sqlfile.close()
else:
sqlfile = open(f"sites/{site_name}/sql/unique/shopping_lists_safetystock_uncalculated.sql", "r+")
sql = "\n".join(sqlfile.readlines())
sqlfile.close()
cur.execute(sql, (id, ))
shopping_list[3] = list(cur.fetchall())
print(shopping_list[4])
except (Exception, psycopg2.DatabaseError) as error:
print(error)
return jsonify(shopping_list=shopping_list)
@database_api.route("/getList")
def get_list():
id = int(request.args.get('id', 1))
database_config = config()
site_name = session['selected_site']
shopping_list = []
with psycopg2.connect(**database_config) as conn:
try:
with conn.cursor() as cur:
sql = f"SELECT * FROM {site_name}_shopping_lists WHERE id=%s;"
cur.execute(sql, (id, ))
shopping_list = list(cur.fetchone())
itemSQL = f"SELECT {site_name}_items.id, {site_name}_items.barcode, {site_name}_items.item_name, {site_name}_items.links, {site_name}_item_info.uom FROM {site_name}_items LEFT JOIN {site_name}_item_info ON {site_name}_items.item_info_id = {site_name}_item_info.id WHERE {site_name}_item_info.shopping_lists @> ARRAY[%s];"
cur.execute(itemSQL, (id, ))
shopping_list[3] = list(cur.fetchall())
print(shopping_list)
except (Exception, psycopg2.DatabaseError) as error:
print(error)
return jsonify(shopping_list=shopping_list)
@database_api.route("/updateList", methods=["POST"])
def update_list():
if request.method == "POST":
site_name = session['selected_site']
list_id = request.get_json()['id']
items = request.get_json()['items']
print(items)
custom_items = request.get_json()['custom']
name = request.get_json()['name']
description = request.get_json()['description']
list_type = request.get_json()['list_type']
quantities = request.get_json()['quantities']
data = (name, description, items, json.dumps(custom_items), list_type, json.dumps(quantities), list_id)
database_config = config()
with psycopg2.connect(**database_config) as conn:
try:
with conn.cursor() as cur:
# Start by updating the group -> included items with the up to date list
sql = f"UPDATE {site_name}_shopping_lists SET name = %s, description = %s, pantry_items = %s, custom_items = %s, type = %s, quantities = %s WHERE id=%s;"
cur.execute(sql, data)
update_item_sql = f"UPDATE {site_name}_item_info SET shopping_lists = %s WHERE id = %s;"
select_item_sql = f"SELECT {site_name}_item_info.id, {site_name}_item_info.shopping_lists FROM {site_name}_items LEFT JOIN {site_name}_item_info ON {site_name}_items.item_info_id = {site_name}_item_info.id WHERE {site_name}_items.id = %s;"
# Now we will fetch each item row one by one and check if the group id is already inside of its groups array
for item_id in items:
cur.execute(select_item_sql, (item_id, ))
item = cur.fetchone()
print(item)
shopping_lists: set = set(item[1])
# Condition check, adds it if it doesnt exist.
if list_id not in shopping_lists:
shopping_lists.add(list_id)
cur.execute(update_item_sql, (list(shopping_lists), item[0]))
# Now we fetch all items that have the group id in its groups array
fetch_items_with_list = f"SELECT {site_name}_items.id, {site_name}_item_info.shopping_lists, {site_name}_item_info.id FROM {site_name}_item_info LEFT JOIN {site_name}_items ON {site_name}_items.item_info_id = {site_name}_item_info.id WHERE {site_name}_item_info.shopping_lists @> ARRAY[%s];"
cur.execute(fetch_items_with_list, (list_id, ))
list_items = cur.fetchall()
print(items)
# We will then check each item id against the groups new included_items list to see if the item should be in there
for item_id, shopping_list, info_id in list_items:
# If it is not we remove the group form the items list and update the item
if item_id not in items:
shopping_lists: list = list(shopping_list)
shopping_lists.remove(list_id)
cur.execute(update_item_sql, (list(shopping_lists), info_id))
conn.commit()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
conn.rollback()
return jsonify({"state": "SUCCESS"})
return jsonify({"state": "FAILED"})


@ -208,6 +208,38 @@ def getZone(site:str, payload:tuple, convert:bool=True):
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)
def getItemLocations(site, payload, convert=True, conn=None):
    locations = []
    count = 0
    self_conn = False
    with open(f"application/items/sql/getItemLocations.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True
        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchall()
            if rows and convert:
                locations = [postsqldb.tupleDictionaryFactory(cur.description, row) for row in rows]
            if rows and not convert:
                locations = rows
            cur.execute(f"SELECT COUNT(*) FROM {site}_item_locations WHERE part_id=%s;", (payload[0],))
            count = cur.fetchone()[0]
        if self_conn:
            conn.close()
        return locations, count
    except (Exception, psycopg2.DatabaseError) as error:
        raise postsqldb.DatabaseError(error, payload, sql)
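A minimal usage sketch of the helper above; the payload order matches the placeholders in getItemLocations.sql (part_id, LIMIT, OFFSET), and the site prefix and item id here are only illustrative:

# Illustrative only: "mysite" is a placeholder site prefix, 42 a sample item id.
page, limit = 1, 10
locations, count = getItemLocations("mysite", (42, limit, (page - 1) * limit))
print(f"{len(locations)} rows on this page out of {count} total")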
def getItemInfoTuple(site:str, payload:tuple, convert=True):
    """_summary_


@ -896,4 +896,30 @@ def postNewItemLocation():
        item_location = dbPayloads.ItemLocationPayload(item_id, location_id)
        database_items.insertItemLocationsTuple(site_name, item_location.payload())
        return jsonify(error=False, message="Location was added successfully")
    return jsonify(error=True, message="Unable to save this location, ERROR!")
@items_api.route("/getItemLocations", methods=["GET"])
def getItemLocations():
recordset = []
count = 0
if request.method == "GET":
item_id = int(request.args.get('id', 1))
page = int(request.args.get('page', 1))
limit = int(request.args.get('limit', 10))
site_name = session['selected_site']
offset = (page - 1) * limit
recordset, count = database_items.getItemLocations(site_name, (item_id, limit, offset))
return jsonify({"locations":recordset, "end":math.ceil(count/limit), "error":False, "message":"item fetched succesfully!"})
return jsonify({"locations":recordset, "end": math.ceil(count/limit), "error":True, "message":"There was an error with this GET statement"})
@items_api.route('/postTransaction', methods=["POST"])
def post_transaction():
if request.method == "POST":
result = items_processes.postAdjustment(
site_name=session['selected_site'],
user_id=session['user_id'],
data=dict(request.json)
)
return jsonify(result)
return jsonify({"error":True, "message":"There was an error with this POST statement"})


@ -0,0 +1,5 @@
SELECT * FROM %%site_name%%_item_locations
LEFT JOIN %%site_name%%_locations ON %%site_name%%_locations.id = %%site_name%%_item_locations.location_id
WHERE part_id = %s
LIMIT %s
OFFSET %s;


@ -166,7 +166,7 @@ async function replenishItemLocationsTable(locations) {
let locations_limit = 10;
async function getItemLocations() {
console.log("getting Locations")
-const url = new URL('/external/getItemLocations', window.location.origin);
+const url = new URL('/items/getItemLocations', window.location.origin);
url.searchParams.append('page', pagination_current);
url.searchParams.append('limit', locations_limit);
url.searchParams.append('id', item.id);
@ -182,7 +182,7 @@ async function getItemLocations() {
let items_limit = 50;
async function getItems() {
console.log("getting items")
-const url = new URL('/external/getModalItems', window.location.origin);
+const url = new URL('/items/getModalItems', window.location.origin);
url.searchParams.append('page', pagination_current);
url.searchParams.append('limit', items_limit);
url.searchParams.append('search_string', search_string)
@ -195,7 +195,7 @@ async function getItems() {
async function getItem(id) {
console.log(`selected item: ${id}`)
-const url = new URL('/external/getItem', window.location.origin);
+const url = new URL('/items/getItem', window.location.origin);
url.searchParams.append('id', id);
const response = await fetch(url);
data = await response.json();
@ -267,7 +267,7 @@ async function submitTransaction() {
let validated = await validateTransaction()
if (validated){
let cost = parseFloat(document.getElementById('transaction_cost').value.replace(/[^0-9.-]+/g, ""));
-const response = await fetch(`/external/postTransaction`, {
+const response = await fetch(`/items/postTransaction`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@ -408,278 +408,15 @@ async function updatePaginationElement(elementID) {
paginationElement.append(nextElement)
}
var scannedItems = Array();
const queueLimit = 5; // 49 should be default
async function addToQueue(event) {
if (event.key == "Enter"){
let data = await getItemBarcode(document.getElementById('barcode-scan').value)
let scannedItem = data.item
if(data.error){
UIkit.notification({
message: data.message,
status: "danger",
pos: 'top-right',
timeout: 5000
});
}
if(scannedItems.length > queueLimit){
scannedItems.shift()
}
if(!Array.isArray(scannedItem) && !data.error){
let status = await submitScanTransaction(scannedItem)
scannedItems.push({'item': scannedItem, 'type': `${document.getElementById('scan_trans_type').value}`, 'error': status})
document.getElementById('barcode-scan').value = ""
}
}
await replenishScanTable()
}
async function getItemBarcode(barcode) {
console.log(`selected item: ${barcode}`)
-const url = new URL('/external/getItem/barcode', window.location.origin);
+const url = new URL('/items/getItem/barcode', window.location.origin);
url.searchParams.append('barcode', barcode);
const response = await fetch(url);
data = await response.json();
return data;
}
async function submitScanTransaction(scannedItem) {
/// I need to find the location that matches the items auto issue location id
let trans_type = document.getElementById('scan_trans_type').value
let scan_transaction_item_location_id = 0
let comparator = 0
if (trans_type === "Adjust In"){
comparator = scannedItem.logistics_info.primary_location.id
} else if (trans_type === "Adjust Out"){
comparator = scannedItem.logistics_info.auto_issue_location.id
}
for (let i = 0; i < scannedItem.item_locations.length; i++){
if (scannedItem.item_locations[i].location_id === comparator){
scan_transaction_item_location_id = scannedItem.item_locations[i].id
}
}
const response = await fetch(`/external/postTransaction`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
item_id: scannedItem.id,
logistics_info_id: scannedItem.logistics_info_id,
barcode: scannedItem.barcode,
item_name: scannedItem.item_name,
transaction_type: document.getElementById('scan_trans_type').value,
quantity: scannedItem.item_info.uom_quantity,
description: "",
cost: parseFloat(scannedItem.item_info.cost),
vendor: 0,
expires: null,
location_id: scan_transaction_item_location_id
}),
});
data = await response.json();
transaction_status = "success"
if (data.error){
transaction_status = "danger"
}
UIkit.notification({
message: data.message,
status: transaction_status,
pos: 'top-right',
timeout: 5000
});
return data.error
}
async function replenishScanTable() {
let scanTableBody = document.getElementById("scanTableBody")
scanTableBody.innerHTML = ""
let reversedScannedItems = scannedItems.slice().reverse()
for(let i = 0; i < reversedScannedItems.length; i++){
let tableRow = document.createElement('tr')
let icon = `<span uk-icon="check"></span>`
if(reversedScannedItems[i].error){
icon = `<span uk-icon="warning"></span>`
}
let statusCell = document.createElement('td')
statusCell.innerHTML = icon
let barcodeCell = document.createElement('td')
barcodeCell.innerHTML = reversedScannedItems[i].item.barcode
let nameCell = document.createElement('td')
nameCell.innerHTML = reversedScannedItems[i].item.item_name
let typeCell = document.createElement('td')
typeCell.innerHTML = reversedScannedItems[i].type
let locationCell = document.createElement('td')
if (reversedScannedItems[i].type === "Adjust In"){
locationCell.innerHTML = reversedScannedItems[i].item.logistics_info.primary_location.uuid
} else {
locationCell.innerHTML = reversedScannedItems[i].item.logistics_info.auto_issue_location.uuid
}
tableRow.append(statusCell, barcodeCell, nameCell, typeCell, locationCell)
scanTableBody.append(tableRow)
}
}
async function submitScanReceipt(items) {
const response = await fetch(`/external/postReceipt`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
items: items
}),
});
data = await response.json();
transaction_status = "success"
if (data.error){
transaction_status = "danger"
}
UIkit.notification({
message: data.message,
status: transaction_status,
pos: 'top-right',
timeout: 5000
});
return data.error
}
var openedReceipt = false
async function startReceipt() {
openedReceipt = true
document.getElementById('barcode-input').classList.remove('uk-disabled')
document.getElementById('barcode-table').classList.remove('uk-disabled')
document.getElementById('receiptStart').classList.add('uk-disabled')
document.getElementById('receiptComplete').classList.remove('uk-disabled')
document.getElementById('receiptClose').classList.remove('uk-disabled')
}
async function completeReceipt() {
openedReceipt = false
document.getElementById('barcode-input').classList.add('uk-disabled')
document.getElementById('barcode-table').classList.add('uk-disabled')
document.getElementById('receiptStart').classList.remove('uk-disabled')
document.getElementById('receiptComplete').classList.add('uk-disabled')
document.getElementById('receiptClose').classList.add('uk-disabled')
await submitScanReceipt(scannedReceiptItems)
let scanReceiptTableBody = document.getElementById("scanReceiptTableBody")
scanReceiptTableBody.innerHTML = ""
scannedReceiptItems = Array()
}
async function closeReceipt(){
openedReceipt = false
document.getElementById('barcode-input').classList.add('uk-disabled')
document.getElementById('barcode-table').classList.add('uk-disabled')
document.getElementById('receiptStart').classList.remove('uk-disabled')
document.getElementById('receiptComplete').classList.add('uk-disabled')
document.getElementById('receiptClose').classList.add('uk-disabled')
let scanReceiptTableBody = document.getElementById("scanReceiptTableBody")
scanReceiptTableBody.innerHTML = ""
scannedReceiptItems = Array()
}
var scannedReceiptItems = Array();
async function addToReceipt(event) {
if (event.key == "Enter"){
let barcode = document.getElementById('barcode-scan-receipt').value
let data = await getItemBarcode(barcode)
let scannedItem = data.item
if(scannedItem){
let expires = scannedItem.food_info.expires
console.log(expires)
if(scannedItem.food_info.expires){
let today = new Date();
today.setDate(today.getDate() + Number(scannedItem.food_info.default_expiration))
expires = today.toISOString().split('T')[0];
}
scannedReceiptItems.push({item: {
barcode: scannedItem.barcode,
item_name: scannedItem.item_name,
qty: scannedItem.item_info.uom_quantity,
uom: scannedItem.item_info.uom.id,
data: {cost: scannedItem.item_info.cost, expires: expires}
}, type: 'sku'})
document.getElementById('barcode-scan-receipt').value = ""
} else {
scannedReceiptItems.push({item: {
barcode: `%${barcode}%`,
item_name: "unknown",
qty: 1,
uom: 1,
data: {'cost': 0.00, 'expires': false}
}, type: 'new sku'})
document.getElementById('barcode-scan-receipt').value = ""
}
}
await replenishScannedReceiptTable(scannedReceiptItems)
}
async function replenishScannedReceiptTable(items) {
let scanReceiptTableBody = document.getElementById("scanReceiptTableBody")
scanReceiptTableBody.innerHTML = ""
for(let i = 0; i < items.length; i++){
let tableRow = document.createElement('tr')
let typeCell = document.createElement('td')
typeCell.innerHTML = items[i].type
let barcodeCell = document.createElement('td')
barcodeCell.innerHTML = items[i].item.barcode
let nameCell = document.createElement('td')
nameCell.innerHTML = items[i].item.item_name
let operationsCell = document.createElement('td')
let editOp = document.createElement('a')
editOp.style = "margin-right: 5px;"
editOp.setAttribute('class', 'uk-button uk-button-small uk-button-default')
editOp.setAttribute('uk-icon', 'icon: pencil')
editOp.onclick = async function () {
await openLineEditModal(i, items[i])
}
let deleteOp = document.createElement('a')
deleteOp.setAttribute('class', 'uk-button uk-button-small uk-button-default')
deleteOp.setAttribute('uk-icon', 'icon: trash')
deleteOp.onclick = async function() {
scannedReceiptItems.splice(i, 1)
await replenishScannedReceiptTable(scannedReceiptItems)
}
operationsCell.append(editOp, deleteOp)
operationsCell.classList.add("uk-flex")
operationsCell.classList.add("uk-flex-right")
tableRow.append(typeCell, barcodeCell, nameCell, operationsCell)
scanReceiptTableBody.append(tableRow)
}
}
async function openLineEditModal(ind, line_data) {
console.log(line_data)


@ -96,7 +96,7 @@ async function replenishTransactionsTable(transactions) {
}
async function getItem(id) {
-const url = new URL('/external/getItem', window.location.origin);
+const url = new URL('/items/getItem', window.location.origin);
url.searchParams.append('id', id);
const response = await fetch(url);
data = await response.json();


@ -175,7 +175,6 @@ def selectItemAllByBarcode(site, payload, convert=True, conn=None):
    item = ()
    self_conn = False
    linked_item = selectLinkedItemByBarcode(site, (payload[0],))
    if len(linked_item) > 1:
        item = selectItemAllByID(site, payload=(linked_item['link'], ), convert=convert)
        item['item_info']['uom_quantity'] = linked_item['conv_factor']

View File

@ -61,6 +61,31 @@ def get_sites(sites=[]):
    return False
def get_units_of_measure(convert=True, conn=None):
    records = ()
    self_conn = False
    sql = f"SELECT * FROM units;"
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True
        with conn.cursor() as cur:
            cur.execute(sql)
            rows = cur.fetchall()
            if rows and convert:
                records = [tupleDictionaryFactory(cur.description, row) for row in rows]
            elif rows and not convert:
                records = rows
        if self_conn:
            conn.close()
        return records
    except Exception as error:
        raise DatabaseError(error, "", sql)
class ConversionsTable:
    @dataclass
    class Payload:


@ -1,44 +1,37 @@
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response, current_app, send_from_directory
-import psycopg2, math, json, datetime, main, copy, requests, process, database, pprint, MyDataclasses
+import psycopg2, math, datetime, process, database, MyDataclasses
-from config import config, sites_config
+from config import config
from user_api import login_required
import openfoodfacts
import postsqldb
import mimetypes, os
-import pymupdf, PIL
import webpush
+from application import postsqldb, database_payloads
+from application.receipts import receipts_processes, receipts_database
-def create_pdf_preview(pdf_path, output_path, size=(600, 400)):
-    pdf = pymupdf.open(pdf_path)
-    page = pdf[0]
-    file_name = os.path.basename(pdf_path).replace('.pdf', "")
-    pix = page.get_pixmap()
-    img = PIL.Image.frombytes("RGB", (pix.width, pix.height), pix.samples)
-    output_path = output_path + file_name + '.jpg'
-    img.thumbnail(size)
-    img.save(output_path)
-    return file_name + '.jpg'
-receipt_api = Blueprint('receipt_api', __name__)
+receipt_api = Blueprint('receipt_api', __name__, template_folder='templates', static_folder='static')
-@receipt_api.route("/receipt/<id>")
-@login_required
-def receipt(id):
-    sites = [site[1] for site in main.get_sites(session['user']['sites'])]
-    database_config = config()
-    with psycopg2.connect(**database_config) as conn:
-        units = postsqldb.UnitsTable.getAll(conn)
-    return render_template("receipts/receipt.html", id=id, current_site=session['selected_site'], sites=sites, units=units)
-@receipt_api.route("/receipts")
+# ROOT TEMPLATE ROUTES
+@receipt_api.route("/")
@login_required
def receipts():
-    sites = [site[1] for site in main.get_sites(session['user']['sites'])]
+    sites = [site[1] for site in postsqldb.get_sites(session['user']['sites'])]
-    return render_template("receipts/index.html", current_site=session['selected_site'], sites=sites)
+    return render_template("receipts_index.html", current_site=session['selected_site'], sites=sites)
-@receipt_api.route('/receipts/getItems', methods=["GET"])
+@receipt_api.route("/<id>")
+@login_required
+def receipt(id):
+    sites = [site[1] for site in postsqldb.get_sites(session['user']['sites'])]
+    units = postsqldb.get_units_of_measure()
+    return render_template("receipt.html", id=id, current_site=session['selected_site'], sites=sites, units=units)
+# API ROUTES
+# Added to Database
+@receipt_api.route('/api/getItems', methods=["GET"])
def getItems():
    recordset = []
    count = {'count': 0}
@ -47,14 +40,13 @@ def getItems():
        limit = int(request.args.get('limit', 10))
        site_name = session['selected_site']
        offset = (page - 1) * limit
-        database_config = config()
-        with psycopg2.connect(**database_config) as conn:
-            payload = ("%%", limit, offset)
-            recordset, count = database.getItemsWithQOH(conn, site_name, payload, convert=True)
+        sort_order = "ID ASC"
+        payload = ("%%", limit, offset, sort_order)
+        recordset, count = receipts_database.getItemsWithQOH(site_name, payload)
        return jsonify({"items":recordset, "end":math.ceil(count['count']/limit), "error":False, "message":"items fetched succesfully!"})
    return jsonify({"items":recordset, "end":math.ceil(count['count']/limit), "error":True, "message":"There was an error with this GET statement"})
-@receipt_api.route('/receipt/getVendors', methods=["GET"])
+@receipt_api.route('/api/getVendors', methods=["GET"])
def getVendors():
    recordset = []
    count = 0
@ -70,7 +62,7 @@ def getVendors():
        return jsonify({"vendors":recordset, "end":math.ceil(count/limit), "error":False, "message":"items fetched succesfully!"})
    return jsonify({"vendors":recordset, "end":math.ceil(count/limit), "error":True, "message":"There was an error with this GET statement"})
-@receipt_api.route('/receipt/getLinkedLists', methods=["GET"])
+@receipt_api.route('/api/getLinkedLists', methods=["GET"])
def getLinkedLists():
    recordset = []
    count = 0
@ -86,7 +78,7 @@ def getLinkedLists():
        return jsonify({"items":recordset, "end":math.ceil(count/limit), "error":False, "message":"items fetched succesfully!"})
    return jsonify({"items":recordset, "end":math.ceil(count/limit), "error":True, "message":"There was an error with this GET statement"})
-@receipt_api.route('/receipts/getReceipts', methods=["GET"])
+@receipt_api.route('/api/getReceipts', methods=["GET"])
def getReceipts():
    recordset = []
    if request.method == "GET":
@ -100,7 +92,7 @@ def getReceipts():
        return jsonify({'receipts':recordset, "end": math.ceil(count/limit), 'error': False, "message": "Get Receipts Successful!"})
    return jsonify({'receipts': recordset, "end": math.ceil(count/limit), 'error': True, "message": "Something went wrong while getting receipts!"})
-@receipt_api.route('/receipts/getReceipt', methods=["GET"])
+@receipt_api.route('/api/getReceipt', methods=["GET"])
def getReceipt():
    record = []
    if request.method == "GET":
@ -112,7 +104,7 @@ def getReceipt():
        return jsonify({'receipt': record, 'error': False, "message": "Get Receipts Successful!"})
    return jsonify({'receipt': record, 'error': True, "message": "Something went wrong while getting receipts!"})
-@receipt_api.route('/receipts/addReceipt', methods=["POST", "GET"])
+@receipt_api.route('/api/addReceipt', methods=["POST", "GET"])
def addReceipt():
    if request.method == "GET":
        user_id = session['user_id']
@ -127,34 +119,33 @@ def addReceipt():
return jsonify({'error': False, "message": "Receipt Added Successful!"}) return jsonify({'error': False, "message": "Receipt Added Successful!"})
return jsonify({'error': True, "message": "Something went wrong while adding receipt!"}) return jsonify({'error': True, "message": "Something went wrong while adding receipt!"})
@receipt_api.route('/receipts/addSKULine', methods=["POST"]) # Added to Database
@receipt_api.route('/api/addSKULine', methods=["POST"])
def addSKULine(): def addSKULine():
if request.method == "POST": if request.method == "POST":
item_id = int(request.get_json()['item_id']) item_id = int(request.get_json()['item_id'])
receipt_id = int(request.get_json()['receipt_id']) receipt_id = int(request.get_json()['receipt_id'])
site_name = session['selected_site'] site_name = session['selected_site']
database_config = config() item = receipts_database.getItemAllByID(site_name, (item_id, ))
with psycopg2.connect(**database_config) as conn: data = {
item = database.getItemAllByID(conn, site_name, (item_id, ), convert=True) 'cost': item['item_info']['cost'],
data = { 'expires': item['food_info']['expires']
'cost': item['item_info']['cost'], }
'expires': item['food_info']['expires'] receipt_item = database_payloads.ReceiptItemPayload(
} type="sku",
receipt_item = MyDataclasses.ReceiptItemPayload( receipt_id=receipt_id,
type="sku", barcode=item['barcode'],
receipt_id=receipt_id, name=item['item_name'],
barcode=item['barcode'], qty=item['item_info']['uom_quantity'],
name=item['item_name'], uom=item['item_info']['uom']['id'],
qty=item['item_info']['uom_quantity'], data=data
uom=item['item_info']['uom'], )
data=data receipts_database.insertReceiptItemsTuple(site_name, receipt_item.payload())
)
database.insertReceiptItemsTuple(conn, site_name, receipt_item.payload())
return jsonify({'error': False, "message": "Line added Succesfully"}) return jsonify({'error': False, "message": "Line added Succesfully"})
return jsonify({'error': True, "message": "Something went wrong while add SKU line!"}) return jsonify({'error': True, "message": "Something went wrong while add SKU line!"})
-@receipt_api.route('/receipts/deleteLine', methods=["POST"])
+@receipt_api.route('/api/deleteLine', methods=["POST"])
def deleteLine():
    if request.method == "POST":
        line_id = int(request.get_json()['line_id'])
@ -166,7 +157,7 @@ def deleteLine():
        return jsonify({'error': False, "message": "Line Deleted Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while deleting line!"})
-@receipt_api.route('/receipts/denyLine', methods=["POST"])
+@receipt_api.route('/api/denyLine', methods=["POST"])
def denyLine():
    if request.method == "POST":
        line_id = int(request.get_json()['line_id'])
@ -177,7 +168,7 @@ def denyLine():
        return jsonify({'error': False, "message": "Line Denied Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while denying line!"})
-@receipt_api.route('/receipts/saveLine', methods=["POST"])
+@receipt_api.route('/api/saveLine', methods=["POST"])
def saveLine():
    if request.method == "POST":
        line_id = int(request.get_json()['line_id'])
@ -192,7 +183,7 @@ def saveLine():
        return jsonify({'error': False, "message": "Line Saved Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while saving line!"})
-@receipt_api.route('/receipt/postLinkedItem', methods=["POST"])
+@receipt_api.route('/api/postLinkedItem', methods=["POST"])
def postLinkedItem():
    if request.method == "POST":
        receipt_item_id = int(request.get_json()['receipt_item_id'])
@ -251,7 +242,7 @@ def postLinkedItem():
        return jsonify({'error': False, "message": "Line Saved Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while saving line!"})
-@receipt_api.route('/receipts/resolveLine', methods=["POST"])
+@receipt_api.route('/api/resolveLine', methods=["POST"])
def resolveLine():
    if request.method == "POST":
        line_id = int(request.get_json()['line_id'])
@ -339,7 +330,7 @@ def resolveLine():
        return jsonify({'error': False, "message": "Line Saved Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while saving line!"})
-@receipt_api.route('/receipt/postVendorUpdate', methods=["POST"])
+@receipt_api.route('/api/postVendorUpdate', methods=["POST"])
def postVendorUpdate():
    if request.method == "POST":
        receipt_id = int(request.get_json()['receipt_id'])
@ -351,7 +342,7 @@ def postVendorUpdate():
        return jsonify({'error': False, "message": "Line Saved Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while saving line!"})
-@receipt_api.route('/receipts/resolveReceipt', methods=["POST"])
+@receipt_api.route('/api/resolveReceipt', methods=["POST"])
def resolveReceipt():
    if request.method == "POST":
        receipt_id = int(request.get_json()['receipt_id'])
@ -364,7 +355,7 @@ def resolveReceipt():
        return jsonify({'error': False, "message": "Line Saved Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while saving line!"})
-@receipt_api.route('/receipt/uploadfile/<receipt_id>', methods=["POST"])
+@receipt_api.route('/api/uploadfile/<receipt_id>', methods=["POST"])
def uploadFile(receipt_id):
    file = request.files['file']
    file_path = current_app.config['FILES_FOLDER'] + f"/receipts/{file.filename.replace(" ", "_")}"
@ -373,7 +364,7 @@ def uploadFile(receipt_id):
    preview_image = ""
    if file_type == "application/pdf":
        output_path = "static/files/receipts/previews/"
-        preview_image = create_pdf_preview(file_path, output_path)
+        preview_image = receipts_processes.create_pdf_preview(file_path, output_path)
    file_size = os.path.getsize(file_path)
    database_config = config()
@ -386,11 +377,11 @@ def uploadFile(receipt_id):
    return jsonify({})
-@receipt_api.route('/receipt/getFile/<file_name>')
+@receipt_api.route('/api/getFile/<file_name>')
def getFile(file_name):
    return send_from_directory('static/files/receipts', file_name)
-@receipt_api.route('/receipts/checkAPI', methods=["POST"])
+@receipt_api.route('/api/checkAPI', methods=["POST"])
def checkAPI():
    if request.method == "POST":
        line_id = int(request.get_json()['line_id'])


@ -0,0 +1,100 @@
import psycopg2
import psycopg2.extras
import config
from application import postsqldb

def getItemsWithQOH(site, payload, convert=True, conn=None):
    recordset = []
    count = 0
    self_conn = False
    with open(f"application/receipts/sql/getItemsWithQOH.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site).replace("%%sort_order%%", payload[3])
    payload = list(payload)
    payload.pop(3)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True
        if convert:
            with conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cur:
                cur.execute(sql, payload)
                recordset = cur.fetchall()
                recordset = [dict(record) for record in recordset]
                cur.execute(f"SELECT COUNT(*) FROM {site}_items WHERE search_string LIKE '%%' || %s || '%%';", (payload[0], ))
                count = cur.fetchone()
        else:
            with conn.cursor() as cur:
                cur.execute(sql, payload)
                recordset = cur.fetchall()
                cur.execute(f"SELECT COUNT(*) FROM {site}_items WHERE search_string LIKE '%%' || %s || '%%';", (payload[0], ))
                count = cur.fetchone()
        if self_conn:
            conn.close()
        return recordset, count
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)
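For reference, this is how the new /api/getItems route calls the helper above: the fourth payload element is an ORDER BY fragment that is spliced into the SQL text (it is not bound as a query parameter) and then popped before execution, so only the search pattern, LIMIT, and OFFSET reach psycopg2. A minimal usage sketch with an illustrative site prefix:

# Illustrative call only; "mysite" is a placeholder site prefix.
page, limit = 1, 10
payload = ("%%", limit, (page - 1) * limit, "ID ASC")   # (search, LIMIT, OFFSET, sort_order)
recordset, count = getItemsWithQOH("mysite", payload)
print(count['count'], "matching items;", len(recordset), "returned on this page")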
def getItemAllByID(site, payload, convert=True, conn=None):
    item = ()
    self_conn = False
    with open(f"application/receipts/sql/getItemAllByID.sql", "r+") as file:
        getItemAllByID_sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True
        with conn.cursor() as cur:
            cur.execute(getItemAllByID_sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                item = postsqldb.tupleDictionaryFactory(cur.description, rows)
            if rows and not convert:
                item = rows
        if self_conn:
            conn.close()
        return item
    except (Exception, psycopg2.DatabaseError) as error:
        raise postsqldb.DatabaseError(error, payload, getItemAllByID_sql)
def insertReceiptItemsTuple(site, payload, convert=True, conn=None):
    receipt_item = ()
    self_conn = False
    with open(f"application/receipts/sql/insertReceiptItemsTuple.sql", "r+") as file:
        sql = file.read().replace("%%site_name%%", site)
    try:
        if not conn:
            database_config = config.config()
            conn = psycopg2.connect(**database_config)
            conn.autocommit = True
            self_conn = True
        with conn.cursor() as cur:
            cur.execute(sql, payload)
            rows = cur.fetchone()
            if rows and convert:
                receipt_item = postsqldb.tupleDictionaryFactory(cur.description, rows)
            elif rows and not convert:
                receipt_item = rows
        if self_conn:
            conn.commit()
            conn.close()
        return receipt_item
    except Exception as error:
        raise postsqldb.DatabaseError(error, payload, sql)


@ -0,0 +1,14 @@
import pymupdf
import os
import PIL.Image

def create_pdf_preview(pdf_path, output_path, size=(600, 400)):
    pdf = pymupdf.open(pdf_path)
    page = pdf[0]
    file_name = os.path.basename(pdf_path).replace('.pdf', "")
    pix = page.get_pixmap()
    img = PIL.Image.frombytes("RGB", (pix.width, pix.height), pix.samples)
    output_path = output_path + file_name + '.jpg'
    img.thumbnail(size)
    img.save(output_path)
    return file_name + '.jpg'
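A short usage sketch matching how the /api/uploadfile route calls this helper after saving the uploaded PDF; the file name is illustrative:

# Illustrative paths; the route passes the saved upload path and the previews directory.
preview = create_pdf_preview("static/files/receipts/grocery_run.pdf",
                             "static/files/receipts/previews/")
# preview == "grocery_run.jpg", written into static/files/receipts/previews/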


@ -0,0 +1,86 @@
WITH passed_id AS (SELECT %s AS passed_id),
logistics_id AS (SELECT logistics_info_id FROM %%site_name%%_items WHERE id=(SELECT passed_id FROM passed_id)),
info_id AS (SELECT item_info_id FROM %%site_name%%_items WHERE id=(SELECT passed_id FROM passed_id)),
cte_conversions AS (
SELECT
%%site_name%%_conversions.id as conv_id,
%%site_name%%_conversions.conv_factor as conv_factor,
units.* as uom
FROM %%site_name%%_conversions
LEFT JOIN units ON %%site_name%%_conversions.uom_id = units.id
WHERE %%site_name%%_conversions.item_id = (SELECT passed_id FROM passed_id)
),
cte_item_info AS (
SELECT
%%site_name%%_item_info.*,
row_to_json(units.*) as uom,
COALESCE((SELECT json_agg(convs) FROM cte_conversions convs), '[]'::json) AS conversions,
COALESCE((SELECT json_agg(p.*) FROM %%site_name%%_sku_prefix as p WHERE p.id = ANY(%%site_name%%_item_info.prefixes)), '[]'::json) as prefixes
FROM %%site_name%%_item_info
LEFT JOIN units ON %%site_name%%_item_info.uom = units.id
WHERE %%site_name%%_item_info.id = (SELECT item_info_id FROM info_id)
),
cte_groups AS (
SELECT
%%site_name%%_groups.*,
%%site_name%%_group_items.uuid,
%%site_name%%_group_items.item_type,
%%site_name%%_group_items.qty
FROM %%site_name%%_groups
JOIN %%site_name%%_group_items ON %%site_name%%_groups.id = %%site_name%%_group_items.gr_id
WHERE %%site_name%%_group_items.item_id = (SELECT passed_id FROM passed_id)
),
cte_shopping_lists AS (
SELECT
%%site_name%%_shopping_lists.*,
%%site_name%%_shopping_list_items.uuid,
%%site_name%%_shopping_list_items.item_type,
%%site_name%%_shopping_list_items.qty
FROM %%site_name%%_shopping_lists
JOIN %%site_name%%_shopping_list_items ON %%site_name%%_shopping_lists.id = %%site_name%%_shopping_list_items.sl_id
WHERE %%site_name%%_shopping_list_items.item_id = (SELECT passed_id FROM passed_id)
),
cte_itemlinks AS (
SELECT * FROM %%site_name%%_itemlinks WHERE link=(SELECT passed_id FROM passed_id)
),
cte_item_locations AS (
SELECT * FROM %%site_name%%_item_locations
LEFT JOIN %%site_name%%_locations ON %%site_name%%_locations.id = %%site_name%%_item_locations.location_id
WHERE part_id = (SELECT passed_id FROM passed_id)
),
cte_logistics_info AS (
SELECT
li.*,
row_to_json(pl) AS primary_location,
row_to_json(ail) AS auto_issue_location,
row_to_json(pz) AS primary_zone,
row_to_json(aiz) AS auto_issue_zone
FROM %%site_name%%_logistics_info AS li
LEFT JOIN %%site_name%%_locations AS pl ON li.primary_location = pl.id
LEFT JOIN %%site_name%%_locations AS ail ON li.auto_issue_location = ail.id
LEFT JOIN %%site_name%%_zones AS pz ON li.primary_zone = pz.id
LEFT JOIN %%site_name%%_zones AS aiz ON li.auto_issue_zone = aiz.id
WHERE li.id=(SELECT logistics_info_id FROM logistics_id)
)
SELECT
(SELECT passed_id FROM passed_id) AS passed_id,
%%site_name%%_items.*,
(SELECT COALESCE(row_to_json(logis), '{}') FROM cte_logistics_info logis) AS logistics_info,
row_to_json(%%site_name%%_food_info.*) as food_info,
row_to_json(%%site_name%%_brands.*) as brand,
(SELECT COALESCE(row_to_json(ii), '{}') FROM cte_item_info ii) AS item_info,
(SELECT COALESCE(array_agg(row_to_json(g)), '{}') FROM cte_groups g) AS item_groups,
(SELECT COALESCE(array_agg(row_to_json(sl)), '{}') FROM cte_shopping_lists sl) AS item_shopping_lists,
(SELECT COALESCE(array_agg(row_to_json(il)), '{}') FROM cte_itemlinks il) AS linked_items,
(SELECT COALESCE(array_agg(row_to_json(ils)), '{}') FROM cte_item_locations ils) AS item_locations
FROM %%site_name%%_items
LEFT JOIN %%site_name%%_item_info ON %%site_name%%_items.item_info_id = %%site_name%%_item_info.id
LEFT JOIN %%site_name%%_food_info ON %%site_name%%_items.food_info_id = %%site_name%%_food_info.id
LEFT JOIN %%site_name%%_brands ON %%site_name%%_items.brand = %%site_name%%_brands.id
LEFT JOIN units ON %%site_name%%_item_info.uom = units.id
LEFT JOIN cte_groups ON %%site_name%%_items.id = cte_groups.id
LEFT JOIN cte_shopping_lists ON %%site_name%%_items.id = cte_shopping_lists.id
WHERE %%site_name%%_items.id=(SELECT passed_id FROM passed_id)
GROUP BY
%%site_name%%_items.id, %%site_name%%_item_info.id, %%site_name%%_food_info.id, %%site_name%%_brands.id;
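The query above folds every related record into JSON columns on a single row, which tupleDictionaryFactory then exposes as nested dictionaries; addSKULine relies on that shape when it reads item['item_info']['cost'] and item['food_info']['expires']. A rough, purely illustrative sketch of the resulting structure:

# Illustrative shape only; keys mirror the SELECT aliases above, values are made up.
item = {
    "id": 42,
    "barcode": "012345678905",
    "item_name": "Example Item",
    "item_info": {"cost": 2.49, "uom": {"id": 1}, "uom_quantity": 1, "conversions": []},
    "food_info": {"expires": True, "default_expiration": 14},
    "logistics_info": {"primary_location": {"id": 3}, "auto_issue_location": {"id": 7}},
    "item_groups": [],
    "item_shopping_lists": [],
    "linked_items": [],
    "item_locations": [],
}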


@ -0,0 +1,18 @@
WITH sum_cte AS (
SELECT mi.id, SUM(mil.quantity_on_hand)::FLOAT8 AS total_sum
FROM %%site_name%%_item_locations mil
JOIN %%site_name%%_items mi ON mil.part_id = mi.id
GROUP BY mi.id
)
SELECT %%site_name%%_items.*,
row_to_json(%%site_name%%_item_info.*) as item_info,
sum_cte.total_sum as total_qoh,
(SELECT COALESCE(row_to_json(u), '{}') FROM units as u WHERE u.id=%%site_name%%_item_info.uom) as uom
FROM %%site_name%%_items
LEFT JOIN sum_cte ON %%site_name%%_items.id = sum_cte.id
LEFT JOIN %%site_name%%_item_info ON %%site_name%%_items.item_info_id = %%site_name%%_item_info.id
WHERE %%site_name%%_items.search_string LIKE '%%' || %s || '%%'
ORDER BY %%sort_order%%
LIMIT %s OFFSET %s;


@ -0,0 +1,4 @@
INSERT INTO %%site_name%%_receipt_items
(type, receipt_id, barcode, name, qty, uom, data, status)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
RETURNING *;


@ -38,7 +38,7 @@ async function replenishFields(receipt) {
async function checkAPI(line_id, barcode) {
console.log(barcode)
-const response = await fetch(`/receipts/checkAPI`, {
+const response = await fetch(`/receipts/api/checkAPI`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@ -300,7 +300,7 @@ async function openLineEditModal(line_data) {
async function addSKULine(item_id) {
console.log(item_id)
-const response = await fetch(`/receipts/addSKULine`, {
+const response = await fetch(`/receipts/api/addSKULine`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@ -316,7 +316,7 @@ async function addSKULine(item_id) {
}
async function resolveLine(line_id) {
-const response = await fetch(`/receipts/resolveLine`, {
+const response = await fetch(`/receipts/api/resolveLine`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@ -329,7 +329,7 @@ async function resolveLine(line_id) {
}
async function resolveReceipt() {
-const response = await fetch(`/receipts/resolveReceipt`, {
+const response = await fetch(`/receipts/api/resolveReceipt`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@ -346,7 +346,7 @@ async function uploadFile() {
const formData = new FormData();
formData.append('file', fileInput.files[0]);
-await fetch(`/receipt/uploadfile/${receipt_id}`, {
+await fetch(`/receipts/api/uploadfile/${receipt_id}`, {
method: 'POST',
body: formData
})
@ -379,7 +379,7 @@ async function saveLine(line_id){
}
}
-const response = await fetch(`/receipts/saveLine`, {
+const response = await fetch(`/receipts/api/saveLine`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@ -395,7 +395,7 @@ async function saveLine(line_id){
}
async function deleteLine(id) {
-const response = await fetch(`/receipts/deleteLine`, {
+const response = await fetch(`/receipts/api/deleteLine`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@ -409,7 +409,7 @@ async function deleteLine(id) {
async function denyLine(id) {
console.log(id)
-const response = await fetch(`/receipts/denyLine`, {
+const response = await fetch(`/receipts/api/denyLine`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@ -422,7 +422,7 @@ async function denyLine(id) {
}
async function getReceipt(id) {
-const url = new URL('/receipts/getReceipt', window.location.origin);
+const url = new URL('/receipts/api/getReceipt', window.location.origin);
url.searchParams.append('id', id);
const response = await fetch(url);
data = await response.json();
@ -433,7 +433,7 @@ async function getReceipt(id) {
let items_limit = 50;
async function getItems() {
console.log("getting items")
-const url = new URL('/receipts/getItems', window.location.origin);
+const url = new URL('/receipts/api/getItems', window.location.origin);
url.searchParams.append('page', pagination_current);
url.searchParams.append('limit', items_limit);
const response = await fetch(url);
@ -531,7 +531,7 @@ let vendor_limit = 25
let vendor_current_page = 1 let vendor_current_page = 1
let vendor_end_page = 10 let vendor_end_page = 10
async function getVendors() { async function getVendors() {
const url = new URL('/receipt/getVendors', window.location.origin); const url = new URL('/receipts/api/getVendors', window.location.origin);
url.searchParams.append('page', vendor_current_page); url.searchParams.append('page', vendor_current_page);
url.searchParams.append('limit', vendor_limit); url.searchParams.append('limit', vendor_limit);
const response = await fetch(url); const response = await fetch(url);
@ -541,7 +541,7 @@ async function getVendors() {
} }
async function postVendorUpdate(vendor_id) { async function postVendorUpdate(vendor_id) {
const response = await fetch(`/receipt/postVendorUpdate`, { const response = await fetch(`/receipts/api/postVendorUpdate`, {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
@ -691,7 +691,7 @@ let links_limit = 25
let links_current_page = 1 let links_current_page = 1
let links_end_page = 10 let links_end_page = 10
async function getLinkedLists() { async function getLinkedLists() {
const url = new URL('/receipt/getLinkedLists', window.location.origin); const url = new URL('/receipts/api/getLinkedLists', window.location.origin);
url.searchParams.append('page', vendor_current_page); url.searchParams.append('page', vendor_current_page);
url.searchParams.append('limit', vendor_limit); url.searchParams.append('limit', vendor_limit);
const response = await fetch(url); const response = await fetch(url);
@ -701,7 +701,7 @@ async function getLinkedLists() {
} }
async function postLinkedItem(receipt_item_id, link_list_id, conv_factor) { async function postLinkedItem(receipt_item_id, link_list_id, conv_factor) {
const response = await fetch(`/receipt/postLinkedItem`, { const response = await fetch(`/receipts/api/postLinkedItem`, {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',

View File

@ -64,7 +64,7 @@ async function replenishReceiptsTable(receipts) {
) )
tableRow.onclick = async function() { tableRow.onclick = async function() {
let url = `${window.location.origin}/receipt/${receipts[i].id}`; let url = `${window.location.origin}/receipts/${receipts[i].id}`;
window.location.href = url; window.location.href = url;
} }
@ -77,7 +77,7 @@ async function replenishReceiptsTable(receipts) {
var receipts_limit = 10 var receipts_limit = 10
async function getReceipts() { async function getReceipts() {
const url = new URL('/receipts/getReceipts', window.location.origin); const url = new URL('/receipts/api/getReceipts', window.location.origin);
url.searchParams.append('page', pagination_current); url.searchParams.append('page', pagination_current);
url.searchParams.append('limit', receipts_limit); url.searchParams.append('limit', receipts_limit);
const response = await fetch(url); const response = await fetch(url);

View File

@ -390,6 +390,6 @@
</div> </div>
</div> </div>
</body> </body>
<script src="{{ url_for('static', filename='handlers/receiptHandler.js') }}"></script> <script src="{{ url_for('receipt_api.static', filename='js/receiptHandler.js') }}"></script>
<script>const receipt_id = {{id|tojson}}</script> <script>const receipt_id = {{id|tojson}}</script>
</html> </html>

View File

@ -124,6 +124,6 @@
</div> </div>
</div> </div>
</div> </div>
<script type="text/javascript" src="/static/handlers/receiptsHandler.js"></script> <script src="{{ url_for('receipt_api.static', filename='js/receiptsHandler.js') }}"></script>
</body> </body>
</html> </html>

View File

@ -23,7 +23,7 @@ def recipes():
description: returns recipes/index.html with sites, current_site. description: returns recipes/index.html with sites, current_site.
""" """
sites = [site[1] for site in main.get_sites(session['user']['sites'])] sites = [site[1] for site in main.get_sites(session['user']['sites'])]
return render_template("index.html", return render_template("recipes_index.html",
current_site=session['selected_site'], current_site=session['selected_site'],
sites=sites) sites=sites)

View File

@ -1961,4 +1961,13 @@
sql='INSERT INTO test_shopping_list_items(uuid, sl_id, item_type, item_name, uom, qty, item_id, links) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) RETURNING *;') sql='INSERT INTO test_shopping_list_items(uuid, sl_id, item_type, item_name, uom, qty, item_id, links) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) RETURNING *;')
2025-07-12 09:29:10.063362 --- ERROR --- DatabaseError(message='tuple index out of range', 2025-07-12 09:29:10.063362 --- ERROR --- DatabaseError(message='tuple index out of range',
payload=(5,), payload=(5,),
sql='SELECT *, (SELECT COALESCE(array_agg(row_to_json(g)), '{}') FROM test_shopping_list_items g WHERE sl_id = test_shopping_lists.id) AS sl_items FROM test_shopping_lists LIMIT %s OFFSET %s;') sql='SELECT *, (SELECT COALESCE(array_agg(row_to_json(g)), '{}') FROM test_shopping_list_items g WHERE sl_id = test_shopping_lists.id) AS sl_items FROM test_shopping_lists LIMIT %s OFFSET %s;')
2025-07-12 10:32:47.237422 --- ERROR --- DatabaseError(message='invalid input syntax for type integer: "each"LINE 3: VALUES ('13kitql4', '13', 'custom', 'test', 'each', 1, NULL,... ^',
payload=('13kitql4', '13', 'custom', 'test', 'each', 1, None, '{"main": "test2"}'),
sql='INSERT INTO test_shopping_list_items(uuid, sl_id, item_type, item_name, uom, qty, item_id, links) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) RETURNING *;')
2025-07-12 15:35:45.537855 --- ERROR --- DatabaseError(message='can't adapt type 'dict'',
payload=('sku', 26, '%041667029362%', 'Microwave popcorn', 1, {'id': 1, 'plural': 'pinches', 'single': ' pinch', 'fullname': ' Pinch', 'description': ' Less than 1/8 teaspoon.'}, '{"cost": 0, "expires": false}', 'Unresolved'),
sql='INSERT INTO test_receipt_items(type, receipt_id, barcode, name, qty, uom, data, status) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) RETURNING *;')
2025-07-12 15:45:15.171378 --- ERROR --- DatabaseError(message='can't adapt type 'dict'',
payload=('sku', 26, '%028400517829%', 'Tostitos', 1, {'id': 1, 'plural': 'pinches', 'single': ' pinch', 'fullname': ' Pinch', 'description': ' Less than 1/8 teaspoon.'}, '{"cost": 0, "expires": false}', 'Unresolved'),
sql='INSERT INTO test_receipt_items(type, receipt_id, barcode, name, qty, uom, data, status) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) RETURNING *;')
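The two "can't adapt type 'dict'" entries above are psycopg2 refusing to bind a raw Python dict (the uom object) as a query parameter. A minimal sketch of the usual fix, assuming the uom and data columns are json/jsonb: either serialize the dict before it reaches the INSERT, or register psycopg2's Json adapter once at startup. The sample values are copied from the log, not from the new schema.

import json
from psycopg2.extras import Json
from psycopg2.extensions import register_adapter

# Option 1: serialize the dict explicitly before building the row tuple
uom = {'id': 1, 'plural': 'pinches', 'single': 'pinch'}
row = ('sku', 26, '%041667029362%', 'Microwave popcorn', 1, json.dumps(uom),
       '{"cost": 0, "expires": false}', 'Unresolved')

# Option 2: register an adapter so psycopg2 converts any dict to json automatically
register_adapter(dict, Json)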

View File

@ -1,120 +0,0 @@
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
import psycopg2, math, json, datetime, main, copy, requests, process, database, pprint, MyDataclasses
from config import config, sites_config
from main import unfoldCostLayers
from threading import Thread
from queue import Queue
import time
from user_api import login_required
import webpush
external_api = Blueprint('external', __name__)
@external_api.route('/external/getItemLocations', methods=["GET"])
def getItemLocations():
recordset = []
count = 0
if request.method == "GET":
item_id = int(request.args.get('id', 1))
page = int(request.args.get('page', 1))
limit = int(request.args.get('limit', 10))
site_name = session['selected_site']
offset = (page - 1) * limit
database_config = config()
with psycopg2.connect(**database_config) as conn:
recordset, count = database.getItemLocations(conn, site_name, (item_id, limit, offset), convert=True)
return jsonify({"locations":recordset, "end":math.ceil(count/limit), "error":False, "message":"item fetched succesfully!"})
return jsonify({"locations":recordset, "end": math.ceil(count/limit), "error":True, "message":"There was an error with this GET statement"})
@external_api.route('/external/getItem', methods=["GET"])
def getItem():
record = {}
if request.method == "GET":
item_id = int(request.args.get('id', 1))
site_name = session['selected_site']
database_config = config()
with psycopg2.connect(**database_config) as conn:
record = database.getItemAllByID(conn, site_name, (item_id, ), convert=True)
return jsonify({"item":record, "error":False, "message":"item fetched succesfully!"})
return jsonify({"item":record, "error":True, "message":"There was an error with this GET statement"})
@external_api.route('/external/getItem/barcode', methods=["GET"])
def getItemBarcode():
record = {}
if request.method == "GET":
item_barcode = f"%{str(request.args.get('barcode', 1))}%"
site_name = session['selected_site']
database_config = config()
with psycopg2.connect(**database_config) as conn:
record = database.getItemAllByBarcode(conn, site_name, (item_barcode, ), convert=True)
if record == {}:
return jsonify({"item":None, "error":True, "message":"Item either does not exist or there was a larger problem!"})
else:
return jsonify({"item":record, "error":False, "message":"item fetched succesfully!"})
return jsonify({"item":record, "error":True, "message":"There was an error with this GET statement"})
@external_api.route('/external/getModalItems', methods=["GET"])
@login_required
def getModalItems():
recordset = []
count = {'count': 0}
if request.method == "GET":
page = int(request.args.get('page', 1))
limit = int(request.args.get('limit', 10))
search_string = request.args.get('search_string', '')
site_name = session['selected_site']
offset = (page - 1) * limit
database_config = config()
with psycopg2.connect(**database_config) as conn:
payload = (search_string, limit, offset)
recordset, count = database.getItemsForModal(conn, site_name, payload, convert=True)
return jsonify({"items":recordset, "end":math.ceil(count['count']/limit), "error":False, "message":"items fetched succesfully!"})
return jsonify({"items":recordset, "end":math.ceil(count['count']/limit), "error":True, "message":"There was an error with this GET statement"})
@external_api.route('/external/postTransaction', methods=["POST"])
def post_transaction():
if request.method == "POST":
database_config = config()
with psycopg2.connect(**database_config) as conn:
result = process.postTransaction(
conn=conn,
site_name=session['selected_site'],
user_id=session['user_id'],
data=dict(request.json)
)
return jsonify(result)
return jsonify({"error":True, "message":"There was an error with this POST statement"})
@external_api.route('/external/postReceipt', methods=["POST"])
def post_receipt():
if request.method == "POST":
site_name = session['selected_site']
user_id = session['user_id']
database_config = config()
with psycopg2.connect(**database_config) as conn:
items = request.json['items']
receipt_id = database.request_receipt_id(conn, site_name)
receipt_id = f"SIR-{receipt_id}"
receipt = MyDataclasses.ReceiptPayload(
receipt_id=receipt_id,
submitted_by=user_id
)
receipt = database.insertReceiptsTuple(conn, site_name, receipt.payload(), convert=True)
for item in items:
receipt_item = MyDataclasses.ReceiptItemPayload(
type=item['type'],
receipt_id=receipt['id'],
barcode=item['item']['barcode'],
name=item['item']['item_name'],
qty=item['item']['qty'],
uom=item['item']['uom'],
data=item['item']['data']
)
database.insertReceiptItemsTuple(conn, site_name, receipt_item.payload())
#webpush.push_notifications('New Receipt', f"Receipt {receipt['receipt_id']} was added to Site -> {site_name}!")
webpush.push_ntfy('New Receipt', f"Receipt {receipt['receipt_id']} was added to Site -> {site_name}!")
return jsonify({"error":False, "message":"Transaction Complete!"})
return jsonify({"error":True, "message":"There was an error with this POST statement"})

View File

@ -1,34 +0,0 @@
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
import psycopg2, math, json, datetime, main, copy, requests, process, database, pprint, MyDataclasses
from config import config, sites_config
from main import unfoldCostLayers
from user_api import login_required
groups_api = Blueprint('groups_api', __name__)
@groups_api.route("/groups")
@login_required
def groups():
sites = [site[1] for site in main.get_sites(session['user']['sites'])]
return render_template("groups/index.html",
current_site=session['selected_site'],
sites=sites)
@groups_api.route("/group/<id>")
@login_required
def group(id):
sites = [site[1] for site in main.get_sites(session['user']['sites'])]
return render_template("groups/group.html", id=id, current_site=session['selected_site'], sites=sites)
@groups_api.route('/groups/getGroups', methods=["GET"])
def getGroups():
groups = []
if request.method == "GET":
page = int(request.args.get('page', 1))
limit = int(request.args.get('limit', 1))
offset = (page-1)*limit
database_config = config()
site_name = session['selected_site']
with psycopg2.connect(**database_config) as conn:
groups, count = database.getGroups(conn, site_name, (limit, offset), convert=True)
return jsonify({'groups': groups, 'end': math.ceil(count/limit), 'error': False, 'message': 'groups fetched successfully!'})

View File

@ -1,235 +0,0 @@
from config import config
import psycopg2, requests, database, MyDataclasses
import main, datetime, json, csv
from main import lst2pgarr
import process
def importItemFromCSV(test, site_name, uuid, site):
logistics_info = MyDataclasses.LogisticsInfoPayload(
barcode=test['barcode'],
primary_location=site['default_primary_location'],
primary_zone=site['default_zone'],
auto_issue_location=site['default_auto_issue_location'],
auto_issue_zone=site['default_zone'])
item_info = MyDataclasses.ItemInfoPayload(test['barcode'])
# Food Info
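# The source nutriment keys listed in other_tags below are mapped positionally onto the internal field names in t.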
t = ['serving', 'serving_unit', 'calories', 'calories_unit', 'proteins',
'proteins_unit', 'fats', 'fats_unit', 'carbohydrates', 'carbohydrates_unit', 'sugars', 'sugars_unit', 'sodium', 'sodium_unit',
'fibers', 'fibers_unit']
other_tags = [
'serving',
'serving_unit',
'calories',
'calories_unit',
'proteins_serving',
'proteins_unit',
'fat_serving',
'fat_unit',
'carbohydrates_serving',
'carbohydrates_unit',
'sugars_serving',
'sugars_unit',
'sodium_serving',
'sodium_unit',
'fiber_serving',
'fiber_unit',
]
nutriments = test['nutriments'].replace("'", '"')
nutriments = nutriments.replace("{", "").replace("}", "")
key_values = nutriments.split(", ")
nutriments = {}
if key_values != ['']:
for s in key_values:
s= s.split(": ")
k = s[0].replace('"', "")
v = s[1].replace('"', "")
nutriments[k] = v
nutrients = {}
for i in range(len(other_tags)):
if other_tags[i] in nutriments.keys():
nutrients[t[i]] = nutriments[other_tags[i]]
else:
nutrients[t[i]] = ''
food_groups = test['food_groups_tags']
food_groups = food_groups.replace('[', "").replace("]", "")
food_groups = food_groups.replace("'", "")
food_groups = food_groups.split(", ")
ingredients = test['ingredients_hierarchy']
ingredients = ingredients.replace('[', "").replace("]", "")
ingredients = ingredients.replace("'", "")
ingredients = ingredients.split(", ")
food_info = MyDataclasses.FoodInfoPayload(food_groups, ingredients, nutrients)
if test['brands'] != "":
brand = MyDataclasses.BrandsPayload(test['brands'])
logistics_info_id = 0
item_info_id = 0
food_info_id = 0
brand_id = 1
database_config = config()
try:
with psycopg2.connect(**database_config) as conn:
logistics_info = database.insertLogisticsInfoTuple(conn, site_name, logistics_info.payload())
item_info = database.insertItemInfoTuple(conn, site_name, item_info.payload())
food_info = database.insertFoodInfoTuple(conn, site_name, food_info.payload())
if test['brands'] != "":
brand = database.insertBrandsTuple(conn, site_name, brand.payload())
brand_id = brand[0]
print("Logistics:", logistics_info)
print("item_info:", item_info)
print("food_info:", food_info)
print("brand:", brand_id)
name = test['name']
name = name.replace("'", "@&apostraphe&")
description = ""
tags = lst2pgarr([])
links = json.dumps({})
search_string = f"&&{test['barcode']}&&{name}&&"
item = MyDataclasses.ItemsPayload(test['barcode'], test['name'], item_info[0],
logistics_info[0], food_info[0], brand=brand_id,
row_type="single", item_type=test["sub_type"], search_string=search_string)
item = database.insertItemTuple(conn, site_name, item.payload(), convert=True)
item = database.getItemAllByID(conn, site_name, (item['id'], ), convert=True)
print("Item:", item)
with conn.cursor() as cur:
cur.execute(f"SELECT id FROM {site_name}_locations WHERE uuid=%s;", (uuid, ))
location_id = cur.fetchone()[0]
print("Location ID:", location_id)
item_location = MyDataclasses.ItemLocationPayload(item['id'], location_id)
location = database.insertItemLocationsTuple(conn, site_name, item_location.payload(), convert=True)
print("Item location:", location)
creation_tuple = MyDataclasses.TransactionPayload(
datetime.datetime.now(),
logistics_info[0],
item['barcode'],
item['item_name'],
"SYSTEM",
0.0,
"Item added to the System!",
1,
{'location': uuid}
)
database.insertTransactionsTuple(conn, site_name, creation_tuple.payload())
qoh = float(test['qty_on_hand'])
print(qoh, type(qoh))
trans_type = "Adjust In"
if qoh != 0.0:
if qoh >= 0.0:
trans_type = "Adjust In"
else:
trans_type = "Adjust Out"
payload = {
'item_id': item['id'],
'logistics_info_id': item['logistics_info_id'],
'barcode': item['barcode'],
'item_name': item['item_name'],
'transaction_type': trans_type,
'quantity': float(qoh),
'description': f'creation quantity',
'cost': item['item_info']['cost'],
'vendor': 1,
'expires': None,
'location_id': location_id
}
process.postTransaction(conn, site_name, 1, payload)
conn.commit()
except Exception as error:
print(error, item_info)
def importCSV(path, site_name):
database_config = config()
with psycopg2.connect(**database_config) as conn:
site = database.selectSiteTuple(conn, (site_name,), convert=True)
default_zone = database.__selectTuple(conn, site_name, f"{site_name}_zones", (site['default_zone'], ), convert=True)
default_location = database.__selectTuple(conn, site_name, f"{site_name}_locations", (site['default_primary_location'],), convert=True)
uuid = f"{default_zone['name']}@{default_location['name']}"
print(uuid)
with open(path, "r+", encoding="utf-8") as file:
csv_reader = csv.DictReader(file)
for row in csv_reader:
try:
importItemFromCSV(row, site_name, uuid, site)
except Exception as error:
with open("process.log", "a+") as file:
file.write("\n")
file.write(f"{datetime.datetime.now()} --- CAUTION --- {error}\n")
file.write(f"{" "*41}{json.dumps(row)}")
#importCSV("2025-03-19-Pantry (1).csv", "main")
def importLinkFromCSV(row, site_name, conn):
barcode = row['barcode']
link_barcode=row['link_barcode']
item_data=json.loads(row['data'].replace('\\j*s*o*n\\', ""))
conv_factor=row['conv_factor']
link_item = database.getItemAllByBarcode(conn, site_name, (link_barcode, ), convert=True)
link = MyDataclasses.ItemLinkPayload(
barcode=barcode,
link=link_item['id'],
data=item_data,
conv_factor=conv_factor
)
newitem = {
'barcode': barcode,
'name': item_data['name'],
'subtype': ''
}
try:
process.postNewBlankItem(conn, site_name, 1, newitem)
except Exception as error:
print(error)
pass
lin = database.insertItemLinksTuple(conn, site_name, link.payload())
print(lin)
def importLinksFromCSV(path, site_name):
database_config = config()
with psycopg2.connect(**database_config) as conn:
with open(path, "r+", encoding="utf-8") as file:
csv_reader = csv.DictReader(file)
for row in csv_reader:
try:
importLinkFromCSV(row, site_name, conn)
except Exception as error:
with open("process.log", "a+") as file:
file.write("\n")
file.write(f"{datetime.datetime.now()} --- CAUTION --- {error}\n")
file.write(f"{" "*41}{json.dumps(row)}")
importLinksFromCSV("test.csv", 'test')
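A side note on the removed importItemFromCSV above: the nutriments column is parsed by stripping quotes and splitting on ", ", which breaks on any value that itself contains a comma or colon. A hedged alternative sketch, assuming the CSV really stores a Python dict literal; parse_nutriments is a hypothetical helper, not part of this codebase:

import ast

def parse_nutriments(raw: str) -> dict:
    # Parse a dict literal such as "{'calories': '120', 'calories_unit': 'kcal'}".
    raw = (raw or "").strip()
    if not raw or raw == "{}":
        return {}
    try:
        value = ast.literal_eval(raw)
        return value if isinstance(value, dict) else {}
    except (ValueError, SyntaxError):
        return {}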

Binary file not shown.

After

Width: | Height: | Size: 8.5 KiB

Binary file not shown.

After

Width: | Height: | Size: 1.5 MiB

View File

@ -1,22 +0,0 @@
import schedule, time, psycopg2
import postsqldb
from config import config
def createCycleCount():
print("task is running")
database_config = config()
with psycopg2.connect(**database_config) as conn:
sites = postsqldb.SitesTable.selectTuples(conn)
print(sites)
conn.rollback()
def start_schedule():
schedule.every(1).minutes.do(createCycleCount)
while True:
schedule.run_pending()
time.sleep(60)
createCycleCount()

View File

@ -1,16 +1,15 @@
import celery.schedules
from flask import Flask, render_template, session, request, redirect, jsonify from flask import Flask, render_template, session, request, redirect, jsonify
from flask_assets import Environment, Bundle from flask_assets import Environment, Bundle
import api, config, user_api, psycopg2, main, api_admin, receipts_API, group_api import api, config, user_api, psycopg2, main, api_admin
from user_api import login_required, update_session_user from user_api import login_required, update_session_user
from workshop_api import workshop_api from workshop_api import workshop_api
import database import database
import postsqldb
from webpush import trigger_push_notifications_for_subscriptions from webpush import trigger_push_notifications_for_subscriptions
from application.recipes import recipes_api from application.recipes import recipes_api
from application.items import items_API from application.items import items_API
from application.poe import poe_api from application.poe import poe_api
from application.shoppinglists import shoplist_api from application.shoppinglists import shoplist_api
from application.receipts import receipts_api
from flasgger import Swagger from flasgger import Swagger
@ -31,9 +30,8 @@ app.register_blueprint(api_admin.admin_api)
app.register_blueprint(items_API.items_api, url_prefix='/items') app.register_blueprint(items_API.items_api, url_prefix='/items')
app.register_blueprint(poe_api.point_of_ease, url_prefix='/poe') app.register_blueprint(poe_api.point_of_ease, url_prefix='/poe')
app.register_blueprint(workshop_api) app.register_blueprint(workshop_api)
app.register_blueprint(receipts_API.receipt_api) app.register_blueprint(receipts_api.receipt_api, url_prefix='/receipts')
app.register_blueprint(shoplist_api.shopping_list_api, url_prefix="/shopping-lists") app.register_blueprint(shoplist_api.shopping_list_api, url_prefix="/shopping-lists")
app.register_blueprint(group_api.groups_api)
app.register_blueprint(recipes_api.recipes_api, url_prefix='/recipes') app.register_blueprint(recipes_api.recipes_api, url_prefix='/recipes')
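The new application.receipts blueprint module itself is not shown in this diff, but the registration above, the /receipts/api/... fetch paths in the handlers, and the url_for('receipt_api.static', ...) calls in the templates imply a definition roughly like the following sketch (folder names and the sample route are assumptions):

# application/receipts/receipts_api.py (assumed layout; not part of this diff)
from flask import Blueprint, jsonify

receipt_api = Blueprint(
    'receipt_api', __name__,
    static_folder='static',        # lets templates resolve url_for('receipt_api.static', filename='js/receiptHandler.js')
    template_folder='templates',
)

@receipt_api.route('/api/getReceipts')
def get_receipts():
    # with url_prefix='/receipts' this resolves to /receipts/api/getReceipts
    return jsonify(receipts=[], end=1)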