Compare commits
No commits in common. "1799d00ef14a14033177a42682dc30ce230809f8" and "8e8738921daac76f8fa929ac60b2eaaea62e903b" have entirely different histories.
1799d00ef1
...
8e8738921d
396
2024-10-02-Pantry.csv
Normal file
396
2024-10-02-Pantry.csv
Normal file
File diff suppressed because one or more lines are too long
19
MyDataclasses,py
Normal file
19
MyDataclasses,py
Normal file
@@ -0,0 +1,19 @@
|
||||
from dataclasses import dataclass, field
|
||||
import json
|
||||
from database import lst2pgarr
|
||||
|
||||
@dataclass
class LogisticsInfoPayload:
    """Parameter bundle for inserting/updating a logistics_info row.

    barcode, primary_location and auto_issue_location are required; the
    dict fields are JSON-encoded by payload().
    """

    barcode: str
    primary_location: str
    auto_issue_location: str
    dynamic_locations: dict = field(default_factory=dict)
    location_data: dict = field(default_factory=dict)
    quantity_on_hand: float = 0.0

    def payload(self):
        """Return the insert tuple in field order (dicts JSON-encoded).

        Raises:
            Exception: if any required field was left as None.
        """
        # Bug fix: the original condition was
        #   if self.barcode or self.primary_location or self.auto_issue == None:
        # which (a) raised whenever barcode was actually SET, and (b) read a
        # nonexistent attribute `self.auto_issue`. Raise only when a required
        # value is missing.
        if (self.barcode is None
                or self.primary_location is None
                or self.auto_issue_location is None):
            raise Exception("have to set values")
        return (self.barcode, self.primary_location, self.auto_issue_location,
                json.dumps(self.dynamic_locations), json.dumps(self.location_data),
                self.quantity_on_hand)
|
||||
526
MyDataclasses.py
526
MyDataclasses.py
@@ -1,526 +0,0 @@
|
||||
from dataclasses import dataclass, field
|
||||
import json, datetime
|
||||
from database import lst2pgarr
|
||||
|
||||
@dataclass
class LogisticsInfoPayload:
    """Parameter bundle for a logistics_info row (location/zone ids)."""

    barcode: str
    primary_location: int
    primary_zone: int
    auto_issue_location: int
    auto_issue_zone: int

    def payload(self):
        """Return the insert parameters as a tuple, in field-declaration order."""
        values = (self.barcode, self.primary_location, self.primary_zone,
                  self.auto_issue_location, self.auto_issue_zone)
        return values
|
||||
|
||||
@dataclass
class ItemInfoPayload:
    """Parameter bundle for an item_info row."""

    barcode: str
    packaging: str = ""
    uom_quantity: float = 1.0
    uom: int = 1
    cost: float = 0.0
    safety_stock: float = 0.0
    lead_time_days: float = 0.0
    ai_pick: bool = False
    prefixes: list = field(default_factory=list)

    def __post_init__(self):
        # Reject non-string barcodes up front.
        if isinstance(self.barcode, str):
            return
        raise TypeError(f"barcode must be of type str; not {type(self.barcode)}")

    def payload(self):
        """Return the insert tuple; `prefixes` is rendered as a pg array."""
        return (self.barcode, self.packaging, self.uom_quantity, self.uom,
                self.cost, self.safety_stock, self.lead_time_days,
                self.ai_pick, lst2pgarr(self.prefixes))
|
||||
|
||||
@dataclass
class FoodInfoPayload:
    """Parameter bundle for a food_info row."""

    food_groups: list = field(default_factory=list)
    # NOTE: "ingrediants" spelling kept as-is -- callers use this field name.
    ingrediants: list = field(default_factory=list)
    nutrients: dict = field(default_factory=dict)
    expires: bool = False
    default_expiration: float = 0.0

    def payload(self):
        """Return the insert tuple; lists become pg arrays, nutrients JSON."""
        return (lst2pgarr(self.food_groups), lst2pgarr(self.ingrediants),
                json.dumps(self.nutrients), self.expires,
                self.default_expiration)
|
||||
|
||||
@dataclass
class BrandsPayload:
    """Parameter bundle for inserting a brand row."""

    name: str

    def __post_init__(self):
        # Bug fix: the original `return`ed the TypeError instead of raising
        # it, so non-string names were silently accepted.
        if not isinstance(self.name, str):
            raise TypeError(f"brand name should be of type str; not {type(self.name)}")

    def payload(self):
        """Return the 1-tuple of insert parameters."""
        return (
            self.name,
        )
|
||||
|
||||
@dataclass
class ItemsPayload:
    """Parameter bundle for an items row, tying together the per-aspect
    info rows (item/logistics/food) with naming and search metadata."""

    barcode: str
    item_name: str
    item_info_id: int
    logistics_info_id: int
    food_info_id: int
    brand: int = 0
    description: str = ""
    tags: list = field(default_factory=list)
    links: dict = field(default_factory=dict)
    row_type: str = ""
    item_type: str = ""
    search_string: str = ""

    def payload(self):
        """Return the insert tuple. Note the order differs from the field
        declaration order: brand/description/tags/links come before the
        three *_info_id references."""
        return (self.barcode, self.item_name, self.brand, self.description,
                lst2pgarr(self.tags), json.dumps(self.links),
                self.item_info_id, self.logistics_info_id, self.food_info_id,
                self.row_type, self.item_type, self.search_string)
|
||||
|
||||
@dataclass
class ItemLocationPayload:
    """Parameter bundle linking an item (part) to a storage location."""

    part_id: int
    location_id: int
    quantity_on_hand: float = 0.0
    cost_layers: list = field(default_factory=list)

    def __post_init__(self):
        if not isinstance(self.part_id, int):
            raise TypeError(f"part_id must be of type int; not {type(self.part_id)}")
        # Bug fix: the original's message (and the type it reported) were
        # copy-pasted from the part_id check above.
        if not isinstance(self.location_id, int):
            raise TypeError(f"location_id must be of type int; not {type(self.location_id)}")

    def payload(self):
        """Return the insert tuple; cost_layers becomes a pg array."""
        return (
            self.part_id,
            self.location_id,
            self.quantity_on_hand,
            lst2pgarr(self.cost_layers)
        )
|
||||
|
||||
@dataclass
class TransactionPayload:
    """Parameter bundle for recording one inventory transaction."""

    timestamp: datetime.datetime
    logistics_info_id: int
    barcode: str
    name: str
    transaction_type: str
    quantity: float
    description: str
    user_id: int
    data: dict = field(default_factory=dict)

    def payload(self):
        """Return the insert tuple in field order; `data` is JSON-encoded."""
        return (self.timestamp, self.logistics_info_id, self.barcode,
                self.name, self.transaction_type, self.quantity,
                self.description, self.user_id, json.dumps(self.data))
|
||||
|
||||
@dataclass
class CostLayerPayload:
    """Parameter bundle for one acquisition cost layer of an item."""

    aquisition_date: datetime.datetime
    quantity: float
    cost: float
    currency_type: str
    vendor: int = 0
    expires: datetime.datetime = None

    def payload(self):
        """Return the insert tuple. Note `expires` precedes `vendor` here
        even though `vendor` is declared first."""
        values = (self.aquisition_date, self.quantity, self.cost,
                  self.currency_type, self.expires, self.vendor)
        return values
|
||||
|
||||
@dataclass
class LocationPayload:
    """Parameter bundle for a location row inside a zone."""

    uuid: str
    name: str
    zone_id: int

    def __post_init__(self):
        if not isinstance(self.uuid, str):
            raise TypeError(f"uuid must be of type str; not {type(self.uuid)}")
        if not isinstance(self.name, str):
            raise TypeError(f"Location name must be of type str; not {type(self.name)}")
        # Bug fix: the original message claimed "must be of type str"
        # although the check (correctly) requires an int.
        if not isinstance(self.zone_id, int):
            raise TypeError(f"zone_id must be of type int; not {type(self.zone_id)}")

    def payload(self):
        """Return the insert tuple in field order."""
        return (
            self.uuid,
            self.name,
            self.zone_id
        )
|
||||
|
||||
@dataclass
class ZonePayload:
    """Parameter bundle for a zone row belonging to a site."""

    name: str
    site_id: int

    def __post_init__(self):
        # Only the name is validated here, matching the original contract.
        if isinstance(self.name, str):
            return
        raise TypeError(f"Zone name should be of type str; not {type(self.name)}")

    def payload(self):
        """Return the insert tuple (name, site_id)."""
        return (self.name, self.site_id)
|
||||
|
||||
@dataclass
class VendorPayload:
    """Parameter bundle for a vendor row; creation_date is stamped when the
    instance is constructed."""

    vendor_name: str
    created_by: int
    vendor_address: str = ""
    creation_date: datetime.datetime = field(init=False)
    phone_number: str = ""

    def __post_init__(self):
        if not isinstance(self.vendor_name, str):
            raise TypeError(f"vendor_name should be of type str; not {type(self.vendor_name)}")
        self.creation_date = datetime.datetime.now()

    def payload(self):
        """Return the insert tuple (name, address, creation date, creator,
        phone)."""
        return (self.vendor_name, self.vendor_address, self.creation_date,
                self.created_by, self.phone_number)
|
||||
|
||||
@dataclass
class ItemLinkPayload:
    """Parameter bundle linking an alias barcode to a canonical item id."""

    barcode: str
    link: int
    data: dict = field(default_factory=dict)
    conv_factor: float = 1

    def __post_init__(self):
        # Bug fix: the original interpolated `self.barocde` (typo), so a bad
        # barcode raised AttributeError instead of the intended TypeError.
        if not isinstance(self.barcode, str):
            raise TypeError(f"barcode must be of type str; not {type(self.barcode)}")
        # Bug fix: the original message said "of type str" for an int check.
        if not isinstance(self.link, int):
            raise TypeError(f"link must be of type int; not {type(self.link)}")

    def payload(self):
        """Return the insert tuple; `data` is JSON-encoded."""
        return (
            self.barcode,
            self.link,
            json.dumps(self.data),
            self.conv_factor
        )
|
||||
|
||||
@dataclass
class GroupPayload:
    """Parameter bundle for a group row."""

    name: str
    description: str
    group_type: str = "plain"

    def payload(self):
        """Return the insert tuple (name, description, group_type)."""
        return (self.name, self.description, self.group_type)
|
||||
|
||||
@dataclass
class GroupItemPayload:
    """Parameter bundle for one entry inside a group."""

    uuid: str
    gr_id: int
    item_type: str
    item_name: str
    uom: str
    qty: float = 0.0
    item_id: int = None
    links: dict = field(default_factory=dict)

    def payload(self):
        """Return the insert tuple; `links` is JSON-encoded."""
        return (self.uuid, self.gr_id, self.item_type, self.item_name,
                self.uom, self.qty, self.item_id, json.dumps(self.links))
|
||||
|
||||
@dataclass
class RecipeItemPayload:
    """Parameter bundle for one ingredient line of a recipe."""

    uuid: str
    rp_id: int
    item_type: str
    item_name: str
    uom: str
    qty: float = 0.0
    item_id: int = None
    links: dict = field(default_factory=dict)

    def payload(self):
        """Return the insert tuple; `links` is JSON-encoded."""
        return (self.uuid, self.rp_id, self.item_type, self.item_name,
                self.uom, self.qty, self.item_id, json.dumps(self.links))
|
||||
|
||||
@dataclass
class RecipePayload:
    """Parameter bundle for a recipe row; creation_date is stamped at
    construction time."""

    name: str
    author: int
    description: str
    creation_date: datetime.datetime = field(init=False)
    instructions: list = field(default_factory=list)
    picture_path: str = ""

    def __post_init__(self):
        self.creation_date = datetime.datetime.now()

    def payload(self):
        """Return the insert tuple; `instructions` becomes a pg array."""
        return (self.name, self.author, self.description, self.creation_date,
                lst2pgarr(self.instructions), self.picture_path)
|
||||
|
||||
@dataclass
class ReceiptItemPayload:
    """Parameter bundle for one line item on a receipt."""

    type: str
    receipt_id: int
    barcode: str
    name: str
    qty: float = 1.0
    uom: str = "each"
    data: dict = field(default_factory=dict)
    status: str = "Unresolved"

    def payload(self):
        """Return the insert tuple; `data` is JSON-encoded."""
        return (self.type, self.receipt_id, self.barcode, self.name,
                self.qty, self.uom, json.dumps(self.data), self.status)
|
||||
|
||||
@dataclass
class ReceiptPayload:
    """Parameter bundle for a receipt header row; date_submitted is stamped
    at construction time."""

    receipt_id: str
    receipt_status: str = "Unresolved"
    date_submitted: datetime.datetime = field(init=False)
    submitted_by: int = 0
    vendor_id: int = 1
    files: dict = field(default_factory=dict)

    def __post_init__(self):
        self.date_submitted = datetime.datetime.now()

    def payload(self):
        """Return the insert tuple; `files` is JSON-encoded."""
        return (self.receipt_id, self.receipt_status, self.date_submitted,
                self.submitted_by, self.vendor_id, json.dumps(self.files))
|
||||
|
||||
@dataclass
class ShoppingListItemPayload:
    """Parameter bundle for one entry on a shopping list."""

    uuid: str
    sl_id: int
    item_type: str
    item_name: str
    uom: str
    qty: float
    item_id: int = None
    links: dict = field(default_factory=dict)

    def payload(self):
        """Return the insert tuple; `links` is JSON-encoded."""
        return (self.uuid, self.sl_id, self.item_type, self.item_name,
                self.uom, self.qty, self.item_id, json.dumps(self.links))
|
||||
|
||||
@dataclass
class ShoppingListPayload:
    """Parameter bundle for a shopping-list header row; creation_date is
    stamped at construction time."""

    name: str
    description: str
    author: int
    type: str = "plain"
    creation_date: datetime.datetime = field(init=False)

    def __post_init__(self):
        self.creation_date = datetime.datetime.now()

    def payload(self):
        """Return the insert tuple; `type` comes last, after the date."""
        return (self.name, self.description, self.author,
                self.creation_date, self.type)
|
||||
|
||||
@dataclass
class SitePayload:
    """Parameter bundle for a site row; creation_date is stamped at
    construction time."""

    site_name: str
    site_description: str
    site_owner_id: int
    default_zone: str = None
    default_auto_issue_location: str = None
    default_primary_location: str = None
    creation_date: datetime.datetime = field(init=False)
    flags: dict = field(default_factory=dict)

    def __post_init__(self):
        self.creation_date = datetime.datetime.now()

    def payload(self):
        """Return the insert tuple; `flags` is JSON-encoded and the default
        zone/location values trail at the end."""
        return (self.site_name, self.site_description, self.creation_date,
                self.site_owner_id, json.dumps(self.flags),
                self.default_zone, self.default_auto_issue_location,
                self.default_primary_location)
|
||||
|
||||
@dataclass
class RolePayload:
    """Parameter bundle for a role row scoped to a site."""

    role_name: str
    role_description: str
    site_id: int
    flags: dict = field(default_factory=dict)

    def payload(self):
        """Return the insert tuple; `flags` is JSON-encoded."""
        return (self.role_name, self.role_description, self.site_id,
                json.dumps(self.flags))
|
||||
|
||||
@dataclass
class SiteManager:
    """Holds the per-site table creation and teardown orderings."""

    site_name: str
    admin_user: tuple
    default_zone: int
    default_location: int
    description: str
    create_order: list = field(init=False)
    drop_order: list = field(init=False)

    def __post_init__(self):
        # NOTE(review): create_order lists "item" while drop_order lists
        # "items" -- looks like a typo in one of them; confirm the real
        # table name before relying on either.
        self.create_order = [
            "logins", "sites", "roles", "units", "cost_layers",
            "linked_items", "brands", "food_info", "item_info", "zones",
            "locations", "logistics_info", "transactions", "item",
            "vendors", "groups", "group_items", "receipts",
            "receipt_items", "recipes", "recipe_items", "shopping_lists",
            "shopping_list_items", "item_locations", "conversions",
        ]
        self.drop_order = [
            "item_info", "items", "cost_layers", "linked_items",
            "transactions", "brands", "food_info", "logistics_info",
            "zones", "locations", "vendors", "group_items", "groups",
            "receipt_items", "receipts", "recipe_items", "recipes",
            "shopping_list_items", "shopping_lists", "item_locations",
            "conversions",
        ]
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
2
admin.py
2
admin.py
@@ -1,5 +1,5 @@
|
||||
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
|
||||
import psycopg2, math, json, datetime, main, copy, requests
|
||||
import psycopg2, math, json, datetime, main, copy, requests, html_factory
|
||||
from config import config, sites_config
|
||||
from main import unfoldCostLayers, get_sites, get_roles, create_site_secondary, getUser
|
||||
from manage import create
|
||||
|
||||
831
api.py
831
api.py
@@ -1,17 +1,88 @@
|
||||
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
|
||||
import psycopg2, math, json, datetime, main, copy, requests, process, database
|
||||
import psycopg2, math, json, datetime, main, copy, requests
|
||||
from config import config, sites_config
|
||||
from main import unfoldCostLayers
|
||||
|
||||
database_api= Blueprint('database_api', __name__)
|
||||
|
||||
@database_api.route("/changeSite", methods=["POST"])
|
||||
@database_api.route("/changeSite")
|
||||
# Switch the session's active site.
# NOTE(review): this span looks like two merged diff revisions -- the value
# read from the POST JSON body is immediately overwritten by the
# query-string lookup, and the function contains two different return
# statements. Confirm which revision is intended before relying on either.
def changeSite():
|
||||
if request.method == "POST":
|
||||
site = request.json['site']
|
||||
# overwrites the POST-body value above; falls back to 'main'
site = request.args.get('site', 'main')
|
||||
session['selected_site'] = site
|
||||
return jsonify({'error': False, 'message': 'Site Changed!'})
|
||||
return jsonify({'status': 'SUCCESS'})
|
||||
|
||||
def paginate_with_params(cur, limit, offset, params):
    """Fetch one page of items (joined with logistics info) plus the total
    matching row count, applying optional filters.

    Args:
        cur: open DB-API cursor.
        limit: page size.
        offset: row offset of the page.
        params: dict with 'search_string' ('' disables the substring filter)
            and 'view' (1 = only rows with non-zero quantity_on_hand).

    Returns:
        (rows, total_count)
    """
    base = ("SELECT * FROM main_items LEFT JOIN main_logistics_info "
            "ON main_items.logistics_info_id = main_logistics_info.id")
    count_sql = ("SELECT COUNT(*) FROM main_items LEFT JOIN main_logistics_info "
                 "ON main_items.logistics_info_id = main_logistics_info.id")

    clauses = []
    args = []
    # Security fix: the original interpolated the raw search string into the
    # SQL text with an f-string, which allowed SQL injection. Bind it as a
    # query parameter instead.
    if params['search_string'] != "":
        clauses.append(" search_string LIKE %s")
        args.append(f"%{params['search_string']}%")

    if params['view'] == 1:
        clauses.append(" main_logistics_info.quantity_on_hand <> 0.00")

    where = f" WHERE{' AND'.join(clauses)}" if clauses else ""

    sql = (f"{base}{where} ORDER BY main_logistics_info.quantity_on_hand "
           f"LIMIT %s OFFSET %s;")
    cur.execute(sql, (*args, limit, offset))
    pantry_inventory = cur.fetchall()

    cur.execute(f"{count_sql}{where};", tuple(args))
    count = cur.fetchone()[0]
    return pantry_inventory, count
|
||||
|
||||
def paginate_default(cur, limit, offset):
    """Return (rows, total_count) for one unfiltered page of items joined
    with their logistics info."""
    page_sql = ("SELECT * FROM main_items LEFT JOIN main_logistics_info "
                "ON main_items.logistics_info_id = main_logistics_info.id "
                "LIMIT %s OFFSET %s;")
    cur.execute(page_sql, (limit, offset))
    rows = cur.fetchall()
    cur.execute("SELECT COUNT(*) FROM main_items;")
    total = cur.fetchone()[0]
    return rows, total
|
||||
|
||||
def paginate_with_params_groups(cur, limit, offset, params):
    """Fetch one page of groups plus the total matching count, optionally
    filtered by a search substring.

    Returns:
        (rows, total_count)
    """
    base = "SELECT * FROM main_groups"
    count_sql = "SELECT COUNT(*) FROM main_groups"

    clauses = []
    args = []
    # Security fix: bind the search term instead of interpolating raw user
    # input into the SQL text (the original was injectable).
    if params['search_string'] != "":
        clauses.append(" search_string LIKE %s")
        args.append(f"%{params['search_string']}%")

    where = f" WHERE{' AND'.join(clauses)}" if clauses else ""

    # Bug fix: the original ordered by main_logistics_info.quantity_on_hand,
    # a column of a table this query never joins, so every execution failed.
    # Order by the groups' own id to keep pagination stable.
    sql = f"{base}{where} ORDER BY id LIMIT %s OFFSET %s;"
    cur.execute(sql, (*args, limit, offset))
    rows = cur.fetchall()

    cur.execute(f"{count_sql}{where};", tuple(args))
    total = cur.fetchone()[0]
    return rows, total
|
||||
|
||||
@database_api.route("/getGroups")
|
||||
def paginate_groups():
|
||||
@@ -59,6 +130,89 @@ def paginate_groups():
|
||||
|
||||
return jsonify({'groups': new_groups, "end": math.ceil(count/limit)})
|
||||
|
||||
@database_api.route("/getReceipts")
def pagninate_receipts():
    """Return one page of receipts for the session's site, plus the number
    of the last page."""
    page = int(request.args.get('page', 1))
    limit = int(request.args.get('limit', 10))
    site_name = session['selected_site']
    offset = (page - 1) * limit

    receipts, count = [], 0
    database_config = config()
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                cur.execute(f"SELECT * FROM {site_name}_receipts LIMIT {limit} OFFSET {offset};")
                receipts = cur.fetchall()
                cur.execute(f"SELECT COUNT(*) FROM {site_name}_receipts;")
                count = cur.fetchone()[0]
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)

    return jsonify({'receipts': receipts, "end": math.ceil(count/limit)})
|
||||
|
||||
@database_api.route("/getItems")
def pagninate_items():
    """Return one page of items (joined with logistics info) for the
    session's site, plus the number of the last page."""
    page = int(request.args.get('page', 1))
    limit = int(request.args.get('limit', 10))
    # Read (and type-check) the remaining query args even though this
    # endpoint does not filter on them yet, matching the original behavior.
    search_string = str(request.args.get('search_text', ""))
    sort_order = int(request.args.get('sort_order', 1))
    view = int(request.args.get('view', 0))
    site_name = session['selected_site']

    offset = (page - 1) * limit
    pantry_inventory, count = [], 0

    database_config = config()
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                join = (f"FROM {site_name}_items LEFT JOIN {site_name}_logistics_info "
                        f"ON {site_name}_items.logistics_info_id = {site_name}_logistics_info.id")
                cur.execute(f"SELECT * {join} LIMIT {limit} OFFSET {offset};")
                pantry_inventory = cur.fetchall()
                cur.execute(f"SELECT COUNT(*) {join};")
                count = cur.fetchone()[0]
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)

    return jsonify({'items': pantry_inventory, "end": math.ceil(count/limit)})
|
||||
|
||||
@database_api.route("/getTransactions")
def pagninate_transactions():
    """Return one page of the transactions recorded against an item's
    logistics record, plus the number of the last page."""
    item_id = request.args.get('id', 1)
    page = int(request.args.get('page', 1))
    limit = int(request.args.get('limit', 10))
    site_name = session['selected_site']

    offset = (page - 1) * limit
    count = 0
    transactions = []

    database_config = config()
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                # Security fix: item_id comes straight from the query string;
                # the original interpolated it into the SQL text (injectable).
                # Bind it as a parameter instead.
                cur.execute(f"SELECT logistics_info_id FROM {site_name}_items WHERE id=%s;",
                            (item_id,))
                logistics_info_id = cur.fetchone()[0]
                cur.execute(
                    f"SELECT * FROM {site_name}_transactions WHERE logistics_info_id=%s LIMIT %s OFFSET %s;",
                    (logistics_info_id, limit, offset))
                transactions = cur.fetchall()
                cur.execute(
                    f"SELECT COUNT(*) FROM {site_name}_transactions WHERE logistics_info_id=%s;",
                    (logistics_info_id,))
                count = cur.fetchone()[0]
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)

    return jsonify({'transactions': transactions, "end": math.ceil(count/limit)})
|
||||
|
||||
@database_api.route("/getVendors")
|
||||
def get_vendors():
|
||||
@@ -76,7 +230,674 @@ def get_vendors():
|
||||
|
||||
return jsonify(vendors=vendors)
|
||||
|
||||
@database_api.route("/getTransaction")
def get_transaction():
    """Return a single transaction row (as a list) by id."""
    id = int(request.args.get('id', 1))
    database_config = config()
    site_name = session['selected_site']

    transaction = []
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                cur.execute(f"SELECT * FROM {site_name}_transactions WHERE id=%s;", (id,))
                transaction = list(cur.fetchone())
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)

    return jsonify(transaction=transaction)
|
||||
|
||||
@database_api.route("/getLocations")
def get_locations():
    """Return the names of all locations inside the named zone for the
    session's site."""
    zone_name = request.args.get('zone', 1)
    database_config = config()
    site_name = session['selected_site']
    locations = []
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                cur.execute(f"SELECT id FROM {site_name}_zones WHERE name=%s;", (zone_name,))
                zone_id = cur.fetchone()[0]
                cur.execute(f"SELECT name FROM {site_name}_locations WHERE zone_id=%s;", (zone_id,))
                locations = [row[0] for row in cur.fetchall()]
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)

    return jsonify(locations=locations)
|
||||
|
||||
@database_api.route("/getZones")
def get_zones():
    """Return the names of every zone in the session's site."""
    database_config = config()
    site_name = session['selected_site']
    zones = []
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                cur.execute(f"SELECT name FROM {site_name}_zones;")
                zones = [row[0] for row in cur.fetchall()]
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)
    print(zones)
    return jsonify(zones=zones)
|
||||
|
||||
def checkReceiptState(index, site):
    """If no line item on receipt `index` is still 'Unresolved', flip the
    receipt's own status to 'Resolved'."""
    database_config = config()
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                cur.execute(f"SELECT id, status FROM {site}_receipt_items WHERE receipt_id=%s;",
                            (index,))
                rows = cur.fetchall()
                unresolved = sum(1 for row in rows if row[1] == "Unresolved")
                if unresolved == 0:
                    cur.execute(f"UPDATE {site}_receipts SET receipt_status = 'Resolved' WHERE id=%s;",
                                (index,))
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)
|
||||
|
||||
@database_api.route("/deleteReceiptItem", methods=["POST"])
def deleteReceiptItem():
    """Delete a single receipt line item by id."""
    database_config = config()
    site_name = session['selected_site']

    if request.method == "POST":
        index = request.json['index']
        with psycopg2.connect(**database_config) as conn:
            try:
                with conn.cursor() as cur:
                    cur.execute(f"DELETE FROM {site_name}_receipt_items WHERE id=%s;", (index,))
            except (Exception, psycopg2.DatabaseError) as error:
                print(error)
    return jsonify({})
|
||||
|
||||
@database_api.route("/saveReceipt", methods=["POST"])
def saveReceipt():
    """Attach a vendor to an existing receipt."""
    database_config = config()
    site_name = session['selected_site']

    if request.method == "POST":
        receipt_index = request.json['receipt_index']
        vendor_index = request.json['vendor_index']
        with psycopg2.connect(**database_config) as conn:
            try:
                with conn.cursor() as cur:
                    cur.execute(f"UPDATE {site_name}_receipts SET vendor_id=%s WHERE id=%s;",
                                (vendor_index, receipt_index))
            except (Exception, psycopg2.DatabaseError) as error:
                print(error)
    return jsonify({})
|
||||
|
||||
@database_api.route("/saveReceiptItem", methods=["POST"])
def saveReceiptItem():
    """Update a receipt line item; if its barcode changed, re-link it to the
    matching pantry item and refresh name/type/cost from that item's row."""
    database_config = config()
    site_name = session['selected_site']

    if request.method == "POST":
        index = request.json['index']
        cost = request.json['cost']
        qty = request.json['qty']
        barcode = request.json['barcode']

        with psycopg2.connect(**database_config) as conn:
            try:
                with conn.cursor() as cur:
                    cur.execute(f"SELECT * FROM {site_name}_receipt_items WHERE id=%s;", (index,))
                    receipt_item = list(cur.fetchone())
                    _new_type = receipt_item[1]
                    _new_name = receipt_item[4]
                    _new_item = receipt_item[6]
                    _new_cost = cost
                    if barcode != receipt_item[3]:
                        # Barcode changed: resolve link aliases down to the
                        # canonical item barcode first.
                        sql = (f"SELECT {site_name}_items.barcode FROM {site_name}_itemlinks "
                               f"LEFT JOIN {site_name}_items ON {site_name}_itemlinks.link = {site_name}_items.id "
                               f"WHERE {site_name}_itemlinks.barcode = %s;")
                        cur.execute(sql, (barcode,))
                        x = cur.fetchone()
                        if x is not None:
                            barcode = x[0]
                        # 078742013718
                        with open(f"sites/{site_name}/sql/unique/select_item_all_barcode.sql", "r") as file:
                            sql = file.read()
                        cur.execute(sql, (barcode,))
                        row = cur.fetchone()
                        # Bug fix: the original ran list(cur.fetchone()) and
                        # only then checked emptiness, so an unknown barcode
                        # crashed with TypeError before the guard could fire.
                        if row is None:
                            return jsonify({})
                        item = list(row)

                        #TODO: implement the api code, this will be a big function in external that will do all the parsing and stuff in the system.
                        print(item)
                        _new_type = 'Pantry'
                        _new_name = item[2]
                        _new_cost = item[28]
                        _new_item = item

                    # Index 28 of the cached item row carries the cost
                    # (matches the _new_cost/item[28] usage above).
                    _new_item[28] = _new_cost
                    cur.execute(
                        f"UPDATE {site_name}_receipt_items SET type = %s, barcode = %s, name = %s, qty = %s, data = %s WHERE id=%s;",
                        (_new_type, barcode, _new_name, qty, json.dumps(_new_item), index))
            except (Exception, psycopg2.DatabaseError) as error:
                print(error)
    return jsonify({})
|
||||
|
||||
@database_api.route("/voidReceiptItem", methods=["POST"])
def voidReceiptItem():
    """Mark a receipt line item 'Voided', then re-evaluate whether its
    parent receipt is now fully resolved."""
    database_config = config()
    site_name = session['selected_site']

    # Bug fix: the original only bound receipt_id inside the try block, so
    # any database error led to a NameError at the checkReceiptState call.
    receipt_id = None
    if request.method == "POST":
        index = request.json['index']
        with psycopg2.connect(**database_config) as conn:
            try:
                with conn.cursor() as cur:
                    cur.execute(
                        f"UPDATE {site_name}_receipt_items SET status = 'Voided' WHERE id=%s RETURNING receipt_id;",
                        (index,))
                    receipt_id = cur.fetchone()[0]
            except (Exception, psycopg2.DatabaseError) as error:
                print(error)

        if receipt_id is not None:
            checkReceiptState(receipt_id, site_name)

    return jsonify({})
|
||||
|
||||
@database_api.route("/resolveReceiptItem", methods=["POST"])
def resolveReceiptItem():
    """Resolve a receipt line item: post a 'Receipt' transaction for the
    linked pantry item, mark the line 'Resolved', then re-check the parent
    receipt's overall status."""
    database_config = config()
    site_name = session['selected_site']

    # Bug fix: the original only bound receipt_id inside the try block, so
    # any database error led to a NameError at the checkReceiptState call.
    receipt_id = None
    if request.method == "POST":
        index = request.json['index']
        with psycopg2.connect(**database_config) as conn:
            try:
                with conn.cursor() as cur:
                    cur.execute(f"SELECT * FROM {site_name}_receipt_items WHERE id=%s;", (index,))
                    receipt_item = cur.fetchone()
                    cur.execute(f"SELECT receipt_id FROM {site_name}_receipts WHERE id=%s;",
                                (receipt_item[2],))
                    receipt_id = cur.fetchone()[0]
                    # receipt_item[6] is the cached item row; the indices
                    # below mirror the addTransaction keyword arguments.
                    payload = [
                        datetime.datetime.now(),
                        receipt_item[6][8],   # logistics_info_id
                        receipt_item[3],      # barcode
                        receipt_item[4],      # name
                        "Receipt",
                        receipt_item[5],      # qty
                        f"{receipt_id}",
                        1,
                        json.dumps({'location': receipt_item[6][15], 'cost': receipt_item[6][28]})
                    ]

                    print(payload)
                    main.addTransaction(
                        conn=conn,
                        site_name=site_name,
                        payload=payload,
                        location=receipt_item[6][15],
                        logistics_info_id=receipt_item[6][8],
                        item_id=receipt_item[6][0],
                        qty=receipt_item[5],
                        cost=receipt_item[6][28]
                    )

                    cur.execute(
                        f"UPDATE {site_name}_receipt_items SET status = 'Resolved' WHERE id=%s RETURNING receipt_id;",
                        (index,))
                    receipt_id = cur.fetchone()[0]
            except (Exception, psycopg2.DatabaseError) as error:
                print(error)

        if receipt_id is not None:
            checkReceiptState(receipt_id, site_name)

    return jsonify({})
|
||||
|
||||
@database_api.route("/getReceiptItem")
def get_receipt_item():
    """Return one receipt line item (as a list) by id."""
    id = int(request.args.get('index', 1))
    database_config = config()
    site_name = session['selected_site']
    receipt_item = []
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                cur.execute(f"SELECT * FROM {site_name}_receipt_items WHERE id=%s;", (id,))
                receipt_item = list(cur.fetchone())
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)

    return jsonify({"receipt_item": receipt_item})
|
||||
|
||||
@database_api.route("/getReceipt")
def get_receipt():
    """Return one receipt (joined with its vendor) together with all of its
    line items."""
    id = int(request.args.get('id', 1))
    database_config = config()
    site_name = session['selected_site']
    receipt = []
    # Bug fix: the original only bound receipt_items inside the try block,
    # so a database error produced a NameError at the return statement.
    receipt_items = []
    with psycopg2.connect(**database_config) as conn:
        try:
            with conn.cursor() as cur:
                sql = (f"SELECT * FROM {site_name}_receipts LEFT JOIN {site_name}_vendors "
                       f"ON {site_name}_receipts.vendor_id = {site_name}_vendors.id "
                       f"WHERE {site_name}_receipts.id=%s;")
                cur.execute(sql, (id,))
                receipt = list(cur.fetchone())
                cur.execute(f"SELECT * FROM {site_name}_receipt_items WHERE receipt_id=%s;", (id,))
                receipt_items = cur.fetchall()
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)

    return jsonify({"receipt": receipt, "receipt_items": receipt_items})
|
||||
|
||||
|
||||
@database_api.route("/getLinkedItem")
|
||||
def get_linked_item():
|
||||
id = int(request.args.get('id', 1))
|
||||
database_config = config()
|
||||
site_name = session['selected_site']
|
||||
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
sql = f"SELECT * FROM {site_name}_itemlinks WHERE id=%s;"
|
||||
cur.execute(sql, (id, ))
|
||||
linked_item = cur.fetchone()
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
|
||||
return jsonify(linked_item=linked_item)
|
||||
|
||||
@database_api.route("/getItem")
|
||||
def get_item():
|
||||
id = int(request.args.get('id', 1))
|
||||
database_config = config()
|
||||
site_name = session['selected_site']
|
||||
|
||||
item = []
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
with open(f"sites/{site_name}/sql/unique/select_item_all.sql", "r+") as file:
|
||||
sql = file.read()
|
||||
cur.execute(sql, (id, ))
|
||||
item = list(cur.fetchone())
|
||||
SQL_groups = f"SELECT * FROM {site_name}_groups WHERE included_items @> ARRAY[%s];"
|
||||
cur.execute(SQL_groups, (item[0], ))
|
||||
item[25] = list(cur.fetchall())
|
||||
SQL_shopping_lists = f"SELECT * FROM {site_name}_shopping_lists WHERE pantry_items @> ARRAY[%s];"
|
||||
cur.execute(SQL_shopping_lists, (item[0], ))
|
||||
item[23] = list(cur.fetchall())
|
||||
sql_location_data = f"SELECT {site_name}_locations.uuid, {site_name}_item_locations.quantity_on_hand, {site_name}_item_locations.cost_layers FROM {site_name}_item_locations LEFT JOIN {site_name}_locations ON {site_name}_item_locations.location_id = {site_name}_locations.id WHERE part_id=%s;"
|
||||
cur.execute(sql_location_data, (item[0],))
|
||||
# losing cost layers here by uniforming to the javascript, change to take a list?
|
||||
columns = [desc[0] for desc in cur.description]
|
||||
x = cur.fetchall()
|
||||
qty_on_hand = sum([location[1] for location in x])
|
||||
y = {location[0]: location[1] for location in x}
|
||||
item[18] = y
|
||||
item[19] = qty_on_hand
|
||||
sql = f"SELECT * FROM {site_name}_itemlinks WHERE link=%s;"
|
||||
cur.execute(sql, (item[0], ))
|
||||
linked_items = cur.fetchall()
|
||||
print(linked_items)
|
||||
|
||||
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
|
||||
return jsonify(item=item, linked_items=linked_items)
|
||||
|
||||
@database_api.route("/addItem")
|
||||
def addItem():
|
||||
barcode = str(request.args.get('barcode', ""))
|
||||
name = str(request.args.get('item_name', ""))
|
||||
description = str(request.args.get('item_description', ""))
|
||||
item_type = str(request.args.get('item_type', ""))
|
||||
subtype = str(request.args.get('sub_type', ""))
|
||||
site_name = session['selected_site']
|
||||
state = "FAILED"
|
||||
|
||||
payload = copy.deepcopy(main.payload_food_item)
|
||||
|
||||
defaults = config(filename=f"sites/{site_name}/site.ini", section="defaults")
|
||||
uuid = f"{defaults["default_zone"]}@{defaults["default_primary_location"]}"
|
||||
name = name.replace("'", "@&apostraphe&")
|
||||
payload["logistics_info"]["primary_location"] = uuid
|
||||
payload["logistics_info"]["auto_issue_location"] = uuid
|
||||
|
||||
tags = main.lst2pgarr([])
|
||||
links = json.dumps({})
|
||||
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
logistics_info_id = main.create_logistics_info(conn, site_name, barcode, payload["logistics_info"])
|
||||
if not logistics_info_id:
|
||||
return jsonify({'state': str(logistics_info_id)})
|
||||
item_info_id = main.create_item_info(conn, site_name, barcode, payload["item_info"])
|
||||
if not item_info_id:
|
||||
return jsonify({'state': str(item_info_id)})
|
||||
food_info_id = main.create_food_info(conn, site_name, payload["food_info"])
|
||||
if not food_info_id:
|
||||
return jsonify({'state': str(food_info_id)})
|
||||
|
||||
sqltwo = f"INSERT INTO {site_name}_items(barcode, item_name, tags, links, item_info_id, logistics_info_id, food_info_id, row_type, item_type, search_string) VALUES('{barcode}', '{name}', '{tags}', '{links}', {item_info_id}, {logistics_info_id}, {food_info_id}, 'single', 'FOOD', '{barcode}%{name}') RETURNING *;"
|
||||
sqlthree = f"INSERT INTO {site_name}_item_locations(part_id, location_id, quantity_on_hand, cost_layers) VALUES (%s, %s, %s, %s);"
|
||||
|
||||
row = None
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(sqltwo)
|
||||
rows = cur.fetchone()
|
||||
if rows:
|
||||
row = rows[:]
|
||||
cur.execute(f"SELECT id FROM {site_name}_locations WHERE uuid=%s;", (uuid, ))
|
||||
location_id = cur.fetchone()
|
||||
cur.execute(sqlthree, (row[0], location_id, 0.0, main.lst2pgarr([])))
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
conn.rollback()
|
||||
return jsonify({'state': str(error)})
|
||||
|
||||
|
||||
conn.commit()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
|
||||
cur.execute(f"SELECT primary_location FROM {site_name}_logistics_info WHERE id={logistics_info_id};")
|
||||
location = cur.fetchone()[0]
|
||||
payload = [
|
||||
datetime.datetime.now(),
|
||||
logistics_info_id,
|
||||
barcode,
|
||||
name,
|
||||
"SYSTEM",
|
||||
0.0,
|
||||
"Item Added to System!",
|
||||
1,
|
||||
json.dumps({'location': location})
|
||||
]
|
||||
|
||||
main.addTransaction(
|
||||
conn=conn,
|
||||
site_name=site_name,
|
||||
payload=payload,
|
||||
location=location,
|
||||
logistics_info_id=logistics_info_id,
|
||||
item_id=row[0],
|
||||
qty=0.0,
|
||||
cost=0.0)
|
||||
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
conn.rollback()
|
||||
return jsonify({'state': str(error)})
|
||||
|
||||
return jsonify({'state': "SUCCESS"})
|
||||
|
||||
@database_api.route("/transact", methods=['POST'])
|
||||
def addTransaction():
|
||||
|
||||
if request.method == "POST":
|
||||
if "site_name" in request.get_json().keys():
|
||||
site_name = request.get_json()["site_name"]
|
||||
print("passed")
|
||||
elif "selected_site" in session.keys():
|
||||
site_name = session['selected_site']
|
||||
print(session)
|
||||
else:
|
||||
return jsonify({"message": "Failed", "error": "No site selected or sent along with request!"})
|
||||
|
||||
logistics_info_id = request.get_json()['logistics_info_id']
|
||||
barcode = request.get_json()['barcode']
|
||||
name = request.get_json()['name']
|
||||
location = request.get_json()['location']
|
||||
qty = request.get_json()['qty']
|
||||
trans_type = request.get_json()['trans_type']
|
||||
trans_cost = request.get_json()['trans_cost']
|
||||
|
||||
database_config = config()
|
||||
|
||||
actual_qty = qty
|
||||
if trans_type == "Adjust Out":
|
||||
actual_qty = -qty
|
||||
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(f"SELECT id FROM {site_name}_items WHERE barcode=%s;", (barcode,))
|
||||
item_id = cur.fetchone()
|
||||
payload = [
|
||||
datetime.datetime.now(),
|
||||
logistics_info_id,
|
||||
barcode,
|
||||
name,
|
||||
trans_type,
|
||||
qty,
|
||||
"",
|
||||
1,
|
||||
json.dumps({'location': location, 'cost': trans_cost})
|
||||
]
|
||||
|
||||
print(payload)
|
||||
main.addTransaction(
|
||||
conn=conn,
|
||||
site_name=site_name,
|
||||
payload=payload,
|
||||
location=location,
|
||||
logistics_info_id=logistics_info_id,
|
||||
item_id=item_id,
|
||||
qty=actual_qty,
|
||||
cost=trans_cost
|
||||
)
|
||||
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
conn.rollback()
|
||||
return jsonify({'state': str(error)})
|
||||
print("SUCCESS")
|
||||
return jsonify({'state': str("SUCCESS")})
|
||||
print("SUCCESS")
|
||||
return jsonify({'state': str("FAILED")})
|
||||
|
||||
@database_api.route("/updateItem", methods=['POST'])
|
||||
def updateItem():
|
||||
def transformValues(values):
|
||||
v = []
|
||||
for value in values:
|
||||
if isinstance(value, dict):
|
||||
v.append(json.dumps(value))
|
||||
elif isinstance(value, list):
|
||||
v.append(main.lst2pgarr(value))
|
||||
else:
|
||||
v.append(value)
|
||||
return v
|
||||
|
||||
def manufactureSQL(keys, item_id, table):
|
||||
if len(keys) > 1:
|
||||
x = f"({', '.join(keys)})"
|
||||
y = f"({', '.join(['%s' for _ in keys])})"
|
||||
else:
|
||||
x = f"{', '.join(keys)}"
|
||||
y = f"{', '.join(['%s' for _ in keys])}"
|
||||
|
||||
sql = f"UPDATE {table} SET {x} = {y} WHERE id={item_id};"
|
||||
sqltwo = f"SELECT {', '.join(keys)} FROM {table} WHERE id={item_id};"
|
||||
return sql, sqltwo
|
||||
|
||||
if request.method == "POST":
|
||||
site_name = session['selected_site']
|
||||
item_id = request.get_json()['id']
|
||||
data = request.get_json()
|
||||
logistics_info_id = request.get_json()['logistics_info_id']
|
||||
food_info_id = request.get_json()['food_info_id']
|
||||
item_info_id = request.get_json()['item_info_id']
|
||||
updated = request.get_json()['updated']
|
||||
item_info = request.get_json()['item_info']
|
||||
food_info = request.get_json()['food_info']
|
||||
logistics_info = data['logistics_info']
|
||||
|
||||
save_data = {}
|
||||
for k, v in updated.items():
|
||||
save_data[f"{k}_new"] = v;
|
||||
for k, v in item_info.items():
|
||||
save_data[f"{k}_new"] = v;
|
||||
for k, v in food_info.items():
|
||||
save_data[f"{k}_new"] = v;
|
||||
for k, v in logistics_info.items():
|
||||
save_data[f"{k}_new"] = v;
|
||||
|
||||
database_config = config()
|
||||
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
if updated != {}:
|
||||
values = transformValues(updated.values())
|
||||
sql, sqltwo = manufactureSQL(updated.keys(), item_id, f"{site_name}_items")
|
||||
cur.execute(sqltwo)
|
||||
old_data = dict(zip(updated.keys(), cur.fetchone()))
|
||||
for k, v in old_data.items():
|
||||
save_data[f"{k}_old"] = v;
|
||||
cur.execute(sql, values)
|
||||
|
||||
if item_info != {}:
|
||||
values = transformValues(item_info.values())
|
||||
sql, sqltwo = manufactureSQL(item_info.keys(), item_info_id, f"{site_name}_item_info")
|
||||
cur.execute(sqltwo)
|
||||
old_data = dict(zip(item_info.keys(), cur.fetchone()))
|
||||
for k, v in old_data.items():
|
||||
save_data[f"{k}_old"] = v;
|
||||
cur.execute(sql, values)
|
||||
|
||||
if food_info != {}:
|
||||
values = transformValues(food_info.values())
|
||||
sql, sqltwo = manufactureSQL(food_info.keys(), food_info_id, f"{site_name}_food_info")
|
||||
cur.execute(sqltwo)
|
||||
old_data = dict(zip(food_info.keys(), cur.fetchone()))
|
||||
for k, v in old_data.items():
|
||||
save_data[f"{k}_old"] = v;
|
||||
cur.execute(sql, values)
|
||||
|
||||
if logistics_info != {}:
|
||||
values = transformValues(logistics_info.values())
|
||||
sql, sqltwo = manufactureSQL(logistics_info.keys(), logistics_info_id, f"{site_name}_logistics_info")
|
||||
cur.execute(sqltwo)
|
||||
old_data = dict(zip(logistics_info.keys(), cur.fetchone()))
|
||||
for k, v in old_data.items():
|
||||
save_data[f"{k}_old"] = v;
|
||||
cur.execute(sql, values)
|
||||
|
||||
cur.execute(f"SELECT {site_name}_items.id, {site_name}_items.barcode, {site_name}_items.item_name, {site_name}_logistics_info.primary_location FROM {site_name}_items LEFT JOIN {site_name}_logistics_info ON {site_name}_items.logistics_info_id = {site_name}_logistics_info.id WHERE {site_name}_items.id={item_id};")
|
||||
item_id, barcode, name, primary_location = cur.fetchone()
|
||||
payload = [
|
||||
datetime.datetime.now(),
|
||||
logistics_info_id,
|
||||
barcode,
|
||||
name,
|
||||
"SYSTEM",
|
||||
0.0,
|
||||
"Updated Item!",
|
||||
1,
|
||||
json.dumps(save_data)
|
||||
]
|
||||
|
||||
main.addTransaction(
|
||||
conn=conn,
|
||||
site_name=site_name,
|
||||
payload=payload,
|
||||
location=primary_location,
|
||||
logistics_info_id=logistics_info_id,
|
||||
item_id=item_id,
|
||||
qty=0.0,
|
||||
cost=0.0
|
||||
)
|
||||
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
conn.rollback()
|
||||
|
||||
return jsonify({"state": "SUCCESS"})
|
||||
|
||||
return jsonify({"status": "FAILED"})
|
||||
|
||||
@database_api.route("/linkItem", methods=["POST"])
|
||||
def linkItemToItem():
|
||||
if request.method == "POST":
|
||||
database_config = config()
|
||||
site_name = session['selected_site']
|
||||
master_index = request.json['master_index']
|
||||
sub_index = request.json['sub_index']
|
||||
print(master_index, sub_index)
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
with open(f"sites/{site_name}/sql/unique/select_item_all.sql", "r+") as file:
|
||||
sql = file.read()
|
||||
cur.execute(sql, (sub_index, ))
|
||||
sub_item = cur.fetchone()
|
||||
|
||||
# grab all the location data and then get the qty on hand
|
||||
sql_location_data = f"SELECT {site_name}_locations.uuid, {site_name}_item_locations.quantity_on_hand, {site_name}_item_locations.cost_layers FROM {site_name}_item_locations LEFT JOIN {site_name}_locations ON {site_name}_item_locations.location_id = {site_name}_locations.id WHERE part_id=%s;"
|
||||
cur.execute(sql_location_data, (sub_item[0],))
|
||||
x = cur.fetchall()
|
||||
qty_on_hand = sum([location[1] for location in x])
|
||||
|
||||
# Delete sub_item from database and cascade through tables
|
||||
sql = f"DELETE FROM {site_name}_items WHERE id=%s;"
|
||||
cur.execute(sql, (sub_index,))
|
||||
|
||||
# insert sub_item into the links table
|
||||
sql = f"INSERT INTO {site_name}_itemlinks (barcode, link, data, conv_factor) VALUES (%s, %s, %s, %s);"
|
||||
cur.execute(sql, (sub_item[1], master_index, json.dumps(sub_item), 1.0))
|
||||
|
||||
# need to adjust the qty on hand into the master items
|
||||
|
||||
with open(f"sites/{site_name}/sql/unique/select_item_all.sql", "r+") as file:
|
||||
sql = file.read()
|
||||
|
||||
cur.execute(sql, (master_index,))
|
||||
master_item = cur.fetchone()
|
||||
payload = [
|
||||
datetime.datetime.now(),
|
||||
master_item[8],
|
||||
master_item[1],
|
||||
master_item[2],
|
||||
"Adjust In",
|
||||
qty_on_hand,
|
||||
f"COVERSION FROM {sub_item[1]}",
|
||||
1,
|
||||
json.dumps({'location': master_item[15], 'cost': sub_item[28]*qty_on_hand})
|
||||
]
|
||||
|
||||
print(payload)
|
||||
main.addTransaction(
|
||||
conn=conn,
|
||||
site_name=site_name,
|
||||
payload=payload,
|
||||
location=master_item[15],
|
||||
logistics_info_id=master_item[8],
|
||||
item_id=master_item[0],
|
||||
qty=qty_on_hand,
|
||||
cost=sub_item[28]*qty_on_hand
|
||||
)
|
||||
|
||||
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
|
||||
|
||||
return jsonify({})
|
||||
@database_api.route("/addGroup")
|
||||
def addGroup():
|
||||
name = str(request.args.get('name', ""))
|
||||
|
||||
@ -6,7 +6,7 @@ password = test
|
||||
port = 5432
|
||||
|
||||
[manage]
|
||||
sites =
|
||||
sites = main
|
||||
first_setup = False
|
||||
signup_enabled = True
|
||||
signup_enabled = False
|
||||
|
||||
|
||||
1721
database.log
1721
database.log
File diff suppressed because one or more lines are too long
1553
database.py
1553
database.py
File diff suppressed because it is too large
Load Diff
121
external_API.py
121
external_API.py
@ -1,121 +0,0 @@
|
||||
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
|
||||
import psycopg2, math, json, datetime, main, copy, requests, process, database, pprint, MyDataclasses
|
||||
from config import config, sites_config
|
||||
from main import unfoldCostLayers
|
||||
from threading import Thread
|
||||
from queue import Queue
|
||||
import time, process
|
||||
from user_api import login_required
|
||||
|
||||
external_api = Blueprint('external', __name__)
|
||||
|
||||
@external_api.route('/external/getItemLocations', methods=["GET"])
def getItemLocations():
    """Paginated list of stock locations for one item.

    Query params: id (item id), page, limit.
    """
    recordset = []
    count = 0
    # BUG FIX: `limit` is referenced in the fallback return below but was
    # only bound inside the GET branch — any non-GET dispatch raised
    # NameError instead of the error response. Default it here.
    limit = 10
    if request.method == "GET":
        item_id = int(request.args.get('id', 1))
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 10))
        site_name = session['selected_site']
        offset = (page - 1) * limit
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            recordset, count = database.getItemLocations(conn, site_name, (item_id, limit, offset), convert=True)
        print(count)
        return jsonify({"locations":recordset, "end":math.ceil(count/limit), "error":False, "message":"item fetched succesfully!"})
    return jsonify({"locations":recordset, "end": math.ceil(count/limit), "error":True, "message":"There was an error with this GET statement"})
|
||||
|
||||
@external_api.route('/external/getItem', methods=["GET"])
def getItem():
    """Fetch one fully-converted item record by id for external callers."""
    record = {}
    if request.method == "GET":
        requested_id = int(request.args.get('id', 1))
        conn_params = config()
        with psycopg2.connect(**conn_params) as conn:
            record = database.getItemAllByID(conn, session['selected_site'], (requested_id, ), convert=True)
        return jsonify({"item":record, "error":False, "message":"item fetched succesfully!"})
    return jsonify({"item":record, "error":True, "message":"There was an error with this GET statement"})
|
||||
|
||||
@external_api.route('/external/getItem/barcode', methods=["GET"])
def getItemBarcode():
    """Look an item up by (partial) barcode, wrapped in SQL LIKE wildcards."""
    record = {}
    if request.method == "GET":
        pattern = f"%{str(request.args.get('barcode', 1))}%"
        conn_params = config()
        with psycopg2.connect(**conn_params) as conn:
            record = database.getItemAllByBarcode(conn, session['selected_site'], (pattern, ), convert=True)

        print(record)
        if record == {}:
            return jsonify({"item":None, "error":True, "message":"Item either does not exist or there was a larger problem!"})
        return jsonify({"item":record, "error":False, "message":"item fetched succesfully!"})
    return jsonify({"item":record, "error":True, "message":"There was an error with this GET statement"})
|
||||
|
||||
@external_api.route('/external/getModalItems', methods=["GET"])
@login_required
def getModalItems():
    """Search endpoint backing the item-picker modal (paginated)."""
    recordset = []
    count = {'count': 0}
    if request.method == "GET":
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 10))
        search_string = request.args.get('search_string', '')
        site_name = session['selected_site']
        query_args = (search_string, limit, (page - 1) * limit)
        with psycopg2.connect(**config()) as conn:
            recordset, count = database.getItemsForModal(conn, site_name, query_args, convert=True)
        return jsonify({"items":recordset, "end":math.ceil(count['count']/limit), "error":False, "message":"items fetched succesfully!"})
    return jsonify({"items":recordset, "end":math.ceil(count['count']/limit), "error":True, "message":"There was an error with this GET statement"})
|
||||
|
||||
@external_api.route('/external/postTransaction', methods=["POST"])
def post_transaction():
    """Forward a posted transaction payload to process.postTransaction."""
    if request.method == "POST":
        conn_params = config()
        with psycopg2.connect(**conn_params) as conn:
            outcome = process.postTransaction(
                conn=conn,
                site_name=session['selected_site'],
                user_id=session['user_id'],
                data=dict(request.json),
            )
        return jsonify(outcome)
    return jsonify({"error":True, "message":"There was an error with this POST statement"})
|
||||
|
||||
|
||||
@external_api.route('/external/postReceipt', methods=["POST"])
def post_receipt():
    """Create a receipt (header + line items) from an external POST.

    Receipt ids are allocated via database.request_receipt_id and stored as
    "SIR-<n>". Each entry of body["items"] becomes one receipt-item row.
    """
    if request.method == "POST":
        site_name = session['selected_site']
        user_id = session['user_id']
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            items = request.json['items']
            receipt_id = database.request_receipt_id(conn, site_name)
            receipt_id = f"SIR-{receipt_id}"
            receipt = MyDataclasses.ReceiptPayload(
                receipt_id=receipt_id,
                submitted_by=user_id
            )
            # insertReceiptsTuple returns the stored row (converted to a
            # mapping when convert=True); its 'id' keys the line items below.
            receipt = database.insertReceiptsTuple(conn, site_name, receipt.payload(), convert=True)

            for item in items:

                receipt_item = MyDataclasses.ReceiptItemPayload(
                    type=item['type'],
                    receipt_id=receipt['id'],
                    barcode=item['item']['barcode'],
                    name=item['item']['item_name'],
                    qty=item['item']['qty'],
                    uom=item['item']['uom'],
                    data=item['item']['data']
                )
                database.insertReceiptItemsTuple(conn, site_name, receipt_item.payload())

        return jsonify({"error":False, "message":"Transaction Complete!"})
    return jsonify({"error":True, "message":"There was an error with this POST statement"})
|
||||
232
external_devices.py
Normal file
232
external_devices.py
Normal file
@ -0,0 +1,232 @@
|
||||
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
|
||||
import psycopg2, math, json, datetime, main, copy, openfoodfacts
|
||||
from config import config, sites_config
|
||||
from main import unfoldCostLayers
|
||||
|
||||
external_api= Blueprint('external_api', __name__)
|
||||
|
||||
open_food_api = openfoodfacts.API(user_agent="MyAwesomeApp/1.0")
|
||||
|
||||
open_food_enabled = False
|
||||
|
||||
|
||||
def parseOpenFoodsData(data: dict):
    """Normalize a raw OpenFoodFacts product dict into the site's item payload.

    Each (key, expected_type, default) triple below is copied from ``data``
    only when present AND of the expected type; otherwise the default is
    used, so a partially-populated product record cannot crash the parse.

    BUG FIX: the function built the payload but never returned it, so every
    caller received None. It now returns the payload dict.

    Args:
        data: raw product dict as returned by the openfoodfacts client.

    Returns:
        dict: a deep copy of main.payload_food_item with tags, product_name,
        ingredients and nutrients filled in.
    """
    print(data)
    x = [
        ("brands_tags", list, []),           # folded into items.tags
        ("categories_tags", list, []),       # folded into items.tags
        ("countries_tags", list, []),        # folded into items.tags
        ("labels_hierarchy", list, []),      # folded into items.tags
        ("ingredients_text_en", str, ""),    # split on ", " -> food_info.ingrediants
        ("nutriments", dict, {}),            # -> food_info.nutrients
        ("product_name", str, ""),           # -> payload["product_name"]
        ("serving_size", str, ""),           # appended into nutriments
        ("code", str, "")                    # barcode; currently unused below
    ]

    dummy = {}
    keys = data.keys()
    for key in x:
        if key[0] in keys and isinstance(data[key[0]], key[1]):
            dummy[key[0]] = data[key[0]]
        else:
            dummy[key[0]] = key[2]

    tags = dummy["brands_tags"] + dummy["categories_tags"] + dummy["countries_tags"] + dummy["labels_hierarchy"]
    ingredients = str(dummy["ingredients_text_en"]).split(", ")
    nutriments = dummy["nutriments"]
    nutriments["serving_size"] = dummy["serving_size"]

    payload = copy.deepcopy(main.payload_food_item)
    payload["tags"] = tags
    payload["product_name"] = dummy["product_name"]
    payload["food_info"]["ingrediants"] = ingredients
    payload["food_info"]["nutrients"] = nutriments

    print(payload)
    return payload
|
||||
|
||||
|
||||
@external_api.route("/api/getLink/<site>/<barcode>")
|
||||
def get_linked_item(site, barcode):
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(f"SELECT * FROM {site}_itemlinks WHERE barcode=%s;", (barcode, ))
|
||||
item = cur.fetchone()
|
||||
if item:
|
||||
return jsonify({"item": item}), 200
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
conn.rollback()
|
||||
return jsonify({'state': str(error)}), 500
|
||||
return jsonify({"item": []}), 500
|
||||
|
||||
@external_api.route("/api/getItem/<site>/<barcode>")
|
||||
def get_item(site, barcode):
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
with open(f"sites/{site}/sql/unique/select_item_all_barcode.sql", "r+") as file:
|
||||
sql = file.read()
|
||||
cur.execute(sql, (barcode, ))
|
||||
item = cur.fetchone()
|
||||
if item:
|
||||
return jsonify({"item": item}), 200
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
conn.rollback()
|
||||
return jsonify({'state': str(error)}), 500
|
||||
return jsonify({"item": []}), 500
|
||||
|
||||
@external_api.route("/api/getOpenFacts/<site>/<barcode>")
|
||||
def get_open_facts(site, barcode):
|
||||
if open_food_enabled:
|
||||
data = open_food_api.product.get(barcode)
|
||||
if data != None:
|
||||
return jsonify({"item": data}), 500
|
||||
return jsonify({"item": []}), 500
|
||||
|
||||
|
||||
@external_api.route("/api/addTransaction", methods=['POST'])
|
||||
def add_transaction():
|
||||
|
||||
if request.method == "POST":
|
||||
print(request.get_json())
|
||||
site_name = request.get_json()["site_name"]
|
||||
logistics_info_id = request.get_json()['logistics_info_id']
|
||||
barcode = request.get_json()['barcode']
|
||||
name = request.get_json()['name']
|
||||
location = request.get_json()['location']
|
||||
qty = float(request.get_json()['qty'])
|
||||
trans_type = request.get_json()['trans_type']
|
||||
trans_cost = request.get_json()['trans_cost']
|
||||
|
||||
database_config = config()
|
||||
|
||||
actual_qty = qty
|
||||
if trans_type == "Adjust Out":
|
||||
actual_qty = -qty
|
||||
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(f"SELECT id FROM {site_name}_items WHERE barcode=%s;", (barcode,))
|
||||
item_id = cur.fetchone()
|
||||
payload = [
|
||||
datetime.datetime.now(),
|
||||
logistics_info_id,
|
||||
barcode,
|
||||
name,
|
||||
trans_type,
|
||||
qty,
|
||||
"",
|
||||
1,
|
||||
json.dumps({'location': location, 'cost': trans_cost})
|
||||
]
|
||||
|
||||
print(payload)
|
||||
main.addTransaction(
|
||||
conn=conn,
|
||||
site_name=site_name,
|
||||
payload=payload,
|
||||
location=location,
|
||||
logistics_info_id=logistics_info_id,
|
||||
item_id=item_id,
|
||||
qty=actual_qty,
|
||||
cost=trans_cost
|
||||
)
|
||||
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
conn.rollback()
|
||||
return jsonify({'state': str(error)})
|
||||
print("SUCCESS")
|
||||
return jsonify({'state': str("SUCCESS")})
|
||||
print("SUCCESS")
|
||||
return jsonify({'state': str("FAILED")})
|
||||
|
||||
|
||||
@external_api.route("/api/requestReceiptId/<site>")
|
||||
def request_receipt_id(site):
|
||||
"""gets the next id for receipts_id, currently returns a 8 digit number
|
||||
|
||||
Args:
|
||||
site (str): site to get the next id for
|
||||
|
||||
Returns:
|
||||
json: receipt_id, message, error keys
|
||||
"""
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(f"SELECT receipt_id FROM {site}_receipts ORDER BY id DESC LIMIT 1;")
|
||||
next_receipt_id = cur.fetchone()
|
||||
print(next_receipt_id)
|
||||
if next_receipt_id == None:
|
||||
next_receipt_id = "00000001"
|
||||
else:
|
||||
next_receipt_id = next_receipt_id[0]
|
||||
next_receipt_id = int(next_receipt_id.split("-")[1]) + 1
|
||||
y = str(next_receipt_id)
|
||||
len_str = len(y)
|
||||
x = "".join(["0" for _ in range(8 - len_str)])
|
||||
next_receipt_id = x + y
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
conn.rollback()
|
||||
return jsonify({"message": "Failed", "error": str(error)})
|
||||
return jsonify({"receipt_id": next_receipt_id, "message": "Success", "error": "None"}), 200
|
||||
|
||||
@external_api.route("/api/addReceipt", methods=["POST"])
|
||||
def add_receipt():
|
||||
"""Receives a payload and adds the receipt to the system for <site>
|
||||
|
||||
payload = {
|
||||
receipt_id: str
|
||||
receipt_status: str
|
||||
date_submitted: timestamp
|
||||
submitted_by: INT
|
||||
vendor_id: INT
|
||||
files: dict
|
||||
items: list = (tuples)
|
||||
(type, 0, barcode, name, qty, data, status),
|
||||
site_name: str
|
||||
}
|
||||
|
||||
Returns:
|
||||
Success: dict with "error", "message" keys
|
||||
"""
|
||||
if request.method == "POST":
|
||||
site_name = request.get_json()["site_name"]
|
||||
receipt_id = request.get_json()["receipt_id"]
|
||||
receipt_status = request.get_json()["receipt_status"]
|
||||
date_submitted = request.get_json()['date_submitted']
|
||||
submitted_by = request.get_json()["submitted_by"]
|
||||
vendor_id = request.get_json()["vendor_id"]
|
||||
files = request.get_json()["files"]
|
||||
items = request.get_json()["items"]
|
||||
payload = (receipt_id, receipt_status, date_submitted, submitted_by, vendor_id, json.dumps(files))
|
||||
database_config = config()
|
||||
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
insert_receipt = f"INSERT INTO {site_name}_receipts (receipt_id, receipt_status, date_submitted, submitted_by, vendor_id, files) VALUES (%s, %s, %s, %s, %s, %s) RETURNING id;"
|
||||
cur.execute(insert_receipt, payload)
|
||||
row_id = cur.fetchone()[0]
|
||||
print(row_id)
|
||||
insert_item = f"INSERT INTO {site_name}_receipt_items (type, receipt_id, barcode, name, qty, data, status) VALUES (%s, %s, %s, %s, %s, %s, %s);"
|
||||
for item in items:
|
||||
item = list(item)
|
||||
item[1] = row_id
|
||||
item[5] = json.dumps(item[5])
|
||||
cur.execute(insert_item, item)
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
conn.rollback()
|
||||
return jsonify({"message": "Failed", "error": str(error)})
|
||||
return jsonify({"message": "Success", "error": "None"})
|
||||
return jsonify({"message": "Failed", "error": "Must be a post method!"})
|
||||
34
group_api.py
34
group_api.py
@ -1,34 +0,0 @@
|
||||
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
|
||||
import psycopg2, math, json, datetime, main, copy, requests, process, database, pprint, MyDataclasses
|
||||
from config import config, sites_config
|
||||
from main import unfoldCostLayers
|
||||
from user_api import login_required
|
||||
|
||||
groups_api = Blueprint('groups_api', __name__)
|
||||
|
||||
@groups_api.route("/groups")
|
||||
@login_required
|
||||
def groups():
|
||||
sites = [site[1] for site in main.get_sites(session['user']['sites'])]
|
||||
return render_template("groups/index.html",
|
||||
current_site=session['selected_site'],
|
||||
sites=sites)
|
||||
|
||||
@groups_api.route("/group/<id>")
|
||||
@login_required
|
||||
def group(id):
|
||||
sites = [site[1] for site in main.get_sites(session['user']['sites'])]
|
||||
return render_template("groups/group.html", id=id, current_site=session['selected_site'], sites=sites)
|
||||
|
||||
@groups_api.route('/groups/getGroups', methods=["GET"])
def getGroups():
    """Paginated JSON list of groups for the selected site."""
    groups = []
    if request.method == "GET":
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 1))
        pagination = (limit, (page - 1) * limit)
        site_name = session['selected_site']
        with psycopg2.connect(**config()) as conn:
            groups, count = database.getGroups(conn, site_name, pagination, convert=True)
        return jsonify({'groups': groups, 'end': math.ceil(count/limit), 'error': False, 'message': 'bleh'})
|
||||
56
html_factory.py
Normal file
56
html_factory.py
Normal file
@ -0,0 +1,56 @@
|
||||
import math
|
||||
|
||||
|
||||
def manufactureUsersTable(rows):
    """Build the admin users table as an HTML string.

    Each row contributes only its second column (the username) as one
    <tr>/<td> pair inside the table body.
    """
    rendered_rows = []
    for row in rows:
        rendered_rows.append(f"""<tr>
        <td>{row[1]}</td>
    </tr>""")

    shell = """<table>
    <thead>
        <tr>
            <th>Username</th>
        </tr>
    </thead>
    <tbody>
        %%rows%%
    </tbody>
    </table>
    """
    # Splice the rendered body rows into the %%rows%% placeholder.
    return shell.replace("%%rows%%", "".join(rendered_rows))
|
||||
|
||||
|
||||
def manufacturePagination(current_page: int, count: int, limit: int):
    """Build the HTML for a Materialize-style htmx pagination bar.

    Args:
        current_page: 1-based page currently displayed.
        count: total number of rows available.
        limit: rows shown per page.

    Returns:
        str: an ``<ul class="pagination">`` fragment, or "" when everything
        fits on a single page (count < limit).

    Fix: removed a leftover debug ``print`` of the page-number window.
    """
    total_pages = math.ceil(count / limit)
    pag = ""
    # htmx payload that carries the page size through each request.
    limits = "hx-vals='{" + f'"limit": "{str(limit)}"' + "}'"
    if count >= limit:
        pag += '<ul class="pagination">'

        if current_page > 1:
            pag += f'<li class="waves-effect my_btn"><a hx-post="/admin/users/{current_page - 1}" hx-target="#main_body" {limits}><i class="material-icons">chevron_left</i></a></li>'

        # Window of up to five page numbers centred on the current page,
        # clamped to [1, total_pages].
        before = [_ for _ in [current_page - 2, current_page - 1, current_page] if _ >= 1]
        after = [_ for _ in [current_page + 1, current_page + 2] if _ <= total_pages]

        for page_number in before + after:
            if page_number == current_page:
                pag += f'<li class="active"><a hx-post="/admin/users/{page_number}" hx-target="#main_body" {limits}>{page_number}</a></li>'
            else:
                pag += f'<li class="my_btn waves-effect"><a hx-post="/admin/users/{page_number}" hx-target="#main_body" {limits}>{page_number}</a></li>'

        if current_page != total_pages:
            pag += f'<li class="waves-effect my_btn"><a hx-post="/admin/users/{current_page + 1}" hx-target="#main_body" {limits}><i class="material-icons">chevron_right</i></a></li>'

        pag += "</ul>"

    return pag
|
||||
|
||||
474
item_API.py
474
item_API.py
@ -1,474 +0,0 @@
|
||||
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
|
||||
import psycopg2, math, json, datetime, main, copy, requests, process, database, pprint, MyDataclasses
|
||||
from config import config, sites_config
|
||||
from main import unfoldCostLayers
|
||||
from user_api import login_required
|
||||
import postsqldb
|
||||
|
||||
# Blueprint collecting all /item* routes; registered by the main Flask app.
items_api = Blueprint('items_api', __name__)
|
||||
|
||||
@items_api.route("/item/<parent_id>/itemLink/<id>")
@login_required
def itemLink(parent_id, id):
    """Render the item-link detail page for one linked item."""
    site_names = [row[1] for row in main.get_sites(session['user']['sites'])]
    return render_template(
        "items/itemlink.html",
        current_site=session['selected_site'],
        sites=site_names,
        proto={'referrer': request.referrer},
        id=id,
    )
|
||||
|
||||
@items_api.route("/item/getTransactions", methods=["GET"])
def getTransactions():
    """Return one page of transactions for a logistics_info record.

    Query args: logistics_info_id, page (1-based), limit.
    """
    if request.method == "GET":
        logistics_info_id = int(request.args.get('logistics_info_id', 1))
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 50))
        site_name = session['selected_site']
        offset = (page - 1) * limit
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            sql = f"SELECT * FROM {site_name}_transactions WHERE logistics_info_id=%s LIMIT %s OFFSET %s;"
            recordset = database.queryTuples(conn, sql, (logistics_info_id, limit, offset), convert=True)
            sql = f"SELECT COUNT(*) FROM {site_name}_transactions WHERE logistics_info_id=%s;"
            count = database.queryTuple(conn, sql, payload=(logistics_info_id, ))
        return jsonify({"transactions": recordset, "end": math.ceil(count[0] / limit), "error": False, "message": ""})
    # Bug fix: the old fallback referenced `limit` (unbound outside the GET
    # branch) and indexed an int with count[0], raising instead of replying.
    return jsonify({"transactions": [], "end": 0, "error": True, "message": "There was an error in your GET request!"})
|
||||
|
||||
@items_api.route("/item/getTransaction", methods=["GET"])
def getTransaction():
    """Return a single transaction row, looked up by id, as JSON."""
    transaction = {}
    if request.method == "GET":
        row_id = int(request.args.get('id', 1))
        database_config = config()
        site_name = session['selected_site']
        with psycopg2.connect(**database_config) as conn:
            transaction = database.__selectTuple(conn, site_name, f"{site_name}_transactions", payload=(row_id, ), convert=True)
        return jsonify({"transaction": transaction, "error": False, "message": ""})
    return jsonify({"transaction": transaction, "error": True, "message": "There was an error in your GET request!"})
|
||||
|
||||
@items_api.route("/item/getItem")
def get_item():
    """Fetch one fully-joined item record by id and return it as JSON."""
    item_id = int(request.args.get('id', 1))
    site_name = session['selected_site']
    item = []
    with psycopg2.connect(**config()) as conn:
        item = database.getItemAllByID(conn, site_name, payload=(item_id, ), convert=True)
    return jsonify(item=item)
|
||||
|
||||
@items_api.route("/item/getItemsWithQOH", methods=['GET'])
@login_required
def pagninate_items():
    """Return one page of items with quantity-on-hand for the selected site.

    Query args: page, limit, search_text (plus sort_order/view, currently unused).
    """
    if request.method == "GET":
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 10))
        search_string = str(request.args.get('search_text', ""))
        # NOTE(review): sort_order and view are read but never applied — confirm intent.
        sort_order = request.args.get('sort_order', "")
        view = request.args.get('view', "")
        site_name = session['selected_site']
        offset = (page - 1) * limit

        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            pantry_inventory, count = database.getItemsWithQOH(conn, site_name, (search_string, limit, offset), convert=True)

        return jsonify({'items': pantry_inventory, "end": math.ceil(count['count'] / limit), 'error': False, 'message': 'Items Loaded Successfully!'})
    # Bug fix: the old fallback divided by `limit`, which is unbound here.
    return jsonify({'items': [], "end": 0, 'error': True, 'message': 'There was a problem loading the items!'})
|
||||
|
||||
@items_api.route('/item/getModalItems', methods=["GET"])
@login_required
def getModalItems():
    """Return one page of items for the item-picker modal.

    Query args: page, limit, search_string.
    """
    if request.method == "GET":
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 10))
        search_string = request.args.get('search_string', '')
        site_name = session['selected_site']
        offset = (page - 1) * limit
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            payload = (search_string, limit, offset)
            recordset, count = database.getItemsForModal(conn, site_name, payload, convert=True)
        return jsonify({"items": recordset, "end": math.ceil(count['count'] / limit), "error": False, "message": "items fetched succesfully!"})
    # Bug fix: the old fallback divided by `limit`, which is unbound here.
    return jsonify({"items": [], "end": 0, "error": True, "message": "There was an error with this GET statement"})
|
||||
|
||||
@items_api.route('/item/getPrefixes', methods=["GET"])
@login_required
def getModalPrefixes():
    """Return one page of SKU prefixes for the prefix-picker modal."""
    if request.method == "GET":
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 10))
        site_name = session['selected_site']
        offset = (page - 1) * limit
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            payload = (limit, offset)
            recordset, count = postsqldb.SKUPrefixTable.getPrefixes(conn, site_name, payload, convert=True)
        return jsonify({"prefixes": recordset, "end": math.ceil(count / limit), "error": False, "message": "items fetched succesfully!"})
    # Bug fix: the old fallback divided a dict by an unbound `limit`.
    return jsonify({"prefixes": [], "end": 0, "error": True, "message": "There was an error with this GET statement"})
|
||||
|
||||
|
||||
@items_api.route('/item/getZones', methods=['GET'])
def getZones():
    """Return one page of zones plus the last page number.

    Fix: removed a leftover debug ``print(count, len(zones))``.
    """
    page = int(request.args.get('page', 1))
    limit = int(request.args.get('limit', 1))
    database_config = config()
    site_name = session['selected_site']
    zones = []
    offset = (page - 1) * limit
    payload = (limit, offset)
    count = 0
    with psycopg2.connect(**database_config) as conn:
        zones, count = database.getZonesWithCount(conn, site_name, payload, convert=True)
    return jsonify(zones=zones, endpage=math.ceil(count[0] / limit))
|
||||
|
||||
@items_api.route('/item/getLocations', methods=['get'])
def getLocationsByZone():
    """Return one page of locations belonging to a zone, plus last page number."""
    zone_id = int(request.args.get('id', 1))
    page = int(request.args.get('page', 1))
    limit = int(request.args.get('limit', 1))
    offset = (page - 1) * limit

    database_config = config()
    site_name = session['selected_site']
    locations = []
    count = 0
    with psycopg2.connect(**database_config) as conn:
        rows_sql = f"SELECT * FROM {site_name}_locations WHERE zone_id=%s LIMIT %s OFFSET %s;"
        locations = database.queryTuples(conn, rows_sql, (zone_id, limit, offset), convert=True)
        count_sql = f"SELECT COUNT(*) FROM {site_name}_locations WHERE zone_id=%s;"
        count = database.queryTuple(conn, count_sql, (zone_id, ))
    return jsonify(locations=locations, endpage=math.ceil(count[0] / limit))
|
||||
|
||||
@items_api.route('/item/getBrands', methods=['GET'])
def getBrands():
    """Return one page of brands plus the last page number."""
    page = int(request.args.get('page', 1))
    limit = int(request.args.get('limit', 1))
    offset = (page - 1) * limit
    database_config = config()
    site_name = session['selected_site']
    brands = []
    count = 0
    with psycopg2.connect(**database_config) as conn:
        brands, count = database._paginateTableTuples(
            conn, site_name, f"{site_name}_brands", (limit, offset), convert=True)
    return jsonify(brands=brands, endpage=math.ceil(count['count'] / limit))
|
||||
|
||||
@items_api.route('/item/updateItem', methods=['POST'])
def updateItem():
    # Apply a partial update to an item and its satellite rows
    # (item_info / food_info / logistics_info / items), then record a
    # zero-quantity UPDATE transaction capturing old and new values.
    # Expects JSON: {'id': item_id, 'data': {section: {field: new_value}}}.
    if request.method == "POST":
        id = request.get_json()['id']
        data = request.get_json()['data']

        database_config = config()
        site_name = session['selected_site']

        # Flatten every incoming field into "<field>_new" for the audit record.
        transaction_data = {}
        for key in data.keys():
            for key_2 in data[key].keys():
                transaction_data[f"{key_2}_new"] = data[key][key_2]

        with psycopg2.connect(**database_config) as conn:
            item = database.getItemAllByID(conn, site_name, (id, ), convert=True)
            if 'item_info' in data.keys() and data['item_info'] != {}:
                # Snapshot the previous values as "<field>_old" before updating.
                for key in data['item_info'].keys():
                    transaction_data[f"{key}_old"] = item['item_info'][key]
                item_info_id = item['item_info_id']
                item_info = database.__updateTuple(conn, site_name, f"{site_name}_item_info", {'id': item_info_id, 'update': data['item_info']}, convert=True)
            if 'food_info' in data.keys() and data['food_info'] != {}:
                for key in data['food_info'].keys():
                    transaction_data[f"{key}_old"] = item['food_info'][key]
                food_info_id = item['food_info_id']
                print(food_info_id, data['food_info'])
                food_info = database.__updateTuple(conn, site_name, f"{site_name}_food_info", {'id': food_info_id, 'update': data['food_info']}, convert=True)
            if 'logistics_info' in data.keys() and data['logistics_info'] != {}:
                for key in data['logistics_info'].keys():
                    transaction_data[f"{key}_old"] = item['logistics_info'][key]
                logistics_info_id = item['logistics_info_id']
                print(logistics_info_id, data['logistics_info'])
                logistics_info = database.__updateTuple(conn, site_name, f"{site_name}_logistics_info", {'id': logistics_info_id, 'update': data['logistics_info']}, convert=True)
            if 'item' in data.keys() and data['item'] != {}:
                for key in data['item'].keys():
                    if key == "brand":
                        # The fetched item embeds the full brand row; the audit
                        # record stores only its id.
                        transaction_data[f"{key}_old"] = item['brand']['id']
                    else:
                        transaction_data[f"{key}_old"] = item[key]
                # NOTE(review): `item` is rebound to the updated items-table row;
                # the audit payload below reads barcode/item_name/logistics_info_id
                # from it — confirm the update helper returns those columns.
                item = database.__updateTuple(conn, site_name, f"{site_name}_items", {'id': id, 'update': data['item']}, convert=True)

            # Record the audit transaction carrying the old/new field diff.
            trans = MyDataclasses.TransactionPayload(
                timestamp=datetime.datetime.now(),
                logistics_info_id=item['logistics_info_id'],
                barcode=item['barcode'],
                name=item['item_name'],
                transaction_type="UPDATE",
                quantity=0.0,
                description="Item was updated!",
                user_id=session['user_id'],
                data=transaction_data
            )
            database.insertTransactionsTuple(conn, site_name, trans.payload())

        return jsonify(error=False, message="Item updated successfully!")
    return jsonify(error=True, message="Unable to save, ERROR!")
|
||||
|
||||
@items_api.route('/item/updateItemLink', methods=['POST'])
def updateItemLink():
    # Change the conversion factor on an item link and record an audit
    # transaction holding the old and new factors.
    # Expects JSON: {'id', 'conv_factor', 'barcode', 'old_conv'}.
    if request.method == "POST":
        id = request.get_json()['id']
        conv_factor = request.get_json()['conv_factor']
        barcode = request.get_json()['barcode']
        old_conv_factor = request.get_json()['old_conv']

        database_config = config()
        site_name = session['selected_site']
        user_id = session['user_id']
        transaction_time = datetime.datetime.now()
        with psycopg2.connect(**database_config) as conn:
            # The link is keyed by barcode; look the item up to get its
            # logistics_info_id and display name for the audit record.
            linkedItem = database.getItemAllByBarcode(conn, site_name, (barcode, ), convert=True)

            transaction = MyDataclasses.TransactionPayload(
                timestamp=transaction_time,
                logistics_info_id=linkedItem['logistics_info_id'],
                barcode=barcode,
                name=linkedItem['item_name'],
                transaction_type='UPDATE',
                quantity=0.0,
                description='Link updated!',
                user_id=user_id,
                data={'new_conv_factor': conv_factor, 'old_conv_factor': old_conv_factor}
            )

            database.__updateTuple(conn, site_name, f"{site_name}_itemlinks", {'id': id, 'update': {'conv_factor': conv_factor}})
            database.insertTransactionsTuple(conn, site_name, transaction.payload())
        return jsonify(error=False, message="Linked Item was updated successfully")
    return jsonify(error=True, message="Unable to save this change, ERROR!")
|
||||
|
||||
|
||||
@items_api.route('/item/getLinkedItem', methods=["GET"])
@login_required
def getLinkedItem():
    """Return a single itemlinks row, looked up by id, as JSON."""
    linked_item = {}
    if request.method == "GET":
        link_id = int(request.args.get('id', 1))
        site_name = session['selected_site']
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            linked_item = database.__selectTuple(conn, site_name, f"{site_name}_itemlinks", (link_id, ), convert=True)
        return jsonify({'linked_item': linked_item, 'error': False, 'message': 'Linked Item added!!'})
    return jsonify({'linked_item': linked_item, 'error': True, 'message': 'These was an error with adding to the linked list!'})
|
||||
|
||||
@items_api.route('/item/addLinkedItem', methods=["POST"])
def addLinkedItem():
    # Link a child item to a parent: adjust all of the child's stock out,
    # adjust the converted quantity into the parent's primary location,
    # record the link row, and mark the child row as a 'link'.
    # Expects JSON: {'parent_id', 'child_id', 'conv_factor'}.
    if request.method == "POST":
        parent_id = request.get_json()['parent_id']
        child_id = request.get_json()['child_id']
        conv_factor = request.get_json()['conv_factor']

        database_config = config()
        site_name = session['selected_site']
        user_id = session['user_id']
        with psycopg2.connect(**database_config) as conn:
            print(parent_id, child_id, conv_factor)
            parent_item = database.getItemAllByID(conn, site_name, (parent_id, ), convert=True)
            child_item = database.getItemAllByID(conn, site_name, (child_id, ), convert=True)

            # i need to transact out ALL locations for child item.
            pprint.pprint(child_item)
            sum_child_qoh = 0
            for location in child_item['item_locations']:
                print(location)
                sum_child_qoh += location['quantity_on_hand']
                # One 'Adjust Out' transaction per location holding the child.
                payload = {
                    'item_id': child_item['id'],
                    'logistics_info_id': child_item['logistics_info_id'],
                    'barcode': child_item['barcode'],
                    'item_name': child_item['item_name'],
                    'transaction_type': 'Adjust Out',
                    'quantity': location['quantity_on_hand'],
                    # NOTE(review): f-strings reusing the same quote character
                    # inside the replacement field require Python 3.12+.
                    'description': f'Converted to {parent_item['barcode']}',
                    'cost': child_item['item_info']['cost'],
                    'vendor': 1,
                    'expires': False,
                    'location_id': location['location_id']
                }
                process.postTransaction(conn, site_name, user_id, payload)

            print(sum_child_qoh)

            primary_location = database.selectItemLocationsTuple(conn, site_name, (parent_item['id'], parent_item['logistics_info']['primary_location']['id']), convert=True)

            # Single 'Adjust In' of the converted total at the parent's
            # primary location.
            # NOTE(review): 'expires' is None here but False above — confirm
            # which sentinel postTransaction expects.
            payload = {
                'item_id': parent_item['id'],
                'logistics_info_id': parent_item['logistics_info_id'],
                'barcode': parent_item['barcode'],
                'item_name': parent_item['item_name'],
                'transaction_type': 'Adjust In',
                'quantity': (float(sum_child_qoh) * float(conv_factor)),
                'description': f'Converted from {child_item['barcode']}',
                'cost': child_item['item_info']['cost'],
                'vendor': 1,
                'expires': None,
                'location_id': primary_location['location_id']
            }

            pprint.pprint(payload)
            result = process.postTransaction(conn, site_name, user_id, payload)

            if result['error']:
                return jsonify(result)

            itemLink = MyDataclasses.ItemLinkPayload(
                barcode=child_item['barcode'],
                link=parent_item['id'],
                data=child_item,
                conv_factor=conv_factor
            )

            database.insertItemLinksTuple(conn, site_name, itemLink.payload())

            # Flag the child row so it no longer appears as a standalone item.
            database.__updateTuple(conn, site_name, f"{site_name}_items", {'id': child_item['id'], 'update': {'row_type': 'link'}})

        return jsonify({'error': False, 'message': 'Linked Item added!!'})
    return jsonify({'error': True, 'message': 'These was an error with adding to the linked list!'})
|
||||
|
||||
@items_api.route('/items/addBlankItem', methods=["POST"])
def addBlankItem():
    """Create a new, mostly-empty item from barcode/name/subtype.

    Fixes: jsonify cannot serialize an Exception object (now str(error));
    the old handler called conn.rollback() even when connect() itself failed,
    raising NameError — the connection context manager already rolls back on
    error. Also removed a debug pprint of the request payload.
    """
    if request.method == "POST":
        data = {
            'barcode': request.get_json()['barcode'],
            'name': request.get_json()['name'],
            'subtype': request.get_json()['subtype']
        }
        database_config = config()
        site_name = session['selected_site']
        user_id = session['user_id']
        try:
            with psycopg2.connect(**database_config) as conn:
                process.postNewBlankItem(conn, site_name, user_id, data)
        except Exception as error:
            return jsonify({'error': True, 'message': str(error)})
        return jsonify({'error': False, 'message': 'Item added!!'})
    return jsonify({'error': True, 'message': 'These was an error with adding Item!'})
|
||||
|
||||
@items_api.route('/items/addSKUPrefix', methods=["POST"])
def addSKUPrefix():
    """Insert a new SKU prefix (uuid, name, description) for the selected site.

    Fixes: jsonify cannot serialize an Exception object (now str(error)), and
    conn.rollback() was called even when connect() itself failed (NameError);
    the connection context manager already rolls back on error.
    """
    if request.method == "POST":
        database_config = config()
        site_name = session['selected_site']
        try:
            with psycopg2.connect(**database_config) as conn:
                prefix = postsqldb.SKUPrefixTable.Payload(
                    request.get_json()['uuid'],
                    request.get_json()['name'],
                    request.get_json()['description']
                )
                postsqldb.SKUPrefixTable.insert_tuple(conn, site_name, prefix.payload())
        except Exception as error:
            return jsonify({'error': True, 'message': str(error)})
        return jsonify({'error': False, 'message': 'Prefix added!!'})
    return jsonify({'error': True, 'message': 'These was an error with adding this Prefix!'})
|
||||
|
||||
@items_api.route('/item/addConversion', methods=['POST'])
def addConversion():
    """Add a unit-of-measure conversion (item, uom, factor) for an item."""
    if request.method == "POST":
        body = request.get_json()
        item_id = body['parent_id']
        uom_id = body['uom_id']
        conv_factor = body['conv_factor']

        database_config = config()
        site_name = session['selected_site']
        with psycopg2.connect(**database_config) as conn:
            conversion = postsqldb.ConversionsTable.Payload(item_id, uom_id, conv_factor)
            postsqldb.ConversionsTable.insert_tuple(conn, site_name, conversion.payload())

        return jsonify(error=False, message="Conversion was added successfully")
    return jsonify(error=True, message="Unable to save this conversion, ERROR!")
|
||||
|
||||
@items_api.route('/item/deleteConversion', methods=['POST'])
def deleteConversion():
    """Delete one unit-of-measure conversion by id.

    Fix: removed a leftover debug ``print(conversion_id)``.
    """
    if request.method == "POST":
        conversion_id = request.get_json()['conversion_id']
        database_config = config()
        site_name = session['selected_site']
        with psycopg2.connect(**database_config) as conn:
            postsqldb.ConversionsTable.delete_item_tuple(conn, site_name, (conversion_id,))

        return jsonify(error=False, message="Conversion was deleted successfully")
    return jsonify(error=True, message="Unable to delete this conversion, ERROR!")
|
||||
|
||||
@items_api.route('/item/updateConversion', methods=['POST'])
def updateConversion():
    """Apply a field update to one conversion row."""
    if request.method == "POST":
        body = request.get_json()
        conversion_id = body['conversion_id']
        update_dictionary = body['update']

        database_config = config()
        site_name = session['selected_site']
        with psycopg2.connect(**database_config) as conn:
            postsqldb.ConversionsTable.update_item_tuple(
                conn, site_name, {'id': conversion_id, 'update': update_dictionary})
        return jsonify(error=False, message="Conversion was updated successfully")
    return jsonify(error=True, message="Unable to save this conversion, ERROR!")
|
||||
|
||||
@items_api.route('/item/addPrefix', methods=['POST'])
def addPrefix():
    """Attach a SKU prefix to an item_info row.

    Fixes: removed three leftover debug ``print`` calls, and the prefix is
    no longer appended when it is already present (the old code stored
    duplicates in the prefixes array).
    """
    if request.method == "POST":
        item_info_id = request.get_json()['parent_id']
        prefix_id = request.get_json()['prefix_id']
        database_config = config()
        site_name = session['selected_site']
        with psycopg2.connect(**database_config) as conn:
            prefixes = postsqldb.ItemInfoTable.select_tuple(conn, site_name, (item_info_id,))['prefixes']
            if prefix_id not in prefixes:
                prefixes.append(prefix_id)
                postsqldb.ItemInfoTable.update_tuple(conn, site_name, {'id': item_info_id, 'update': {'prefixes': prefixes}})
        return jsonify(error=False, message="Prefix was added successfully")
    return jsonify(error=True, message="Unable to save this prefix, ERROR!")
|
||||
|
||||
@items_api.route('/item/deletePrefix', methods=['POST'])
def deletePrefix():
    """Detach a SKU prefix from an item_info row.

    Fix: ``list.remove`` raised an unhandled ValueError when the prefix was
    not attached (e.g. a repeated delete click); removal is now guarded.
    """
    if request.method == "POST":
        item_info_id = request.get_json()['item_info_id']
        prefix_id = request.get_json()['prefix_id']

        database_config = config()
        site_name = session['selected_site']
        with psycopg2.connect(**database_config) as conn:
            prefixes = postsqldb.ItemInfoTable.select_tuple(conn, site_name, (item_info_id,))['prefixes']
            if prefix_id in prefixes:
                prefixes.remove(prefix_id)
                postsqldb.ItemInfoTable.update_tuple(conn, site_name, {'id': item_info_id, 'update': {'prefixes': prefixes}})
        return jsonify(error=False, message="Prefix was deleted successfully")
    return jsonify(error=True, message="Unable to delete this prefix, ERROR!")
|
||||
|
||||
@items_api.route('/item/refreshSearchString', methods=['POST'])
def refreshSearchString():
    """Rebuild an item's denormalized search_string from its current fields."""
    if request.method == "POST":
        item_id = request.get_json()['item_id']

        database_config = config()
        site_name = session['selected_site']
        with psycopg2.connect(**database_config) as conn:
            item = postsqldb.ItemTable.getItemAllByID(conn, site_name, (item_id,))
            # "key::value" pairs joined by "&&" form the searchable blob.
            parameters = [
                f"id::{item['id']}",
                f"barcode::{item['barcode']}",
                f"name::{item['item_name']}",
                f"brand::{item['brand']['name']}",
                f"expires::{item['food_info']['expires']}",
                f"row_type::{item['row_type']}",
                f"item_type::{item['item_type']}",
            ]
            parameters += [f"prefix::{prefix['name']}" for prefix in item['item_info']['prefixes']]

            search_string = "&&".join(parameters)
            postsqldb.ItemTable.update_tuple(conn, site_name, {'id': item_id, 'update': {'search_string': search_string}})

        return jsonify(error=False, message="Search String was updated successfully")
    return jsonify(error=True, message="Unable to update this search string, ERROR!")
|
||||
32
main.py
32
main.py
@ -412,7 +412,6 @@ def delete_site(site_name):
|
||||
drop_table(f'sites/{site_name}/sql/drop/receipt_items.sql')
|
||||
drop_table(f'sites/{site_name}/sql/drop/receipts.sql')
|
||||
drop_table(f'sites/{site_name}/sql/drop/recipes.sql')
|
||||
drop_table(f'sites/{site_name}/sql/drop/shopping_list_items.sql')
|
||||
drop_table(f'sites/{site_name}/sql/drop/shopping_lists.sql')
|
||||
drop_table(f'sites/{site_name}/sql/drop/item_locations.sql')
|
||||
|
||||
@ -438,17 +437,16 @@ def create_site(site_name, admin_user: tuple, default_zone, default_primary, def
|
||||
create_table(f'sites/{site_name}/sql/create/receipt_items.sql')
|
||||
create_table(f'sites/{site_name}/sql/create/recipes.sql')
|
||||
create_table(f'sites/{site_name}/sql/create/shopping_lists.sql')
|
||||
create_table(f'sites/{site_name}/sql/create/shopping_list_items.sql')
|
||||
create_table(f'sites/{site_name}/sql/create/item_locations.sql')
|
||||
|
||||
add_admin_sql = f"INSERT INTO logins(username, password, email) VALUES(%s, %s, %s) RETURNING id;"
|
||||
add_site_sql = f"INSERT INTO sites(site_name, creation_date, site_owner_id, flags, default_zone, default_auto_issue_location, default_primary_location, site_description) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) RETURNING id;"
|
||||
add_admin_role = f"INSERT INTO roles(role_name, site_id) VALUES(%s, %s) RETURNING id;"
|
||||
|
||||
sql = f"INSERT INTO {site_name}_zones(name) VALUES (%s) RETURNING id;"
|
||||
sqltwo = f"INSERT INTO {site_name}_locations(uuid, name, zone_id, items) VALUES (%s, %s, %s, %s);"
|
||||
sqlthree = f"INSERT INTO {site_name}_vendors(vendor_name, creation_date, created_by) VALUES (%s, %s, %s);"
|
||||
sqlfour = f"INSERT INTO {site_name}_brands(name) VALUES (%s);"
|
||||
|
||||
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
try:
|
||||
@ -514,7 +512,6 @@ def create_site(site_name, admin_user: tuple, default_zone, default_primary, def
|
||||
|
||||
uuid = f"{default_zone}@{default_primary}"
|
||||
|
||||
#setup loaction
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(sqltwo, (uuid, default_primary, zone_id, json.dumps({})))
|
||||
@ -523,7 +520,6 @@ def create_site(site_name, admin_user: tuple, default_zone, default_primary, def
|
||||
conn.rollback()
|
||||
return False
|
||||
|
||||
#setup vendor
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(sqlthree, ("None", str(datetime.datetime.now()), 1))
|
||||
@ -531,15 +527,6 @@ def create_site(site_name, admin_user: tuple, default_zone, default_primary, def
|
||||
print(error)
|
||||
conn.rollback()
|
||||
return False
|
||||
|
||||
# setup brand
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(sqlfour, ("None", ))
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
conn.rollback()
|
||||
return False
|
||||
|
||||
conn.commit()
|
||||
|
||||
@ -550,7 +537,6 @@ async def create_site_secondary(site_name, user_id, default_zone, default_primar
|
||||
create_table(f"sites/{site_name}/sql/create/roles.sql")
|
||||
|
||||
create_table(f'sites/{site_name}/sql/create/groups.sql')
|
||||
create_table(f'sites/{site_name}/sql/create/cost_layers.sql')
|
||||
create_table(f'sites/{site_name}/sql/create/linked_items.sql')
|
||||
create_table(f'sites/{site_name}/sql/create/brands.sql')
|
||||
create_table(f'sites/{site_name}/sql/create/food_info.sql')
|
||||
@ -565,7 +551,6 @@ async def create_site_secondary(site_name, user_id, default_zone, default_primar
|
||||
create_table(f'sites/{site_name}/sql/create/receipt_items.sql')
|
||||
create_table(f'sites/{site_name}/sql/create/recipes.sql')
|
||||
create_table(f'sites/{site_name}/sql/create/shopping_lists.sql')
|
||||
create_table(f'sites/{site_name}/sql/create/shopping_list_items.sql')
|
||||
create_table(f'sites/{site_name}/sql/create/item_locations.sql')
|
||||
|
||||
add_site_sql = f"INSERT INTO sites(site_name, creation_date, site_owner_id, flags, default_zone, default_auto_issue_location, default_primary_location, site_description) VALUES (%s, %s, %s, %s, %s, %s, %s, %s) RETURNING id;"
|
||||
@ -574,9 +559,7 @@ async def create_site_secondary(site_name, user_id, default_zone, default_primar
|
||||
sql = f"INSERT INTO {site_name}_zones(name) VALUES (%s) RETURNING id;"
|
||||
sqltwo = f"INSERT INTO {site_name}_locations(uuid, name, zone_id, items) VALUES (%s, %s, %s, %s);"
|
||||
sqlthree = f"INSERT INTO {site_name}_vendors(vendor_name, creation_date, created_by) VALUES (%s, %s, %s);"
|
||||
sqlfour = f"INSERT INTO {site_name}_brands(name) VALUES (%s);"
|
||||
|
||||
|
||||
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
# set up site in database
|
||||
@ -645,14 +628,6 @@ async def create_site_secondary(site_name, user_id, default_zone, default_primar
|
||||
print(error)
|
||||
conn.rollback()
|
||||
return False
|
||||
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(sqlfour, ("Unknown", ))
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
conn.rollback()
|
||||
return False
|
||||
|
||||
conn.commit()
|
||||
|
||||
@ -708,6 +683,7 @@ def get_sites(sites=[]):
|
||||
for each in sites:
|
||||
cur.execute(f"SELECT * FROM sites WHERE id=%s;", (each, ))
|
||||
site_rows.append(cur.fetchone())
|
||||
print(site_rows)
|
||||
return site_rows
|
||||
except (Exception, psycopg2.DatabaseError) as error:
|
||||
print(error)
|
||||
|
||||
930
postsqldb.py
930
postsqldb.py
@ -1,930 +0,0 @@
|
||||
import datetime
|
||||
import psycopg2, json
|
||||
import psycopg2.extras
|
||||
from dataclasses import dataclass, field
|
||||
import random
|
||||
import string
|
||||
|
||||
class DatabaseError(Exception):
    """Wraps any database-layer failure and logs it to database.log on creation.

    Attributes:
        payload: the SQL parameters in flight when the error occurred.
        message: the original error text with newlines stripped.
        sql: the offending SQL statement with newlines stripped.
    """
    def __init__(self, message, payload=[], sql=""):
        super().__init__(message)
        self.payload = payload
        self.message = str(message).replace("\n", "")
        self.sql = sql.replace("\n", "")
        # Side effect: every construction appends a record to database.log.
        self.log_error()

    def log_error(self):
        # Append a multi-line, column-aligned record to the shared log file.
        with open("database.log", "a+") as file:
            file.write("\n")
            file.write(f"{datetime.datetime.now()} --- ERROR --- DatabaseError(message='{self.message}',\n")
            # NOTE(review): f-strings that reuse the same quote character inside
            # the replacement field require Python 3.12+.
            file.write(f"{" "*41}payload={self.payload},\n")
            file.write(f"{" "*41}sql='{self.sql}')")

    def __str__(self):
        return f"DatabaseError(message='{self.message}', payload={self.payload}, sql='{self.sql}')"
|
||||
|
||||
def tupleDictionaryFactory(columns, row):
    """Zip a psycopg2 cursor description with a result row into a dict.

    Args:
        columns: cursor.description sequence; each entry's first item is the column name.
        row: one result tuple.

    Returns:
        dict mapping column name to value.
    """
    names = (description[0] for description in columns)
    return dict(zip(names, row))
|
||||
|
||||
def lst2pgarr(alist):
    """Serialize a Python list into a Postgres array literal: ['a','b'] -> '{a,b}'.

    Generalization: elements are stringified first, so lists of ints (ids)
    work as well as strings; the old version raised TypeError on non-str
    elements. NOTE: values are not quoted/escaped — elements containing
    ',' '{' '}' or '"' would need quoting before being sent to Postgres.
    """
    return '{' + ','.join(str(item) for item in alist) + '}'
|
||||
|
||||
def updateStringFactory(updated_values: dict):
    """Build the SET clause and parameter list for a SQL UPDATE statement.

    dict-valued fields are JSON-encoded so they can be stored in
    json/jsonb columns.

    Returns:
        tuple[str, list]: ("col1 = %s, col2 = %s", [v1, v2]) in dict order.
    """
    set_clause = ', '.join(f"{column} = %s" for column in updated_values)
    values = [
        json.dumps(value) if isinstance(value, dict) else value
        for value in updated_values.values()
    ]
    return set_clause, values
|
||||
|
||||
def getUUID(n):
    """Return a random alphanumeric string of length n.

    Not an RFC 4122 UUID — just n characters drawn from [a-zA-Z0-9].
    """
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(n))
|
||||
|
||||
class ConversionsTable:
|
||||
    @dataclass
    class Payload:
        # Insert-payload for one conversion row: maps an item to a unit of
        # measure with a multiplicative conversion factor.
        item_id: int
        uom_id: int
        conv_factor: float

        def payload(self):
            """Return the fields as a tuple in insert-statement column order."""
            return (
                self.item_id,
                self.uom_id,
                self.conv_factor
            )
|
||||
|
||||
@classmethod
|
||||
def create_table(self, conn, site):
|
||||
with open(f"sql/CREATE/conversions.sql", 'r') as file:
|
||||
sql = file.read().replace("%%site_name%%", site)
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(sql)
|
||||
except Exception as error:
|
||||
raise DatabaseError(error, sql, "PrefixTable")
|
||||
|
||||
@classmethod
|
||||
def delete_table(self, conn, site):
|
||||
with open(f"sql/DROP/conversions.sql", 'r') as file:
|
||||
sql = file.read().replace("%%site_name%%", site)
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(sql)
|
||||
except Exception as error:
|
||||
raise DatabaseError(error, 'ConversionsTable', sql)
|
||||
|
||||
@classmethod
|
||||
def insert_tuple(self, conn, site: str, payload: list, convert=True):
|
||||
"""insert into recipes table for site
|
||||
|
||||
Args:
|
||||
conn (_T_connector@connect): Postgresql Connector
|
||||
site (stre):
|
||||
payload (tuple): (item_id, uom_id, conversion_factor)
|
||||
convert (bool, optional): Determines if to return tuple as a dictionary. Defaults to False.
|
||||
|
||||
Raises:
|
||||
DatabaseError:
|
||||
|
||||
Returns:
|
||||
tuple or dict: inserted tuple
|
||||
"""
|
||||
record = ()
|
||||
with open(f"sql/INSERT/insertConversionsTuple.sql", "r+") as file:
|
||||
sql = file.read().replace("%%site_name%%", site)
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(sql, payload)
|
||||
rows = cur.fetchone()
|
||||
if rows and convert:
|
||||
record = tupleDictionaryFactory(cur.description, rows)
|
||||
elif rows and not convert:
|
||||
record = rows
|
||||
except Exception as error:
|
||||
raise DatabaseError(error, payload, sql)
|
||||
return record
|
||||
|
||||
|
||||
@classmethod
|
||||
def delete_item_tuple(self, conn, site_name, payload, convert=True):
|
||||
"""This is a basic funtion to delete a tuple from a table in site with an id. All
|
||||
tables in this database has id's associated with them.
|
||||
|
||||
Args:
|
||||
conn (_T_connector@connect): Postgresql Connector
|
||||
site_name (str):
|
||||
payload (tuple): (tuple_id,...)
|
||||
convert (bool, optional): Determines if to return tuple as dictionary. Defaults to False.
|
||||
|
||||
Raises:
|
||||
DatabaseError:
|
||||
|
||||
Returns:
|
||||
tuple or dict: deleted tuple
|
||||
"""
|
||||
deleted = ()
|
||||
sql = f"WITH deleted_rows AS (DELETE FROM {site_name}_conversions WHERE id IN ({','.join(['%s'] * len(payload))}) RETURNING *) SELECT * FROM deleted_rows;"
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(sql, payload)
|
||||
rows = cur.fetchone()
|
||||
if rows and convert:
|
||||
deleted = tupleDictionaryFactory(cur.description, rows)
|
||||
elif rows and not convert:
|
||||
deleted = rows
|
||||
except Exception as error:
|
||||
raise DatabaseError(error, payload, sql)
|
||||
return deleted
|
||||
|
||||
@classmethod
|
||||
def update_item_tuple(self, conn, site, payload, convert=False):
|
||||
"""_summary_
|
||||
|
||||
Args:
|
||||
conn (_T_connector@connect): Postgresql Connector
|
||||
site (str):
|
||||
table (str):
|
||||
payload (dict): {'id': row_id, 'update': {... column_to_update: value_to_update_to...}}
|
||||
convert (bool, optional): determines if to return tuple as dictionary. Defaults to False.
|
||||
|
||||
Raises:
|
||||
DatabaseError:
|
||||
|
||||
Returns:
|
||||
tuple or dict: updated tuple
|
||||
"""
|
||||
updated = ()
|
||||
|
||||
set_clause, values = updateStringFactory(payload['update'])
|
||||
values.append(payload['id'])
|
||||
sql = f"UPDATE {site}_conversions SET {set_clause} WHERE id=%s RETURNING *;"
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(sql, values)
|
||||
rows = cur.fetchone()
|
||||
if rows and convert:
|
||||
updated = tupleDictionaryFactory(cur.description, rows)
|
||||
elif rows and not convert:
|
||||
updated = rows
|
||||
except Exception as error:
|
||||
raise DatabaseError(error, payload, sql)
|
||||
return updated
|
||||
|
||||
class ShoppingListsTable:
    """Helpers for the per-site shopping list tables."""

    @dataclass
    class Payload:
        """Header row for a shopping list; creation_date is stamped automatically."""
        name: str
        description: str
        author: int
        type: str = "plain"
        creation_date: datetime.datetime = field(init=False)

        def __post_init__(self):
            # Stamp the record at construction time.
            self.creation_date = datetime.datetime.now()

        def payload(self):
            """Return the attributes as a tuple in table column order."""
            return (self.name, self.description, self.author,
                    self.creation_date, self.type)

    @dataclass
    class ItemPayload:
        """One line on a shopping list; links is JSON-encoded for a jsonb column."""
        uuid: str
        sl_id: int
        item_type: str
        item_name: str
        uom: str
        qty: float
        item_id: int = None
        links: dict = field(default_factory=dict)

        def payload(self):
            """Return the attributes as a tuple in table column order."""
            return (self.uuid, self.sl_id, self.item_type, self.item_name,
                    self.uom, self.qty, self.item_id, json.dumps(self.links))

    @classmethod
    def getItem(self, conn, site, payload, convert=True):
        """Fetch a single shopping-list item row.

        Args:
            conn: psycopg2-style connection.
            site (str): site prefix substituted into the SQL template.
            payload (tuple): (id,) bound into the query.
            convert (bool, optional): return a dict instead of a raw tuple.
                Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the matching row, or () when not found.
        """
        record = ()
        with open('sql/SELECT/selectShoppingListItem.sql', 'r') as file:
            sql = file.read().replace("%%site_name%%", site)
        try:
            with conn.cursor() as cur:
                cur.execute(sql, payload)
                row = cur.fetchone()
                if row:
                    record = tupleDictionaryFactory(cur.description, row) if convert else row
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return record
|
||||
|
||||
class UnitsTable:
    """CRUD helpers for the global (non per-site) units table."""

    @dataclass
    class Payload:
        __slots__ = ('plural', 'single', 'fullname', 'description')

        plural: str
        single: str
        fullname: str
        description: str

        def payload(self):
            """Return the attributes as a tuple in table column order."""
            return (self.plural, self.single, self.fullname, self.description)

    @classmethod
    def create_table(self, conn):
        """Create the units table from sql/CREATE/units.sql.

        Raises:
            DatabaseError: wrapping any driver error.
        """
        with open("sql/CREATE/units.sql", 'r') as file:
            sql = file.read()
        try:
            with conn.cursor() as cur:
                cur.execute(sql)
        except Exception as error:
            # BUGFIX: arguments were (error, sql, "UnitsTable") -- payload and
            # sql were swapped relative to every other DatabaseError call site.
            raise DatabaseError(error, "UnitsTable", sql)

    @classmethod
    def delete_table(self, conn):
        """Drop the units table via sql/DROP/units.sql.

        Raises:
            DatabaseError: wrapping any driver error.
        """
        with open("sql/DROP/units.sql", 'r') as file:
            sql = file.read()
        try:
            with conn.cursor() as cur:
                cur.execute(sql)
        except Exception as error:
            # BUGFIX: previously reported 'PrefixTable' (copy/paste from
            # another class).
            raise DatabaseError(error, "UnitsTable", sql)

    @classmethod
    def insert_tuple(self, conn, payload: list, convert=True):
        """Insert one units row.

        Args:
            conn: psycopg2-style connection.
            payload (tuple): (plural, single, fullname, description)
            convert (bool, optional): return the row as a dict. Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the inserted row (RETURNING *), or () on no result.
        """
        record = ()
        # "r" not "r+": the template is only read, never written.
        with open("sql/INSERT/insertUnitsTuple.sql", "r") as file:
            sql = file.read()
        try:
            with conn.cursor() as cur:
                cur.execute(sql, payload)
                rows = cur.fetchone()
                if rows and convert:
                    record = tupleDictionaryFactory(cur.description, rows)
                elif rows:
                    record = rows
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return record

    @classmethod
    def getAll(self, conn, convert=True):
        """Return every row of the units table.

        Args:
            conn: psycopg2-style connection.
            convert (bool, optional): return rows as dicts. Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            list or tuple: all rows ([] / () when the table is empty).
        """
        records = ()
        sql = "SELECT * FROM units;"
        try:
            with conn.cursor() as cur:
                cur.execute(sql)
                rows = cur.fetchall()
                if rows and convert:
                    records = [tupleDictionaryFactory(cur.description, row) for row in rows]
                elif rows:
                    records = rows
        except Exception as error:
            raise DatabaseError(error, "", sql)
        return records
|
||||
|
||||
class SKUPrefixTable:
    """CRUD helpers for the per-site <site>_sku_prefix table."""

    @dataclass
    class Payload:
        __slots__ = ('uuid', 'name', 'description')

        uuid: str
        name: str
        description: str

        def payload(self):
            """Return the attributes as a tuple in table column order."""
            return (self.uuid, self.name, self.description)

    @classmethod
    def create_table(self, conn, site):
        """Create <site>_sku_prefix from sql/CREATE/sku_prefix.sql.

        Raises:
            DatabaseError: wrapping any driver error.
        """
        with open("sql/CREATE/sku_prefix.sql", 'r') as file:
            sql = file.read().replace("%%site_name%%", site)
        try:
            with conn.cursor() as cur:
                cur.execute(sql)
        except Exception as error:
            # BUGFIX: arguments were (error, sql, "PrefixTable") -- payload and
            # sql were swapped relative to every other DatabaseError call site.
            raise DatabaseError(error, "SKUPrefixTable", sql)

    @classmethod
    def delete_table(self, conn, site):
        """Drop <site>_sku_prefix via sql/DROP/sku_prefix.sql.

        Raises:
            DatabaseError: wrapping any driver error.
        """
        with open("sql/DROP/sku_prefix.sql", 'r') as file:
            sql = file.read().replace("%%site_name%%", site)
        try:
            with conn.cursor() as cur:
                cur.execute(sql)
        except Exception as error:
            raise DatabaseError(error, "SKUPrefixTable", sql)

    @classmethod
    def insert_tuple(self, conn, site: str, payload: list, convert=True):
        """Insert one sku_prefix row.

        Args:
            conn: psycopg2-style connection.
            site (str): table prefix.
            payload (tuple): (uuid, name, description)
            convert (bool, optional): return the row as a dict. Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the inserted row (RETURNING *), or () on no result.
        """
        record = ()
        # "r" not "r+": the template is only read, never written.
        with open("sql/INSERT/insertSKUPrefixTuple.sql", "r") as file:
            sql = file.read().replace("%%site_name%%", site)
        try:
            with conn.cursor() as cur:
                cur.execute(sql, payload)
                rows = cur.fetchone()
                if rows and convert:
                    record = tupleDictionaryFactory(cur.description, rows)
                elif rows:
                    record = rows
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return record

    @classmethod
    def getPrefixes(self, conn, site: str, payload: tuple, convert=True):
        """Page through <site>_sku_prefix.

        Args:
            conn: psycopg2-style connection.
            site (str): table prefix.
            payload (tuple): (limit, offset)
            convert (bool, optional): return rows as dicts. Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            (list, int): the page of rows and the total row count
            (for pagination metadata).
        """
        recordset = []
        count = 0
        with open("sql/SELECT/getSkuPrefixes.sql", "r") as file:
            sql = file.read().replace("%%site_name%%", site)
        try:
            with conn.cursor() as cur:
                cur.execute(sql, payload)
                rows = cur.fetchall()
                if rows and convert:
                    recordset = [tupleDictionaryFactory(cur.description, row) for row in rows]
                elif rows:
                    recordset = rows
                # Second query: total count for pagination metadata.
                cur.execute(f"SELECT COUNT(*) FROM {site}_sku_prefix;")
                count = cur.fetchone()[0]
        except Exception as error:
            # psycopg2.DatabaseError subclasses Exception, so catching
            # Exception alone is equivalent to the old
            # (Exception, psycopg2.DatabaseError) pair and drops the hard
            # psycopg2 name reference.
            raise DatabaseError(error, payload, sql)
        return recordset, count
|
||||
|
||||
class RecipesTable:
    """CRUD helpers for the per-site recipe tables (<site>_recipes and
    <site>_recipe_items)."""

    @dataclass
    class Payload:
        """Header row for a recipe; created_date is stamped automatically."""
        name: str
        author: int
        description: str
        # BUGFIX (annotation only): was `datetime` (the module object); the
        # stored value is a datetime.datetime instance.
        created_date: datetime.datetime = field(init=False)
        instructions: list = field(default_factory=list)
        picture_path: str = ""

        def __post_init__(self):
            # Stamp the record at construction time.
            self.created_date = datetime.datetime.now()

        def payload(self):
            """Return the attributes as a tuple in table column order;
            instructions are rendered as a Postgres array literal."""
            return (self.name, self.author, self.description,
                    self.created_date, lst2pgarr(self.instructions),
                    self.picture_path)

    @dataclass
    class ItemPayload:
        """One ingredient/line of a recipe; links is JSON-encoded for jsonb."""
        uuid: str
        rp_id: int
        item_type: str
        item_name: str
        uom: int
        qty: float = 0.0
        item_id: int = None
        links: dict = field(default_factory=dict)

        def payload(self):
            """Return the attributes as a tuple in table column order."""
            return (self.uuid, self.rp_id, self.item_type, self.item_name,
                    self.uom, self.qty, self.item_id, json.dumps(self.links))

    @classmethod
    def create_table(self, conn, site):
        """Create the recipe tables from sql/CREATE/recipes.sql.

        Raises:
            DatabaseError: wrapping any driver error.
        """
        with open("sql/CREATE/recipes.sql", 'r') as file:
            sql = file.read().replace("%%site_name%%", site)
        try:
            with conn.cursor() as cur:
                cur.execute(sql)
        except Exception as error:
            # BUGFIX: was DatabaseError(error, sql, "PrefixTable") -- swapped
            # payload/sql arguments and the wrong table name.
            raise DatabaseError(error, "RecipesTable", sql)

    @classmethod
    def delete_table(self, conn, site):
        """Drop the recipe tables via sql/DROP/recipes.sql.

        Raises:
            DatabaseError: wrapping any driver error.
        """
        with open("sql/DROP/recipes.sql", 'r') as file:
            sql = file.read().replace("%%site_name%%", site)
        try:
            with conn.cursor() as cur:
                cur.execute(sql)
        except Exception as error:
            # BUGFIX: previously reported 'PrefixTable' (copy/paste).
            raise DatabaseError(error, "RecipesTable", sql)

    @classmethod
    def insert_tuple(self, conn, site: str, payload: list, convert=True):
        """Insert one recipe header row into <site>_recipes.

        Args:
            conn: psycopg2-style connection.
            site (str): table prefix.
            payload (tuple): (name, author, description, creation_date,
                instructions, picture_path)
            convert (bool, optional): return the row as a dict. Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the inserted row (RETURNING *), or () on no result.
        """
        recipe = ()
        # "r" not "r+": the template is only read, never written.
        with open("sql/INSERT/insertRecipesTuple.sql", "r") as file:
            sql = file.read().replace("%%site_name%%", site)
        try:
            with conn.cursor() as cur:
                cur.execute(sql, payload)
                rows = cur.fetchone()
                if rows and convert:
                    recipe = tupleDictionaryFactory(cur.description, rows)
                elif rows:
                    recipe = rows
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return recipe

    @classmethod
    def insert_item_tuple(self, conn, site, payload, convert=True):
        """Insert one row into <site>_recipe_items.

        Args:
            conn: psycopg2-style connection.
            site (str): table prefix.
            payload (tuple): (uuid, rp_id, item_type, item_name, uom, qty,
                item_id, links)
            convert (bool, optional): return the row as a dict. Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the inserted row (RETURNING *), or () on no result.
        """
        recipe_item = ()
        with open("sql/INSERT/insertRecipeItemsTuple.sql", "r") as file:
            sql = file.read().replace("%%site_name%%", site)
        try:
            with conn.cursor() as cur:
                cur.execute(sql, payload)
                rows = cur.fetchone()
                if rows and convert:
                    recipe_item = tupleDictionaryFactory(cur.description, rows)
                elif rows:
                    recipe_item = rows
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return recipe_item

    @classmethod
    def delete_item_tuple(self, conn, site_name, payload, convert=True):
        """Delete rows from <site>_recipe_items by id.

        Args:
            conn: psycopg2-style connection.
            site_name (str): table prefix.
            payload (tuple): one or more row ids.
            convert (bool, optional): return the row as a dict. Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the FIRST deleted row only (fetchone), even when
            several ids are supplied; () when nothing matched.
        """
        deleted = ()
        placeholders = ','.join(['%s'] * len(payload))
        sql = (f"WITH deleted_rows AS (DELETE FROM {site_name}_recipe_items "
               f"WHERE id IN ({placeholders}) RETURNING *) "
               f"SELECT * FROM deleted_rows;")
        try:
            with conn.cursor() as cur:
                cur.execute(sql, payload)
                rows = cur.fetchone()
                if rows and convert:
                    deleted = tupleDictionaryFactory(cur.description, rows)
                elif rows:
                    deleted = rows
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return deleted

    @classmethod
    def update_item_tuple(self, conn, site, payload, convert=False):
        """Update one <site>_recipe_items row.

        Args:
            conn: psycopg2-style connection.
            site (str): table prefix.
            payload (dict): {'id': row_id, 'update': {column: new_value, ...}}
            convert (bool, optional): return the row as a dict. Defaults to False.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the updated row (RETURNING *), or () when no match.
        """
        updated = ()
        set_clause, values = updateStringFactory(payload['update'])
        values.append(payload['id'])
        sql = f"UPDATE {site}_recipe_items SET {set_clause} WHERE id=%s RETURNING *;"
        try:
            with conn.cursor() as cur:
                cur.execute(sql, values)
                rows = cur.fetchone()
                if rows and convert:
                    updated = tupleDictionaryFactory(cur.description, rows)
                elif rows:
                    updated = rows
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return updated

    @classmethod
    def getRecipes(self, conn, site: str, payload: tuple, convert=True):
        """Page through <site>_recipes.

        Args:
            conn: psycopg2-style connection.
            site (str): table prefix.
            payload (tuple): (limit, offset)
            convert (bool, optional): return rows as dicts. Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            (list, int): the page of rows and the total row count.
        """
        recordset = []
        count = 0
        with open("sql/SELECT/getRecipes.sql", "r") as file:
            sql = file.read().replace("%%site_name%%", site)
        try:
            with conn.cursor() as cur:
                cur.execute(sql, payload)
                rows = cur.fetchall()
                if rows and convert:
                    recordset = [tupleDictionaryFactory(cur.description, row) for row in rows]
                elif rows:
                    recordset = rows
                # Second query: total count for pagination metadata.
                cur.execute(f"SELECT COUNT(*) FROM {site}_recipes;")
                count = cur.fetchone()[0]
        except Exception as error:
            # psycopg2.DatabaseError subclasses Exception; catching Exception
            # alone is equivalent to the old (Exception, psycopg2.DatabaseError).
            raise DatabaseError(error, payload, sql)
        return recordset, count

    @classmethod
    def getRecipe(self, conn, site: str, payload: tuple, convert=True):
        """Fetch one recipe by id via sql/SELECT/getRecipeByID.sql.

        Args:
            conn: psycopg2-style connection.
            site (str): table prefix.
            payload (tuple): (id,)
            convert (bool, optional): return the row as a dict. Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the row, or () when not found.
        """
        record = ()
        with open("sql/SELECT/getRecipeByID.sql", "r") as file:
            sql = file.read().replace("%%site_name%%", site)
        try:
            with conn.cursor() as cur:
                cur.execute(sql, payload)
                rows = cur.fetchone()
                if rows and convert:
                    record = tupleDictionaryFactory(cur.description, rows)
                elif rows:
                    record = rows
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return record

    @classmethod
    def updateRecipe(self, conn, site, payload, convert=True):
        """Update one recipe header row.

        Args:
            conn: psycopg2-style connection.
            site (str): table prefix.
            payload (dict): {'id': row_id, 'update': {column: new_value, ...}}
            convert (bool, optional): return the row as a dict. Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the updated row (RETURNING *), or () when no match.
        """
        updated = ()
        set_clause, values = updateStringFactory(payload['update'])
        values.append(payload['id'])
        sql = f"UPDATE {site}_recipes SET {set_clause} WHERE id=%s RETURNING *;"
        try:
            with conn.cursor() as cur:
                cur.execute(sql, values)
                rows = cur.fetchone()
                if rows and convert:
                    updated = tupleDictionaryFactory(cur.description, rows)
                elif rows:
                    updated = rows
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return updated
|
||||
|
||||
|
||||
class ItemInfoTable:
    """Read/update helpers for the per-site <site>_item_info table."""

    @classmethod
    def select_tuple(self, conn, site: str, payload: tuple, convert=True):
        """Fetch one item_info row by id.

        Args:
            conn: psycopg2-style connection.
            site (str): table prefix.
            payload (tuple): (item_info_id,)
            convert (bool, optional): return a dict instead of a raw tuple.
                Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the row, or () when not found.
        """
        found = ()
        sql = f"SELECT * FROM {site}_item_info WHERE id=%s;"
        try:
            with conn.cursor() as cur:
                cur.execute(sql, payload)
                row = cur.fetchone()
                if row:
                    found = tupleDictionaryFactory(cur.description, row) if convert else row
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return found

    @classmethod
    def update_tuple(self, conn, site: str, payload: dict, convert=True):
        """Update one item_info row.

        Args:
            conn: psycopg2-style connection.
            site (str): table prefix.
            payload (dict): {'id': row_id, 'update': {column: new_value, ...}}
            convert (bool, optional): return a dict instead of a raw tuple.
                Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the updated row (RETURNING *), or () when no match.
        """
        result = ()
        assignments, params = updateStringFactory(payload['update'])
        params.append(payload['id'])
        sql = f"UPDATE {site}_item_info SET {assignments} WHERE id=%s RETURNING *;"
        try:
            with conn.cursor() as cur:
                cur.execute(sql, params)
                row = cur.fetchone()
                if row:
                    result = tupleDictionaryFactory(cur.description, row) if convert else row
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return result
|
||||
|
||||
|
||||
class ItemTable:
    """Read/update helpers for the per-site <site>_items table."""

    @classmethod
    def getItemAllByID(self, conn, site, payload, convert=True):
        """Fetch the full item record by id using sql/SELECT/getItemAllByID.sql.

        Args:
            conn: psycopg2-style connection.
            site: site prefix substituted into the SQL template.
            payload: (item_id,)
            convert (bool, optional): return a dict instead of a raw tuple.
                Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the row, or () when not found.
        """
        found = ()
        with open("sql/SELECT/getItemAllByID.sql", "r+") as template:
            query = template.read().replace("%%site_name%%", site)
        try:
            with conn.cursor() as cur:
                cur.execute(query, payload)
                row = cur.fetchone()
                if row:
                    found = tupleDictionaryFactory(cur.description, row) if convert else row
        except (Exception, psycopg2.DatabaseError) as error:
            raise DatabaseError(error, payload, query)
        return found

    @classmethod
    def update_tuple(self, conn, site, payload, convert=True):
        """Update one item row.

        Args:
            conn: psycopg2-style connection.
            site (str): table prefix.
            payload (dict): {'id': row_id, 'update': {column: new_value, ...}}
            convert (bool, optional): return a dict instead of a raw tuple.
                Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the updated row (RETURNING *), or () when no match.
        """
        result = ()
        assignments, params = updateStringFactory(payload['update'])
        params.append(payload['id'])
        sql = f"UPDATE {site}_items SET {assignments} WHERE id=%s RETURNING *;"
        try:
            with conn.cursor() as cur:
                cur.execute(sql, params)
                row = cur.fetchone()
                if row:
                    result = tupleDictionaryFactory(cur.description, row) if convert else row
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return result
|
||||
|
||||
class ReceiptTable:
    """Read/update helpers for the per-site <site>_receipts table."""

    @classmethod
    def update_receipt(self, conn, site: str, payload: dict, convert=True):
        """Update one receipt row.

        Args:
            conn: psycopg2-style connection.
            site (str): table prefix.
            payload (dict): {'id': row_id, 'update': {column: new_value, ...}}
            convert (bool, optional): return a dict instead of a raw tuple.
                Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the updated row (RETURNING *), or () when no match.
        """
        outcome = ()
        assignments, params = updateStringFactory(payload['update'])
        params.append(payload['id'])
        sql = f"UPDATE {site}_receipts SET {assignments} WHERE id=%s RETURNING *;"
        try:
            with conn.cursor() as cur:
                cur.execute(sql, params)
                row = cur.fetchone()
                if row:
                    outcome = tupleDictionaryFactory(cur.description, row) if convert else row
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return outcome

    @classmethod
    def select_tuple(self, conn, site: str, payload: tuple, convert=True):
        """Fetch one receipt row by id.

        Args:
            conn: psycopg2-style connection.
            site (str): table prefix.
            payload (tuple): (receipt_id,)
            convert (bool, optional): return a dict instead of a raw tuple.
                Defaults to True.

        Raises:
            DatabaseError: wrapping any driver error.

        Returns:
            tuple or dict: the row, or () when not found.
        """
        match = ()
        sql = f"SELECT * FROM {site}_receipts WHERE id=%s;"
        try:
            with conn.cursor() as cur:
                cur.execute(sql, payload)
                row = cur.fetchone()
                if row:
                    match = tupleDictionaryFactory(cur.description, row) if convert else row
        except Exception as error:
            raise DatabaseError(error, payload, sql)
        return match
|
||||
421
process.log
421
process.log
@ -1,421 +0,0 @@
|
||||
|
||||
2025-04-07 18:07:48.193728 --- CAUTION --- 0
|
||||
{"Plural": "pinches", " Single": " pinch", " Fullname": " Pinch", " Description": " Less than 1/8 teaspoon."}
|
||||
2025-04-07 18:07:48.198811 --- CAUTION --- 0
|
||||
{"Plural": "tsp", " Single": " tsp", " Fullname": " Teaspoon", " Description": " 5 millilitres in Australia New Zealand UK Canada and the US but approximately 4.93 millilitres in the US."}
|
||||
2025-04-07 18:07:48.202723 --- CAUTION --- 0
|
||||
{"Plural": "dsp", " Single": " dsp", " Fullname": " Dessertspoon", " Description": " 10 millilitres in Australia."}
|
||||
2025-04-07 18:07:48.205733 --- CAUTION --- 0
|
||||
{"Plural": "tbsp", " Single": " tbsp", " Fullname": " Tablespoon", " Description": " 20 millilitres in Australia or 15 millilitres in New Zealand the UK Canada and the US."}
|
||||
2025-04-07 18:07:48.208306 --- CAUTION --- 0
|
||||
{"Plural": "fl oz", " Single": " fl oz", " Fullname": " Fluid ounce", " Description": " 28.41 millilitres in the UK or approximately 29.57 millilitres in the US."}
|
||||
2025-04-07 18:07:48.212832 --- CAUTION --- 0
|
||||
{"Plural": "cups", " Single": " cup", " Fullname": " Cup", " Description": " 250 millilitres in Australia and New Zealand or approximately 170.48 millilitres in the UK or approximately 227.31 millilitres in Canada and approximately 236.59 millilitres in the US."}
|
||||
2025-04-07 18:07:48.215843 --- CAUTION --- 0
|
||||
{"Plural": "pt", " Single": " pt", " Fullname": " Pint", " Description": " 570 millilitres in Australia approximately 568.26 millilitres in New Zealand the UK and Canada and approximately 473.18 millilitres in the US."}
|
||||
2025-04-07 18:07:48.219306 --- CAUTION --- 0
|
||||
{"Plural": "qt", " Single": " qt", " Fullname": " Quart", " Description": " Approximately 1136.52 millilitres in Australia New Zealand the UK and Canada and approximately 946.35 millilitres in the US."}
|
||||
2025-04-07 18:07:48.222204 --- CAUTION --- 0
|
||||
{"Plural": "gal", " Single": " gal", " Fullname": " Gallon", " Description": " Approximately 4546.09 millilitres in Australia New Zealand the UK and Canada and approximately 3785.41 millilitres in the US."}
|
||||
2025-04-07 18:07:48.226717 --- CAUTION --- 0
|
||||
{"Plural": "jiggers", " Single": " jigger", " Fullname": " Jigger", " Description": " 1.5 fluid ounces."}
|
||||
2025-04-07 18:07:48.230038 --- CAUTION --- 0
|
||||
{"Plural": "oz", " Single": " oz", " Fullname": " Ounce", " Description": " 1/4 lb for butter which can also be measured as 3 tablespoons."}
|
||||
2025-04-07 18:07:48.233496 --- CAUTION --- 0
|
||||
{"Plural": "L", " Single": " L", " Fullname": " Liter", " Description": " Commonly used for volume measurement in the metric system."}
|
||||
2025-04-07 18:07:48.236507 --- CAUTION --- 0
|
||||
{"Plural": "mL", " Single": " mL", " Fullname": " Milliliter", " Description": " Commonly used for volume measurement in the metric system."}
|
||||
2025-04-07 18:07:48.240296 --- CAUTION --- 0
|
||||
{"Plural": "dm3", " Single": " dm3", " Fullname": " Cubic decimeter", " Description": " Equivalent to 1 liter."}
|
||||
2025-04-07 18:07:48.242309 --- CAUTION --- 0
|
||||
{"Plural": "g", " Single": " g", " Fullname": " Gram", " Description": " Commonly used for weight measurement in the metric system."}
|
||||
2025-04-07 18:07:48.246896 --- CAUTION --- 0
|
||||
{"Plural": "kg", " Single": " kg", " Fullname": " Kilogram", " Description": " Commonly used for weight measurement in the metric system."}
|
||||
2025-04-07 18:07:48.250308 --- CAUTION --- 0
|
||||
{"Plural": "lbs", " Single": " lb", " Fullname": " Pound", " Description": " Used for weight measurement in the US."}
|
||||
2025-04-07 18:07:48.253557 --- CAUTION --- 0
|
||||
{"Plural": "fluid scruples", " Single": " fluid scruple", " Fullname": " Fluid scruple", " Description": " A unit used in the apothecaries' system but not commonly used in cooking."}
|
||||
2025-04-07 18:07:48.257236 --- CAUTION --- 0
|
||||
{"Plural": "cm3", " Single": " cm3", " Fullname": " Cubic centimeter", " Description": " Equivalent to 1 milliliter."}
|
||||
2025-04-07 18:07:48.261287 --- CAUTION --- 0
|
||||
{"Plural": "breakfast cups", " Single": " breakfast cup", " Fullname": " Breakfast cup", " Description": " Similar in size to the US customary cup and the metric cup."}
|
||||
2025-04-07 18:07:48.263881 --- CAUTION --- 0
|
||||
{"Plural": "tumblers", " Single": " tumbler", " Fullname": " Tumblerful", " Description": " A unit used in British cookery books and recipes."}
|
||||
2025-04-07 18:07:48.266919 --- CAUTION --- 0
|
||||
{"Plural": "winefulls", " Single": " winefull", " Fullname": " Wineglassful", " Description": " A unit used in British cookery books and recipes."}
|
||||
2025-04-07 18:07:48.270307 --- CAUTION --- 0
|
||||
{"Plural": "coffee cups", " Single": " coffee cup", " Fullname": " Coffee cup", " Description": " Can vary from 100 to 200 millilitres."}
|
||||
2025-04-07 18:07:48.273839 --- CAUTION --- 0
|
||||
{"Plural": "sticks of butter", " Single": " stick of butter", " Fullname": " Stick of butter", " Description": " 1/4 lb or 3 tablespoons."}
|
||||
2025-04-07 18:07:48.277370 --- CAUTION --- 0
|
||||
{"Plural": "smidgens", " Single": " smidgen", " Fullname": " Smidgen", " Description": " An approximate unit of measure."}
|
||||
2025-04-07 18:07:48.280470 --- CAUTION --- 0
|
||||
{"Plural": "dashes", " Single": " dash", " Fullname": " Dash", " Description": " An approximate unit of measure."}
|
||||
2025-04-07 18:07:48.283908 --- CAUTION --- 0
|
||||
{"Plural": "drops", " Single": " drop", " Fullname": " Drop", " Description": " An approximate unit of measure."}
|
||||
2025-04-07 18:07:48.286954 --- CAUTION --- 0
|
||||
{"Plural": "eaches", " Single": " each", " Fullname": " Each", " Description": " A single unit."}
|
||||
2025-04-07 18:07:48.290199 --- CAUTION --- 0
|
||||
{"Plural": "boxes", " Single": " box", " Fullname": " Box", " Description": " A Single box of a unit."}
|
||||
2025-04-07 18:07:48.293848 --- CAUTION --- 0
|
||||
{"Plural": "crates", " Single": " crate", " Fullname": " Crate", " Description": " a single crate of a unit."}
|
||||
2025-04-07 18:07:48.297999 --- CAUTION --- 0
|
||||
{"Plural": "jars", " Single": " jar", " Fullname": " Jar", " Description": " A single Jar of a unit."}
|
||||
2025-04-07 18:07:48.301330 --- CAUTION --- 0
|
||||
{"Plural": "cans", " Single": " can", " Fullname": " Can", " Description": " A Single Can of a unit."}
|
||||
2025-04-07 18:07:48.304665 --- CAUTION --- 0
|
||||
{"Plural": "bars", " Single": " bar", " Fullname": " Bars", " Description": " A Single bar of a unit."}
|
||||
2025-04-07 18:07:48.307969 --- CAUTION --- 0
|
||||
{"Plural": "loaves", " Single": " loaf", " Fullname": " Loaf", " Description": " A single loaf of a unit."}
|
||||
2025-04-07 18:07:48.311119 --- CAUTION --- 0
|
||||
{"Plural": "packs", " Single": " pack", " Fullname": " Pack", " Description": " A Single Pack of a unit."}
|
||||
2025-04-07 18:08:49.486023 --- CAUTION --- 0
|
||||
{"Plural": "pinches", " Single": " pinch", " Fullname": " Pinch", " Description": " Less than 1/8 teaspoon."}
|
||||
2025-04-07 18:08:49.492309 --- CAUTION --- 0
|
||||
{"Plural": "tsp", " Single": " tsp", " Fullname": " Teaspoon", " Description": " 5 millilitres in Australia New Zealand UK Canada and the US but approximately 4.93 millilitres in the US."}
|
||||
2025-04-07 18:08:49.496834 --- CAUTION --- 0
|
||||
{"Plural": "dsp", " Single": " dsp", " Fullname": " Dessertspoon", " Description": " 10 millilitres in Australia."}
|
||||
2025-04-07 18:08:49.500191 --- CAUTION --- 0
|
||||
{"Plural": "tbsp", " Single": " tbsp", " Fullname": " Tablespoon", " Description": " 20 millilitres in Australia or 15 millilitres in New Zealand the UK Canada and the US."}
|
||||
2025-04-07 18:08:49.504607 --- CAUTION --- 0
|
||||
{"Plural": "fl oz", " Single": " fl oz", " Fullname": " Fluid ounce", " Description": " 28.41 millilitres in the UK or approximately 29.57 millilitres in the US."}
|
||||
2025-04-07 18:08:49.508006 --- CAUTION --- 0
|
||||
{"Plural": "cups", " Single": " cup", " Fullname": " Cup", " Description": " 250 millilitres in Australia and New Zealand or approximately 170.48 millilitres in the UK or approximately 227.31 millilitres in Canada and approximately 236.59 millilitres in the US."}
|
||||
2025-04-07 18:08:49.512009 --- CAUTION --- 0
|
||||
{"Plural": "pt", " Single": " pt", " Fullname": " Pint", " Description": " 570 millilitres in Australia approximately 568.26 millilitres in New Zealand the UK and Canada and approximately 473.18 millilitres in the US."}
|
||||
2025-04-07 18:08:49.514519 --- CAUTION --- 0
|
||||
{"Plural": "qt", " Single": " qt", " Fullname": " Quart", " Description": " Approximately 1136.52 millilitres in Australia New Zealand the UK and Canada and approximately 946.35 millilitres in the US."}
|
||||
2025-04-07 18:08:49.519512 --- CAUTION --- 0
|
||||
{"Plural": "gal", " Single": " gal", " Fullname": " Gallon", " Description": " Approximately 4546.09 millilitres in Australia New Zealand the UK and Canada and approximately 3785.41 millilitres in the US."}
|
||||
2025-04-07 18:08:49.522787 --- CAUTION --- 0
|
||||
{"Plural": "jiggers", " Single": " jigger", " Fullname": " Jigger", " Description": " 1.5 fluid ounces."}
|
||||
2025-04-07 18:08:49.526796 --- CAUTION --- 0
|
||||
{"Plural": "oz", " Single": " oz", " Fullname": " Ounce", " Description": " 1/4 lb for butter which can also be measured as 3 tablespoons."}
|
||||
2025-04-07 18:08:49.529796 --- CAUTION --- 0
|
||||
{"Plural": "L", " Single": " L", " Fullname": " Liter", " Description": " Commonly used for volume measurement in the metric system."}
|
||||
2025-04-07 18:08:49.533807 --- CAUTION --- 0
|
||||
{"Plural": "mL", " Single": " mL", " Fullname": " Milliliter", " Description": " Commonly used for volume measurement in the metric system."}
|
||||
2025-04-07 18:08:49.537268 --- CAUTION --- 0
|
||||
{"Plural": "dm3", " Single": " dm3", " Fullname": " Cubic decimeter", " Description": " Equivalent to 1 liter."}
|
||||
2025-04-07 18:08:49.541002 --- CAUTION --- 0
|
||||
{"Plural": "g", " Single": " g", " Fullname": " Gram", " Description": " Commonly used for weight measurement in the metric system."}
|
||||
2025-04-07 18:08:49.544953 --- CAUTION --- 0
|
||||
{"Plural": "kg", " Single": " kg", " Fullname": " Kilogram", " Description": " Commonly used for weight measurement in the metric system."}
|
||||
2025-04-07 18:08:49.548961 --- CAUTION --- 0
|
||||
{"Plural": "lbs", " Single": " lb", " Fullname": " Pound", " Description": " Used for weight measurement in the US."}
|
||||
2025-04-07 18:08:49.551999 --- CAUTION --- 0
|
||||
{"Plural": "fluid scruples", " Single": " fluid scruple", " Fullname": " Fluid scruple", " Description": " A unit used in the apothecaries' system but not commonly used in cooking."}
|
||||
2025-04-07 18:08:49.556000 --- CAUTION --- 0
|
||||
{"Plural": "cm3", " Single": " cm3", " Fullname": " Cubic centimeter", " Description": " Equivalent to 1 milliliter."}
|
||||
2025-04-07 18:08:49.560003 --- CAUTION --- 0
|
||||
{"Plural": "breakfast cups", " Single": " breakfast cup", " Fullname": " Breakfast cup", " Description": " Similar in size to the US customary cup and the metric cup."}
|
||||
2025-04-07 18:08:49.564002 --- CAUTION --- 0
|
||||
{"Plural": "tumblers", " Single": " tumbler", " Fullname": " Tumblerful", " Description": " A unit used in British cookery books and recipes."}
|
||||
2025-04-07 18:08:49.567064 --- CAUTION --- 0
|
||||
{"Plural": "winefulls", " Single": " winefull", " Fullname": " Wineglassful", " Description": " A unit used in British cookery books and recipes."}
|
||||
2025-04-07 18:08:49.571455 --- CAUTION --- 0
|
||||
{"Plural": "coffee cups", " Single": " coffee cup", " Fullname": " Coffee cup", " Description": " Can vary from 100 to 200 millilitres."}
|
||||
2025-04-07 18:08:49.574950 --- CAUTION --- 0
|
||||
{"Plural": "sticks of butter", " Single": " stick of butter", " Fullname": " Stick of butter", " Description": " 1/4 lb or 3 tablespoons."}
|
||||
2025-04-07 18:08:49.578958 --- CAUTION --- 0
|
||||
{"Plural": "smidgens", " Single": " smidgen", " Fullname": " Smidgen", " Description": " An approximate unit of measure."}
|
||||
2025-04-07 18:08:49.582103 --- CAUTION --- 0
|
||||
{"Plural": "dashes", " Single": " dash", " Fullname": " Dash", " Description": " An approximate unit of measure."}
|
||||
2025-04-07 18:08:49.584615 --- CAUTION --- 0
|
||||
{"Plural": "drops", " Single": " drop", " Fullname": " Drop", " Description": " An approximate unit of measure."}
|
||||
2025-04-07 18:08:49.589280 --- CAUTION --- 0
|
||||
{"Plural": "eaches", " Single": " each", " Fullname": " Each", " Description": " A single unit."}
|
||||
2025-04-07 18:08:49.592278 --- CAUTION --- 0
|
||||
{"Plural": "boxes", " Single": " box", " Fullname": " Box", " Description": " A Single box of a unit."}
|
||||
2025-04-07 18:08:49.595790 --- CAUTION --- 0
|
||||
{"Plural": "crates", " Single": " crate", " Fullname": " Crate", " Description": " a single crate of a unit."}
|
||||
2025-04-07 18:08:49.599333 --- CAUTION --- 0
|
||||
{"Plural": "jars", " Single": " jar", " Fullname": " Jar", " Description": " A single Jar of a unit."}
|
||||
2025-04-07 18:08:49.603568 --- CAUTION --- 0
|
||||
{"Plural": "cans", " Single": " can", " Fullname": " Can", " Description": " A Single Can of a unit."}
|
||||
2025-04-07 18:08:49.607011 --- CAUTION --- 0
|
||||
{"Plural": "bars", " Single": " bar", " Fullname": " Bars", " Description": " A Single bar of a unit."}
|
||||
2025-04-07 18:08:49.611167 --- CAUTION --- 0
|
||||
{"Plural": "loaves", " Single": " loaf", " Fullname": " Loaf", " Description": " A single loaf of a unit."}
|
||||
2025-04-07 18:08:49.614706 --- CAUTION --- 0
|
||||
{"Plural": "packs", " Single": " pack", " Fullname": " Pack", " Description": " A Single Pack of a unit."}
|
||||
2025-04-07 18:10:43.995134 --- CAUTION --- DatabaseError(message='duplicate key value violates unique constraint "units_plural_key"DETAIL: Key (plural)=(pinches) already exists.', payload=('pinches', ' pinch', ' Pinch', ' Less than 1/8 teaspoon.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["pinches", " pinch", " Pinch", " Less than 1/8 teaspoon."]
|
||||
2025-04-07 18:10:44.005096 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('tsp', ' tsp', ' Teaspoon', ' 5 millilitres in Australia New Zealand UK Canada and the US but approximately 4.93 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["tsp", " tsp", " Teaspoon", " 5 millilitres in Australia New Zealand UK Canada and the US but approximately 4.93 millilitres in the US."]
|
||||
2025-04-07 18:10:44.012695 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('dsp', ' dsp', ' Dessertspoon', ' 10 millilitres in Australia.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["dsp", " dsp", " Dessertspoon", " 10 millilitres in Australia."]
|
||||
2025-04-07 18:10:44.020587 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('tbsp', ' tbsp', ' Tablespoon', ' 20 millilitres in Australia or 15 millilitres in New Zealand the UK Canada and the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["tbsp", " tbsp", " Tablespoon", " 20 millilitres in Australia or 15 millilitres in New Zealand the UK Canada and the US."]
|
||||
2025-04-07 18:10:44.027752 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('fl oz', ' fl oz', ' Fluid ounce', ' 28.41 millilitres in the UK or approximately 29.57 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["fl oz", " fl oz", " Fluid ounce", " 28.41 millilitres in the UK or approximately 29.57 millilitres in the US."]
|
||||
2025-04-07 18:10:44.036113 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('cups', ' cup', ' Cup', ' 250 millilitres in Australia and New Zealand or approximately 170.48 millilitres in the UK or approximately 227.31 millilitres in Canada and approximately 236.59 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["cups", " cup", " Cup", " 250 millilitres in Australia and New Zealand or approximately 170.48 millilitres in the UK or approximately 227.31 millilitres in Canada and approximately 236.59 millilitres in the US."]
|
||||
2025-04-07 18:10:44.043799 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('pt', ' pt', ' Pint', ' 570 millilitres in Australia approximately 568.26 millilitres in New Zealand the UK and Canada and approximately 473.18 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["pt", " pt", " Pint", " 570 millilitres in Australia approximately 568.26 millilitres in New Zealand the UK and Canada and approximately 473.18 millilitres in the US."]
|
||||
2025-04-07 18:10:44.051856 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('qt', ' qt', ' Quart', ' Approximately 1136.52 millilitres in Australia New Zealand the UK and Canada and approximately 946.35 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["qt", " qt", " Quart", " Approximately 1136.52 millilitres in Australia New Zealand the UK and Canada and approximately 946.35 millilitres in the US."]
|
||||
2025-04-07 18:10:44.059206 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('gal', ' gal', ' Gallon', ' Approximately 4546.09 millilitres in Australia New Zealand the UK and Canada and approximately 3785.41 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["gal", " gal", " Gallon", " Approximately 4546.09 millilitres in Australia New Zealand the UK and Canada and approximately 3785.41 millilitres in the US."]
|
||||
2025-04-07 18:10:44.067014 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('jiggers', ' jigger', ' Jigger', ' 1.5 fluid ounces.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["jiggers", " jigger", " Jigger", " 1.5 fluid ounces."]
|
||||
2025-04-07 18:10:44.074033 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('oz', ' oz', ' Ounce', ' 1/4 lb for butter which can also be measured as 3 tablespoons.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["oz", " oz", " Ounce", " 1/4 lb for butter which can also be measured as 3 tablespoons."]
|
||||
2025-04-07 18:10:44.081603 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('L', ' L', ' Liter', ' Commonly used for volume measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["L", " L", " Liter", " Commonly used for volume measurement in the metric system."]
|
||||
2025-04-07 18:10:44.089592 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('mL', ' mL', ' Milliliter', ' Commonly used for volume measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["mL", " mL", " Milliliter", " Commonly used for volume measurement in the metric system."]
|
||||
2025-04-07 18:10:44.097342 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('dm3', ' dm3', ' Cubic decimeter', ' Equivalent to 1 liter.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["dm3", " dm3", " Cubic decimeter", " Equivalent to 1 liter."]
|
||||
2025-04-07 18:10:44.104724 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('g', ' g', ' Gram', ' Commonly used for weight measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["g", " g", " Gram", " Commonly used for weight measurement in the metric system."]
|
||||
2025-04-07 18:10:44.112144 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('kg', ' kg', ' Kilogram', ' Commonly used for weight measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["kg", " kg", " Kilogram", " Commonly used for weight measurement in the metric system."]
|
||||
2025-04-07 18:10:44.120135 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('lbs', ' lb', ' Pound', ' Used for weight measurement in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["lbs", " lb", " Pound", " Used for weight measurement in the US."]
|
||||
2025-04-07 18:10:44.127804 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('fluid scruples', ' fluid scruple', ' Fluid scruple', " A unit used in the apothecaries' system but not commonly used in cooking."), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["fluid scruples", " fluid scruple", " Fluid scruple", " A unit used in the apothecaries' system but not commonly used in cooking."]
|
||||
2025-04-07 18:10:44.135113 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('cm3', ' cm3', ' Cubic centimeter', ' Equivalent to 1 milliliter.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["cm3", " cm3", " Cubic centimeter", " Equivalent to 1 milliliter."]
|
||||
2025-04-07 18:10:44.142675 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('breakfast cups', ' breakfast cup', ' Breakfast cup', ' Similar in size to the US customary cup and the metric cup.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["breakfast cups", " breakfast cup", " Breakfast cup", " Similar in size to the US customary cup and the metric cup."]
|
||||
2025-04-07 18:10:44.151374 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('tumblers', ' tumbler', ' Tumblerful', ' A unit used in British cookery books and recipes.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["tumblers", " tumbler", " Tumblerful", " A unit used in British cookery books and recipes."]
|
||||
2025-04-07 18:10:44.158807 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('winefulls', ' winefull', ' Wineglassful', ' A unit used in British cookery books and recipes.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["winefulls", " winefull", " Wineglassful", " A unit used in British cookery books and recipes."]
|
||||
2025-04-07 18:10:44.167004 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('coffee cups', ' coffee cup', ' Coffee cup', ' Can vary from 100 to 200 millilitres.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["coffee cups", " coffee cup", " Coffee cup", " Can vary from 100 to 200 millilitres."]
|
||||
2025-04-07 18:10:44.174449 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('sticks of butter', ' stick of butter', ' Stick of butter', ' 1/4 lb or 3 tablespoons.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["sticks of butter", " stick of butter", " Stick of butter", " 1/4 lb or 3 tablespoons."]
|
||||
2025-04-07 18:10:44.183729 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('smidgens', ' smidgen', ' Smidgen', ' An approximate unit of measure.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["smidgens", " smidgen", " Smidgen", " An approximate unit of measure."]
|
||||
2025-04-07 18:10:44.191339 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('dashes', ' dash', ' Dash', ' An approximate unit of measure.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["dashes", " dash", " Dash", " An approximate unit of measure."]
|
||||
2025-04-07 18:10:44.198886 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('drops', ' drop', ' Drop', ' An approximate unit of measure.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["drops", " drop", " Drop", " An approximate unit of measure."]
|
||||
2025-04-07 18:10:44.206287 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('eaches', ' each', ' Each', ' A single unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["eaches", " each", " Each", " A single unit."]
|
||||
2025-04-07 18:10:44.213758 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('boxes', ' box', ' Box', ' A Single box of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["boxes", " box", " Box", " A Single box of a unit."]
|
||||
2025-04-07 18:10:44.221833 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('crates', ' crate', ' Crate', ' a single crate of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["crates", " crate", " Crate", " a single crate of a unit."]
|
||||
2025-04-07 18:10:44.229839 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('jars', ' jar', ' Jar', ' A single Jar of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["jars", " jar", " Jar", " A single Jar of a unit."]
|
||||
2025-04-07 18:10:44.237064 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('cans', ' can', ' Can', ' A Single Can of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["cans", " can", " Can", " A Single Can of a unit."]
|
||||
2025-04-07 18:10:44.244118 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('bars', ' bar', ' Bars', ' A Single bar of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["bars", " bar", " Bars", " A Single bar of a unit."]
|
||||
2025-04-07 18:10:44.252959 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('loaves', ' loaf', ' Loaf', ' A single loaf of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["loaves", " loaf", " Loaf", " A single loaf of a unit."]
|
||||
2025-04-07 18:10:44.260249 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('packs', ' pack', ' Pack', ' A Single Pack of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["packs", " pack", " Pack", " A Single Pack of a unit."]
|
||||
2025-04-07 18:11:11.901876 --- CAUTION --- DatabaseError(message='duplicate key value violates unique constraint "units_plural_key"DETAIL: Key (plural)=(pinches) already exists.', payload=('pinches', ' pinch', ' Pinch', ' Less than 1/8 teaspoon.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["pinches", " pinch", " Pinch", " Less than 1/8 teaspoon."]
|
||||
2025-04-07 18:11:11.913095 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('tsp', ' tsp', ' Teaspoon', ' 5 millilitres in Australia New Zealand UK Canada and the US but approximately 4.93 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["tsp", " tsp", " Teaspoon", " 5 millilitres in Australia New Zealand UK Canada and the US but approximately 4.93 millilitres in the US."]
|
||||
2025-04-07 18:11:11.920834 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('dsp', ' dsp', ' Dessertspoon', ' 10 millilitres in Australia.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["dsp", " dsp", " Dessertspoon", " 10 millilitres in Australia."]
|
||||
2025-04-07 18:11:11.928118 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('tbsp', ' tbsp', ' Tablespoon', ' 20 millilitres in Australia or 15 millilitres in New Zealand the UK Canada and the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["tbsp", " tbsp", " Tablespoon", " 20 millilitres in Australia or 15 millilitres in New Zealand the UK Canada and the US."]
|
||||
2025-04-07 18:11:11.935834 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('fl oz', ' fl oz', ' Fluid ounce', ' 28.41 millilitres in the UK or approximately 29.57 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["fl oz", " fl oz", " Fluid ounce", " 28.41 millilitres in the UK or approximately 29.57 millilitres in the US."]
|
||||
2025-04-07 18:11:11.943995 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('cups', ' cup', ' Cup', ' 250 millilitres in Australia and New Zealand or approximately 170.48 millilitres in the UK or approximately 227.31 millilitres in Canada and approximately 236.59 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["cups", " cup", " Cup", " 250 millilitres in Australia and New Zealand or approximately 170.48 millilitres in the UK or approximately 227.31 millilitres in Canada and approximately 236.59 millilitres in the US."]
|
||||
2025-04-07 18:11:11.951271 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('pt', ' pt', ' Pint', ' 570 millilitres in Australia approximately 568.26 millilitres in New Zealand the UK and Canada and approximately 473.18 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["pt", " pt", " Pint", " 570 millilitres in Australia approximately 568.26 millilitres in New Zealand the UK and Canada and approximately 473.18 millilitres in the US."]
|
||||
2025-04-07 18:11:11.958833 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('qt', ' qt', ' Quart', ' Approximately 1136.52 millilitres in Australia New Zealand the UK and Canada and approximately 946.35 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["qt", " qt", " Quart", " Approximately 1136.52 millilitres in Australia New Zealand the UK and Canada and approximately 946.35 millilitres in the US."]
|
||||
2025-04-07 18:11:11.966926 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('gal', ' gal', ' Gallon', ' Approximately 4546.09 millilitres in Australia New Zealand the UK and Canada and approximately 3785.41 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["gal", " gal", " Gallon", " Approximately 4546.09 millilitres in Australia New Zealand the UK and Canada and approximately 3785.41 millilitres in the US."]
|
||||
2025-04-07 18:11:11.974434 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('jiggers', ' jigger', ' Jigger', ' 1.5 fluid ounces.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["jiggers", " jigger", " Jigger", " 1.5 fluid ounces."]
|
||||
2025-04-07 18:11:11.982214 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('oz', ' oz', ' Ounce', ' 1/4 lb for butter which can also be measured as 3 tablespoons.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["oz", " oz", " Ounce", " 1/4 lb for butter which can also be measured as 3 tablespoons."]
|
||||
2025-04-07 18:11:11.989518 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('L', ' L', ' Liter', ' Commonly used for volume measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["L", " L", " Liter", " Commonly used for volume measurement in the metric system."]
|
||||
2025-04-07 18:11:11.997321 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('mL', ' mL', ' Milliliter', ' Commonly used for volume measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["mL", " mL", " Milliliter", " Commonly used for volume measurement in the metric system."]
|
||||
2025-04-07 18:11:12.005157 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('dm3', ' dm3', ' Cubic decimeter', ' Equivalent to 1 liter.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["dm3", " dm3", " Cubic decimeter", " Equivalent to 1 liter."]
|
||||
2025-04-07 18:11:12.012662 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('g', ' g', ' Gram', ' Commonly used for weight measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["g", " g", " Gram", " Commonly used for weight measurement in the metric system."]
|
||||
2025-04-07 18:11:12.020544 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('kg', ' kg', ' Kilogram', ' Commonly used for weight measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["kg", " kg", " Kilogram", " Commonly used for weight measurement in the metric system."]
|
||||
2025-04-07 18:11:12.028832 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('lbs', ' lb', ' Pound', ' Used for weight measurement in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["lbs", " lb", " Pound", " Used for weight measurement in the US."]
|
||||
2025-04-07 18:11:12.035928 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('fluid scruples', ' fluid scruple', ' Fluid scruple', " A unit used in the apothecaries' system but not commonly used in cooking."), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["fluid scruples", " fluid scruple", " Fluid scruple", " A unit used in the apothecaries' system but not commonly used in cooking."]
|
||||
2025-04-07 18:11:12.044169 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('cm3', ' cm3', ' Cubic centimeter', ' Equivalent to 1 milliliter.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["cm3", " cm3", " Cubic centimeter", " Equivalent to 1 milliliter."]
|
||||
2025-04-07 18:11:12.051848 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('breakfast cups', ' breakfast cup', ' Breakfast cup', ' Similar in size to the US customary cup and the metric cup.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["breakfast cups", " breakfast cup", " Breakfast cup", " Similar in size to the US customary cup and the metric cup."]
|
||||
2025-04-07 18:11:12.059081 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('tumblers', ' tumbler', ' Tumblerful', ' A unit used in British cookery books and recipes.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["tumblers", " tumbler", " Tumblerful", " A unit used in British cookery books and recipes."]
|
||||
2025-04-07 18:11:12.067370 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('winefulls', ' winefull', ' Wineglassful', ' A unit used in British cookery books and recipes.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["winefulls", " winefull", " Wineglassful", " A unit used in British cookery books and recipes."]
|
||||
2025-04-07 18:11:12.074897 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('coffee cups', ' coffee cup', ' Coffee cup', ' Can vary from 100 to 200 millilitres.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["coffee cups", " coffee cup", " Coffee cup", " Can vary from 100 to 200 millilitres."]
|
||||
2025-04-07 18:11:12.082570 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('sticks of butter', ' stick of butter', ' Stick of butter', ' 1/4 lb or 3 tablespoons.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["sticks of butter", " stick of butter", " Stick of butter", " 1/4 lb or 3 tablespoons."]
|
||||
2025-04-07 18:11:12.090585 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('smidgens', ' smidgen', ' Smidgen', ' An approximate unit of measure.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["smidgens", " smidgen", " Smidgen", " An approximate unit of measure."]
|
||||
2025-04-07 18:11:12.099307 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('dashes', ' dash', ' Dash', ' An approximate unit of measure.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["dashes", " dash", " Dash", " An approximate unit of measure."]
|
||||
2025-04-07 18:11:12.106924 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('drops', ' drop', ' Drop', ' An approximate unit of measure.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["drops", " drop", " Drop", " An approximate unit of measure."]
|
||||
2025-04-07 18:11:12.114986 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('eaches', ' each', ' Each', ' A single unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["eaches", " each", " Each", " A single unit."]
|
||||
2025-04-07 18:11:12.123785 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('boxes', ' box', ' Box', ' A Single box of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["boxes", " box", " Box", " A Single box of a unit."]
|
||||
2025-04-07 18:11:12.132171 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('crates', ' crate', ' Crate', ' a single crate of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["crates", " crate", " Crate", " a single crate of a unit."]
|
||||
2025-04-07 18:11:12.140032 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('jars', ' jar', ' Jar', ' A single Jar of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["jars", " jar", " Jar", " A single Jar of a unit."]
|
||||
2025-04-07 18:11:12.148143 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('cans', ' can', ' Can', ' A Single Can of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["cans", " can", " Can", " A Single Can of a unit."]
|
||||
2025-04-07 18:11:12.157037 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('bars', ' bar', ' Bars', ' A Single bar of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["bars", " bar", " Bars", " A Single bar of a unit."]
|
||||
2025-04-07 18:11:12.165175 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('loaves', ' loaf', ' Loaf', ' A single loaf of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["loaves", " loaf", " Loaf", " A single loaf of a unit."]
|
||||
2025-04-07 18:11:12.172983 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('packs', ' pack', ' Pack', ' A Single Pack of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["packs", " pack", " Pack", " A Single Pack of a unit."]
|
||||
2025-04-07 18:12:49.005060 --- CAUTION --- DatabaseError(message='duplicate key value violates unique constraint "units_plural_key"DETAIL: Key (plural)=(Plural) already exists.', payload=('Plural', ' Single', ' Fullname', ' Description'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["Plural", " Single", " Fullname", " Description"]
|
||||
2025-04-07 18:12:49.018692 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('tsp', ' tsp', ' Teaspoon', ' 5 millilitres in Australia New Zealand UK Canada and the US but approximately 4.93 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["tsp", " tsp", " Teaspoon", " 5 millilitres in Australia New Zealand UK Canada and the US but approximately 4.93 millilitres in the US."]
|
||||
2025-04-07 18:12:49.026767 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('dsp', ' dsp', ' Dessertspoon', ' 10 millilitres in Australia.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["dsp", " dsp", " Dessertspoon", " 10 millilitres in Australia."]
|
||||
2025-04-07 18:12:49.035142 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('tbsp', ' tbsp', ' Tablespoon', ' 20 millilitres in Australia or 15 millilitres in New Zealand the UK Canada and the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["tbsp", " tbsp", " Tablespoon", " 20 millilitres in Australia or 15 millilitres in New Zealand the UK Canada and the US."]
|
||||
2025-04-07 18:12:49.043176 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('fl oz', ' fl oz', ' Fluid ounce', ' 28.41 millilitres in the UK or approximately 29.57 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["fl oz", " fl oz", " Fluid ounce", " 28.41 millilitres in the UK or approximately 29.57 millilitres in the US."]
|
||||
2025-04-07 18:12:49.052311 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('cups', ' cup', ' Cup', ' 250 millilitres in Australia and New Zealand or approximately 170.48 millilitres in the UK or approximately 227.31 millilitres in Canada and approximately 236.59 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["cups", " cup", " Cup", " 250 millilitres in Australia and New Zealand or approximately 170.48 millilitres in the UK or approximately 227.31 millilitres in Canada and approximately 236.59 millilitres in the US."]
|
||||
2025-04-07 18:12:49.060527 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('pt', ' pt', ' Pint', ' 570 millilitres in Australia approximately 568.26 millilitres in New Zealand the UK and Canada and approximately 473.18 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["pt", " pt", " Pint", " 570 millilitres in Australia approximately 568.26 millilitres in New Zealand the UK and Canada and approximately 473.18 millilitres in the US."]
|
||||
2025-04-07 18:12:49.068511 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('qt', ' qt', ' Quart', ' Approximately 1136.52 millilitres in Australia New Zealand the UK and Canada and approximately 946.35 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["qt", " qt", " Quart", " Approximately 1136.52 millilitres in Australia New Zealand the UK and Canada and approximately 946.35 millilitres in the US."]
|
||||
2025-04-07 18:12:49.076951 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('gal', ' gal', ' Gallon', ' Approximately 4546.09 millilitres in Australia New Zealand the UK and Canada and approximately 3785.41 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["gal", " gal", " Gallon", " Approximately 4546.09 millilitres in Australia New Zealand the UK and Canada and approximately 3785.41 millilitres in the US."]
|
||||
2025-04-07 18:12:49.086062 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('jiggers', ' jigger', ' Jigger', ' 1.5 fluid ounces.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["jiggers", " jigger", " Jigger", " 1.5 fluid ounces."]
|
||||
2025-04-07 18:12:49.095057 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('oz', ' oz', ' Ounce', ' 1/4 lb for butter which can also be measured as 3 tablespoons.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["oz", " oz", " Ounce", " 1/4 lb for butter which can also be measured as 3 tablespoons."]
|
||||
2025-04-07 18:12:49.102906 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('L', ' L', ' Liter', ' Commonly used for volume measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["L", " L", " Liter", " Commonly used for volume measurement in the metric system."]
|
||||
2025-04-07 18:12:49.111927 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('mL', ' mL', ' Milliliter', ' Commonly used for volume measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["mL", " mL", " Milliliter", " Commonly used for volume measurement in the metric system."]
|
||||
2025-04-07 18:12:49.119508 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('dm3', ' dm3', ' Cubic decimeter', ' Equivalent to 1 liter.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["dm3", " dm3", " Cubic decimeter", " Equivalent to 1 liter."]
|
||||
2025-04-07 18:12:49.128094 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('g', ' g', ' Gram', ' Commonly used for weight measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["g", " g", " Gram", " Commonly used for weight measurement in the metric system."]
|
||||
2025-04-07 18:12:49.136293 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('kg', ' kg', ' Kilogram', ' Commonly used for weight measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["kg", " kg", " Kilogram", " Commonly used for weight measurement in the metric system."]
|
||||
2025-04-07 18:12:49.144897 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('lbs', ' lb', ' Pound', ' Used for weight measurement in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["lbs", " lb", " Pound", " Used for weight measurement in the US."]
|
||||
2025-04-07 18:12:49.153354 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('fluid scruples', ' fluid scruple', ' Fluid scruple', " A unit used in the apothecaries' system but not commonly used in cooking."), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["fluid scruples", " fluid scruple", " Fluid scruple", " A unit used in the apothecaries' system but not commonly used in cooking."]
|
||||
2025-04-07 18:12:49.162476 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('cm3', ' cm3', ' Cubic centimeter', ' Equivalent to 1 milliliter.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["cm3", " cm3", " Cubic centimeter", " Equivalent to 1 milliliter."]
|
||||
2025-04-07 18:12:49.170611 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('breakfast cups', ' breakfast cup', ' Breakfast cup', ' Similar in size to the US customary cup and the metric cup.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["breakfast cups", " breakfast cup", " Breakfast cup", " Similar in size to the US customary cup and the metric cup."]
|
||||
2025-04-07 18:12:49.178357 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('tumblers', ' tumbler', ' Tumblerful', ' A unit used in British cookery books and recipes.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["tumblers", " tumbler", " Tumblerful", " A unit used in British cookery books and recipes."]
|
||||
2025-04-07 18:12:49.186429 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('winefulls', ' winefull', ' Wineglassful', ' A unit used in British cookery books and recipes.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["winefulls", " winefull", " Wineglassful", " A unit used in British cookery books and recipes."]
|
||||
2025-04-07 18:12:49.195941 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('coffee cups', ' coffee cup', ' Coffee cup', ' Can vary from 100 to 200 millilitres.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["coffee cups", " coffee cup", " Coffee cup", " Can vary from 100 to 200 millilitres."]
|
||||
2025-04-07 18:12:49.205197 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('sticks of butter', ' stick of butter', ' Stick of butter', ' 1/4 lb or 3 tablespoons.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["sticks of butter", " stick of butter", " Stick of butter", " 1/4 lb or 3 tablespoons."]
|
||||
2025-04-07 18:12:49.212492 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('smidgens', ' smidgen', ' Smidgen', ' An approximate unit of measure.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["smidgens", " smidgen", " Smidgen", " An approximate unit of measure."]
|
||||
2025-04-07 18:12:49.221728 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('dashes', ' dash', ' Dash', ' An approximate unit of measure.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["dashes", " dash", " Dash", " An approximate unit of measure."]
|
||||
2025-04-07 18:12:49.231128 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('drops', ' drop', ' Drop', ' An approximate unit of measure.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["drops", " drop", " Drop", " An approximate unit of measure."]
|
||||
2025-04-07 18:12:49.239584 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('eaches', ' each', ' Each', ' A single unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["eaches", " each", " Each", " A single unit."]
|
||||
2025-04-07 18:12:49.248671 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('boxes', ' box', ' Box', ' A Single box of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["boxes", " box", " Box", " A Single box of a unit."]
|
||||
2025-04-07 18:12:49.257268 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('crates', ' crate', ' Crate', ' a single crate of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["crates", " crate", " Crate", " a single crate of a unit."]
|
||||
2025-04-07 18:12:49.266294 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('jars', ' jar', ' Jar', ' A single Jar of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["jars", " jar", " Jar", " A single Jar of a unit."]
|
||||
2025-04-07 18:12:49.275199 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('cans', ' can', ' Can', ' A Single Can of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["cans", " can", " Can", " A Single Can of a unit."]
|
||||
2025-04-07 18:12:49.282805 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('bars', ' bar', ' Bars', ' A Single bar of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["bars", " bar", " Bars", " A Single bar of a unit."]
|
||||
2025-04-07 18:12:49.291482 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('loaves', ' loaf', ' Loaf', ' A single loaf of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["loaves", " loaf", " Loaf", " A single loaf of a unit."]
|
||||
2025-04-07 18:12:49.301205 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('packs', ' pack', ' Pack', ' A Single Pack of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["packs", " pack", " Pack", " A Single Pack of a unit."]
|
||||
2025-04-07 18:13:10.286385 --- CAUTION --- DatabaseError(message='duplicate key value violates unique constraint "units_plural_key"DETAIL: Key (plural)=(Plural) already exists.', payload=('Plural', ' Single', ' Fullname', ' Description'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["Plural", " Single", " Fullname", " Description"]
|
||||
2025-04-07 18:13:10.298389 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('tsp', ' tsp', ' Teaspoon', ' 5 millilitres in Australia New Zealand UK Canada and the US but approximately 4.93 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["tsp", " tsp", " Teaspoon", " 5 millilitres in Australia New Zealand UK Canada and the US but approximately 4.93 millilitres in the US."]
|
||||
2025-04-07 18:13:10.307226 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('dsp', ' dsp', ' Dessertspoon', ' 10 millilitres in Australia.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["dsp", " dsp", " Dessertspoon", " 10 millilitres in Australia."]
|
||||
2025-04-07 18:13:10.316278 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('tbsp', ' tbsp', ' Tablespoon', ' 20 millilitres in Australia or 15 millilitres in New Zealand the UK Canada and the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["tbsp", " tbsp", " Tablespoon", " 20 millilitres in Australia or 15 millilitres in New Zealand the UK Canada and the US."]
|
||||
2025-04-07 18:13:10.324427 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('fl oz', ' fl oz', ' Fluid ounce', ' 28.41 millilitres in the UK or approximately 29.57 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["fl oz", " fl oz", " Fluid ounce", " 28.41 millilitres in the UK or approximately 29.57 millilitres in the US."]
|
||||
2025-04-07 18:13:10.333981 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('cups', ' cup', ' Cup', ' 250 millilitres in Australia and New Zealand or approximately 170.48 millilitres in the UK or approximately 227.31 millilitres in Canada and approximately 236.59 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["cups", " cup", " Cup", " 250 millilitres in Australia and New Zealand or approximately 170.48 millilitres in the UK or approximately 227.31 millilitres in Canada and approximately 236.59 millilitres in the US."]
|
||||
2025-04-07 18:13:10.343232 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('pt', ' pt', ' Pint', ' 570 millilitres in Australia approximately 568.26 millilitres in New Zealand the UK and Canada and approximately 473.18 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["pt", " pt", " Pint", " 570 millilitres in Australia approximately 568.26 millilitres in New Zealand the UK and Canada and approximately 473.18 millilitres in the US."]
|
||||
2025-04-07 18:13:10.350877 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('qt', ' qt', ' Quart', ' Approximately 1136.52 millilitres in Australia New Zealand the UK and Canada and approximately 946.35 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["qt", " qt", " Quart", " Approximately 1136.52 millilitres in Australia New Zealand the UK and Canada and approximately 946.35 millilitres in the US."]
|
||||
2025-04-07 18:13:10.359701 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('gal', ' gal', ' Gallon', ' Approximately 4546.09 millilitres in Australia New Zealand the UK and Canada and approximately 3785.41 millilitres in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["gal", " gal", " Gallon", " Approximately 4546.09 millilitres in Australia New Zealand the UK and Canada and approximately 3785.41 millilitres in the US."]
|
||||
2025-04-07 18:13:10.367839 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('jiggers', ' jigger', ' Jigger', ' 1.5 fluid ounces.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["jiggers", " jigger", " Jigger", " 1.5 fluid ounces."]
|
||||
2025-04-07 18:13:10.376504 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('oz', ' oz', ' Ounce', ' 1/4 lb for butter which can also be measured as 3 tablespoons.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["oz", " oz", " Ounce", " 1/4 lb for butter which can also be measured as 3 tablespoons."]
|
||||
2025-04-07 18:13:10.384509 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('L', ' L', ' Liter', ' Commonly used for volume measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["L", " L", " Liter", " Commonly used for volume measurement in the metric system."]
|
||||
2025-04-07 18:13:10.392478 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('mL', ' mL', ' Milliliter', ' Commonly used for volume measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["mL", " mL", " Milliliter", " Commonly used for volume measurement in the metric system."]
|
||||
2025-04-07 18:13:10.401408 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('dm3', ' dm3', ' Cubic decimeter', ' Equivalent to 1 liter.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["dm3", " dm3", " Cubic decimeter", " Equivalent to 1 liter."]
|
||||
2025-04-07 18:13:10.410222 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('g', ' g', ' Gram', ' Commonly used for weight measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["g", " g", " Gram", " Commonly used for weight measurement in the metric system."]
|
||||
2025-04-07 18:13:10.418352 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('kg', ' kg', ' Kilogram', ' Commonly used for weight measurement in the metric system.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["kg", " kg", " Kilogram", " Commonly used for weight measurement in the metric system."]
|
||||
2025-04-07 18:13:10.426544 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('lbs', ' lb', ' Pound', ' Used for weight measurement in the US.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["lbs", " lb", " Pound", " Used for weight measurement in the US."]
|
||||
2025-04-07 18:13:10.435351 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('fluid scruples', ' fluid scruple', ' Fluid scruple', " A unit used in the apothecaries' system but not commonly used in cooking."), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["fluid scruples", " fluid scruple", " Fluid scruple", " A unit used in the apothecaries' system but not commonly used in cooking."]
|
||||
2025-04-07 18:13:10.444346 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('cm3', ' cm3', ' Cubic centimeter', ' Equivalent to 1 milliliter.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["cm3", " cm3", " Cubic centimeter", " Equivalent to 1 milliliter."]
|
||||
2025-04-07 18:13:10.452199 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('breakfast cups', ' breakfast cup', ' Breakfast cup', ' Similar in size to the US customary cup and the metric cup.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["breakfast cups", " breakfast cup", " Breakfast cup", " Similar in size to the US customary cup and the metric cup."]
|
||||
2025-04-07 18:13:10.461058 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('tumblers', ' tumbler', ' Tumblerful', ' A unit used in British cookery books and recipes.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["tumblers", " tumbler", " Tumblerful", " A unit used in British cookery books and recipes."]
|
||||
2025-04-07 18:13:10.469292 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('winefulls', ' winefull', ' Wineglassful', ' A unit used in British cookery books and recipes.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["winefulls", " winefull", " Wineglassful", " A unit used in British cookery books and recipes."]
|
||||
2025-04-07 18:13:10.478094 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('coffee cups', ' coffee cup', ' Coffee cup', ' Can vary from 100 to 200 millilitres.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["coffee cups", " coffee cup", " Coffee cup", " Can vary from 100 to 200 millilitres."]
|
||||
2025-04-07 18:13:10.486368 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('sticks of butter', ' stick of butter', ' Stick of butter', ' 1/4 lb or 3 tablespoons.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["sticks of butter", " stick of butter", " Stick of butter", " 1/4 lb or 3 tablespoons."]
|
||||
2025-04-07 18:13:10.494021 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('smidgens', ' smidgen', ' Smidgen', ' An approximate unit of measure.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["smidgens", " smidgen", " Smidgen", " An approximate unit of measure."]
|
||||
2025-04-07 18:13:10.502494 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('dashes', ' dash', ' Dash', ' An approximate unit of measure.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["dashes", " dash", " Dash", " An approximate unit of measure."]
|
||||
2025-04-07 18:13:10.510827 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('drops', ' drop', ' Drop', ' An approximate unit of measure.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["drops", " drop", " Drop", " An approximate unit of measure."]
|
||||
2025-04-07 18:13:10.519133 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('eaches', ' each', ' Each', ' A single unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["eaches", " each", " Each", " A single unit."]
|
||||
2025-04-07 18:13:10.528226 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('boxes', ' box', ' Box', ' A Single box of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["boxes", " box", " Box", " A Single box of a unit."]
|
||||
2025-04-07 18:13:10.537276 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('crates', ' crate', ' Crate', ' a single crate of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["crates", " crate", " Crate", " a single crate of a unit."]
|
||||
2025-04-07 18:13:10.544973 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('jars', ' jar', ' Jar', ' A single Jar of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["jars", " jar", " Jar", " A single Jar of a unit."]
|
||||
2025-04-07 18:13:10.554072 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('cans', ' can', ' Can', ' A Single Can of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["cans", " can", " Can", " A Single Can of a unit."]
|
||||
2025-04-07 18:13:10.561977 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('bars', ' bar', ' Bars', ' A Single bar of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["bars", " bar", " Bars", " A Single bar of a unit."]
|
||||
2025-04-07 18:13:10.570396 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('loaves', ' loaf', ' Loaf', ' A single loaf of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["loaves", " loaf", " Loaf", " A single loaf of a unit."]
|
||||
2025-04-07 18:13:10.579705 --- CAUTION --- DatabaseError(message='current transaction is aborted, commands ignored until end of transaction block', payload=('packs', ' pack', ' Pack', ' A Single Pack of a unit.'), sql='INSERT INTO units(plural, single, fullname, description) VALUES (%s, %s, %s, %s) RETURNING *;')
|
||||
["packs", " pack", " Pack", " A Single Pack of a unit."]
|
||||
298
process.py
298
process.py
@ -1,298 +0,0 @@
|
||||
import database, MyDataclasses, psycopg2, datetime,json
|
||||
from config import config
|
||||
|
||||
def dropSiteTables(conn, site_manager: MyDataclasses.SiteManager):
    """Drop every site-specific table listed in *site_manager.drop_order*.

    Args:
        conn: open psycopg2 connection.
        site_manager: carries the site name and the ordered list of tables
            to drop (drop_order is ordered to respect FK dependencies —
            TODO confirm against SiteManager).

    Raises:
        Exception: propagated unchanged from the database layer.
    """
    # The original try/except only re-raised the same exception; removed.
    for table in site_manager.drop_order:
        database.__dropTable(conn, site_manager.site_name, table)
        with open("process.log", "a+") as file:
            file.write(f"{datetime.datetime.now()} --- INFO --- {table} DROPPED!\n")
|
||||
|
||||
def setupSiteTables(conn, site_manager: MyDataclasses.SiteManager):
    """Create every site-specific table listed in *site_manager.create_order*.

    Args:
        conn: open psycopg2 connection.
        site_manager: carries the site name and the ordered list of tables
            to create.

    Raises:
        Exception: propagated unchanged from the database layer.
    """
    # The original try/except only re-raised the same exception; removed.
    for table in site_manager.create_order:
        database.__createTable(conn, site_manager.site_name, table)
        with open("process.log", "a+") as file:
            file.write(f"{datetime.datetime.now()} --- INFO --- {table} Created!\n")
|
||||
|
||||
def addAdminUser(conn, site_manager: MyDataclasses.SiteManager, convert=True):
    """Insert (or touch) the admin login for a site and return the row.

    Args:
        conn: open psycopg2 connection.
        site_manager: supplies the (username, password, email, row_type) tuple
            via its ``admin_user`` attribute.
        convert: when True, return the row as a dict via
            tupleDictionaryFactory; otherwise return the raw tuple.

    Returns:
        The inserted/updated login row (dict or tuple), or ``()`` when the
        database returned nothing.
    """
    admin_user = ()
    # Upsert keyed on username; the no-op SET makes RETURNING yield a row
    # even when the login already exists.  (Was an f-string with no
    # placeholders; plain string now.)
    sql = ("INSERT INTO logins (username, password, email, row_type) "
           "VALUES (%s, %s, %s, %s) "
           "ON CONFLICT (username) DO UPDATE SET username = excluded.username "
           "RETURNING *;")
    with conn.cursor() as cur:
        cur.execute(sql, site_manager.admin_user)
        rows = cur.fetchone()
        if rows:
            admin_user = database.tupleDictionaryFactory(cur.description, rows) if convert else rows
    with open("process.log", "a+") as file:
        file.write(f"{datetime.datetime.now()} --- INFO --- Admin User Created!\n")
    return admin_user
|
||||
|
||||
def deleteSite(site_manager: MyDataclasses.SiteManager):
    """Uses a Site Manager to delete a site from the system.

    Verifies the supplied admin credentials belong to the site owner,
    deletes the site's roles, drops the per-site tables, detaches the
    roles/site from all users, then deletes the sites row.  On any failure
    the transaction is rolled back and the error appended to process.log.

    Args:
        site_manager (MyDataclasses.SiteManager): site name + owner credentials.
    """
    database_config = config()
    print(site_manager)
    conn = None  # BUG FIX: rollback below raised NameError if connect() failed
    try:
        with psycopg2.connect(**database_config) as conn:
            print("before site")
            site = database.selectSiteTuple(conn, (site_manager.site_name,), convert=True)
            print("before user")
            user = database.getUser(conn, site_manager.admin_user, convert=True)
            print("after user: ", user)
            if user['id'] != site['site_owner_id']:
                raise Exception("The credentials passed do not match site owner")

            print("before roles")
            roles = database.selectRolesTuple(conn, (site['id'],), convert=True)
            database.deleteRolesTuple(conn, site['site_name'], [role['id'] for role in roles])

            print("dropping site")
            dropSiteTables(conn, site_manager)

            print("updating roles and sites")
            for role in roles:
                database.updateUsersRoles(conn, role['id'])
            database.updateUsersSites(conn, site['id'])

            site = database.deleteSitesTuple(conn, site_manager.site_name, (site['id'], ), convert=True)

    except Exception as error:
        with open("process.log", "a+") as file:
            file.write(f"{datetime.datetime.now()} --- ERROR --- {error}\n")
        print(error)
        if conn is not None:
            conn.rollback()
|
||||
|
||||
def addSite(site_manager: MyDataclasses.SiteManager):
    """Uses a Site Manager to add a site to the system.

    Creates the per-site tables, the admin user, the sites row, an Admin
    role, the default zone/location, and placeholder "None" vendor/brand
    rows.  Commits on success; rolls back and logs to process.log on any
    failure.

    Args:
        site_manager (MyDataclasses.SiteManager): site definition + admin
            credentials.
    """
    database_config = config()
    conn = None  # BUG FIX: rollback below raised NameError if connect() failed
    try:
        with psycopg2.connect(**database_config) as conn:
            setupSiteTables(conn, site_manager)

            admin_user = addAdminUser(conn, site_manager)

            site = MyDataclasses.SitePayload(
                site_name=site_manager.site_name,
                site_description=site_manager.description,
                site_owner_id=admin_user['id']
            )
            site = database.insertSitesTuple(conn, site.payload(), convert=True)

            role = MyDataclasses.RolePayload("Admin", f"Admin for {site['site_name']}", site['id'])
            role = database.insertRolesTuple(conn, role.payload(), convert=True)

            admin_user = database.updateAddLoginSitesRoles(conn, (site["id"], role["id"], admin_user["id"]), convert=True)

            default_zone = MyDataclasses.ZonePayload(site_manager.default_zone, site['id'])
            default_zone = database.insertZonesTuple(conn, site["site_name"], default_zone.payload(), convert=True)

            uuid = f"{site_manager.default_zone}@{site_manager.default_location}"
            default_location = MyDataclasses.LocationPayload(uuid, site_manager.default_location, default_zone['id'])
            default_location = database.insertLocationsTuple(conn, site['site_name'], default_location.payload(), convert=True)

            # Point the sites row at the zone/location we just created.
            payload = {
                'id': site['id'],
                'update': {'default_zone': default_zone['id'],
                           'default_auto_issue_location': default_location['id'],
                           'default_primary_location': default_location['id']}
            }
            database.__updateTuple(conn, site_manager.site_name, "sites", payload)

            # Placeholder rows so items can reference "no" vendor/brand.
            blank_vendor = MyDataclasses.VendorPayload("None", admin_user['id'])
            blank_brand = MyDataclasses.BrandsPayload("None")
            blank_vendor = database.insertVendorsTuple(conn, site['site_name'], blank_vendor.payload(), convert=True)
            blank_brand = database.insertBrandsTuple(conn, site['site_name'], blank_brand.payload(), convert=True)

            conn.commit()
    except Exception as error:
        with open("process.log", "a+") as file:
            file.write(f"{datetime.datetime.now()} --- ERROR --- {error}\n")
        if conn is not None:
            conn.rollback()
|
||||
|
||||
def postNewBlankItem(conn, site_name: str, user_id: int, data: dict):
    """Create a brand-new item with blank info rows at the site's defaults.

    Inserts logistics/item/food info rows, the item itself, an item-location
    row at the site's default location, and a SYSTEM "added" transaction.

    Args:
        conn: open psycopg2 connection.
        site_name: schema/table-name prefix of the site.
        user_id: id of the user recorded on the creation transaction.
        data: expects at least 'barcode', 'name' and 'subtype' keys.
    """
    site = database.selectSiteTuple(conn, (site_name,), convert=True)
    default_zone = database.__selectTuple(conn, site_name, f"{site_name}_zones", (site['default_zone'], ), convert=True)
    default_location = database.__selectTuple(conn, site_name, f"{site_name}_locations", (site['default_primary_location'],), convert=True)
    uuid = f"{default_zone['name']}@{default_location['name']}"

    # Blank payloads pointing at the site's default zone/locations.
    logistics_info = MyDataclasses.LogisticsInfoPayload(
        barcode=data['barcode'],
        primary_location=site['default_primary_location'],
        primary_zone=site['default_zone'],
        auto_issue_location=site['default_auto_issue_location'],
        auto_issue_zone=site['default_zone']
    )
    item_info = MyDataclasses.ItemInfoPayload(data['barcode'])
    food_info = MyDataclasses.FoodInfoPayload()
    brand_id = 1  # presumably the "None" brand inserted at site creation — verify

    logistics_info = database.insertLogisticsInfoTuple(conn, site_name, logistics_info.payload(), convert=True)
    item_info = database.insertItemInfoTuple(conn, site_name, item_info.payload(), convert=True)
    food_info = database.insertFoodInfoTuple(conn, site_name, food_info.payload(), convert=True)

    # Apostrophes are escaped only inside the search string; the item row
    # itself stores the raw name.  NOTE(review): confirm this asymmetry is
    # intentional.  (Removed dead locals: description, tags, links and the
    # unused *_id placeholders.)
    escaped_name = data['name'].replace("'", "@&apostraphe&")
    search_string = f"&&{data['barcode']}&&{escaped_name}&&"

    item = MyDataclasses.ItemsPayload(
        data['barcode'],
        data['name'],
        item_info['id'],
        logistics_info['id'],
        food_info['id'],
        brand=brand_id,
        row_type="single",
        item_type=data['subtype'],
        search_string=search_string
    )
    item = database.insertItemTuple(conn, site_name, item.payload(), convert=True)

    with conn.cursor() as cur:
        cur.execute(f"SELECT id FROM {site_name}_locations WHERE uuid=%s;", (uuid, ))
        location_id = cur.fetchone()[0]

    item_location = MyDataclasses.ItemLocationPayload(item['id'], location_id)
    database.insertItemLocationsTuple(conn, site_name, item_location.payload())

    # Audit trail: zero-quantity SYSTEM transaction marking creation.
    creation_tuple = MyDataclasses.TransactionPayload(
        datetime.datetime.now(),
        logistics_info['id'],
        item['barcode'],
        item['item_name'],
        "SYSTEM",
        0.0,
        "Item added to the System!",
        user_id,
        {'location': uuid}
    )
    database.insertTransactionsTuple(conn, site_name, creation_tuple.payload())
|
||||
|
||||
|
||||
def postTransaction(conn, site_name, user_id, data: dict):
    """Apply an "Adjust In"/"Adjust Out" transaction to an item location.

    Expected ``data`` keys: item_id, logistics_info_id, barcode, item_name,
    transaction_type, quantity, description, cost, vendor, expires,
    location_id.  Quantities/costs arrive as strings and are converted here.

    Returns:
        dict with an 'error' flag and a user-facing 'message'.
    """
    def quantityFactory(quantity_on_hand: float, quantity: float, transaction_type: str):
        # Translate the transaction type into a signed quantity change.
        if transaction_type == "Adjust In":
            return quantity_on_hand + quantity
        if transaction_type == "Adjust Out":
            return quantity_on_hand - quantity
        raise Exception("The transaction type is wrong!")

    transaction_time = datetime.datetime.now()

    cost_layer = MyDataclasses.CostLayerPayload(
        aquisition_date=transaction_time,
        quantity=float(data['quantity']),
        cost=float(data['cost']),
        currency_type="USD",
        vendor=int(data['vendor']),
        expires=data['expires']
    )
    transaction = MyDataclasses.TransactionPayload(
        timestamp=transaction_time,
        logistics_info_id=int(data['logistics_info_id']),
        barcode=data['barcode'],
        name=data['item_name'],
        transaction_type=data['transaction_type'],
        quantity=float(data['quantity']),
        description=data['description'],
        user_id=user_id,
    )

    location = database.selectItemLocationsTuple(conn, site_name, payload=(data['item_id'], data['location_id']), convert=True)
    cost_layers: list = location['cost_layers']
    if data['transaction_type'] == "Adjust In":
        cost_layer = database.insertCostLayersTuple(conn, site_name, cost_layer.payload(), convert=True)
        cost_layers.append(cost_layer['id'])

    if data['transaction_type'] == "Adjust Out":
        if float(location['quantity_on_hand']) < float(data['quantity']):
            return {"error": True, "message": "The quantity on hand in the chosen location is not enough to satisfy your transaction!"}
        cost_layers = database.selectCostLayersTuple(conn, site_name, (location['id'], ), convert=True)

        # Consume cost layers FIFO until the requested quantity is used up;
        # fully-drained layers are deleted, a partially-drained layer is
        # updated in place and kept.
        new_cost_layers = []
        qty = float(data['quantity'])
        for layer in cost_layers:
            if qty == 0.0:
                new_cost_layers.append(layer['id'])
            elif qty >= float(layer['quantity']):
                qty -= float(layer['quantity'])
                layer['quantity'] = 0.0
            else:
                layer['quantity'] -= qty
                new_cost_layers.append(layer['id'])
                database.__updateTuple(conn, site_name, f"{site_name}_cost_layers", {'id': layer['id'], 'update': {'quantity': layer['quantity']}})
                qty = 0.0

            if layer['quantity'] == 0.0:
                database.deleteCostLayersTuple(conn, site_name, (layer['id'], ))

        cost_layers = new_cost_layers

    # BUG FIX: data['quantity'] was passed in raw (a string from the request)
    # while the on-hand value was float()ed — quantityFactory's arithmetic
    # needs both as floats.
    quantity_on_hand = quantityFactory(float(location['quantity_on_hand']), float(data['quantity']), data['transaction_type'])

    updated_item_location_payload = (cost_layers, quantity_on_hand, data['item_id'], data['location_id'])
    database.updateItemLocation(conn, site_name, updated_item_location_payload)

    site_location = database.__selectTuple(conn, site_name, f"{site_name}_locations", (location['location_id'], ), convert=True)
    transaction.data = {'location': site_location['uuid']}

    database.insertTransactionsTuple(conn, site_name, transaction.payload())
    return {"error": False, "message": "Transaction Successful!"}
|
||||
|
||||
|
||||
|
||||
# Manual smoke-test fixture: a throwaway site definition used with the
# commented-out calls below.
# NOTE(review): the password looks like a SHA-256 hex digest — confirm.
site_manager = MyDataclasses.SiteManager(
    site_name="test",
    admin_user=("jadowyne", "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08", "jadowyne.ulve@outlook.com", 'user'),
    default_zone="DEFAULT",
    default_location="ALL",
    description="This is my test site"
)

# Uncomment one of these to create or delete the test site.
#addSite(site_manager)
#deleteSite(site_manager)
|
||||
324
receipts_API.py
324
receipts_API.py
@ -1,324 +0,0 @@
|
||||
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response, current_app, send_from_directory
|
||||
import psycopg2, math, json, datetime, main, copy, requests, process, database, pprint, MyDataclasses
|
||||
from config import config, sites_config
|
||||
from user_api import login_required
|
||||
import openfoodfacts
|
||||
import postsqldb
|
||||
import mimetypes, os
|
||||
import pymupdf, PIL
|
||||
|
||||
|
||||
def create_pdf_preview(pdf_path, output_path, size=(600, 400)):
    """Render page 1 of a PDF to a JPEG thumbnail.

    Args:
        pdf_path: path of the source PDF.
        output_path: directory prefix the .jpg file name is appended to.
        size: maximum (width, height) of the thumbnail.

    Returns:
        The bare file name (``<stem>.jpg``) of the written JPEG.
    """
    pdf = pymupdf.open(pdf_path)
    try:
        page = pdf[0]
        # BUG FIX: splitext strips only the trailing extension; the old
        # str.replace('.pdf', '') would also mangle '.pdf' mid-name.
        file_name = os.path.splitext(os.path.basename(pdf_path))[0]
        pix = page.get_pixmap()
        img = PIL.Image.frombytes("RGB", (pix.width, pix.height), pix.samples)
        output_path = output_path + file_name + '.jpg'
        img.thumbnail(size)
        img.save(output_path)
    finally:
        pdf.close()  # BUG FIX: the document handle was leaked before
    return file_name + '.jpg'
|
||||
|
||||
|
||||
# Blueprint grouping all /receipt* endpoints; registered by the main app.
receipt_api = Blueprint('receipt_api', __name__)
|
||||
|
||||
@receipt_api.route("/receipt/<id>")
@login_required
def receipt(id):
    """Render the detail page for a single receipt."""
    site_names = [entry[1] for entry in main.get_sites(session['user']['sites'])]
    with psycopg2.connect(**config()) as conn:
        units = postsqldb.UnitsTable.getAll(conn)
    return render_template(
        "receipts/receipt.html",
        id=id,
        current_site=session['selected_site'],
        sites=site_names,
        units=units,
    )
|
||||
|
||||
@receipt_api.route("/receipts")
@login_required
def receipts():
    """Render the receipts index page."""
    site_names = [entry[1] for entry in main.get_sites(session['user']['sites'])]
    return render_template(
        "receipts/index.html",
        current_site=session['selected_site'],
        sites=site_names,
    )
|
||||
|
||||
@receipt_api.route('/receipts/getItems', methods=["GET"])
def getItems():
    """Paginated item list (with quantity on hand) for the selected site."""
    recordset = []
    count = {'count': 0}
    if request.method == "GET":
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 10))
        site_name = session['selected_site']
        offset = (page - 1) * limit
        with psycopg2.connect(**config()) as conn:
            recordset, count = database.getItemsWithQOH(
                conn, site_name, ("%%", limit, offset), convert=True)
        return jsonify({"items": recordset,
                        "end": math.ceil(count['count'] / limit),
                        "error": False,
                        "message": "items fetched succesfully!"})
    # Unreachable with the current route config; kept as in the original.
    return jsonify({"items": recordset,
                    "end": math.ceil(count['count'] / limit),
                    "error": True,
                    "message": "There was an error with this GET statement"})
|
||||
|
||||
@receipt_api.route('/receipts/getReceipts', methods=["GET"])
def getReceipts():
    """Paginated list of receipts for the selected site."""
    recordset = []
    if request.method == "GET":
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 50))
        offset = (page - 1) * limit
        current_site = session['selected_site']
        with psycopg2.connect(**config()) as conn:
            recordset, count = database.getReceipts(
                conn, current_site, payload=(limit, offset), convert=True)
        return jsonify({'receipts': recordset,
                        "end": math.ceil(count / limit),
                        'error': False,
                        "message": "Get Receipts Successful!"})
    # Unreachable with the current route config; kept as in the original.
    return jsonify({'receipts': recordset,
                    "end": math.ceil(count / limit),
                    'error': True,
                    "message": "Something went wrong while getting receipts!"})
|
||||
|
||||
@receipt_api.route('/receipts/getReceipt', methods=["GET"])
def getReceipt():
    """Fetch one receipt by id for the selected site."""
    record = []
    if request.method == "GET":
        receipt_id = int(request.args.get('id', 1))
        current_site = session['selected_site']
        with psycopg2.connect(**config()) as conn:
            record = database.getReceiptByID(
                conn, current_site, payload=(receipt_id, ), convert=True)
        return jsonify({'receipt': record, 'error': False,
                        "message": "Get Receipts Successful!"})
    return jsonify({'receipt': record, 'error': True,
                    "message": "Something went wrong while getting receipts!"})
|
||||
|
||||
@receipt_api.route('/receipts/addReceipt', methods=["POST", "GET"])
def addReceipt():
    """Create an empty receipt (PR-<seq>) owned by the current user."""
    if request.method == "GET":
        current_user = session['user_id']
        current_site = session['selected_site']
        with psycopg2.connect(**config()) as conn:
            new_receipt = MyDataclasses.ReceiptPayload(
                receipt_id=f"PR-{database.request_receipt_id(conn, current_site)}",
                submitted_by=current_user
            )
            database.insertReceiptsTuple(conn, current_site, new_receipt.payload())
        return jsonify({'error': False, "message": "Receipt Added Successful!"})
    return jsonify({'error': True, "message": "Something went wrong while adding receipt!"})
|
||||
|
||||
@receipt_api.route('/receipts/addSKULine', methods=["POST"])
def addSKULine():
    """Append an existing SKU to a receipt as a new line."""
    if request.method == "POST":
        body = request.get_json()
        item_id = int(body['item_id'])
        receipt_id = int(body['receipt_id'])

        current_site = session['selected_site']
        with psycopg2.connect(**config()) as conn:
            item = database.getItemAllByID(conn, current_site, (item_id, ), convert=True)
            # Seed the line with the item's current cost and expiry defaults.
            line_data = {
                'cost': item['item_info']['cost'],
                'expires': item['food_info']['expires']
            }
            line = MyDataclasses.ReceiptItemPayload(
                type="sku",
                receipt_id=receipt_id,
                barcode=item['barcode'],
                name=item['item_name'],
                qty=item['item_info']['uom_quantity'],
                uom=item['item_info']['uom'],
                data=line_data
            )
            database.insertReceiptItemsTuple(conn, current_site, line.payload())
        return jsonify({'error': False, "message": "Line added Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while add SKU line!"})
|
||||
|
||||
@receipt_api.route('/receipts/deleteLine', methods=["POST"])
def deleteLine():
    """Hard-delete a receipt line by id."""
    if request.method == "POST":
        target_id = int(request.get_json()['line_id'])
        current_site = session['selected_site']
        with psycopg2.connect(**config()) as conn:
            database.deleteReceiptItemsTuple(conn, current_site, (target_id, ))
        return jsonify({'error': False, "message": "Line Deleted Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while deleting line!"})
|
||||
|
||||
@receipt_api.route('/receipts/denyLine', methods=["POST"])
def denyLine():
    """Mark a receipt line as Denied (soft rejection, row kept)."""
    if request.method == "POST":
        target_id = int(request.get_json()['line_id'])
        current_site = session['selected_site']
        with psycopg2.connect(**config()) as conn:
            database.__updateTuple(conn, current_site, f"{current_site}_receipt_items",
                                   {'id': target_id, 'update': {'status': 'Denied'}})
        return jsonify({'error': False, "message": "Line Denied Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while denying line!"})
|
||||
|
||||
@receipt_api.route('/receipts/saveLine', methods=["POST"])
def saveLine():
    """Persist client-side edits to a receipt line.

    The JSON body carries 'line_id' and a 'payload' dict of column updates.
    Any previously fetched Open Food Facts data stored on the line is copied
    back into the incoming payload so an edit cannot drop it.

    Returns: JSON {'error': bool, 'message': str}.
    """
    if request.method == "POST":
        body = request.get_json()
        line_id = int(body['line_id'])
        payload = body['payload']
        site_name = session['selected_site']
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            receipt_item = database.__selectTuple(conn, site_name, f"{site_name}_receipt_items", (line_id, ), convert=True)
            # Membership test directly on the dict; '.keys()' was redundant.
            if 'api_data' in receipt_item['data']:
                # Preserve API enrichment across the save.
                payload['data']['api_data'] = receipt_item['data']['api_data']
            database.__updateTuple(conn, site_name, f"{site_name}_receipt_items", {'id': line_id, 'update': payload})
        return jsonify({'error': False, "message": "Line Saved Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while saving line!"})
|
||||
|
||||
|
||||
@receipt_api.route('/receipts/resolveLine', methods=["POST"])
def resolveLine():
    """Resolve a receipt line: receive its quantity into inventory.

    Loads the line and its parent receipt, creates a blank item record for
    lines of type 'api' / 'new sku', books an "Adjust In" transaction plus a
    new cost layer at the item's primary location, bumps quantity on hand,
    and finally marks the line Resolved.
    """
    if request.method == "POST":
        line_id = int(request.get_json()['line_id'])
        site_name = session['selected_site']
        user_id = session['user_id']
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            # One timestamp shared by the transaction and the cost layer.
            transaction_time = datetime.datetime.now()
            receipt_item = database.__selectTuple(conn, site_name, f"{site_name}_receipt_items", (line_id, ), convert=True)
            receipt = database.getReceiptByID(conn, site_name, (receipt_item['receipt_id'], ), convert=True)
            conv_factor = 1.0
            # 'expires' is either False (non-perishable) or a "YYYY-MM-DD" string.
            if receipt_item['data']['expires'] is not False:
                print(receipt_item['data']['expires'])
                expiration = datetime.datetime.strptime(receipt_item['data']['expires'], "%Y-%m-%d")
            else:
                expiration = None

            if receipt_item['type'] == 'sku':
                # If the barcode is linked to a child item, receive using the
                # link's conversion factor and record the child barcode.
                linked_item = database.getLinkedItemByBarcode(conn, site_name, (receipt_item['barcode'], ))
                if len(linked_item) > 1:
                    conv_factor = linked_item['conv_factor']
                    receipt_item['data']['linked_child'] = linked_item['barcode']

            if receipt_item['type'] == 'api':
                # Line came from the Open Food Facts lookup: create a blank
                # FOOD item so the barcode exists before receiving.
                data = {
                    'barcode': receipt_item['barcode'],
                    'name': receipt_item['name'],
                    'subtype': 'FOOD'
                }
                process.postNewBlankItem(conn, site_name, user_id, data)

            if receipt_item['type'] == "new sku":
                # Brand-new SKU entered by hand: same blank-item bootstrap.
                data = {
                    'barcode': receipt_item['barcode'],
                    'name': receipt_item['name'],
                    'subtype': 'FOOD'
                }
                process.postNewBlankItem(conn, site_name, user_id, data)

            item = database.getItemAllByBarcode(conn, site_name, (receipt_item['barcode'], ), convert=True)
            location = database.selectItemLocationsTuple(conn, site_name, (item['id'], item['logistics_info']['primary_location']['id']), convert=True)
            # Existing FIFO cost-layer ids at the primary location.
            cost_layers: list = location['cost_layers']

            receipt_item['data']['location'] = item['logistics_info']['primary_location']['uuid']

            # Inventory movement for the received quantity (converted into the
            # linked child's unit when a conversion factor applies).
            transaction = MyDataclasses.TransactionPayload(
                timestamp=transaction_time,
                logistics_info_id=item['logistics_info_id'],
                barcode=item['barcode'],
                name=item['item_name'],
                transaction_type="Adjust In",
                quantity=(float(receipt_item['qty'])*conv_factor),
                description=f"{receipt['receipt_id']}",
                user_id=session['user_id'],
                data=receipt_item['data']
            )

            # New cost layer priced from the receipt line.
            cost_layer = MyDataclasses.CostLayerPayload(
                aquisition_date=transaction_time,
                quantity=float(receipt_item['qty']),
                cost=float(receipt_item['data']['cost']),
                currency_type="USD",
                vendor=receipt['vendor_id'],
                expires=expiration
            )

            cost_layer = database.insertCostLayersTuple(conn, site_name, cost_layer.payload(), convert=True)
            cost_layers.append(cost_layer['id'])

            # NOTE(review): QOH is bumped by the raw line qty, while the
            # transaction quantity applies conv_factor — confirm intended.
            quantity_on_hand = float(location['quantity_on_hand']) + float(receipt_item['qty'])

            updated_item_location_payload = (cost_layers, quantity_on_hand, item['id'], item['logistics_info']['primary_location']['id'])
            database.updateItemLocation(conn, site_name, updated_item_location_payload)

            site_location = database.__selectTuple(conn, site_name, f"{site_name}_locations", (location['location_id'], ), convert=True)

            # Overwrites the uuid set above with the location row's uuid —
            # presumably both refer to the same place; verify.
            receipt_item['data']['location'] = site_location['uuid']

            database.insertTransactionsTuple(conn, site_name, transaction.payload())

            database.__updateTuple(conn, site_name, f"{site_name}_receipt_items", {'id': receipt_item['id'], 'update': {'status': "Resolved"}})

        return jsonify({'error': False, "message": "Line Saved Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while saving line!"})
|
||||
|
||||
@receipt_api.route('/receipts/resolveReceipt', methods=["POST"])
def resolveReceipt():
    """Mark an entire receipt as Resolved."""
    if request.method == "POST":
        target_receipt = int(request.get_json()['receipt_id'])
        active_site = session['selected_site']
        with psycopg2.connect(**config()) as db_conn:
            postsqldb.ReceiptTable.update_receipt(db_conn, active_site, {'id': target_receipt, 'update': {'receipt_status': 'Resolved'}})
        return jsonify({'error': False, "message": "Line Saved Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while saving line!"})
|
||||
|
||||
@receipt_api.route('/receipt/uploadfile/<receipt_id>', methods=["POST"])
def uploadFile(receipt_id):
    """Attach an uploaded file to a receipt.

    Saves the file under FILES_FOLDER/receipts, generates a preview image for
    PDFs, and records the file's metadata on the receipt row's 'files' dict.

    Security: the client-supplied filename is reduced to its basename before
    being used in a filesystem path, so "../"-style names cannot escape the
    receipts folder.
    """
    file = request.files['file']
    # Sanitize once and reuse: strip directory components (path traversal)
    # and normalize spaces, instead of recomputing .replace() three times.
    safe_name = os.path.basename(file.filename).replace(" ", "_")
    file_path = current_app.config['FILES_FOLDER'] + f"/receipts/{safe_name}"
    file.save(file_path)
    file_type, _ = mimetypes.guess_type(file.filename)
    preview_image = ""
    if file_type == "application/pdf":
        # PDFs get a rendered first-page preview for the UI.
        output_path = "static/files/receipts/previews/"
        preview_image = create_pdf_preview(file_path, output_path)

    file_size = os.path.getsize(file_path)
    database_config = config()
    site_name = session['selected_site']
    username = session['user']['username']
    with psycopg2.connect(**database_config) as conn:
        files = postsqldb.ReceiptTable.select_tuple(conn, site_name, (receipt_id, ))['files']
        files[safe_name] = {'file_path': safe_name, 'file_type': file_type, 'file_size': file_size, 'uploaded_by': username, 'preview_image': preview_image}
        postsqldb.ReceiptTable.update_receipt(conn, site_name, {'id': receipt_id, 'update': {'files': files}})

    return jsonify({})
|
||||
|
||||
@receipt_api.route('/receipt/getFile/<file_name>')
def getFile(file_name):
    """Serve a previously uploaded receipt attachment from static storage."""
    # send_from_directory safely joins the path and rejects names that
    # escape the receipts folder.
    return send_from_directory('static/files/receipts', file_name)
|
||||
|
||||
@receipt_api.route('/receipts/checkAPI', methods=["POST"])
def checkAPI():
    """Enrich a receipt line with Open Food Facts data for its barcode.

    On a hit, the line is retyped to 'api', its data blob gains the raw
    'api_data', and its display name is replaced by the product name.
    On a miss, an error response is returned and the line is untouched.
    """
    if request.method == "POST":
        # Parse the body once; the original re-parsed it per field.
        body = request.get_json()
        line_id = int(body['line_id'])
        barcode = body['barcode']
        site_name = session['selected_site']
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            api_response, api_data = get_open_facts(barcode)
            if api_response:
                receipt_item = database.__selectTuple(conn, site_name, f"{site_name}_receipt_items", (line_id, ), convert=True)
                item_data = receipt_item['data']
                item_data['api_data'] = api_data
                database.__updateTuple(conn, site_name, f"{site_name}_receipt_items",
                                       {'id': line_id, 'update': {
                                           'type': 'api',
                                           'data': item_data,
                                           'name': api_data['product_name']
                                       }})
                return jsonify({'error': False, "message": "Line updated for API, Succesfully"})
            else:
                return jsonify({'error': True, "message": "Item not in WorldFoodFacts!"})
        # Unreachable: both branches above return; kept for parity with the
        # route's other endpoints.
        return jsonify({'error': False, "message": "Line Saved Succesfully"})
    return jsonify({'error': True, "message": "Something went wrong while saving line!"})
|
||||
|
||||
# Shared Open Food Facts client used by get_open_facts(); the user agent is
# required by the openfoodfacts SDK.
open_food_api = openfoodfacts.API(user_agent="MyAwesomeApp/1.0")

# Feature flag: when False, get_open_facts() skips the network lookup.
open_food_enabled = True
|
||||
|
||||
def get_open_facts(barcode):
    """Look up a barcode on Open Food Facts.

    Strips any '%' scanner padding from the barcode before querying.

    Returns:
        (True, product_dict) on a hit;
        (False, {}) on a miss or when lookups are disabled.
    """
    if open_food_enabled:
        barcode = barcode.replace('%', "")
        data = open_food_api.product.get(barcode)
        # Identity comparison with None (was 'data != None').
        if data is not None:
            return True, data
    return False, {}
|
||||
201
recipes_api.py
201
recipes_api.py
@ -1,201 +0,0 @@
|
||||
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response, current_app, send_from_directory
|
||||
import psycopg2, math, json, datetime, main, copy, requests, process, database, pprint, MyDataclasses
|
||||
from config import config, sites_config
|
||||
from main import unfoldCostLayers
|
||||
from user_api import login_required
|
||||
import os
|
||||
import postsqldb
|
||||
|
||||
recipes_api = Blueprint('recipes_api', __name__)
|
||||
|
||||
@recipes_api.route("/recipes")
@login_required
def recipes():
    """Render the recipes index page with the user's selectable sites."""
    available_sites = [entry[1] for entry in main.get_sites(session['user']['sites'])]
    return render_template(
        "recipes/index.html",
        current_site=session['selected_site'],
        sites=available_sites,
    )
|
||||
|
||||
@recipes_api.route("/recipe/<mode>/<id>")
@login_required
def recipe(mode, id):
    """Render a single recipe page in either 'edit' or 'view' mode."""
    with psycopg2.connect(**config()) as conn:
        units = postsqldb.UnitsTable.getAll(conn)

    if mode == "edit":
        return render_template("recipes/recipe_edit.html", recipe_id=id, current_site=session['selected_site'], units=units)
    if mode == "view":
        return render_template("recipes/recipe_view.html", recipe_id=id, current_site=session['selected_site'])
|
||||
|
||||
|
||||
@recipes_api.route('/recipes/getRecipes', methods=["GET"])
def getRecipes():
    """Paginated recipe listing.

    Query params: 'page' (default 1) and 'limit' (default 1).
    Returns the requested page plus 'end', the last page number.
    """
    recipes = []
    if request.method == "GET":
        page = int(request.args.get('page', 1))
        # Clamp to at least 1 so math.ceil(count/limit) cannot divide by
        # zero on a 'limit=0' query string.
        limit = max(int(request.args.get('limit', 1)), 1)
        offset = (page-1)*limit
        database_config = config()
        site_name = session['selected_site']
        with psycopg2.connect(**database_config) as conn:
            recipes, count = postsqldb.RecipesTable.getRecipes(conn, site_name, (limit, offset), convert=True)
        return jsonify({'recipes': recipes, 'end': math.ceil(count/limit), 'error': False, 'message': 'bleh'})
|
||||
|
||||
@recipes_api.route('/recipe/getRecipe', methods=["GET"])
def getRecipe():
    """Fetch a single recipe by its id query parameter."""
    recipe = {}
    if request.method == "GET":
        target_id = int(request.args.get('id', 1))
        active_site = session['selected_site']
        with psycopg2.connect(**config()) as db_conn:
            recipe = postsqldb.RecipesTable.getRecipe(db_conn, active_site, (target_id,), convert=True)
    return jsonify({'recipe': recipe, 'error': False, 'message': 'bleh'})
|
||||
|
||||
@recipes_api.route('/recipes/addRecipe', methods=["POST"])
def addRecipe():
    """Create a new recipe owned by the current user.

    JSON body: 'recipe_name' and 'recipe_description'.
    """
    # Defined up-front: the failure return below referenced 'recipe' before
    # assignment, a latent NameError on any non-POST fallthrough.
    recipe = {}
    if request.method == "POST":
        body = request.get_json()
        recipe_name = body['recipe_name']
        recipe_description = body['recipe_description']
        database_config = config()
        site_name = session['selected_site']
        user_id = session['user_id']
        with psycopg2.connect(**database_config) as conn:
            recipe = postsqldb.RecipesTable.Payload(
                name=recipe_name,
                author=user_id,
                description=recipe_description
            )
            postsqldb.RecipesTable.insert_tuple(conn, site_name, recipe.payload())
        return jsonify({'recipe': recipe, 'error': False, 'message': 'Add Recipe successful!'})
    return jsonify({'recipe': recipe, 'error': True, 'message': 'Add Recipe unsuccessful!'})
|
||||
|
||||
@recipes_api.route('/recipe/getItems', methods=["GET"])
def getItems():
    """Paginated item search (with quantity on hand) for the recipe editor.

    Query params: 'page', 'limit', 'search_string'.
    """
    recordset = []
    count = {'count': 0}
    if request.method == "GET":
        page = int(request.args.get('page', 1))
        limit = int(request.args.get('limit', 10))
        # Default was the integer 10 (copy-paste from 'limit'); a missing
        # search term should be the empty string.
        search_string = request.args.get('search_string', '')
        site_name = session['selected_site']
        offset = (page - 1) * limit
        database_config = config()
        with psycopg2.connect(**database_config) as conn:
            payload = (search_string, limit, offset)
            recordset, count = database.getItemsWithQOH(conn, site_name, payload, convert=True)
        return jsonify({"items":recordset, "end":math.ceil(count['count']/limit), "error":False, "message":"items fetched succesfully!"})
    return jsonify({"items":recordset, "end":math.ceil(count['count']/limit), "error":True, "message":"There was an error with this GET statement"})
|
||||
|
||||
|
||||
@recipes_api.route('/recipe/postUpdate', methods=["POST"])
def postUpdate():
    """Apply a partial column update to a recipe and return the result."""
    recipe = {}
    if request.method == "POST":
        body = request.get_json()
        target_id = int(body['recipe_id'])
        changes = body['update']
        active_site = session['selected_site']
        with psycopg2.connect(**config()) as db_conn:
            recipe = postsqldb.RecipesTable.updateRecipe(db_conn, active_site, {'id': target_id, 'update': changes}, convert=True)
        return jsonify({'recipe': recipe, 'error': False, 'message': 'Update of Recipe successful!'})
    return jsonify({'recipe': recipe, 'error': True, 'message': 'Update of Recipe unsuccessful!'})
|
||||
|
||||
@recipes_api.route('/recipe/postCustomItem', methods=["POST"])
def postCustomItem():
    """Add a free-form (non-SKU) line item to a recipe.

    JSON body: 'rp_id', 'item_type', 'item_name', 'uom', 'qty', 'links'.
    Returns the refreshed recipe.
    """
    recipe = {}
    if request.method == "POST":
        # Parse the body once; the original called request.get_json() for
        # every field (seven times).
        body = request.get_json()
        rp_id = int(body['rp_id'])
        database_config = config()
        site_name = session['selected_site']
        with psycopg2.connect(**database_config) as conn:
            recipe_item = postsqldb.RecipesTable.ItemPayload(
                # Synthetic uuid: recipe id + 6 random chars, '%'-delimited.
                uuid=f"%{rp_id}{database.getUUID(6)}%",
                rp_id=rp_id,
                item_type=body['item_type'],
                item_name=body['item_name'],
                uom=body['uom'],
                qty=float(body['qty']),
                links=body['links']
            )
            postsqldb.RecipesTable.insert_item_tuple(conn, site_name, recipe_item.payload(), convert=True)
            recipe = postsqldb.RecipesTable.getRecipe(conn, site_name, (rp_id, ), convert=True)
        return jsonify({'recipe': recipe, 'error': False, 'message': 'Recipe Item was added successful!'})
    return jsonify({'recipe': recipe, 'error': True, 'message': 'Recipe Item was not added unsuccessful!'})
|
||||
|
||||
@recipes_api.route('/recipe/postSKUItem', methods=["POST"])
def postSKUItem():
    """Attach an existing SKU item to a recipe as a line item."""
    recipe = {}
    if request.method == "POST":
        recipe_id = int(request.get_json()['recipe_id'])
        item_id = int(request.get_json()['item_id'])

        site_name = session['selected_site']
        with psycopg2.connect(**config()) as conn:
            source_item = database.getItemAllByID(conn, site_name, (item_id, ), convert=True)
            # Seed the recipe line from the item's own barcode, UOM and qty.
            new_line = postsqldb.RecipesTable.ItemPayload(
                uuid=source_item['barcode'],
                rp_id=recipe_id,
                item_type='sku',
                item_name=source_item['item_name'],
                uom=source_item['item_info']['uom']['id'],
                qty=float(source_item['item_info']['uom_quantity']),
                item_id=source_item['id'],
                links=source_item['links']
            )
            postsqldb.RecipesTable.insert_item_tuple(conn, site_name, new_line.payload(), convert=True)
            recipe = postsqldb.RecipesTable.getRecipe(conn, site_name, (recipe_id, ), convert=True)
        return jsonify({'recipe': recipe, 'error': False, 'message': 'Recipe Item was added successful!'})
    return jsonify({'recipe': recipe, 'error': True, 'message': 'Recipe Item was not added unsuccessful!'})
|
||||
|
||||
@recipes_api.route('/recipe/postImage/<recipe_id>', methods=["POST"])
def uploadImage(recipe_id):
    """Save an uploaded recipe picture and record its filename on the recipe.

    Security: the client-supplied filename is reduced to its basename before
    being used in a filesystem path, so "../"-style names cannot escape the
    recipes folder.
    """
    file = request.files['file']
    # Sanitize once and reuse instead of recomputing .replace() twice.
    safe_name = os.path.basename(file.filename).replace(" ", "_")
    file_path = current_app.config['UPLOAD_FOLDER'] + f"/recipes/{safe_name}"
    file.save(file_path)

    database_config = config()
    site_name = session['selected_site']
    with psycopg2.connect(**database_config) as conn:
        postsqldb.RecipesTable.updateRecipe(conn, site_name, {'id': recipe_id, 'update': {'picture_path': safe_name}})

    return jsonify({})
|
||||
|
||||
@recipes_api.route('/recipe/getImage/<recipe_id>')
def get_image(recipe_id):
    """Serve the stored picture file for a recipe."""
    active_site = session['selected_site']
    with psycopg2.connect(**config()) as db_conn:
        with db_conn.cursor() as cur:
            cur.execute(f"SELECT picture_path FROM {active_site}_recipes WHERE id=%s;", (recipe_id,))
            picture_path = cur.fetchone()[0]
    return send_from_directory('static/pictures/recipes', picture_path)
|
||||
|
||||
@recipes_api.route('/recipe/deleteRecipeItem', methods=["POST"])
|
||||
def deleteRecipeItem():
|
||||
recipe = {}
|
||||
if request.method == "POST":
|
||||
id = int(request.get_json()['id'])
|
||||
database_config = config()
|
||||
site_name = session['selected_site']
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
deleted_item = postsqldb.RecipesTable.delete_item_tuple(conn, site_name, (id, ), convert=True)
|
||||
recipe = postsqldb.RecipesTable.getRecipe(conn, site_name, (int(deleted_item['rp_id']), ), convert=True)
|
||||
return jsonify({'recipe': recipe, 'error': False, 'message': f'Recipe Item {deleted_item['item_name']} was deleted successful!'})
|
||||
return jsonify({'recipe': recipe, 'error': True, 'message': 'Recipe Item was not deleted unsuccessful!'})
|
||||
|
||||
@recipes_api.route('/recipe/saveRecipeItem', methods=["POST"])
|
||||
def saveRecipeItem():
|
||||
recipe = {}
|
||||
if request.method == "POST":
|
||||
id = int(request.get_json()['id'])
|
||||
update = request.get_json()['update']
|
||||
database_config = config()
|
||||
site_name = session['selected_site']
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
updated_line = postsqldb.RecipesTable.update_item_tuple(conn, site_name, {'id': id, 'update': update}, convert=True)
|
||||
recipe = postsqldb.RecipesTable.getRecipe(conn, site_name, (int(updated_line['rp_id']), ), convert=True)
|
||||
return jsonify({'recipe': recipe, 'error': False, 'message': f'Recipe Item {updated_line['item_name']} was updated successful!'})
|
||||
return jsonify({'recipe': recipe, 'error': True, 'message': 'Recipe Item was not updated unsuccessful!'})
|
||||
449
scratch.py
449
scratch.py
@ -1,235 +1,250 @@
|
||||
|
||||
from config import config
|
||||
import psycopg2, requests, database, MyDataclasses
|
||||
import psycopg2, requests, database
|
||||
import main, datetime, json, csv
|
||||
from main import lst2pgarr
|
||||
import process
|
||||
|
||||
def importItemFromCSV(test, site_name, uuid, site):
|
||||
logistics_info = MyDataclasses.LogisticsInfoPayload(
|
||||
barcode=test['barcode'],
|
||||
primary_location=site['default_primary_location'],
|
||||
primary_zone=site['default_zone'],
|
||||
auto_issue_location=site['default_auto_issue_location'],
|
||||
auto_issue_zone=site['default_zone'])
|
||||
|
||||
item_info = MyDataclasses.ItemInfoPayload(test['barcode'])
|
||||
headers = []
|
||||
test = []
|
||||
with open("2024-10-02-Pantry.csv", "r+", encoding="utf-8") as file:
|
||||
csv_reader = csv.DictReader(file)
|
||||
for row in csv_reader:
|
||||
try:
|
||||
if row['id'] == "430":
|
||||
test = row
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
# Food Info
|
||||
t = ['serving', 'serving_unit', 'calories', 'calories_unit', 'proteins',
|
||||
'proteins_unit', 'fats', 'fats_unit', 'carbohydrates', 'carbohydrates_unit', 'sugars', 'sugars_unit', 'sodium', 'sodium_unit',
|
||||
'fibers', 'fibers_unit']
|
||||
# print(test)
|
||||
|
||||
other_tags = [
|
||||
'serving',
|
||||
'serving_unit',
|
||||
'calories',
|
||||
'calories_unit',
|
||||
'proteins_serving',
|
||||
'proteins_unit',
|
||||
'fat_serving',
|
||||
'fat_unit',
|
||||
'carbohydrates_serving',
|
||||
'carbohydrates_unit',
|
||||
'sugars_serving',
|
||||
'sugars_unit',
|
||||
'sodium_serving',
|
||||
'sodium_unit',
|
||||
'fiber_serving',
|
||||
'fiber_unit',
|
||||
# order_of_operations
|
||||
# create_logistics
|
||||
# create_item_info
|
||||
# create_food_info
|
||||
# create_brand_info
|
||||
# return id of each and save, check to make sure you have id for each else drop out and do not commit
|
||||
site_name = "main"
|
||||
defaults = config(filename=f"sites/{site_name}/site.ini", section="defaults")
|
||||
uuid = f"{defaults["default_zone"]}@{defaults["default_primary_location"]}"
|
||||
|
||||
logistics_info_payload = [
|
||||
test['barcode'],
|
||||
uuid,
|
||||
uuid,
|
||||
json.dumps({}), # dynamic_locations
|
||||
json.dumps({}), # location_data
|
||||
0.0 # quantity_on_hand
|
||||
]
|
||||
|
||||
item_info_payload = [
|
||||
test['barcode'],
|
||||
lst2pgarr([]), # linked_items
|
||||
lst2pgarr([]), # shopping_lists
|
||||
lst2pgarr([]), # recipes
|
||||
lst2pgarr([]), # groups
|
||||
test['packaging'], # packaging
|
||||
test['product_quantity_unit'], # uom
|
||||
test['cost'], # cost
|
||||
test['safety_stock'], # safety_stock
|
||||
test['lead_time'], # lead_time_days
|
||||
False # ai_pick
|
||||
]
|
||||
|
||||
# Food Info
|
||||
t = ['serving', 'serving_unit', 'calories', 'calories_unit', 'proteins',
|
||||
'proteins_unit', 'fats', 'fats_unit', 'carbohydrates', 'carbohydrates_unit', 'sugars', 'sugars_unit', 'sodium', 'sodium_unit',
|
||||
'fibers', 'fibers_unit']
|
||||
|
||||
other_tags = [
|
||||
'serving',
|
||||
'serving_unit',
|
||||
'calories',
|
||||
'calories_unit',
|
||||
'proteins_serving',
|
||||
'proteins_unit',
|
||||
'fat_serving',
|
||||
'fat_unit',
|
||||
'carbohydrates_serving',
|
||||
'carbohydrates_unit',
|
||||
'sugars_serving',
|
||||
'sugars_unit',
|
||||
'sodium_serving',
|
||||
'sodium_unit',
|
||||
'fiber_serving',
|
||||
'fiber_unit',
|
||||
]
|
||||
|
||||
nutriments = test['nutriments'].replace("'", '"')
|
||||
nutriments = nutriments.replace("{", "").replace("}", "")
|
||||
key_values = nutriments.split(", ")
|
||||
nutriments = {}
|
||||
|
||||
if key_values != ['']:
|
||||
for s in key_values:
|
||||
s= s.split(": ")
|
||||
k = s[0].replace('"', "")
|
||||
v = s[1].replace('"', "")
|
||||
nutriments[k] = v
|
||||
|
||||
nutrients = {}
|
||||
for i in range(len(other_tags)):
|
||||
if other_tags[i] in nutriments.keys():
|
||||
nutrients[t[i]] = nutriments[other_tags[i]]
|
||||
else:
|
||||
nutrients[t[i]] = ''
|
||||
|
||||
food_groups = test['food_groups_tags']
|
||||
food_groups = food_groups.replace('[', "").replace("]", "")
|
||||
food_groups = food_groups.replace("'", "")
|
||||
food_groups = food_groups.split(", ")
|
||||
|
||||
ingrediants = test['ingredients_hierarchy']
|
||||
ingrediants = ingrediants.replace('[', "").replace("]", "")
|
||||
ingrediants = ingrediants.replace("'", "")
|
||||
ingrediants = ingrediants.split(", ")
|
||||
|
||||
food_info_payload = [
|
||||
lst2pgarr(food_groups), # food_groups
|
||||
lst2pgarr(ingrediants), # ingrediants
|
||||
json.dumps(nutrients),
|
||||
False # expires
|
||||
]
|
||||
|
||||
brand_payload = [test['brands'],]
|
||||
|
||||
|
||||
logistics_info_id = 0
|
||||
item_info_id = 0
|
||||
food_info_id = 0
|
||||
brand_id = 0
|
||||
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
logistics_info = database.insertLogisticsInfoTuple(conn, "main", logistics_info_payload)
|
||||
item_info = database.insertItemInfoTuple(conn, "main", item_info_payload)
|
||||
food_info = database.insertFoodInfoTuple(conn, "main", food_info_payload)
|
||||
brand = database.insertBrandsTuple(conn, "main", brand_payload)
|
||||
|
||||
print("Logistics:", logistics_info)
|
||||
print("item_info:", item_info)
|
||||
print("food_info:", food_info)
|
||||
print("brand:", brand)
|
||||
|
||||
name = test['name']
|
||||
name = name.replace("'", "@&apostraphe&")
|
||||
description = ""
|
||||
tags = lst2pgarr([])
|
||||
links = json.dumps({})
|
||||
search_string = f"{test['barcode']}&{name}"
|
||||
|
||||
item_payload = [
|
||||
test['barcode'],
|
||||
test['name'],
|
||||
brand[0],
|
||||
description,
|
||||
tags,
|
||||
links,
|
||||
item_info[0],
|
||||
logistics_info[0],
|
||||
food_info[0],
|
||||
"single",
|
||||
test["sub_type"],
|
||||
search_string
|
||||
]
|
||||
|
||||
print("Item:", item_payload)
|
||||
|
||||
item = database.insertItemTuple(conn, "main", item_payload)
|
||||
print(item)
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(f"SELECT id FROM {site_name}_locations WHERE uuid=%s;", (uuid, ))
|
||||
location_id = cur.fetchone()[0]
|
||||
|
||||
|
||||
print("Location ID:", location_id)
|
||||
|
||||
item_location_payload = [
|
||||
item[0],
|
||||
location_id,
|
||||
0.0,
|
||||
main.lst2pgarr([])
|
||||
]
|
||||
|
||||
location = database.insertItemLocationsTuple(conn, site_name, item_location_payload)
|
||||
|
||||
print("Item location:", location)
|
||||
|
||||
creation_payload = [
|
||||
datetime.datetime.now(),
|
||||
logistics_info[0],
|
||||
test['barcode'],
|
||||
name,
|
||||
"SYSTEM",
|
||||
0.0,
|
||||
"Item Added to System!",
|
||||
1,
|
||||
json.dumps({'location': uuid})
|
||||
]
|
||||
|
||||
nutriments = test['nutriments'].replace("'", '"')
|
||||
nutriments = nutriments.replace("{", "").replace("}", "")
|
||||
key_values = nutriments.split(", ")
|
||||
nutriments = {}
|
||||
|
||||
if key_values != ['']:
|
||||
for s in key_values:
|
||||
s= s.split(": ")
|
||||
k = s[0].replace('"', "")
|
||||
v = s[1].replace('"', "")
|
||||
nutriments[k] = v
|
||||
transaction = database.insertTransactionsTuple(conn, site_name, creation_payload)
|
||||
|
||||
nutrients = {}
|
||||
for i in range(len(other_tags)):
|
||||
if other_tags[i] in nutriments.keys():
|
||||
nutrients[t[i]] = nutriments[other_tags[i]]
|
||||
else:
|
||||
nutrients[t[i]] = ''
|
||||
print("transaction:", transaction)
|
||||
|
||||
food_groups = test['food_groups_tags']
|
||||
food_groups = food_groups.replace('[', "").replace("]", "")
|
||||
food_groups = food_groups.replace("'", "")
|
||||
food_groups = food_groups.split(", ")
|
||||
qoh = float(test['qty_on_hand'])
|
||||
print(qoh, type(qoh))
|
||||
if qoh != 0.0:
|
||||
if qoh >= 0.0:
|
||||
trans_type = "Adjust In"
|
||||
else:
|
||||
trans_type = "Adjust Out"
|
||||
|
||||
ingrediants = test['ingredients_hierarchy']
|
||||
ingrediants = ingrediants.replace('[', "").replace("]", "")
|
||||
ingrediants = ingrediants.replace("'", "")
|
||||
ingrediants = ingrediants.split(", ")
|
||||
adjustment_payload = [
|
||||
datetime.datetime.now(),
|
||||
logistics_info[0],
|
||||
test['barcode'],
|
||||
name,
|
||||
trans_type,
|
||||
qoh,
|
||||
"",
|
||||
1,
|
||||
json.dumps({'location': uuid, 'cost': item_info[8]})
|
||||
]
|
||||
|
||||
transaction = database.insertTransactionsTuple(conn, site_name, adjustment_payload)
|
||||
print("transaction:", transaction)
|
||||
|
||||
cost_layer_payload = [
|
||||
datetime.datetime.now(),
|
||||
float(qoh),
|
||||
float(test['cost']),
|
||||
'USD',
|
||||
None,
|
||||
0
|
||||
]
|
||||
print(cost_layer_payload)
|
||||
cost_layer = database.insertCostLayersTuple(conn, site_name, cost_layer_payload)
|
||||
print("cost_layer:", cost_layer)
|
||||
|
||||
layer_payload = [
|
||||
cost_layer[0],
|
||||
float(location[3]) + float(qoh),
|
||||
location_id, # location_id
|
||||
item[0] # part_id
|
||||
]
|
||||
|
||||
print(layer_payload)
|
||||
location = database.updateItemLocation(conn, site_name, layer_payload)
|
||||
print(location)
|
||||
|
||||
print("\n")
|
||||
conn.commit()
|
||||
|
||||
# need to insert into Item_Locations, part_id and location id
|
||||
|
||||
|
||||
food_info = MyDataclasses.FoodInfoPayload(food_groups, ingrediants, nutrients)
|
||||
|
||||
if test['brands'] != "":
|
||||
brand = MyDataclasses.BrandsPayload(test['brands'])
|
||||
qoh = float(test['qty_on_hand'])
|
||||
|
||||
logistics_info_id = 0
|
||||
item_info_id = 0
|
||||
food_info_id = 0
|
||||
brand_id = 1
|
||||
|
||||
database_config = config()
|
||||
try:
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
logistics_info = database.insertLogisticsInfoTuple(conn, site_name, logistics_info.payload())
|
||||
item_info = database.insertItemInfoTuple(conn, site_name, item_info.payload())
|
||||
food_info = database.insertFoodInfoTuple(conn, site_name, food_info.payload())
|
||||
if test['brands'] != "":
|
||||
brand = database.insertBrandsTuple(conn, site_name, brand.payload())
|
||||
brand_id = brand[0]
|
||||
|
||||
print("Logistics:", logistics_info)
|
||||
print("item_info:", item_info)
|
||||
print("food_info:", food_info)
|
||||
print("brand:", brand_id)
|
||||
|
||||
name = test['name']
|
||||
name = name.replace("'", "@&apostraphe&")
|
||||
description = ""
|
||||
tags = lst2pgarr([])
|
||||
links = json.dumps({})
|
||||
search_string = f"&&{test['barcode']}&&{name}&&"
|
||||
|
||||
|
||||
item = MyDataclasses.ItemsPayload(test['barcode'], test['name'], item_info[0],
|
||||
logistics_info[0], food_info[0], brand=brand_id,
|
||||
row_type="single", item_type=test["sub_type"], search_string=search_string)
|
||||
|
||||
item = database.insertItemTuple(conn, site_name, item.payload(), convert=True)
|
||||
item = database.getItemAllByID(conn, site_name, (item['id'], ), convert=True)
|
||||
print("Item:", item)
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(f"SELECT id FROM {site_name}_locations WHERE uuid=%s;", (uuid, ))
|
||||
location_id = cur.fetchone()[0]
|
||||
|
||||
|
||||
print("Location ID:", location_id)
|
||||
item_location = MyDataclasses.ItemLocationPayload(item['id'], location_id)
|
||||
location = database.insertItemLocationsTuple(conn, site_name, item_location.payload(), convert=True)
|
||||
|
||||
print("Item location:", location)
|
||||
|
||||
creation_tuple = MyDataclasses.TransactionPayload(
|
||||
datetime.datetime.now(),
|
||||
logistics_info[0],
|
||||
item['barcode'],
|
||||
item['item_name'],
|
||||
"SYSTEM",
|
||||
0.0,
|
||||
"Item added to the System!",
|
||||
1,
|
||||
{'location': uuid}
|
||||
)
|
||||
|
||||
|
||||
database.insertTransactionsTuple(conn, site_name, creation_tuple.payload())
|
||||
|
||||
qoh = float(test['qty_on_hand'])
|
||||
print(qoh, type(qoh))
|
||||
trans_type = "Adjust In"
|
||||
if qoh != 0.0:
|
||||
if qoh >= 0.0:
|
||||
trans_type = "Adjust In"
|
||||
else:
|
||||
trans_type = "Adjust Out"
|
||||
|
||||
payload = {
|
||||
'item_id': item['id'],
|
||||
'logistics_info_id': item['logistics_info_id'],
|
||||
'barcode': item['barcode'],
|
||||
'item_name': item['item_name'],
|
||||
'transaction_type': trans_type,
|
||||
'quantity': float(qoh),
|
||||
'description': f'creation quantity',
|
||||
'cost': item['item_info']['cost'],
|
||||
'vendor': 1,
|
||||
'expires': None,
|
||||
'location_id': location_id
|
||||
}
|
||||
|
||||
process.postTransaction(conn, site_name, 1, payload)
|
||||
conn.commit()
|
||||
except Exception as error:
|
||||
print(error, item_info)
|
||||
|
||||
|
||||
def importCSV(path, site_name):
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
site = database.selectSiteTuple(conn, (site_name,), convert=True)
|
||||
default_zone = database.__selectTuple(conn, site_name, f"{site_name}_zones", (site['default_zone'], ), convert=True)
|
||||
default_location = database.__selectTuple(conn, site_name, f"{site_name}_locations", (site['default_primary_location'],), convert=True)
|
||||
|
||||
|
||||
uuid = f"{default_zone['name']}@{default_location['name']}"
|
||||
print(uuid)
|
||||
with open(path, "r+", encoding="utf-8") as file:
|
||||
csv_reader = csv.DictReader(file)
|
||||
for row in csv_reader:
|
||||
try:
|
||||
importItemFromCSV(row, site_name, uuid, site)
|
||||
except Exception as error:
|
||||
with open("process.log", "a+") as file:
|
||||
file.write("\n")
|
||||
file.write(f"{datetime.datetime.now()} --- CAUTION --- {error}\n")
|
||||
file.write(f"{" "*41}{json.dumps(row)}")
|
||||
|
||||
#importCSV("2025-03-19-Pantry (1).csv", "main")
|
||||
|
||||
def importLinkFromCSV(row, site_name, conn):
|
||||
barcode = row['barcode']
|
||||
link_barcode=row['link_barcode']
|
||||
item_data=json.loads(row['data'].replace('\\j*s*o*n\\', ""))
|
||||
conv_factor=row['conv_factor']
|
||||
|
||||
link_item = database.getItemAllByBarcode(conn, site_name, (link_barcode, ), convert=True)
|
||||
|
||||
link = MyDataclasses.ItemLinkPayload(
|
||||
barcode=barcode,
|
||||
link=link_item['id'],
|
||||
data=item_data,
|
||||
conv_factor=conv_factor
|
||||
)
|
||||
|
||||
newitem = {
|
||||
'barcode': barcode,
|
||||
'name': item_data['name'],
|
||||
'subtype': ''
|
||||
}
|
||||
|
||||
try:
|
||||
process.postNewBlankItem(conn, site_name, 1, newitem)
|
||||
except Exception as error:
|
||||
print(error)
|
||||
pass
|
||||
|
||||
lin = database.insertItemLinksTuple(conn, site_name, link.payload())
|
||||
print(lin)
|
||||
|
||||
def importLinksFromCSV(path, site_name):
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
with open(path, "r+", encoding="utf-8") as file:
|
||||
csv_reader = csv.DictReader(file)
|
||||
for row in csv_reader:
|
||||
try:
|
||||
importLinkFromCSV(row, site_name, conn)
|
||||
except Exception as error:
|
||||
with open("process.log", "a+") as file:
|
||||
file.write("\n")
|
||||
file.write(f"{datetime.datetime.now()} --- CAUTION --- {error}\n")
|
||||
file.write(f"{" "*41}{json.dumps(row)}")
|
||||
|
||||
importLinksFromCSV("test.csv", 'test')
|
||||
# transact qoh into the system
|
||||
@ -1,186 +0,0 @@
|
||||
from flask import Blueprint, request, render_template, redirect, session, url_for, send_file, jsonify, Response
|
||||
import psycopg2, math, json, datetime, main, copy, requests, process, database, pprint, MyDataclasses
|
||||
from config import config, sites_config
|
||||
from main import unfoldCostLayers
|
||||
from user_api import login_required
|
||||
import postsqldb
|
||||
|
||||
shopping_list_api = Blueprint('shopping_list_API', __name__)
|
||||
|
||||
@shopping_list_api.route("/shopping-lists")
|
||||
@login_required
|
||||
def shopping_lists():
|
||||
sites = [site[1] for site in main.get_sites(session['user']['sites'])]
|
||||
return render_template("shopping-lists/index.html", current_site=session['selected_site'], sites=sites)
|
||||
|
||||
@shopping_list_api.route("/shopping-list/<mode>/<id>")
|
||||
@login_required
|
||||
def shopping_list(mode, id):
|
||||
sites = [site[1] for site in main.get_sites(session['user']['sites'])]
|
||||
if mode == "view":
|
||||
return render_template("shopping-lists/view.html", id=id, current_site=session['selected_site'], sites=sites)
|
||||
if mode == "edit":
|
||||
return render_template("shopping-lists/edit.html", id=id, current_site=session['selected_site'], sites=sites)
|
||||
return redirect("/")
|
||||
|
||||
@shopping_list_api.route('/shopping-lists/addList', methods=["POST"])
|
||||
def addList():
|
||||
if request.method == "POST":
|
||||
list_name = request.get_json()['list_name']
|
||||
list_description = request.get_json()['list_description']
|
||||
list_type = request.get_json()['list_type']
|
||||
database_config = config()
|
||||
site_name = session['selected_site']
|
||||
user_id = session['user_id']
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
shopping_list = MyDataclasses.ShoppingListPayload(
|
||||
name=list_name,
|
||||
description=list_description,
|
||||
author=user_id,
|
||||
type=list_type
|
||||
)
|
||||
database.insertShoppingListsTuple(conn, site_name, shopping_list.payload())
|
||||
return jsonify({'error': False, 'message': 'List added!!'})
|
||||
return jsonify({'error': True, 'message': 'These was an error with adding the list!'})
|
||||
|
||||
@shopping_list_api.route('/shopping-lists/getLists', methods=["GET"])
|
||||
def getShoppingLists():
|
||||
lists = []
|
||||
if request.method == "GET":
|
||||
page = int(request.args.get('page', 1))
|
||||
limit = int(request.args.get('limit', 1))
|
||||
offset = (page-1)*limit
|
||||
database_config = config()
|
||||
site_name = session['selected_site']
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
lists, count = database.getShoppingLists(conn, site_name, (limit, offset), convert=True)
|
||||
|
||||
for list in lists:
|
||||
|
||||
if list['type'] == 'calculated':
|
||||
items = []
|
||||
not_items = database.getItemsSafetyStock(conn, site_name, convert=True)
|
||||
for item in not_items:
|
||||
new_item = {
|
||||
'id': item['id'],
|
||||
'uuid': item['barcode'],
|
||||
'sl_id': 0,
|
||||
'item_type': 'sku',
|
||||
'item_name': item['item_name'],
|
||||
'uom': item['uom'],
|
||||
'qty': float(float(item['safety_stock']) - float(item['total_sum'])),
|
||||
'item_id': item['id'],
|
||||
'links': item['links']
|
||||
}
|
||||
items.append(new_item)
|
||||
list['sl_items'] = items
|
||||
|
||||
return jsonify({'shopping_lists': lists, 'end':math.ceil(count/limit), 'error': False, 'message': 'Lists queried successfully!'})
|
||||
|
||||
@shopping_list_api.route('/shopping-lists/getList', methods=["GET"])
|
||||
def getShoppingList():
|
||||
if request.method == "GET":
|
||||
sl_id = int(request.args.get('id', 1))
|
||||
database_config = config()
|
||||
site_name = session['selected_site']
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
lists = database.getShoppingList(conn, site_name, (sl_id, ), convert=True)
|
||||
return jsonify({'shopping_list': lists, 'error': False, 'message': 'Lists queried successfully!'})
|
||||
|
||||
@shopping_list_api.route('/shopping-lists/getListItem', methods=["GET"])
|
||||
def getShoppingListItem():
|
||||
list_item = {}
|
||||
if request.method == "GET":
|
||||
sli_id = int(request.args.get('sli_id', 1))
|
||||
database_config = config()
|
||||
site_name = session['selected_site']
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
list_item = postsqldb.ShoppingListsTable.getItem(conn, site_name, (sli_id, ))
|
||||
return jsonify({'list_item': list_item, 'error': False, 'message': 'Lists Items queried successfully!'})
|
||||
return jsonify({'list_item': list_item, 'error': True, 'message': 'List Items queried unsuccessfully!'})
|
||||
|
||||
@shopping_list_api.route('/shopping-lists/getItems', methods=["GET"])
|
||||
def getItems():
|
||||
recordset = []
|
||||
count = {'count': 0}
|
||||
if request.method == "GET":
|
||||
page = int(request.args.get('page', 1))
|
||||
limit = int(request.args.get('limit', 10))
|
||||
search_string = request.args.get('search_string', 10)
|
||||
site_name = session['selected_site']
|
||||
offset = (page - 1) * limit
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
payload = (search_string, limit, offset)
|
||||
recordset, count = database.getItemsWithQOH(conn, site_name, payload, convert=True)
|
||||
return jsonify({"items":recordset, "end":math.ceil(count['count']/limit), "error":False, "message":"items fetched succesfully!"})
|
||||
return jsonify({"items":recordset, "end":math.ceil(count['count']/limit), "error":True, "message":"There was an error with this GET statement"})
|
||||
|
||||
@shopping_list_api.route('/shopping-lists/postListItem', methods=["POST"])
|
||||
def postListItem():
|
||||
if request.method == "POST":
|
||||
data = request.get_json()['data']
|
||||
site_name = session['selected_site']
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
sl_item = MyDataclasses.ShoppingListItemPayload(
|
||||
uuid = data['uuid'],
|
||||
sl_id = data['sl_id'],
|
||||
item_type=data['item_type'],
|
||||
item_name=data['item_name'],
|
||||
uom=data['uom'],
|
||||
qty=data['qty'],
|
||||
item_id=data['item_id'],
|
||||
links=data['links']
|
||||
)
|
||||
database.insertShoppingListItemsTuple(conn, site_name, sl_item.payload())
|
||||
return jsonify({"error":False, "message":"items fetched succesfully!"})
|
||||
return jsonify({"error":True, "message":"There was an error with this GET statement"})
|
||||
|
||||
@shopping_list_api.route('/shopping-lists/deleteListItem', methods=["POST"])
|
||||
def deleteListItem():
|
||||
if request.method == "POST":
|
||||
sli_id = request.get_json()['sli_id']
|
||||
site_name = session['selected_site']
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
database.deleteShoppingListItemsTuple(conn, site_name, (sli_id, ))
|
||||
return jsonify({"error":False, "message":"item deleted succesfully!"})
|
||||
return jsonify({"error":True, "message":"There was an error with this POST statement"})
|
||||
|
||||
@shopping_list_api.route('/shopping-lists/saveListItem', methods=["POST"])
|
||||
def saveListItem():
|
||||
if request.method == "POST":
|
||||
sli_id = request.get_json()['sli_id']
|
||||
update = request.get_json()['update']
|
||||
site_name = session['selected_site']
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
database.__updateTuple(conn, site_name, f"{site_name}_shopping_list_items", {'id': sli_id, 'update': update})
|
||||
return jsonify({"error":False, "message":"items fetched succesfully!"})
|
||||
return jsonify({"error":True, "message":"There was an error with this GET statement"})
|
||||
|
||||
@shopping_list_api.route('/shopping-lists/getSKUItemsFull', methods=["GET"])
|
||||
def getSKUItemsFull():
|
||||
items = []
|
||||
count = {'count': 0}
|
||||
if request.method == "GET":
|
||||
site_name = session['selected_site']
|
||||
database_config = config()
|
||||
with psycopg2.connect(**database_config) as conn:
|
||||
not_items = database.getItemsSafetyStock(conn, site_name, convert=True)
|
||||
for item in not_items:
|
||||
new_item = {
|
||||
'id': item['id'],
|
||||
'uuid': item['barcode'],
|
||||
'sl_id': 0,
|
||||
'item_type': 'sku',
|
||||
'item_name': item['item_name'],
|
||||
'uom': item['uom'],
|
||||
'qty': float(float(item['safety_stock']) - float(item['total_sum'])),
|
||||
'item_id': item['id'],
|
||||
'links': item['links']
|
||||
}
|
||||
items.append(new_item)
|
||||
return jsonify({"list_items":items, "error":False, "message":"items fetched succesfully!"})
|
||||
return jsonify({"list_items":items, "error":True, "message":"There was an error with this GET statement"})
|
||||
@ -23,7 +23,8 @@ CREATE TABLE IF NOT EXISTS %sitename%_items(
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_brand
|
||||
FOREIGN KEY(brand)
|
||||
REFERENCES %sitename%_brands(id),
|
||||
REFERENCES %sitename%_brands(id)
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_logistics_info
|
||||
FOREIGN KEY(logistics_info_id)
|
||||
REFERENCES %sitename%_logistics_info(id)
|
||||
|
||||
@ -7,8 +7,10 @@ CREATE TABLE IF NOT EXISTS %sitename%_item_locations(
|
||||
UNIQUE(part_id, location_id),
|
||||
CONSTRAINT fk_part_id
|
||||
FOREIGN KEY(part_id)
|
||||
REFERENCES %sitename%_items(id),
|
||||
REFERENCES %sitename%_items(id)
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_location_id
|
||||
FOREIGN KEY(location_id)
|
||||
REFERENCES %sitename%_locations(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
@ -10,5 +10,4 @@ CREATE TABLE IF NOT EXISTS %sitename%_receipt_items (
|
||||
CONSTRAINT fk_receipt
|
||||
FOREIGN KEY(receipt_id)
|
||||
REFERENCES %sitename%_receipts(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
@ -1,20 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %sitename%_shopping_list_items (
|
||||
id SERIAL PRIMARY KEY,
|
||||
uuid VARCHAR(32) NOT NULL,
|
||||
sl_id INTEGER NOT NULL,
|
||||
item_type VARCHAR(32) NOT NULL,
|
||||
item_name TEXT NOT NULL,
|
||||
uom VARCHAR(32) NOT NULL,
|
||||
qty FLOAT8 NOT NULL,
|
||||
item_id INTEGER DEFAULT NULL,
|
||||
links JSONB DEFAULT '{"main": ""}',
|
||||
UNIQUE(uuid),
|
||||
CONSTRAINT fk_sl_id
|
||||
FOREIGN KEY(sl_id)
|
||||
REFERENCES %sitename%_shopping_lists(id)
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_item_id
|
||||
FOREIGN KEY(item_id)
|
||||
REFERENCES %sitename%_items(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
@ -1,7 +1,12 @@
|
||||
CREATE TABLE IF NOT EXISTS %sitename%_shopping_lists (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
description TEXT,
|
||||
pantry_items INTEGER [],
|
||||
custom_items JSONB,
|
||||
recipes INTEGER [],
|
||||
groups INTEGER [],
|
||||
quantities JSONB,
|
||||
author INTEGER,
|
||||
creation_date TIMESTAMP,
|
||||
type VARCHAR(64),
|
||||
|
||||
@ -1 +1 @@
|
||||
DROP TABLE %%site_name%%_brands CASCADE;
|
||||
DROP TABLE %sitename%_brands CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %sitename%_shopping_list_items CASCADE;
|
||||
@ -23,7 +23,8 @@ CREATE TABLE IF NOT EXISTS main_items(
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_brand
|
||||
FOREIGN KEY(brand)
|
||||
REFERENCES main_brands(id),
|
||||
REFERENCES main_brands(id)
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_logistics_info
|
||||
FOREIGN KEY(logistics_info_id)
|
||||
REFERENCES main_logistics_info(id)
|
||||
|
||||
@ -7,8 +7,10 @@ CREATE TABLE IF NOT EXISTS main_item_locations(
|
||||
UNIQUE(part_id, location_id),
|
||||
CONSTRAINT fk_part_id
|
||||
FOREIGN KEY(part_id)
|
||||
REFERENCES main_items(id),
|
||||
REFERENCES main_items(id)
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_location_id
|
||||
FOREIGN KEY(location_id)
|
||||
REFERENCES main_locations(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
@ -10,5 +10,4 @@ CREATE TABLE IF NOT EXISTS main_receipt_items (
|
||||
CONSTRAINT fk_receipt
|
||||
FOREIGN KEY(receipt_id)
|
||||
REFERENCES main_receipts(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
@ -1,20 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS main_shopping_list_items (
|
||||
id SERIAL PRIMARY KEY,
|
||||
uuid VARCHAR(32) NOT NULL,
|
||||
sl_id INTEGER NOT NULL,
|
||||
item_type VARCHAR(32) NOT NULL,
|
||||
item_name TEXT NOT NULL,
|
||||
uom VARCHAR(32) NOT NULL,
|
||||
qty FLOAT8 NOT NULL,
|
||||
item_id INTEGER DEFAULT NULL,
|
||||
links JSONB DEFAULT '{"main": ""}',
|
||||
UNIQUE(uuid),
|
||||
CONSTRAINT fk_sl_id
|
||||
FOREIGN KEY(sl_id)
|
||||
REFERENCES main_shopping_lists(id)
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_item_id
|
||||
FOREIGN KEY(item_id)
|
||||
REFERENCES main_items(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
@ -1,7 +1,12 @@
|
||||
CREATE TABLE IF NOT EXISTS main_shopping_lists (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
description TEXT,
|
||||
pantry_items INTEGER [],
|
||||
custom_items JSONB,
|
||||
recipes INTEGER [],
|
||||
groups INTEGER [],
|
||||
quantities JSONB,
|
||||
author INTEGER,
|
||||
creation_date TIMESTAMP,
|
||||
type VARCHAR(64),
|
||||
|
||||
@ -1 +1 @@
|
||||
DROP TABLE %%site_name%%_brands CASCADE;
|
||||
DROP TABLE main_brands CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE main_shopping_list_items CASCADE;
|
||||
@ -1,4 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_brands (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255)
|
||||
);
|
||||
@ -1,7 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_conversions (
|
||||
id SERIAL PRIMARY KEY,
|
||||
item_id INTEGER NOT NULL,
|
||||
uom_id INTEGER NOT NULL,
|
||||
conv_factor FLOAT8 NOT NULL,
|
||||
UNIQUE(item_id, uom_id)
|
||||
);
|
||||
@ -1,9 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_cost_layers (
|
||||
id SERIAL PRIMARY KEY,
|
||||
aquisition_date TIMESTAMP NOT NULL,
|
||||
quantity FLOAT8 NOT NULL,
|
||||
cost FLOAT8 NOT NULL,
|
||||
currency_type VARCHAR(16) NOT NULL,
|
||||
expires TIMESTAMP,
|
||||
vendor INTEGER DEFAULT 0
|
||||
);
|
||||
@ -1,8 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_food_info (
|
||||
id SERIAL PRIMARY KEY,
|
||||
food_groups TEXT [],
|
||||
ingrediants TEXT [],
|
||||
nutrients JSONB,
|
||||
expires BOOLEAN,
|
||||
default_expiration FLOAT8
|
||||
);
|
||||
@ -1,20 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_group_items(
|
||||
id SERIAL PRIMARY KEY,
|
||||
uuid VARCHAR(32) NOT NULL,
|
||||
gr_id INTEGER NOT NULL,
|
||||
item_type VARCHAR(32) NOT NULL,
|
||||
item_name TEXT NOT NULL,
|
||||
uom INTEGER NOT NULL,
|
||||
qty FLOAT8 NOT NULL,
|
||||
item_id INTEGER DEFAULT NULL,
|
||||
links JSONB DEFAULT '{"main": ""}',
|
||||
UNIQUE(uuid),
|
||||
CONSTRAINT fk_gr_id
|
||||
FOREIGN KEY(gr_id)
|
||||
REFERENCES %%site_name%%_groups(id)
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_item_id
|
||||
FOREIGN KEY(item_id)
|
||||
REFERENCES %%site_name%%_items(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
@ -1,7 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_groups(
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
group_type VARCHAR(255),
|
||||
UNIQUE (name)
|
||||
);
|
||||
@ -1,31 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_items(
|
||||
id SERIAL PRIMARY KEY,
|
||||
barcode VARCHAR(255) NOT NULL,
|
||||
item_name VARCHAR(255) NOT NULL,
|
||||
brand INTEGER,
|
||||
description TEXT,
|
||||
tags TEXT [],
|
||||
links JSONB,
|
||||
item_info_id INTEGER NOT NULL,
|
||||
logistics_info_id INTEGER NOT NULL,
|
||||
food_info_id INTEGER,
|
||||
row_type VARCHAR(255) NOT NULL,
|
||||
item_type VARCHAR(255) NOT NULL,
|
||||
search_string TEXT NOT NULL,
|
||||
UNIQUE(barcode, item_info_id),
|
||||
CONSTRAINT fk_item_info
|
||||
FOREIGN KEY(item_info_id)
|
||||
REFERENCES %%site_name%%_item_info(id)
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_food_info
|
||||
FOREIGN KEY(food_info_id)
|
||||
REFERENCES %%site_name%%_food_info(id)
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_brand
|
||||
FOREIGN KEY(brand)
|
||||
REFERENCES %%site_name%%_brands(id),
|
||||
CONSTRAINT fk_logistics_info
|
||||
FOREIGN KEY(logistics_info_id)
|
||||
REFERENCES %%site_name%%_logistics_info(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
@ -1,13 +0,0 @@
|
||||
CREATE TABLE IF NOt EXISTS %%site_name%%_item_info (
|
||||
id SERIAL PRIMARY KEY,
|
||||
barcode VARCHAR(255) NOT NULL,
|
||||
packaging VARCHAR(255),
|
||||
uom_quantity FLOAT8,
|
||||
uom INTEGER,
|
||||
cost FLOAT8,
|
||||
safety_stock FLOAT8,
|
||||
lead_time_days FLOAT8,
|
||||
ai_pick BOOLEAN,
|
||||
prefixes INTEGER [],
|
||||
UNIQUE(barcode)
|
||||
);
|
||||
@ -1,16 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_item_locations(
|
||||
id SERIAL PRIMARY KEY,
|
||||
part_id INTEGER NOT NULL,
|
||||
location_id INTEGER NOT NULL,
|
||||
quantity_on_hand FLOAT8 NOT NULL,
|
||||
cost_layers INTEGER[] DEFAULT '{}',
|
||||
UNIQUE(part_id, location_id),
|
||||
CONSTRAINT fk_part_id
|
||||
FOREIGN KEY(part_id)
|
||||
REFERENCES %%site_name%%_items(id)
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_location_id
|
||||
FOREIGN KEY(location_id)
|
||||
REFERENCES %%site_name%%_locations(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
@ -1,8 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_itemlinks (
|
||||
id SERIAL PRIMARY KEY,
|
||||
barcode VARCHAR(255) NOt NULL,
|
||||
link INTEGER NOT NULL,
|
||||
data JSONB NOT NULL,
|
||||
conv_factor FLOAT8 NOt NULL,
|
||||
UNIQUE(barcode)
|
||||
);
|
||||
@ -1,10 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_locations(
|
||||
id SERIAL PRIMARY KEY,
|
||||
uuid VARCHAR(255) NOT NULL,
|
||||
name VARCHAR(32) NOT NULL,
|
||||
zone_id INTEGER NOT NULL,
|
||||
UNIQUE(uuid),
|
||||
CONSTRAINT fk_zone
|
||||
FOREIGN KEY(zone_id)
|
||||
REFERENCES %%site_name%%_zones(id)
|
||||
);
|
||||
@ -1,23 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS logins(
|
||||
id SERIAL PRIMARY KEY,
|
||||
username VARCHAR(255),
|
||||
password VARCHAR(255),
|
||||
email VARCHAR(255) UNIQUE NOT NULL,
|
||||
favorites JSONB DEFAULT '{}',
|
||||
unseen_pantry_items INTEGER [] DEFAULT '{}',
|
||||
unseen_groups INTEGER [] DEFAULT '{}',
|
||||
unseen_shopping_lists INTEGER [] DEFAULT '{}',
|
||||
unseen_recipes INTEGER [] DEFAULT '{}',
|
||||
seen_pantry_items INTEGER [] DEFAULT '{}',
|
||||
seen_groups INTEGER[] DEFAULT '{}',
|
||||
seen_shopping_lists INTEGER [] DEFAULT '{}',
|
||||
seen_recipes INTEGER [] DEFAULT '{}',
|
||||
sites INTEGER [] DEFAULT '{}',
|
||||
site_roles INTEGER [] DEFAULT '{}',
|
||||
system_admin BOOLEAN DEFAULT FALSE,
|
||||
flags JSONB DEFAULT '{}',
|
||||
row_type VARCHAR(50),
|
||||
UNIQUE(username),
|
||||
CHECK (email ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}$')
|
||||
);
|
||||
|
||||
@ -1,21 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_logistics_info(
|
||||
id SERIAL PRIMARY KEY,
|
||||
barcode VARCHAR(255) NOT NULL,
|
||||
primary_location INTEGER NOT NULL,
|
||||
primary_zone INTEGER NOT NULL,
|
||||
auto_issue_location INTEGER NOT NULL,
|
||||
auto_issue_zone INTEGER NOT NULL,
|
||||
UNIQUE(barcode),
|
||||
CONSTRAINT fk_primary_location
|
||||
FOREIGN KEY(primary_location)
|
||||
REFERENCES %%site_name%%_locations(id),
|
||||
CONSTRAINT fk_primary_zone
|
||||
FOREIGN KEY(primary_zone)
|
||||
REFERENCES %%site_name%%_zones(id),
|
||||
CONSTRAINT fk_auto_issue_location
|
||||
FOREIGN KEY(auto_issue_location)
|
||||
REFERENCES %%site_name%%_locations(id),
|
||||
CONSTRAINT fk_auto_issue_zone
|
||||
FOREIGN KEY(auto_issue_zone)
|
||||
REFERENCES %%site_name%%_zones(id)
|
||||
);
|
||||
@ -1,15 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_receipt_items (
|
||||
id SERIAL PRIMARY KEY,
|
||||
type VARCHAR(255) NOT NULL,
|
||||
receipt_id INTEGER NOT NULL,
|
||||
barcode VARCHAR(255) NOT NULL,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
qty FLOAT8 NOT NULL,
|
||||
uom VARCHAR(32) NOT NULL,
|
||||
data JSONB,
|
||||
status VARCHAR (64),
|
||||
CONSTRAINT fk_receipt
|
||||
FOREIGN KEY(receipt_id)
|
||||
REFERENCES %%site_name%%_receipts(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
@ -1,13 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_receipts (
|
||||
id SERIAL PRIMARY KEY,
|
||||
receipt_id VARCHAR (32) NOT NULL,
|
||||
receipt_status VARCHAR (64) NOT NULL,
|
||||
date_submitted TIMESTAMP NOT NULL,
|
||||
submitted_by INTEGER NOT NULL,
|
||||
vendor_id INTEGER,
|
||||
files JSONB,
|
||||
UNIQUE(receipt_id),
|
||||
CONSTRAINT fk_vendor
|
||||
FOREIGN KEY(vendor_id)
|
||||
REFERENCES %%site_name%%_vendors(id)
|
||||
);
|
||||
@ -1,20 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_recipe_items (
|
||||
id SERIAL PRIMARY KEY,
|
||||
uuid VARCHAR(32) NOT NULL,
|
||||
rp_id INTEGER NOT NULL,
|
||||
item_type VARCHAR(32) NOT NULL,
|
||||
item_name TEXT NOT NULL,
|
||||
uom INTEGER NOT NULL,
|
||||
qty FLOAT8 NOT NULL,
|
||||
item_id INTEGER DEFAULT NULL,
|
||||
links JSONB DEFAULT '{"main": ""}',
|
||||
UNIQUE(uuid),
|
||||
CONSTRAINT fk_rp_id
|
||||
FOREIGN KEY(rp_id)
|
||||
REFERENCES %%site_name%%_recipes(id)
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_item_id
|
||||
FOREIGN KEY(item_id)
|
||||
REFERENCES %%site_name%%_items(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
@ -1,9 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_recipes (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR,
|
||||
author INTEGER,
|
||||
description TEXT,
|
||||
creation_date TIMESTAMP,
|
||||
instructions TEXT [],
|
||||
picture_path TEXT
|
||||
);
|
||||
@ -1,11 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS roles(
|
||||
id SERIAL PRIMARY KEY,
|
||||
role_name VARCHAR(255) NOT NULL,
|
||||
role_description TEXT,
|
||||
site_id INTEGER NOT NULL,
|
||||
flags JSONB DEFAULT '{}',
|
||||
UNIQUE(role_name, site_id),
|
||||
CONSTRAINT fk_site
|
||||
FOREIGN KEY(site_id)
|
||||
REFERENCES sites(id)
|
||||
);
|
||||
@ -1,20 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_shopping_list_items (
|
||||
id SERIAL PRIMARY KEY,
|
||||
uuid VARCHAR(32) NOT NULL,
|
||||
sl_id INTEGER NOT NULL,
|
||||
item_type VARCHAR(32) NOT NULL,
|
||||
item_name TEXT NOT NULL,
|
||||
uom INTEGER NOT NULL,
|
||||
qty FLOAT8 NOT NULL,
|
||||
item_id INTEGER DEFAULT NULL,
|
||||
links JSONB DEFAULT '{"main": ""}',
|
||||
UNIQUE(uuid, sl_id),
|
||||
CONSTRAINT fk_sl_id
|
||||
FOREIGN KEY(sl_id)
|
||||
REFERENCES %%site_name%%_shopping_lists(id)
|
||||
ON DELETE CASCADE,
|
||||
CONSTRAINT fk_item_id
|
||||
FOREIGN KEY(item_id)
|
||||
REFERENCES %%site_name%%_items(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
@ -1,9 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_shopping_lists (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
author INTEGER,
|
||||
creation_date TIMESTAMP,
|
||||
type VARCHAR(64),
|
||||
UNIQUE(name)
|
||||
);
|
||||
@ -1,12 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS sites (
|
||||
id SERIAL PRIMARY KEY,
|
||||
site_name VARCHAR(120),
|
||||
site_description TEXT,
|
||||
creation_date TIMESTAMP,
|
||||
site_owner_id INTEGER NOT NULL,
|
||||
flags JSONB,
|
||||
default_zone INTEGER DEFAULT NULL,
|
||||
default_auto_issue_location INTEGER DEFAULT NULL,
|
||||
default_primary_location INTEGER DEFAULT NULL,
|
||||
UNIQUE(site_name)
|
||||
);
|
||||
@ -1,7 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_sku_prefix(
|
||||
id SERIAL PRIMARY KEY,
|
||||
uuid VARCHAR(16) NOT NULL,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
UNIQUE (name, uuid)
|
||||
);
|
||||
@ -1,16 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_Transactions (
|
||||
id SERIAL PRIMARY KEY,
|
||||
timestamp TIMESTAMP,
|
||||
logistics_info_id INTEGER NOT NULL,
|
||||
barcode VARCHAR(255) NOT NULL,
|
||||
name VARCHAR(255),
|
||||
transaction_type VARCHAR(255) NOT NULL,
|
||||
quantity FLOAT8 NOT NULL,
|
||||
description TEXT,
|
||||
user_id INTEGER NOT NULL,
|
||||
data JSONB,
|
||||
CONSTRAINT fk_logistics_info
|
||||
FOREIGN KEY(logistics_info_id)
|
||||
REFERENCES %%site_name%%_logistics_info(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
@ -1,10 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS units (
|
||||
id SERIAL PRIMARY KEY,
|
||||
plural VARCHAR(32),
|
||||
single VARCHAR(32),
|
||||
fullname VARCHAR(255),
|
||||
description TEXT,
|
||||
unique(plural),
|
||||
unique(single),
|
||||
unique(fullname)
|
||||
);
|
||||
@ -1,8 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_vendors (
|
||||
id SERIAL PRIMARY KEY,
|
||||
vendor_name VARCHAR(255) NOT NULL,
|
||||
vendor_address VARCHAR(255),
|
||||
creation_date TIMESTAMP NOT NULL,
|
||||
created_by INTEGER NOT NULL,
|
||||
phone_number VARCHAR(32)
|
||||
);
|
||||
@ -1,9 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS %%site_name%%_zones(
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(32) NOT NULL,
|
||||
site_id INTEGER NOT NULL,
|
||||
UNIQUE(name),
|
||||
CONSTRAINT fk_site
|
||||
FOREIGN KEY(site_id)
|
||||
REFERENCES sites(id)
|
||||
);
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_brands CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_conversions CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_cost_layers CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_food_info CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_group_items CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_groups CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_item_info CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_item_locations CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_items CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_itemlinks CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_locations CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_logistics_info CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_receipt_items CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_receipts CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_recipe_items CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_recipes CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_shopping_list_items CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_shopping_lists CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_sku_prefix CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_transactions CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE units CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_vendors CASCADE;
|
||||
@ -1 +0,0 @@
|
||||
DROP TABLE %%site_name%%_zones CASCADE;
|
||||
@ -1,4 +0,0 @@
|
||||
INSERT INTO %%site_name%%_conversions
|
||||
(item_id, uom_id, conv_factor)
|
||||
VALUES (%s, %s, %s)
|
||||
RETURNING *;
|
||||
@ -1,4 +1,4 @@
|
||||
INSERT INTO %%site_name%%_food_info
|
||||
(ingrediants, food_groups, nutrients, expires, default_expiration)
|
||||
VALUES (%s, %s, %s, %s, %s)
|
||||
(ingrediants, food_groups, nutrients, expires)
|
||||
VALUES (%s, %s, %s, %s)
|
||||
RETURNING *;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user