halfway to databased

2024-02-08 23:15:05 +01:00
parent f49e251354
commit 78a06765c2
14 changed files with 571 additions and 115 deletions


@@ -31,7 +31,8 @@ Okay, next fill-up set to <date X days from now> (configured in `sup.toml`::FILL
TODO: talk about product_aliases
To configure what you take, how much, and when, add entries to `supps.toml`. The fields are configured as such in the `Supp` class:
To configure what you take, how much, and when, add entries to `supps.toml`.
The fields are defined as follows in the `Supp` class:
```python
name: str

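For illustration, a hypothetical `supps.toml` entry built from those fields might look like the sketch below. The field names and defaults mirror the `Supp` dataclass and the `supps.toml` changes elsewhere in this commit; the product name and amounts are invented.

```toml
# hypothetical example entry
[[supps]]
name = "magnesium"   # matched against an inventory product name or an alias from [product_aliases]
morning = 200        # amount taken in the morning; lunch, dinner, bedtime default to 0
units = "mg"         # one of: caps, mg, g, ml, mcg, mcl, iu
days_per_week = 7    # defaults to 7
winter_only = false  # defaults to false
```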

@@ -83,7 +83,7 @@
{
"orderDate": "2024-01-23T23:00:00.000Z",
"name": "Liquid D-3 & MK-7",
"quantity": 81.5,
"quantity": 82,
"quantityUnits": "iu",
"servingUnit": "iu",
"numUnitsInServing": 5000,
@@ -410,7 +410,7 @@
"quantity": 59,
"quantityUnits": "ml",
"servingUnit": "ml",
"numUnitsInServing": 0.126,
"numUnitsInServing": 126,
"numBottles": 1
},
{
@@ -445,7 +445,7 @@
"quantity": 90,
"numUnitsInServing": 25,
"servingUnit": "mg",
"quantityUnit": "caps",
"quantityUnits": "caps",
"orderDate": "2024-02-03",
"numBottles": 1
},
@@ -480,7 +480,7 @@
"orderDate": "2024-02-04T23:00:00.000Z",
"name": "Ultimate Omega",
"quantity": 640,
"quantityUnits": "mg per Soft Gel",
"quantityUnits": "mg",
"servingUnit": "mg",
"numUnitsInServing": 1280,
"numBottles": 1
@@ -525,9 +525,9 @@
"orderDate": "2024-02-04T23:00:00.000Z",
"name": "Liquid Iodine Plus",
"quantity": 59,
"quantityUnits": "ml",
"servingUnit": "ml",
"numUnitsInServing": 0.126,
"quantityUnits": "mcl",
"servingUnit": "mcl",
"numUnitsInServing": 126,
"numBottles": 1
},
{

poetry.lock (generated)

@@ -22,6 +22,24 @@ files = [
{file = "astroid-3.0.2.tar.gz", hash = "sha256:4a61cf0a59097c7bb52689b0fd63717cd2a8a14dc9f1eee97b82d814881c8c91"},
]
[[package]]
name = "asttokens"
version = "2.4.1"
description = "Annotate AST trees with source code positions"
optional = false
python-versions = "*"
files = [
{file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"},
{file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"},
]
[package.dependencies]
six = ">=1.12.0"
[package.extras]
astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"]
test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"]
[[package]]
name = "black"
version = "24.1.1"
@@ -91,6 +109,17 @@ files = [
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "decorator"
version = "5.1.1"
description = "Decorators for Humans"
optional = false
python-versions = ">=3.5"
files = [
{file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
]
[[package]]
name = "dill"
version = "0.3.8"
@@ -106,6 +135,20 @@ files = [
graph = ["objgraph (>=1.7.2)"]
profile = ["gprof2dot (>=2022.7.29)"]
[[package]]
name = "executing"
version = "2.0.1"
description = "Get the currently executing AST node of a frame, and other information"
optional = false
python-versions = ">=3.5"
files = [
{file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"},
{file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"},
]
[package.extras]
tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"]
[[package]]
name = "greenlet"
version = "3.0.3"
@@ -177,6 +220,41 @@ files = [
docs = ["Sphinx", "furo"]
test = ["objgraph", "psutil"]
[[package]]
name = "ipython"
version = "8.21.0"
description = "IPython: Productive Interactive Computing"
optional = false
python-versions = ">=3.10"
files = [
{file = "ipython-8.21.0-py3-none-any.whl", hash = "sha256:1050a3ab8473488d7eee163796b02e511d0735cf43a04ba2a8348bd0f2eaf8a5"},
{file = "ipython-8.21.0.tar.gz", hash = "sha256:48fbc236fbe0e138b88773fa0437751f14c3645fb483f1d4c5dee58b37e5ce73"},
]
[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
decorator = "*"
jedi = ">=0.16"
matplotlib-inline = "*"
pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""}
prompt-toolkit = ">=3.0.41,<3.1.0"
pygments = ">=2.4.0"
stack-data = "*"
traitlets = ">=5"
[package.extras]
all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"]
black = ["black"]
doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"]
kernel = ["ipykernel"]
nbconvert = ["nbconvert"]
nbformat = ["nbformat"]
notebook = ["ipywidgets", "notebook"]
parallel = ["ipyparallel"]
qtconsole = ["qtconsole"]
test = ["pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath"]
test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath", "trio"]
[[package]]
name = "isort"
version = "5.13.2"
@@ -191,6 +269,25 @@ files = [
[package.extras]
colors = ["colorama (>=0.4.6)"]
[[package]]
name = "jedi"
version = "0.19.1"
description = "An autocompletion tool for Python that can be used for text editors."
optional = false
python-versions = ">=3.6"
files = [
{file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"},
{file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"},
]
[package.dependencies]
parso = ">=0.8.3,<0.9.0"
[package.extras]
docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"]
qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
[[package]]
name = "markdown-it-py"
version = "3.0.0"
@@ -215,6 +312,20 @@ profiling = ["gprof2dot"]
rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
[[package]]
name = "matplotlib-inline"
version = "0.1.6"
description = "Inline Matplotlib backend for Jupyter"
optional = false
python-versions = ">=3.5"
files = [
{file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"},
{file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"},
]
[package.dependencies]
traitlets = "*"
[[package]]
name = "mccabe"
version = "0.7.0"
@@ -300,6 +411,21 @@ files = [
{file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
]
[[package]]
name = "parso"
version = "0.8.3"
description = "A Python Parser"
optional = false
python-versions = ">=3.6"
files = [
{file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"},
{file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"},
]
[package.extras]
qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
testing = ["docopt", "pytest (<6.0.0)"]
[[package]]
name = "pathspec"
version = "0.12.1"
@@ -311,6 +437,20 @@ files = [
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
]
[[package]]
name = "pexpect"
version = "4.9.0"
description = "Pexpect allows easy control of interactive console applications."
optional = false
python-versions = "*"
files = [
{file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"},
{file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"},
]
[package.dependencies]
ptyprocess = ">=0.5"
[[package]]
name = "platformdirs"
version = "4.2.0"
@@ -326,6 +466,45 @@ files = [
docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
[[package]]
name = "prompt-toolkit"
version = "3.0.43"
description = "Library for building powerful interactive command lines in Python"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"},
{file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"},
]
[package.dependencies]
wcwidth = "*"
[[package]]
name = "ptyprocess"
version = "0.7.0"
description = "Run a subprocess in a pseudo terminal"
optional = false
python-versions = "*"
files = [
{file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
{file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
]
[[package]]
name = "pure-eval"
version = "0.2.2"
description = "Safely evaluate AST nodes without side effects"
optional = false
python-versions = "*"
files = [
{file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"},
{file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"},
]
[package.extras]
tests = ["pytest"]
[[package]]
name = "pydantic"
version = "2.6.1"
@@ -533,6 +712,17 @@ files = [
{file = "ruff-0.2.0.tar.gz", hash = "sha256:63856b91837606c673537d2889989733d7dffde553828d3b0f0bacfa6def54be"},
]
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
[[package]]
name = "sqlalchemy"
version = "2.0.25"
@@ -635,6 +825,25 @@ files = [
pydantic = ">=1.10.13,<3.0.0"
SQLAlchemy = ">=2.0.0,<2.1.0"
[[package]]
name = "stack-data"
version = "0.6.3"
description = "Extract data from python stack frames and tracebacks for informative displays"
optional = false
python-versions = "*"
files = [
{file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"},
{file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"},
]
[package.dependencies]
asttokens = ">=2.1.0"
executing = ">=1.2.0"
pure-eval = "*"
[package.extras]
tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"]
[[package]]
name = "toml"
version = "0.10.2"
@@ -657,6 +866,21 @@ files = [
{file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"},
]
[[package]]
name = "traitlets"
version = "5.14.1"
description = "Traitlets Python configuration system"
optional = false
python-versions = ">=3.8"
files = [
{file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"},
{file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"},
]
[package.extras]
docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"]
[[package]]
name = "typer"
version = "0.9.0"
@@ -689,7 +913,18 @@ files = [
{file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
]
[[package]]
name = "wcwidth"
version = "0.2.13"
description = "Measures the displayed width of unicode strings in a terminal"
optional = false
python-versions = "*"
files = [
{file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
{file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
]
[metadata]
lock-version = "2.0"
python-versions = "^3.12"
content-hash = "79f33e8ba6a5c94a5af2c291bedb84d6131bd7bde31396e2f4348c1fdacfa3b2"
content-hash = "f1694f707d895f0211021d90acef4c7449085c1c2cac19bbb35f14a7820dcaa6"


@@ -19,6 +19,7 @@ python-dotenv = "^1.0.1"
pylint = "^3.0.3"
ruff = "^0.2.0"
black = "^24.1.1"
ipython = "^8.21.0"
[build-system]
requires = ["poetry-core"]


@@ -1,38 +1,180 @@
import collections
from dataclasses import asdict
import datetime as dt
import hashlib
import json
import pathlib as pl
import tomllib
from sup.commands import add_user_supp_consumption
from mysql.connector.errors import IntegrityError
from sup.commands import (
add_user_supp_consumption,
add_inventory,
add_product,
add_alias,
set_fill_every_x_days,
set_last_fill_date,
create_user,
)
from sup.models import Supp
from sup.queries import get_product_id_from_alias
from sup.queries import product_is_in_products_table, alias_exists, inventory_exists, consumption_exists
from sup.sql_funcs import commit
def load_config():
return tomllib.loads(pl.Path('supps.toml').read_text())
CONFIG = load_config()
def transfer_inventory_to_sql():
...
inventory = json.loads(pl.Path('inventory.json').read_text())
for i in inventory:
add_inventory(
user_id='zev@averba.ch',
product_name=i['name'],
order_date=i['orderDate'][:10],
num_bottles=i['numBottles']
)
def transfer_supps_consumption_toml_to_sql():
toml = tomllib.loads(pl.Path('supps.toml').read_text())
for sup in toml["supps"]:
def populate_products():
inventory = json.loads(pl.Path('inventory.json').read_text())
for i in inventory:
try:
add_product(
name=i['name'],
quantity=i['quantity'],
num_units_in_serving=i['numUnitsInServing'],
quantity_units=i['quantityUnits'],
serving_units=i['servingUnit'],
)
except KeyError:
print(i)
raise
except IntegrityError:
print('already exists')
continue
commit()
def transfer_supps_consumption_toml_to_sql() -> None:
aliases = {v: k for k, v in CONFIG['product_aliases'].items()}
product_names = [i['name'] for i in json.loads(pl.Path('inventory.json').read_text())]
for sup in CONFIG["supps"]:
si = Supp(**sup)
si_dict = asdict(si)
name = si_dict['name']
_name = si_dict['name']
del si_dict['name']
product_id = get_product_id_from_alias(name)
add_user_supp_consumption(**si_dict)
name = None
if _name.lower() in aliases:
name = aliases[_name.lower()]
else:
for pn in product_names:
if _name.lower() in pn.lower():
name = pn
break
name = name or _name
print(name)
add_user_supp_consumption(name=name, user_id="zev@averba.ch", **si_dict)
commit()
def transfer_config_to_sql():
"""fill every x days and last fill"""
...
fill_every_x_days = CONFIG['FILL_EVERY_X_DAYS']
last_fill_date = CONFIG['LAST_FILL_DATE']
set_fill_every_x_days(value=fill_every_x_days, user_id="zev@averba.ch")
set_last_fill_date(value=last_fill_date, user_id="zev@averba.ch")
commit()
def transfer_aliases_to_sql():
...
aliases = CONFIG['product_aliases']
for name, alias in aliases.items():
add_alias("zev@averba.ch", name, alias)
commit()
transfer_config_to_sql()
transfer_aliases_to_sql()
transfer_supps_consumption_toml_to_sql()
transfer_inventory_to_sql()
def validate_products_against_toml():
inventory = json.loads(pl.Path('inventory.json').read_text())
for i in inventory:
if not product_is_in_products_table(name=i['name'], q=round(i['quantity']), num_units=i['numUnitsInServing']):
raise Exception
def do_create_user():
email = "zev@averba.ch"
pw = "@m_6Lwx.CjqvfwG@hmT"
first_name = "Zev"
last_name = "Averbach"
pw_hash = hashlib.new("SHA256")
pw_hash.update(pw.encode())
create_user(email, pw_hash.hexdigest(), first_name, last_name, dt.date.today(), 30)
commit()
print("okay, created user")
def validate_product_aliases():
aliases = CONFIG['product_aliases']
for name, alias in aliases.items():
if not alias_exists(name=name, alias=alias, user_id="zev@averba.ch"):
raise Exception
def validate_inventory():
inventory = json.loads(pl.Path('inventory.json').read_text())
for i in inventory:
if not inventory_exists(
user_id="zev@averba.ch",
product_name=i['name'],
order_date=i['orderDate'][:10],
quantity=i['quantity'],
serving_q=i["numUnitsInServing"],
):
raise Exception
# PRIMARY KEY (user_id, product_id, order_date)
def validate_consumption():
"""
Something is wrong here; several of the items in supps.toml are missing from
the consumption table.
"""
aliases = collections.defaultdict(list)
for k, v in CONFIG['product_aliases'].items():
aliases[v].append(k)
product_names = [i['name'] for i in json.loads(pl.Path('inventory.json').read_text())]
for sup in CONFIG["supps"]:
si = Supp(**sup)
si_dict = asdict(si)
_name = si_dict['name']
del si_dict['name']
name = None
names = None
if _name in aliases:
names = aliases[_name]
print(f"{names=}")
else:
for pn in product_names:
if _name.lower() in pn.lower():
name = pn
break
if names:
if not any(consumption_exists(name=n, user_id="zev@averba.ch", **si_dict) for n in names):
raise Exception
else:
name = name or _name
if not consumption_exists(name=name, user_id="zev@averba.ch", **si_dict):
raise Exception
# do_create_user()
# populate_products()
# transfer_inventory_to_sql()
# transfer_aliases_to_sql()
# transfer_supps_consumption_toml_to_sql()
# transfer_config_to_sql()
# validate_products_against_toml()
# validate_product_aliases()
# validate_inventory()
# validate_consumption()


@@ -26,10 +26,14 @@ atexit.register(close_everything)
def do_query(create_statement: str) -> None:
table_name = None
try:
table_name = create_statement.lower().split("create table ")[1].split(" ")[0]
except IndexError:
table_name = create_statement.lower().split("drop table ")[1].split(" ")[0]
try:
table_name = create_statement.lower().split("drop table ")[1].split(" ")[0]
except IndexError:
pass
print(f"{table_name=}")
print(create_statement)
try:
@@ -63,17 +67,18 @@ def create_table_products():
id INT NOT NULL AUTO_INCREMENT,
name VARCHAR(150) NOT NULL,
quantity INT NOT NULL,
quantity_units ENUM('caps', 'mg', 'g', 'ml', 'mcg', 'iu'),
serving_units ENUM('caps', 'mg', 'g', 'ml', 'mcg', 'iu'),
quantity_units ENUM('caps', 'mg', 'g', 'ml', 'mcg', 'mcl', 'iu'),
serving_units ENUM('caps', 'mg', 'g', 'ml', 'mcg', 'mcl', 'iu'),
num_units_in_serving INT NOT NULL,
CONSTRAINT pk_product PRIMARY KEY (id)
CONSTRAINT pk_product_name PRIMARY KEY (id, name),
CONSTRAINT uq_name_quantity_numunitsinserving UNIQUE KEY (name, quantity, num_units_in_serving)
)""").strip()
do_query(query)
def create_table_product_aliases():
query = ("""
create table user_product_aliases (
CREATE TABLE user_product_aliases (
user_id VARCHAR(80) NOT NULL,
product_id INT NOT NULL,
alias VARCHAR(30) NOT NULL,
@@ -90,11 +95,11 @@ def create_table_user_supplements_consumption():
user_id VARCHAR(80) NOT NULL,
product_id INT NOT NULL,
morning INT DEFAULT 0,
lunch INT NULL,
dinner INT NULL,
bedtime INT NULL,
lunch INT DEFAULT 0,
dinner INT DEFAULT 0,
bedtime INT DEFAULT 0,
days_per_week INT DEFAULT 7,
units ENUM('caps', 'mg', 'g', 'ml', 'mcg', 'iu'),
units ENUM('caps', 'mg', 'g', 'ml', 'mcg', 'mcl', 'iu'),
winter_only BOOL DEFAULT false,
CONSTRAINT fk_user_id FOREIGN KEY (user_id) REFERENCES users(id),
CONSTRAINT fk_product_id FOREIGN KEY (product_id) REFERENCES products(id),
@@ -103,7 +108,7 @@ def create_table_user_supplements_consumption():
do_query(query)
def create_table_user_supplements_inventory():
def create_table_user_supplements_orders():
query = ("""
create table user_supplements_orders (
user_id VARCHAR(80) NOT NULL,
@@ -130,19 +135,30 @@ def create_tables1():
return
def create_tables2():
# try:
# create_table_user_supplements_consumption()
# except Exception:
# return
# create_table_user_supplements_inventory()
create_table_product_aliases()
try:
create_table_user_supplements_consumption()
except Exception:
return
try:
create_table_user_supplements_orders()
except Exception:
do_query("drop table user_supplements_consumption")
try:
create_table_product_aliases()
except Exception:
do_query("drop table user_supplements_consumption")
do_query("drop table user_supplements_orders")
def drop_tables():
do_query("drop table users")
do_query("drop table products")
do_query("drop table user_supplements_consumption")
do_query("drop table user_supplements_orders")
do_query("drop table user_product_aliases")
do_query("drop table users")
do_query("drop table products")
# drop_tables()
create_tables2()
if __name__ == "__main__":
drop_tables()
create_tables1()
create_tables2()


@@ -12,10 +12,9 @@ from sup.main import (
load_config,
SUPP_CONSUMPTION_FP,
load_inventory,
CONFIG,
ALIASES_REV,
)
from sup.models import Supp
from sup.queries import get_user
app = typer.Typer()
@@ -97,20 +96,20 @@ def status():
TODO: this doesn't seem to sense pending orders which have been added to inventory.json
maybe because the delivery date is in the present/future?
"""
validate_matches()
# TODO: make this an arg
user_id = "zev@averba.ch"
config = load_config()
num_days_of_inventory_needed = config["FILL_EVERY_X_DAYS"]
user = get_user(user_id)
num_days_of_inventory_needed = user["fill_every_x_days"]
last_fill_date = dt.datetime.strptime(
config["LAST_FILL_DATE"], "%Y-%m-%d"
user["last_fill_date"], "%Y-%m-%d"
).date()
next_fill_date = last_fill_date + dt.timedelta(num_days_of_inventory_needed)
print()
print(f"{next_fill_date=}")
print()
inventory = load_inventory()
# TODO: this is where you left off
inventory = load_inventory(user_id)
needs = []
@@ -185,7 +184,7 @@ def add(
date = dt.datetime.now().date() # type: ignore
else:
date = date.date() # type: ignore
ordered_supps = load_ordered_supps()
ordered_supps = load_ordered_supps(user_id)
order_dict = dict(
name=name,
quantity=quantity,
@@ -204,27 +203,7 @@ def save_ordered_supps(ordered_supps: list[dict]) -> None:
ORDERED_SUPPS_FP.write_text(json.dumps(ordered_supps, indent=2))
def _prettify_json():
ORDERED_SUPPS_FP.write_text(json.dumps(json.loads(ORDERED_SUPPS_FP.read_text()), indent=2))
def save_config(config: dict) -> None:
SUPP_CONSUMPTION_FP.write_text(toml.dumps(config))
def validate_matches() -> None:
missing = []
ordered_supp_names_lower = [i["name"].lower() for i in load_ordered_supps()]
for i in CONFIG["supps"]:
if (
not any(
i["name"].lower() in ordered_supp_name
for ordered_supp_name in ordered_supp_names_lower
)
and i["name"].lower() not in ALIASES_REV
):
missing.append(i["name"])
if missing:
raise Missing(", ".join(missing))


@@ -1,19 +1,60 @@
import datetime as dt
import typing as t
from sup.sql_funcs import do_query
def set_fill_every_x_days(value: int, user_id: str) -> None:
do_query(query=f"update users set fill_every_x_days = {value} where id = '{user_id}'")
def set_last_fill_date(value: str, user_id: str) -> None:
do_query(query=f"update users set last_fill_date = '{value}' where id = '{user_id}'")
def create_user(email: str, pw_hash: str, first_name: str, last_name: str, last_fill_date: dt.date, fill_every_x_days=30):
do_query(
"insert into users (id, pw_hash, first_name, last_name, last_fill_date, fill_every_x_days) values "
do_query(commit=True,
query="insert into users (id, pw_hash, first_name, last_name, last_fill_date, fill_every_x_days) values "
f"('{email}', '{pw_hash}', '{first_name}', '{last_name}', '{last_fill_date}', '{fill_every_x_days}')"
)
def add_user_supp_consumption(user_id: str, product_id: int, morning: int, lunch: int, dinner: int, bedtime: int, days_per_week: int, units: str, winter_only: bool):
def add_user_supp_consumption(user_id: str, name: str, morning: int, lunch: int, dinner: int, bedtime: int, days_per_week: int, units: str, winter_only: bool):
do_query(
"insert into user_supplements_consumption "
"(user_id, product_id, morning, lunch, dinner, bedtime, days_per_week, units, winter_only) values ("
f"('{user_id}', {product_id}, {morning}, {lunch}, {dinner}, {bedtime}, {days_per_week}, '{units}', {winter_only}"
"(user_id, product_id, morning, lunch, dinner, bedtime, days_per_week, units, winter_only) "
f"select '{user_id}', id, {morning}, {lunch}, {dinner}, {bedtime}, {days_per_week}, '{units}', {winter_only} "
f"from products where name = '{name}'"
)
def add_inventory(user_id: str, product_name: str, order_date: str, num_bottles: int) -> None:
do_query(
"insert into user_supplements_orders (user_id, product_id, order_date, num_bottles) "
f"select '{user_id}', id, '{order_date}', {num_bottles} "
f"from products where name = '{product_name}'"
)
def add_product(
name: str,
quantity: int,
quantity_units: t.Literal['caps', 'mg', 'g', 'ml', 'mcg', 'mcl', 'iu'],
serving_units: t.Literal['caps', 'mg', 'g', 'ml', 'mcg', 'mcl', 'iu'],
num_units_in_serving: int,
):
query = (
"insert into products (name, quantity, quantity_units, serving_units, num_units_in_serving) values ("
f"'{name}', {quantity}, '{quantity_units}', '{serving_units}', {num_units_in_serving}"
")"
)
do_query(query)
def add_alias(user_id: str, name: str, alias: str) -> None:
query = (
"insert into user_product_aliases (user_id, product_id, alias) "
f"select '{user_id}', id, '{alias}' "
f"from products where name = '{name}'"
)
do_query(query)


@@ -18,14 +18,14 @@ def load_config():
return tomllib.loads(SUPP_CONSUMPTION_FP.read_text())
def load_ordered_supps() -> list[dict]:
def load_ordered_supps(user_id: str) -> list[dict]:
return json.loads(ORDERED_SUPPS_FP.read_text())
CONFIG = load_config()
ALIASES = CONFIG["product_aliases"]
ordered_supps = load_ordered_supps()
ordered_supps = load_ordered_supps(user_id)
for i in CONFIG["supps"]:
for ordered_supp in ordered_supps:
@@ -35,9 +35,9 @@ for i in CONFIG["supps"]:
ALIASES_REV = {v: k for k, v in ALIASES.items()}
def load_inventory():
def load_inventory(user_id: str):
inventory = collections.defaultdict(list)
for s in load_ordered_supps():
for s in load_ordered_supps(user_id):
if s["name"] not in CONFIG["discontinued"]:
inventory[ALIASES[s["name"]]].append(s)
return inventory


@@ -10,7 +10,7 @@ class Supp:
dinner: int | float = 0
bedtime: int | float = 0
days_per_week: int = 7
units: t.Literal["caps", "mg", "g", "ml", "mcg", "iu"] = "mg"
units: t.Literal["caps", "mg", "g", "ml", "mcg", "mcl", "iu"] = "mg"
winter_only: bool = False
def __mul__(self, other: int) -> float:


@@ -1,10 +1,59 @@
from sup.sql_funcs import do_query
class NoResult(Exception):
pass
def get_user(user_id: str):
return do_query(f"select * from users where id = '{user_id}'", get_result=True, return_dict=True)[0] # type: ignore
def get_product_id_from_alias(alias: str):
res = do_query(f"select
raise NoResult(f"no product_id for alias '{alias}'")
def product_is_in_products_table(name: str, q: int, num_units: int) -> bool:
query = f"select count(*) from products where name = '{name}' and quantity = {q} and num_units_in_serving = {num_units}"
res = do_query(get_result=True, query=query)
if res[0][0] != 1: # type: ignore
print(res[0][0]) # type: ignore
return False
return True
def alias_exists(name: str, alias: str, user_id: str) -> bool:
query = (
f"select count(*) from user_product_aliases where user_id = '{user_id}' "
f"and alias = '{alias}' and product_id = (select id from products where name = '{name}')"
)
res = do_query(get_result=True, query=query)
if res[0][0] != 1: # type: ignore
print(res[0][0]) # type: ignore
return False
return True
def inventory_exists(user_id: str, product_name: str, order_date: str, quantity: int, serving_q: int):
query = (
f"select count(*) from user_supplements_orders where user_id = '{user_id}' and order_date = '{order_date}' "
"and product_id = ("
f"select distinct id from products where name = '{product_name}' and quantity = {quantity} "
f"and num_units_in_serving = {serving_q}"
")"
)
res = do_query(get_result=True, query=query)
if res[0][0] != 1: # type: ignore
print(res[0][0]) # type: ignore
return False
return True
def consumption_exists(name: str, user_id: str, morning: int, lunch: int, dinner: int, bedtime: int, days_per_week: int, units: str, winter_only: bool):
name = name.lower()
query = (
f"select count(*) from user_supplements_consumption where user_id = '{user_id}' and morning = {morning} "
f"and lunch = {lunch} and dinner = {dinner} and bedtime = {bedtime} and days_per_week = {days_per_week} "
f"and units = '{units}' and winter_only = {winter_only} "
f"and product_id = coalesce("
f"(select id from products where name = '{name}' limit 1), "
f"(select product_id from user_product_aliases where alias = '{name}' and user_id = '{user_id}' limit 1)"
")"
)
res = do_query(get_result=True, query=query)
if res[0][0] != 1: # type: ignore
print(res[0][0]) # type: ignore
return False
return True


@@ -1,15 +0,0 @@
import datetime as dt
import hashlib
from sup.commands import create_user
if __name__ == "__main__":
email = input('email? ')
pw = input('pw? ')
first_name = input('first name? ')
last_name = input('last name? ')
pw_hash = hashlib.new("SHA256")
pw_hash.update(pw.encode())
create_user(email, pw_hash.hexdigest(), first_name, last_name, dt.date.today(), 30)
print("okay, created user")


@@ -15,7 +15,8 @@ cnx = mysql.connector.connect(
host=os.getenv("PLANETSCALE_HOST"),
database=DB_NAME,
)
cursor = cnx.cursor()
cursor = cnx.cursor(buffered=True)
dict_cursor = cnx.cursor(dictionary=True)
def close_everything():
cnx.close()
@@ -24,11 +25,18 @@ def close_everything():
atexit.register(close_everything)
def do_query(query: str) -> None:
def do_query(query: str, get_result: bool = False, commit: bool = False, return_dict: bool = False) -> list | None:
print(query)
c = cursor if not return_dict else dict_cursor
try:
cursor.execute(query)
c.execute(query)
except mysql.connector.Error as err:
print(err.msg)
raise
# TODO: return something if appropriate
if commit:
cnx.commit()
if get_result:
return c.fetchall()
def commit():
cnx.commit()


@@ -16,7 +16,7 @@ morning = 12
[[supps]]
name = "b complex"
morning = 0.5
morning = 1
days_per_week = 2
units = "caps"
@@ -42,9 +42,8 @@ morning = 500
[[supps]]
name = "creatine"
lunch = 2.5
lunch = 2500
days_per_week = 4
units = "g"
[[supps]]
name = "Ca-AKG"
@@ -73,7 +72,7 @@ morning = 67
days_per_week = 3
[[supps]]
name = "EPA/DHA"
name = "epa/dha"
morning = 1280
dinner = 640
@@ -106,8 +105,8 @@ morning = 300
[[supps]]
name = "iodine"
morning = 0.126
units = "ml"
morning = 126
units = "mcl"
[[supps]]
name = "iron"