Mirror of https://github.com/zaphar/kitchen.git, synced 2025-07-22 19:40:14 -04:00
Compare commits
81 Commits
SHA1
876fa8fb33
d4fce72333
e59eff8ae7
cda5b02a37
cc66c1f4f5
4ed0b6f8fb
c77fa24515
6a5046d3c0
263abda17b
df88c2b7bd
8000b7d317
aba1e114cf
9126d434d5
548f336e1a
9f3b11a01f
f173204d2d
1f90cc2ef6
ed44e929f4
51d165a50b
84cc2a2713
f75652befa
b93edd2701
4767115da6
1c55a315b0
fe181fb102
61112c4e64
24fea84a0a
113b03016f
9833e22e42
1f986e6372
63463fb5d6
6087d31aad
4ffb481634
6bc9f2ea2e
ec18d9de97
9249dca202
dac4324c8f
e3c4a01648
e1735e4243
651f0cb264
3e853f51eb
251cbfa5c7
1b6023a03e
3e675b47f4
6f7d44ff83
b105ce3f4b
0ba5f18b22
a320351041
874a5fdb57
bb092212ac
9022503e76
94e1987f09
a104ef5f47
dac529e8e8
6e0e00c7f3
8942eb59a5
e80953e987
c64605f9e7
d7cea46427
45737f24e4
61634cd682
0eee2e33bf
cbe7957844
b999359e95
c672459ec9
a399858728
e6b827ca21
1432dcea13
a3aa579fa5
50eecf9a7c
39456bb35d
45b5c84d7c
e30555aabe
e78116ff8d
0b7f513f27
4cefe42072
db03d603c3
2ea0339ad1
b496cf9568
806fdd2721
acc922787d
9 .envrc (new file)
@ -0,0 +1,9 @@
if has lorri; then
  eval "$(lorri direnv)"
elif has nix; then
  echo "Using flake fallback since lorri isn't installed"
  use flake
else
  # noop
  echo "Unsupported direnv configuration. We need nix flake support and lorri installed"
fi
3 .gitignore (vendored)
@ -1,11 +1,10 @@
target/
.lsp/
.clj-kondo/
web/dist/
webdist/
nix/*/result
result
.vscode/
.session_store/
.gitignore/
.DS_Store/
.env
2406 Cargo.lock (generated)
File diff suppressed because it is too large.
@ -1,13 +1,11 @@
[workspace]
members = [ "recipes", "kitchen", "web", "api" ]
resolver = "2"

[patch.crates-io]
# TODO(jwall): When the fix for RcSignal Binding is released we can drop this patch.
sycamore = { git = "https://github.com/sycamore-rs/sycamore/", rev = "5d49777b4a66fb5730c40898fd2ee8cde15bcdc3" }
sycamore-router = { git = "https://github.com/sycamore-rs/sycamore/", rev = "5d49777b4a66fb5730c40898fd2ee8cde15bcdc3" }
# NOTE(jwall): We are maintaining a patch to remove the unstable async_std_feature. It breaks in our project on
# Rust v1.64
sqlx = { git = "https://github.com/zaphar/sqlx", branch = "remove_unstable_async_std_feature" }

[profile.release]
lto = true
14 Makefile
@ -14,6 +14,8 @@
mkfile_path := $(abspath $(lastword $(MAKEFILE_LIST)))
mkfile_dir := $(dir $(mkfile_path))
sqlite_url := sqlite://$(mkfile_dir)/.session_store/store.db
export out := dist
export project := kitchen

kitchen: wasm kitchen/src/*.rs
	cd kitchen; cargo build
@ -27,15 +29,19 @@ static-prep: web/index.html web/favicon.ico web/static/*.css
	cp -r web/favicon.ico web/dist/
	cp -r web/static web/dist/

wasmrelease: wasmrelease-dist static-prep
wasmrelease: wasm-opt static-prep

wasm-opt: wasmrelease-dist
	cd web; sh ../scripts/wasm-opt.sh release

wasmrelease-dist: web/src/*.rs web/src/components/*.rs
	cd web; wasm-pack build --mode no-install --release --target web --out-dir dist/
	cd web; sh ../scripts/wasm-build.sh release

wasm: wasm-dist static-prep

wasm-dist: web/src/*.rs web/src/components/*.rs
	cd web; wasm-pack build --mode no-install --target web --out-dir dist/
	cd web; sh ../scripts/wasm-build.sh debug
	cd web; sh ../scripts/wasm-sourcemap.sh

clean:
	rm -rf web/dist/*
@ -50,5 +56,5 @@ sqlx-add-%:
sqlx-revert:
	cd kitchen; cargo sqlx migrate revert --database-url $(sqlite_url)

sqlx-prepare:
sqlx-prepare: wasm
	cd kitchen; cargo sqlx prepare --database-url $(sqlite_url)
@ -6,10 +6,12 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
serde = "1.0.144"
recipes = { path = "../recipes" }
chrono = "0.4.22"

[dependencies.serde]
version = "1.0.204"
features = ["derive"]

[dependencies.axum]
version = "0.5.16"
103
flake.lock
generated
@ -1,5 +1,43 @@
|
||||
{
|
||||
"nodes": {
|
||||
"cargo-wasm2map-src": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1693927731,
|
||||
"narHash": "sha256-oqJ9ZZLvUK57A9Kf6L4pPrW6nHqb+18+JGKj9HfIaaM=",
|
||||
"owner": "mtolmacs",
|
||||
"repo": "wasm2map",
|
||||
"rev": "c7d80748b7f3af37df24770b9330b17aa9599e3e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "mtolmacs",
|
||||
"repo": "wasm2map",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"fenix": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"naersk",
|
||||
"nixpkgs"
|
||||
],
|
||||
"rust-analyzer-src": "rust-analyzer-src"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1752475459,
|
||||
"narHash": "sha256-z6QEu4ZFuHiqdOPbYss4/Q8B0BFhacR8ts6jO/F/aOU=",
|
||||
"owner": "nix-community",
|
||||
"repo": "fenix",
|
||||
"rev": "bf0d6f70f4c9a9cf8845f992105652173f4b617f",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-community",
|
||||
"repo": "fenix",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-compat": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
@ -31,21 +69,6 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils_2": {
|
||||
"locked": {
|
||||
"lastModified": 1659877975,
|
||||
"narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"gitignore": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
@ -64,14 +87,15 @@
|
||||
},
|
||||
"naersk": {
|
||||
"inputs": {
|
||||
"fenix": "fenix",
|
||||
"nixpkgs": "nixpkgs"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1671096816,
|
||||
"narHash": "sha256-ezQCsNgmpUHdZANDCILm3RvtO1xH8uujk/+EqNvzIOg=",
|
||||
"lastModified": 1752689277,
|
||||
"narHash": "sha256-uldUBFkZe/E7qbvxa3mH1ItrWZyT6w1dBKJQF/3ZSsc=",
|
||||
"owner": "nix-community",
|
||||
"repo": "naersk",
|
||||
"rev": "d998160d6a076cfe8f9741e56aeec7e267e3e114",
|
||||
"rev": "0e72363d0938b0208d6c646d10649164c43f4d64",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -82,25 +106,27 @@
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1678987615,
|
||||
"narHash": "sha256-lF4agoB7ysQGNHRXvOqxtSKIZrUZwClA85aASahQlYM=",
|
||||
"lastModified": 1752077645,
|
||||
"narHash": "sha256-HM791ZQtXV93xtCY+ZxG1REzhQenSQO020cu6rHtAPk=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "194c2aa446b2b059886bb68be15ef6736d5a8c31",
|
||||
"rev": "be9e214982e20b8310878ac2baa063a961c1bdf6",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"id": "nixpkgs",
|
||||
"type": "indirect"
|
||||
"owner": "NixOS",
|
||||
"ref": "nixpkgs-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs_2": {
|
||||
"locked": {
|
||||
"lastModified": 1679174867,
|
||||
"narHash": "sha256-fFxb8wN3bjOMvHPr63Iyzo3cuHhQzWW03UkckfTeBWU=",
|
||||
"lastModified": 1753135609,
|
||||
"narHash": "sha256-//xMo8MwSw1HoTnIk455J7NIJpsDqwVyD69MOXb7gZM=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "f5ec87b82832736f1624874fd34eb60c0b68bdd6",
|
||||
"rev": "5d9316e7fb2d6395818d506ef997530eba1545b7",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -111,6 +137,7 @@
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"cargo-wasm2map-src": "cargo-wasm2map-src",
|
||||
"flake-compat": "flake-compat",
|
||||
"flake-utils": "flake-utils",
|
||||
"gitignore": "gitignore",
|
||||
@ -119,19 +146,35 @@
|
||||
"rust-overlay": "rust-overlay"
|
||||
}
|
||||
},
|
||||
"rust-analyzer-src": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1752428706,
|
||||
"narHash": "sha256-EJcdxw3aXfP8Ex1Nm3s0awyH9egQvB2Gu+QEnJn2Sfg=",
|
||||
"owner": "rust-lang",
|
||||
"repo": "rust-analyzer",
|
||||
"rev": "591e3b7624be97e4443ea7b5542c191311aa141d",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "rust-lang",
|
||||
"ref": "nightly",
|
||||
"repo": "rust-analyzer",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"rust-overlay": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils_2",
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1678397831,
|
||||
"narHash": "sha256-7xbxSoiht8G+Zgz55R0ILPsTdbnksILCDMIxeg8Buns=",
|
||||
"lastModified": 1750964660,
|
||||
"narHash": "sha256-YQ6EyFetjH1uy5JhdhRdPe6cuNXlYpMAQePFfZj4W7M=",
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"rev": "bdf08e2f43488283eeb25b4a7e7ecba9147a955c",
|
||||
"rev": "04f0fcfb1a50c63529805a798b4b5c21610ff390",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
52 flake.nix
@ -2,17 +2,18 @@
  description = "kitchen";
  # Pin nixpkgs
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs";
    gitignore = { url = "github:hercules-ci/gitignore.nix"; flake = false; };
    flake-utils.url = "github:numtide/flake-utils";
    rust-overlay = {
      url = "github:oxalica/rust-overlay?ref=stable";
      inputs.nixpkgs.follows = "nixpkgs";
    };
    naersk.url = "github:nix-community/naersk";
    flake-compat = { url = github:edolstra/flake-compat; flake = false; };
    nixpkgs.url = "github:NixOS/nixpkgs";
    gitignore = { url = "github:hercules-ci/gitignore.nix"; flake = false; };
    flake-utils.url = "github:numtide/flake-utils";
    rust-overlay = {
      url = "github:oxalica/rust-overlay?ref=stable";
      inputs.nixpkgs.follows = "nixpkgs";
    };
    naersk.url = "github:nix-community/naersk";
    flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
    cargo-wasm2map-src = { url = "github:mtolmacs/wasm2map"; flake = false; };
  };
  outputs = {self, nixpkgs, flake-utils, rust-overlay, naersk, gitignore, flake-compat}:
  outputs = {nixpkgs, flake-utils, rust-overlay, naersk, cargo-wasm2map-src, ...}:
    let
      kitchenGen = (import ./nix/kitchen/default.nix);
      kitchenWasmGen = (import ./nix/kitchenWasm/default.nix);
@ -25,7 +26,7 @@
      let
        overlays = [ rust-overlay.overlays.default ];
        pkgs = import nixpkgs { inherit system overlays; };
        rust-wasm = pkgs.rust-bin.stable."1.68.0".default.override {
        rust-wasm = pkgs.rust-bin.stable."1.87.0".default.override {
          extensions = [ "rust-src" ];
          # Add wasm32 as an extra target besides the native target.
          targets = [ "wasm32-unknown-unknown" ];
@ -42,9 +43,23 @@
        wasm-pack = wasm-packGen {
          inherit rust-wasm naersk-lib pkgs;
        };
        cargo-wasm2map = naersk-lib.buildPackage {
          pname = "cargo-wasm2map";
          version = "v0.1.0";
          build-inputs = [ rust-wasm ];
          src = cargo-wasm2map-src;
          cargoBuildOptions = opts: opts ++ ["-p" "cargo-wasm2map" ];
        };
        wasm-bindgen = pkgs.callPackage wasm-bindgenGen { inherit pkgs; };
        kitchenWasm = kitchenWasmGen {
          inherit pkgs rust-wasm wasm-bindgen version;
          inherit pkgs rust-wasm wasm-bindgen version cargo-wasm2map;
          lockFile = ./Cargo.lock;
          outputHashes = {
            # I'm maintaining some patches for these so the lockfile hashes are a little
            # incorrect. We override those here.
            "wasm-web-component-0.2.0" = "sha256-quuPgzGb2F96blHmD3BAUjsWQYbSyJGZl27PVrwL92k=";
            "sycamore-0.8.2" = "sha256-D968+8C5EelGGmot9/LkAlULZOf/Cr+1WYXRCMwb1nQ=";
          };
        };
        kitchen = (kitchenGen {
          inherit pkgs version naersk-lib kitchenWasm rust-wasm;
@ -54,8 +69,15 @@
          root = ./.;
        });
        kitchenWasmDebug = kitchenWasmGen {
          inherit pkgs rust-wasm wasm-bindgen version;
          features = "--features debug_logs";
          inherit pkgs rust-wasm wasm-bindgen version cargo-wasm2map;
          lockFile = ./Cargo.lock;
          outputHashes = {
            # I'm maintaining some patches for these so the lockfile hashes are a little
            # incorrect. We override those here.
            "wasm-web-component-0.2.0" = "sha256-quuPgzGb2F96blHmD3BAUjsWQYbSyJGZl27PVrwL92k=";
            "sycamore-0.8.2" = "sha256-D968+8C5EelGGmot9/LkAlULZOf/Cr+1WYXRCMwb1nQ=";
          };
          #features = "--features debug_logs";
        };
        kitchenDebug = (kitchenGen {
          inherit pkgs version naersk-lib rust-wasm;
@ -82,7 +104,7 @@
          program = "${kitchen}/bin/kitchen";
        };
        devShell = pkgs.callPackage ./nix/devShell/default.nix {
          inherit rust-wasm wasm-bindgen;
          inherit rust-wasm wasm-bindgen cargo-wasm2map;
          wasm-pack-hermetic = wasm-pack;
        };
      }
38
kitchen/.sqlx/query-01018c919131848f8fa907a1356a1356b2aa6ca0912de8a296f5fef3486b5ff9.json
generated
Normal file
@ -0,0 +1,38 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "select recipe_id, recipe_text, category, serving_count from recipes where user_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "recipe_id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "recipe_text",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "category",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "serving_count",
|
||||
"ordinal": 3,
|
||||
"type_info": "Int64"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "01018c919131848f8fa907a1356a1356b2aa6ca0912de8a296f5fef3486b5ff9"
|
||||
}
|
20
kitchen/.sqlx/query-104f07472670436d3eee1733578bbf0c92dc4f965d3d13f9bf4bfbc92958c5b6.json
generated
Normal file
@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "select password_hashed from users where id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "password_hashed",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "104f07472670436d3eee1733578bbf0c92dc4f965d3d13f9bf4bfbc92958c5b6"
|
||||
}
|
26
kitchen/.sqlx/query-10de1e9950d7d3ae7f017b9175a1cee4ff7fcbc7403a39ea02930c75b4b9160a.json
generated
Normal file
@ -0,0 +1,26 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "with latest_dates as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes\n where user_id = ?\n group by user_id\n)\n\nselect\n extra_items.name,\n extra_items.amt\nfrom latest_dates\ninner join extra_items on\n latest_dates.user_id = extra_items.user_id\n and latest_dates.plan_date = extra_items.plan_date",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "amt",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "10de1e9950d7d3ae7f017b9175a1cee4ff7fcbc7403a39ea02930c75b4b9160a"
|
||||
}
|
12
kitchen/.sqlx/query-10e1c111a16d647a106a3147f4e61e34b0176860ca99cb62cb43dc72550ad990.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "delete from modified_amts where user_id = ? and plan_date = ?",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "10e1c111a16d647a106a3147f4e61e34b0176860ca99cb62cb43dc72550ad990"
|
||||
}
|
12
kitchen/.sqlx/query-160a9dfccf2e91a37d81f75eba21ec73105a7453c4f1fe76a430d04e525bc6cd.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into filtered_ingredients(user_id, name, form, measure_type, plan_date)\n values (?, ?, ?, ?, date()) on conflict(user_id, name, form, measure_type, plan_date) DO NOTHING",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "160a9dfccf2e91a37d81f75eba21ec73105a7453c4f1fe76a430d04e525bc6cd"
|
||||
}
|
32
kitchen/.sqlx/query-19832e3582c05ed49c676fde33cde64274379a83a8dd130f6eec96c1d7250909.json
generated
Normal file
@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "select plan_date as \"plan_date: NaiveDate\", recipe_id, count\nfrom plan_recipes\nwhere\n user_id = ?\n and date(plan_date) > ?\norder by user_id, plan_date",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "plan_date: NaiveDate",
|
||||
"ordinal": 0,
|
||||
"type_info": "Date"
|
||||
},
|
||||
{
|
||||
"name": "recipe_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "count",
|
||||
"ordinal": 2,
|
||||
"type_info": "Int64"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "19832e3582c05ed49c676fde33cde64274379a83a8dd130f6eec96c1d7250909"
|
||||
}
|
12
kitchen/.sqlx/query-1b4a7250e451991ee7e642c6389656814e0dd00c94e59383c02af6313bc76213.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into staples (user_id, content) values (?, ?)\n on conflict(user_id) do update set content = excluded.content",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "1b4a7250e451991ee7e642c6389656814e0dd00c94e59383c02af6313bc76213"
|
||||
}
|
12
kitchen/.sqlx/query-1b6fd91460bef61cf02f210404a4ca57b520c969d1f9613e7101ee6aa7a9962a.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into modified_amts(user_id, name, form, measure_type, amt, plan_date)\n values (?, ?, ?, ?, ?, ?) on conflict (user_id, name, form, measure_type, plan_date) do update set amt=excluded.amt",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 6
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "1b6fd91460bef61cf02f210404a4ca57b520c969d1f9613e7101ee6aa7a9962a"
|
||||
}
|
12
kitchen/.sqlx/query-23beb05e40cf011170182d4e98cdf1faa3d8df6e5956e471245e666f32e56962.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "delete from filtered_ingredients where user_id = ? and plan_date = ?",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "23beb05e40cf011170182d4e98cdf1faa3d8df6e5956e471245e666f32e56962"
|
||||
}
|
12
kitchen/.sqlx/query-2582522f8ca9f12eccc70a3b339d9030aee0f52e62d6674cfd3862de2a68a177.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into category_mappings\n (user_id, ingredient_name, category_name)\n values (?, ?, ?)\n on conflict (user_id, ingredient_name)\n do update set category_name=excluded.category_name\n",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 3
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "2582522f8ca9f12eccc70a3b339d9030aee0f52e62d6674cfd3862de2a68a177"
|
||||
}
|
12
kitchen/.sqlx/query-27aa0a21f534cdf580841fa111136fc26cf1a0ca4ddb308c12f3f8f5a62d6178.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "delete from plan_table where user_id = ? and plan_date = ?",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "27aa0a21f534cdf580841fa111136fc26cf1a0ca4ddb308c12f3f8f5a62d6178"
|
||||
}
|
12
kitchen/.sqlx/query-288535e7b9e1f02ad1b677e3dddc85f38c0766ce16d26fc1bdd2bf90ab9a7f7c.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into plan_table (user_id, plan_date) values (?, ?)\n on conflict (user_id, plan_date) do nothing;",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "288535e7b9e1f02ad1b677e3dddc85f38c0766ce16d26fc1bdd2bf90ab9a7f7c"
|
||||
}
|
32
kitchen/.sqlx/query-2e076acd2405d234daaa866e5a2ac1e10989fc8d2820f90aa722464a7b17db6b.json
generated
Normal file
@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "select plan_date as \"plan_date: NaiveDate\", recipe_id, count\n from plan_recipes\nwhere\n user_id = ?\n and plan_date = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "plan_date: NaiveDate",
|
||||
"ordinal": 0,
|
||||
"type_info": "Date"
|
||||
},
|
||||
{
|
||||
"name": "recipe_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "count",
|
||||
"ordinal": 2,
|
||||
"type_info": "Int64"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "2e076acd2405d234daaa866e5a2ac1e10989fc8d2820f90aa722464a7b17db6b"
|
||||
}
|
26
kitchen/.sqlx/query-37f382be1b53efd2f79a0d59ae6a8717f88a86908a7a4128d5ed7339147ca59d.json
generated
Normal file
@ -0,0 +1,26 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "select ingredient_name, category_name from category_mappings where user_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "ingredient_name",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "category_name",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "37f382be1b53efd2f79a0d59ae6a8717f88a86908a7a4128d5ed7339147ca59d"
|
||||
}
|
12
kitchen/.sqlx/query-3caefb86073c47b5dd5d05f639ddef2f7ed2d1fd80f224457d1ec34243cc56c7.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into extra_items (user_id, name, plan_date, amt)\nvalues (?, ?, date(), ?)\non conflict (user_id, name, plan_date) do update set amt=excluded.amt",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 3
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "3caefb86073c47b5dd5d05f639ddef2f7ed2d1fd80f224457d1ec34243cc56c7"
|
||||
}
|
38
kitchen/.sqlx/query-3e43f06f5c2e959f66587c8d74696d6db27d89fd2f7d7e1ed6fa5016b4bd1a91.json
generated
Normal file
@ -0,0 +1,38 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "with latest_dates as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes\n where user_id = ?\n group by user_id\n)\n\nselect\n modified_amts.name,\n modified_amts.form,\n modified_amts.measure_type,\n modified_amts.amt\nfrom latest_dates\ninner join modified_amts on\n latest_dates.user_id = modified_amts.user_id\n and latest_dates.plan_date = modified_amts.plan_date",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "form",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "measure_type",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "amt",
|
||||
"ordinal": 3,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "3e43f06f5c2e959f66587c8d74696d6db27d89fd2f7d7e1ed6fa5016b4bd1a91"
|
||||
}
|
26
kitchen/.sqlx/query-4237ff804f254c122a36a14135b90434c6576f48d3a83245503d702552ea9f30.json
generated
Normal file
@ -0,0 +1,26 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "select\n name,\n amt\nfrom extra_items\nwhere\n user_id = ?\n and plan_date = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "amt",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "4237ff804f254c122a36a14135b90434c6576f48d3a83245503d702552ea9f30"
|
||||
}
|
12
kitchen/.sqlx/query-5883c4a57def93cca45f8f9d81c8bba849547758217cd250e7ab28cc166ab42b.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into filtered_ingredients(user_id, name, form, measure_type, plan_date)\n values (?, ?, ?, ?, ?) on conflict(user_id, name, form, measure_type, plan_date) DO NOTHING",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 5
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "5883c4a57def93cca45f8f9d81c8bba849547758217cd250e7ab28cc166ab42b"
|
||||
}
|
12
kitchen/.sqlx/query-5d743897fb0d8fd54c3708f1b1c6e416346201faa9e28823c1ba5a421472b1fa.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into users (id, password_hashed) values (?, ?)",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "5d743897fb0d8fd54c3708f1b1c6e416346201faa9e28823c1ba5a421472b1fa"
|
||||
}
|
20
kitchen/.sqlx/query-64af3f713eb4c61ac02cab2dfea83d0ed197e602e99079d4d32cb38d677edf2e.json
generated
Normal file
@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "select content from staples where user_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "content",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "64af3f713eb4c61ac02cab2dfea83d0ed197e602e99079d4d32cb38d677edf2e"
|
||||
}
|
38
kitchen/.sqlx/query-699ff0f0d4d4c6e26a21c1922a5b5249d89ed1677680a2276899a7f8b26344ee.json
generated
Normal file
@ -0,0 +1,38 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "select\n modified_amts.name,\n modified_amts.form,\n modified_amts.measure_type,\n modified_amts.amt\nfrom modified_amts\nwhere\n user_id = ?\n and plan_date = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "form",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "measure_type",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "amt",
|
||||
"ordinal": 3,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "699ff0f0d4d4c6e26a21c1922a5b5249d89ed1677680a2276899a7f8b26344ee"
|
||||
}
|
12
kitchen/.sqlx/query-6c43908d90f229b32ed8b1b076be9b452a995e1b42ba2554e947c515b031831a.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "delete from recipes where user_id = ? and recipe_id = ?",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "6c43908d90f229b32ed8b1b076be9b452a995e1b42ba2554e947c515b031831a"
|
||||
}
|
12
kitchen/.sqlx/query-6e28698330e42fd6c87ba1e6f1deb664c0d3995caa2b937ceac8c908e98aded6.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into modified_amts(user_id, name, form, measure_type, amt, plan_date)\n values (?, ?, ?, ?, ?, date()) on conflict (user_id, name, form, measure_type, plan_date) do update set amt=excluded.amt",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 5
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "6e28698330e42fd6c87ba1e6f1deb664c0d3995caa2b937ceac8c908e98aded6"
|
||||
}
|
12
kitchen/.sqlx/query-6f11d90875a6230766a5f9bd1d67665dc4d00c13d7e81b0d18d60baa67987da9.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "delete from extra_items where user_id = ? and plan_date = ?",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "6f11d90875a6230766a5f9bd1d67665dc4d00c13d7e81b0d18d60baa67987da9"
|
||||
}
|
12
kitchen/.sqlx/query-7578157607967a6a4c60f12408c5d9900d15b429a49681a4cae4e02d31c524ec.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "delete from sessions where id = ?",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "7578157607967a6a4c60f12408c5d9900d15b429a49681a4cae4e02d31c524ec"
|
||||
}
|
32
kitchen/.sqlx/query-7695a0602395006f9b76ecd4d0cb5ecd5dee419b71b3b0b9ea4f47a83f3df41a.json
generated
Normal file
@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "select\n filtered_ingredients.name,\n filtered_ingredients.form,\n filtered_ingredients.measure_type\nfrom filtered_ingredients\nwhere\n user_id = ?\n and plan_date = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "form",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "measure_type",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "7695a0602395006f9b76ecd4d0cb5ecd5dee419b71b3b0b9ea4f47a83f3df41a"
|
||||
}
|
12
kitchen/.sqlx/query-83824ea638cb64c524f5c8984ef6ef28dfe781f0abf168abc4ae9a51e6e0ae88.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into plan_recipes (user_id, plan_date, recipe_id, count) values (?, ?, ?, ?)\n on conflict (user_id, plan_date, recipe_id) do update set count=excluded.count;",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "83824ea638cb64c524f5c8984ef6ef28dfe781f0abf168abc4ae9a51e6e0ae88"
|
||||
}
|
12
kitchen/.sqlx/query-8490e1bb40879caed62ac1c38cb9af48246f3451b6f7f1e1f33850f1dbe25f58.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into categories (user_id, category_text) values (?, ?)\n on conflict(user_id) do update set category_text=excluded.category_text",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "8490e1bb40879caed62ac1c38cb9af48246f3451b6f7f1e1f33850f1dbe25f58"
|
||||
}
|
20
kitchen/.sqlx/query-928a479ca0f765ec7715bf8784c5490e214486edbf5b78fd501823feb328375b.json
generated
Normal file
@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "select session_value from sessions where id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "session_value",
|
||||
"ordinal": 0,
|
||||
"type_info": "Blob"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "928a479ca0f765ec7715bf8784c5490e214486edbf5b78fd501823feb328375b"
|
||||
}
|
12
kitchen/.sqlx/query-93af0c367a0913d49c92aa69022fa30fc0564bd4dbab7f3ae78673a01439cd6e.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "delete from plan_recipes where user_id = ? and plan_date = ?",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "93af0c367a0913d49c92aa69022fa30fc0564bd4dbab7f3ae78673a01439cd6e"
|
||||
}
|
12
kitchen/.sqlx/query-9ad4acd9b9d32c9f9f441276aa71a17674fe4d65698848044778bd4aef77d42d.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into sessions (id, session_value) values (?, ?)",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "9ad4acd9b9d32c9f9f441276aa71a17674fe4d65698848044778bd4aef77d42d"
|
||||
}
|
32
kitchen/.sqlx/query-ad3408cd773dd8f9308255ec2800171638a1aeda9817c57fb8360f97115f8e97.json
generated
Normal file
@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "with max_date as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes group by user_id\n)\n\nselect plan_recipes.plan_date as \"plan_date: NaiveDate\", plan_recipes.recipe_id, plan_recipes.count\n from plan_recipes\n inner join max_date on plan_recipes.user_id = max_date.user_id\nwhere\n plan_recipes.user_id = ?\n and plan_recipes.plan_date = max_date.plan_date",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "plan_date: NaiveDate",
|
||||
"ordinal": 0,
|
||||
"type_info": "Date"
|
||||
},
|
||||
{
|
||||
"name": "recipe_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "count",
|
||||
"ordinal": 2,
|
||||
"type_info": "Int64"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "ad3408cd773dd8f9308255ec2800171638a1aeda9817c57fb8360f97115f8e97"
|
||||
}
|
12
kitchen/.sqlx/query-ba07658eb11f9d6cfdb5dbee4496b2573f1e51f4b4d9ae760eca3b977649b5c7.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into extra_items (user_id, name, amt, plan_date)\nvalues (?, ?, ?, ?)\non conflict (user_id, name, plan_date) do update set amt=excluded.amt",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "ba07658eb11f9d6cfdb5dbee4496b2573f1e51f4b4d9ae760eca3b977649b5c7"
|
||||
}
|
20
kitchen/.sqlx/query-c988364f9f83f4fa8bd0e594bab432ee7c9ec47ca40f4d16e5e2a8763653f377.json
generated
Normal file
@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "select category_text from categories where user_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "category_text",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "c988364f9f83f4fa8bd0e594bab432ee7c9ec47ca40f4d16e5e2a8763653f377"
|
||||
}
|
12
kitchen/.sqlx/query-d84685a82585c5e4ae72c86ba1fe6e4a7241c4c3c9e948213e5849d956132bad.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "delete from sessions",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "d84685a82585c5e4ae72c86ba1fe6e4a7241c4c3c9e948213e5849d956132bad"
|
||||
}
|
32
kitchen/.sqlx/query-e38183e2e16afa308672044e5d314296d7cd84c1ffedcbfe790743547dc62de8.json
generated
Normal file
@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "with latest_dates as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes\n where user_id = ?\n group by user_id\n)\n\nselect\n filtered_ingredients.name,\n filtered_ingredients.form,\n filtered_ingredients.measure_type\nfrom latest_dates\ninner join filtered_ingredients on\n latest_dates.user_id = filtered_ingredients.user_id\n and latest_dates.plan_date = filtered_ingredients.plan_date",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "form",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "measure_type",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "e38183e2e16afa308672044e5d314296d7cd84c1ffedcbfe790743547dc62de8"
|
||||
}
|
12
kitchen/.sqlx/query-eb99a37e18009e0dd46caccacea57ba0b25510d80a4e4a282a5ac2be50bba81c.json
generated
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "insert into recipes (user_id, recipe_id, recipe_text, category, serving_count) values (?, ?, ?, ?, ?)\n on conflict(user_id, recipe_id) do update set recipe_text=excluded.recipe_text, category=excluded.category",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 5
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "eb99a37e18009e0dd46caccacea57ba0b25510d80a4e4a282a5ac2be50bba81c"
|
||||
}
|
38
kitchen/.sqlx/query-ee0491c7d1a31ef80d7abe6ea4c9a8b0618dba58a0a8bceef7bdafec98ccd543.json
generated
Normal file
@ -0,0 +1,38 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "select recipe_id, recipe_text, category, serving_count from recipes where user_id = ? and recipe_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "recipe_id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "recipe_text",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "category",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "serving_count",
|
||||
"ordinal": 3,
|
||||
"type_info": "Int64"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "ee0491c7d1a31ef80d7abe6ea4c9a8b0618dba58a0a8bceef7bdafec98ccd543"
|
||||
}
|
20
kitchen/.sqlx/query-fd818a6b1c800c2014b5cfe8a923ac9228832b11d7575585cf7930fbf91306d1.json
generated
Normal file
@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "select distinct plan_date as \"plan_date: NaiveDate\" from plan_table\nwhere user_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "plan_date: NaiveDate",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "fd818a6b1c800c2014b5cfe8a923ac9228832b11d7575585cf7930fbf91306d1"
|
||||
}
|
@ -18,14 +18,19 @@ async-trait = "0.1.57"
async-session = "3.0.0"
ciborium = "0.2.0"
tower = "0.4.13"
serde = "1.0.144"
cookie = "0.17.0"
chrono = "0.4.22"
metrics = "0.20.1"
metrics-exporter-prometheus = "0.11.0"
futures = "0.3"
metrics-process = "1.0.8"

[dependencies.chrono]
version = "0.4.22"
features = ["serde"]

[dependencies.serde]
version = "1.0.204"

[dependencies.argon2]
version = "0.5.0"

@ -62,5 +67,5 @@ version = "1.12.0"
features = ["tokio1"]

[dependencies.sqlx]
version = "0.6.2"
features = ["sqlite", "runtime-async-std-rustls", "offline", "chrono"]
version = "0.7"
features = ["sqlite", "runtime-async-std", "tls-rustls", "chrono"]
@ -0,0 +1,2 @@
-- Add down migration script here
ALTER TABLE recipes DROP COLUMN serving_count;
2 kitchen/migrations/20240701002811_recipe-servings.up.sql (new file)
@ -0,0 +1,2 @@
-- Add up migration script here
ALTER TABLE recipes ADD COLUMN serving_count INT;
@ -1,4 +1,3 @@
use std::collections::BTreeMap;
// Copyright 2022 Jeremy Wall
//
// Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +11,7 @@ use std::collections::BTreeMap;
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::BTreeMap;
use std::path::PathBuf;
use std::sync::Arc;
use std::{collections::BTreeSet, net::SocketAddr};
@ -22,6 +22,7 @@ use tracing::{debug, instrument};

use super::RecipeEntry;

#[allow(dead_code)]
#[derive(Debug)]
pub struct Error(String);

@ -98,7 +99,7 @@ impl AsyncFileStore {
                let file_name = entry.file_name().to_string_lossy().to_string();
                debug!("adding recipe file {}", file_name);
                let recipe_contents = read_to_string(entry.path()).await?;
                entry_vec.push(RecipeEntry(file_name, recipe_contents, None));
                entry_vec.push(RecipeEntry::new(file_name, recipe_contents));
            } else {
                warn!(
                    file = %entry.path().to_string_lossy(),
@ -118,11 +119,12 @@ impl AsyncFileStore {
        if recipe_path.exists().await && recipe_path.is_file().await {
            debug!("Found recipe file {}", recipe_path.to_string_lossy());
            let recipe_contents = read_to_string(recipe_path).await?;
            return Ok(Some(RecipeEntry(
                id.as_ref().to_owned(),
                recipe_contents,
                None,
            )));
            return Ok(Some(RecipeEntry {
                id: id.as_ref().to_owned(),
                text: recipe_contents,
                category: None,
                serving_count: None,
            }));
        } else {
            return Ok(None);
        }
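The hunks above replace RecipeEntry's positional tuple constructor with named struct fields. A hedged sketch of what the updated type in the recipes crate presumably looks like, inferred only from the fields and the RecipeEntry::new call used in this diff (the real definition may differ in derives, field types, or helpers):

// Inferred from the fields used in this diff; not copied from the recipes crate.
#[derive(Debug, Clone)]
pub struct RecipeEntry {
    pub id: String,
    pub text: String,
    pub category: Option<String>,
    pub serving_count: Option<i64>,
}

impl RecipeEntry {
    // Convenience constructor used by the file store when only id and text are known.
    pub fn new(id: impl Into<String>, text: impl Into<String>) -> Self {
        Self {
            id: id.into(),
            text: text.into(),
            category: None,
            serving_count: None,
        }
    }
}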
@ -14,6 +14,7 @@
use async_std::sync::Arc;
use std::collections::BTreeSet;
use std::str::FromStr;
use std::time::Duration;
use std::{collections::BTreeMap, path::Path};

use argon2::{
@ -259,6 +260,7 @@ impl SqliteStore {
        std::fs::create_dir_all(&path)?;
        let url = format!("sqlite://{}/store.db", path.as_ref().to_string_lossy());
        let options = SqliteConnectOptions::from_str(&url)?
            .busy_timeout(Duration::from_secs(5))
            .journal_mode(SqliteJournalMode::Wal)
            .create_if_missing(true);
        info!(?options, "Connecting to sqlite db");
@ -429,20 +431,10 @@ impl APIStore for SqliteStore {
        user_id: S,
        id: S,
    ) -> Result<Option<RecipeEntry>> {
        // NOTE(jwall): We allow dead code becaue Rust can't figure out that
        // this code is actually constructed but it's done via the query_as
        // macro.
        #[allow(dead_code)]
        struct RecipeRow {
            pub recipe_id: String,
            pub recipe_text: Option<String>,
            pub category: Option<String>,
        }
        let id = id.as_ref();
        let user_id = user_id.as_ref();
        let entry = sqlx::query_as!(
            RecipeRow,
            "select recipe_id, recipe_text, category from recipes where user_id = ? and recipe_id = ?",
        let entry = sqlx::query!(
            "select recipe_id, recipe_text, category, serving_count from recipes where user_id = ? and recipe_id = ?",
            user_id,
            id,
        )
@ -450,40 +442,32 @@ impl APIStore for SqliteStore {
        .await?
        .iter()
        .map(|row| {
            RecipeEntry(
                row.recipe_id.clone(),
                row.recipe_text.clone().unwrap_or_else(|| String::new()),
                row.category.clone()
            )
            RecipeEntry {
                id: row.recipe_id.clone(),
                text: row.recipe_text.clone().unwrap_or_else(|| String::new()),
                category: row.category.clone(),
                serving_count: row.serving_count.clone(),
            }
        })
        .nth(0);
        Ok(entry)
    }

    async fn get_recipes_for_user(&self, user_id: &str) -> Result<Option<Vec<RecipeEntry>>> {
        // NOTE(jwall): We allow dead code becaue Rust can't figure out that
        // this code is actually constructed but it's done via the query_as
        // macro.
        #[allow(dead_code)]
        struct RecipeRow {
            pub recipe_id: String,
            pub recipe_text: Option<String>,
            pub category: Option<String>,
        }
        let rows = sqlx::query_as!(
            RecipeRow,
            "select recipe_id, recipe_text, category from recipes where user_id = ?",
        let rows = sqlx::query!(
            "select recipe_id, recipe_text, category, serving_count from recipes where user_id = ?",
            user_id,
        )
        .fetch_all(self.pool.as_ref())
        .await?
        .iter()
        .map(|row| {
            RecipeEntry(
                row.recipe_id.clone(),
                row.recipe_text.clone().unwrap_or_else(|| String::new()),
                row.category.clone(),
            )
            RecipeEntry {
                id: row.recipe_id.clone(),
                text: row.recipe_text.clone().unwrap_or_else(|| String::new()),
                category: row.category.clone(),
                serving_count: row.serving_count.clone(),
            }
        })
        .collect();
        Ok(Some(rows))
@ -498,13 +482,15 @@ impl APIStore for SqliteStore {
        let recipe_id = entry.recipe_id().to_owned();
        let recipe_text = entry.recipe_text().to_owned();
        let category = entry.category();
        let serving_count = entry.serving_count();
        sqlx::query!(
            "insert into recipes (user_id, recipe_id, recipe_text, category) values (?, ?, ?, ?)
            "insert into recipes (user_id, recipe_id, recipe_text, category, serving_count) values (?, ?, ?, ?, ?)
    on conflict(user_id, recipe_id) do update set recipe_text=excluded.recipe_text, category=excluded.category",
            user_id,
            recipe_id,
            recipe_text,
            category,
            serving_count,
        )
        .execute(self.pool.as_ref())
        .await?;
@ -520,7 +506,7 @@ impl APIStore for SqliteStore {
                user_id,
                recipe_id,
            )
            .execute(&mut transaction)
            .execute(&mut *transaction)
            .await?;
        }
        transaction.commit().await?;
@ -552,10 +538,10 @@ impl APIStore for SqliteStore {
            user_id,
            date,
        )
        .execute(&mut transaction)
        .execute(&mut *transaction)
        .await?;
        sqlx::query_file!("src/web/storage/init_meal_plan.sql", user_id, date)
            .execute(&mut transaction)
            .execute(&mut *transaction)
            .await?;
        for (id, count) in recipe_counts {
            sqlx::query_file!(
@ -565,7 +551,7 @@ impl APIStore for SqliteStore {
                id,
                count
            )
            .execute(&mut transaction)
            .execute(&mut *transaction)
            .await?;
        }
        transaction.commit().await?;
@ -645,35 +631,35 @@ impl APIStore for SqliteStore {
            user_id,
            date
        )
        .execute(&mut transaction)
        .execute(&mut *transaction)
        .await?;
        sqlx::query!(
            "delete from plan_recipes where user_id = ? and plan_date = ?",
            user_id,
            date
        )
        .execute(&mut transaction)
        .execute(&mut *transaction)
        .await?;
        sqlx::query!(
            "delete from filtered_ingredients where user_id = ? and plan_date = ?",
            user_id,
            date
        )
        .execute(&mut transaction)
        .execute(&mut *transaction)
        .await?;
        sqlx::query!(
            "delete from modified_amts where user_id = ? and plan_date = ?",
            user_id,
            date
        )
        .execute(&mut transaction)
        .execute(&mut *transaction)
        .await?;
        sqlx::query!(
            "delete from extra_items where user_id = ? and plan_date = ?",
            user_id,
            date
        )
        .execute(&mut transaction)
        .execute(&mut *transaction)
        .await?;
        transaction.commit().await?;
        Ok(())
@ -921,7 +907,7 @@ impl APIStore for SqliteStore {
            user_id,
            date
        )
        .execute(&mut transaction)
        .execute(&mut *transaction)
        .await?;
        for key in filtered_ingredients {
            let name = key.name();
@ -935,7 +921,7 @@ impl APIStore for SqliteStore {
                measure_type,
                date,
            )
            .execute(&mut transaction)
            .execute(&mut *transaction)
            .await?;
        }
        sqlx::query!(
@ -943,7 +929,7 @@ impl APIStore for SqliteStore {
            user_id,
            date
        )
        .execute(&mut transaction)
        .execute(&mut *transaction)
        .await?;
        // store the modified amts
        for (key, amt) in modified_amts {
@ -960,7 +946,7 @@ impl APIStore for SqliteStore {
                amt,
                date,
            )
            .execute(&mut transaction)
            .execute(&mut *transaction)
            .await?;
        }
        sqlx::query!(
@ -968,7 +954,7 @@ impl APIStore for SqliteStore {
            user_id,
            date
        )
        .execute(&mut transaction)
        .execute(&mut *transaction)
        .await?;
        // Store the extra items
        for (name, amt) in extra_items {
@ -979,7 +965,7 @@ impl APIStore for SqliteStore {
                amt,
                date
            )
            .execute(&mut transaction)
            .execute(&mut *transaction)
            .await?;
        }
        transaction.commit().await?;
@ -1007,7 +993,7 @@ impl APIStore for SqliteStore {
                form,
                measure_type,
            )
            .execute(&mut transaction)
            .execute(&mut *transaction)
            .await?;
        }
        // store the modified amts
@ -1024,13 +1010,13 @@ impl APIStore for SqliteStore {
                measure_type,
                amt,
            )
            .execute(&mut transaction)
            .execute(&mut *transaction)
            .await?;
        }
        // Store the extra items
        for (name, amt) in extra_items {
            sqlx::query_file!("src/web/storage/store_extra_items.sql", user_id, name, amt)
                .execute(&mut transaction)
                .execute(&mut *transaction)
                .await?;
        }
        transaction.commit().await?;
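The repeated change from .execute(&mut transaction) to .execute(&mut *transaction) above follows from the sqlx 0.6 to 0.7 bump in kitchen/Cargo.toml: in 0.7 a Transaction no longer acts as an Executor directly, only the dereferenced connection it wraps does. A minimal sketch of the pattern, assuming a plain SqlitePool and using the runtime query API rather than the repository's query! macros (the helper name and bindings here are illustrative, not from the repository):

use sqlx::SqlitePool;

// Hypothetical helper showing the sqlx 0.7 transaction executor pattern.
async fn delete_plan_for_date(
    pool: &SqlitePool,
    user_id: &str,
    date: &str,
) -> Result<(), sqlx::Error> {
    let mut transaction = pool.begin().await?;
    sqlx::query("delete from plan_table where user_id = ? and plan_date = ?")
        .bind(user_id)
        .bind(date)
        // sqlx 0.6 accepted `&mut transaction` here; 0.7 wants the dereferenced
        // `&mut SqliteConnection` that the transaction wraps.
        .execute(&mut *transaction)
        .await?;
    transaction.commit().await?;
    Ok(())
}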
51 models/browser_state.als (new file)
@ -0,0 +1,51 @@
sig Id {}
sig Text {}

sig Recipe {
  , id: one Id
  , text: one Text
}

fact {
  no r1, r2: Recipe | (r1.id = r2.id) and (r1.text != r2.text)
  no r1, r2: Recipe | (r1 != r2) and (r1.id = r2.id)
}

sig Ingredient {}
sig Modifier {}
sig Amt {}

sig ModifiedInventory {
  , ingredient: one Ingredient
  , modifier: lone Modifier
  , amt: one Amt
}

fact {
  no mi1, mi2: ModifiedInventory | mi1 != mi2 && (mi1.ingredient = mi2.ingredient) and (mi1.modifier = mi2.modifier)
}

sig DeletedInventory {
  , ingredient: one Ingredient
  , modifier: lone Modifier
}

fact {
  no mi1, mi2: DeletedInventory | mi1 != mi2 && (mi1.ingredient = mi2.ingredient) and (mi1.modifier = mi2.modifier)
}

sig ExtraItems {
  , ingredient: one Ingredient
  , amt: one Amt
}

sig State {
  , recipes: some Recipe
  , modified: set ModifiedInventory
  , deleted: set DeletedInventory
  , extras: set ExtraItems
} {
  no rs: Recipe | rs not in recipes
}

run { } for 3 but exactly 2 State, 2 Modifier, exactly 3 ModifiedInventory, exactly 9 Ingredient
17 models/planning.d2 (new file)
@ -0,0 +1,17 @@
Meal Planning: {
  shape: sequence_diagram
  user: Cook; client: Kitchen frontend; kitchen: Kitchen backend

  user -> client: Start new meal Plan
  client -> kitchen: new plan created
  user -> client: Add recipe to meal plan
  client -> kitchen: Update meal plan with recipe
  client -> client: cache updated meal plan
  user -> client: Do inventory
  client -> kitchen: Store inventory mutations
  client -> client: cache inventory mutations
  user -> client: Undo mutation
  client -> kitchen: Store inventory mutations
  client -> client: cache inventory mutations
  user -> user: Cook recipes
}
125
models/planning.svg
Normal file
125
models/planning.svg
Normal file
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 94 KiB |
@ -1,8 +1,5 @@
|
||||
let
|
||||
lib = import ../lib/lib.nix;
|
||||
in
|
||||
{ pkgs, rust-wasm, wasm-pack-hermetic, wasm-bindgen }:
|
||||
{ pkgs, rust-wasm, wasm-pack-hermetic, wasm-bindgen, cargo-wasm2map }:
|
||||
with pkgs;
|
||||
mkShell {
|
||||
buildInputs = (lib.darwin-sdk pkgs) ++ (with pkgs; [wasm-bindgen wasm-pack-hermetic llvm clang rust-wasm]);
|
||||
buildInputs = (with pkgs; [wasm-bindgen wasm-pack-hermetic llvm clang rust-wasm binaryen cargo-wasm2map]);
|
||||
}
|
@ -1,6 +1,3 @@
|
||||
let
|
||||
lib = import ../lib/lib.nix;
|
||||
in
|
||||
{pkgs ? (import <nixpkgs>) {},
|
||||
# Because it's a workspace we need the other crates available as source
|
||||
root,
|
||||
@ -15,7 +12,7 @@ with pkgs;
|
||||
inherit version;
|
||||
buildInputs = [ rust-wasm libclang ];
|
||||
# However the crate we are building has it's root in specific crate.
|
||||
nativeBuildInputs = (lib.darwin-sdk pkgs) ++ [llvm clang rust-bindgen];
|
||||
nativeBuildInputs = [llvm clang rust-bindgen];
|
||||
src = root;
|
||||
cargoBuildOptions = opts: opts ++ ["-p" "${pname}" ];
|
||||
postPatch = ''
|
||||
|
@ -1,29 +1,25 @@
|
||||
{pkgs? (import <nixpkgs>) {},
|
||||
version,
|
||||
features ? "",
|
||||
rust-wasm,
|
||||
wasm-bindgen,
|
||||
lockFile,
|
||||
outputHashes,
|
||||
cargo-wasm2map,
|
||||
}:
|
||||
with pkgs;
|
||||
let
|
||||
pname = "kitchen-wasm";
|
||||
src = ./../..;
|
||||
lockFile = ./../../Cargo.lock;
|
||||
# NOTE(jwall): Because we use wasm-pack directly below we need
|
||||
# the cargo dependencies to already be installed.
|
||||
cargoDeps = (pkgs.rustPlatform.importCargoLock { inherit lockFile; outputHashes = {
|
||||
# I'm maintaining some patches for these so the lockfile hashes are a little
|
||||
# incorrect. We override those here.
|
||||
"sycamore-0.8.2" = "sha256-D968+8C5EelGGmot9/LkAlULZOf/Cr+1WYXRCMwb1nQ=";
|
||||
"sqlx-0.6.2" = "sha256-X/LFvtzRfiOIEZJiVzmFvvULPpjhqvI99pSwH7a//GM=";
|
||||
};
|
||||
});
|
||||
cargoDeps = (pkgs.rustPlatform.importCargoLock { inherit lockFile outputHashes; });
|
||||
in
|
||||
# TODO(zaphar): I should actually be leveraging naersklib.buildPackage with a postInstall for the optimization and bindgen
|
||||
stdenv.mkDerivation {
|
||||
inherit src pname;
|
||||
version = version;
|
||||
# we need wasmb-bindgen v0.2.83 exactly
|
||||
buildInputs = [ rust-wasm wasm-bindgen wasm-pack binaryen];
|
||||
buildInputs = [ rust-wasm wasm-bindgen wasm-pack binaryen cargo-wasm2map];
|
||||
propagatedBuildInputs = [ rust-wasm wasm-bindgen wasm-pack binaryen];
|
||||
phases = [ "postUnpackPhase" "buildPhase"];
|
||||
postUnpackPhase = ''
|
||||
@ -31,14 +27,17 @@ stdenv.mkDerivation {
|
||||
cp -r ./cargo-vendor-dir/.cargo ./
|
||||
cp -r $src/* ./
|
||||
'';
|
||||
# TODO(jwall): Build this from the root rather than the src.
|
||||
# TODO(jwall): Use the makefile for as much of this as possible.
|
||||
buildPhase = ''
|
||||
echo building with wasm-pack
|
||||
mkdir -p $out
|
||||
cd web
|
||||
cp -r static $out
|
||||
RUST_LOG=info wasm-pack build --mode no-install --release --target web --out-dir $out ${features};
|
||||
export project=kitchen
|
||||
sh ../scripts/wasm-build.sh release
|
||||
sh ../scripts/wasm-sourcemap.sh
|
||||
cp -r index.html $out
|
||||
cp -r favicon.ico $out
|
||||
rm -rf $out/release
|
||||
rm -rf $out/wasm32-unknown-unknown
|
||||
'';
|
||||
}
|
@ -1,7 +0,0 @@
|
||||
{
|
||||
darwin-sdk = pkgs: with pkgs; (if stdenv.isDarwin then (with darwin.apple_sdk.frameworks; [
|
||||
xcbuild
|
||||
Security
|
||||
fixDarwinDylibNames
|
||||
]) else [ ]);
|
||||
}
|
@ -1,6 +1,3 @@
|
||||
let
|
||||
my-lib = import ../lib/lib.nix;
|
||||
in
|
||||
{ pkgs
|
||||
, lib
|
||||
, rustPlatform
|
||||
@ -8,9 +5,7 @@ in
|
||||
, nodejs
|
||||
, pkg-config
|
||||
, openssl
|
||||
, stdenv
|
||||
, curl
|
||||
, runCommand
|
||||
}:
|
||||
|
||||
# This package is special so we don't use the naersk infrastructure to build it.
|
||||
@ -20,21 +15,22 @@ rustPlatform.buildRustPackage rec {
|
||||
pname = "wasm-bindgen-cli";
|
||||
# NOTE(jwall): This must exactly match the version of the wasm-bindgen crate
|
||||
# we are using.
|
||||
version = "0.2.84";
|
||||
version = "0.2.89";
|
||||
|
||||
src = fetchCrate {
|
||||
inherit pname version;
|
||||
sha256 = "sha256-0rK+Yx4/Jy44Fw5VwJ3tG243ZsyOIBBehYU54XP/JGk=";
|
||||
sha256 = "sha256-IPxP68xtNSpwJjV2yNMeepAS0anzGl02hYlSTvPocz8=";
|
||||
};
|
||||
|
||||
cargoSha256 = "sha256-vcpxcRlW1OKoD64owFF6mkxSqmNrvY+y3Ckn5UwEQ50=";
|
||||
cargoHash = "sha256-EsGFW1f9+E5NnMadP/0rRzFCxVJQb0mlTLz/3zYQ5Ac=";
|
||||
|
||||
nativeBuildInputs = [ pkg-config ];
|
||||
|
||||
buildInputs = [ openssl curl ] ++ (my-lib.darwin-sdk pkgs);
|
||||
buildInputs = [ openssl curl ];
|
||||
|
||||
nativeCheckInputs = [ nodejs ];
|
||||
|
||||
# other tests require it to be ran in the wasm-bindgen monorepo
|
||||
cargoTestFlags = [ "--test=interface-types" ];
|
||||
#cargoTestFlags = [ "--test=reference" ];
|
||||
doCheck = false;
|
||||
}
|
@ -1,6 +1,3 @@
|
||||
let
|
||||
my-lib = import ../lib/lib.nix;
|
||||
in
|
||||
{pkgs,
|
||||
naersk-lib,
|
||||
rust-wasm,
|
||||
@ -8,9 +5,9 @@ in
|
||||
with pkgs;
|
||||
(naersk-lib.buildPackage rec {
|
||||
pname = "wasm-pack";
|
||||
version = "v0.11.0";
|
||||
version = "v0.12.1";
|
||||
buildInputs = [ rust-wasm pkgs.openssl curl];
|
||||
nativeBuildInputs = (my-lib.darwin-sdk pkgs) ++ [llvm clang pkg-config];
|
||||
nativeBuildInputs =[llvm clang pkg-config];
|
||||
OPENSSL_NO_VENDOR=1;
|
||||
# The checks use network so disable them here
|
||||
doCheck = false;
|
||||
@ -18,7 +15,7 @@ with pkgs;
|
||||
owner = "rustwasm";
|
||||
repo = "wasm-pack";
|
||||
rev = version;
|
||||
sha256 = "sha256-3iwXoYnmrZsbwFUR41uI/4jnCF0OjeRO7UqVDaGJJbQ=";
|
||||
hash = "sha256-L4mCgUPG4cgTUpCoaIUOTONBOggXn5vMyPKj48B3MMk=";
|
||||
};
|
||||
cargoBuildOptions = opts: opts ++ ["-p" "${pname}" ];
|
||||
})
|
@ -6,10 +6,6 @@ A web assembly experiment in Meal Planning and Shopping List management.
|
||||
|
||||
Ensure you have rust installed with support for the web assembly target. You can see instructions here: [Rust wasm book](https://rustwasm.github.io/docs/book/game-of-life/setup.html).
|
||||
|
||||
You will also want to have trunk installed. You can see instructions for that here: [trunk](https://trunkrs.dev/)
|
||||
|
||||
Then obtain the source. We do not at this time publish kitchen on [crates.io](https://crates.io/).
|
||||
|
||||
```sh
|
||||
git clone https://github.com/zaphar/kitchen
|
||||
cd kitchen
|
||||
@ -23,7 +19,7 @@ make release
|
||||
|
||||
# Hacking on kitchen
|
||||
|
||||
If you want to hack on kitchen, then you may find it useful to use trunk in dev mode. The run script will run build the app and run trunk with it watching for changes and reloading on demand in your browser.
|
||||
The run script will run build the app and run it for you.
|
||||
|
||||
```sh
|
||||
./run.sh
|
||||
|
@ -8,8 +8,14 @@ edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
abortable_parser = "~0.2.6"
|
||||
chrono = "~0.4"
|
||||
serde = "1.0.144"
|
||||
|
||||
[dependencies.chrono]
|
||||
version = "0.4.22"
|
||||
features = ["serde"]
|
||||
|
||||
[dependencies.serde]
|
||||
version = "1.0.204"
|
||||
features = ["derive"]
|
||||
|
||||
[dependencies.num-rational]
|
||||
version = "~0.4.0"
@ -50,35 +50,49 @@ impl Mealplan {
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug)]
|
||||
pub struct RecipeEntry(pub String, pub String, pub Option<String>);
|
||||
pub struct RecipeEntry {
|
||||
pub id: String,
|
||||
pub text: String,
|
||||
pub category: Option<String>,
|
||||
pub serving_count: Option<i64>,
|
||||
}
|
||||
|
||||
impl RecipeEntry {
|
||||
pub fn new<IS: Into<String>, TS: Into<String>>(recipe_id: IS, text: TS) -> Self {
|
||||
Self(recipe_id.into(), text.into(), None)
|
||||
Self {
|
||||
id: recipe_id.into(),
|
||||
text: text.into(),
|
||||
category: None,
|
||||
serving_count: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_recipe_id<S: Into<String>>(&mut self, id: S) {
|
||||
self.0 = id.into();
|
||||
self.id = id.into();
|
||||
}
|
||||
|
||||
pub fn recipe_id(&self) -> &str {
|
||||
self.0.as_str()
|
||||
self.id.as_str()
|
||||
}
|
||||
|
||||
pub fn set_recipe_text<S: Into<String>>(&mut self, text: S) {
|
||||
self.1 = text.into();
|
||||
self.text = text.into();
|
||||
}
|
||||
|
||||
pub fn recipe_text(&self) -> &str {
|
||||
self.1.as_str()
|
||||
self.text.as_str()
|
||||
}
|
||||
|
||||
pub fn set_category<S: Into<String>>(&mut self, cat: S) {
|
||||
self.2 = Some(cat.into());
|
||||
self.category = Some(cat.into());
|
||||
}
|
||||
|
||||
pub fn category(&self) -> Option<&String> {
|
||||
self.2.as_ref()
|
||||
self.category.as_ref()
|
||||
}
|
||||
|
||||
pub fn serving_count(&self) -> Option<i64> {
|
||||
self.serving_count.clone()
|
||||
}
|
||||
}
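Moving RecipeEntry from a positional tuple struct to named fields also changes its serde representation: the old form serialized as a JSON array, the new one as an object, which matters for any previously cached entries. A hedged illustration of the difference; the field values are made up and serde_json is assumed only for the demonstration:

```rust
use serde::{Deserialize, Serialize};

// Old shape (positional fields): serializes as a JSON array.
#[derive(Serialize, Deserialize)]
struct OldEntry(String, String, Option<String>);

// New shape (named fields): serializes as a JSON object.
#[derive(Serialize, Deserialize)]
struct NewEntry {
    id: String,
    text: String,
    category: Option<String>,
    serving_count: Option<i64>,
}

fn main() {
    let old = OldEntry("r1".into(), "text".into(), None);
    assert_eq!(serde_json::to_string(&old).unwrap(), r#"["r1","text",null]"#);

    let new = NewEntry { id: "r1".into(), text: "text".into(), category: None, serving_count: None };
    assert_eq!(
        serde_json::to_string(&new).unwrap(),
        r#"{"id":"r1","text":"text","category":null,"serving_count":null}"#
    );
}
```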
|
||||
|
||||
@ -87,6 +101,7 @@ impl RecipeEntry {
|
||||
pub struct Recipe {
|
||||
pub title: String,
|
||||
pub desc: Option<String>,
|
||||
pub serving_count: Option<i64>,
|
||||
pub steps: Vec<Step>,
|
||||
}
|
||||
|
||||
@ -96,6 +111,7 @@ impl Recipe {
|
||||
title: title.into(),
|
||||
desc: desc.map(|s| s.into()),
|
||||
steps: Vec::new(),
|
||||
serving_count: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -132,6 +148,16 @@ impl Recipe {
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&RecipeEntry> for Recipe {
|
||||
type Error = String;
|
||||
|
||||
fn try_from(value: &RecipeEntry) -> Result<Self, Self::Error> {
|
||||
let mut parsed = parse::as_recipe(&value.text)?;
|
||||
parsed.serving_count = value.serving_count.clone();
|
||||
Ok(parsed)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct IngredientAccumulator {
|
||||
inner: BTreeMap<IngredientKey, (Ingredient, BTreeSet<String>)>,
|
||||
}
|
||||
@ -156,16 +182,28 @@ impl IngredientAccumulator {
|
||||
set.insert(recipe_title.clone());
|
||||
self.inner.insert(key, (i.clone(), set));
|
||||
} else {
|
||||
let amt = match (self.inner[&key].0.amt, i.amt) {
|
||||
(Volume(rvm), Volume(lvm)) => Volume(lvm + rvm),
|
||||
(Count(lqty), Count(rqty)) => Count(lqty + rqty),
|
||||
(Weight(lqty), Weight(rqty)) => Weight(lqty + rqty),
|
||||
let amts = match (&self.inner[&key].0.amt, &i.amt) {
|
||||
(Volume(rvm), Volume(lvm)) => vec![Volume(lvm + rvm)],
|
||||
(Count(lqty), Count(rqty)) => vec![Count(lqty + rqty)],
|
||||
(Weight(lqty), Weight(rqty)) => vec![Weight(lqty + rqty)],
|
||||
(Package(lnm, lqty), Package(rnm, rqty)) => {
|
||||
if lnm == rnm {
|
||||
vec![Package(lnm.clone(), lqty + rqty)]
|
||||
} else {
|
||||
vec![
|
||||
Package(lnm.clone(), lqty.clone()),
|
||||
Package(rnm.clone(), rqty.clone()),
|
||||
]
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
};
|
||||
self.inner.get_mut(&key).map(|(i, set)| {
|
||||
i.amt = amt;
|
||||
set.insert(recipe_title.clone());
|
||||
});
|
||||
for amt in amts {
|
||||
self.inner.get_mut(&key).map(|(i, set)| {
|
||||
i.amt = amt;
|
||||
set.insert(recipe_title.clone());
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -194,7 +232,7 @@ pub struct Step {
|
||||
impl Step {
|
||||
pub fn new<S: Into<String>>(prep_time: Option<std::time::Duration>, instructions: S) -> Self {
|
||||
Self {
|
||||
prep_time: prep_time,
|
||||
prep_time,
|
||||
instructions: instructions.into(),
|
||||
ingredients: Vec::new(),
|
||||
}
|
||||
|
@ -334,7 +334,14 @@ make_fn!(unit<StrIter, String>,
|
||||
text_token!("kg"),
|
||||
text_token!("grams"),
|
||||
text_token!("gram"),
|
||||
text_token!("g")),
|
||||
text_token!("g"),
|
||||
text_token!("pkg"),
|
||||
text_token!("package"),
|
||||
text_token!("bottle"),
|
||||
text_token!("bot"),
|
||||
text_token!("bag"),
|
||||
text_token!("can")
|
||||
),
|
||||
_ => ws,
|
||||
(u.to_lowercase().to_singular())
|
||||
)
|
||||
@ -393,6 +400,7 @@ pub fn measure(i: StrIter) -> abortable_parser::Result<StrIter, Measure> {
|
||||
"oz" => Weight(Oz(qty)),
|
||||
"kg" | "kilogram" => Weight(Kilogram(qty)),
|
||||
"g" | "gram" => Weight(Gram(qty)),
|
||||
"pkg" | "package" | "can" | "bag" | "bottle" | "bot" => Measure::pkg(s, qty),
|
||||
_u => {
|
||||
eprintln!("Invalid unit: {}", _u);
|
||||
unreachable!()
|
||||
@ -418,9 +426,8 @@ pub fn normalize_name(name: &str) -> String {
|
||||
// NOTE(jwall): The below unwrap is safe because of the length
|
||||
// check above.
|
||||
let last = parts.last().unwrap();
|
||||
let normalized = last.to_singular();
|
||||
prefix.push(' ');
|
||||
prefix.push_str(&normalized);
|
||||
prefix.push_str(&last.to_string());
|
||||
return prefix;
|
||||
}
|
||||
return name.trim().to_lowercase().to_owned();
|
||||
|
@ -235,32 +235,30 @@ fn test_ingredient_name_parse() {
|
||||
#[test]
|
||||
fn test_ingredient_parse() {
|
||||
for (i, expected) in vec![
|
||||
//(
|
||||
// "1 cup flour ",
|
||||
// Ingredient::new("flour", None, Volume(Cup(Quantity::Whole(1))), ""),
|
||||
//),
|
||||
//(
|
||||
// "\t1 cup flour ",
|
||||
// Ingredient::new("flour", None, Volume(Cup(Quantity::Whole(1))), ""),
|
||||
//),
|
||||
//(
|
||||
// "1 cup apple (chopped)",
|
||||
// Ingredient::new(
|
||||
// "apple",
|
||||
// Some("chopped".to_owned()),
|
||||
// Volume(Cup(Quantity::Whole(1))),
|
||||
// "",
|
||||
// ),
|
||||
//),
|
||||
//(
|
||||
// "1 cup apple (chopped) ",
|
||||
// Ingredient::new(
|
||||
// "apple",
|
||||
// Some("chopped".to_owned()),
|
||||
// Volume(Cup(Quantity::Whole(1))),
|
||||
// "",
|
||||
// ),
|
||||
//),
|
||||
(
|
||||
"1 cup flour ",
|
||||
Ingredient::new("flour", None, Volume(Cup(Quantity::Whole(1)))),
|
||||
),
|
||||
(
|
||||
"\t1 cup flour ",
|
||||
Ingredient::new("flour", None, Volume(Cup(Quantity::Whole(1)))),
|
||||
),
|
||||
(
|
||||
"1 cup apple (chopped)",
|
||||
Ingredient::new(
|
||||
"apple",
|
||||
Some("chopped".to_owned()),
|
||||
Volume(Cup(Quantity::Whole(1))),
|
||||
),
|
||||
),
|
||||
(
|
||||
"1 cup apple (chopped) ",
|
||||
Ingredient::new(
|
||||
"apple",
|
||||
Some("chopped".to_owned()),
|
||||
Volume(Cup(Quantity::Whole(1))),
|
||||
),
|
||||
),
|
||||
(
|
||||
"1 green bell pepper (chopped) ",
|
||||
Ingredient::new(
|
||||
@ -269,6 +267,46 @@ fn test_ingredient_parse() {
|
||||
Count(Quantity::Whole(1)),
|
||||
),
|
||||
),
|
||||
(
|
||||
"1 pkg green onion",
|
||||
Ingredient::new(
|
||||
"green onion",
|
||||
None,
|
||||
Package("pkg".into(), Quantity::Whole(1)),
|
||||
),
|
||||
),
|
||||
(
|
||||
"1 bottle green onion",
|
||||
Ingredient::new(
|
||||
"green onion",
|
||||
None,
|
||||
Package("bottle".into(), Quantity::Whole(1)),
|
||||
),
|
||||
),
|
||||
(
|
||||
"1 bot green onion",
|
||||
Ingredient::new(
|
||||
"green onion",
|
||||
None,
|
||||
Package("bot".into(), Quantity::Whole(1)),
|
||||
),
|
||||
),
|
||||
(
|
||||
"1 bag green onion",
|
||||
Ingredient::new(
|
||||
"green onion",
|
||||
None,
|
||||
Package("bag".into(), Quantity::Whole(1)),
|
||||
),
|
||||
),
|
||||
(
|
||||
"1 can baked beans",
|
||||
Ingredient::new(
|
||||
"baked beans",
|
||||
None,
|
||||
Package("can".into(), Quantity::Whole(1)),
|
||||
),
|
||||
),
|
||||
] {
|
||||
match parse::ingredient(StrIter::new(i)) {
|
||||
ParseResult::Complete(_, ing) => assert_eq!(ing, expected),
|
||||
|
@ -22,6 +22,7 @@ use std::{
|
||||
convert::TryFrom,
|
||||
fmt::Display,
|
||||
ops::{Add, Div, Mul, Sub},
|
||||
rc::Rc,
|
||||
};
|
||||
|
||||
use num_rational::Ratio;
|
||||
@ -179,6 +180,20 @@ impl VolumeMeasure {
|
||||
|
||||
macro_rules! volume_op {
|
||||
($trait:ident, $method:ident) => {
|
||||
impl $trait for &VolumeMeasure {
|
||||
type Output = VolumeMeasure;
|
||||
|
||||
fn $method(self, lhs: Self) -> Self::Output {
|
||||
let (l, r) = (self.get_ml(), lhs.get_ml());
|
||||
let result = ML($trait::$method(l, r));
|
||||
if self.metric() {
|
||||
result.normalize()
|
||||
} else {
|
||||
result.into_tsp().normalize()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl $trait for VolumeMeasure {
|
||||
type Output = Self;
|
||||
|
||||
@ -293,6 +308,20 @@ impl WeightMeasure {
|
||||
|
||||
macro_rules! weight_op {
|
||||
($trait:ident, $method:ident) => {
|
||||
impl $trait for &WeightMeasure {
|
||||
type Output = WeightMeasure;
|
||||
|
||||
fn $method(self, lhs: Self) -> Self::Output {
|
||||
let (l, r) = (self.get_grams(), lhs.get_grams());
|
||||
let result = WeightMeasure::Gram($trait::$method(l, r));
|
||||
if self.metric() {
|
||||
result.normalize()
|
||||
} else {
|
||||
result.into_oz().normalize()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl $trait for WeightMeasure {
|
||||
type Output = Self;
|
||||
|
||||
@ -335,18 +364,19 @@ impl Display for WeightMeasure {
|
||||
|
||||
use WeightMeasure::{Gram, Kilogram, Oz, Pound};
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord)]
|
||||
#[derive(Clone, Debug, PartialEq, PartialOrd, Eq, Ord)]
|
||||
/// Measurements in a Recipe with associated units for them.
|
||||
pub enum Measure {
|
||||
/// Volume measurements as meter cubed base unit
|
||||
Volume(VolumeMeasure),
|
||||
/// Simple count of items
|
||||
Count(Quantity),
|
||||
Package(Rc<str>, Quantity),
|
||||
/// Weight measure as Grams base unit
|
||||
Weight(WeightMeasure),
|
||||
}
|
||||
|
||||
use Measure::{Count, Volume, Weight};
|
||||
use Measure::{Count, Package, Volume, Weight};
|
||||
|
||||
impl Measure {
|
||||
pub fn tsp(qty: Quantity) -> Self {
|
||||
@ -407,11 +437,16 @@ impl Measure {
|
||||
Weight(Oz(qty))
|
||||
}
|
||||
|
||||
pub fn pkg<S: Into<Rc<str>>>(name: S, qty: Quantity) -> Self {
|
||||
Package(name.into(), qty)
|
||||
}
|
||||
|
||||
pub fn measure_type(&self) -> String {
|
||||
match self {
|
||||
Volume(_) => "Volume",
|
||||
Count(_) => "Count",
|
||||
Weight(_) => "Weight",
|
||||
Package(_, _) => "Package",
|
||||
}
|
||||
.to_owned()
|
||||
}
|
||||
@ -421,6 +456,7 @@ impl Measure {
|
||||
Volume(vm) => vm.plural(),
|
||||
Count(qty) => qty.plural(),
|
||||
Weight(wm) => wm.plural(),
|
||||
Package(_, qty) => qty.plural(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -429,6 +465,7 @@ impl Measure {
|
||||
Volume(vm) => Volume(vm.normalize()),
|
||||
Count(qty) => Count(qty.clone()),
|
||||
Weight(wm) => Weight(wm.normalize()),
|
||||
Package(nm, qty) => Package(nm.clone(), qty.clone()),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -439,6 +476,7 @@ impl Display for Measure {
|
||||
Volume(vm) => write!(w, "{}", vm),
|
||||
Count(qty) => write!(w, "{}", qty),
|
||||
Weight(wm) => write!(w, "{}", wm),
|
||||
Package(nm, qty) => write!(w, "{} {}", qty, nm),
|
||||
}
|
||||
}
|
||||
}
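A small sketch of the new Package variant in use, matching the Display arm just above; the module path is assumed and the output shown is based on the test data elsewhere in this diff:

```rust
use recipes::unit::{Measure, Quantity}; // assumed import path

fn demo() {
    // The Display arm above writes "<qty> <name>" for Package measures.
    let beans = Measure::pkg("can", Quantity::Whole(2));
    println!("{} ({})", beans, beans.measure_type()); // e.g. "2 can (Package)"
}
```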
|
||||
@ -533,6 +571,26 @@ impl TryFrom<f32> for Quantity {
|
||||
|
||||
macro_rules! quantity_op {
|
||||
($trait:ident, $method:ident) => {
|
||||
impl $trait for &Quantity {
|
||||
type Output = Quantity;
|
||||
|
||||
fn $method(self, lhs: Self) -> Self::Output {
|
||||
match (self, lhs) {
|
||||
(Whole(rhs), Whole(lhs)) => Frac($trait::$method(
|
||||
Ratio::from_integer(*rhs),
|
||||
Ratio::from_integer(*lhs),
|
||||
)),
|
||||
(Frac(rhs), Frac(lhs)) => Frac($trait::$method(rhs, lhs)),
|
||||
(Whole(rhs), Frac(lhs)) => {
|
||||
Frac($trait::$method(Ratio::from_integer(*rhs), lhs))
|
||||
}
|
||||
(Frac(rhs), Whole(lhs)) => {
|
||||
Frac($trait::$method(rhs, Ratio::from_integer(*lhs)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl $trait for Quantity {
|
||||
type Output = Self;
|
||||
|
||||
|
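The new `impl $trait for &Quantity` arm lets callers combine quantities through references instead of by value. A hedged usage sketch, assuming quantity_op! is instantiated for Add alongside the other operators as the pre-existing by-value impls suggest; the module path is also assumed:

```rust
use num_rational::Ratio;
use recipes::unit::Quantity; // assumed import path

fn demo() {
    let a = Quantity::Whole(1);
    let b = Quantity::Frac(Ratio::new(1, 2));
    // The reference impls mean the operands are not consumed; `a` and `b`
    // remain usable after the addition.
    let total = &a + &b;
    println!("{} + {} = {}", a, b, total);
}
```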
19 run-non-nix.sh Executable file
@@ -0,0 +1,19 @@
# Copyright 2022 Jeremy Wall
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
EXAMPLES=${EXAMPLES:-../examples}
echo Starting server serving ${EXAMPLES}
mkdir -p .session_store
make kitchen
./target/debug/kitchen --verbose debug serve --listen 127.0.0.1:3030 --session_dir .session_store --dir ${EXAMPLES} --tls --cert ~/tls-certs/localhost+1.pem --cert_key ~/tls-certs/localhost+1-key.pem $@
# This is ghetto but I'm doing it anyway
2 run.sh
@@ -14,5 +14,5 @@
EXAMPLES=${EXAMPLES:-../examples}
echo Starting server serving ${EXAMPLES}
mkdir .session_store
nix run .\#kitchenDebug -- --verbose debug serve --session_dir .session_store --dir ${EXAMPLES} --tls --cert ~/tls-certs/localhost+2.pem --cert_key ~/tls-certs/localhost+2-key.pem $@
nix run .\#kitchenDebug -- --verbose debug serve --session_dir .session_store --dir ${EXAMPLES} --tls --cert ~/tls-certs/localhost+1.pem --cert_key ~/tls-certs/localhost+1-key.pem $@
# This is ghetto but I'm doing it anyway
11 scripts/wasm-build.sh Normal file
@@ -0,0 +1,11 @@
set -x
buildtype=$1;

mkdir -p $out

if [ ${buildtype} = "release" ]; then
buildtype_flag="--release"
fi

cargo build --lib ${buildtype_flag} --target wasm32-unknown-unknown --target-dir $out --features debug_logs
wasm-bindgen $out/wasm32-unknown-unknown/${buildtype}/${project}_wasm.wasm --out-dir $out --typescript --target web
6 scripts/wasm-opt.sh Normal file
@@ -0,0 +1,6 @@
set -x
buildtype=$1;

wasm-opt $out/wasm32-unknown-unknown/${buildtype}/${project}_wasm.wasm --output $out/${project}_wasm_bg-opt.wasm -O
rm -f $out/${project}_wasm_bg.wasm
mv $out/${project}_wasm_bg-opt.wasm $out/${project}_wasm_bg.wasm
3 scripts/wasm-sourcemap.sh Normal file
@@ -0,0 +1,3 @@
set -x

cargo-wasm2map wasm2map --patch $out/${project}_wasm_bg.wasm --base-url=http://localhost:3030
11 shell.nix
@@ -1,11 +0,0 @@
let
lock = builtins.fromJSON (builtins.readFile ./flake.lock);
in
(import (
fetchTarball {
url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
sha256 = lock.nodes.flake-compat.locked.narHash;
}
) {
src = ./.;
}).devShell
@@ -25,6 +25,15 @@ async-trait = "0.1.57"
base64 = "0.21.0"
sycamore-router = "0.8"
js-sys = "0.3.60"
wasm-web-component = { git = "https://github.com/zaphar/wasm-web-components.git", rev = "v0.3.0" }
maud = "*"
indexed-db = "0.4.1"
anyhow = "1.0.86"
serde-wasm-bindgen = "0.6.5"

[dependencies.serde]
version = "1.0.204"
features = ["derive"]

[dependencies.tracing-subscriber]
version = "0.3.16"
@@ -37,20 +46,26 @@ features = ["fmt", "time"]
version = "0.4.22"
features = ["serde"]

[dependencies.reqwasm]
version = "0.5.0"
[dependencies.gloo-net]
version = "0.4.0"

[dependencies.wasm-bindgen]
# we need wasm-bindgen v0.2.84 exactly
version = "= 0.2.84"
version = "= 0.2.89"

[dependencies.web-sys]
version = "0.3"
features = [
"Event",
"InputEvent",
"CustomEvent",
"CustomEventInit",
"EventTarget",
"History",
"HtmlAnchorElement",
"HtmlDivElement",
"HtmlSpanElement",
"HtmlInputElement",
"HtmlTextAreaElement",
"HtmlBaseElement",
"HtmlDialogElement",
"KeyboardEvent",
@@ -58,7 +73,12 @@ features = [
"PopStateEvent",
"Url",
"Window",
"Storage"
"IdbFactory",
"IdbOpenDbRequest",
"IdbRequest",
"IdbDatabase",
"IdbRequestReadyState",
"Storage",
]

[dependencies.sycamore]
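The new `Idb*` web-sys features back the IndexedDB storage layer used in web/src/api.rs below. A hedged sketch of what those bindings expose; error handling is elided, and in the app itself access goes through js_lib::DBFactory and the indexed-db crate rather than this raw call:

```rust
use web_sys::{window, IdbFactory};

// With the Window and IdbFactory features enabled, the raw IndexedDB factory
// is reachable from the window object (None if the browser disables it).
fn idb_factory() -> Option<IdbFactory> {
    window()?.indexed_db().ok().flatten()
}
```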
@@ -19,7 +19,7 @@
<head>
<meta content="text/html;charset=utf-8" http-equiv="Content-Type" name="viewport"
content="width=device-width, initial-scale=1.0" charset="UTF-8">
<link rel="stylesheet" href="/ui/static/pico.min.css">
<link rel="stylesheet" href="/ui/static/normalize.css">
<link rel="stylesheet" href="/ui/static/app.css">
</head>
628 web/src/api.rs
@ -15,18 +15,31 @@ use std::collections::{BTreeMap, BTreeSet};
|
||||
|
||||
use base64::{self, Engine};
|
||||
use chrono::NaiveDate;
|
||||
use reqwasm;
|
||||
use serde_json::{from_str, to_string};
|
||||
use gloo_net;
|
||||
// TODO(jwall): Remove this when we have gone a few migrations past.
|
||||
use serde_json::from_str;
|
||||
use sycamore::prelude::*;
|
||||
use tracing::{debug, error, instrument};
|
||||
|
||||
use anyhow::Result;
|
||||
use client_api::*;
|
||||
use recipes::{IngredientKey, RecipeEntry};
|
||||
use serde_wasm_bindgen::{from_value, Serializer};
|
||||
use wasm_bindgen::JsValue;
|
||||
// TODO(jwall): Remove this when we have gone a few migrations past.
|
||||
use web_sys::Storage;
|
||||
|
||||
use crate::{app_state::AppState, js_lib};
|
||||
fn to_js<T: serde::ser::Serialize>(value: T) -> Result<JsValue, serde_wasm_bindgen::Error> {
|
||||
let s = Serializer::new().serialize_maps_as_objects(true);
|
||||
value.serialize(&s)
|
||||
}
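The `serialize_maps_as_objects(true)` setting matters because several cached values are keyed maps; without it serde-wasm-bindgen would emit JS Map objects rather than plain objects, which are less convenient to inspect and re-read from IndexedDB. A small hedged illustration using the helper above:

```rust
use std::collections::BTreeMap;

// Hedged illustration: with serialize_maps_as_objects(true), a Rust map
// serializes to a plain JS object ({"flour": "1 cup"}) instead of a JS Map.
fn map_to_js() -> Result<wasm_bindgen::JsValue, serde_wasm_bindgen::Error> {
    let mut amts: BTreeMap<String, String> = BTreeMap::new();
    amts.insert("flour".into(), "1 cup".into());
    to_js(&amts)
}
```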
|
||||
|
||||
use crate::{
|
||||
app_state::{parse_recipes, AppState},
|
||||
js_lib::{self, DBFactory},
|
||||
};
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(Debug)]
|
||||
pub struct Error(String);
|
||||
|
||||
@ -66,284 +79,290 @@ impl From<std::string::FromUtf8Error> for Error {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<reqwasm::Error> for Error {
|
||||
fn from(item: reqwasm::Error) -> Self {
|
||||
impl From<gloo_net::Error> for Error {
|
||||
fn from(item: gloo_net::Error) -> Self {
|
||||
Error(format!("{:?}", item))
|
||||
}
|
||||
}
|
||||
|
||||
fn recipe_key<S: std::fmt::Display>(id: S) -> String {
|
||||
format!("recipe:{}", id)
|
||||
}
|
||||
|
||||
fn category_key<S: std::fmt::Display>(id: S) -> String {
|
||||
format!("category:{}", id)
|
||||
}
|
||||
|
||||
fn token68(user: String, pass: String) -> String {
|
||||
base64::engine::general_purpose::STANDARD.encode(format!("{}:{}", user, pass))
|
||||
}
|
||||
|
||||
fn convert_to_io_error<V, E>(res: Result<V, E>) -> Result<V, std::io::Error>
|
||||
where
|
||||
E: Into<Box<dyn std::error::Error>> + std::fmt::Debug,
|
||||
{
|
||||
match res {
|
||||
Ok(v) => Ok(v),
|
||||
Err(e) => Err(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
format!("{:?}", e),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct LocalStore {
|
||||
store: Storage,
|
||||
// TODO(zaphar): Remove this when it's safe to delete the migration
|
||||
old_store: Storage,
|
||||
store: DBFactory<'static>,
|
||||
}
|
||||
|
||||
const APP_STATE_KEY: &'static str = "app-state";
|
||||
const USER_DATA_KEY: &'static str = "user_data";
|
||||
|
||||
impl LocalStore {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
store: js_lib::get_storage(),
|
||||
store: DBFactory::default(),
|
||||
old_store: js_lib::get_storage(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets user data from local storage.
|
||||
pub fn get_user_data(&self) -> Option<UserData> {
|
||||
self.store
|
||||
.get("user_data")
|
||||
.map_or(None, |val| val.map(|val| from_str(&val).unwrap_or(None)))
|
||||
.flatten()
|
||||
pub async fn migrate(&self) {
|
||||
// 1. migrate app-state from localstore to indexeddb
|
||||
debug!("Peforming localstorage migration");
|
||||
if let Ok(Some(v)) = self.old_store.get("app_state") {
|
||||
if let Ok(Some(local_state)) = from_str::<Option<AppState>>(&v) {
|
||||
self.store_app_state(&local_state).await;
|
||||
}
|
||||
}
|
||||
let _ = self.old_store.remove_item("app_state");
|
||||
// 2. migrate user-state from localstore to indexeddb
|
||||
if let Ok(Some(v)) = self.old_store.get(USER_DATA_KEY) {
|
||||
if let Ok(local_user_data) = from_str::<Option<UserData>>(&v) {
|
||||
self.set_user_data(local_user_data.as_ref()).await;
|
||||
}
|
||||
}
|
||||
let _ = self.old_store.remove_item(USER_DATA_KEY);
|
||||
// 3. Recipes
|
||||
let store_len = self.old_store.length().unwrap();
|
||||
let mut key_list = Vec::new();
|
||||
for i in 0..store_len {
|
||||
let key = self.old_store.key(i).unwrap().unwrap();
|
||||
if key.starts_with("recipe:") {
|
||||
key_list.push(key);
|
||||
}
|
||||
}
|
||||
for k in key_list {
|
||||
if let Ok(Some(recipe)) = self.old_store.get(&k) {
|
||||
if let Ok(recipe) = from_str::<RecipeEntry>(&recipe) {
|
||||
self.set_recipe_entry(&recipe).await;
|
||||
}
|
||||
}
|
||||
let _ = self.old_store.delete(&k);
|
||||
}
|
||||
}
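The migration reads each legacy localStorage entry, rewrites it into IndexedDB, and then deletes the old key, so it is effectively one-way. A sketch of how it would be sequenced at startup; the call site is assumed, and only LocalStore methods from this file are used:

```rust
// Hypothetical startup hook: run the one-time localStorage -> IndexedDB
// migration before the first cached read, so fetch_app_state sees the
// migrated values.
async fn startup_cache() -> Option<AppState> {
    let store = LocalStore::new();
    store.migrate().await;
    store.fetch_app_state().await
}
```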
|
||||
|
||||
#[instrument(skip_all)]
|
||||
pub async fn store_app_state(&self, state: &AppState) {
|
||||
let state = match to_js(state) {
|
||||
Ok(state) => state,
|
||||
Err(err) => {
|
||||
error!(?err, ?state, "Error deserializing app_state");
|
||||
return;
|
||||
}
|
||||
};
|
||||
web_sys::console::log_1(&state);
|
||||
let key = to_js(APP_STATE_KEY).expect("Failed to serialize key");
|
||||
self.store
|
||||
.rw_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
|
||||
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
|
||||
object_store.put_kv(&key, &state).await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
.expect("Failed to store app-state");
|
||||
}
|
||||
|
||||
#[instrument]
|
||||
pub async fn fetch_app_state(&self) -> Option<AppState> {
|
||||
debug!("Loading state from local store");
|
||||
let recipes = parse_recipes(&self.get_recipes().await).expect("Failed to parse recipes");
|
||||
self.store
|
||||
.ro_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
|
||||
let key = convert_to_io_error(to_js(APP_STATE_KEY))?;
|
||||
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
|
||||
let mut app_state: AppState = match object_store.get(&key).await? {
|
||||
Some(s) => convert_to_io_error(from_value(s))?,
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
if let Some(recipes) = recipes {
|
||||
debug!("Populating recipes");
|
||||
for (id, recipe) in recipes {
|
||||
debug!(id, "Adding recipe from local storage");
|
||||
app_state.recipes.insert(id, recipe);
|
||||
}
|
||||
}
|
||||
Ok(Some(app_state))
|
||||
})
|
||||
.await
|
||||
.expect("Failed to fetch app-state")
|
||||
}
|
||||
|
||||
#[instrument]
|
||||
/// Gets user data from local storage.
|
||||
pub async fn get_user_data(&self) -> Option<UserData> {
|
||||
self.store
|
||||
.ro_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
|
||||
let key = to_js(USER_DATA_KEY).expect("Failed to serialize key");
|
||||
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
|
||||
let user_data: UserData = match object_store.get(&key).await? {
|
||||
Some(s) => convert_to_io_error(from_value(s))?,
|
||||
None => return Ok(None),
|
||||
};
|
||||
Ok(Some(user_data))
|
||||
})
|
||||
.await
|
||||
.expect("Failed to fetch user_data")
|
||||
}
|
||||
|
||||
#[instrument]
|
||||
// Sets user data to local storage.
|
||||
pub fn set_user_data(&self, data: Option<&UserData>) {
|
||||
pub async fn set_user_data(&self, data: Option<&UserData>) {
|
||||
let key = to_js(USER_DATA_KEY).expect("Failed to serialize key");
|
||||
if let Some(data) = data {
|
||||
let data = data.clone();
|
||||
self.store
|
||||
.set(
|
||||
"user_data",
|
||||
&to_string(data).expect("Failed to desrialize user_data"),
|
||||
)
|
||||
.rw_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
|
||||
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
|
||||
object_store
|
||||
.put_kv(&key, &convert_to_io_error(to_js(&data))?)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
.expect("Failed to set user_data");
|
||||
} else {
|
||||
self.store
|
||||
.delete("user_data")
|
||||
.rw_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
|
||||
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
|
||||
object_store.delete(&key).await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
.expect("Failed to delete user_data");
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets categories from local storage.
|
||||
pub fn get_categories(&self) -> Option<Vec<(String, String)>> {
|
||||
let mut mappings = Vec::new();
|
||||
for k in self.get_category_keys() {
|
||||
if let Some(mut cat_map) = self
|
||||
.store
|
||||
.get(&k)
|
||||
.expect(&format!("Failed to get category key {}", k))
|
||||
.map(|v| {
|
||||
from_str::<Vec<(String, String)>>(&v)
|
||||
.expect(&format!("Failed to parse category key {}", k))
|
||||
})
|
||||
{
|
||||
mappings.extend(cat_map.drain(0..));
|
||||
}
|
||||
}
|
||||
if mappings.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(mappings)
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the categories to the given string.
|
||||
pub fn set_categories(&self, mappings: Option<&Vec<(String, String)>>) {
|
||||
if let Some(mappings) = mappings {
|
||||
for (i, cat) in mappings.iter() {
|
||||
self.store
|
||||
.set(
|
||||
&category_key(i),
|
||||
&to_string(&(i, cat)).expect("Failed to serialize category mapping"),
|
||||
)
|
||||
.expect("Failed to store category mapping");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_storage_keys(&self) -> Vec<String> {
|
||||
let mut keys = Vec::new();
|
||||
for idx in 0..self.store.length().unwrap() {
|
||||
if let Some(k) = self.store.key(idx).expect("Failed to get storage key") {
|
||||
keys.push(k)
|
||||
}
|
||||
}
|
||||
keys
|
||||
}
|
||||
|
||||
fn get_category_keys(&self) -> impl Iterator<Item = String> {
|
||||
self.get_storage_keys()
|
||||
.into_iter()
|
||||
.filter(|k| k.starts_with("category:"))
|
||||
}
|
||||
|
||||
fn get_recipe_keys(&self) -> impl Iterator<Item = String> {
|
||||
self.get_storage_keys()
|
||||
.into_iter()
|
||||
.filter(|k| k.starts_with("recipe:"))
|
||||
}
|
||||
|
||||
/// Gets all the recipes from local storage.
|
||||
pub fn get_recipes(&self) -> Option<Vec<RecipeEntry>> {
|
||||
let mut recipe_list = Vec::new();
|
||||
for recipe_key in self.get_recipe_keys() {
|
||||
if let Some(entry) = self
|
||||
.store
|
||||
.get(&recipe_key)
|
||||
.expect(&format!("Failed to get recipe: {}", recipe_key))
|
||||
{
|
||||
match from_str(&entry) {
|
||||
Ok(entry) => {
|
||||
recipe_list.push(entry);
|
||||
}
|
||||
Err(e) => {
|
||||
error!(recipe_key, err = ?e, "Failed to parse recipe entry");
|
||||
#[instrument]
|
||||
async fn get_recipe_keys(&self) -> impl Iterator<Item = String> {
|
||||
self.store
|
||||
.ro_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
|
||||
let mut keys = Vec::new();
|
||||
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
|
||||
let key_vec = object_store.get_all_keys(None).await?;
|
||||
for k in key_vec {
|
||||
if let Ok(v) = from_value(k) {
|
||||
keys.push(v);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if recipe_list.is_empty() {
|
||||
return None;
|
||||
}
|
||||
Some(recipe_list)
|
||||
Ok(keys)
|
||||
})
|
||||
.await
|
||||
.expect("Failed to get storage keys")
|
||||
.into_iter()
|
||||
}
|
||||
|
||||
pub fn get_recipe_entry(&self, id: &str) -> Option<RecipeEntry> {
|
||||
let key = recipe_key(id);
|
||||
#[instrument]
|
||||
/// Gets all the recipes from local storage.
|
||||
pub async fn get_recipes(&self) -> Option<Vec<RecipeEntry>> {
|
||||
self.store
|
||||
.get(&key)
|
||||
.expect(&format!("Failed to get recipe {}", key))
|
||||
.map(|entry| from_str(&entry).expect(&format!("Failed to get recipe {}", key)))
|
||||
.ro_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
|
||||
let mut recipe_list = Vec::new();
|
||||
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
|
||||
let mut c = object_store.cursor().open().await?;
|
||||
while let Some(value) = c.value() {
|
||||
recipe_list.push(convert_to_io_error(from_value(value))?);
|
||||
c.advance(1).await?;
|
||||
}
|
||||
if recipe_list.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
Ok(Some(recipe_list))
|
||||
})
|
||||
.await
|
||||
.expect("Failed to get recipes")
|
||||
}
|
||||
|
||||
#[instrument]
|
||||
pub async fn get_recipe_entry(&self, id: &str) -> Option<RecipeEntry> {
|
||||
let key = to_js(id).expect("Failed to serialize key");
|
||||
self.store
|
||||
.ro_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
|
||||
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
|
||||
let entry: Option<RecipeEntry> = match object_store.get(&key).await? {
|
||||
Some(v) => convert_to_io_error(from_value(v))?,
|
||||
None => None,
|
||||
};
|
||||
Ok(entry)
|
||||
})
|
||||
.await
|
||||
.expect("Failed to get recipes")
|
||||
}
|
||||
|
||||
#[instrument]
|
||||
/// Sets the set of recipes to the entries passed in. Deletes any recipes not
|
||||
/// in the list.
|
||||
pub fn set_all_recipes(&self, entries: &Vec<RecipeEntry>) {
|
||||
for recipe_key in self.get_recipe_keys() {
|
||||
pub async fn set_all_recipes(&self, entries: &Vec<RecipeEntry>) {
|
||||
for recipe_key in self.get_recipe_keys().await {
|
||||
let key = to_js(&recipe_key).expect("Failed to serialize key");
|
||||
self.store
|
||||
.delete(&recipe_key)
|
||||
.expect(&format!("Failed to get recipe {}", recipe_key));
|
||||
.rw_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
|
||||
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
|
||||
object_store.delete(&key).await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
.expect("Failed to delete user_data");
|
||||
}
|
||||
for entry in entries {
|
||||
self.set_recipe_entry(entry);
|
||||
let entry = entry.clone();
|
||||
let key = to_js(entry.recipe_id()).expect("Failed to serialize recipe key");
|
||||
self.store
|
||||
.rw_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
|
||||
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
|
||||
object_store
|
||||
.put_kv(&key, &convert_to_io_error(to_js(&entry))?)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
.expect("Failed to store recipe entry");
|
||||
}
|
||||
}
|
||||
|
||||
#[instrument]
|
||||
/// Set recipe entry in local storage.
|
||||
pub fn set_recipe_entry(&self, entry: &RecipeEntry) {
|
||||
pub async fn set_recipe_entry(&self, entry: &RecipeEntry) {
|
||||
let entry = entry.clone();
|
||||
let key = to_js(entry.recipe_id()).expect("Failed to serialize recipe key");
|
||||
self.store
|
||||
.set(
|
||||
&recipe_key(entry.recipe_id()),
|
||||
&to_string(&entry).expect(&format!("Failed to get recipe {}", entry.recipe_id())),
|
||||
)
|
||||
.expect(&format!("Failed to store recipe {}", entry.recipe_id()))
|
||||
.rw_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
|
||||
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
|
||||
object_store
|
||||
.put_kv(&key, &convert_to_io_error(to_js(&entry))?)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
.expect("Failed to store recipe entry");
|
||||
}
|
||||
|
||||
#[instrument]
|
||||
/// Delete recipe entry from local storage.
|
||||
pub fn delete_recipe_entry(&self, recipe_id: &str) {
|
||||
pub async fn delete_recipe_entry(&self, recipe_id: &str) {
|
||||
let key = to_js(recipe_id).expect("Failed to serialize key");
|
||||
self.store
|
||||
.delete(&recipe_key(recipe_id))
|
||||
.expect(&format!("Failed to delete recipe {}", recipe_id))
|
||||
}
|
||||
|
||||
/// Save working plan to local storage.
|
||||
pub fn store_plan(&self, plan: &Vec<(String, i32)>) {
|
||||
self.store
|
||||
.set("plan", &to_string(&plan).expect("Failed to serialize plan"))
|
||||
.expect("Failed to store plan'");
|
||||
}
|
||||
|
||||
pub fn get_plan(&self) -> Option<Vec<(String, i32)>> {
|
||||
if let Some(plan) = self.store.get("plan").expect("Failed to store plan") {
|
||||
Some(from_str(&plan).expect("Failed to deserialize plan"))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn delete_plan(&self) {
|
||||
self.store.delete("plan").expect("Failed to delete plan");
|
||||
self.store
|
||||
.delete("inventory")
|
||||
.expect("Failed to delete inventory data");
|
||||
}
|
||||
|
||||
pub fn set_plan_date(&self, date: &NaiveDate) {
|
||||
self.store
|
||||
.set(
|
||||
"plan:date",
|
||||
&to_string(&date).expect("Failed to serialize plan:date"),
|
||||
)
|
||||
.expect("Failed to store plan:date");
|
||||
}
|
||||
|
||||
pub fn get_plan_date(&self) -> Option<NaiveDate> {
|
||||
if let Some(date) = self
|
||||
.store
|
||||
.get("plan:date")
|
||||
.expect("Failed to get plan date")
|
||||
{
|
||||
Some(from_str(&date).expect("Failed to deserialize plan_date"))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_inventory_data(
|
||||
&self,
|
||||
) -> Option<(
|
||||
BTreeSet<IngredientKey>,
|
||||
BTreeMap<IngredientKey, String>,
|
||||
Vec<(String, String)>,
|
||||
)> {
|
||||
if let Some(inventory) = self
|
||||
.store
|
||||
.get("inventory")
|
||||
.expect("Failed to retrieve inventory data")
|
||||
{
|
||||
let (filtered, modified, extras): (
|
||||
BTreeSet<IngredientKey>,
|
||||
Vec<(IngredientKey, String)>,
|
||||
Vec<(String, String)>,
|
||||
) = from_str(&inventory).expect("Failed to deserialize inventory");
|
||||
return Some((filtered, BTreeMap::from_iter(modified), extras));
|
||||
}
|
||||
return None;
|
||||
}
|
||||
|
||||
pub fn set_inventory_data(
|
||||
&self,
|
||||
inventory: (
|
||||
&BTreeSet<IngredientKey>,
|
||||
&BTreeMap<IngredientKey, String>,
|
||||
&Vec<(String, String)>,
|
||||
),
|
||||
) {
|
||||
let filtered = inventory.0;
|
||||
let modified_amts = inventory
|
||||
.1
|
||||
.iter()
|
||||
.map(|(k, amt)| (k.clone(), amt.clone()))
|
||||
.collect::<Vec<(IngredientKey, String)>>();
|
||||
let extras = inventory.2;
|
||||
let inventory_data = (filtered, &modified_amts, extras);
|
||||
self.store
|
||||
.set(
|
||||
"inventory",
|
||||
&to_string(&inventory_data).expect(&format!(
|
||||
"Failed to serialize inventory {:?}",
|
||||
inventory_data
|
||||
)),
|
||||
)
|
||||
.expect("Failed to set inventory");
|
||||
}
|
||||
|
||||
pub fn set_staples(&self, content: &String) {
|
||||
self.store
|
||||
.set("staples", content)
|
||||
.expect("Failed to set staples in local store");
|
||||
}
|
||||
|
||||
pub fn get_staples(&self) -> Option<String> {
|
||||
self.store
|
||||
.get("staples")
|
||||
.expect("Failed to retreive staples from local store")
|
||||
.rw_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
|
||||
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
|
||||
object_store.delete(&key).await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
.expect("Failed to delete user_data");
|
||||
}
|
||||
}
|
||||
|
||||
@ -381,13 +400,17 @@ impl HttpStore {
|
||||
debug!("attempting login request against api.");
|
||||
let mut path = self.v2_path();
|
||||
path.push_str("/auth");
|
||||
let result = reqwasm::http::Request::get(&path)
|
||||
let request = gloo_net::http::Request::get(&path)
|
||||
.header(
|
||||
"Authorization",
|
||||
"authorization",
|
||||
format!("Basic {}", token68(user, pass)).as_str(),
|
||||
)
|
||||
.send()
|
||||
.await;
|
||||
.mode(web_sys::RequestMode::SameOrigin)
|
||||
.credentials(web_sys::RequestCredentials::SameOrigin)
|
||||
.build()
|
||||
.expect("Failed to build request");
|
||||
debug!(?request, "Sending auth request");
|
||||
let result = request.send().await;
|
||||
if let Ok(resp) = &result {
|
||||
if resp.status() == 200 {
|
||||
let user_data = resp
|
||||
@ -409,7 +432,7 @@ impl HttpStore {
|
||||
debug!("Retrieving User Account data");
|
||||
let mut path = self.v2_path();
|
||||
path.push_str("/account");
|
||||
let result = reqwasm::http::Request::get(&path).send().await;
|
||||
let result = gloo_net::http::Request::get(&path).send().await;
|
||||
if let Ok(resp) = &result {
|
||||
if resp.status() == 200 {
|
||||
let user_data = resp
|
||||
@ -430,11 +453,11 @@ impl HttpStore {
|
||||
pub async fn fetch_categories(&self) -> Result<Option<Vec<(String, String)>>, Error> {
|
||||
let mut path = self.v2_path();
|
||||
path.push_str("/category_map");
|
||||
let resp = match reqwasm::http::Request::get(&path).send().await {
|
||||
let resp = match gloo_net::http::Request::get(&path).send().await {
|
||||
Ok(resp) => resp,
|
||||
Err(reqwasm::Error::JsError(err)) => {
|
||||
Err(gloo_net::Error::JsError(err)) => {
|
||||
error!(path, ?err, "Error hitting api");
|
||||
return Ok(self.local_store.get_categories());
|
||||
return Ok(None);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err)?;
|
||||
@ -460,11 +483,11 @@ impl HttpStore {
|
||||
pub async fn fetch_recipes(&self) -> Result<Option<Vec<RecipeEntry>>, Error> {
|
||||
let mut path = self.v2_path();
|
||||
path.push_str("/recipes");
|
||||
let resp = match reqwasm::http::Request::get(&path).send().await {
|
||||
let resp = match gloo_net::http::Request::get(&path).send().await {
|
||||
Ok(resp) => resp,
|
||||
Err(reqwasm::Error::JsError(err)) => {
|
||||
Err(gloo_net::Error::JsError(err)) => {
|
||||
error!(path, ?err, "Error hitting api");
|
||||
return Ok(self.local_store.get_recipes());
|
||||
return Ok(self.local_store.get_recipes().await);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err)?;
|
||||
@ -490,11 +513,11 @@ impl HttpStore {
|
||||
let mut path = self.v2_path();
|
||||
path.push_str("/recipe/");
|
||||
path.push_str(id.as_ref());
|
||||
let resp = match reqwasm::http::Request::get(&path).send().await {
|
||||
let resp = match gloo_net::http::Request::get(&path).send().await {
|
||||
Ok(resp) => resp,
|
||||
Err(reqwasm::Error::JsError(err)) => {
|
||||
Err(gloo_net::Error::JsError(err)) => {
|
||||
error!(path, ?err, "Error hitting api");
|
||||
return Ok(self.local_store.get_recipe_entry(id.as_ref()));
|
||||
return Ok(self.local_store.get_recipe_entry(id.as_ref()).await);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err)?;
|
||||
@ -514,7 +537,7 @@ impl HttpStore {
|
||||
.as_success()
|
||||
.unwrap();
|
||||
if let Some(ref entry) = entry {
|
||||
self.local_store.set_recipe_entry(entry);
|
||||
self.local_store.set_recipe_entry(entry).await;
|
||||
}
|
||||
Ok(entry)
|
||||
}
|
||||
@ -528,7 +551,7 @@ impl HttpStore {
|
||||
let mut path = self.v2_path();
|
||||
path.push_str("/recipe");
|
||||
path.push_str(&format!("/{}", recipe.as_ref()));
|
||||
let resp = reqwasm::http::Request::delete(&path).send().await?;
|
||||
let resp = gloo_net::http::Request::delete(&path).send().await?;
|
||||
if resp.status() != 200 {
|
||||
Err(format!("Status: {}", resp.status()).into())
|
||||
} else {
|
||||
@ -546,10 +569,9 @@ impl HttpStore {
|
||||
return Err("Recipe Ids can not be empty".into());
|
||||
}
|
||||
}
|
||||
let serialized = to_string(&recipes).expect("Unable to serialize recipe entries");
|
||||
let resp = reqwasm::http::Request::post(&path)
|
||||
.body(&serialized)
|
||||
.header("content-type", "application/json")
|
||||
let resp = gloo_net::http::Request::post(&path)
|
||||
.json(&recipes)
|
||||
.expect("Failed to set body")
|
||||
.send()
|
||||
.await?;
|
||||
if resp.status() != 200 {
|
||||
@ -564,9 +586,9 @@ impl HttpStore {
|
||||
pub async fn store_categories(&self, categories: &Vec<(String, String)>) -> Result<(), Error> {
|
||||
let mut path = self.v2_path();
|
||||
path.push_str("/category_map");
|
||||
let resp = reqwasm::http::Request::post(&path)
|
||||
.body(to_string(&categories).expect("Unable to encode categories as json"))
|
||||
.header("content-type", "application/json")
|
||||
let resp = gloo_net::http::Request::post(&path)
|
||||
.json(&categories)
|
||||
.expect("Failed to set body")
|
||||
.send()
|
||||
.await?;
|
||||
if resp.status() != 200 {
|
||||
@ -618,9 +640,9 @@ impl HttpStore {
|
||||
pub async fn store_plan(&self, plan: Vec<(String, i32)>) -> Result<(), Error> {
|
||||
let mut path = self.v2_path();
|
||||
path.push_str("/plan");
|
||||
let resp = reqwasm::http::Request::post(&path)
|
||||
.body(to_string(&plan).expect("Unable to encode plan as json"))
|
||||
.header("content-type", "application/json")
|
||||
let resp = gloo_net::http::Request::post(&path)
|
||||
.json(&plan)
|
||||
.expect("Failed to set body")
|
||||
.send()
|
||||
.await?;
|
||||
if resp.status() != 200 {
|
||||
@ -640,9 +662,9 @@ impl HttpStore {
|
||||
path.push_str("/plan");
|
||||
path.push_str("/at");
|
||||
path.push_str(&format!("/{}", date));
|
||||
let resp = reqwasm::http::Request::post(&path)
|
||||
.body(to_string(&plan).expect("Unable to encode plan as json"))
|
||||
.header("content-type", "application/json")
|
||||
let resp = gloo_net::http::Request::post(&path)
|
||||
.json(&plan)
|
||||
.expect("Failed to set body")
|
||||
.send()
|
||||
.await?;
|
||||
if resp.status() != 200 {
|
||||
@ -657,7 +679,7 @@ impl HttpStore {
|
||||
let mut path = self.v2_path();
|
||||
path.push_str("/plan");
|
||||
path.push_str("/all");
|
||||
let resp = reqwasm::http::Request::get(&path).send().await?;
|
||||
let resp = gloo_net::http::Request::get(&path).send().await?;
|
||||
if resp.status() != 200 {
|
||||
Err(format!("Status: {}", resp.status()).into())
|
||||
} else {
|
||||
@ -676,7 +698,7 @@ impl HttpStore {
|
||||
path.push_str("/plan");
|
||||
path.push_str("/at");
|
||||
path.push_str(&format!("/{}", date));
|
||||
let resp = reqwasm::http::Request::delete(&path).send().await?;
|
||||
let resp = gloo_net::http::Request::delete(&path).send().await?;
|
||||
if resp.status() != 200 {
|
||||
Err(format!("Status: {}", resp.status()).into())
|
||||
} else {
|
||||
@ -692,7 +714,7 @@ impl HttpStore {
|
||||
path.push_str("/plan");
|
||||
path.push_str("/at");
|
||||
path.push_str(&format!("/{}", date));
|
||||
let resp = reqwasm::http::Request::get(&path).send().await?;
|
||||
let resp = gloo_net::http::Request::get(&path).send().await?;
|
||||
if resp.status() != 200 {
|
||||
Err(format!("Status: {}", resp.status()).into())
|
||||
} else {
|
||||
@ -706,22 +728,22 @@ impl HttpStore {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn fetch_plan(&self) -> Result<Option<Vec<(String, i32)>>, Error> {
|
||||
let mut path = self.v2_path();
|
||||
path.push_str("/plan");
|
||||
let resp = reqwasm::http::Request::get(&path).send().await?;
|
||||
if resp.status() != 200 {
|
||||
Err(format!("Status: {}", resp.status()).into())
|
||||
} else {
|
||||
debug!("We got a valid response back");
|
||||
let plan = resp
|
||||
.json::<PlanDataResponse>()
|
||||
.await
|
||||
.map_err(|e| format!("{}", e))?
|
||||
.as_success();
|
||||
Ok(plan)
|
||||
}
|
||||
}
|
||||
//pub async fn fetch_plan(&self) -> Result<Option<Vec<(String, i32)>>, Error> {
|
||||
// let mut path = self.v2_path();
|
||||
// path.push_str("/plan");
|
||||
// let resp = gloo_net::http::Request::get(&path).send().await?;
|
||||
// if resp.status() != 200 {
|
||||
// Err(format!("Status: {}", resp.status()).into())
|
||||
// } else {
|
||||
// debug!("We got a valid response back");
|
||||
// let plan = resp
|
||||
// .json::<PlanDataResponse>()
|
||||
// .await
|
||||
// .map_err(|e| format!("{}", e))?
|
||||
// .as_success();
|
||||
// Ok(plan)
|
||||
// }
|
||||
//}
|
||||
|
||||
pub async fn fetch_inventory_for_date(
|
||||
&self,
|
||||
@ -738,13 +760,9 @@ impl HttpStore {
|
||||
path.push_str("/inventory");
|
||||
path.push_str("/at");
|
||||
path.push_str(&format!("/{}", date));
|
||||
let resp = reqwasm::http::Request::get(&path).send().await?;
|
||||
let resp = gloo_net::http::Request::get(&path).send().await?;
|
||||
if resp.status() != 200 {
|
||||
let err = Err(format!("Status: {}", resp.status()).into());
|
||||
Ok(match self.local_store.get_inventory_data() {
|
||||
Some(val) => val,
|
||||
None => return err,
|
||||
})
|
||||
Err(format!("Status: {}", resp.status()).into())
|
||||
} else {
|
||||
debug!("We got a valid response back");
|
||||
let InventoryData {
|
||||
@ -777,13 +795,9 @@ impl HttpStore {
|
||||
> {
|
||||
let mut path = self.v2_path();
|
||||
path.push_str("/inventory");
|
||||
let resp = reqwasm::http::Request::get(&path).send().await?;
|
||||
let resp = gloo_net::http::Request::get(&path).send().await?;
|
||||
if resp.status() != 200 {
|
||||
let err = Err(format!("Status: {}", resp.status()).into());
|
||||
Ok(match self.local_store.get_inventory_data() {
|
||||
Some(val) => val,
|
||||
None => return err,
|
||||
})
|
||||
Err(format!("Status: {}", resp.status()).into())
|
||||
} else {
|
||||
debug!("We got a valid response back");
|
||||
let InventoryData {
|
||||
@ -818,13 +832,10 @@ impl HttpStore {
|
||||
path.push_str(&format!("/{}", date));
|
||||
let filtered_ingredients: Vec<IngredientKey> = filtered_ingredients.into_iter().collect();
|
||||
let modified_amts: Vec<(IngredientKey, String)> = modified_amts.into_iter().collect();
|
||||
debug!("Storing inventory data in cache");
|
||||
let serialized_inventory = to_string(&(filtered_ingredients, modified_amts, extra_items))
|
||||
.expect("Unable to encode plan as json");
|
||||
debug!("Storing inventory data via API");
|
||||
let resp = reqwasm::http::Request::post(&path)
|
||||
.body(&serialized_inventory)
|
||||
.header("content-type", "application/json")
|
||||
let resp = gloo_net::http::Request::post(&path)
|
||||
.json(&(filtered_ingredients, modified_amts, extra_items))
|
||||
.expect("Failed to set body")
|
||||
.send()
|
||||
.await?;
|
||||
if resp.status() != 200 {
|
||||
@ -847,13 +858,10 @@ impl HttpStore {
|
||||
path.push_str("/inventory");
|
||||
let filtered_ingredients: Vec<IngredientKey> = filtered_ingredients.into_iter().collect();
|
||||
let modified_amts: Vec<(IngredientKey, String)> = modified_amts.into_iter().collect();
|
||||
debug!("Storing inventory data in cache");
|
||||
let serialized_inventory = to_string(&(filtered_ingredients, modified_amts, extra_items))
|
||||
.expect("Unable to encode plan as json");
|
||||
debug!("Storing inventory data via API");
|
||||
let resp = reqwasm::http::Request::post(&path)
|
||||
.body(&serialized_inventory)
|
||||
.header("content-type", "application/json")
|
||||
let resp = gloo_net::http::Request::post(&path)
|
||||
.json(&(filtered_ingredients, modified_amts, extra_items))
|
||||
.expect("Failed to set body")
|
||||
.send()
|
||||
.await?;
|
||||
if resp.status() != 200 {
|
||||
@ -868,7 +876,7 @@ impl HttpStore {
|
||||
pub async fn fetch_staples(&self) -> Result<Option<String>, Error> {
|
||||
let mut path = self.v2_path();
|
||||
path.push_str("/staples");
|
||||
let resp = reqwasm::http::Request::get(&path).send().await?;
|
||||
let resp = gloo_net::http::Request::get(&path).send().await?;
|
||||
if resp.status() != 200 {
|
||||
debug!("Invalid response back");
|
||||
Err(format!("Status: {}", resp.status()).into())
|
||||
@ -882,15 +890,15 @@ impl HttpStore {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn store_staples<S: AsRef<str>>(&self, content: S) -> Result<(), Error> {
|
||||
pub async fn store_staples<S: AsRef<str> + serde::Serialize>(
|
||||
&self,
|
||||
content: S,
|
||||
) -> Result<(), Error> {
|
||||
let mut path = self.v2_path();
|
||||
path.push_str("/staples");
|
||||
let serialized_staples: String =
|
||||
to_string(content.as_ref()).expect("Failed to serialize staples to json");
|
||||
|
||||
let resp = reqwasm::http::Request::post(&path)
|
||||
.body(&serialized_staples)
|
||||
.header("content-type", "application/json")
|
||||
let resp = gloo_net::http::Request::post(&path)
|
||||
.json(&content)
|
||||
.expect("Failed to set body")
|
||||
.send()
|
||||
.await?;
|
||||
if resp.status() != 200 {
|
||||
|
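The staples body keeps the same wire format after the switch: calling `.json(&content)` on a string serializes it as a JSON string, just like the old hand-built `to_string(content.as_ref())` body. A small check of that equivalence; serde_json stands in here for the serialization gloo-net performs internally:

```rust
fn main() {
    let staples = "1 cup flour";
    // A &str serializes to a quoted JSON string, matching the old request body.
    assert_eq!(serde_json::to_string(staples).unwrap(), "\"1 cup flour\"");
}
```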
@@ -19,6 +19,7 @@ use std::{
use chrono::NaiveDate;
use client_api::UserData;
use recipes::{parse, Ingredient, IngredientKey, Recipe, RecipeEntry};
use serde::{Deserialize, Serialize};
use sycamore::futures::spawn_local_scoped;
use sycamore::prelude::*;
use sycamore_state::{Handler, MessageMapper};
@@ -27,15 +28,23 @@ use wasm_bindgen::throw_str;

use crate::{
api::{HttpStore, LocalStore},
components,
linear::LinearSignal,
};

#[derive(Debug, Clone, PartialEq)]
fn bool_true() -> bool {
true
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AppState {
pub recipe_counts: BTreeMap<String, usize>,
pub recipe_counts: BTreeMap<String, u32>,
pub recipe_categories: BTreeMap<String, String>,
pub extras: Vec<(String, String)>,
// FIXME(jwall): This should really be storable I think?
#[serde(skip_deserializing, skip_serializing)]
pub staples: Option<BTreeSet<Ingredient>>,
// FIXME(jwall): This should really be storable I think?
#[serde(skip_deserializing, skip_serializing)]
pub recipes: BTreeMap<String, Recipe>,
pub category_map: BTreeMap<String, String>,
pub filtered_ingredients: BTreeSet<IngredientKey>,
@@ -43,6 +52,8 @@ pub struct AppState {
pub auth: Option<UserData>,
pub plan_dates: BTreeSet<NaiveDate>,
pub selected_plan_date: Option<NaiveDate>,
#[serde(default = "bool_true")]
pub use_staples: bool,
}

impl AppState {
@@ -59,13 +70,14 @@ impl AppState {
auth: None,
plan_dates: BTreeSet::new(),
selected_plan_date: None,
use_staples: true,
}
}
}
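// Why the free function above exists: serde's `default = "..."` attribute takes
// a path to a function returning the field's type, so `bool_true` is how a
// missing `use_staples` field in previously-saved state deserializes to true
// rather than false. The same pattern in isolation (names are illustrative):
#[derive(serde::Deserialize)]
struct Settings {
    #[serde(default = "default_true")]
    use_staples: bool,
}

fn default_true() -> bool {
    true
}
// e.g. deserializing the JSON string "{}" into Settings yields use_staples == true.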
pub enum Message {
ResetRecipeCounts,
UpdateRecipeCount(String, usize),
UpdateRecipeCount(String, u32),
AddExtra(String, String),
RemoveExtra(usize),
UpdateExtra(usize, String, String),
@@ -74,6 +86,7 @@ pub enum Message {
UpdateCategory(String, String, Option<Box<dyn FnOnce()>>),
ResetInventory,
AddFilteredIngredient(IngredientKey),
RemoveFilteredIngredient(IngredientKey),
UpdateAmt(IngredientKey, String),
SetUserData(UserData),
SaveState(Option<Box<dyn FnOnce()>>),
@@ -81,6 +94,7 @@ pub enum Message {
UpdateStaples(String, Option<Box<dyn FnOnce()>>),
DeletePlan(NaiveDate, Option<Box<dyn FnOnce()>>),
SelectPlanDate(NaiveDate, Option<Box<dyn FnOnce()>>),
UpdateUseStaples(bool), // TODO(jwall): Should this just be various settings?
}

impl Debug for Message {
@@ -111,6 +125,9 @@ impl Debug for Message {
Self::AddFilteredIngredient(arg0) => {
f.debug_tuple("AddFilteredIngredient").field(arg0).finish()
}
Self::RemoveFilteredIngredient(arg0) => {
f.debug_tuple("RemoveFilteredIngredient").field(arg0).finish()
}
Self::UpdateAmt(arg0, arg1) => {
f.debug_tuple("UpdateAmt").field(arg0).field(arg1).finish()
}
@@ -118,6 +135,7 @@ impl Debug for Message {
Self::SaveState(_) => write!(f, "SaveState"),
Self::LoadState(_) => write!(f, "LoadState"),
Self::UpdateStaples(arg, _) => f.debug_tuple("UpdateStaples").field(arg).finish(),
Self::UpdateUseStaples(arg) => f.debug_tuple("UpdateUseStaples").field(arg).finish(),
Self::SelectPlanDate(arg, _) => f.debug_tuple("SelectPlanDate").field(arg).finish(),
Self::DeletePlan(arg, _) => f.debug_tuple("DeletePlan").field(arg).finish(),
}
@@ -130,14 +148,14 @@ pub struct StateMachine {
}
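// The hand-written Debug impl above exists because several Message variants
// carry an Option<Box<dyn FnOnce()>> callback, and closures give a derived
// Debug nothing to print, so the callback field is simply elided by hand.
// The same pattern in miniature (variant name is illustrative):
use std::fmt;

enum Msg {
    Save(Option<Box<dyn FnOnce()>>),
}

impl fmt::Debug for Msg {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // The callback itself is not printable; show only the variant name.
            Msg::Save(_) => write!(f, "Save"),
        }
    }
}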
#[instrument]
fn parse_recipes(
pub fn parse_recipes(
recipe_entries: &Option<Vec<RecipeEntry>>,
) -> Result<Option<BTreeMap<String, Recipe>>, String> {
match recipe_entries {
Some(parsed) => {
let mut parsed_map = BTreeMap::new();
for r in parsed {
let recipe = match parse::as_recipe(&r.recipe_text()) {
let recipe = match r.try_into() {
Ok(r) => r,
Err(e) => {
error!("Error parsing recipe {}", e);
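// The r.try_into() above implies a TryFrom<&RecipeEntry> for Recipe impl that
// lives in the recipes crate (it has to, per the orphan rule) and isn't part of
// this diff. A plausible sketch of the conversion it wraps, reusing the same
// parser the old call site used (the String error type is an assumption):
impl TryFrom<&RecipeEntry> for Recipe {
    type Error = String;

    fn try_from(entry: &RecipeEntry) -> Result<Self, Self::Error> {
        // Keep one parsing code path for both the old and new call sites.
        parse::as_recipe(&entry.recipe_text())
    }
}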
@@ -157,40 +175,44 @@ impl StateMachine {
Self { store, local_store }
}

#[instrument(skip_all)]
async fn load_state(
store: &HttpStore,
local_store: &LocalStore,
original: &Signal<AppState>,
) -> Result<(), crate::api::Error> {
// NOTE(jwall): We use a linear Signal in here to ensure that we only
// call set on the signal once. When the LinearSignal get's dropped it
// will call set on the contained Signal.
let mut original: LinearSignal<AppState> = original.into();
if let Some(state) = local_store.fetch_app_state().await {
original = original.update(state);
}
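// LinearSignal itself isn't shown in this diff; the note describes a guard that
// buffers intermediate updates and performs exactly one set on the wrapped
// Signal, when the guard is dropped. A rough sketch of that shape (names and
// details are assumptions, not the crate's actual definition):
use sycamore::prelude::Signal;

struct LinearGuard<'a, T> {
    inner: &'a Signal<T>,
    pending: Option<T>,
}

impl<'a, T> LinearGuard<'a, T> {
    fn update(mut self, value: T) -> Self {
        // Remember the latest value instead of setting immediately.
        self.pending = Some(value);
        self
    }
}

impl<'a, T> Drop for LinearGuard<'a, T> {
    fn drop(&mut self) {
        // Exactly one set call, at the very end, so dependent effects run once.
        if let Some(value) = self.pending.take() {
            self.inner.set(value);
        }
    }
}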
let mut state = original.get().as_ref().clone();
info!("Synchronizing Recipes");
let recipe_entries = &store.fetch_recipes().await?;
let recipes = parse_recipes(&recipe_entries)?;

debug!(?recipes, "Parsed Recipes");
if let Some(recipes) = recipes {
state.recipes = recipes;
};

info!("Synchronizing staples");
state.staples = if let Some(content) = store.fetch_staples().await? {
local_store.set_staples(&content);
// now we need to parse staples as ingredients
let mut staples = parse::as_ingredient_list(&content)?;
Some(staples.drain(0..).collect())
} else {
if let Some(content) = local_store.get_staples() {
let mut staples = parse::as_ingredient_list(&content)?;
Some(staples.drain(0..).collect())
} else {
None
}
Some(BTreeSet::new())
};

info!("Synchronizing recipe");
if let Some(recipe_entries) = recipe_entries {
local_store.set_all_recipes(recipe_entries);
local_store.set_all_recipes(recipe_entries).await;
state.recipe_categories = recipe_entries
.iter()
.map(|entry| {
debug!(recipe_entry=?entry, "Getting recipe category");
(
entry.recipe_id().to_owned(),
entry
@@ -203,25 +225,25 @@ impl StateMachine {
}

info!("Fetching meal plan list");
let plan_dates = store.fetch_plan_dates().await?;
if let Some(mut plan_dates) = plan_dates {
if let Some(mut plan_dates) = store.fetch_plan_dates().await? {
debug!(?plan_dates, "meal plan list");
state.plan_dates = BTreeSet::from_iter(plan_dates.drain(0..));
}

info!("Synchronizing meal plan");
let plan = if let Some(cached_plan_date) = local_store.get_plan_date() {
let plan = store.fetch_plan_for_date(&cached_plan_date).await?;
state.selected_plan_date = Some(cached_plan_date);
plan
let plan = if let Some(ref cached_plan_date) = state.selected_plan_date {
store
.fetch_plan_for_date(cached_plan_date)
.await?
.or_else(|| Some(Vec::new()))
} else {
store.fetch_plan().await?
None
};
if let Some(plan) = plan {
// set the counts.
let mut plan_map = BTreeMap::new();
for (id, count) in plan {
plan_map.insert(id, count as usize);
plan_map.insert(id, count as u32);
}
state.recipe_counts = plan_map;
for (id, _) in state.recipes.iter() {
@@ -230,44 +252,32 @@ impl StateMachine {
}
}
} else {
if let Some(plan) = local_store.get_plan() {
state.recipe_counts = plan.iter().map(|(k, v)| (k.clone(), *v as usize)).collect();
} else {
// Initialize things to zero.
if let Some(rs) = recipe_entries {
for r in rs {
state.recipe_counts.insert(r.recipe_id().to_owned(), 0);
}
// Initialize things to zero.
if let Some(rs) = recipe_entries {
for r in rs {
state.recipe_counts.insert(r.recipe_id().to_owned(), 0);
}
}
}
let plan = state
.recipe_counts
.iter()
.map(|(k, v)| (k.clone(), *v as i32))
.collect::<Vec<(String, i32)>>();
local_store.store_plan(&plan);
info!("Checking for user account data");
if let Some(user_data) = store.fetch_user_data().await {
debug!("Successfully got account data from server");
local_store.set_user_data(Some(&user_data));
local_store.set_user_data(Some(&user_data)).await;
state.auth = Some(user_data);
} else {
debug!("Using account data from local store");
let user_data = local_store.get_user_data();
let user_data = local_store.get_user_data().await;
state.auth = user_data;
}
info!("Synchronizing categories");
match store.fetch_categories().await {
Ok(Some(mut categories_content)) => {
debug!(categories=?categories_content);
local_store.set_categories(Some(&categories_content));
let category_map = BTreeMap::from_iter(categories_content.drain(0..));
state.category_map = category_map;
}
Ok(None) => {
warn!("There is no category file");
local_store.set_categories(None);
}
Err(e) => {
error!("{:?}", e);
@@ -281,11 +291,6 @@ impl StateMachine {
info!("Synchronizing inventory data");
match inventory_data {
Ok((filtered_ingredients, modified_amts, extra_items)) => {
local_store.set_inventory_data((
&filtered_ingredients,
&modified_amts,
&extra_items,
));
state.modified_amts = modified_amts;
state.filtered_ingredients = filtered_ingredients;
state.extras = extra_items;
@@ -294,7 +299,9 @@ impl StateMachine {
error!("{:?}", e);
}
}
original.set(state);
// Finally we store all of this app state back to our localstore
local_store.store_app_state(&state).await;
original.update(state);
Ok(())
}
}
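// load_state above keeps a consistent sync order per field: prefer the server
// copy, fall back to the local store, then write whichever value won back to
// the local store so the next offline load sees it. Reduced to a single field
// it looks like this (names here are placeholders, not the real API):
async fn sync_field(remote: Option<String>, local: &mut Option<String>) -> Option<String> {
    match remote {
        Some(value) => {
            // The server answered; refresh the local cache with its copy.
            *local = Some(value.clone());
            Some(value)
        }
        // Offline or nothing stored server-side: keep whatever we had locally.
        None => local.clone(),
    }
}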
@ -310,80 +317,49 @@ impl MessageMapper<Message, AppState> for StateMachine {
|
||||
for (id, _) in original_copy.recipes.iter() {
|
||||
map.insert(id.clone(), 0);
|
||||
}
|
||||
let plan: Vec<(String, i32)> =
|
||||
map.iter().map(|(s, i)| (s.clone(), *i as i32)).collect();
|
||||
self.local_store.store_plan(&plan);
|
||||
original_copy.recipe_counts = map;
|
||||
}
|
||||
Message::UpdateRecipeCount(id, count) => {
|
||||
original_copy.recipe_counts.insert(id, count);
|
||||
let plan: Vec<(String, i32)> = original_copy
|
||||
.recipe_counts
|
||||
.iter()
|
||||
.map(|(s, i)| (s.clone(), *i as i32))
|
||||
.collect();
|
||||
self.local_store.store_plan(&plan);
|
||||
}
|
||||
Message::AddExtra(amt, name) => {
|
||||
original_copy.extras.push((amt, name));
|
||||
self.local_store.set_inventory_data((
|
||||
&original_copy.filtered_ingredients,
|
||||
&original_copy.modified_amts,
|
||||
&original_copy.extras,
|
||||
))
|
||||
}
|
||||
Message::RemoveExtra(idx) => {
|
||||
original_copy.extras.remove(idx);
|
||||
self.local_store.set_inventory_data((
|
||||
&original_copy.filtered_ingredients,
|
||||
&original_copy.modified_amts,
|
||||
&original_copy.extras,
|
||||
))
|
||||
}
|
||||
Message::UpdateExtra(idx, amt, name) => {
|
||||
match original_copy.extras.get_mut(idx) {
|
||||
Some(extra) => {
|
||||
extra.0 = amt;
|
||||
extra.1 = name;
|
||||
}
|
||||
None => {
|
||||
throw_str("Attempted to remove extra that didn't exist");
|
||||
}
|
||||
Message::UpdateExtra(idx, amt, name) => match original_copy.extras.get_mut(idx) {
|
||||
Some(extra) => {
|
||||
extra.0 = amt;
|
||||
extra.1 = name;
|
||||
}
|
||||
self.local_store.set_inventory_data((
|
||||
&original_copy.filtered_ingredients,
|
||||
&original_copy.modified_amts,
|
||||
&original_copy.extras,
|
||||
))
|
||||
}
|
||||
None => {
|
||||
throw_str("Attempted to remove extra that didn't exist");
|
||||
}
|
||||
},
|
||||
Message::SaveRecipe(entry, callback) => {
|
||||
let recipe =
|
||||
parse::as_recipe(entry.recipe_text()).expect("Failed to parse RecipeEntry");
|
||||
original_copy
|
||||
.recipes
|
||||
.insert(entry.recipe_id().to_owned(), recipe);
|
||||
let recipe_id = entry.recipe_id().to_owned();
|
||||
let recipe: Recipe = (&entry).try_into().expect("Failed to parse RecipeEntry");
|
||||
original_copy.recipes.insert(recipe_id.clone(), recipe);
|
||||
if !original_copy.recipe_counts.contains_key(entry.recipe_id()) {
|
||||
original_copy
|
||||
.recipe_counts
|
||||
.insert(entry.recipe_id().to_owned(), 0);
|
||||
original_copy.recipe_counts.insert(recipe_id.clone(), 0);
|
||||
}
|
||||
if let Some(cat) = entry.category().cloned() {
|
||||
original_copy
|
||||
.recipe_categories
|
||||
.entry(entry.recipe_id().to_owned())
|
||||
.entry(recipe_id.clone())
|
||||
.and_modify(|c| *c = cat.clone())
|
||||
.or_insert(cat);
|
||||
}
|
||||
let store = self.store.clone();
|
||||
self.local_store.set_recipe_entry(&entry);
|
||||
let local_store = self.local_store.clone();
|
||||
spawn_local_scoped(cx, async move {
|
||||
local_store.set_recipe_entry(&entry).await;
|
||||
if let Err(e) = store.store_recipes(vec![entry]).await {
|
||||
// FIXME(jwall): We should have a global way to trigger error messages
|
||||
error!(err=?e, "Unable to save Recipe");
|
||||
// FIXME(jwall): This should be an error message
|
||||
components::toast::error_message(cx, "Failed to save Recipe", None);
|
||||
} else {
|
||||
components::toast::message(cx, "Saved Recipe", None);
|
||||
}
|
||||
callback.map(|f| f());
|
||||
});
|
||||
@ -391,21 +367,17 @@ impl MessageMapper<Message, AppState> for StateMachine {
|
||||
Message::RemoveRecipe(recipe, callback) => {
|
||||
original_copy.recipe_counts.remove(&recipe);
|
||||
original_copy.recipes.remove(&recipe);
|
||||
self.local_store.delete_recipe_entry(&recipe);
|
||||
let store = self.store.clone();
|
||||
let local_store = self.local_store.clone();
|
||||
spawn_local_scoped(cx, async move {
|
||||
local_store.delete_recipe_entry(&recipe).await;
|
||||
if let Err(err) = store.delete_recipe(&recipe).await {
|
||||
error!(?err, "Failed to delete recipe");
|
||||
components::toast::error_message(cx, "Unable to delete recipe", None);
|
||||
} else {
|
||||
components::toast::message(cx, "Deleted Recipe", None);
|
||||
}
|
||||
callback.map(|f| f());
|
||||
});
|
||||
}
|
||||
Message::UpdateCategory(ingredient, category, callback) => {
|
||||
self.local_store
|
||||
.set_categories(Some(&vec![(ingredient.clone(), category.clone())]));
|
||||
original_copy
|
||||
.category_map
|
||||
.insert(ingredient.clone(), category.clone());
|
||||
@ -421,49 +393,42 @@ impl MessageMapper<Message, AppState> for StateMachine {
|
||||
original_copy.filtered_ingredients = BTreeSet::new();
|
||||
original_copy.modified_amts = BTreeMap::new();
|
||||
original_copy.extras = Vec::new();
|
||||
self.local_store.set_inventory_data((
|
||||
&original_copy.filtered_ingredients,
|
||||
&original_copy.modified_amts,
|
||||
&original_copy.extras,
|
||||
));
|
||||
components::toast::message(cx, "Reset Inventory", None);
|
||||
}
|
||||
Message::AddFilteredIngredient(key) => {
|
||||
original_copy.filtered_ingredients.insert(key);
|
||||
self.local_store.set_inventory_data((
|
||||
&original_copy.filtered_ingredients,
|
||||
&original_copy.modified_amts,
|
||||
&original_copy.extras,
|
||||
));
|
||||
}
|
||||
Message::RemoveFilteredIngredient(key) => {
|
||||
original_copy.filtered_ingredients.remove(&key);
|
||||
}
|
||||
Message::UpdateAmt(key, amt) => {
|
||||
original_copy.modified_amts.insert(key, amt);
|
||||
self.local_store.set_inventory_data((
|
||||
&original_copy.filtered_ingredients,
|
||||
&original_copy.modified_amts,
|
||||
&original_copy.extras,
|
||||
));
|
||||
}
|
||||
Message::SetUserData(user_data) => {
|
||||
self.local_store.set_user_data(Some(&user_data));
|
||||
original_copy.auth = Some(user_data);
|
||||
let local_store = self.local_store.clone();
|
||||
original_copy.auth = Some(user_data.clone());
|
||||
spawn_local_scoped(cx, async move {
|
||||
local_store.set_user_data(Some(&user_data)).await;
|
||||
});
|
||||
}
|
||||
Message::SaveState(f) => {
|
||||
let mut original_copy = original_copy.clone();
|
||||
let store = self.store.clone();
|
||||
let local_store = self.local_store.clone();
|
||||
spawn_local_scoped(cx, async move {
|
||||
if original_copy.selected_plan_date.is_none() {
|
||||
original_copy.selected_plan_date = Some(chrono::Local::now().date_naive());
|
||||
}
|
||||
original_copy
|
||||
.plan_dates
|
||||
.insert(original_copy.selected_plan_date.map(|d| d.clone()).unwrap());
|
||||
original_copy.plan_dates.insert(
|
||||
original_copy
|
||||
.selected_plan_date
|
||||
.as_ref()
|
||||
.map(|d| d.clone())
|
||||
.unwrap(),
|
||||
);
|
||||
if let Err(e) = store.store_app_state(&original_copy).await {
|
||||
error!(err=?e, "Error saving app state");
|
||||
components::toast::error_message(cx, "Failed to save user state", None);
|
||||
} else {
|
||||
components::toast::message(cx, "Saved user state", None);
|
||||
};
|
||||
local_store.store_app_state(&original_copy).await;
|
||||
original.set(original_copy);
|
||||
f.map(|f| f());
|
||||
});
|
||||
@ -474,17 +439,10 @@ impl MessageMapper<Message, AppState> for StateMachine {
|
||||
Message::LoadState(f) => {
|
||||
let store = self.store.clone();
|
||||
let local_store = self.local_store.clone();
|
||||
debug!("Loading user state.");
|
||||
spawn_local_scoped(cx, async move {
|
||||
if let Err(err) = Self::load_state(&store, &local_store, original).await {
|
||||
error!(?err, "Failed to load user state");
|
||||
components::toast::error_message(cx, "Failed to load_state.", None);
|
||||
} else {
|
||||
components::toast::message(cx, "Loaded user state", None);
|
||||
local_store.set_inventory_data((
|
||||
&original.get().filtered_ingredients,
|
||||
&original.get().modified_amts,
|
||||
&original.get().extras,
|
||||
));
|
||||
}
|
||||
f.map(|f| f());
|
||||
});
|
||||
@ -492,47 +450,47 @@ impl MessageMapper<Message, AppState> for StateMachine {
|
||||
}
|
||||
Message::UpdateStaples(content, callback) => {
|
||||
let store = self.store.clone();
|
||||
let local_store = self.local_store.clone();
|
||||
spawn_local_scoped(cx, async move {
|
||||
local_store.set_staples(&content);
|
||||
if let Err(err) = store.store_staples(content).await {
|
||||
error!(?err, "Failed to store staples");
|
||||
components::toast::error_message(cx, "Failed to store staples", None);
|
||||
} else {
|
||||
components::toast::message(cx, "Updated staples", None);
|
||||
callback.map(|f| f());
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
Message::UpdateUseStaples(value) => {
|
||||
original_copy.use_staples = value;
|
||||
}
|
||||
Message::SelectPlanDate(date, callback) => {
|
||||
let store = self.store.clone();
|
||||
let local_store = self.local_store.clone();
|
||||
spawn_local_scoped(cx, async move {
|
||||
if let Some(mut plan) = store
|
||||
if let Ok(Some(mut plan)) = store
|
||||
.fetch_plan_for_date(&date)
|
||||
.await
|
||||
.expect("Failed to fetch plan for date")
|
||||
{
|
||||
// Note(jwall): This is a little unusual but because this
|
||||
// is async code we can't rely on the set below.
|
||||
original_copy.recipe_counts =
|
||||
BTreeMap::from_iter(plan.drain(0..).map(|(k, v)| (k, v as usize)));
|
||||
BTreeMap::from_iter(plan.drain(0..).map(|(k, v)| (k, v as u32)));
|
||||
let (filtered, modified, extras) = store
|
||||
.fetch_inventory_for_date(&date)
|
||||
.await
|
||||
.expect("Failed to fetch inventory_data for date");
|
||||
original_copy.modified_amts = modified;
|
||||
original_copy.filtered_ingredients = filtered;
|
||||
original_copy.extras = extras;
|
||||
} else {
|
||||
store.store_plan_for_date(Vec::new(), &date).await.expect("failed to set plan on server");
|
||||
}
|
||||
let (filtered, modified, extras) = store
|
||||
.fetch_inventory_for_date(&date)
|
||||
.await
|
||||
.expect("Failed to fetch inventory_data for date");
|
||||
original_copy.plan_dates.insert(date.clone());
|
||||
original_copy.modified_amts = modified;
|
||||
original_copy.filtered_ingredients = filtered;
|
||||
original_copy.extras = extras;
|
||||
local_store.set_plan_date(&date);
|
||||
original_copy.selected_plan_date = Some(date.clone());
|
||||
store
|
||||
.store_plan_for_date(vec![], &date)
|
||||
.await
|
||||
.expect("Failed to init meal plan for date");
|
||||
|
||||
local_store.store_app_state(&original_copy).await;
|
||||
original.set(original_copy);
|
||||
|
||||
callback.map(|f| f());
|
||||
@ -547,31 +505,33 @@ impl MessageMapper<Message, AppState> for StateMachine {
|
||||
let local_store = self.local_store.clone();
|
||||
spawn_local_scoped(cx, async move {
|
||||
if let Err(err) = store.delete_plan_for_date(&date).await {
|
||||
components::toast::error_message(
|
||||
cx,
|
||||
"Failed to delete meal plan for date",
|
||||
None,
|
||||
);
|
||||
error!(?err, "Error deleting plan");
|
||||
} else {
|
||||
local_store.delete_plan();
|
||||
|
||||
original_copy.plan_dates.remove(&date);
|
||||
// Reset all meal planning state;
|
||||
let _ = original_copy.recipe_counts.iter_mut().map(|(_, v)| *v = 0);
|
||||
original_copy.filtered_ingredients = BTreeSet::new();
|
||||
original_copy.modified_amts = BTreeMap::new();
|
||||
original_copy.extras = Vec::new();
|
||||
local_store.store_app_state(&original_copy).await;
|
||||
original.set(original_copy);
|
||||
components::toast::message(cx, "Deleted Plan", None);
|
||||
|
||||
callback.map(|f| f());
|
||||
}
});
// NOTE(jwall): Because we do our signal set above in the async block
// we have to return here to avoid lifetime issues and double setting
// the original signal.
return;
}
}
original.set(original_copy);
spawn_local_scoped(cx, {
let local_store = self.local_store.clone();
async move {
local_store.store_app_state(&original_copy).await;
original.set(original_copy);
}
});
}
}
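// The early return noted above is a recurring shape in this handler: any arm
// that moves original_copy into a spawned async block and calls original.set
// there has to bail out before the shared original.set at the bottom runs,
// otherwise the moved value would be gone and the signal would get set twice.
// A stripped-down sketch of that control flow, with a plain closure standing in
// for spawn_local_scoped and a &mut Option standing in for the signal:
fn dispatch_sketch(slot: &mut Option<String>, async_branch: bool) {
    let copy = String::from("updated");
    if async_branch {
        // Imagine this closure being handed off to the executor: it now owns
        // both the copy and the write...
        let task = move || *slot = Some(copy);
        task();
        return; // ...so the synchronous fallthrough below must be skipped.
    }
    *slot = Some(copy); // only the purely synchronous arms reach this line
}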
@@ -42,18 +42,19 @@ pub fn AddRecipe<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View
} else {
Some(category)
};
RecipeEntry(
recipe_title
RecipeEntry {
id: recipe_title
.get()
.as_ref()
.to_lowercase()
.replace(" ", "_")
.replace("\n", ""),
STARTER_RECIPE
text: STARTER_RECIPE
.replace("TITLE_PLACEHOLDER", recipe_title.get().as_str())
.replace("\r", ""),
category,
)
serving_count: None,
}
});
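// RecipeEntry moves from a positional tuple struct to named fields here and
// picks up an optional serving_count. The definition itself lives in the
// recipes crate and isn't in this hunk; judging by the literals above it is
// roughly this shape (a sketch, field visibility and types inferred from usage):
pub struct RecipeEntry {
    pub id: String,
    pub text: String,
    pub category: Option<String>,
    pub serving_count: Option<i64>,
}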
view! {cx,
@ -49,7 +49,7 @@ fn CategoryRow<'ctx, G: Html>(cx: Scope<'ctx>, props: CategoryRowProps<'ctx>) ->
|
||||
});
|
||||
view! {cx,
|
||||
tr() {
|
||||
td() {
|
||||
td(class="margin-bot-1 border-bottom") {
|
||||
(ingredient_clone) br()
|
||||
Indexed(
|
||||
iterable=recipes,
|
||||
|
@ -17,8 +17,8 @@ use sycamore::prelude::*;
|
||||
#[component]
|
||||
pub fn Footer<G: Html>(cx: Scope) -> View<G> {
|
||||
view! {cx,
|
||||
nav(class="no-print") {
|
||||
ul {
|
||||
nav(class="no-print menu-font") {
|
||||
ul(class="no-list") {
|
||||
li { a(href="https://github.com/zaphar/kitchen") { "On Github" } }
|
||||
}
|
||||
}
|
||||
|
@ -23,9 +23,9 @@ pub fn Header<'ctx, G: Html>(cx: Scope<'ctx>, h: StateHandler<'ctx>) -> View<G>
|
||||
None => "Login".to_owned(),
|
||||
});
|
||||
view! {cx,
|
||||
nav(class="no-print") {
|
||||
nav(class="no-print row-flex align-center header-bg heavy-bottom-border menu-font") {
|
||||
h1(class="title") { "Kitchen" }
|
||||
ul {
|
||||
ul(class="row-flex align-center no-list") {
|
||||
li { a(href="/ui/planning/select") { "MealPlan" } }
|
||||
li { a(href="/ui/manage/ingredients") { "Manage" } }
|
||||
li { a(href="/ui/login") { (login.get()) } }
|
||||
|
@ -24,18 +24,7 @@ pub mod recipe_selection;
|
||||
pub mod shopping_list;
|
||||
pub mod staples;
|
||||
pub mod tabs;
|
||||
pub mod toast;
|
||||
|
||||
pub use add_recipe::*;
|
||||
pub use categories::*;
|
||||
pub use footer::*;
|
||||
pub use header::*;
|
||||
pub use number_field::*;
|
||||
pub use plan_list::*;
|
||||
pub use recipe::*;
|
||||
pub use recipe_list::*;
|
||||
pub use recipe_plan::*;
|
||||
pub use recipe_selection::*;
|
||||
pub use shopping_list::*;
|
||||
pub use staples::*;
|
||||
pub use tabs::*;
|
||||
|
@ -11,18 +11,209 @@
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
use maud::html;
|
||||
use sycamore::prelude::*;
|
||||
use tracing::debug;
|
||||
use web_sys::{Event, HtmlInputElement};
|
||||
use tracing::{debug, error};
|
||||
use wasm_bindgen::{JsCast, JsValue};
|
||||
use wasm_web_component::{web_component, WebComponentBinding};
|
||||
use web_sys::{CustomEvent, CustomEventInit, Event, HtmlElement, InputEvent, ShadowRoot};
|
||||
|
||||
use crate::js_lib;
|
||||
#[web_component(
|
||||
observed_attrs = "['val', 'min', 'max', 'step']",
|
||||
observed_events = "['change', 'click', 'input']"
|
||||
)]
|
||||
pub struct NumberSpinner {
|
||||
root: Option<ShadowRoot>,
|
||||
min: i32,
|
||||
max: i32,
|
||||
step: i32,
|
||||
value: i32,
|
||||
}
|
||||
|
||||
impl NumberSpinner {
|
||||
fn get_input_el(&self) -> HtmlElement {
|
||||
self.root
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.get_element_by_id("nval")
|
||||
.unwrap()
|
||||
.dyn_into()
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl WebComponentBinding for NumberSpinner {
|
||||
fn init_mut(&mut self, element: &web_sys::HtmlElement) {
|
||||
(self.min, self.max, self.step, self.value) = (0, 99, 1, 0);
|
||||
debug!("Initializing element instance");
|
||||
let root = html! {
|
||||
span {
|
||||
link rel="stylesheet" href="/ui/static/app.css" { };
|
||||
style {
|
||||
r#"
|
||||
span { display: block; }
|
||||
span.button {
|
||||
font-size: 2em; font-weight: bold;
|
||||
}
|
||||
.number-input {
|
||||
border-width: var(--border-width);
|
||||
border-style: inset;
|
||||
padding: 3pt;
|
||||
border-radius: 10px;
|
||||
width: 3em;
|
||||
}
|
||||
"#
|
||||
};
|
||||
span class="button" id="inc" { "+" }; " "
|
||||
// TODO(jwall): plaintext-only would be nice but I can't actually do that yet.
|
||||
span id="nval" class="number-input" contenteditable="true" { "0" } " "
|
||||
span class="button" id="dec" { "-" };
|
||||
};
|
||||
};
|
||||
self.attach_shadow(element, &root.into_string());
|
||||
self.root = element.shadow_root();
|
||||
}
|
||||
|
||||
fn connected_mut(&mut self, element: &HtmlElement) {
|
||||
debug!("COUNTS: connecting to DOM");
|
||||
let val = element.get_attribute("val").unwrap_or_else(|| "0".into());
|
||||
let min = element.get_attribute("min").unwrap_or_else(|| "0".into());
|
||||
let max = element.get_attribute("max").unwrap_or_else(|| "99".into());
|
||||
let step = element.get_attribute("step").unwrap_or_else(|| "1".into());
|
||||
debug!(?val, ?min, ?max, ?step, "connecting to DOM");
|
||||
let nval_el = self.get_input_el();
|
||||
if let Ok(parsed) = val.parse::<i32>() {
|
||||
self.value = parsed;
|
||||
nval_el.set_inner_text(&val);
|
||||
}
|
||||
if let Ok(parsed) = min.parse::<i32>() {
|
||||
self.min = parsed;
|
||||
}
|
||||
if let Ok(parsed) = max.parse::<i32>() {
|
||||
self.max = parsed;
|
||||
}
|
||||
if let Ok(parsed) = step.parse::<i32>() {
|
||||
self.step = parsed;
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_event_mut(&mut self, element: &web_sys::HtmlElement, event: &Event) {
|
||||
let target: HtmlElement = event.target().unwrap().dyn_into().unwrap();
|
||||
let id = target.get_attribute("id");
|
||||
let event_type = event.type_();
|
||||
let nval_el = self.get_input_el();
|
||||
debug!(?id, ?event_type, "saw event");
|
||||
match (id.as_ref().map(|s| s.as_str()), event_type.as_str()) {
|
||||
(Some("inc"), "click") => {
|
||||
if self.value < self.max {
|
||||
self.value += 1;
|
||||
nval_el.set_inner_text(&format!("{}", self.value));
|
||||
}
|
||||
}
|
||||
(Some("dec"), "click") => {
|
||||
if self.value > self.min {
|
||||
self.value -= 1;
|
||||
nval_el.set_inner_text(&format!("{}", self.value));
|
||||
}
|
||||
}
|
||||
(Some("nval"), "input") => {
|
||||
let input_event = event.dyn_ref::<InputEvent>().unwrap();
|
||||
if let Some(data) = input_event.data() {
|
||||
// We only allow numeric input data here.
|
||||
debug!(data, input_type=?input_event.input_type() , "got input");
|
||||
if data.chars().filter(|c| !c.is_numeric()).count() > 0 {
|
||||
nval_el.set_inner_text(&format!("{}", self.value));
|
||||
}
|
||||
} else {
|
||||
nval_el.set_inner_text(&format!("{}{}", nval_el.inner_text(), self.value));
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
debug!("Ignoring event");
|
||||
return;
|
||||
}
|
||||
};
|
||||
let mut event_dict = CustomEventInit::new();
|
||||
event_dict.detail(&JsValue::from_f64(self.value as f64));
|
||||
element
|
||||
.dispatch_event(&CustomEvent::new_with_event_init_dict("updated", &event_dict).unwrap())
|
||||
.unwrap();
|
||||
debug!("Dispatched updated event");
|
||||
}
|
||||
|
||||
fn attribute_changed_mut(
|
||||
&mut self,
|
||||
_element: &web_sys::HtmlElement,
|
||||
name: JsValue,
|
||||
old_value: JsValue,
|
||||
new_value: JsValue,
|
||||
) {
|
||||
let nval_el = self.get_input_el();
|
||||
let name = name.as_string().unwrap();
|
||||
debug!(
|
||||
?name,
|
||||
?old_value,
|
||||
?new_value,
|
||||
"COUNTS: handling attribute change"
|
||||
);
|
||||
match name.as_str() {
|
||||
"val" => {
|
||||
debug!("COUNTS: got an updated value");
|
||||
if let Some(val) = new_value.as_string() {
|
||||
debug!(val, "COUNTS: got an updated value");
|
||||
if let Ok(val) = val.parse::<i32>() {
|
||||
self.value = val;
|
||||
nval_el.set_inner_text(format!("{}", self.value).as_str());
|
||||
} else {
|
||||
error!(?new_value, "COUNTS: Not a valid f64 value");
|
||||
}
|
||||
}
|
||||
}
|
||||
"min" => {
|
||||
if let Some(val) = new_value.as_string() {
|
||||
debug!(val, "COUNTS: got an updated value");
|
||||
if let Ok(val) = val.parse::<i32>() {
|
||||
self.min = val;
|
||||
} else {
|
||||
error!(?new_value, "COUNTS: Not a valid f64 value");
|
||||
}
|
||||
}
|
||||
}
|
||||
"max" => {
|
||||
if let Some(val) = new_value.as_string() {
|
||||
debug!(val, "COUNTS: got an updated value");
|
||||
if let Ok(val) = val.parse::<i32>() {
|
||||
self.max = val;
|
||||
} else {
|
||||
error!(?new_value, "COUNTS: Not a valid f64 value");
|
||||
}
|
||||
}
|
||||
}
|
||||
"step" => {
|
||||
if let Some(val) = new_value.as_string() {
|
||||
debug!(val, "COUNTS: got an updated value");
|
||||
if let Ok(val) = val.parse::<i32>() {
|
||||
self.step = val;
|
||||
} else {
|
||||
error!(?new_value, "COUNTS: Not a valid f64 value");
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
debug!("Ignoring Attribute Change");
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Props)]
|
||||
pub struct NumberProps<'ctx, F>
|
||||
where
|
||||
F: Fn(Event),
|
||||
F: Fn(CustomEvent),
|
||||
{
|
||||
name: String,
|
||||
class: String,
|
||||
on_change: Option<F>,
|
||||
min: f64,
|
||||
counter: &'ctx Signal<f64>,
|
||||
@ -31,44 +222,27 @@ where
|
||||
#[component]
|
||||
pub fn NumberField<'ctx, F, G: Html>(cx: Scope<'ctx>, props: NumberProps<'ctx, F>) -> View<G>
|
||||
where
|
||||
F: Fn(web_sys::Event) + 'ctx,
|
||||
F: Fn(CustomEvent) + 'ctx,
|
||||
{
|
||||
let NumberProps {
|
||||
name,
|
||||
class,
|
||||
on_change,
|
||||
min,
|
||||
counter,
|
||||
} = props;
|
||||
|
||||
NumberSpinner::define_once();
|
||||
// TODO(jwall): I'm pretty sure this triggers: https://github.com/sycamore-rs/sycamore/issues/602
|
||||
// Which means I probably have to wait till v0.9.0 drops or switch to leptos.
|
||||
let id = name.clone();
|
||||
let inc_target_id = id.clone();
|
||||
let dec_target_id = id.clone();
|
||||
let min_field = format!("{}", min);
|
||||
|
||||
let initial_count = *counter.get();
|
||||
view! {cx,
|
||||
div() {
|
||||
input(type="number", id=id, name=name, class="item-count-sel", min=min_field, max="99", step="1", bind:valueAsNumber=counter, on:input=move |evt| {
|
||||
on_change.as_ref().map(|f| f(evt));
|
||||
})
|
||||
span(class="item-count-inc-dec", on:click=move |_| {
|
||||
let i = *counter.get_untracked();
|
||||
let target = js_lib::get_element_by_id::<HtmlInputElement>(&inc_target_id).unwrap().expect(&format!("No such element with id {}", inc_target_id));
|
||||
counter.set(i+1.0);
|
||||
debug!(counter=%(counter.get_untracked()), "set counter to new value");
|
||||
// We force an input event to get triggered for our target.
|
||||
target.dispatch_event(&web_sys::Event::new("input").expect("Failed to create new event")).expect("Failed to dispatch event to target");
|
||||
}) { "▲" }
|
||||
" "
|
||||
span(class="item-count-inc-dec", on:click=move |_| {
|
||||
let i = *counter.get_untracked();
|
||||
let target = js_lib::get_element_by_id::<HtmlInputElement>(&dec_target_id).unwrap().expect(&format!("No such element with id {}", dec_target_id));
|
||||
if i > min {
|
||||
counter.set(i-1.0);
|
||||
debug!(counter=%(counter.get_untracked()), "set counter to new value");
|
||||
// We force an input event to get triggered for our target.
|
||||
target.dispatch_event(&web_sys::Event::new("input").expect("Failed to create new event")).expect("Failed to dispatch event to target");
|
||||
}
|
||||
}) { "▼" }
|
||||
}
|
||||
number-spinner(id=id, class=(class), val=(initial_count), min=min, on:updated=move |evt: Event| {
|
||||
let event = evt.unchecked_into::<CustomEvent>();
|
||||
let val: f64 = event.detail().as_f64().unwrap();
|
||||
counter.set(val);
|
||||
on_change.as_ref().map(|f| f(event));
|
||||
debug!(counter=%(counter.get_untracked()), "set counter to new value");
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,3 @@
|
||||
use chrono::NaiveDate;
|
||||
// Copyright 2023 Jeremy Wall (Jeremy@marzhilsltudios.com)
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -12,6 +11,7 @@ use chrono::NaiveDate;
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
use chrono::NaiveDate;
|
||||
use sycamore::prelude::*;
|
||||
|
||||
use crate::app_state::{Message, StateHandler};
|
||||
@ -23,30 +23,25 @@ pub struct PlanListProps<'ctx> {
|
||||
list: &'ctx ReadSignal<Vec<NaiveDate>>,
|
||||
}
|
||||
|
||||
// TODO(jwall): We also need a "new plan button"
|
||||
#[instrument(skip_all, fields(dates=?props.list))]
|
||||
#[component]
|
||||
pub fn PlanList<'ctx, G: Html>(cx: Scope<'ctx>, props: PlanListProps<'ctx>) -> View<G> {
|
||||
let PlanListProps { sh, list } = props;
|
||||
view! {cx,
|
||||
div() {
|
||||
table() {
|
||||
div(class="column-flex") {
|
||||
Indexed(
|
||||
iterable=list,
|
||||
view=move |cx, date| {
|
||||
let date_display = format!("{}", date);
|
||||
view!{cx,
|
||||
tr() {
|
||||
td() {
|
||||
span(role="button", class="outline", on:click=move |_| {
|
||||
sh.dispatch(cx, Message::SelectPlanDate(date, None))
|
||||
}) { (date_display) }
|
||||
}
|
||||
td() {
|
||||
span(role="button", class="destructive", on:click=move |_| {
|
||||
sh.dispatch(cx, Message::DeletePlan(date, None))
|
||||
}) { "Delete Plan" }
|
||||
}
|
||||
div(class="row-flex margin-bot-half") {
|
||||
button(class="outline margin-right-1", on:click=move |_| {
|
||||
sh.dispatch(cx, Message::SelectPlanDate(date, None))
|
||||
}) { (date_display) }
|
||||
button(class="destructive", on:click=move |_| {
|
||||
sh.dispatch(cx, Message::DeletePlan(date, None))
|
||||
}) { "Delete Plan" }
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -49,7 +49,15 @@ pub fn Editor<'ctx, G: Html>(cx: Scope<'ctx>, props: RecipeComponentProps<'ctx>)
|
||||
let store = crate::api::HttpStore::get_from_context(cx);
|
||||
let recipe: &Signal<RecipeEntry> =
|
||||
create_signal(cx, RecipeEntry::new(&recipe_id, String::new()));
|
||||
let text = create_signal(cx, String::new());
|
||||
let text = create_signal(cx, String::from("0"));
|
||||
let serving_count_str = create_signal(cx, String::new());
|
||||
let serving_count = create_memo(cx, || {
|
||||
if let Ok(count) = serving_count_str.get().parse::<i64>() {
|
||||
count
|
||||
} else {
|
||||
0
|
||||
}
|
||||
});
|
||||
let error_text = create_signal(cx, String::from("Parse results..."));
|
||||
let aria_hint = create_signal(cx, "false");
|
||||
let category = create_signal(cx, "Entree".to_owned());
|
||||
@ -79,12 +87,18 @@ pub fn Editor<'ctx, G: Html>(cx: Scope<'ctx>, props: RecipeComponentProps<'ctx>)
|
||||
|
||||
debug!("creating editor view");
|
||||
view! {cx,
|
||||
label(for="recipe_category") { "Category" }
|
||||
input(name="recipe_category", bind:value=category, on:change=move |_| dirty.set(true))
|
||||
div(class="grid") {
|
||||
div {
|
||||
label(for="recipe_text") { "Recipe" }
|
||||
textarea(name="recipe_text", bind:value=text, aria-invalid=aria_hint.get(), rows=20, on:change=move |_| {
|
||||
div {
|
||||
label(for="recipe_category") { "Category" }
|
||||
input(name="recipe_category", bind:value=category, on:change=move |_| dirty.set(true))
|
||||
}
|
||||
div {
|
||||
label(for="serving_count") { "Serving Count" }
|
||||
input(name="serving_count", bind:value=serving_count_str, on:change=move |_| dirty.set(true))
|
||||
}
|
||||
div {
|
||||
div(class="row-flex") {
|
||||
label(for="recipe_text", class="block align-stretch expand-height") { "Recipe: " }
|
||||
textarea(class="width-third", name="recipe_text", bind:value=text, aria-invalid=aria_hint.get(), cols="50", rows=20, on:change=move |_| {
|
||||
dirty.set(true);
|
||||
check_recipe_parses(text.get_untracked().as_str(), error_text, aria_hint);
|
||||
}, on:input=move |_| {
|
||||
@ -97,34 +111,37 @@ pub fn Editor<'ctx, G: Html>(cx: Scope<'ctx>, props: RecipeComponentProps<'ctx>)
|
||||
}
|
||||
div(class="parse") { (error_text.get()) }
|
||||
}
|
||||
span(role="button", on:click=move |_| {
|
||||
let unparsed = text.get_untracked();
|
||||
if check_recipe_parses(unparsed.as_str(), error_text, aria_hint) {
|
||||
debug!("triggering a save");
|
||||
if !*dirty.get_untracked() {
|
||||
debug!("Recipe text is unchanged");
|
||||
return;
|
||||
div {
|
||||
button(on:click=move |_| {
|
||||
let unparsed = text.get_untracked();
|
||||
if check_recipe_parses(unparsed.as_str(), error_text, aria_hint) {
|
||||
debug!("triggering a save");
|
||||
if !*dirty.get_untracked() {
|
||||
debug!("Recipe text is unchanged");
|
||||
return;
|
||||
}
|
||||
debug!("Recipe text is changed");
|
||||
let category = category.get_untracked();
|
||||
let category = if category.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(category.as_ref().clone())
|
||||
};
|
||||
let recipe_entry = RecipeEntry {
|
||||
id: id.get_untracked().as_ref().clone(),
|
||||
text: text.get_untracked().as_ref().clone(),
|
||||
category,
|
||||
serving_count: Some(*serving_count.get()),
|
||||
};
|
||||
sh.dispatch(cx, Message::SaveRecipe(recipe_entry, None));
|
||||
dirty.set(false);
|
||||
}
|
||||
debug!("Recipe text is changed");
|
||||
let category = category.get_untracked();
|
||||
let category = if category.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(category.as_ref().clone())
|
||||
};
|
||||
let recipe_entry = RecipeEntry(
|
||||
id.get_untracked().as_ref().clone(),
|
||||
text.get_untracked().as_ref().clone(),
|
||||
category,
|
||||
);
|
||||
sh.dispatch(cx, Message::SaveRecipe(recipe_entry, None));
|
||||
dirty.set(false);
|
||||
}
|
||||
// TODO(jwall): Show error message if trying to save when recipe doesn't parse.
|
||||
}) { "Save" } " "
|
||||
span(role="button", on:click=move |_| {
|
||||
sh.dispatch(cx, Message::RemoveRecipe(id.get_untracked().as_ref().to_owned(), Some(Box::new(|| sycamore_router::navigate("/ui/planning/plan")))));
|
||||
}) { "delete" } " "
|
||||
// TODO(jwall): Show error message if trying to save when recipe doesn't parse.
|
||||
}) { "Save" } " "
|
||||
button(on:click=move |_| {
|
||||
sh.dispatch(cx, Message::RemoveRecipe(id.get_untracked().as_ref().to_owned(), Some(Box::new(|| sycamore_router::navigate("/ui/planning/plan")))));
|
||||
}) { "delete" } " "
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -142,7 +159,7 @@ fn Steps<G: Html>(cx: Scope, steps: Vec<recipes::Step>) -> View<G> {
|
||||
view! {cx,
|
||||
div {
|
||||
h3 { "Step " (idx + 1) }
|
||||
ul(class="ingredients") {
|
||||
ul(class="ingredients no-list") {
|
||||
(ingredient_fragments)
|
||||
}
|
||||
div(class="instructions") {
|
||||
@ -166,18 +183,22 @@ pub fn Viewer<'ctx, G: Html>(cx: Scope<'ctx>, props: RecipeComponentProps<'ctx>)
|
||||
let recipe_signal = sh.get_selector(cx, move |state| {
|
||||
if let Some(recipe) = state.get().recipes.get(&recipe_id) {
|
||||
let title = recipe.title.clone();
|
||||
let serving_count = recipe.serving_count.clone();
|
||||
let desc = recipe.desc.clone().unwrap_or_else(|| String::new());
|
||||
let steps = recipe.steps.clone();
|
||||
Some((title, desc, steps))
|
||||
Some((title, serving_count, desc, steps))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
if let Some((title, desc, steps)) = recipe_signal.get().as_ref().clone() {
|
||||
if let Some((title, serving_count, desc, steps)) = recipe_signal.get().as_ref().clone() {
|
||||
debug!("Viewing recipe.");
|
||||
view.set(view! {cx,
|
||||
div(class="recipe") {
|
||||
h1(class="recipe_title") { (title) }
|
||||
div(class="serving_count") {
|
||||
"Serving Count: " (serving_count.map(|v| format!("{}", v)).unwrap_or_else(|| "Unconfigured".to_owned()))
|
||||
}
|
||||
div(class="recipe_description") {
|
||||
(desc)
|
||||
}
|
||||
|
@ -52,20 +52,26 @@ pub fn CategoryGroup<'ctx, G: Html>(
|
||||
});
|
||||
view! {cx,
|
||||
h2 { (category) }
|
||||
table(class="recipe_selector no-print") {
|
||||
div(class="no-print row-flex flex-wrap-start align-stretch") {
|
||||
(View::new_fragment(
|
||||
rows.get().iter().cloned().map(|r| {
|
||||
view ! {cx,
|
||||
tr { Keyed(
|
||||
Keyed(
|
||||
iterable=r,
|
||||
view=move |cx, sig| {
|
||||
let title = create_memo(cx, move || sig.get().1.title.clone());
|
||||
let serving_count = create_memo(cx, move || sig.get().1.serving_count.clone());
|
||||
view! {cx,
|
||||
td { RecipeSelection(i=sig.get().0.to_owned(), title=title, sh=sh) }
|
||||
div(class="cell column-flex justify-end align-stretch") {
|
||||
RecipeSelection(
|
||||
i=sig.get().0.to_owned(),
|
||||
title=title, sh=sh,
|
||||
serving_count=serving_count,
|
||||
) }
|
||||
}
|
||||
},
|
||||
key=|sig| sig.get().0.to_owned(),
|
||||
)}
|
||||
)
|
||||
}
|
||||
}).collect()
|
||||
))
|
||||
@ -89,7 +95,7 @@ pub fn RecipePlan<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> Vie
|
||||
.get()
|
||||
.recipes
|
||||
.get(r)
|
||||
.expect("Failed to find recipe")
|
||||
.expect(&format!("Failed to find recipe {}", r))
|
||||
.clone(),
|
||||
));
|
||||
map
|
||||
@ -108,13 +114,13 @@ pub fn RecipePlan<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> Vie
|
||||
},
|
||||
key=|(ref cat, _)| cat.clone(),
|
||||
)
|
||||
span(role="button", on:click=move |_| {
|
||||
button(on:click=move |_| {
|
||||
sh.dispatch(cx, Message::LoadState(None));
|
||||
}) { "Reset" } " "
|
||||
span(role="button", on:click=move |_| {
|
||||
button(on:click=move |_| {
|
||||
sh.dispatch(cx, Message::ResetRecipeCounts);
|
||||
}) { "Clear All" } " "
|
||||
span(role="button", on:click=move |_| {
|
||||
button(on:click=move |_| {
|
||||
// Poor man's click event signaling.
|
||||
sh.dispatch(cx, Message::SaveState(None));
|
||||
}) { "Save Plan" } " "
|
||||
|
@ -23,6 +23,7 @@ use crate::components::NumberField;
|
||||
pub struct RecipeCheckBoxProps<'ctx> {
|
||||
pub i: String,
|
||||
pub title: &'ctx ReadSignal<String>,
|
||||
pub serving_count: &'ctx ReadSignal<Option<i64>>,
|
||||
pub sh: StateHandler<'ctx>,
|
||||
}
|
||||
|
||||
@ -35,7 +36,7 @@ pub fn RecipeSelection<'ctx, G: Html>(
|
||||
cx: Scope<'ctx>,
|
||||
props: RecipeCheckBoxProps<'ctx>,
|
||||
) -> View<G> {
|
||||
let RecipeCheckBoxProps { i, title, sh } = props;
|
||||
let RecipeCheckBoxProps { i, title, sh, serving_count, } = props;
|
||||
let id = Rc::new(i);
|
||||
let id_for_count = id.clone();
|
||||
// NOTE(jwall): The below get's a little tricky. We need a separate signal to bind for the
|
||||
@ -65,12 +66,13 @@ pub fn RecipeSelection<'ctx, G: Html>(
|
||||
let name = format!("recipe_id:{}", id);
|
||||
let for_id = name.clone();
|
||||
view! {cx,
|
||||
div() {
|
||||
label(for=for_id) { a(href=href) { (*title) } }
|
||||
NumberField(name=name, counter=count, min=0.0, on_change=Some(move |_| {
|
||||
debug!(idx=%id, count=%(*count.get_untracked()), "setting recipe count");
|
||||
sh.dispatch(cx, Message::UpdateRecipeCount(id.as_ref().clone(), *count.get_untracked() as usize));
|
||||
}))
|
||||
label(for=for_id, class="flex-item-grow") { a(href=href) { (*title) } }
|
||||
div {
|
||||
"Serves: " (serving_count.get().map(|v| v.to_string()).unwrap_or("Unconfigured".to_owned()))
|
||||
}
|
||||
NumberField(name=name, class="flex-item-shrink".to_string(), counter=count, min=0.0, on_change=Some(move |_| {
|
||||
debug!(idx=%id, count=%(*count.get_untracked()), "setting recipe count");
|
||||
sh.dispatch(cx, Message::UpdateRecipeCount(id.as_ref().clone(), *count.get_untracked() as u32));
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
@ -19,6 +19,115 @@ use tracing::{debug, info, instrument};
|
||||
|
||||
use crate::app_state::{Message, StateHandler};
|
||||
|
||||
#[instrument(skip_all)]
|
||||
fn make_deleted_ingredients_rows<'ctx, G: Html>(
|
||||
cx: Scope<'ctx>,
|
||||
sh: StateHandler<'ctx>,
|
||||
show_staples: &'ctx ReadSignal<bool>,
|
||||
) -> View<G> {
|
||||
debug!("Making ingredients rows");
|
||||
let ingredients = sh.get_selector(cx, move |state| {
|
||||
let state = state.get();
|
||||
let category_map = &state.category_map;
|
||||
debug!("building ingredient list from state");
|
||||
let mut acc = IngredientAccumulator::new();
|
||||
for (id, count) in state.recipe_counts.iter() {
|
||||
for _ in 0..(*count) {
|
||||
acc.accumulate_from(
|
||||
state
|
||||
.recipes
|
||||
.get(id)
|
||||
.expect(&format!("No such recipe id exists: {}", id)),
|
||||
);
|
||||
}
|
||||
}
|
||||
if *show_staples.get() {
|
||||
if let Some(staples) = &state.staples {
|
||||
acc.accumulate_ingredients_for("Staples", staples.iter());
|
||||
}
|
||||
}
|
||||
let mut ingredients = acc
|
||||
.ingredients()
|
||||
.into_iter()
|
||||
// First we filter out any filtered ingredients
|
||||
.filter(|(i, _)| state.filtered_ingredients.contains(i))
|
||||
// Then we take into account our modified amts
|
||||
.map(|(k, (i, rs))| {
|
||||
let category = category_map
|
||||
.get(&i.name)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| String::new());
|
||||
if state.modified_amts.contains_key(&k) {
|
||||
(
|
||||
k.clone(),
|
||||
(
|
||||
i.name,
|
||||
i.form,
|
||||
category,
|
||||
state.modified_amts.get(&k).unwrap().clone(),
|
||||
rs,
|
||||
),
|
||||
)
|
||||
} else {
|
||||
(
|
||||
k.clone(),
|
||||
(
|
||||
i.name,
|
||||
i.form,
|
||||
category,
|
||||
format!("{}", i.amt.normalize()),
|
||||
rs,
|
||||
),
|
||||
)
|
||||
}
|
||||
})
|
||||
.collect::<Vec<(
|
||||
IngredientKey,
|
||||
(String, Option<String>, String, String, BTreeSet<String>),
|
||||
)>>();
|
||||
ingredients.sort_by(|tpl1, tpl2| (&tpl1.1 .2, &tpl1.1 .0).cmp(&(&tpl2.1 .2, &tpl2.1 .0)));
|
||||
ingredients
|
||||
});
|
||||
view!(
|
||||
cx,
|
||||
Indexed(
|
||||
iterable = ingredients,
|
||||
view = move |cx, (k, (name, form, category, amt, rs))| {
|
||||
let category = if category == "" {
|
||||
"other".to_owned()
|
||||
} else {
|
||||
category
|
||||
};
|
||||
let amt_signal = create_signal(cx, amt);
|
||||
let k_clone = k.clone();
|
||||
let form = form.map(|form| format!("({})", form)).unwrap_or_default();
|
||||
let recipes = rs
|
||||
.iter()
|
||||
.fold(String::new(), |acc, s| format!("{}{},", acc, s))
|
||||
.trim_end_matches(",")
|
||||
.to_owned();
|
||||
view! {cx,
|
||||
tr {
|
||||
td {
|
||||
input(bind:value=amt_signal, class="width-5", type="text", on:change=move |_| {
|
||||
sh.dispatch(cx, Message::UpdateAmt(k_clone.clone(), amt_signal.get_untracked().as_ref().clone()));
|
||||
})
|
||||
}
|
||||
td {
|
||||
input(type="button", class="fit-content no-print", value="Undo", on:click={
|
||||
move |_| {
|
||||
sh.dispatch(cx, Message::RemoveFilteredIngredient(k.clone()));
|
||||
}})
|
||||
}
|
||||
td { (name) " " (form) "" br {} "" (category) "" }
|
||||
td { (recipes) }
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
#[instrument(skip_all)]
|
||||
fn make_ingredients_rows<'ctx, G: Html>(
|
||||
cx: Scope<'ctx>,
|
||||
@ -109,12 +218,12 @@ fn make_ingredients_rows<'ctx, G: Html>(
|
||||
view! {cx,
|
||||
tr {
|
||||
td {
|
||||
input(bind:value=amt_signal, type="text", on:change=move |_| {
|
||||
input(bind:value=amt_signal, class="width-5", type="text", on:change=move |_| {
|
||||
sh.dispatch(cx, Message::UpdateAmt(k_clone.clone(), amt_signal.get_untracked().as_ref().clone()));
|
||||
})
|
||||
}
|
||||
td {
|
||||
input(type="button", class="no-print destructive", value="X", on:click={
|
||||
input(type="button", class="fit-content no-print destructive", value="X", on:click={
|
||||
move |_| {
|
||||
sh.dispatch(cx, Message::AddFilteredIngredient(k.clone()));
|
||||
}})
|
||||
@ -143,14 +252,14 @@ fn make_extras_rows<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> V
|
||||
view! {cx,
|
||||
tr {
|
||||
td {
|
||||
input(bind:value=amt_signal, type="text", on:change=move |_| {
|
||||
input(bind:value=amt_signal, class="width-5", type="text", on:change=move |_| {
|
||||
sh.dispatch(cx, Message::UpdateExtra(idx,
|
||||
amt_signal.get_untracked().as_ref().clone(),
|
||||
name_signal.get_untracked().as_ref().clone()));
|
||||
})
|
||||
}
|
||||
td {
|
||||
input(type="button", class="no-print destructive", value="X", on:click=move |_| {
|
||||
input(type="button", class="fit-content no-print destructive", value="X", on:click=move |_| {
|
||||
sh.dispatch(cx, Message::RemoveExtra(idx));
|
||||
})
|
||||
}
|
||||
@ -191,24 +300,49 @@ fn make_shopping_table<'ctx, G: Html>(
|
||||
}
|
||||
}
|
||||
|
||||
fn make_deleted_items_table<'ctx, G: Html>(
|
||||
cx: Scope<'ctx>,
|
||||
sh: StateHandler<'ctx>,
|
||||
show_staples: &'ctx ReadSignal<bool>,
|
||||
) -> View<G> {
|
||||
view! {cx,
|
||||
h2 { "Deleted Items" }
|
||||
table(class="pad-top shopping-list page-breaker container-fluid", role="grid") {
|
||||
tr {
|
||||
th { " Quantity " }
|
||||
th { " Delete " }
|
||||
th { " Ingredient " }
|
||||
th { " Recipes " }
|
||||
}
|
||||
tbody {
|
||||
(make_deleted_ingredients_rows(cx, sh, show_staples))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[instrument(skip_all)]
|
||||
#[component]
|
||||
pub fn ShoppingList<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View<G> {
|
||||
let show_staples = create_signal(cx, true);
|
||||
let show_staples = sh.get_selector(cx, |state| state.get().use_staples);
|
||||
view! {cx,
|
||||
h1 { "Shopping List " }
|
||||
label(for="show_staples_cb") { "Show staples" }
|
||||
input(id="show_staples_cb", type="checkbox", bind:checked=show_staples)
|
||||
input(id="show_staples_cb", type="checkbox", checked=*show_staples.get(), on:change=move|_| {
|
||||
let value = !*show_staples.get_untracked();
|
||||
sh.dispatch(cx, Message::UpdateUseStaples(value));
|
||||
})
|
||||
(make_shopping_table(cx, sh, show_staples))
|
||||
span(role="button", class="no-print", on:click=move |_| {
|
||||
(make_deleted_items_table(cx, sh, show_staples))
|
||||
button(class="no-print", on:click=move |_| {
|
||||
info!("Registering add item request for inventory");
|
||||
sh.dispatch(cx, Message::AddExtra(String::new(), String::new()));
|
||||
}) { "Add Item" } " "
|
||||
span(role="button", class="no-print", on:click=move |_| {
|
||||
button(class="no-print", on:click=move |_| {
|
||||
info!("Registering reset request for inventory");
|
||||
sh.dispatch(cx, Message::ResetInventory);
|
||||
}) { "Reset" } " "
|
||||
span(role="button", class="no-print", on:click=move |_| {
|
||||
button(class="no-print", on:click=move |_| {
|
||||
info!("Registering save request for inventory");
|
||||
sh.dispatch(cx, Message::SaveState(None));
|
||||
}) { "Save" } " "
|
||||
|
@ -72,8 +72,8 @@ pub fn IngredientsEditor<'ctx, G: Html>(
|
||||
|
||||
debug!("creating editor view");
|
||||
view! {cx,
|
||||
div(class="grid") {
|
||||
textarea(bind:value=text, aria-invalid=aria_hint.get(), rows=20, on:change=move |_| {
|
||||
div {
|
||||
textarea(class="width-third", bind:value=text, aria-invalid=aria_hint.get(), rows=20, on:change=move |_| {
|
||||
dirty.set(true);
|
||||
}, on:input=move |_| {
|
||||
let current_ts = js_lib::get_ms_timestamp();
|
||||
@ -84,7 +84,7 @@ pub fn IngredientsEditor<'ctx, G: Html>(
|
||||
})
|
||||
div(class="parse") { (error_text.get()) }
|
||||
}
|
||||
span(role="button", on:click=move |_| {
|
||||
button(on:click=move |_| {
|
||||
let unparsed = text.get();
|
||||
if !*dirty.get_untracked() {
|
||||
debug!("Staples text is unchanged");
|
||||
|
@ -47,12 +47,12 @@ pub fn TabbedView<'a, G: Html>(cx: Scope<'a>, state: TabState<'a, G>) -> View<G>
|
||||
.collect(),
|
||||
);
|
||||
view! {cx,
|
||||
nav {
|
||||
ul(class="tabs") {
|
||||
nav(class="menu-bg menu-font-2 flex-item-shrink") {
|
||||
ul(class="tabs pad-left no-list row-flex align-center") {
|
||||
(menu)
|
||||
}
|
||||
}
|
||||
main(class=".conatiner-fluid") {
|
||||
main(class="flex-item-grow content-font") {
|
||||
(children)
|
||||
}
|
||||
}
|
||||
|
@ -1,79 +0,0 @@
// Copyright 2023 Jeremy Wall (Jeremy@marzhilsltudios.com)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.\
use sycamore::{easing, motion, prelude::*};
use tracing::debug;
use wasm_bindgen::UnwrapThrowExt;

const SECTION_ID: &'static str = "toast-container";

#[component]
pub fn Container<'a, G: Html>(cx: Scope<'a>) -> View<G> {
view! {cx,
section(id=SECTION_ID) { }
}
}

pub fn create_output_element(msg: &str, class: &str) -> web_sys::Element {
let document = web_sys::window()
.expect("No window present")
.document()
.expect("No document in window");
let output = document.create_element("output").unwrap_throw();
let message_node = document.create_text_node(msg);
output.set_attribute("class", class).unwrap_throw();
output.set_attribute("role", "status").unwrap_throw();
output.append_child(&message_node).unwrap_throw();
output
}

fn show_toast<'a>(cx: Scope<'a>, msg: &str, class: &str, timeout: Option<chrono::Duration>) {
let timeout = timeout.unwrap_or_else(|| chrono::Duration::seconds(3));
// Insert a toast output element into the container.
let tweened = motion::create_tweened_signal(
cx,
0.0 as f32,
timeout
.to_std()
.expect("Failed to convert timeout duration."),
easing::quad_in,
);
tweened.set(1.0);
create_effect_scoped(cx, move |_cx| {
if !tweened.is_tweening() {
debug!("Detected message timeout.");
let container = crate::js_lib::get_element_by_id::<web_sys::HtmlElement>(SECTION_ID)
.expect("Failed to get toast-container")
.expect("No toast-container");
if let Some(node_to_remove) = container.first_element_child() {
// Always remove the first child if there is one.
container.remove_child(&node_to_remove).unwrap_throw();
}
}
});
let output_element = create_output_element(msg, class);
crate::js_lib::get_element_by_id::<web_sys::HtmlElement>(SECTION_ID)
.expect("Failed to get toast-container")
.expect("No toast-container")
// Always append after the last child.
.append_child(&output_element)
.unwrap_throw();
}

pub fn message<'a>(cx: Scope<'a>, msg: &str, timeout: Option<chrono::Duration>) {
show_toast(cx, msg, "toast", timeout);
}

pub fn error_message<'a>(cx: Scope<'a>, msg: &str, timeout: Option<chrono::Duration>) {
show_toast(cx, msg, "toast error", timeout);
}
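As a usage illustration only (not part of this diff), here is how callers would have invoked the removed toast helpers; the crate::components::toast module path and the Sycamore Scope argument are assumptions on my part.

// Hypothetical call site for the removed helpers (module path assumed).
use sycamore::prelude::Scope;

use crate::components::toast;

fn notify(cx: Scope) {
    // A plain toast uses the default three-second timeout; the error
    // variant here passes an explicit chrono::Duration instead.
    toast::message(cx, "Plan saved", None);
    toast::error_message(cx, "Failed to save plan", Some(chrono::Duration::seconds(5)));
}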
@ -11,33 +11,142 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use anyhow::{Context, Result};
use indexed_db::{self, Database, Factory, Transaction};
use js_sys::Date;
use wasm_bindgen::JsCast;
use web_sys::{window, Element, Storage};
use std::collections::HashSet;
use std::future::Future;
use tracing::error;
use web_sys::{window, Window};

pub fn get_storage() -> Storage {
window()
.expect("No Window Present")
pub fn get_storage() -> web_sys::Storage {
get_window()
.local_storage()
.expect("Failed to get storage")
.expect("No storage available")
}

pub const STATE_STORE_NAME: &'static str = "state-store";
pub const RECIPE_STORE_NAME: &'static str = "recipe-store";
pub const SERVING_COUNT_IDX: &'static str = "recipe-serving-count";
pub const CATEGORY_IDX: &'static str = "recipe-category";
pub const DB_VERSION: u32 = 1;

#[derive(Clone, Debug)]
pub struct DBFactory<'name> {
name: &'name str,
version: Option<u32>,
}

impl Default for DBFactory<'static> {
fn default() -> Self {
DBFactory {
name: STATE_STORE_NAME,
version: Some(DB_VERSION),
}
}
}

async fn version1_setup<'db>(
stores: &HashSet<String>,
db: &'db Database<std::io::Error>,
) -> Result<(), indexed_db::Error<std::io::Error>> {
// We use out of line keys for this object store
if !stores.contains(STATE_STORE_NAME) {
db.build_object_store(STATE_STORE_NAME).create()?;
}
if !stores.contains(RECIPE_STORE_NAME) {
let recipe_store = db.build_object_store(RECIPE_STORE_NAME).create()?;
recipe_store
.build_index(CATEGORY_IDX, "category")
.create()?;
recipe_store
.build_index(SERVING_COUNT_IDX, "serving_count")
.create()?;
}
Ok(())
}

impl<'name> DBFactory<'name> {
pub async fn get_indexed_db(&self) -> Result<Database<std::io::Error>> {
let factory = Factory::<std::io::Error>::get().context("opening IndexedDB")?;
let db = factory
.open(self.name, self.version.unwrap_or(0), |evt| async move {
// NOTE(zaphar): This is the on upgradeneeded handler. It get's called on new databases or
// databases with an older version than the one we requested to build.
let db = evt.database();
let stores = db
.object_store_names()
.into_iter()
.collect::<HashSet<String>>();
// NOTE(jwall): This needs to be somewhat clever in handling version upgrades.
if db.version() > 0 {
version1_setup(&stores, db).await?;
}
Ok(())
})
.await
.context(format!("Opening or creating the database {}", self.name))?;
Ok(db)
}

pub async fn rw_transaction<Fun, RetFut, Ret>(
&self,
stores: &[&str],
transaction: Fun,
) -> indexed_db::Result<Ret, std::io::Error>
where
Fun: 'static + FnOnce(Transaction<std::io::Error>) -> RetFut,
RetFut: 'static + Future<Output = indexed_db::Result<Ret, std::io::Error>>,
Ret: 'static,
{
self.get_indexed_db()
.await
.expect("Failed to open database")
.transaction(stores)
.rw()
.run(transaction)
.await
}

pub async fn ro_transaction<Fun, RetFut, Ret>(
&self,
stores: &[&str],
transaction: Fun,
) -> indexed_db::Result<Ret, std::io::Error>
where
Fun: 'static + FnOnce(Transaction<std::io::Error>) -> RetFut,
RetFut: 'static + Future<Output = indexed_db::Result<Ret, std::io::Error>>,
Ret: 'static,
{
self.get_indexed_db()
.await
.expect("Failed to open database")
.transaction(stores)
.run(transaction)
.await
}
}
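As a sketch of how the new DBFactory API might be driven (this block is mine, not part of the commit; the call site, function name, and error handling are assumed), a real transaction body would use the indexed_db Transaction handle it receives rather than the no-op shown here.

use crate::js_lib::{DBFactory, STATE_STORE_NAME};

async fn touch_state_store() {
    // Opens (and, on first run, upgrades) the database, then runs an empty
    // read-write transaction against the state store.
    let factory = DBFactory::default();
    let result = factory
        .rw_transaction(&[STATE_STORE_NAME], |_trans| async move {
            // A real caller would read or write object stores via `_trans` here.
            let done: indexed_db::Result<(), std::io::Error> = Ok(());
            done
        })
        .await;
    if result.is_err() {
        tracing::error!("state store transaction failed");
    }
}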
pub fn get_ms_timestamp() -> u32 {
Date::new_0().get_milliseconds()
}

pub fn get_element_by_id<E>(id: &str) -> Result<Option<E>, Element>
pub fn get_window() -> Window {
window().expect("No window present")
}

pub trait LogFailures<V, E> {
fn swallow_and_log(self);
}

impl<E> LogFailures<(), E> for Result<(), E>
where
E: JsCast,
E: std::fmt::Debug,
{
match window()
.expect("No window present")
.document()
.expect("No document in window")
.get_element_by_id(id)
{
Some(e) => e.dyn_into::<E>().map(|e| Some(e)),
None => Ok(None),
fn swallow_and_log(self) {
if let Err(e) = self {
error!(err = ?e, "Error: ");
}
}
}
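For the new LogFailures trait, a minimal sketch of a call site (assumed, not from the diff); it relies on set_attribute returning Result<(), JsValue>, which satisfies the JsCast + Debug bounds on the blanket impl above.

use crate::js_lib::{get_window, LogFailures};

fn tag_body(class: &str) {
    // Failures are logged through tracing instead of panicking the wasm app.
    let document = get_window().document().expect("No document in window");
    if let Some(body) = document.body() {
        body.set_attribute("class", class).swallow_and_log();
    }
}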
@ -15,6 +15,7 @@ mod api;
mod app_state;
mod components;
mod js_lib;
mod linear;
mod pages;
mod routing;
mod web;
54
web/src/linear.rs
Normal file
54
web/src/linear.rs
Normal file
@ -0,0 +1,54 @@
// Copyright 2022 Jeremy Wall
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::convert::Into;
use std::ops::Drop;
use std::rc::Rc;

use sycamore::prelude::*;

pub struct LinearSignal<'ctx, Payload> {
pub signal: &'ctx Signal<Payload>,
nv: Option<Payload>,
}

impl<'ctx, Payload> Into<LinearSignal<'ctx, Payload>> for &'ctx Signal<Payload> {
fn into(self) -> LinearSignal<'ctx, Payload> {
LinearSignal {
signal: self,
nv: None,
}
}
}

impl<'ctx, Payload> LinearSignal<'ctx, Payload> {
pub fn update(mut self, payload: Payload) -> Self {
self.nv = Some(payload);
return self;
}

pub fn get(&'ctx self) -> Rc<Payload> {
self.signal.get()
}
}

impl<'ctx, Payload> Drop for LinearSignal<'ctx, Payload> {
fn drop(&mut self) {
if self.nv.is_some() {
let mut val: Option<Payload> = None;
std::mem::swap(&mut val, &mut self.nv);
let payload = val.unwrap();
self.signal.set(payload);
}
}
}
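A brief sketch of the intended LinearSignal flow (assumed usage, not part of the commit): convert a signal into the wrapper, queue a value with update, and let the Drop impl publish it when the wrapper goes out of scope.

use sycamore::prelude::*;

use crate::linear::LinearSignal;

fn bump_count(cx: Scope) {
    let count = create_signal(cx, 0);
    {
        // `count.into()` wraps the signal; `update` stores the pending value;
        // dropping the wrapper at the end of this block calls `signal.set`.
        let pending: LinearSignal<'_, i32> = count.into();
        let _pending = pending.update(*count.get() + 1);
    }
    assert_eq!(*count.get(), 1);
}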
@ -27,9 +27,13 @@ pub fn LoginForm<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View
input(type="text", id="username", bind:value=username)
label(for="password") { "Password" }
input(type="password", bind:value=password)
span(role="button", on:click=move |_| {
button(on:click=move |evt: web_sys::Event| {
info!("Attempting login request");
let (username, password) = ((*username.get_untracked()).clone(), (*password.get_untracked()).clone());
// NOTE(jwall): This is required if we want to keep the below auth request from
// failing to send with blocked by browser. This is because it's on a click and
// the form tries to do a submit event and aborts our network request.
evt.prevent_default();
if username != "" && password != "" {
spawn_local_scoped(cx, async move {
let store = crate::api::HttpStore::get_from_context(cx);
@ -18,9 +18,13 @@ use crate::{app_state::StateHandler, components::recipe_list::*};

#[component]
pub fn CookPage<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View<G> {
let current_plan = sh.get_selector(cx, |state| {
state.get().selected_plan_date
});
view! {cx,
PlanningPage(
selected=Some("Cook".to_owned()),
plan_date = current_plan,
) { RecipeList(sh) }
}
}
@ -18,9 +18,13 @@ use crate::{app_state::StateHandler, components::shopping_list::*};

#[component]
pub fn InventoryPage<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View<G> {
let current_plan = sh.get_selector(cx, |state| {
state.get().selected_plan_date
});
view! {cx,
PlanningPage(
selected=Some("Inventory".to_owned()),
plan_date = current_plan,
) { ShoppingList(sh) }
}
}
@ -12,6 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::components::tabs::*;
use chrono::NaiveDate;
use sycamore::prelude::*;

pub mod cook;
@ -25,14 +26,19 @@ pub use plan::*;
pub use select::*;

#[derive(Props)]
pub struct PageState<'a, G: Html> {
pub children: Children<'a, G>,
pub struct PageState<'ctx, G: Html> {
pub children: Children<'ctx, G>,
pub selected: Option<String>,
pub plan_date: &'ctx ReadSignal<Option<NaiveDate>>,
}

#[component]
pub fn PlanningPage<'a, G: Html>(cx: Scope<'a>, state: PageState<'a, G>) -> View<G> {
let PageState { children, selected } = state;
pub fn PlanningPage<'ctx, G: Html>(cx: Scope<'ctx>, state: PageState<'ctx, G>) -> View<G> {
let PageState {
children,
selected,
plan_date,
} = state;
let children = children.call(cx);
let planning_tabs: Vec<(String, &'static str)> = vec![
("/ui/planning/select".to_owned(), "Select"),
@ -45,6 +51,10 @@ pub fn PlanningPage<'a, G: Html>(cx: Scope<'a>, state: PageState<'a, G>) -> View
TabbedView(
selected=selected,
tablist=planning_tabs,
) { (children) }
) { div {
"Plan Date: " (plan_date.get().map_or(String::from("Unknown"), |d| format!("{}", d)))
}
(children)
}
}
}
Some files were not shown because too many files have changed in this diff