Compare commits

...

116 Commits

Author SHA1 Message Date
876fa8fb33 wip: flake updates 2025-07-21 19:28:15 -04:00
d4fce72333 chore: update some cert stuff 2025-07-21 19:28:09 -04:00
e59eff8ae7 fix: plan creation was broken 2025-06-02 17:30:48 -04:00
cda5b02a37 fix: bad lorri hook eval 2024-10-08 19:28:43 -04:00
cc66c1f4f5 dev: refactor out into more composable pieces 2024-09-28 10:01:03 -04:00
4ed0b6f8fb feat: Allow restoring deleted inventory items 2024-09-28 10:01:03 -04:00
c77fa24515 refactor: recipe conversion from recipe_entry 2024-09-23 20:10:20 -04:00
6a5046d3c0 maid: formatting 2024-09-23 20:09:46 -04:00
263abda17b fix: various nix build issues 2024-09-23 20:05:43 -04:00
df88c2b7bd fix: busy_timeout to play nice with litestream 2024-09-23 15:14:04 -04:00
8000b7d317 feat: show serving count in the UI in all the places 2024-07-25 15:50:19 -04:00
aba1e114cf maid: cleanup warnings 2024-07-13 20:11:11 -04:00
9126d434d5 fix: Turns out that indexed db doesn't support Map
Had to modify the wasm-bindgen serialization to use objects instead
of the Map type.
2024-07-12 21:54:26 -04:00
548f336e1a dev: use better recipe keys
Having a dedicated object store makes a number of things simpler.
2024-07-12 19:18:24 -04:00
9f3b11a01f dev: migrate recipes from localstorage to indexeddb 2024-07-12 19:12:15 -04:00
f173204d2d dev: use cursor for the get_all_recipes 2024-07-12 18:48:25 -04:00
1f90cc2ef6 feat: migrate user_data and app_state
from localstorage to indexeddb
2024-07-12 18:26:13 -04:00
ed44e929f4 refactor: cleanup and make our upgrade logic more robust 2024-07-12 18:05:20 -04:00
51d165a50b dev: indexeddb indexes on recipe category and serving_count 2024-07-12 17:51:25 -04:00
84cc2a2713 refactor: have two object stores 2024-07-12 01:27:55 -04:00
f75652befa fix: cannot have database at version 0 2024-07-12 00:45:03 -04:00
b93edd2701 dev: use indexdb instead of local storage in the storage layer 2024-07-12 00:45:03 -04:00
4767115da6 dev: Add indexeddb and a helper method for it 2024-07-12 00:45:03 -04:00
1c55a315b0 build: generate sourcemap info for stacktraces
hopefully anyway
2024-07-12 00:45:03 -04:00
fe181fb102 cleanup: a bunch of warnings 2024-07-11 18:28:45 -04:00
61112c4e64 Merge branch 'sqlx-upgrade' 2024-07-11 18:13:39 -04:00
24fea84a0a maint: cleanup gitignore 2024-07-11 18:05:38 -04:00
113b03016f dev: Handle serving counts in the api 2024-07-11 18:04:48 -04:00
9833e22e42 upgrade: Schema version and sqlx version
It's complicated, but while debugging I upgraded sqlx. Shouldn't
have mixed up changes like that but I'm lazy and don't want to redo
it so it'll all have to just be here.
2024-07-11 18:04:41 -04:00
1f986e6372 fix: sqlx-prepare really only depends on wasm 2024-07-11 18:03:01 -04:00
63463fb5d6 build: More fixes and tweaks to unify the builds 2024-07-02 20:55:40 -05:00
6087d31aad Merge branch 'look_and_feel' 2024-07-01 16:02:37 -05:00
4ffb481634 docs: cleanup readme 2024-07-01 15:55:42 -05:00
6bc9f2ea2e build: unify make and nix builds 2024-07-01 15:55:42 -05:00
ec18d9de97 feat: recipe schema now has a setting for serving count 2024-07-01 15:55:42 -05:00
9249dca202 build: Makefile enhancements 2024-07-01 15:55:42 -05:00
dac4324c8f maint: cleanup some unused nix dependencies 2024-07-01 15:55:42 -05:00
e3c4a01648 refactor: make the wasm builder a little more configurable. 2024-07-01 15:55:42 -05:00
e1735e4243 maint: use resolver "2" 2024-07-01 15:55:42 -05:00
651f0cb264 build: get rid of wasm-pack
It keeps doing naughty things with network access.
2024-07-01 15:55:42 -05:00
3e853f51eb build: fix rust-tls resolver issues 2024-07-01 15:55:37 -05:00
251cbfa5c7 build: use rustls 2024-07-01 15:52:37 -05:00
1b6023a03e maint: having the wasm-pack version in the logs is useful 2024-07-01 15:52:37 -05:00
3e675b47f4 fix: Unsafe recursive object use 2024-07-01 15:52:37 -05:00
6f7d44ff83 Alloy models for browser_state 2024-07-01 15:52:37 -05:00
b105ce3f4b Add some models 2024-07-01 15:52:37 -05:00
0ba5f18b22 Display current plan date at the top 2024-07-01 15:52:37 -05:00
a320351041 NOTE comment. 2024-07-01 15:52:37 -05:00
874a5fdb57 cargo fmt 2024-07-01 15:52:37 -05:00
bb092212ac Stop using singular for normalization 2024-07-01 15:52:37 -05:00
9022503e76 Have a packaging unit for measures 2024-07-01 15:52:37 -05:00
94e1987f09 ui: more layout tweaks 2024-07-01 15:52:37 -05:00
a104ef5f47 ui: normalization and font tweaks 2024-07-01 15:52:37 -05:00
dac529e8e8 ui: Typography tweaks 2024-07-01 15:52:37 -05:00
6e0e00c7f3 docs: comments for the event handling in login 2024-07-01 15:52:37 -05:00
8942eb59a5 ui: Menu font sizes 2024-07-01 15:52:37 -05:00
e80953e987 maint: upgrade wasm-bindgen version 2024-07-01 15:52:37 -05:00
c64605f9e7 maint: Use gloo_net directly 2024-07-01 15:52:37 -05:00
d7cea46427 fix: Issue with request blocking occurring on login 2024-07-01 15:52:37 -05:00
45737f24e4 UI: Cleansheet CSS redesign
Initial skeleton and layout is working.
Still needs a bunch of tweaks.
2024-07-01 15:52:37 -05:00
61634cd682 maint: Update rust version in nix flake 2023-12-25 14:19:45 -06:00
0eee2e33bf dev: Update wasm-pack version
Motivated by this bug in v0.11.0:
https://github.com/rustwasm/wasm-pack/issues/1247
2023-11-26 20:35:50 -05:00
cbe7957844 dev: Add some explicit clarity to the wasm-pack step 2023-11-26 20:23:45 -05:00
b999359e95 dev: Nix flake cleanup 2023-11-25 22:22:41 -05:00
c672459ec9 dev: Script for running when not in nix 2023-11-25 22:22:41 -05:00
a399858728 maint: Cleanup a bunch of stuff
:wq
2023-11-25 22:22:18 -05:00
e6b827ca21 Enable debug logs when building with Makefile 2023-11-25 22:20:42 -05:00
1432dcea13 feat: Use a web component
A more ergonomic number spinner on mobile.
A cleaner number spinner interface.
2023-11-25 22:20:42 -05:00
a3aa579fa5 feat: some additional js_lib stuff for logging 2023-11-25 22:20:42 -05:00
50eecf9a7c Bring in web component library 2023-11-25 22:20:42 -05:00
39456bb35d
Merge pull request #28 from durin42/push-wsksktrvyzky
cleanup: remove .DS_Store cruft
2023-11-25 22:20:12 -05:00
45b5c84d7c
Merge pull request #29 from durin42/push-kmpkplpoxlys
cleanup: remove redundant clone
2023-11-25 22:19:52 -05:00
Augie Fackler
e30555aabe cleanup: remove redundant clone
```
warning: call to `.clone()` on a reference in this situation does nothing
   --> web/src/app_state.rs:441:86
    |
441 | ..., &local_store, original.clone()).await
    |                            ^^^^^^^^ help: remove this redundant call
    |
    = note: the type `sycamore::reactive::Signal<AppState>` does not implement `Clone`, so calling `clone` on `&sycamore::reactive::Signal<AppState>` copies the reference, which does not do anything and can be removed
```
2023-11-25 11:17:25 -05:00
Augie Fackler
e78116ff8d cleanup: remove .DS_Store cruft 2023-11-25 10:58:01 -05:00
0b7f513f27 fix: Issue when loading user state from local storage 2023-09-23 13:44:16 -04:00
4cefe42072 Pin serde
This needs to stick around due to https://github.com/serde-rs/serde/issues/2538.
2023-08-19 13:29:11 -04:00
db03d603c3 Store the use_staples setting in localstore 2023-07-24 20:56:30 -04:00
2ea0339ad1 LinearSignal as a helper type 2023-07-24 19:37:31 -04:00
b496cf9568 Store app state atomically as one json blob 2023-07-22 16:14:23 -05:00
806fdd2721 When we select a plan date store that date in the app_state 2023-05-31 15:56:16 -04:00
acc922787d Commit lockfile 2023-04-03 14:56:36 -04:00
a57992ab0d Bump version to v0.2.25 2023-04-02 21:01:22 -04:00
03dd38d38d Remove unused css variables 2023-03-29 19:44:05 -04:00
4f3cf8d825 Messaging mechanism 2023-03-28 19:34:37 -04:00
c2b8e79288 Remove extra recipe_title input 2023-03-27 17:35:00 -04:00
3ea6d4fe66 Fix bug with recipe category not updating 2023-03-25 08:55:30 -04:00
85c89b1754 Include wasm-bindgen in devShell 2023-03-22 18:45:43 -04:00
992f4248b2 Add wasm-bindgen directly to the toolchain 2023-03-22 18:43:20 -04:00
4ef59c1ef0 Add TODO for wasm-bindgen 2023-03-22 17:53:54 -04:00
5df86e33ac Bring in wasm-pack directly to keep control over it even more 2023-03-22 16:20:28 -04:00
fafcf6f981 Fix error in the devshell derivation 2023-03-22 15:33:15 -04:00
0beb468197 Bump to v0.2.24 2023-03-21 17:24:11 -04:00
ba825258bb Utilize the plan_table for listing, deleting, and saving meal plans 2023-03-21 17:01:05 -04:00
08e7f41be1 Schema migrations for stand alone recipe plans 2023-03-21 15:48:36 -04:00
1d924fb702 Abstract out the apple sdk bits into a shared library 2023-03-20 19:43:11 -04:00
8e075d681c Bump version to v0.2.23 2023-03-18 18:33:39 -04:00
9a560e53ab Bump various crate versions 2023-03-18 18:17:28 -04:00
3a562a840d Add metrics-process process metrics 2023-03-18 11:15:06 -04:00
53a0515f28 Make session cookie permanent 2023-02-27 20:18:50 -05:00
6421b12e27 Update to the new base64 api 2023-02-27 20:09:12 -05:00
413f8de388 Update favicon 2023-02-27 20:02:36 -05:00
a3c08e94fb use bind:valueAsNumber 2023-02-11 11:46:04 -05:00
8c85ba8cde Update base64 crate version 2023-02-06 16:02:08 -05:00
5bb05c00ad Bump version to 0.2.21 2023-02-04 18:51:50 -05:00
2f6375e381 Sort out recipe saving 2023-02-04 12:29:49 -05:00
b95b297deb Sort the meals in the meal plan by category 2023-02-04 12:06:16 -05:00
ab0937c3b2 Improve the metrics layer so we use an installed recorder 2023-01-29 22:10:05 -05:00
e02fcc82e1 recipe categorization 2023-01-29 19:55:43 -05:00
3f1e79b001 Fix recipe view rendering 2023-01-24 17:43:44 -05:00
3dc8461547
Merge pull request #26 from mrjoe7/fix-missing-dbdir
Fix panic when the 'serve' subcommand is used for the first time (#25)
2023-01-24 16:45:42 -05:00
Tomas Sedlak
410548529c
Fix panic when the 'serve' subcommand is used for the first time (#25)
When the 'serve' subcommand is executed for the first time, sqlite will panic on missing .session_store directory.
2023-01-24 21:52:05 +01:00
9272bfbc5b Always set the session dir directory in run script 2023-01-23 18:20:30 -05:00
797e31b73f Name the metric appropriately 2023-01-23 13:55:03 -05:00
1ff29fbe1b Bump version to 0.2.21 2023-01-22 21:57:14 -05:00
99261fb35a Add prometheus metrics 2023-01-22 21:48:40 -05:00
80e93fa476 sort the plan dates in descending order in the ui 2023-01-19 08:35:13 -05:00
116 changed files with 5573 additions and 1712 deletions

BIN
.DS_Store vendored

Binary file not shown.

9
.envrc Normal file
View File

@ -0,0 +1,9 @@
if has lorri; then
eval "$(lorri direnv)"
elif has nix; then
echo "Using flake fallback since lorri isn't installed"
use flake
else
# noop
echo "Unsupported direnv configuration. We need nix flake support and lorri installed"
fi

4
.gitignore vendored
View File

@ -1,10 +1,10 @@
target/
.lsp/
.clj-kondo/
web/dist/
webdist/
nix/*/result
result
.vscode/
.session_store/
.gitignore/
.DS_Store/
.env

2563
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,13 +1,11 @@
[workspace]
members = [ "recipes", "kitchen", "web", "api" ]
resolver = "2"
[patch.crates-io]
# TODO(jwall): When the fix for RcSignal Binding is released we can drop this patch.
sycamore = { git = "https://github.com/sycamore-rs/sycamore/", rev = "5d49777b4a66fb5730c40898fd2ee8cde15bcdc3" }
sycamore-router = { git = "https://github.com/sycamore-rs/sycamore/", rev = "5d49777b4a66fb5730c40898fd2ee8cde15bcdc3" }
# NOTE(jwall): We are maintaining a patch to remove the unstable async_std_feature. It breaks in our project on
# Rust v1.64
sqlx = { git = "https://github.com/zaphar/sqlx", branch = "remove_unstable_async_std_feature" }
[profile.release]
lto = true

View File

@ -14,6 +14,8 @@
mkfile_path := $(abspath $(lastword $(MAKEFILE_LIST)))
mkfile_dir := $(dir $(mkfile_path))
sqlite_url := sqlite://$(mkfile_dir)/.session_store/store.db
export out := dist
export project := kitchen
kitchen: wasm kitchen/src/*.rs
cd kitchen; cargo build
@ -27,15 +29,19 @@ static-prep: web/index.html web/favicon.ico web/static/*.css
cp -r web/favicon.ico web/dist/
cp -r web/static web/dist/
wasmrelease: wasmrelease-dist static-prep
wasmrelease: wasm-opt static-prep
wasm-opt: wasmrelease-dist
cd web; sh ../scripts/wasm-opt.sh release
wasmrelease-dist: web/src/*.rs web/src/components/*.rs
cd web; wasm-pack build --mode no-install --release --target web --out-dir dist/
cd web; sh ../scripts/wasm-build.sh release
wasm: wasm-dist static-prep
wasm-dist: web/src/*.rs web/src/components/*.rs
cd web; wasm-pack build --mode no-install --target web --out-dir dist/
cd web; sh ../scripts/wasm-build.sh debug
cd web; sh ../scripts/wasm-sourcemap.sh
clean:
rm -rf web/dist/*
@ -44,8 +50,11 @@ clean:
sqlx-migrate:
cd kitchen; cargo sqlx migrate run --database-url $(sqlite_url)
sqlx-add-%:
cd kitchen; cargo sqlx migrate add -r $*
sqlx-revert:
cd kitchen; cargo sqlx migrate revert --database-url $(sqlite_url)
sqlx-prepare:
sqlx-prepare: wasm
cd kitchen; cargo sqlx prepare --database-url $(sqlite_url)

View File

@ -6,10 +6,12 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
serde = "1.0.144"
recipes = { path = "../recipes" }
chrono = "0.4.22"
[dependencies.serde]
version = "1.0.204"
features = ["derive"]
[dependencies.axum]
version = "0.5.16"

103
flake.lock generated
View File

@ -1,5 +1,43 @@
{
"nodes": {
"cargo-wasm2map-src": {
"flake": false,
"locked": {
"lastModified": 1693927731,
"narHash": "sha256-oqJ9ZZLvUK57A9Kf6L4pPrW6nHqb+18+JGKj9HfIaaM=",
"owner": "mtolmacs",
"repo": "wasm2map",
"rev": "c7d80748b7f3af37df24770b9330b17aa9599e3e",
"type": "github"
},
"original": {
"owner": "mtolmacs",
"repo": "wasm2map",
"type": "github"
}
},
"fenix": {
"inputs": {
"nixpkgs": [
"naersk",
"nixpkgs"
],
"rust-analyzer-src": "rust-analyzer-src"
},
"locked": {
"lastModified": 1752475459,
"narHash": "sha256-z6QEu4ZFuHiqdOPbYss4/Q8B0BFhacR8ts6jO/F/aOU=",
"owner": "nix-community",
"repo": "fenix",
"rev": "bf0d6f70f4c9a9cf8845f992105652173f4b617f",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "fenix",
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {
@ -31,21 +69,6 @@
"type": "github"
}
},
"flake-utils_2": {
"locked": {
"lastModified": 1659877975,
"narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"gitignore": {
"flake": false,
"locked": {
@ -64,14 +87,15 @@
},
"naersk": {
"inputs": {
"fenix": "fenix",
"nixpkgs": "nixpkgs"
},
"locked": {
"lastModified": 1659610603,
"narHash": "sha256-LYgASYSPYo7O71WfeUOaEUzYfzuXm8c8eavJcel+pfI=",
"lastModified": 1752689277,
"narHash": "sha256-uldUBFkZe/E7qbvxa3mH1ItrWZyT6w1dBKJQF/3ZSsc=",
"owner": "nix-community",
"repo": "naersk",
"rev": "c6a45e4277fa58abd524681466d3450f896dc094",
"rev": "0e72363d0938b0208d6c646d10649164c43f4d64",
"type": "github"
},
"original": {
@ -82,25 +106,27 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1659868656,
"narHash": "sha256-LINDS957FYzOb412t/Zha44LQqGniMpUIUz4Pi+fvSs=",
"lastModified": 1752077645,
"narHash": "sha256-HM791ZQtXV93xtCY+ZxG1REzhQenSQO020cu6rHtAPk=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "80fc83ad314fe701766ee66ac8286307d65b39e3",
"rev": "be9e214982e20b8310878ac2baa063a961c1bdf6",
"type": "github"
},
"original": {
"id": "nixpkgs",
"type": "indirect"
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1668907448,
"narHash": "sha256-l71WVOLoOLTuVgdn69SX2IosITFu4eQJXVtJqAmg0Wk=",
"lastModified": 1753135609,
"narHash": "sha256-//xMo8MwSw1HoTnIk455J7NIJpsDqwVyD69MOXb7gZM=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "e7cbe75849e582b20884f4b9651a80dffafffb16",
"rev": "5d9316e7fb2d6395818d506ef997530eba1545b7",
"type": "github"
},
"original": {
@ -111,6 +137,7 @@
},
"root": {
"inputs": {
"cargo-wasm2map-src": "cargo-wasm2map-src",
"flake-compat": "flake-compat",
"flake-utils": "flake-utils",
"gitignore": "gitignore",
@ -119,19 +146,35 @@
"rust-overlay": "rust-overlay"
}
},
"rust-analyzer-src": {
"flake": false,
"locked": {
"lastModified": 1752428706,
"narHash": "sha256-EJcdxw3aXfP8Ex1Nm3s0awyH9egQvB2Gu+QEnJn2Sfg=",
"owner": "rust-lang",
"repo": "rust-analyzer",
"rev": "591e3b7624be97e4443ea7b5542c191311aa141d",
"type": "github"
},
"original": {
"owner": "rust-lang",
"ref": "nightly",
"repo": "rust-analyzer",
"type": "github"
}
},
"rust-overlay": {
"inputs": {
"flake-utils": "flake-utils_2",
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1665398664,
"narHash": "sha256-y/UcVB5k0Wdc0j+7whJE2+vko8m296wZYX37b2lFSpI=",
"lastModified": 1750964660,
"narHash": "sha256-YQ6EyFetjH1uy5JhdhRdPe6cuNXlYpMAQePFfZj4W7M=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "af29a900f10dd6e467622202fb4f6d944d72a3a6",
"rev": "04f0fcfb1a50c63529805a798b4b5c21610ff390",
"type": "github"
},
"original": {

View File

@ -2,28 +2,31 @@
description = "kitchen";
# Pin nixpkgs
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs";
gitignore = { url = "github:hercules-ci/gitignore.nix"; flake = false; };
flake-utils.url = "github:numtide/flake-utils";
rust-overlay = {
url = "github:oxalica/rust-overlay?ref=stable";
inputs.nixpkgs.follows = "nixpkgs";
};
naersk.url = "github:nix-community/naersk";
flake-compat = { url = github:edolstra/flake-compat; flake = false; };
nixpkgs.url = "github:NixOS/nixpkgs";
gitignore = { url = "github:hercules-ci/gitignore.nix"; flake = false; };
flake-utils.url = "github:numtide/flake-utils";
rust-overlay = {
url = "github:oxalica/rust-overlay?ref=stable";
inputs.nixpkgs.follows = "nixpkgs";
};
naersk.url = "github:nix-community/naersk";
flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
cargo-wasm2map-src = { url = "github:mtolmacs/wasm2map"; flake = false; };
};
outputs = {self, nixpkgs, flake-utils, rust-overlay, naersk, gitignore, flake-compat}:
outputs = {nixpkgs, flake-utils, rust-overlay, naersk, cargo-wasm2map-src, ...}:
let
kitchenGen = (import ./nix/kitchen/default.nix);
kitchenWasmGen = (import ./nix/kitchenWasm/default.nix);
moduleGen = (import ./nix/kitchen/module.nix);
version = "0.2.20";
wasm-packGen = (import ./nix/wasm-pack/default.nix);
wasm-bindgenGen = (import ./nix/wasm-bindgen/default.nix);
version = "0.2.25";
in
flake-utils.lib.eachDefaultSystem (system:
let
overlays = [ rust-overlay.overlays.default ];
pkgs = import nixpkgs { inherit system overlays; };
rust-wasm = pkgs.rust-bin.stable."1.64.0".default.override {
rust-wasm = pkgs.rust-bin.stable."1.87.0".default.override {
extensions = [ "rust-src" ];
# Add wasm32 as an extra target besides the native target.
targets = [ "wasm32-unknown-unknown" ];
@ -34,8 +37,29 @@
rustc = rust-wasm;
cargo = rust-wasm;
};
# TODO(jwall): Do the same thing for wasm-bindgen as well?
# We've run into a few problems with the bundled wasm-pack in nixpkgs.
# Better to just control this part of our toolchain directly.
wasm-pack = wasm-packGen {
inherit rust-wasm naersk-lib pkgs;
};
cargo-wasm2map = naersk-lib.buildPackage {
pname = "cargo-wasm2map";
version = "v0.1.0";
build-inputs = [ rust-wasm ];
src = cargo-wasm2map-src;
cargoBuildOptions = opts: opts ++ ["-p" "cargo-wasm2map" ];
};
wasm-bindgen = pkgs.callPackage wasm-bindgenGen { inherit pkgs; };
kitchenWasm = kitchenWasmGen {
inherit pkgs rust-wasm version;
inherit pkgs rust-wasm wasm-bindgen version cargo-wasm2map;
lockFile = ./Cargo.lock;
outputHashes = {
# I'm maintaining some patches for these so the lockfile hashes are a little
# incorrect. We override those here.
"wasm-web-component-0.2.0" = "sha256-quuPgzGb2F96blHmD3BAUjsWQYbSyJGZl27PVrwL92k=";
"sycamore-0.8.2" = "sha256-D968+8C5EelGGmot9/LkAlULZOf/Cr+1WYXRCMwb1nQ=";
};
};
kitchen = (kitchenGen {
inherit pkgs version naersk-lib kitchenWasm rust-wasm;
@ -45,8 +69,15 @@
root = ./.;
});
kitchenWasmDebug = kitchenWasmGen {
inherit pkgs rust-wasm version;
features = "--features debug_logs";
inherit pkgs rust-wasm wasm-bindgen version cargo-wasm2map;
lockFile = ./Cargo.lock;
outputHashes = {
# I'm maintaining some patches for these so the lockfile hashes are a little
# incorrect. We override those here.
"wasm-web-component-0.2.0" = "sha256-quuPgzGb2F96blHmD3BAUjsWQYbSyJGZl27PVrwL92k=";
"sycamore-0.8.2" = "sha256-D968+8C5EelGGmot9/LkAlULZOf/Cr+1WYXRCMwb1nQ=";
};
#features = "--features debug_logs";
};
kitchenDebug = (kitchenGen {
inherit pkgs version naersk-lib rust-wasm;
@ -72,7 +103,10 @@
type = "app";
program = "${kitchen}/bin/kitchen";
};
devShell = pkgs.callPackage ./nix/devShell/default.nix { inherit rust-wasm; };
devShell = pkgs.callPackage ./nix/devShell/default.nix {
inherit rust-wasm wasm-bindgen cargo-wasm2map;
wasm-pack-hermetic = wasm-pack;
};
}
);
}

View File

@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "select recipe_id, recipe_text, category, serving_count from recipes where user_id = ?",
"describe": {
"columns": [
{
"name": "recipe_id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "recipe_text",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "category",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "serving_count",
"ordinal": 3,
"type_info": "Int64"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true,
true,
true
]
},
"hash": "01018c919131848f8fa907a1356a1356b2aa6ca0912de8a296f5fef3486b5ff9"
}

View File

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "select password_hashed from users where id = ?",
"describe": {
"columns": [
{
"name": "password_hashed",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "104f07472670436d3eee1733578bbf0c92dc4f965d3d13f9bf4bfbc92958c5b6"
}

View File

@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "with latest_dates as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes\n where user_id = ?\n group by user_id\n)\n\nselect\n extra_items.name,\n extra_items.amt\nfrom latest_dates\ninner join extra_items on\n latest_dates.user_id = extra_items.user_id\n and latest_dates.plan_date = extra_items.plan_date",
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "amt",
"ordinal": 1,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false
]
},
"hash": "10de1e9950d7d3ae7f017b9175a1cee4ff7fcbc7403a39ea02930c75b4b9160a"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from modified_amts where user_id = ? and plan_date = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "10e1c111a16d647a106a3147f4e61e34b0176860ca99cb62cb43dc72550ad990"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into filtered_ingredients(user_id, name, form, measure_type, plan_date)\n values (?, ?, ?, ?, date()) on conflict(user_id, name, form, measure_type, plan_date) DO NOTHING",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "160a9dfccf2e91a37d81f75eba21ec73105a7453c4f1fe76a430d04e525bc6cd"
}

View File

@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "select plan_date as \"plan_date: NaiveDate\", recipe_id, count\nfrom plan_recipes\nwhere\n user_id = ?\n and date(plan_date) > ?\norder by user_id, plan_date",
"describe": {
"columns": [
{
"name": "plan_date: NaiveDate",
"ordinal": 0,
"type_info": "Date"
},
{
"name": "recipe_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "count",
"ordinal": 2,
"type_info": "Int64"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
false,
false
]
},
"hash": "19832e3582c05ed49c676fde33cde64274379a83a8dd130f6eec96c1d7250909"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into staples (user_id, content) values (?, ?)\n on conflict(user_id) do update set content = excluded.content",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "1b4a7250e451991ee7e642c6389656814e0dd00c94e59383c02af6313bc76213"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into modified_amts(user_id, name, form, measure_type, amt, plan_date)\n values (?, ?, ?, ?, ?, ?) on conflict (user_id, name, form, measure_type, plan_date) do update set amt=excluded.amt",
"describe": {
"columns": [],
"parameters": {
"Right": 6
},
"nullable": []
},
"hash": "1b6fd91460bef61cf02f210404a4ca57b520c969d1f9613e7101ee6aa7a9962a"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from filtered_ingredients where user_id = ? and plan_date = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "23beb05e40cf011170182d4e98cdf1faa3d8df6e5956e471245e666f32e56962"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into category_mappings\n (user_id, ingredient_name, category_name)\n values (?, ?, ?)\n on conflict (user_id, ingredient_name)\n do update set category_name=excluded.category_name\n",
"describe": {
"columns": [],
"parameters": {
"Right": 3
},
"nullable": []
},
"hash": "2582522f8ca9f12eccc70a3b339d9030aee0f52e62d6674cfd3862de2a68a177"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from plan_table where user_id = ? and plan_date = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "27aa0a21f534cdf580841fa111136fc26cf1a0ca4ddb308c12f3f8f5a62d6178"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into plan_table (user_id, plan_date) values (?, ?)\n on conflict (user_id, plan_date) do nothing;",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "288535e7b9e1f02ad1b677e3dddc85f38c0766ce16d26fc1bdd2bf90ab9a7f7c"
}

View File

@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "select plan_date as \"plan_date: NaiveDate\", recipe_id, count\n from plan_recipes\nwhere\n user_id = ?\n and plan_date = ?",
"describe": {
"columns": [
{
"name": "plan_date: NaiveDate",
"ordinal": 0,
"type_info": "Date"
},
{
"name": "recipe_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "count",
"ordinal": 2,
"type_info": "Int64"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
false,
false
]
},
"hash": "2e076acd2405d234daaa866e5a2ac1e10989fc8d2820f90aa722464a7b17db6b"
}

View File

@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "select ingredient_name, category_name from category_mappings where user_id = ?",
"describe": {
"columns": [
{
"name": "ingredient_name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "category_name",
"ordinal": 1,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false
]
},
"hash": "37f382be1b53efd2f79a0d59ae6a8717f88a86908a7a4128d5ed7339147ca59d"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into extra_items (user_id, name, plan_date, amt)\nvalues (?, ?, date(), ?)\non conflict (user_id, name, plan_date) do update set amt=excluded.amt",
"describe": {
"columns": [],
"parameters": {
"Right": 3
},
"nullable": []
},
"hash": "3caefb86073c47b5dd5d05f639ddef2f7ed2d1fd80f224457d1ec34243cc56c7"
}

View File

@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "with latest_dates as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes\n where user_id = ?\n group by user_id\n)\n\nselect\n modified_amts.name,\n modified_amts.form,\n modified_amts.measure_type,\n modified_amts.amt\nfrom latest_dates\ninner join modified_amts on\n latest_dates.user_id = modified_amts.user_id\n and latest_dates.plan_date = modified_amts.plan_date",
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "form",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "measure_type",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "amt",
"ordinal": 3,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "3e43f06f5c2e959f66587c8d74696d6db27d89fd2f7d7e1ed6fa5016b4bd1a91"
}

View File

@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "select\n name,\n amt\nfrom extra_items\nwhere\n user_id = ?\n and plan_date = ?",
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "amt",
"ordinal": 1,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
false
]
},
"hash": "4237ff804f254c122a36a14135b90434c6576f48d3a83245503d702552ea9f30"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into filtered_ingredients(user_id, name, form, measure_type, plan_date)\n values (?, ?, ?, ?, ?) on conflict(user_id, name, form, measure_type, plan_date) DO NOTHING",
"describe": {
"columns": [],
"parameters": {
"Right": 5
},
"nullable": []
},
"hash": "5883c4a57def93cca45f8f9d81c8bba849547758217cd250e7ab28cc166ab42b"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into users (id, password_hashed) values (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "5d743897fb0d8fd54c3708f1b1c6e416346201faa9e28823c1ba5a421472b1fa"
}

View File

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "select content from staples where user_id = ?",
"describe": {
"columns": [
{
"name": "content",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "64af3f713eb4c61ac02cab2dfea83d0ed197e602e99079d4d32cb38d677edf2e"
}

View File

@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "select\n modified_amts.name,\n modified_amts.form,\n modified_amts.measure_type,\n modified_amts.amt\nfrom modified_amts\nwhere\n user_id = ?\n and plan_date = ?",
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "form",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "measure_type",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "amt",
"ordinal": 3,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "699ff0f0d4d4c6e26a21c1922a5b5249d89ed1677680a2276899a7f8b26344ee"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from recipes where user_id = ? and recipe_id = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "6c43908d90f229b32ed8b1b076be9b452a995e1b42ba2554e947c515b031831a"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into modified_amts(user_id, name, form, measure_type, amt, plan_date)\n values (?, ?, ?, ?, ?, date()) on conflict (user_id, name, form, measure_type, plan_date) do update set amt=excluded.amt",
"describe": {
"columns": [],
"parameters": {
"Right": 5
},
"nullable": []
},
"hash": "6e28698330e42fd6c87ba1e6f1deb664c0d3995caa2b937ceac8c908e98aded6"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from extra_items where user_id = ? and plan_date = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "6f11d90875a6230766a5f9bd1d67665dc4d00c13d7e81b0d18d60baa67987da9"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from sessions where id = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "7578157607967a6a4c60f12408c5d9900d15b429a49681a4cae4e02d31c524ec"
}

View File

@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "select\n filtered_ingredients.name,\n filtered_ingredients.form,\n filtered_ingredients.measure_type\nfrom filtered_ingredients\nwhere\n user_id = ?\n and plan_date = ?",
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "form",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "measure_type",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
false,
false
]
},
"hash": "7695a0602395006f9b76ecd4d0cb5ecd5dee419b71b3b0b9ea4f47a83f3df41a"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into plan_recipes (user_id, plan_date, recipe_id, count) values (?, ?, ?, ?)\n on conflict (user_id, plan_date, recipe_id) do update set count=excluded.count;",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "83824ea638cb64c524f5c8984ef6ef28dfe781f0abf168abc4ae9a51e6e0ae88"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into categories (user_id, category_text) values (?, ?)\n on conflict(user_id) do update set category_text=excluded.category_text",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "8490e1bb40879caed62ac1c38cb9af48246f3451b6f7f1e1f33850f1dbe25f58"
}

View File

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "select session_value from sessions where id = ?",
"describe": {
"columns": [
{
"name": "session_value",
"ordinal": 0,
"type_info": "Blob"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "928a479ca0f765ec7715bf8784c5490e214486edbf5b78fd501823feb328375b"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from plan_recipes where user_id = ? and plan_date = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "93af0c367a0913d49c92aa69022fa30fc0564bd4dbab7f3ae78673a01439cd6e"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into sessions (id, session_value) values (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "9ad4acd9b9d32c9f9f441276aa71a17674fe4d65698848044778bd4aef77d42d"
}

View File

@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "with max_date as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes group by user_id\n)\n\nselect plan_recipes.plan_date as \"plan_date: NaiveDate\", plan_recipes.recipe_id, plan_recipes.count\n from plan_recipes\n inner join max_date on plan_recipes.user_id = max_date.user_id\nwhere\n plan_recipes.user_id = ?\n and plan_recipes.plan_date = max_date.plan_date",
"describe": {
"columns": [
{
"name": "plan_date: NaiveDate",
"ordinal": 0,
"type_info": "Date"
},
{
"name": "recipe_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "count",
"ordinal": 2,
"type_info": "Int64"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false
]
},
"hash": "ad3408cd773dd8f9308255ec2800171638a1aeda9817c57fb8360f97115f8e97"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into extra_items (user_id, name, amt, plan_date)\nvalues (?, ?, ?, ?)\non conflict (user_id, name, plan_date) do update set amt=excluded.amt",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "ba07658eb11f9d6cfdb5dbee4496b2573f1e51f4b4d9ae760eca3b977649b5c7"
}

View File

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "select category_text from categories where user_id = ?",
"describe": {
"columns": [
{
"name": "category_text",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true
]
},
"hash": "c988364f9f83f4fa8bd0e594bab432ee7c9ec47ca40f4d16e5e2a8763653f377"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from sessions",
"describe": {
"columns": [],
"parameters": {
"Right": 0
},
"nullable": []
},
"hash": "d84685a82585c5e4ae72c86ba1fe6e4a7241c4c3c9e948213e5849d956132bad"
}

View File

@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "with latest_dates as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes\n where user_id = ?\n group by user_id\n)\n\nselect\n filtered_ingredients.name,\n filtered_ingredients.form,\n filtered_ingredients.measure_type\nfrom latest_dates\ninner join filtered_ingredients on\n latest_dates.user_id = filtered_ingredients.user_id\n and latest_dates.plan_date = filtered_ingredients.plan_date",
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "form",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "measure_type",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false
]
},
"hash": "e38183e2e16afa308672044e5d314296d7cd84c1ffedcbfe790743547dc62de8"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into recipes (user_id, recipe_id, recipe_text, category, serving_count) values (?, ?, ?, ?, ?)\n on conflict(user_id, recipe_id) do update set recipe_text=excluded.recipe_text, category=excluded.category",
"describe": {
"columns": [],
"parameters": {
"Right": 5
},
"nullable": []
},
"hash": "eb99a37e18009e0dd46caccacea57ba0b25510d80a4e4a282a5ac2be50bba81c"
}

View File

@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "select recipe_id, recipe_text, category, serving_count from recipes where user_id = ? and recipe_id = ?",
"describe": {
"columns": [
{
"name": "recipe_id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "recipe_text",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "category",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "serving_count",
"ordinal": 3,
"type_info": "Int64"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
true,
true
]
},
"hash": "ee0491c7d1a31ef80d7abe6ea4c9a8b0618dba58a0a8bceef7bdafec98ccd543"
}

View File

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "select distinct plan_date as \"plan_date: NaiveDate\" from plan_table\nwhere user_id = ?",
"describe": {
"columns": [
{
"name": "plan_date: NaiveDate",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "fd818a6b1c800c2014b5cfe8a923ac9228832b11d7575585cf7930fbf91306d1"
}

View File

@ -1,6 +1,6 @@
[package]
name = "kitchen"
version = "0.2.20"
version = "0.2.25"
authors = ["Jeremy Wall <jeremy@marzhillstudios.com>"]
edition = "2021"
@ -18,12 +18,21 @@ async-trait = "0.1.57"
async-session = "3.0.0"
ciborium = "0.2.0"
tower = "0.4.13"
serde = "1.0.144"
cookie = "0.16.0"
chrono = "0.4.22"
cookie = "0.17.0"
metrics = "0.20.1"
metrics-exporter-prometheus = "0.11.0"
futures = "0.3"
metrics-process = "1.0.8"
[dependencies.chrono]
version = "0.4.22"
features = ["serde"]
[dependencies.serde]
version = "1.0.204"
[dependencies.argon2]
version = "0.4.1"
version = "0.5.0"
[dependencies.secrecy]
version = "0.8.0"
@ -58,5 +67,5 @@ version = "1.12.0"
features = ["tokio1"]
[dependencies.sqlx]
version = "0.6.2"
features = ["sqlite", "runtime-async-std-rustls", "offline", "chrono"]
version = "0.7"
features = ["sqlite", "runtime-async-std", "tls-rustls", "chrono"]

View File

@ -0,0 +1,2 @@
-- Add down migration script here
alter table recipes drop column category;

View File

@ -0,0 +1,2 @@
-- Add up migration script here
alter table recipes add column category TEXT;

View File

@ -0,0 +1,2 @@
-- Add down migration script here
drop table plan_table;

View File

@ -0,0 +1,10 @@
-- Add up migration script here
-- Create plan_table and backfill it from the distinct (user_id, plan_date)
-- pairs already present in plan_recipes, staging them in a temp table to
-- dedupe before the insert.
create temp table TEMP_plan_dates_deduped AS
select distinct user_id, plan_date from plan_recipes;
create table plan_table (user_id TEXT NOT NULL, plan_date TEXT NOT NULL, primary key (user_id, plan_date) );
insert into plan_table
select user_id, plan_date from TEMP_plan_dates_deduped;
drop table TEMP_plan_dates_deduped;

View File

@ -0,0 +1,2 @@
-- Add down migration script here
ALTER TABLE recipes DROP COLUMN serving_count;

View File

@ -0,0 +1,2 @@
-- Add up migration script here
ALTER TABLE recipes ADD COLUMN serving_count INT;

View File

@ -1,5 +1,15 @@
{
"db": "SQLite",
"05a9f963e3f18b8ceb787c33b6dbdac993f999ff32bb5155f2dff8dc18d840bf": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 4
}
},
"query": "insert into recipes (user_id, recipe_id, recipe_text, category) values (?, ?, ?, ?)\n on conflict(user_id, recipe_id) do update set recipe_text=excluded.recipe_text, category=excluded.category"
},
"104f07472670436d3eee1733578bbf0c92dc4f965d3d13f9bf4bfbc92958c5b6": {
"describe": {
"columns": [
@ -62,30 +72,6 @@
},
"query": "insert into filtered_ingredients(user_id, name, form, measure_type, plan_date)\n values (?, ?, ?, ?, date()) on conflict(user_id, name, form, measure_type, plan_date) DO NOTHING"
},
"196e289cbd65224293c4213552160a0cdf82f924ac597810fe05102e247b809d": {
"describe": {
"columns": [
{
"name": "recipe_id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "recipe_text",
"ordinal": 1,
"type_info": "Text"
}
],
"nullable": [
false,
true
],
"parameters": {
"Right": 2
}
},
"query": "select recipe_id, recipe_text from recipes where user_id = ? and recipe_id = ?"
},
"19832e3582c05ed49c676fde33cde64274379a83a8dd130f6eec96c1d7250909": {
"describe": {
"columns": [
@ -136,6 +122,36 @@
},
"query": "insert into modified_amts(user_id, name, form, measure_type, amt, plan_date)\n values (?, ?, ?, ?, ?, ?) on conflict (user_id, name, form, measure_type, plan_date) do update set amt=excluded.amt"
},
"1cc4412dfc3d4acdf257e839b50d6c9abbb6e74e7af606fd12da20f0aedde3de": {
"describe": {
"columns": [
{
"name": "recipe_id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "recipe_text",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "category",
"ordinal": 2,
"type_info": "Text"
}
],
"nullable": [
false,
true,
true
],
"parameters": {
"Right": 2
}
},
"query": "select recipe_id, recipe_text, category from recipes where user_id = ? and recipe_id = ?"
},
"23beb05e40cf011170182d4e98cdf1faa3d8df6e5956e471245e666f32e56962": {
"describe": {
"columns": [],
@ -156,6 +172,26 @@
},
"query": "insert into category_mappings\n (user_id, ingredient_name, category_name)\n values (?, ?, ?)\n on conflict (user_id, ingredient_name)\n do update set category_name=excluded.category_name\n"
},
"27aa0a21f534cdf580841fa111136fc26cf1a0ca4ddb308c12f3f8f5a62d6178": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 2
}
},
"query": "delete from plan_table where user_id = ? and plan_date = ?"
},
"288535e7b9e1f02ad1b677e3dddc85f38c0766ce16d26fc1bdd2bf90ab9a7f7c": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 2
}
},
"query": "insert into plan_table (user_id, plan_date) values (?, ?)\n on conflict (user_id, plan_date) do nothing;"
},
"2e076acd2405d234daaa866e5a2ac1e10989fc8d2820f90aa722464a7b17db6b": {
"describe": {
"columns": [
@ -256,15 +292,35 @@
},
"query": "with latest_dates as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes\n where user_id = ?\n group by user_id\n)\n\nselect\n modified_amts.name,\n modified_amts.form,\n modified_amts.measure_type,\n modified_amts.amt\nfrom latest_dates\ninner join modified_amts on\n latest_dates.user_id = modified_amts.user_id\n and latest_dates.plan_date = modified_amts.plan_date"
},
"3fd4017569dca4fe73e97e0e2bd612027a8c1b17b0b10faabd6459f56ca1c0bb": {
"40c589d8cb88d7ed723c8651833fe8541756ef0c57bf6296a4dfbda7d504dca8": {
"describe": {
"columns": [],
"nullable": [],
"columns": [
{
"name": "recipe_id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "recipe_text",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "category",
"ordinal": 2,
"type_info": "Text"
}
],
"nullable": [
false,
true,
true
],
"parameters": {
"Right": 3
"Right": 1
}
},
"query": "insert into recipes (user_id, recipe_id, recipe_text) values (?, ?, ?)\n on conflict(user_id, recipe_id) do update set recipe_text=excluded.recipe_text"
"query": "select recipe_id, recipe_text, category from recipes where user_id = ?"
},
"4237ff804f254c122a36a14135b90434c6576f48d3a83245503d702552ea9f30": {
"describe": {
@ -434,24 +490,6 @@
},
"query": "select\n filtered_ingredients.name,\n filtered_ingredients.form,\n filtered_ingredients.measure_type\nfrom filtered_ingredients\nwhere\n user_id = ?\n and plan_date = ?"
},
"7f4abc448b16e8b6b2bb74f8e810e245e81b38e1407085a20d28bfddfc06891f": {
"describe": {
"columns": [
{
"name": "plan_date: NaiveDate",
"ordinal": 0,
"type_info": "Date"
}
],
"nullable": [
false
],
"parameters": {
"Right": 1
}
},
"query": "select distinct plan_date as \"plan_date: NaiveDate\" from plan_recipes\nwhere user_id = ?"
},
"83824ea638cb64c524f5c8984ef6ef28dfe781f0abf168abc4ae9a51e6e0ae88": {
"describe": {
"columns": [],
@ -500,30 +538,6 @@
},
"query": "delete from plan_recipes where user_id = ? and plan_date = ?"
},
"95fbc362a2e17add05218a2dac431275b5cc55bd7ac8f4173ee10afefceafa3b": {
"describe": {
"columns": [
{
"name": "recipe_id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "recipe_text",
"ordinal": 1,
"type_info": "Text"
}
],
"nullable": [
false,
true
],
"parameters": {
"Right": 1
}
},
"query": "select recipe_id, recipe_text from recipes where user_id = ?"
},
"9ad4acd9b9d32c9f9f441276aa71a17674fe4d65698848044778bd4aef77d42d": {
"describe": {
"columns": [],
@ -631,5 +645,23 @@
}
},
"query": "with latest_dates as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes\n where user_id = ?\n group by user_id\n)\n\nselect\n filtered_ingredients.name,\n filtered_ingredients.form,\n filtered_ingredients.measure_type\nfrom latest_dates\ninner join filtered_ingredients on\n latest_dates.user_id = filtered_ingredients.user_id\n and latest_dates.plan_date = filtered_ingredients.plan_date"
},
"fd818a6b1c800c2014b5cfe8a923ac9228832b11d7575585cf7930fbf91306d1": {
"describe": {
"columns": [
{
"name": "plan_date: NaiveDate",
"ordinal": 0,
"type_info": "Text"
}
],
"nullable": [
false
],
"parameters": {
"Right": 1
}
},
"query": "select distinct plan_date as \"plan_date: NaiveDate\" from plan_table\nwhere user_id = ?"
}
}

View File

@ -38,13 +38,13 @@ fn create_app<'a>() -> clap::App<'a> {
)
(@subcommand groceries =>
(about: "print out a grocery list for a set of recipes")
(@arg csv: --csv "output ingredeints as csv")
(@arg csv: --csv "output ingredients as csv")
(@arg INPUT: +required "Input menu file to parse. One recipe file per line.")
)
(@subcommand serve =>
(about: "Serve the interface via the web")
(@arg recipe_dir: -d --dir +takes_value "Directory containing recipe files to use")
(@arg session_dir: --session_dir +takes_value "Session store directory to use")
(@arg session_dir: --session_dir +takes_value +required "Session store directory to use")
(@arg tls: --tls "Use TLS to serve.")
(@arg cert_path: --cert +takes_value "Certificate path. Required if you specified --tls.")
(@arg key_path: --cert_key +takes_value "Certificate key path. Required if you specified --tls")
@ -55,7 +55,7 @@ fn create_app<'a>() -> clap::App<'a> {
(@arg recipe_dir: -d --dir +takes_value "Directory containing recipe files to load for user")
(@arg user: -u --user +takes_value +required "username to add")
(@arg pass: -p --pass +takes_value +required "password to add for this user")
(@arg session_dir: --session_dir +takes_value "Session store directory to use")
(@arg session_dir: --session_dir +takes_value +required "Session store directory to use")
)
)
.setting(clap::AppSettings::SubcommandRequiredElseHelp)
@ -65,9 +65,10 @@ fn get_session_store_path(matches: &ArgMatches) -> PathBuf {
if let Some(dir) = matches.value_of("session_dir") {
PathBuf::from(dir)
} else {
let mut dir =
std::env::current_dir().expect("Unable to get current directory. Bailing out.");
dir.push(".session_store");
let mut dir = std::env::var("HOME")
.map(PathBuf::from)
.expect("Unable to get user home directory. Bailing out.");
dir.push(".kitchen");
dir
}
}

View File

@ -97,6 +97,7 @@ pub async fn handler(
.domain(domain)
.secure(true)
.path("/")
.permanent()
.finish();
let parsed_cookie = match cookie.to_string().parse() {
Err(err) => {

178
kitchen/src/web/metrics.rs Normal file
View File

@ -0,0 +1,178 @@
// Copyright 2023 Jeremy Wall (Jeremy@marzhillstudios.com)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! A [metrics] powered [TraceLayer] that works with any [Tower](https://crates.io/crates/tower) middleware.
use axum::http::{Request, Response};
use metrics::{histogram, increment_counter, Label};
use std::{
marker::PhantomData,
sync::{
atomic::{AtomicU64, Ordering},
Arc, Mutex,
},
};
use tower_http::{
classify::{ServerErrorsAsFailures, SharedClassifier},
trace::{
DefaultMakeSpan, DefaultOnEos, OnBodyChunk, OnFailure, OnRequest, OnResponse, TraceLayer,
},
};
use tracing;
/// A Metrics Trace Layer using a [MetricsRecorder].
///
/// The layer will record 4 different metrics:
///
/// * http_request_counter
/// * http_request_failure_counter
/// * http_request_size_bytes_hist
/// * http_request_time_micros_hist
///
/// Each of the metrics are labeled by host, method, and path
pub type MetricsTraceLayer<B, F> = TraceLayer<
    SharedClassifier<ServerErrorsAsFailures>,
    DefaultMakeSpan,
    MetricsRecorder<B, F>,
    MetricsRecorder<B, F>,
    MetricsRecorder<B, F>,
    DefaultOnEos,
    MetricsRecorder<B, F>,
>;
/// Holds the state required for recording metrics on a given request.
pub struct MetricsRecorder<B, F>
where
    F: Fn(&B) -> u64,
{
    // Label set (path/host/method) captured in `on_request` and read back by
    // the response/failure hooks; shared behind a Mutex because each hook
    // receives its own clone of the recorder.
    labels: Arc<Mutex<Vec<Label>>>,
    // Running total of body bytes, accumulated chunk by chunk in
    // `on_body_chunk` and read in `on_response`.
    size: Arc<AtomicU64>,
    // Caller-supplied closure that measures the length of one body chunk.
    chunk_len: Arc<F>,
    // B only appears as the closure's input type; PhantomData keeps the
    // type parameter alive without storing a B.
    _phantom: PhantomData<B>,
}
// NOTE: Clone is implemented by hand rather than derived because a derived
// impl would add unwanted `B: Clone` and `F: Clone` bounds; every field is
// an Arc (a cheap refcount bump) plus a zero-sized marker.
impl<B, F> Clone for MetricsRecorder<B, F>
where
    F: Fn(&B) -> u64,
{
    fn clone(&self) -> Self {
        Self {
            // Arc::clone makes the cheap, shared-ownership clone explicit.
            labels: Arc::clone(&self.labels),
            size: Arc::clone(&self.size),
            chunk_len: Arc::clone(&self.chunk_len),
            // PhantomData is a zero-sized Copy type; cloning it is noise.
            _phantom: PhantomData,
        }
    }
}
impl<B, F> MetricsRecorder<B, F>
where
    F: Fn(&B) -> u64,
{
    /// Construct a new [MetricsRecorder] using the installed [Recorder].
    ///
    /// `f` measures the length in bytes of a single body chunk.
    pub fn new(f: F) -> Self {
        let labels = Arc::new(Mutex::new(Vec::new()));
        let size = Arc::new(AtomicU64::new(0));
        let chunk_len = Arc::new(f);
        Self {
            labels,
            size,
            chunk_len,
            _phantom: PhantomData,
        }
    }
}
impl<B, F> OnBodyChunk<B> for MetricsRecorder<B, F>
where
    F: Fn(&B) -> u64,
{
    /// Add this chunk's measured length to the running body-size total.
    fn on_body_chunk(&mut self, chunk: &B, _latency: std::time::Duration, _span: &tracing::Span) {
        // Deref the Arc to reach the measuring closure; the previous counter
        // value returned by fetch_add is not needed.
        let chunk_size = (*self.chunk_len)(chunk);
        self.size.fetch_add(chunk_size, Ordering::SeqCst);
    }
}
impl<B, FailureClass, F> OnFailure<FailureClass> for MetricsRecorder<B, F>
where
    F: Fn(&B) -> u64,
{
    /// Bump the failure counter, tagged with the labels captured on request.
    fn on_failure(
        &mut self,
        _failure_classification: FailureClass,
        _latency: std::time::Duration,
        _span: &tracing::Span,
    ) {
        // Copy the label set out so the mutex guard is released before the
        // counter macro runs.
        let labels = {
            let guard = self.labels.lock().expect("Failed to unlock labels");
            guard.clone()
        };
        increment_counter!("http_request_failure_counter", labels);
    }
}
impl<B, RB, F> OnResponse<RB> for MetricsRecorder<B, F>
where
F: Fn(&B) -> u64,
{
fn on_response(
self,
_response: &Response<RB>,
latency: std::time::Duration,
_span: &tracing::Span,
) {
let labels = self.labels.lock().expect("Failed to unlock labels").clone();
histogram!(
"http_request_time_micros_hist",
latency.as_micros() as f64,
labels.clone()
);
histogram!(
"http_request_size_bytes_hist",
self.size.as_ref().load(Ordering::SeqCst) as f64,
labels
)
}
}
/// Assemble the path/host/method label set attached to every metric.
///
/// NOTE(review): "lables" is a typo, but the name is kept because the
/// `OnRequest` impl below calls it by this name.
fn make_request_lables(path: String, host: String, method: String) -> Vec<Label> {
    let mut labels = Vec::with_capacity(3);
    labels.push(Label::new("path", path));
    labels.push(Label::new("host", host));
    labels.push(Label::new("method", method));
    labels
}
impl<B, RB, F> OnRequest<RB> for MetricsRecorder<B, F>
where
    F: Fn(&B) -> u64,
{
    /// Capture the request's path/host/method labels for the later
    /// response/failure hooks and bump the request counter.
    fn on_request(&mut self, request: &Request<RB>, _span: &tracing::Span) {
        let path = request.uri().path().to_lowercase();
        let host = request.uri().host().unwrap_or("").to_lowercase();
        let method = request.method().to_string();
        let labels = make_request_lables(path, host, method);
        let mut labels_lock = self.labels.lock().expect("Failed to unlock labels");
        // Assign through the guard directly; the original routed through
        // `as_mut()` which only obscured a plain replacement of the Vec.
        *labels_lock = labels.clone();
        increment_counter!("http_request_counter", labels);
    }
}
/// Construct a [TraceLayer] that will use an installed [metrics::Recorder] to record metrics per request.
///
/// `f` measures the length in bytes of a single body chunk.
pub fn make_layer<B, F>(f: F) -> MetricsTraceLayer<B, F>
where
    F: Fn(&B) -> u64,
{
    // One recorder is shared (via its internal Arcs) across all four hooks.
    let recorder = MetricsRecorder::new(f);
    TraceLayer::new_for_http()
        .on_body_chunk(recorder.clone())
        .on_request(recorder.clone())
        .on_response(recorder.clone())
        .on_failure(recorder)
}

View File

@ -1,4 +1,3 @@
use std::collections::BTreeMap;
// Copyright 2022 Jeremy Wall
//
// Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +11,7 @@ use std::collections::BTreeMap;
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::BTreeMap;
use std::path::PathBuf;
use std::sync::Arc;
use std::{collections::BTreeSet, net::SocketAddr};
@ -24,17 +24,18 @@ use axum::{
routing::{get, Router},
};
use chrono::NaiveDate;
use client_api as api;
use metrics_process::Collector;
use mime_guess;
use recipes::{IngredientKey, RecipeEntry};
use rust_embed::RustEmbed;
use storage::{APIStore, AuthStore};
use tower::ServiceBuilder;
use tower_http::trace::TraceLayer;
use tracing::{debug, info, instrument};
use client_api as api;
use storage::{APIStore, AuthStore};
mod auth;
mod metrics;
mod storage;
#[derive(RustEmbed)]
@ -546,6 +547,13 @@ fn mk_v2_routes() -> Router {
#[instrument(fields(recipe_dir=?recipe_dir_path), skip_all)]
pub async fn make_router(recipe_dir_path: PathBuf, store_path: PathBuf) -> Router {
let handle = metrics_exporter_prometheus::PrometheusBuilder::new()
.install_recorder()
.expect("Failed to install Prometheus Recorder");
// Setup the prometheus process metrics.
let collector = Collector::default();
collector.describe();
let metrics_trace_layer = metrics::make_layer(|b: &axum::body::Bytes| b.len() as u64);
let store = Arc::new(storage::file_store::AsyncFileStore::new(
recipe_dir_path.clone(),
));
@ -570,13 +578,21 @@ pub async fn make_router(recipe_dir_path: PathBuf, store_path: PathBuf) -> Route
.nest("/v1", mk_v1_routes())
.nest("/v2", mk_v2_routes()),
)
// NOTE(jwall): Note that the layers are applied to the preceding routes not
.route(
"/metrics/prometheus",
get(|| async move {
collector.collect();
handle.render()
}),
)
// NOTE(jwall): Note that this layer is applied to the preceding routes not
// the following routes.
.layer(
// NOTE(jwall): However service builder will apply the layers from top
// NOTE(jwall): However service builder will apply these layers from top
// to bottom.
ServiceBuilder::new()
.layer(TraceLayer::new_for_http())
.layer(metrics_trace_layer)
.layer(Extension(store))
.layer(Extension(app_store)),
)

View File

@ -1,2 +1,2 @@
select distinct plan_date as "plan_date: NaiveDate" from plan_recipes
select distinct plan_date as "plan_date: NaiveDate" from plan_table
where user_id = ?

View File

@ -22,6 +22,7 @@ use tracing::{debug, instrument};
use super::RecipeEntry;
#[allow(dead_code)]
#[derive(Debug)]
pub struct Error(String);
@ -98,7 +99,7 @@ impl AsyncFileStore {
let file_name = entry.file_name().to_string_lossy().to_string();
debug!("adding recipe file {}", file_name);
let recipe_contents = read_to_string(entry.path()).await?;
entry_vec.push(RecipeEntry(file_name, recipe_contents));
entry_vec.push(RecipeEntry::new(file_name, recipe_contents));
} else {
warn!(
file = %entry.path().to_string_lossy(),
@ -118,7 +119,12 @@ impl AsyncFileStore {
if recipe_path.exists().await && recipe_path.is_file().await {
debug!("Found recipe file {}", recipe_path.to_string_lossy());
let recipe_contents = read_to_string(recipe_path).await?;
return Ok(Some(RecipeEntry(id.as_ref().to_owned(), recipe_contents)));
return Ok(Some(RecipeEntry {
id: id.as_ref().to_owned(),
text: recipe_contents,
category: None,
serving_count: None,
}));
} else {
return Ok(None);
}

View File

@ -0,0 +1,2 @@
insert into plan_table (user_id, plan_date) values (?, ?)
on conflict (user_id, plan_date) do nothing;

View File

@ -14,6 +14,7 @@
use async_std::sync::Arc;
use std::collections::BTreeSet;
use std::str::FromStr;
use std::time::Duration;
use std::{collections::BTreeMap, path::Path};
use argon2::{
@ -256,8 +257,10 @@ pub struct SqliteStore {
impl SqliteStore {
pub async fn new<P: AsRef<Path>>(path: P) -> sqlx::Result<Self> {
std::fs::create_dir_all(&path)?;
let url = format!("sqlite://{}/store.db", path.as_ref().to_string_lossy());
let options = SqliteConnectOptions::from_str(&url)?
.busy_timeout(Duration::from_secs(5))
.journal_mode(SqliteJournalMode::Wal)
.create_if_missing(true);
info!(?options, "Connecting to sqlite db");
@ -267,7 +270,7 @@ impl SqliteStore {
#[instrument(fields(conn_string=self.url), skip_all)]
pub async fn run_migrations(&self) -> sqlx::Result<()> {
info!("Running databse migrations");
info!("Running database migrations");
sqlx::migrate!("./migrations")
.run(self.pool.as_ref())
.await?;
@ -428,19 +431,10 @@ impl APIStore for SqliteStore {
user_id: S,
id: S,
) -> Result<Option<RecipeEntry>> {
// NOTE(jwall): We allow dead code becaue Rust can't figure out that
// this code is actually constructed but it's done via the query_as
// macro.
#[allow(dead_code)]
struct RecipeRow {
pub recipe_id: String,
pub recipe_text: Option<String>,
}
let id = id.as_ref();
let user_id = user_id.as_ref();
let entry = sqlx::query_as!(
RecipeRow,
"select recipe_id, recipe_text from recipes where user_id = ? and recipe_id = ?",
let entry = sqlx::query!(
"select recipe_id, recipe_text, category, serving_count from recipes where user_id = ? and recipe_id = ?",
user_id,
id,
)
@ -448,37 +442,32 @@ impl APIStore for SqliteStore {
.await?
.iter()
.map(|row| {
RecipeEntry(
row.recipe_id.clone(),
row.recipe_text.clone().unwrap_or_else(|| String::new()),
)
RecipeEntry {
id: row.recipe_id.clone(),
text: row.recipe_text.clone().unwrap_or_else(|| String::new()),
category: row.category.clone(),
serving_count: row.serving_count.clone(),
}
})
.nth(0);
Ok(entry)
}
async fn get_recipes_for_user(&self, user_id: &str) -> Result<Option<Vec<RecipeEntry>>> {
// NOTE(jwall): We allow dead code becaue Rust can't figure out that
// this code is actually constructed but it's done via the query_as
// macro.
#[allow(dead_code)]
struct RecipeRow {
pub recipe_id: String,
pub recipe_text: Option<String>,
}
let rows = sqlx::query_as!(
RecipeRow,
"select recipe_id, recipe_text from recipes where user_id = ?",
let rows = sqlx::query!(
"select recipe_id, recipe_text, category, serving_count from recipes where user_id = ?",
user_id,
)
.fetch_all(self.pool.as_ref())
.await?
.iter()
.map(|row| {
RecipeEntry(
row.recipe_id.clone(),
row.recipe_text.clone().unwrap_or_else(|| String::new()),
)
RecipeEntry {
id: row.recipe_id.clone(),
text: row.recipe_text.clone().unwrap_or_else(|| String::new()),
category: row.category.clone(),
serving_count: row.serving_count.clone(),
}
})
.collect();
Ok(Some(rows))
@ -492,12 +481,16 @@ impl APIStore for SqliteStore {
for entry in recipes {
let recipe_id = entry.recipe_id().to_owned();
let recipe_text = entry.recipe_text().to_owned();
let category = entry.category();
let serving_count = entry.serving_count();
sqlx::query!(
"insert into recipes (user_id, recipe_id, recipe_text) values (?, ?, ?)
on conflict(user_id, recipe_id) do update set recipe_text=excluded.recipe_text",
"insert into recipes (user_id, recipe_id, recipe_text, category, serving_count) values (?, ?, ?, ?, ?)
on conflict(user_id, recipe_id) do update set recipe_text=excluded.recipe_text, category=excluded.category",
user_id,
recipe_id,
recipe_text,
category,
serving_count,
)
.execute(self.pool.as_ref())
.await?;
@ -513,7 +506,7 @@ impl APIStore for SqliteStore {
user_id,
recipe_id,
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
}
transaction.commit().await?;
@ -545,8 +538,11 @@ impl APIStore for SqliteStore {
user_id,
date,
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
sqlx::query_file!("src/web/storage/init_meal_plan.sql", user_id, date)
.execute(&mut *transaction)
.await?;
for (id, count) in recipe_counts {
sqlx::query_file!(
"src/web/storage/save_meal_plan.sql",
@ -555,7 +551,7 @@ impl APIStore for SqliteStore {
id,
count
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
}
transaction.commit().await?;
@ -630,33 +626,40 @@ impl APIStore for SqliteStore {
debug!("Processing delete request");
let user_id = user_id.as_ref();
let mut transaction = self.pool.as_ref().begin().await?;
sqlx::query!(
"delete from plan_table where user_id = ? and plan_date = ?",
user_id,
date
)
.execute(&mut *transaction)
.await?;
sqlx::query!(
"delete from plan_recipes where user_id = ? and plan_date = ?",
user_id,
date
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
sqlx::query!(
"delete from filtered_ingredients where user_id = ? and plan_date = ?",
user_id,
date
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
sqlx::query!(
"delete from modified_amts where user_id = ? and plan_date = ?",
user_id,
date
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
sqlx::query!(
"delete from extra_items where user_id = ? and plan_date = ?",
user_id,
date
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
transaction.commit().await?;
Ok(())
@ -904,7 +907,7 @@ impl APIStore for SqliteStore {
user_id,
date
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
for key in filtered_ingredients {
let name = key.name();
@ -918,7 +921,7 @@ impl APIStore for SqliteStore {
measure_type,
date,
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
}
sqlx::query!(
@ -926,7 +929,7 @@ impl APIStore for SqliteStore {
user_id,
date
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
// store the modified amts
for (key, amt) in modified_amts {
@ -943,7 +946,7 @@ impl APIStore for SqliteStore {
amt,
date,
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
}
sqlx::query!(
@ -951,7 +954,7 @@ impl APIStore for SqliteStore {
user_id,
date
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
// Store the extra items
for (name, amt) in extra_items {
@ -962,7 +965,7 @@ impl APIStore for SqliteStore {
amt,
date
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
}
transaction.commit().await?;
@ -990,7 +993,7 @@ impl APIStore for SqliteStore {
form,
measure_type,
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
}
// store the modified amts
@ -1007,13 +1010,13 @@ impl APIStore for SqliteStore {
measure_type,
amt,
)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
}
// Store the extra items
for (name, amt) in extra_items {
sqlx::query_file!("src/web/storage/store_extra_items.sql", user_id, name, amt)
.execute(&mut transaction)
.execute(&mut *transaction)
.await?;
}
transaction.commit().await?;

51
models/browser_state.als Normal file
View File

@ -0,0 +1,51 @@
// Atoms for recipe identity and content.
sig Id {}
sig Text {}
sig Recipe {
    , id: one Id
    , text: one Text
}
// Recipe ids are unique and determine the text.
fact {
    no r1, r2: Recipe | (r1.id = r2.id) and (r1.text != r2.text)
    no r1, r2: Recipe | (r1 != r2) and (r1.id = r2.id)
}
sig Ingredient {}
sig Modifier {}
sig Amt {}
// An inventory entry whose amount has been modified.
sig ModifiedInventory {
    , ingredient: one Ingredient
    , modifier: lone Modifier
    , amt: one Amt
}
// At most one modified entry per (ingredient, modifier) pair.
fact {
    no mi1, mi2: ModifiedInventory | mi1 != mi2 && (mi1.ingredient = mi2.ingredient) and (mi1.modifier = mi2.modifier)
}
// An inventory entry that has been deleted.
sig DeletedInventory {
    , ingredient: one Ingredient
    , modifier: lone Modifier
}
// At most one deleted entry per (ingredient, modifier) pair.
fact {
    no mi1, mi2: DeletedInventory | mi1 != mi2 && (mi1.ingredient = mi2.ingredient) and (mi1.modifier = mi2.modifier)
}
// Items added outside of any recipe.
sig ExtraItems {
    , ingredient: one Ingredient
    , amt: one Amt
}
// A browser-side state snapshot: every Recipe atom belongs to some state's
// recipe set (see the appended fact).
sig State {
    , recipes: some Recipe
    , modified: set ModifiedInventory
    , deleted: set DeletedInventory
    , extras: set ExtraItems
} {
    no rs: Recipe | rs not in recipes
}
run { } for 3 but exactly 2 State, 2 Modifier, exactly 3 ModifiedInventory, exactly 9 Ingredient

17
models/planning.d2 Normal file
View File

@ -0,0 +1,17 @@
# Sequence diagram of the meal-planning flow between the cook, the
# frontend, and the backend, including client-side caching steps.
Meal Planning: {
  shape: sequence_diagram
  user: Cook; client: Kitchen frontend; kitchen: Kitchen backend
  user -> client: Start new meal Plan
  client -> kitchen: new plan created
  user -> client: Add recipe to meal plan
  client -> kitchen: Update meal plan with recipe
  client -> client: cache updated meal plan
  user -> client: Do inventory
  client -> kitchen: Store inventory mutations
  client -> client: cache inventory mutations
  user -> client: Undo mutation
  client -> kitchen: Store inventory mutations
  client -> client: cache inventory mutations
  user -> user: Cook recipes
}

125
models/planning.svg Normal file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 94 KiB

View File

@ -1,5 +1,5 @@
{ pkgs, rust-wasm }:
{ pkgs, rust-wasm, wasm-pack-hermetic, wasm-bindgen, cargo-wasm2map }:
with pkgs;
mkShell {
buildInputs = (if stdenv.isDarwin then [ pkgs.darwin.apple_sdk.frameworks.Security ] else [ ]) ++ (with pkgs; [wasm-bindgen-cli wasm-pack llvm clang rust-wasm]);
buildInputs = (with pkgs; [wasm-bindgen wasm-pack-hermetic llvm clang rust-wasm binaryen cargo-wasm2map]);
}

View File

@ -10,14 +10,16 @@ with pkgs;
(naersk-lib.buildPackage rec {
pname = "kitchen";
inherit version;
buildInputs = [ rust-wasm ];
buildInputs = [ rust-wasm libclang ];
# However, the crate we are building has its root in a specific crate.
nativeBuildInputs = [llvm clang rust-bindgen];
src = root;
nativeBuildInputs = (if stdenv.isDarwin then [ xcbuild pkgs.darwin.apple_sdk.frameworks.Security ] else [ ]) ++ [llvm clang];
cargoBuildOptions = opts: opts ++ ["-p" "${pname}" ];
postPatch = ''
mkdir -p web/dist
cp -r ${kitchenWasm}/* web/dist/
ls web/dist/
'';
# We have to tell libproc where the libclang.dylib lives
LIBCLANG_PATH="${libclang.lib}/lib/";
})

View File

@ -1,43 +1,43 @@
{pkgs? (import <nixpkgs>) {},
version,
features ? "",
rust-wasm,
wasm-bindgen,
lockFile,
outputHashes,
cargo-wasm2map,
}:
with pkgs;
let
pname = "kitchen-wasm";
src = ./../..;
lockFile = ./../../Cargo.lock;
# NOTE(jwall): Because we use wasm-pack directly below we need
# the cargo dependencies to already be installed.
cargoDeps = (pkgs.rustPlatform.importCargoLock { inherit lockFile; outputHashes = {
# I'm maintaining some patches for these so the lockfile hashes are a little
# incorrect. We override those here.
"sycamore-0.8.2" = "sha256-D968+8C5EelGGmot9/LkAlULZOf/Cr+1WYXRCMwb1nQ=";
"sqlx-0.6.2" = "sha256-X/LFvtzRfiOIEZJiVzmFvvULPpjhqvI99pSwH7a//GM=";
};
});
cargoDeps = (pkgs.rustPlatform.importCargoLock { inherit lockFile outputHashes; });
in
# TODO(zaphar): I should actually be leveraging naersklib.buildPackage with a postInstall for the optimization and bindgen
stdenv.mkDerivation {
inherit src pname;
version = version;
# we need the wasm-bindgen CLI version to exactly match the wasm-bindgen crate version
buildInputs = [ rust-wasm wasm-bindgen-cli wasm-pack binaryen];
propagatedBuildInputs = [ rust-wasm wasm-bindgen-cli wasm-pack binaryen];
buildInputs = [ rust-wasm wasm-bindgen wasm-pack binaryen cargo-wasm2map];
propagatedBuildInputs = [ rust-wasm wasm-bindgen wasm-pack binaryen];
phases = [ "postUnpackPhase" "buildPhase"];
postUnpackPhase = ''
ln -s ${cargoDeps} ./cargo-vendor-dir
cp -r ./cargo-vendor-dir/.cargo ./
cp -r $src/* ./
'';
# TODO(jwall): Build this from the root rather than the src.
# TODO(jwall): Use the makefile for as much of this as possible.
buildPhase = ''
echo building with wasm-pack
mkdir -p $out
cd web
cp -r static $out
RUST_LOG=info wasm-pack build --mode no-install --release --target web --out-dir $out ${features};
export project=kitchen
sh ../scripts/wasm-build.sh release
sh ../scripts/wasm-sourcemap.sh
cp -r index.html $out
cp -r favicon.ico $out
rm -rf $out/release
rm -rf $out/wasm32-unknown-unknown
'';
}

View File

@ -0,0 +1,36 @@
# Builds the wasm-bindgen CLI from the crates.io release.
{ pkgs
, lib
, rustPlatform
, fetchCrate
, nodejs
, pkg-config
, openssl
, curl
}:
# This package is special so we don't use the naersk infrastructure to build it.
# Instead we crib from the nixpkgs version with some tweaks to work with our
# flake setup.
rustPlatform.buildRustPackage rec {
pname = "wasm-bindgen-cli";
# NOTE(jwall): This must exactly match the version of the wasm-bindgen crate
# we are using.
version = "0.2.89";
src = fetchCrate {
inherit pname version;
sha256 = "sha256-IPxP68xtNSpwJjV2yNMeepAS0anzGl02hYlSTvPocz8=";
};
# Hash of the vendored cargo dependencies; must be refreshed whenever
# `version` changes.
cargoHash = "sha256-EsGFW1f9+E5NnMadP/0rRzFCxVJQb0mlTLz/3zYQ5Ac=";
nativeBuildInputs = [ pkg-config ];
buildInputs = [ openssl curl ];
# nodejs is only needed for the (currently disabled) check phase.
nativeCheckInputs = [ nodejs ];
# other tests require it to be run in the wasm-bindgen monorepo
#cargoTestFlags = [ "--test=reference" ];
doCheck = false;
}

21
nix/wasm-pack/default.nix Normal file
View File

@ -0,0 +1,21 @@
# Hermetic wasm-pack build from the upstream GitHub source, built with
# naersk so it uses our pinned rust toolchain.
{pkgs,
naersk-lib,
rust-wasm,
}:
with pkgs;
(naersk-lib.buildPackage rec {
pname = "wasm-pack";
version = "v0.12.1";
buildInputs = [ rust-wasm pkgs.openssl curl];
nativeBuildInputs =[llvm clang pkg-config];
# Link against the nixpkgs OpenSSL instead of the vendored copy.
OPENSSL_NO_VENDOR=1;
# The checks use network so disable them here
doCheck = false;
src = fetchFromGitHub {
owner = "rustwasm";
repo = "wasm-pack";
rev = version;
hash = "sha256-L4mCgUPG4cgTUpCoaIUOTONBOggXn5vMyPKj48B3MMk=";
};
# Only build the wasm-pack crate itself out of the workspace.
cargoBuildOptions = opts: opts ++ ["-p" "${pname}" ];
})

View File

@ -6,10 +6,6 @@ A web assembly experiment in Meal Planning and Shopping List management.
Ensure you have rust installed with support for the web assembly target. You can see instructions here: [Rust wasm book](https://rustwasm.github.io/docs/book/game-of-life/setup.html).
You will also want to have trunk installed. You can see instructions for that here: [trunk](https://trunkrs.dev/)
Then obtain the source. We do not at this time publish kitchen on [crates.io](https://crates.io/).
```sh
git clone https://github.com/zaphar/kitchen
cd kitchen
@ -23,7 +19,7 @@ make release
# Hacking on kitchen
If you want to hack on kitchen, then you may find it useful to use trunk in dev mode. The run script will run build the app and run trunk with it watching for changes and reloading on demand in your browser.
The run script will build the app and run it for you.
```sh
./run.sh

View File

@ -8,8 +8,14 @@ edition = "2021"
[dependencies]
abortable_parser = "~0.2.6"
chrono = "~0.4"
serde = "1.0.144"
[dependencies.chrono]
version = "0.4.22"
features = ["serde"]
[dependencies.serde]
version = "1.0.204"
features = ["derive"]
[dependencies.num-rational]
version = "~0.4.0"

View File

@ -50,27 +50,49 @@ impl Mealplan {
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct RecipeEntry(pub String, pub String);
pub struct RecipeEntry {
pub id: String,
pub text: String,
pub category: Option<String>,
pub serving_count: Option<i64>,
}
impl RecipeEntry {
pub fn new<IS: Into<String>, TS: Into<String>>(recipe_id: IS, text: TS) -> Self {
Self(recipe_id.into(), text.into())
Self {
id: recipe_id.into(),
text: text.into(),
category: None,
serving_count: None,
}
}
pub fn set_recipe_id<S: Into<String>>(&mut self, id: S) {
self.0 = id.into();
self.id = id.into();
}
pub fn recipe_id(&self) -> &str {
self.0.as_str()
self.id.as_str()
}
pub fn set_recipe_text<S: Into<String>>(&mut self, text: S) {
self.1 = text.into();
self.text = text.into();
}
pub fn recipe_text(&self) -> &str {
self.1.as_str()
self.text.as_str()
}
pub fn set_category<S: Into<String>>(&mut self, cat: S) {
self.category = Some(cat.into());
}
pub fn category(&self) -> Option<&String> {
self.category.as_ref()
}
pub fn serving_count(&self) -> Option<i64> {
self.serving_count.clone()
}
}
@ -79,6 +101,7 @@ impl RecipeEntry {
pub struct Recipe {
pub title: String,
pub desc: Option<String>,
pub serving_count: Option<i64>,
pub steps: Vec<Step>,
}
@ -88,6 +111,7 @@ impl Recipe {
title: title.into(),
desc: desc.map(|s| s.into()),
steps: Vec::new(),
serving_count: Default::default(),
}
}
@ -124,6 +148,16 @@ impl Recipe {
}
}
impl TryFrom<&RecipeEntry> for Recipe {
    type Error = String;

    /// Parses the entry's text into a [`Recipe`], carrying the entry's
    /// serving count over onto the parsed recipe.
    ///
    /// Returns the parser's error message when the text fails to parse.
    fn try_from(value: &RecipeEntry) -> Result<Self, Self::Error> {
        let mut parsed = parse::as_recipe(&value.text)?;
        // `Option<i64>` is `Copy`, so a plain copy suffices — no clone needed.
        parsed.serving_count = value.serving_count;
        Ok(parsed)
    }
}
pub struct IngredientAccumulator {
inner: BTreeMap<IngredientKey, (Ingredient, BTreeSet<String>)>,
}
@ -148,16 +182,28 @@ impl IngredientAccumulator {
set.insert(recipe_title.clone());
self.inner.insert(key, (i.clone(), set));
} else {
let amt = match (self.inner[&key].0.amt, i.amt) {
(Volume(rvm), Volume(lvm)) => Volume(lvm + rvm),
(Count(lqty), Count(rqty)) => Count(lqty + rqty),
(Weight(lqty), Weight(rqty)) => Weight(lqty + rqty),
let amts = match (&self.inner[&key].0.amt, &i.amt) {
(Volume(rvm), Volume(lvm)) => vec![Volume(lvm + rvm)],
(Count(lqty), Count(rqty)) => vec![Count(lqty + rqty)],
(Weight(lqty), Weight(rqty)) => vec![Weight(lqty + rqty)],
(Package(lnm, lqty), Package(rnm, rqty)) => {
if lnm == rnm {
vec![Package(lnm.clone(), lqty + rqty)]
} else {
vec![
Package(lnm.clone(), lqty.clone()),
Package(rnm.clone(), rqty.clone()),
]
}
}
_ => unreachable!(),
};
self.inner.get_mut(&key).map(|(i, set)| {
i.amt = amt;
set.insert(recipe_title.clone());
});
for amt in amts {
self.inner.get_mut(&key).map(|(i, set)| {
i.amt = amt;
set.insert(recipe_title.clone());
});
}
}
}
}
@ -186,7 +232,7 @@ pub struct Step {
impl Step {
pub fn new<S: Into<String>>(prep_time: Option<std::time::Duration>, instructions: S) -> Self {
Self {
prep_time: prep_time,
prep_time,
instructions: instructions.into(),
ingredients: Vec::new(),
}

View File

@ -334,7 +334,14 @@ make_fn!(unit<StrIter, String>,
text_token!("kg"),
text_token!("grams"),
text_token!("gram"),
text_token!("g")),
text_token!("g"),
text_token!("pkg"),
text_token!("package"),
text_token!("bottle"),
text_token!("bot"),
text_token!("bag"),
text_token!("can")
),
_ => ws,
(u.to_lowercase().to_singular())
)
@ -393,6 +400,7 @@ pub fn measure(i: StrIter) -> abortable_parser::Result<StrIter, Measure> {
"oz" => Weight(Oz(qty)),
"kg" | "kilogram" => Weight(Kilogram(qty)),
"g" | "gram" => Weight(Gram(qty)),
"pkg" | "package" | "can" | "bag" | "bottle" | "bot" => Measure::pkg(s, qty),
_u => {
eprintln!("Invalid unit: {}", _u);
unreachable!()
@ -418,9 +426,8 @@ pub fn normalize_name(name: &str) -> String {
// NOTE(jwall): The below unwrap is safe because of the length
// check above.
let last = parts.last().unwrap();
let normalized = last.to_singular();
prefix.push(' ');
prefix.push_str(&normalized);
prefix.push_str(&last.to_string());
return prefix;
}
return name.trim().to_lowercase().to_owned();

View File

@ -235,32 +235,30 @@ fn test_ingredient_name_parse() {
#[test]
fn test_ingredient_parse() {
for (i, expected) in vec![
//(
// "1 cup flour ",
// Ingredient::new("flour", None, Volume(Cup(Quantity::Whole(1))), ""),
//),
//(
// "\t1 cup flour ",
// Ingredient::new("flour", None, Volume(Cup(Quantity::Whole(1))), ""),
//),
//(
// "1 cup apple (chopped)",
// Ingredient::new(
// "apple",
// Some("chopped".to_owned()),
// Volume(Cup(Quantity::Whole(1))),
// "",
// ),
//),
//(
// "1 cup apple (chopped) ",
// Ingredient::new(
// "apple",
// Some("chopped".to_owned()),
// Volume(Cup(Quantity::Whole(1))),
// "",
// ),
//),
(
"1 cup flour ",
Ingredient::new("flour", None, Volume(Cup(Quantity::Whole(1)))),
),
(
"\t1 cup flour ",
Ingredient::new("flour", None, Volume(Cup(Quantity::Whole(1)))),
),
(
"1 cup apple (chopped)",
Ingredient::new(
"apple",
Some("chopped".to_owned()),
Volume(Cup(Quantity::Whole(1))),
),
),
(
"1 cup apple (chopped) ",
Ingredient::new(
"apple",
Some("chopped".to_owned()),
Volume(Cup(Quantity::Whole(1))),
),
),
(
"1 green bell pepper (chopped) ",
Ingredient::new(
@ -269,6 +267,46 @@ fn test_ingredient_parse() {
Count(Quantity::Whole(1)),
),
),
(
"1 pkg green onion",
Ingredient::new(
"green onion",
None,
Package("pkg".into(), Quantity::Whole(1)),
),
),
(
"1 bottle green onion",
Ingredient::new(
"green onion",
None,
Package("bottle".into(), Quantity::Whole(1)),
),
),
(
"1 bot green onion",
Ingredient::new(
"green onion",
None,
Package("bot".into(), Quantity::Whole(1)),
),
),
(
"1 bag green onion",
Ingredient::new(
"green onion",
None,
Package("bag".into(), Quantity::Whole(1)),
),
),
(
"1 can baked beans",
Ingredient::new(
"baked beans",
None,
Package("can".into(), Quantity::Whole(1)),
),
),
] {
match parse::ingredient(StrIter::new(i)) {
ParseResult::Complete(_, ing) => assert_eq!(ing, expected),

View File

@ -22,6 +22,7 @@ use std::{
convert::TryFrom,
fmt::Display,
ops::{Add, Div, Mul, Sub},
rc::Rc,
};
use num_rational::Ratio;
@ -179,6 +180,20 @@ impl VolumeMeasure {
macro_rules! volume_op {
($trait:ident, $method:ident) => {
// Borrowed-operand arithmetic for VolumeMeasure: converts both sides to
// the ml base unit, applies the operator, then normalizes the result in
// the unit system (metric vs. tsp-based) of the left operand.
impl $trait for &VolumeMeasure {
type Output = VolumeMeasure;
// NOTE(review): the parameter is named `lhs`, but `self` is the
// left-hand operand here — `lhs` is actually the right-hand side.
// Consider renaming for clarity.
fn $method(self, lhs: Self) -> Self::Output {
let (l, r) = (self.get_ml(), lhs.get_ml());
let result = ML($trait::$method(l, r));
if self.metric() {
result.normalize()
} else {
// Non-metric left operand: convert back to tsp-based units.
result.into_tsp().normalize()
}
}
}
impl $trait for VolumeMeasure {
type Output = Self;
@ -293,6 +308,20 @@ impl WeightMeasure {
macro_rules! weight_op {
($trait:ident, $method:ident) => {
// Borrowed-operand arithmetic for WeightMeasure: converts both sides to
// the gram base unit, applies the operator, then normalizes the result
// in the unit system (metric vs. oz-based) of the left operand.
impl $trait for &WeightMeasure {
type Output = WeightMeasure;
// NOTE(review): the parameter is named `lhs`, but `self` is the
// left-hand operand here — `lhs` is actually the right-hand side.
fn $method(self, lhs: Self) -> Self::Output {
let (l, r) = (self.get_grams(), lhs.get_grams());
let result = WeightMeasure::Gram($trait::$method(l, r));
if self.metric() {
result.normalize()
} else {
// Non-metric left operand: convert back to oz-based units.
result.into_oz().normalize()
}
}
}
impl $trait for WeightMeasure {
type Output = Self;
@ -335,18 +364,19 @@ impl Display for WeightMeasure {
use WeightMeasure::{Gram, Kilogram, Oz, Pound};
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord)]
#[derive(Clone, Debug, PartialEq, PartialOrd, Eq, Ord)]
/// Measurements in a Recipe with associated units for them.
pub enum Measure {
/// Volume measurements as meter cubed base unit
Volume(VolumeMeasure),
/// Simple count of items
Count(Quantity),
Package(Rc<str>, Quantity),
/// Weight measure as Grams base unit
Weight(WeightMeasure),
}
use Measure::{Count, Volume, Weight};
use Measure::{Count, Package, Volume, Weight};
impl Measure {
pub fn tsp(qty: Quantity) -> Self {
@ -407,11 +437,16 @@ impl Measure {
Weight(Oz(qty))
}
pub fn pkg<S: Into<Rc<str>>>(name: S, qty: Quantity) -> Self {
Package(name.into(), qty)
}
pub fn measure_type(&self) -> String {
match self {
Volume(_) => "Volume",
Count(_) => "Count",
Weight(_) => "Weight",
Package(_, _) => "Package",
}
.to_owned()
}
@ -421,6 +456,7 @@ impl Measure {
Volume(vm) => vm.plural(),
Count(qty) => qty.plural(),
Weight(wm) => wm.plural(),
Package(_, qty) => qty.plural(),
}
}
@ -429,6 +465,7 @@ impl Measure {
Volume(vm) => Volume(vm.normalize()),
Count(qty) => Count(qty.clone()),
Weight(wm) => Weight(wm.normalize()),
Package(nm, qty) => Package(nm.clone(), qty.clone()),
}
}
}
@ -439,6 +476,7 @@ impl Display for Measure {
Volume(vm) => write!(w, "{}", vm),
Count(qty) => write!(w, "{}", qty),
Weight(wm) => write!(w, "{}", wm),
Package(nm, qty) => write!(w, "{} {}", qty, nm),
}
}
}
@ -533,6 +571,26 @@ impl TryFrom<f32> for Quantity {
macro_rules! quantity_op {
($trait:ident, $method:ident) => {
// Borrowed-operand arithmetic for Quantity. Whole numbers are promoted
// to rationals so every combination produces an exact Frac result.
impl $trait for &Quantity {
type Output = Quantity;
// NOTE(review): the binding names are swapped relative to convention —
// `self` (the left operand) is matched as `rhs` and the parameter
// `lhs` is the right operand. Behavior is correct; names mislead.
fn $method(self, lhs: Self) -> Self::Output {
match (self, lhs) {
(Whole(rhs), Whole(lhs)) => Frac($trait::$method(
Ratio::from_integer(*rhs),
Ratio::from_integer(*lhs),
)),
(Frac(rhs), Frac(lhs)) => Frac($trait::$method(rhs, lhs)),
(Whole(rhs), Frac(lhs)) => {
Frac($trait::$method(Ratio::from_integer(*rhs), lhs))
}
(Frac(rhs), Whole(lhs)) => {
Frac($trait::$method(rhs, Ratio::from_integer(*lhs)))
}
}
}
}
impl $trait for Quantity {
type Output = Self;

19
run-non-nix.sh Executable file
View File

@ -0,0 +1,19 @@
# Copyright 2022 Jeremy Wall
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Build and run the kitchen server without nix, serving the recipe
# directory in $EXAMPLES (defaults to ../examples). Any extra arguments
# are forwarded to the server invocation.
EXAMPLES=${EXAMPLES:-../examples}
echo "Starting server serving ${EXAMPLES}"
mkdir -p .session_store
make kitchen
# Variables and "$@" are quoted so paths/arguments containing spaces
# survive word splitting.
./target/debug/kitchen --verbose debug serve --listen 127.0.0.1:3030 --session_dir .session_store --dir "${EXAMPLES}" --tls --cert ~/tls-certs/localhost+1.pem --cert_key ~/tls-certs/localhost+1-key.pem "$@"

5
run.sh
View File

@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
EXAMPLES=${EXAMPLES:-../examples}
echo Starting api server serving ${EXAMPLES}
nix run .\#kitchenDebug -- --verbose debug serve --dir ${EXAMPLES} --tls --cert ~/tls-certs/localhost+2.pem --cert_key ~/tls-certs/localhost+2-key.pem $@
echo Starting server serving ${EXAMPLES}
mkdir .session_store
nix run .\#kitchenDebug -- --verbose debug serve --session_dir .session_store --dir ${EXAMPLES} --tls --cert ~/tls-certs/localhost+1.pem --cert_key ~/tls-certs/localhost+1-key.pem $@
# This is ghetto but I'm doing it anyway

11
scripts/wasm-build.sh Normal file
View File

@ -0,0 +1,11 @@
# Build the frontend wasm library and run wasm-bindgen on the artifact.
# Usage: wasm-build.sh <buildtype>   where buildtype is "release" or "debug".
# Expects $out (output directory) and $project (crate name prefix) to be
# set in the environment (see nix/kitchenWasm).
set -x
buildtype=$1;
mkdir -p "$out"
# Quoted: the unquoted form `[ ${buildtype} = "release" ]` is a syntax
# error when the argument is empty.
if [ "${buildtype}" = "release" ]; then
    buildtype_flag="--release"
fi
# ${buildtype_flag} is intentionally unquoted so it expands to nothing
# for debug builds.
cargo build --lib ${buildtype_flag} --target wasm32-unknown-unknown --target-dir "$out" --features debug_logs
wasm-bindgen "$out/wasm32-unknown-unknown/${buildtype}/${project}_wasm.wasm" --out-dir "$out" --typescript --target web

6
scripts/wasm-opt.sh Normal file
View File

@ -0,0 +1,6 @@
# Optimize the built wasm module with wasm-opt (-O) and swap the
# optimized binary into place. Usage: wasm-opt.sh <buildtype>.
# Expects $out and $project to be set in the environment.
set -x
buildtype=$1;
# Paths are quoted so values containing spaces don't word-split.
wasm-opt "$out/wasm32-unknown-unknown/${buildtype}/${project}_wasm.wasm" --output "$out/${project}_wasm_bg-opt.wasm" -O
rm -f "$out/${project}_wasm_bg.wasm"
mv "$out/${project}_wasm_bg-opt.wasm" "$out/${project}_wasm_bg.wasm"

View File

@ -0,0 +1,3 @@
# Patch the built wasm module with a source map for in-browser debugging.
# Expects $out and $project in the environment (see nix/kitchenWasm).
# NOTE(review): assumes wasm-build.sh has already produced the .wasm file.
set -x
cargo-wasm2map wasm2map --patch $out/${project}_wasm_bg.wasm --base-url=http://localhost:3030

View File

@ -1,11 +0,0 @@
let
lock = builtins.fromJSON (builtins.readFile ./flake.lock);
in
(import (
fetchTarball {
url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
sha256 = lock.nodes.flake-compat.locked.narHash;
}
) {
src = ./.;
}).devShell

View File

@ -1,6 +1,6 @@
[package]
name = "kitchen-wasm"
version = "0.2.20"
version = "0.2.25"
edition = "2021"
[features]
@ -22,9 +22,18 @@ console_error_panic_hook = "0.1.7"
serde_json = "1.0.79"
tracing = "0.1.35"
async-trait = "0.1.57"
base64 = "0.20.0"
base64 = "0.21.0"
sycamore-router = "0.8"
js-sys = "0.3.60"
wasm-web-component = { git = "https://github.com/zaphar/wasm-web-components.git", rev = "v0.3.0" }
maud = "*"
indexed-db = "0.4.1"
anyhow = "1.0.86"
serde-wasm-bindgen = "0.6.5"
[dependencies.serde]
version = "1.0.204"
features = ["derive"]
[dependencies.tracing-subscriber]
version = "0.3.16"
@ -37,20 +46,26 @@ features = ["fmt", "time"]
version = "0.4.22"
features = ["serde"]
[dependencies.reqwasm]
version = "0.5.0"
[dependencies.gloo-net]
version = "0.4.0"
[dependencies.wasm-bindgen]
# we need wasm-bindgen v0.2.83 exactly
version = "= 0.2.83"
version = "= 0.2.89"
[dependencies.web-sys]
version = "0.3"
features = [
"Event",
"InputEvent",
"CustomEvent",
"CustomEventInit",
"EventTarget",
"History",
"HtmlAnchorElement",
"HtmlDivElement",
"HtmlSpanElement",
"HtmlInputElement",
"HtmlTextAreaElement",
"HtmlBaseElement",
"HtmlDialogElement",
"KeyboardEvent",
@ -58,7 +73,12 @@ features = [
"PopStateEvent",
"Url",
"Window",
"Storage"
"IdbFactory",
"IdbOpenDbRequest",
"IdbRequest",
"IdbDatabase",
"IdbRequestReadyState",
"Storage",
]
[dependencies.sycamore]

Binary file not shown.

Before

Width:  |  Height:  |  Size: 15 KiB

After

Width:  |  Height:  |  Size: 15 KiB

View File

@ -19,7 +19,7 @@
<head>
<meta content="text/html;charset=utf-8" http-equiv="Content-Type" name="viewport"
content="width=device-width, initial-scale=1.0" charset="UTF-8">
<link rel="stylesheet" href="/ui/static/pico.min.css">
<link rel="stylesheet" href="/ui/static/normalize.css">
<link rel="stylesheet" href="/ui/static/app.css">
</head>

View File

@ -13,20 +13,33 @@
// limitations under the License.
use std::collections::{BTreeMap, BTreeSet};
use base64;
use base64::{self, Engine};
use chrono::NaiveDate;
use reqwasm;
use serde_json::{from_str, to_string};
use gloo_net;
// TODO(jwall): Remove this when we have gone a few migrations past.
use serde_json::from_str;
use sycamore::prelude::*;
use tracing::{debug, error, instrument};
use anyhow::Result;
use client_api::*;
use recipes::{IngredientKey, RecipeEntry};
use serde_wasm_bindgen::{from_value, Serializer};
use wasm_bindgen::JsValue;
// TODO(jwall): Remove this when we have gone a few migrations past.
use web_sys::Storage;
use crate::{app_state::AppState, js_lib};
/// Serializes `value` into a `JsValue` via `serde_wasm_bindgen`, with
/// maps emitted as plain JS objects instead of `Map` — IndexedDB cannot
/// store `Map` values (see the indexed-db migration commits).
fn to_js<T: serde::ser::Serialize>(value: T) -> Result<JsValue, serde_wasm_bindgen::Error> {
let s = Serializer::new().serialize_maps_as_objects(true);
value.serialize(&s)
}
use crate::{
app_state::{parse_recipes, AppState},
js_lib::{self, DBFactory},
};
#[allow(dead_code)]
#[derive(Debug)]
pub struct Error(String);
@ -66,284 +79,290 @@ impl From<std::string::FromUtf8Error> for Error {
}
}
impl From<reqwasm::Error> for Error {
fn from(item: reqwasm::Error) -> Self {
impl From<gloo_net::Error> for Error {
fn from(item: gloo_net::Error) -> Self {
Error(format!("{:?}", item))
}
}
/// Builds the storage key under which a recipe is kept ("recipe:<id>").
fn recipe_key<S: std::fmt::Display>(id: S) -> String {
    let mut key = String::from("recipe:");
    key.push_str(&id.to_string());
    key
}

/// Builds the storage key under which a category mapping is kept
/// ("category:<id>").
fn category_key<S: std::fmt::Display>(id: S) -> String {
    let mut key = String::from("category:");
    key.push_str(&id.to_string());
    key
}
fn token68(user: String, pass: String) -> String {
base64::encode(format!("{}:{}", user, pass))
base64::engine::general_purpose::STANDARD.encode(format!("{}:{}", user, pass))
}
/// Adapts any `Result` whose error is `Debug`-printable into a
/// `Result<V, std::io::Error>` (kind `Other`), preserving the debug
/// representation as the error message.
///
/// Used to convert browser/indexed-db errors into the `io::Error` type
/// the transaction closures are required to return.
fn convert_to_io_error<V, E>(res: Result<V, E>) -> Result<V, std::io::Error>
where
    E: Into<Box<dyn std::error::Error>> + std::fmt::Debug,
{
    // `map_err` is the idiomatic equivalent of the former manual match.
    res.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, format!("{:?}", e)))
}
#[derive(Clone, Debug)]
pub struct LocalStore {
store: Storage,
// TODO(zaphar): Remove this when it's safe to delete the migration
old_store: Storage,
store: DBFactory<'static>,
}
const APP_STATE_KEY: &'static str = "app-state";
const USER_DATA_KEY: &'static str = "user_data";
impl LocalStore {
pub fn new() -> Self {
Self {
store: js_lib::get_storage(),
store: DBFactory::default(),
old_store: js_lib::get_storage(),
}
}
/// Gets user data from local storage.
pub fn get_user_data(&self) -> Option<UserData> {
self.store
.get("user_data")
.map_or(None, |val| val.map(|val| from_str(&val).unwrap_or(None)))
.flatten()
pub async fn migrate(&self) {
// 1. migrate app-state from localstore to indexeddb
debug!("Peforming localstorage migration");
if let Ok(Some(v)) = self.old_store.get("app_state") {
if let Ok(Some(local_state)) = from_str::<Option<AppState>>(&v) {
self.store_app_state(&local_state).await;
}
}
let _ = self.old_store.remove_item("app_state");
// 2. migrate user-state from localstore to indexeddb
if let Ok(Some(v)) = self.old_store.get(USER_DATA_KEY) {
if let Ok(local_user_data) = from_str::<Option<UserData>>(&v) {
self.set_user_data(local_user_data.as_ref()).await;
}
}
let _ = self.old_store.remove_item(USER_DATA_KEY);
// 3. Recipes
let store_len = self.old_store.length().unwrap();
let mut key_list = Vec::new();
for i in 0..store_len {
let key = self.old_store.key(i).unwrap().unwrap();
if key.starts_with("recipe:") {
key_list.push(key);
}
}
for k in key_list {
if let Ok(Some(recipe)) = self.old_store.get(&k) {
if let Ok(recipe) = from_str::<RecipeEntry>(&recipe) {
self.set_recipe_entry(&recipe).await;
}
}
let _ = self.old_store.delete(&k);
}
}
#[instrument(skip_all)]
pub async fn store_app_state(&self, state: &AppState) {
let state = match to_js(state) {
Ok(state) => state,
Err(err) => {
error!(?err, ?state, "Error deserializing app_state");
return;
}
};
web_sys::console::log_1(&state);
let key = to_js(APP_STATE_KEY).expect("Failed to serialize key");
self.store
.rw_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
object_store.put_kv(&key, &state).await?;
Ok(())
})
.await
.expect("Failed to store app-state");
}
#[instrument]
pub async fn fetch_app_state(&self) -> Option<AppState> {
debug!("Loading state from local store");
let recipes = parse_recipes(&self.get_recipes().await).expect("Failed to parse recipes");
self.store
.ro_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
let key = convert_to_io_error(to_js(APP_STATE_KEY))?;
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
let mut app_state: AppState = match object_store.get(&key).await? {
Some(s) => convert_to_io_error(from_value(s))?,
None => return Ok(None),
};
if let Some(recipes) = recipes {
debug!("Populating recipes");
for (id, recipe) in recipes {
debug!(id, "Adding recipe from local storage");
app_state.recipes.insert(id, recipe);
}
}
Ok(Some(app_state))
})
.await
.expect("Failed to fetch app-state")
}
#[instrument]
/// Gets user data from local storage.
pub async fn get_user_data(&self) -> Option<UserData> {
self.store
.ro_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
let key = to_js(USER_DATA_KEY).expect("Failed to serialize key");
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
let user_data: UserData = match object_store.get(&key).await? {
Some(s) => convert_to_io_error(from_value(s))?,
None => return Ok(None),
};
Ok(Some(user_data))
})
.await
.expect("Failed to fetch user_data")
}
#[instrument]
// Set's user data to local storage.
pub fn set_user_data(&self, data: Option<&UserData>) {
pub async fn set_user_data(&self, data: Option<&UserData>) {
let key = to_js(USER_DATA_KEY).expect("Failed to serialize key");
if let Some(data) = data {
let data = data.clone();
self.store
.set(
"user_data",
&to_string(data).expect("Failed to desrialize user_data"),
)
.rw_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
object_store
.put_kv(&key, &convert_to_io_error(to_js(&data))?)
.await?;
Ok(())
})
.await
.expect("Failed to set user_data");
} else {
self.store
.delete("user_data")
.rw_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
object_store.delete(&key).await?;
Ok(())
})
.await
.expect("Failed to delete user_data");
}
}
/// Gets categories from local storage.
pub fn get_categories(&self) -> Option<Vec<(String, String)>> {
let mut mappings = Vec::new();
for k in self.get_category_keys() {
if let Some(mut cat_map) = self
.store
.get(&k)
.expect(&format!("Failed to get category key {}", k))
.map(|v| {
from_str::<Vec<(String, String)>>(&v)
.expect(&format!("Failed to parse category key {}", k))
})
{
mappings.extend(cat_map.drain(0..));
}
}
if mappings.is_empty() {
None
} else {
Some(mappings)
}
}
/// Set the categories to the given string.
pub fn set_categories(&self, mappings: Option<&Vec<(String, String)>>) {
if let Some(mappings) = mappings {
for (i, cat) in mappings.iter() {
self.store
.set(
&category_key(i),
&to_string(&(i, cat)).expect("Failed to serialize category mapping"),
)
.expect("Failed to store category mapping");
}
}
}
fn get_storage_keys(&self) -> Vec<String> {
let mut keys = Vec::new();
for idx in 0..self.store.length().unwrap() {
if let Some(k) = self.store.key(idx).expect("Failed to get storage key") {
keys.push(k)
}
}
keys
}
fn get_category_keys(&self) -> impl Iterator<Item = String> {
self.get_storage_keys()
.into_iter()
.filter(|k| k.starts_with("category:"))
}
fn get_recipe_keys(&self) -> impl Iterator<Item = String> {
self.get_storage_keys()
.into_iter()
.filter(|k| k.starts_with("recipe:"))
}
/// Gets all the recipes from local storage.
pub fn get_recipes(&self) -> Option<Vec<RecipeEntry>> {
let mut recipe_list = Vec::new();
for recipe_key in self.get_recipe_keys() {
if let Some(entry) = self
.store
.get(&recipe_key)
.expect(&format!("Failed to get recipe: {}", recipe_key))
{
match from_str(&entry) {
Ok(entry) => {
recipe_list.push(entry);
}
Err(e) => {
error!(recipe_key, err = ?e, "Failed to parse recipe entry");
#[instrument]
async fn get_recipe_keys(&self) -> impl Iterator<Item = String> {
self.store
.ro_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
let mut keys = Vec::new();
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
let key_vec = object_store.get_all_keys(None).await?;
for k in key_vec {
if let Ok(v) = from_value(k) {
keys.push(v);
}
}
}
}
if recipe_list.is_empty() {
return None;
}
Some(recipe_list)
Ok(keys)
})
.await
.expect("Failed to get storage keys")
.into_iter()
}
pub fn get_recipe_entry(&self, id: &str) -> Option<RecipeEntry> {
let key = recipe_key(id);
#[instrument]
/// Gets all the recipes from local storage.
pub async fn get_recipes(&self) -> Option<Vec<RecipeEntry>> {
self.store
.get(&key)
.expect(&format!("Failed to get recipe {}", key))
.map(|entry| from_str(&entry).expect(&format!("Failed to get recipe {}", key)))
.ro_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
let mut recipe_list = Vec::new();
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
let mut c = object_store.cursor().open().await?;
while let Some(value) = c.value() {
recipe_list.push(convert_to_io_error(from_value(value))?);
c.advance(1).await?;
}
if recipe_list.is_empty() {
return Ok(None);
}
Ok(Some(recipe_list))
})
.await
.expect("Failed to get recipes")
}
#[instrument]
pub async fn get_recipe_entry(&self, id: &str) -> Option<RecipeEntry> {
let key = to_js(id).expect("Failed to serialize key");
self.store
.ro_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
let entry: Option<RecipeEntry> = match object_store.get(&key).await? {
Some(v) => convert_to_io_error(from_value(v))?,
None => None,
};
Ok(entry)
})
.await
.expect("Failed to get recipes")
}
#[instrument]
/// Sets the set of recipes to the entries passed in. Deletes any recipes not
/// in the list.
pub fn set_all_recipes(&self, entries: &Vec<RecipeEntry>) {
for recipe_key in self.get_recipe_keys() {
pub async fn set_all_recipes(&self, entries: &Vec<RecipeEntry>) {
for recipe_key in self.get_recipe_keys().await {
let key = to_js(&recipe_key).expect("Failed to serialize key");
self.store
.delete(&recipe_key)
.expect(&format!("Failed to get recipe {}", recipe_key));
.rw_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
object_store.delete(&key).await?;
Ok(())
})
.await
.expect("Failed to delete user_data");
}
for entry in entries {
self.set_recipe_entry(entry);
let entry = entry.clone();
let key = to_js(entry.recipe_id()).expect("Failed to serialize recipe key");
self.store
.rw_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
object_store
.put_kv(&key, &convert_to_io_error(to_js(&entry))?)
.await?;
Ok(())
})
.await
.expect("Failed to store recipe entry");
}
}
#[instrument]
/// Set recipe entry in local storage.
pub fn set_recipe_entry(&self, entry: &RecipeEntry) {
pub async fn set_recipe_entry(&self, entry: &RecipeEntry) {
let entry = entry.clone();
let key = to_js(entry.recipe_id()).expect("Failed to serialize recipe key");
self.store
.set(
&recipe_key(entry.recipe_id()),
&to_string(&entry).expect(&format!("Failed to get recipe {}", entry.recipe_id())),
)
.expect(&format!("Failed to store recipe {}", entry.recipe_id()))
.rw_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
object_store
.put_kv(&key, &convert_to_io_error(to_js(&entry))?)
.await?;
Ok(())
})
.await
.expect("Failed to store recipe entry");
}
#[instrument]
/// Delete recipe entry from local storage.
pub fn delete_recipe_entry(&self, recipe_id: &str) {
pub async fn delete_recipe_entry(&self, recipe_id: &str) {
let key = to_js(recipe_id).expect("Failed to serialize key");
self.store
.delete(&recipe_key(recipe_id))
.expect(&format!("Failed to delete recipe {}", recipe_id))
}
/// Save working plan to local storage.
pub fn store_plan(&self, plan: &Vec<(String, i32)>) {
self.store
.set("plan", &to_string(&plan).expect("Failed to serialize plan"))
.expect("Failed to store plan'");
}
pub fn get_plan(&self) -> Option<Vec<(String, i32)>> {
if let Some(plan) = self.store.get("plan").expect("Failed to store plan") {
Some(from_str(&plan).expect("Failed to deserialize plan"))
} else {
None
}
}
pub fn delete_plan(&self) {
self.store.delete("plan").expect("Failed to delete plan");
self.store
.delete("inventory")
.expect("Failed to delete inventory data");
}
pub fn set_plan_date(&self, date: &NaiveDate) {
self.store
.set(
"plan:date",
&to_string(&date).expect("Failed to serialize plan:date"),
)
.expect("Failed to store plan:date");
}
pub fn get_plan_date(&self) -> Option<NaiveDate> {
if let Some(date) = self
.store
.get("plan:date")
.expect("Failed to get plan date")
{
Some(from_str(&date).expect("Failed to deserialize plan_date"))
} else {
None
}
}
pub fn get_inventory_data(
&self,
) -> Option<(
BTreeSet<IngredientKey>,
BTreeMap<IngredientKey, String>,
Vec<(String, String)>,
)> {
if let Some(inventory) = self
.store
.get("inventory")
.expect("Failed to retrieve inventory data")
{
let (filtered, modified, extras): (
BTreeSet<IngredientKey>,
Vec<(IngredientKey, String)>,
Vec<(String, String)>,
) = from_str(&inventory).expect("Failed to deserialize inventory");
return Some((filtered, BTreeMap::from_iter(modified), extras));
}
return None;
}
pub fn set_inventory_data(
&self,
inventory: (
&BTreeSet<IngredientKey>,
&BTreeMap<IngredientKey, String>,
&Vec<(String, String)>,
),
) {
let filtered = inventory.0;
let modified_amts = inventory
.1
.iter()
.map(|(k, amt)| (k.clone(), amt.clone()))
.collect::<Vec<(IngredientKey, String)>>();
let extras = inventory.2;
let inventory_data = (filtered, &modified_amts, extras);
self.store
.set(
"inventory",
&to_string(&inventory_data).expect(&format!(
"Failed to serialize inventory {:?}",
inventory_data
)),
)
.expect("Failed to set inventory");
}
pub fn set_staples(&self, content: &String) {
self.store
.set("staples", content)
.expect("Failed to set staples in local store");
}
pub fn get_staples(&self) -> Option<String> {
self.store
.get("staples")
.expect("Failed to retreive staples from local store")
.rw_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
object_store.delete(&key).await?;
Ok(())
})
.await
.expect("Failed to delete user_data");
}
}
@ -381,13 +400,17 @@ impl HttpStore {
debug!("attempting login request against api.");
let mut path = self.v2_path();
path.push_str("/auth");
let result = reqwasm::http::Request::get(&path)
let request = gloo_net::http::Request::get(&path)
.header(
"Authorization",
"authorization",
format!("Basic {}", token68(user, pass)).as_str(),
)
.send()
.await;
.mode(web_sys::RequestMode::SameOrigin)
.credentials(web_sys::RequestCredentials::SameOrigin)
.build()
.expect("Failed to build request");
debug!(?request, "Sending auth request");
let result = request.send().await;
if let Ok(resp) = &result {
if resp.status() == 200 {
let user_data = resp
@ -409,7 +432,7 @@ impl HttpStore {
debug!("Retrieving User Account data");
let mut path = self.v2_path();
path.push_str("/account");
let result = reqwasm::http::Request::get(&path).send().await;
let result = gloo_net::http::Request::get(&path).send().await;
if let Ok(resp) = &result {
if resp.status() == 200 {
let user_data = resp
@ -430,11 +453,11 @@ impl HttpStore {
pub async fn fetch_categories(&self) -> Result<Option<Vec<(String, String)>>, Error> {
let mut path = self.v2_path();
path.push_str("/category_map");
let resp = match reqwasm::http::Request::get(&path).send().await {
let resp = match gloo_net::http::Request::get(&path).send().await {
Ok(resp) => resp,
Err(reqwasm::Error::JsError(err)) => {
Err(gloo_net::Error::JsError(err)) => {
error!(path, ?err, "Error hitting api");
return Ok(self.local_store.get_categories());
return Ok(None);
}
Err(err) => {
return Err(err)?;
@ -460,11 +483,11 @@ impl HttpStore {
pub async fn fetch_recipes(&self) -> Result<Option<Vec<RecipeEntry>>, Error> {
let mut path = self.v2_path();
path.push_str("/recipes");
let resp = match reqwasm::http::Request::get(&path).send().await {
let resp = match gloo_net::http::Request::get(&path).send().await {
Ok(resp) => resp,
Err(reqwasm::Error::JsError(err)) => {
Err(gloo_net::Error::JsError(err)) => {
error!(path, ?err, "Error hitting api");
return Ok(self.local_store.get_recipes());
return Ok(self.local_store.get_recipes().await);
}
Err(err) => {
return Err(err)?;
@ -490,11 +513,11 @@ impl HttpStore {
let mut path = self.v2_path();
path.push_str("/recipe/");
path.push_str(id.as_ref());
let resp = match reqwasm::http::Request::get(&path).send().await {
let resp = match gloo_net::http::Request::get(&path).send().await {
Ok(resp) => resp,
Err(reqwasm::Error::JsError(err)) => {
Err(gloo_net::Error::JsError(err)) => {
error!(path, ?err, "Error hitting api");
return Ok(self.local_store.get_recipe_entry(id.as_ref()));
return Ok(self.local_store.get_recipe_entry(id.as_ref()).await);
}
Err(err) => {
return Err(err)?;
@ -514,7 +537,7 @@ impl HttpStore {
.as_success()
.unwrap();
if let Some(ref entry) = entry {
self.local_store.set_recipe_entry(entry);
self.local_store.set_recipe_entry(entry).await;
}
Ok(entry)
}
@ -528,7 +551,7 @@ impl HttpStore {
let mut path = self.v2_path();
path.push_str("/recipe");
path.push_str(&format!("/{}", recipe.as_ref()));
let resp = reqwasm::http::Request::delete(&path).send().await?;
let resp = gloo_net::http::Request::delete(&path).send().await?;
if resp.status() != 200 {
Err(format!("Status: {}", resp.status()).into())
} else {
@ -546,10 +569,9 @@ impl HttpStore {
return Err("Recipe Ids can not be empty".into());
}
}
let serialized = to_string(&recipes).expect("Unable to serialize recipe entries");
let resp = reqwasm::http::Request::post(&path)
.body(&serialized)
.header("content-type", "application/json")
let resp = gloo_net::http::Request::post(&path)
.json(&recipes)
.expect("Failed to set body")
.send()
.await?;
if resp.status() != 200 {
@ -564,9 +586,9 @@ impl HttpStore {
pub async fn store_categories(&self, categories: &Vec<(String, String)>) -> Result<(), Error> {
let mut path = self.v2_path();
path.push_str("/category_map");
let resp = reqwasm::http::Request::post(&path)
.body(to_string(&categories).expect("Unable to encode categories as json"))
.header("content-type", "application/json")
let resp = gloo_net::http::Request::post(&path)
.json(&categories)
.expect("Failed to set body")
.send()
.await?;
if resp.status() != 200 {
@ -618,9 +640,9 @@ impl HttpStore {
pub async fn store_plan(&self, plan: Vec<(String, i32)>) -> Result<(), Error> {
let mut path = self.v2_path();
path.push_str("/plan");
let resp = reqwasm::http::Request::post(&path)
.body(to_string(&plan).expect("Unable to encode plan as json"))
.header("content-type", "application/json")
let resp = gloo_net::http::Request::post(&path)
.json(&plan)
.expect("Failed to set body")
.send()
.await?;
if resp.status() != 200 {
@ -640,9 +662,9 @@ impl HttpStore {
path.push_str("/plan");
path.push_str("/at");
path.push_str(&format!("/{}", date));
let resp = reqwasm::http::Request::post(&path)
.body(to_string(&plan).expect("Unable to encode plan as json"))
.header("content-type", "application/json")
let resp = gloo_net::http::Request::post(&path)
.json(&plan)
.expect("Failed to set body")
.send()
.await?;
if resp.status() != 200 {
@ -657,7 +679,7 @@ impl HttpStore {
let mut path = self.v2_path();
path.push_str("/plan");
path.push_str("/all");
let resp = reqwasm::http::Request::get(&path).send().await?;
let resp = gloo_net::http::Request::get(&path).send().await?;
if resp.status() != 200 {
Err(format!("Status: {}", resp.status()).into())
} else {
@ -676,7 +698,7 @@ impl HttpStore {
path.push_str("/plan");
path.push_str("/at");
path.push_str(&format!("/{}", date));
let resp = reqwasm::http::Request::delete(&path).send().await?;
let resp = gloo_net::http::Request::delete(&path).send().await?;
if resp.status() != 200 {
Err(format!("Status: {}", resp.status()).into())
} else {
@ -692,7 +714,7 @@ impl HttpStore {
path.push_str("/plan");
path.push_str("/at");
path.push_str(&format!("/{}", date));
let resp = reqwasm::http::Request::get(&path).send().await?;
let resp = gloo_net::http::Request::get(&path).send().await?;
if resp.status() != 200 {
Err(format!("Status: {}", resp.status()).into())
} else {
@ -706,22 +728,22 @@ impl HttpStore {
}
}
pub async fn fetch_plan(&self) -> Result<Option<Vec<(String, i32)>>, Error> {
let mut path = self.v2_path();
path.push_str("/plan");
let resp = reqwasm::http::Request::get(&path).send().await?;
if resp.status() != 200 {
Err(format!("Status: {}", resp.status()).into())
} else {
debug!("We got a valid response back");
let plan = resp
.json::<PlanDataResponse>()
.await
.map_err(|e| format!("{}", e))?
.as_success();
Ok(plan)
}
}
//pub async fn fetch_plan(&self) -> Result<Option<Vec<(String, i32)>>, Error> {
// let mut path = self.v2_path();
// path.push_str("/plan");
// let resp = gloo_net::http::Request::get(&path).send().await?;
// if resp.status() != 200 {
// Err(format!("Status: {}", resp.status()).into())
// } else {
// debug!("We got a valid response back");
// let plan = resp
// .json::<PlanDataResponse>()
// .await
// .map_err(|e| format!("{}", e))?
// .as_success();
// Ok(plan)
// }
//}
pub async fn fetch_inventory_for_date(
&self,
@ -738,13 +760,9 @@ impl HttpStore {
path.push_str("/inventory");
path.push_str("/at");
path.push_str(&format!("/{}", date));
let resp = reqwasm::http::Request::get(&path).send().await?;
let resp = gloo_net::http::Request::get(&path).send().await?;
if resp.status() != 200 {
let err = Err(format!("Status: {}", resp.status()).into());
Ok(match self.local_store.get_inventory_data() {
Some(val) => val,
None => return err,
})
Err(format!("Status: {}", resp.status()).into())
} else {
debug!("We got a valid response back");
let InventoryData {
@ -777,13 +795,9 @@ impl HttpStore {
> {
let mut path = self.v2_path();
path.push_str("/inventory");
let resp = reqwasm::http::Request::get(&path).send().await?;
let resp = gloo_net::http::Request::get(&path).send().await?;
if resp.status() != 200 {
let err = Err(format!("Status: {}", resp.status()).into());
Ok(match self.local_store.get_inventory_data() {
Some(val) => val,
None => return err,
})
Err(format!("Status: {}", resp.status()).into())
} else {
debug!("We got a valid response back");
let InventoryData {
@ -818,13 +832,10 @@ impl HttpStore {
path.push_str(&format!("/{}", date));
let filtered_ingredients: Vec<IngredientKey> = filtered_ingredients.into_iter().collect();
let modified_amts: Vec<(IngredientKey, String)> = modified_amts.into_iter().collect();
debug!("Storing inventory data in cache");
let serialized_inventory = to_string(&(filtered_ingredients, modified_amts, extra_items))
.expect("Unable to encode plan as json");
debug!("Storing inventory data via API");
let resp = reqwasm::http::Request::post(&path)
.body(&serialized_inventory)
.header("content-type", "application/json")
let resp = gloo_net::http::Request::post(&path)
.json(&(filtered_ingredients, modified_amts, extra_items))
.expect("Failed to set body")
.send()
.await?;
if resp.status() != 200 {
@ -847,13 +858,10 @@ impl HttpStore {
path.push_str("/inventory");
let filtered_ingredients: Vec<IngredientKey> = filtered_ingredients.into_iter().collect();
let modified_amts: Vec<(IngredientKey, String)> = modified_amts.into_iter().collect();
debug!("Storing inventory data in cache");
let serialized_inventory = to_string(&(filtered_ingredients, modified_amts, extra_items))
.expect("Unable to encode plan as json");
debug!("Storing inventory data via API");
let resp = reqwasm::http::Request::post(&path)
.body(&serialized_inventory)
.header("content-type", "application/json")
let resp = gloo_net::http::Request::post(&path)
.json(&(filtered_ingredients, modified_amts, extra_items))
.expect("Failed to set body")
.send()
.await?;
if resp.status() != 200 {
@ -868,7 +876,7 @@ impl HttpStore {
pub async fn fetch_staples(&self) -> Result<Option<String>, Error> {
let mut path = self.v2_path();
path.push_str("/staples");
let resp = reqwasm::http::Request::get(&path).send().await?;
let resp = gloo_net::http::Request::get(&path).send().await?;
if resp.status() != 200 {
debug!("Invalid response back");
Err(format!("Status: {}", resp.status()).into())
@ -882,15 +890,15 @@ impl HttpStore {
}
}
pub async fn store_staples<S: AsRef<str>>(&self, content: S) -> Result<(), Error> {
pub async fn store_staples<S: AsRef<str> + serde::Serialize>(
&self,
content: S,
) -> Result<(), Error> {
let mut path = self.v2_path();
path.push_str("/staples");
let serialized_staples: String =
to_string(content.as_ref()).expect("Failed to serialize staples to json");
let resp = reqwasm::http::Request::post(&path)
.body(&serialized_staples)
.header("content-type", "application/json")
let resp = gloo_net::http::Request::post(&path)
.json(&content)
.expect("Failed to set body")
.send()
.await?;
if resp.status() != 200 {

View File

@ -19,19 +19,32 @@ use std::{
use chrono::NaiveDate;
use client_api::UserData;
use recipes::{parse, Ingredient, IngredientKey, Recipe, RecipeEntry};
use serde::{Deserialize, Serialize};
use sycamore::futures::spawn_local_scoped;
use sycamore::prelude::*;
use sycamore_state::{Handler, MessageMapper};
use tracing::{debug, error, info, instrument, warn};
use wasm_bindgen::throw_str;
use crate::api::{HttpStore, LocalStore};
use crate::{
api::{HttpStore, LocalStore},
linear::LinearSignal,
};
#[derive(Debug, Clone, PartialEq)]
fn bool_true() -> bool {
true
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AppState {
pub recipe_counts: BTreeMap<String, usize>,
pub recipe_counts: BTreeMap<String, u32>,
pub recipe_categories: BTreeMap<String, String>,
pub extras: Vec<(String, String)>,
// FIXME(jwall): This should really be storable I think?
#[serde(skip_deserializing, skip_serializing)]
pub staples: Option<BTreeSet<Ingredient>>,
// FIXME(jwall): This should really be storable I think?
#[serde(skip_deserializing, skip_serializing)]
pub recipes: BTreeMap<String, Recipe>,
pub category_map: BTreeMap<String, String>,
pub filtered_ingredients: BTreeSet<IngredientKey>,
@ -39,12 +52,15 @@ pub struct AppState {
pub auth: Option<UserData>,
pub plan_dates: BTreeSet<NaiveDate>,
pub selected_plan_date: Option<NaiveDate>,
#[serde(default = "bool_true")]
pub use_staples: bool,
}
impl AppState {
pub fn new() -> Self {
Self {
recipe_counts: BTreeMap::new(),
recipe_categories: BTreeMap::new(),
extras: Vec::new(),
staples: None,
recipes: BTreeMap::new(),
@ -54,22 +70,23 @@ impl AppState {
auth: None,
plan_dates: BTreeSet::new(),
selected_plan_date: None,
use_staples: true,
}
}
}
pub enum Message {
ResetRecipeCounts,
UpdateRecipeCount(String, usize),
UpdateRecipeCount(String, u32),
AddExtra(String, String),
RemoveExtra(usize),
UpdateExtra(usize, String, String),
SaveRecipe(RecipeEntry, Option<Box<dyn FnOnce()>>),
SetRecipe(String, Recipe),
RemoveRecipe(String, Option<Box<dyn FnOnce()>>),
UpdateCategory(String, String, Option<Box<dyn FnOnce()>>),
ResetInventory,
AddFilteredIngredient(IngredientKey),
RemoveFilteredIngredient(IngredientKey),
UpdateAmt(IngredientKey, String),
SetUserData(UserData),
SaveState(Option<Box<dyn FnOnce()>>),
@ -77,6 +94,7 @@ pub enum Message {
UpdateStaples(String, Option<Box<dyn FnOnce()>>),
DeletePlan(NaiveDate, Option<Box<dyn FnOnce()>>),
SelectPlanDate(NaiveDate, Option<Box<dyn FnOnce()>>),
UpdateUseStaples(bool), // TODO(jwall): Should this just be various settings?
}
impl Debug for Message {
@ -99,9 +117,6 @@ impl Debug for Message {
.field(arg2)
.finish(),
Self::SaveRecipe(arg0, _) => f.debug_tuple("SaveRecipe").field(arg0).finish(),
Self::SetRecipe(arg0, arg1) => {
f.debug_tuple("SetRecipe").field(arg0).field(arg1).finish()
}
Self::RemoveRecipe(arg0, _) => f.debug_tuple("SetCategoryMap").field(arg0).finish(),
Self::UpdateCategory(i, c, _) => {
f.debug_tuple("UpdateCategory").field(i).field(c).finish()
@ -110,6 +125,9 @@ impl Debug for Message {
Self::AddFilteredIngredient(arg0) => {
f.debug_tuple("AddFilteredIngredient").field(arg0).finish()
}
Self::RemoveFilteredIngredient(arg0) => {
f.debug_tuple("RemoveFilteredIngredient").field(arg0).finish()
}
Self::UpdateAmt(arg0, arg1) => {
f.debug_tuple("UpdateAmt").field(arg0).field(arg1).finish()
}
@ -117,6 +135,7 @@ impl Debug for Message {
Self::SaveState(_) => write!(f, "SaveState"),
Self::LoadState(_) => write!(f, "LoadState"),
Self::UpdateStaples(arg, _) => f.debug_tuple("UpdateStaples").field(arg).finish(),
Self::UpdateUseStaples(arg) => f.debug_tuple("UpdateUseStaples").field(arg).finish(),
Self::SelectPlanDate(arg, _) => f.debug_tuple("SelectPlanDate").field(arg).finish(),
Self::DeletePlan(arg, _) => f.debug_tuple("DeletePlan").field(arg).finish(),
}
@ -129,14 +148,14 @@ pub struct StateMachine {
}
#[instrument]
fn parse_recipes(
pub fn parse_recipes(
recipe_entries: &Option<Vec<RecipeEntry>>,
) -> Result<Option<BTreeMap<String, Recipe>>, String> {
match recipe_entries {
Some(parsed) => {
let mut parsed_map = BTreeMap::new();
for r in parsed {
let recipe = match parse::as_recipe(&r.recipe_text()) {
let recipe = match r.try_into() {
Ok(r) => r,
Err(e) => {
error!("Error parsing recipe {}", e);
@ -156,59 +175,75 @@ impl StateMachine {
Self { store, local_store }
}
#[instrument(skip_all)]
async fn load_state(
store: &HttpStore,
local_store: &LocalStore,
original: &Signal<AppState>,
) -> Result<(), crate::api::Error> {
// NOTE(jwall): We use a linear Signal in here to ensure that we only
// call set on the signal once. When the LinearSignal get's dropped it
// will call set on the contained Signal.
let mut original: LinearSignal<AppState> = original.into();
if let Some(state) = local_store.fetch_app_state().await {
original = original.update(state);
}
let mut state = original.get().as_ref().clone();
info!("Synchronizing Recipes");
let recipe_entries = &store.fetch_recipes().await?;
let recipes = parse_recipes(&recipe_entries)?;
debug!(?recipes, "Parsed Recipes");
if let Some(recipes) = recipes {
state.recipes = recipes;
};
info!("Synchronizing staples");
state.staples = if let Some(content) = store.fetch_staples().await? {
local_store.set_staples(&content);
// now we need to parse staples as ingredients
let mut staples = parse::as_ingredient_list(&content)?;
Some(staples.drain(0..).collect())
} else {
if let Some(content) = local_store.get_staples() {
let mut staples = parse::as_ingredient_list(&content)?;
Some(staples.drain(0..).collect())
} else {
None
}
Some(BTreeSet::new())
};
info!("Synchronizing recipe");
if let Some(recipe_entries) = recipe_entries {
local_store.set_all_recipes(recipe_entries);
local_store.set_all_recipes(recipe_entries).await;
state.recipe_categories = recipe_entries
.iter()
.map(|entry| {
debug!(recipe_entry=?entry, "Getting recipe category");
(
entry.recipe_id().to_owned(),
entry
.category()
.cloned()
.unwrap_or_else(|| "Entree".to_owned()),
)
})
.collect::<BTreeMap<String, String>>();
}
info!("Fetching meal plan list");
let plan_dates = store.fetch_plan_dates().await?;
if let Some(mut plan_dates) = plan_dates {
if let Some(mut plan_dates) = store.fetch_plan_dates().await? {
debug!(?plan_dates, "meal plan list");
state.plan_dates = BTreeSet::from_iter(plan_dates.drain(0..));
}
info!("Synchronizing meal plan");
let plan = if let Some(cached_plan_date) = local_store.get_plan_date() {
let plan = store.fetch_plan_for_date(&cached_plan_date).await?;
state.selected_plan_date = Some(cached_plan_date);
plan
let plan = if let Some(ref cached_plan_date) = state.selected_plan_date {
store
.fetch_plan_for_date(cached_plan_date)
.await?
.or_else(|| Some(Vec::new()))
} else {
store.fetch_plan().await?
None
};
if let Some(plan) = plan {
// set the counts.
let mut plan_map = BTreeMap::new();
for (id, count) in plan {
plan_map.insert(id, count as usize);
plan_map.insert(id, count as u32);
}
state.recipe_counts = plan_map;
for (id, _) in state.recipes.iter() {
@ -217,44 +252,32 @@ impl StateMachine {
}
}
} else {
if let Some(plan) = local_store.get_plan() {
state.recipe_counts = plan.iter().map(|(k, v)| (k.clone(), *v as usize)).collect();
} else {
// Initialize things to zero.
if let Some(rs) = recipe_entries {
for r in rs {
state.recipe_counts.insert(r.recipe_id().to_owned(), 0);
}
// Initialize things to zero.
if let Some(rs) = recipe_entries {
for r in rs {
state.recipe_counts.insert(r.recipe_id().to_owned(), 0);
}
}
}
let plan = state
.recipe_counts
.iter()
.map(|(k, v)| (k.clone(), *v as i32))
.collect::<Vec<(String, i32)>>();
local_store.store_plan(&plan);
info!("Checking for user account data");
if let Some(user_data) = store.fetch_user_data().await {
debug!("Successfully got account data from server");
local_store.set_user_data(Some(&user_data));
local_store.set_user_data(Some(&user_data)).await;
state.auth = Some(user_data);
} else {
debug!("Using account data from local store");
let user_data = local_store.get_user_data();
let user_data = local_store.get_user_data().await;
state.auth = user_data;
}
info!("Synchronizing categories");
match store.fetch_categories().await {
Ok(Some(mut categories_content)) => {
debug!(categories=?categories_content);
local_store.set_categories(Some(&categories_content));
let category_map = BTreeMap::from_iter(categories_content.drain(0..));
state.category_map = category_map;
}
Ok(None) => {
warn!("There is no category file");
local_store.set_categories(None);
}
Err(e) => {
error!("{:?}", e);
@ -268,11 +291,6 @@ impl StateMachine {
info!("Synchronizing inventory data");
match inventory_data {
Ok((filtered_ingredients, modified_amts, extra_items)) => {
local_store.set_inventory_data((
&filtered_ingredients,
&modified_amts,
&extra_items,
));
state.modified_amts = modified_amts;
state.filtered_ingredients = filtered_ingredients;
state.extras = extra_items;
@ -281,7 +299,9 @@ impl StateMachine {
error!("{:?}", e);
}
}
original.set(state);
// Finally we store all of this app state back to our localstore
local_store.store_app_state(&state).await;
original.update(state);
Ok(())
}
}
@ -297,69 +317,49 @@ impl MessageMapper<Message, AppState> for StateMachine {
for (id, _) in original_copy.recipes.iter() {
map.insert(id.clone(), 0);
}
let plan: Vec<(String, i32)> =
map.iter().map(|(s, i)| (s.clone(), *i as i32)).collect();
self.local_store.store_plan(&plan);
original_copy.recipe_counts = map;
}
Message::UpdateRecipeCount(id, count) => {
original_copy.recipe_counts.insert(id, count);
let plan: Vec<(String, i32)> = original_copy
.recipe_counts
.iter()
.map(|(s, i)| (s.clone(), *i as i32))
.collect();
self.local_store.store_plan(&plan);
}
Message::AddExtra(amt, name) => {
original_copy.extras.push((amt, name));
self.local_store.set_inventory_data((
&original_copy.filtered_ingredients,
&original_copy.modified_amts,
&original_copy.extras,
))
}
Message::RemoveExtra(idx) => {
original_copy.extras.remove(idx);
self.local_store.set_inventory_data((
&original_copy.filtered_ingredients,
&original_copy.modified_amts,
&original_copy.extras,
))
}
Message::UpdateExtra(idx, amt, name) => {
match original_copy.extras.get_mut(idx) {
Some(extra) => {
extra.0 = amt;
extra.1 = name;
}
None => {
throw_str("Attempted to remove extra that didn't exist");
}
Message::UpdateExtra(idx, amt, name) => match original_copy.extras.get_mut(idx) {
Some(extra) => {
extra.0 = amt;
extra.1 = name;
}
self.local_store.set_inventory_data((
&original_copy.filtered_ingredients,
&original_copy.modified_amts,
&original_copy.extras,
))
}
Message::SetRecipe(id, recipe) => {
original_copy.recipes.insert(id, recipe);
}
None => {
throw_str("Attempted to remove extra that didn't exist");
}
},
Message::SaveRecipe(entry, callback) => {
let recipe =
parse::as_recipe(entry.recipe_text()).expect("Failed to parse RecipeEntry");
original_copy
.recipes
.insert(entry.recipe_id().to_owned(), recipe);
original_copy
.recipe_counts
.insert(entry.recipe_id().to_owned(), 0);
let recipe_id = entry.recipe_id().to_owned();
let recipe: Recipe = (&entry).try_into().expect("Failed to parse RecipeEntry");
original_copy.recipes.insert(recipe_id.clone(), recipe);
if !original_copy.recipe_counts.contains_key(entry.recipe_id()) {
original_copy.recipe_counts.insert(recipe_id.clone(), 0);
}
if let Some(cat) = entry.category().cloned() {
original_copy
.recipe_categories
.entry(recipe_id.clone())
.and_modify(|c| *c = cat.clone())
.or_insert(cat);
}
let store = self.store.clone();
self.local_store.set_recipe_entry(&entry);
let local_store = self.local_store.clone();
spawn_local_scoped(cx, async move {
local_store.set_recipe_entry(&entry).await;
if let Err(e) = store.store_recipes(vec![entry]).await {
// FIXME(jwall): We should have a global way to trigger error messages
error!(err=?e, "Unable to save Recipe");
// FIXME(jwall): This should be an error message
} else {
}
callback.map(|f| f());
});
@ -367,9 +367,10 @@ impl MessageMapper<Message, AppState> for StateMachine {
Message::RemoveRecipe(recipe, callback) => {
original_copy.recipe_counts.remove(&recipe);
original_copy.recipes.remove(&recipe);
self.local_store.delete_recipe_entry(&recipe);
let store = self.store.clone();
let local_store = self.local_store.clone();
spawn_local_scoped(cx, async move {
local_store.delete_recipe_entry(&recipe).await;
if let Err(err) = store.delete_recipe(&recipe).await {
error!(?err, "Failed to delete recipe");
}
@ -377,8 +378,6 @@ impl MessageMapper<Message, AppState> for StateMachine {
});
}
Message::UpdateCategory(ingredient, category, callback) => {
self.local_store
.set_categories(Some(&vec![(ingredient.clone(), category.clone())]));
original_copy
.category_map
.insert(ingredient.clone(), category.clone());
@ -394,45 +393,42 @@ impl MessageMapper<Message, AppState> for StateMachine {
original_copy.filtered_ingredients = BTreeSet::new();
original_copy.modified_amts = BTreeMap::new();
original_copy.extras = Vec::new();
self.local_store.set_inventory_data((
&original_copy.filtered_ingredients,
&original_copy.modified_amts,
&original_copy.extras,
));
}
Message::AddFilteredIngredient(key) => {
original_copy.filtered_ingredients.insert(key);
self.local_store.set_inventory_data((
&original_copy.filtered_ingredients,
&original_copy.modified_amts,
&original_copy.extras,
));
}
Message::RemoveFilteredIngredient(key) => {
original_copy.filtered_ingredients.remove(&key);
}
Message::UpdateAmt(key, amt) => {
original_copy.modified_amts.insert(key, amt);
self.local_store.set_inventory_data((
&original_copy.filtered_ingredients,
&original_copy.modified_amts,
&original_copy.extras,
));
}
Message::SetUserData(user_data) => {
self.local_store.set_user_data(Some(&user_data));
original_copy.auth = Some(user_data);
let local_store = self.local_store.clone();
original_copy.auth = Some(user_data.clone());
spawn_local_scoped(cx, async move {
local_store.set_user_data(Some(&user_data)).await;
});
}
Message::SaveState(f) => {
let mut original_copy = original_copy.clone();
let store = self.store.clone();
let local_store = self.local_store.clone();
spawn_local_scoped(cx, async move {
if original_copy.selected_plan_date.is_none() {
original_copy.selected_plan_date = Some(chrono::Local::now().date_naive());
}
original_copy
.plan_dates
.insert(original_copy.selected_plan_date.map(|d| d.clone()).unwrap());
original_copy.plan_dates.insert(
original_copy
.selected_plan_date
.as_ref()
.map(|d| d.clone())
.unwrap(),
);
if let Err(e) = store.store_app_state(&original_copy).await {
error!(err=?e, "Error saving app state")
error!(err=?e, "Error saving app state");
};
local_store.store_app_state(&original_copy).await;
original.set(original_copy);
f.map(|f| f());
});
@ -443,55 +439,58 @@ impl MessageMapper<Message, AppState> for StateMachine {
Message::LoadState(f) => {
let store = self.store.clone();
let local_store = self.local_store.clone();
debug!("Loading user state.");
spawn_local_scoped(cx, async move {
Self::load_state(&store, &local_store, original)
.await
.expect("Failed to load_state.");
local_store.set_inventory_data((
&original.get().filtered_ingredients,
&original.get().modified_amts,
&original.get().extras,
));
if let Err(err) = Self::load_state(&store, &local_store, original).await {
error!(?err, "Failed to load user state");
}
f.map(|f| f());
});
return;
}
Message::UpdateStaples(content, callback) => {
let store = self.store.clone();
let local_store = self.local_store.clone();
spawn_local_scoped(cx, async move {
local_store.set_staples(&content);
store
.store_staples(content)
.await
.expect("Failed to store staples");
callback.map(|f| f());
if let Err(err) = store.store_staples(content).await {
error!(?err, "Failed to store staples");
} else {
callback.map(|f| f());
}
});
return;
}
Message::UpdateUseStaples(value) => {
original_copy.use_staples = value;
}
Message::SelectPlanDate(date, callback) => {
let store = self.store.clone();
let local_store = self.local_store.clone();
spawn_local_scoped(cx, async move {
if let Some(mut plan) = store
if let Ok(Some(mut plan)) = store
.fetch_plan_for_date(&date)
.await
.expect("Failed to fetch plan for date")
{
// Note(jwall): This is a little unusual but because this
// is async code we can't rely on the set below.
original_copy.recipe_counts =
BTreeMap::from_iter(plan.drain(0..).map(|(k, v)| (k, v as usize)));
BTreeMap::from_iter(plan.drain(0..).map(|(k, v)| (k, v as u32)));
let (filtered, modified, extras) = store
.fetch_inventory_for_date(&date)
.await
.expect("Failed to fetch inventory_data for date");
original_copy.modified_amts = modified;
original_copy.filtered_ingredients = filtered;
original_copy.extras = extras;
} else {
store.store_plan_for_date(Vec::new(), &date).await.expect("failed to set plan on server");
}
let (filtered, modified, extras) = store
.fetch_inventory_for_date(&date)
original_copy.plan_dates.insert(date.clone());
original_copy.selected_plan_date = Some(date.clone());
store
.store_plan_for_date(vec![], &date)
.await
.expect("Failed to fetch inventory_data for date");
original_copy.modified_amts = modified;
original_copy.filtered_ingredients = filtered;
original_copy.extras = extras;
local_store.set_plan_date(&date);
.expect("Failed to init meal plan for date");
local_store.store_app_state(&original_copy).await;
original.set(original_copy);
callback.map(|f| f());
@ -505,26 +504,34 @@ impl MessageMapper<Message, AppState> for StateMachine {
let store = self.store.clone();
let local_store = self.local_store.clone();
spawn_local_scoped(cx, async move {
store
.delete_plan_for_date(&date)
.await
.expect("Failed to delete meal plan for date");
local_store.delete_plan();
if let Err(err) = store.delete_plan_for_date(&date).await {
error!(?err, "Error deleting plan");
} else {
original_copy.plan_dates.remove(&date);
// Reset all meal planning state;
let _ = original_copy.recipe_counts.iter_mut().map(|(_, v)| *v = 0);
original_copy.filtered_ingredients = BTreeSet::new();
original_copy.modified_amts = BTreeMap::new();
original_copy.extras = Vec::new();
local_store.store_app_state(&original_copy).await;
original.set(original_copy);
original_copy.plan_dates.remove(&date);
// Reset all meal planning state;
let _ = original_copy.recipe_counts.iter_mut().map(|(_, v)| *v = 0);
original_copy.filtered_ingredients = BTreeSet::new();
original_copy.modified_amts = BTreeMap::new();
original_copy.extras = Vec::new();
original.set(original_copy);
callback.map(|f| f());
callback.map(|f| f());
}
});
// NOTE(jwall): Because we do our signal set above in the async block
// we have to return here to avoid lifetime issues and double setting
// the original signal.
return;
}
}
original.set(original_copy);
spawn_local_scoped(cx, {
let local_store = self.local_store.clone();
async move {
local_store.store_app_state(&original_copy).await;
original.set(original_copy);
}
});
}
}

View File

@ -31,21 +31,30 @@ Instructions here
#[component]
pub fn AddRecipe<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View<G> {
let recipe_title = create_signal(cx, String::new());
let category = create_signal(cx, String::new());
let create_recipe_signal = create_signal(cx, ());
let dirty = create_signal(cx, false);
let entry = create_memo(cx, || {
RecipeEntry(
recipe_title
let category = category.get().as_ref().to_owned();
let category = if category.is_empty() {
None
} else {
Some(category)
};
RecipeEntry {
id: recipe_title
.get()
.as_ref()
.to_lowercase()
.replace(" ", "_")
.replace("\n", ""),
STARTER_RECIPE
text: STARTER_RECIPE
.replace("TITLE_PLACEHOLDER", recipe_title.get().as_str())
.replace("\r", ""),
)
category,
serving_count: None,
}
});
view! {cx,

View File

@ -49,7 +49,7 @@ fn CategoryRow<'ctx, G: Html>(cx: Scope<'ctx>, props: CategoryRowProps<'ctx>) ->
});
view! {cx,
tr() {
td() {
td(class="margin-bot-1 border-bottom") {
(ingredient_clone) br()
Indexed(
iterable=recipes,

View File

@ -17,8 +17,8 @@ use sycamore::prelude::*;
#[component]
pub fn Footer<G: Html>(cx: Scope) -> View<G> {
view! {cx,
nav(class="no-print") {
ul {
nav(class="no-print menu-font") {
ul(class="no-list") {
li { a(href="https://github.com/zaphar/kitchen") { "On Github" } }
}
}

View File

@ -23,9 +23,9 @@ pub fn Header<'ctx, G: Html>(cx: Scope<'ctx>, h: StateHandler<'ctx>) -> View<G>
None => "Login".to_owned(),
});
view! {cx,
nav(class="no-print") {
nav(class="no-print row-flex align-center header-bg heavy-bottom-border menu-font") {
h1(class="title") { "Kitchen" }
ul {
ul(class="row-flex align-center no-list") {
li { a(href="/ui/planning/select") { "MealPlan" } }
li { a(href="/ui/manage/ingredients") { "Manage" } }
li { a(href="/ui/login") { (login.get()) } }

View File

@ -25,16 +25,6 @@ pub mod shopping_list;
pub mod staples;
pub mod tabs;
pub use add_recipe::*;
pub use categories::*;
pub use footer::*;
pub use header::*;
pub use number_field::*;
pub use plan_list::*;
pub use recipe::*;
pub use recipe_list::*;
pub use recipe_plan::*;
pub use recipe_selection::*;
pub use shopping_list::*;
pub use staples::*;
pub use tabs::*;

View File

@ -11,64 +11,238 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use maud::html;
use sycamore::prelude::*;
use tracing::debug;
use web_sys::{Event, HtmlInputElement};
use tracing::{debug, error};
use wasm_bindgen::{JsCast, JsValue};
use wasm_web_component::{web_component, WebComponentBinding};
use web_sys::{CustomEvent, CustomEventInit, Event, HtmlElement, InputEvent, ShadowRoot};
use crate::js_lib;
#[web_component(
observed_attrs = "['val', 'min', 'max', 'step']",
observed_events = "['change', 'click', 'input']"
)]
pub struct NumberSpinner {
root: Option<ShadowRoot>,
min: i32,
max: i32,
step: i32,
value: i32,
}
impl NumberSpinner {
fn get_input_el(&self) -> HtmlElement {
self.root
.as_ref()
.unwrap()
.get_element_by_id("nval")
.unwrap()
.dyn_into()
.unwrap()
}
}
impl WebComponentBinding for NumberSpinner {
fn init_mut(&mut self, element: &web_sys::HtmlElement) {
(self.min, self.max, self.step, self.value) = (0, 99, 1, 0);
debug!("Initializing element instance");
let root = html! {
span {
link rel="stylesheet" href="/ui/static/app.css" { };
style {
r#"
span { display: block; }
span.button {
font-size: 2em; font-weight: bold;
}
.number-input {
border-width: var(--border-width);
border-style: inset;
padding: 3pt;
border-radius: 10px;
width: 3em;
}
"#
};
span class="button" id="inc" { "+" }; " "
// TODO(jwall): plaintext-only would be nice but I can't actually do that yet.
span id="nval" class="number-input" contenteditable="true" { "0" } " "
span class="button" id="dec" { "-" };
};
};
self.attach_shadow(element, &root.into_string());
self.root = element.shadow_root();
}
fn connected_mut(&mut self, element: &HtmlElement) {
debug!("COUNTS: connecting to DOM");
let val = element.get_attribute("val").unwrap_or_else(|| "0".into());
let min = element.get_attribute("min").unwrap_or_else(|| "0".into());
let max = element.get_attribute("max").unwrap_or_else(|| "99".into());
let step = element.get_attribute("step").unwrap_or_else(|| "1".into());
debug!(?val, ?min, ?max, ?step, "connecting to DOM");
let nval_el = self.get_input_el();
if let Ok(parsed) = val.parse::<i32>() {
self.value = parsed;
nval_el.set_inner_text(&val);
}
if let Ok(parsed) = min.parse::<i32>() {
self.min = parsed;
}
if let Ok(parsed) = max.parse::<i32>() {
self.max = parsed;
}
if let Ok(parsed) = step.parse::<i32>() {
self.step = parsed;
}
}
fn handle_event_mut(&mut self, element: &web_sys::HtmlElement, event: &Event) {
let target: HtmlElement = event.target().unwrap().dyn_into().unwrap();
let id = target.get_attribute("id");
let event_type = event.type_();
let nval_el = self.get_input_el();
debug!(?id, ?event_type, "saw event");
match (id.as_ref().map(|s| s.as_str()), event_type.as_str()) {
(Some("inc"), "click") => {
if self.value < self.max {
self.value += 1;
nval_el.set_inner_text(&format!("{}", self.value));
}
}
(Some("dec"), "click") => {
if self.value > self.min {
self.value -= 1;
nval_el.set_inner_text(&format!("{}", self.value));
}
}
(Some("nval"), "input") => {
let input_event = event.dyn_ref::<InputEvent>().unwrap();
if let Some(data) = input_event.data() {
// We only allow numeric input data here.
debug!(data, input_type=?input_event.input_type() , "got input");
if data.chars().filter(|c| !c.is_numeric()).count() > 0 {
nval_el.set_inner_text(&format!("{}", self.value));
}
} else {
nval_el.set_inner_text(&format!("{}{}", nval_el.inner_text(), self.value));
}
}
_ => {
debug!("Ignoring event");
return;
}
};
let mut event_dict = CustomEventInit::new();
event_dict.detail(&JsValue::from_f64(self.value as f64));
element
.dispatch_event(&CustomEvent::new_with_event_init_dict("updated", &event_dict).unwrap())
.unwrap();
debug!("Dispatched updated event");
}
fn attribute_changed_mut(
&mut self,
_element: &web_sys::HtmlElement,
name: JsValue,
old_value: JsValue,
new_value: JsValue,
) {
let nval_el = self.get_input_el();
let name = name.as_string().unwrap();
debug!(
?name,
?old_value,
?new_value,
"COUNTS: handling attribute change"
);
match name.as_str() {
"val" => {
debug!("COUNTS: got an updated value");
if let Some(val) = new_value.as_string() {
debug!(val, "COUNTS: got an updated value");
if let Ok(val) = val.parse::<i32>() {
self.value = val;
nval_el.set_inner_text(format!("{}", self.value).as_str());
} else {
error!(?new_value, "COUNTS: Not a valid f64 value");
}
}
}
"min" => {
if let Some(val) = new_value.as_string() {
debug!(val, "COUNTS: got an updated value");
if let Ok(val) = val.parse::<i32>() {
self.min = val;
} else {
error!(?new_value, "COUNTS: Not a valid f64 value");
}
}
}
"max" => {
if let Some(val) = new_value.as_string() {
debug!(val, "COUNTS: got an updated value");
if let Ok(val) = val.parse::<i32>() {
self.max = val;
} else {
error!(?new_value, "COUNTS: Not a valid f64 value");
}
}
}
"step" => {
if let Some(val) = new_value.as_string() {
debug!(val, "COUNTS: got an updated value");
if let Ok(val) = val.parse::<i32>() {
self.step = val;
} else {
error!(?new_value, "COUNTS: Not a valid f64 value");
}
}
}
_ => {
debug!("Ignoring Attribute Change");
return;
}
}
}
}
#[derive(Props)]
pub struct NumberProps<'ctx, F>
where
F: Fn(Event),
F: Fn(CustomEvent),
{
name: String,
class: String,
on_change: Option<F>,
min: i32,
counter: &'ctx Signal<String>,
min: f64,
counter: &'ctx Signal<f64>,
}
#[component]
pub fn NumberField<'ctx, F, G: Html>(cx: Scope<'ctx>, props: NumberProps<'ctx, F>) -> View<G>
where
F: Fn(web_sys::Event) + 'ctx,
F: Fn(CustomEvent) + 'ctx,
{
let NumberProps {
name,
class,
on_change,
min,
counter,
} = props;
NumberSpinner::define_once();
// TODO(jwall): I'm pretty sure this triggers: https://github.com/sycamore-rs/sycamore/issues/602
// Which means I probably have to wait till v0.9.0 drops or switch to leptos.
let id = name.clone();
let inc_target_id = id.clone();
let dec_target_id = id.clone();
let min_field = format!("{}", min);
let initial_count = *counter.get();
view! {cx,
div() {
input(type="number", id=id, name=name, class="item-count-sel", min=min_field, max="99", step="1", bind:value=counter, on:input=move |evt| {
on_change.as_ref().map(|f| f(evt));
})
span(class="item-count-inc-dec", on:click=move |_| {
let i: i32 = counter.get_untracked().parse().unwrap();
let target = js_lib::get_element_by_id::<HtmlInputElement>(&inc_target_id).unwrap().expect(&format!("No such element with id {}", inc_target_id));
counter.set(format!("{}", i+1));
debug!(counter=%(counter.get_untracked()), "set counter to new value");
// We force an input event to get triggered for our target.
target.dispatch_event(&web_sys::Event::new("input").expect("Failed to create new event")).expect("Failed to dispatch event to target");
}) { "" }
" "
span(class="item-count-inc-dec", on:click=move |_| {
let i: i32 = counter.get_untracked().parse().unwrap();
let target = js_lib::get_element_by_id::<HtmlInputElement>(&dec_target_id).unwrap().expect(&format!("No such element with id {}", dec_target_id));
if i > min {
counter.set(format!("{}", i-1));
debug!(counter=%(counter.get_untracked()), "set counter to new value");
// We force an input event to get triggered for our target.
target.dispatch_event(&web_sys::Event::new("input").expect("Failed to create new event")).expect("Failed to dispatch event to target");
}
}) { "" }
}
number-spinner(id=id, class=(class), val=(initial_count), min=min, on:updated=move |evt: Event| {
let event = evt.unchecked_into::<CustomEvent>();
let val: f64 = event.detail().as_f64().unwrap();
counter.set(val);
on_change.as_ref().map(|f| f(event));
debug!(counter=%(counter.get_untracked()), "set counter to new value");
})
}
}

View File

@ -1,4 +1,3 @@
use chrono::NaiveDate;
// Copyright 2023 Jeremy Wall (Jeremy@marzhilsltudios.com)
//
// Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +11,7 @@ use chrono::NaiveDate;
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use chrono::NaiveDate;
use sycamore::prelude::*;
use crate::app_state::{Message, StateHandler};
@ -23,30 +23,25 @@ pub struct PlanListProps<'ctx> {
list: &'ctx ReadSignal<Vec<NaiveDate>>,
}
// TODO(jwall): We also need a "new plan button"
#[instrument(skip_all, fields(dates=?props.list))]
#[component]
pub fn PlanList<'ctx, G: Html>(cx: Scope<'ctx>, props: PlanListProps<'ctx>) -> View<G> {
let PlanListProps { sh, list } = props;
view! {cx,
div() {
table() {
div(class="column-flex") {
Indexed(
iterable=list,
view=move |cx, date| {
let date_display = format!("{}", date);
view!{cx,
tr() {
td() {
span(role="button", class="outline", on:click=move |_| {
sh.dispatch(cx, Message::SelectPlanDate(date, None))
}) { (date_display) }
}
td() {
span(role="button", class="destructive", on:click=move |_| {
sh.dispatch(cx, Message::DeletePlan(date, None))
}) { "Delete Plan" }
}
div(class="row-flex margin-bot-half") {
button(class="outline margin-right-1", on:click=move |_| {
sh.dispatch(cx, Message::SelectPlanDate(date, None))
}) { (date_display) }
button(class="destructive", on:click=move |_| {
sh.dispatch(cx, Message::DeletePlan(date, None))
}) { "Delete Plan" }
}
}
},

View File

@ -49,9 +49,18 @@ pub fn Editor<'ctx, G: Html>(cx: Scope<'ctx>, props: RecipeComponentProps<'ctx>)
let store = crate::api::HttpStore::get_from_context(cx);
let recipe: &Signal<RecipeEntry> =
create_signal(cx, RecipeEntry::new(&recipe_id, String::new()));
let text = create_signal(cx, String::new());
let text = create_signal(cx, String::from("0"));
let serving_count_str = create_signal(cx, String::new());
let serving_count = create_memo(cx, || {
if let Ok(count) = serving_count_str.get().parse::<i64>() {
count
} else {
0
}
});
let error_text = create_signal(cx, String::from("Parse results..."));
let aria_hint = create_signal(cx, "false");
let category = create_signal(cx, "Entree".to_owned());
spawn_local_scoped(cx, {
let store = store.clone();
@ -62,6 +71,9 @@ pub fn Editor<'ctx, G: Html>(cx: Scope<'ctx>, props: RecipeComponentProps<'ctx>)
.expect("Failure getting recipe");
if let Some(entry) = entry {
text.set(entry.recipe_text().to_owned());
if let Some(cat) = entry.category() {
category.set(cat.clone());
}
recipe.set(entry);
} else {
error_text.set("Unable to find recipe".to_owned());
@ -75,65 +87,67 @@ pub fn Editor<'ctx, G: Html>(cx: Scope<'ctx>, props: RecipeComponentProps<'ctx>)
debug!("creating editor view");
view! {cx,
div(class="grid") {
textarea(bind:value=text, aria-invalid=aria_hint.get(), rows=20, on:change=move |_| {
dirty.set(true);
check_recipe_parses(text.get_untracked().as_str(), error_text, aria_hint);
}, on:input=move |_| {
let current_ts = js_lib::get_ms_timestamp();
if (current_ts - *ts.get_untracked()) > 100 {
div {
label(for="recipe_category") { "Category" }
input(name="recipe_category", bind:value=category, on:change=move |_| dirty.set(true))
}
div {
label(for="serving_count") { "Serving Count" }
input(name="serving_count", bind:value=serving_count_str, on:change=move |_| dirty.set(true))
}
div {
div(class="row-flex") {
label(for="recipe_text", class="block align-stretch expand-height") { "Recipe: " }
textarea(class="width-third", name="recipe_text", bind:value=text, aria-invalid=aria_hint.get(), cols="50", rows=20, on:change=move |_| {
dirty.set(true);
check_recipe_parses(text.get_untracked().as_str(), error_text, aria_hint);
ts.set(current_ts);
}
})
}, on:input=move |_| {
let current_ts = js_lib::get_ms_timestamp();
if (current_ts - *ts.get_untracked()) > 100 {
check_recipe_parses(text.get_untracked().as_str(), error_text, aria_hint);
ts.set(current_ts);
}
})
}
div(class="parse") { (error_text.get()) }
}
span(role="button", on:click=move |_| {
let unparsed = text.get_untracked();
if check_recipe_parses(unparsed.as_str(), error_text, aria_hint) {
debug!("triggering a save");
if !*dirty.get_untracked() {
debug!("Recipe text is unchanged");
return;
}
debug!("Recipe text is changed");
spawn_local_scoped(cx, {
let store = crate::api::HttpStore::get_from_context(cx);
async move {
debug!("Attempting to save recipe");
if let Err(e) = store
.store_recipes(vec![RecipeEntry(
id.get_untracked().as_ref().clone(),
text.get_untracked().as_ref().clone(),
)])
.await
{
error!(?e, "Failed to save recipe");
error_text.set(format!("{:?}", e));
} else {
// We also need to set recipe in our state
dirty.set(false);
if let Ok(recipe) = recipes::parse::as_recipe(text.get_untracked().as_ref()) {
sh.dispatch(
cx,
Message::SetRecipe(id.get_untracked().as_ref().to_owned(), recipe),
);
}
};
div {
button(on:click=move |_| {
let unparsed = text.get_untracked();
if check_recipe_parses(unparsed.as_str(), error_text, aria_hint) {
debug!("triggering a save");
if !*dirty.get_untracked() {
debug!("Recipe text is unchanged");
return;
}
});
} else {
}
}) { "Save" } " "
span(role="button", on:click=move |_| {
sh.dispatch(cx, Message::RemoveRecipe(id.get_untracked().as_ref().to_owned(), Some(Box::new(|| sycamore_router::navigate("/ui/planning/plan")))));
}) { "delete" } " "
debug!("Recipe text is changed");
let category = category.get_untracked();
let category = if category.is_empty() {
None
} else {
Some(category.as_ref().clone())
};
let recipe_entry = RecipeEntry {
id: id.get_untracked().as_ref().clone(),
text: text.get_untracked().as_ref().clone(),
category,
serving_count: Some(*serving_count.get()),
};
sh.dispatch(cx, Message::SaveRecipe(recipe_entry, None));
dirty.set(false);
}
// TODO(jwall): Show error message if trying to save when recipe doesn't parse.
}) { "Save" } " "
button(on:click=move |_| {
sh.dispatch(cx, Message::RemoveRecipe(id.get_untracked().as_ref().to_owned(), Some(Box::new(|| sycamore_router::navigate("/ui/planning/plan")))));
}) { "delete" } " "
}
}
}
#[component]
fn Steps<G: Html>(cx: Scope, steps: Vec<recipes::Step>) -> View<G> {
let step_fragments = View::new_fragment(steps.iter().map(|step| {
let step_fragments = View::new_fragment(steps.iter().enumerate().map(|(idx, step)| {
let mut step = step.clone();
let ingredient_fragments = View::new_fragment(step.ingredients.drain(0..).map(|i| {
view! {cx,
@ -144,8 +158,8 @@ fn Steps<G: Html>(cx: Scope, steps: Vec<recipes::Step>) -> View<G> {
}).collect());
view! {cx,
div {
h3 { "Instructions" }
ul(class="ingredients") {
h3 { "Step " (idx + 1) }
ul(class="ingredients no-list") {
(ingredient_fragments)
}
div(class="instructions") {
@ -155,7 +169,7 @@ fn Steps<G: Html>(cx: Scope, steps: Vec<recipes::Step>) -> View<G> {
}
}).collect());
view! {cx,
h2 { "Steps: " }
h2 { "Instructions: " }
div(class="recipe_steps") {
(step_fragments)
}
@ -169,18 +183,22 @@ pub fn Viewer<'ctx, G: Html>(cx: Scope<'ctx>, props: RecipeComponentProps<'ctx>)
let recipe_signal = sh.get_selector(cx, move |state| {
if let Some(recipe) = state.get().recipes.get(&recipe_id) {
let title = recipe.title.clone();
let serving_count = recipe.serving_count.clone();
let desc = recipe.desc.clone().unwrap_or_else(|| String::new());
let steps = recipe.steps.clone();
Some((title, desc, steps))
Some((title, serving_count, desc, steps))
} else {
None
}
});
if let Some((title, desc, steps)) = recipe_signal.get().as_ref().clone() {
if let Some((title, serving_count, desc, steps)) = recipe_signal.get().as_ref().clone() {
debug!("Viewing recipe.");
view.set(view! {cx,
div(class="recipe") {
h1(class="recipe_title") { (title) }
div(class="serving_count") {
"Serving Count: " (serving_count.map(|v| format!("{}", v)).unwrap_or_else(|| "Unconfigured".to_owned()))
}
div(class="recipe_description") {
(desc)
}

View File

@ -1,3 +1,5 @@
use std::collections::BTreeMap;
// Copyright 2022 Jeremy Wall
//
// Licensed under the Apache License, Version 2.0 (the "License");
@ -13,54 +15,112 @@
// limitations under the License.
use recipes::Recipe;
use sycamore::prelude::*;
use tracing::instrument;
use tracing::{debug, instrument};
use crate::app_state::{Message, StateHandler};
use crate::components::recipe_selection::*;
#[derive(Props)]
pub struct CategoryGroupProps<'ctx> {
sh: StateHandler<'ctx>,
category: String,
recipes: Vec<(String, Recipe)>,
row_size: usize,
}
#[allow(non_snake_case)]
#[instrument(skip_all)]
pub fn RecipePlan<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View<G> {
let rows = sh.get_selector(cx, move |state| {
pub fn CategoryGroup<'ctx, G: Html>(
cx: Scope<'ctx>,
CategoryGroupProps {
sh,
category,
recipes,
row_size,
}: CategoryGroupProps<'ctx>,
) -> View<G> {
let rows = create_signal(cx, {
let mut rows = Vec::new();
for row in state
.get()
.recipes
for row in recipes
.iter()
.map(|(k, v)| create_signal(cx, (k.clone(), v.clone())))
.map(|(id, r)| create_signal(cx, (id.clone(), r.clone())))
.collect::<Vec<&Signal<(String, Recipe)>>>()
.chunks(4)
.chunks(row_size)
{
rows.push(create_signal(cx, Vec::from(row)));
}
rows
});
view! {cx,
table(class="recipe_selector no-print") {
h2 { (category) }
div(class="no-print row-flex flex-wrap-start align-stretch") {
(View::new_fragment(
rows.get().iter().cloned().map(|r| {
view ! {cx,
tr { Keyed(
Keyed(
iterable=r,
view=move |cx, sig| {
let title = create_memo(cx, move || sig.get().1.title.clone());
let serving_count = create_memo(cx, move || sig.get().1.serving_count.clone());
view! {cx,
td { RecipeSelection(i=sig.get().0.to_owned(), title=title, sh=sh) }
div(class="cell column-flex justify-end align-stretch") {
RecipeSelection(
i=sig.get().0.to_owned(),
title=title, sh=sh,
serving_count=serving_count,
) }
}
},
key=|sig| sig.get().0.to_owned(),
)}
)
}
}).collect()
))
}
span(role="button", on:click=move |_| {
}
}
#[allow(non_snake_case)]
#[instrument(skip_all)]
pub fn RecipePlan<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View<G> {
let recipe_category_groups = sh.get_selector(cx, |state| {
state
.get()
.recipe_categories
.iter()
.fold(BTreeMap::new(), |mut map, (r, cat)| {
debug!(?cat, recipe_id=?r, "Accumulating recipe into category");
map.entry(cat.clone()).or_insert(Vec::new()).push((
r.clone(),
state
.get()
.recipes
.get(r)
.expect(&format!("Failed to find recipe {}", r))
.clone(),
));
map
})
.iter()
.map(|(cat, rs)| (cat.clone(), rs.clone()))
.collect::<Vec<(String, Vec<(String, Recipe)>)>>()
});
view! {cx,
Keyed(
iterable=recipe_category_groups,
view=move |cx, (cat, recipes)| {
view! {cx,
CategoryGroup(sh=sh, category=cat, recipes=recipes, row_size=4)
}
},
key=|(ref cat, _)| cat.clone(),
)
button(on:click=move |_| {
sh.dispatch(cx, Message::LoadState(None));
}) { "Reset" } " "
span(role="button", on:click=move |_| {
button(on:click=move |_| {
sh.dispatch(cx, Message::ResetRecipeCounts);
}) { "Clear All" } " "
span(role="button", on:click=move |_| {
button(on:click=move |_| {
// Poor man's click event signaling.
sh.dispatch(cx, Message::SaveState(None));
}) { "Save Plan" } " "

View File

@ -23,6 +23,7 @@ use crate::components::NumberField;
pub struct RecipeCheckBoxProps<'ctx> {
pub i: String,
pub title: &'ctx ReadSignal<String>,
pub serving_count: &'ctx ReadSignal<Option<i64>>,
pub sh: StateHandler<'ctx>,
}
@ -35,7 +36,7 @@ pub fn RecipeSelection<'ctx, G: Html>(
cx: Scope<'ctx>,
props: RecipeCheckBoxProps<'ctx>,
) -> View<G> {
let RecipeCheckBoxProps { i, title, sh } = props;
let RecipeCheckBoxProps { i, title, sh, serving_count, } = props;
let id = Rc::new(i);
let id_for_count = id.clone();
// NOTE(jwall): The below get's a little tricky. We need a separate signal to bind for the
@ -52,10 +53,10 @@ pub fn RecipeSelection<'ctx, G: Html>(
.get(id_for_count.as_ref())
.unwrap_or(&0)
});
let count = create_signal(cx, format!("{}", *current_count.get_untracked()));
let count = create_signal(cx, *current_count.get_untracked() as f64);
create_effect(cx, || {
let updated_count = format!("{}", current_count.get());
if &updated_count != count.get_untracked().as_ref() {
let updated_count = *current_count.get() as f64;
if updated_count != *count.get_untracked() {
count.set(updated_count);
}
});
@ -65,12 +66,13 @@ pub fn RecipeSelection<'ctx, G: Html>(
let name = format!("recipe_id:{}", id);
let for_id = name.clone();
view! {cx,
div() {
label(for=for_id) { a(href=href) { (*title) } }
NumberField(name=name, counter=count, min=0, on_change=Some(move |_| {
debug!(idx=%id, count=%(*count.get_untracked()), "setting recipe count");
sh.dispatch(cx, Message::UpdateRecipeCount(id.as_ref().clone(), count.get_untracked().parse().expect("Count is not a valid usize")));
}))
label(for=for_id, class="flex-item-grow") { a(href=href) { (*title) } }
div {
"Serves: " (serving_count.get().map(|v| v.to_string()).unwrap_or("Unconfigured".to_owned()))
}
NumberField(name=name, class="flex-item-shrink".to_string(), counter=count, min=0.0, on_change=Some(move |_| {
debug!(idx=%id, count=%(*count.get_untracked()), "setting recipe count");
sh.dispatch(cx, Message::UpdateRecipeCount(id.as_ref().clone(), *count.get_untracked() as u32));
}))
}
}

View File

@ -19,6 +19,115 @@ use tracing::{debug, info, instrument};
use crate::app_state::{Message, StateHandler};
#[instrument(skip_all)]
fn make_deleted_ingredients_rows<'ctx, G: Html>(
cx: Scope<'ctx>,
sh: StateHandler<'ctx>,
show_staples: &'ctx ReadSignal<bool>,
) -> View<G> {
debug!("Making ingredients rows");
let ingredients = sh.get_selector(cx, move |state| {
let state = state.get();
let category_map = &state.category_map;
debug!("building ingredient list from state");
let mut acc = IngredientAccumulator::new();
for (id, count) in state.recipe_counts.iter() {
for _ in 0..(*count) {
acc.accumulate_from(
state
.recipes
.get(id)
.expect(&format!("No such recipe id exists: {}", id)),
);
}
}
if *show_staples.get() {
if let Some(staples) = &state.staples {
acc.accumulate_ingredients_for("Staples", staples.iter());
}
}
let mut ingredients = acc
.ingredients()
.into_iter()
// First we filter out any filtered ingredients
.filter(|(i, _)| state.filtered_ingredients.contains(i))
// Then we take into account our modified amts
.map(|(k, (i, rs))| {
let category = category_map
.get(&i.name)
.cloned()
.unwrap_or_else(|| String::new());
if state.modified_amts.contains_key(&k) {
(
k.clone(),
(
i.name,
i.form,
category,
state.modified_amts.get(&k).unwrap().clone(),
rs,
),
)
} else {
(
k.clone(),
(
i.name,
i.form,
category,
format!("{}", i.amt.normalize()),
rs,
),
)
}
})
.collect::<Vec<(
IngredientKey,
(String, Option<String>, String, String, BTreeSet<String>),
)>>();
ingredients.sort_by(|tpl1, tpl2| (&tpl1.1 .2, &tpl1.1 .0).cmp(&(&tpl2.1 .2, &tpl2.1 .0)));
ingredients
});
view!(
cx,
Indexed(
iterable = ingredients,
view = move |cx, (k, (name, form, category, amt, rs))| {
let category = if category == "" {
"other".to_owned()
} else {
category
};
let amt_signal = create_signal(cx, amt);
let k_clone = k.clone();
let form = form.map(|form| format!("({})", form)).unwrap_or_default();
let recipes = rs
.iter()
.fold(String::new(), |acc, s| format!("{}{},", acc, s))
.trim_end_matches(",")
.to_owned();
view! {cx,
tr {
td {
input(bind:value=amt_signal, class="width-5", type="text", on:change=move |_| {
sh.dispatch(cx, Message::UpdateAmt(k_clone.clone(), amt_signal.get_untracked().as_ref().clone()));
})
}
td {
input(type="button", class="fit-content no-print", value="Undo", on:click={
move |_| {
sh.dispatch(cx, Message::RemoveFilteredIngredient(k.clone()));
}})
}
td { (name) " " (form) "" br {} "" (category) "" }
td { (recipes) }
}
}
}
)
)
}
#[instrument(skip_all)]
fn make_ingredients_rows<'ctx, G: Html>(
cx: Scope<'ctx>,
@ -109,12 +218,12 @@ fn make_ingredients_rows<'ctx, G: Html>(
view! {cx,
tr {
td {
input(bind:value=amt_signal, type="text", on:change=move |_| {
input(bind:value=amt_signal, class="width-5", type="text", on:change=move |_| {
sh.dispatch(cx, Message::UpdateAmt(k_clone.clone(), amt_signal.get_untracked().as_ref().clone()));
})
}
td {
input(type="button", class="no-print destructive", value="X", on:click={
input(type="button", class="fit-content no-print destructive", value="X", on:click={
move |_| {
sh.dispatch(cx, Message::AddFilteredIngredient(k.clone()));
}})
@ -143,14 +252,14 @@ fn make_extras_rows<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> V
view! {cx,
tr {
td {
input(bind:value=amt_signal, type="text", on:change=move |_| {
input(bind:value=amt_signal, class="width-5", type="text", on:change=move |_| {
sh.dispatch(cx, Message::UpdateExtra(idx,
amt_signal.get_untracked().as_ref().clone(),
name_signal.get_untracked().as_ref().clone()));
})
}
td {
input(type="button", class="no-print destructive", value="X", on:click=move |_| {
input(type="button", class="fit-content no-print destructive", value="X", on:click=move |_| {
sh.dispatch(cx, Message::RemoveExtra(idx));
})
}
@ -191,24 +300,49 @@ fn make_shopping_table<'ctx, G: Html>(
}
}
fn make_deleted_items_table<'ctx, G: Html>(
cx: Scope<'ctx>,
sh: StateHandler<'ctx>,
show_staples: &'ctx ReadSignal<bool>,
) -> View<G> {
view! {cx,
h2 { "Deleted Items" }
table(class="pad-top shopping-list page-breaker container-fluid", role="grid") {
tr {
th { " Quantity " }
th { " Delete " }
th { " Ingredient " }
th { " Recipes " }
}
tbody {
(make_deleted_ingredients_rows(cx, sh, show_staples))
}
}
}
}
#[instrument(skip_all)]
#[component]
pub fn ShoppingList<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View<G> {
let show_staples = create_signal(cx, true);
let show_staples = sh.get_selector(cx, |state| state.get().use_staples);
view! {cx,
h1 { "Shopping List " }
label(for="show_staples_cb") { "Show staples" }
input(id="show_staples_cb", type="checkbox", bind:checked=show_staples)
input(id="show_staples_cb", type="checkbox", checked=*show_staples.get(), on:change=move|_| {
let value = !*show_staples.get_untracked();
sh.dispatch(cx, Message::UpdateUseStaples(value));
})
(make_shopping_table(cx, sh, show_staples))
span(role="button", class="no-print", on:click=move |_| {
(make_deleted_items_table(cx, sh, show_staples))
button(class="no-print", on:click=move |_| {
info!("Registering add item request for inventory");
sh.dispatch(cx, Message::AddExtra(String::new(), String::new()));
}) { "Add Item" } " "
span(role="button", class="no-print", on:click=move |_| {
button(class="no-print", on:click=move |_| {
info!("Registering reset request for inventory");
sh.dispatch(cx, Message::ResetInventory);
}) { "Reset" } " "
span(role="button", class="no-print", on:click=move |_| {
button(class="no-print", on:click=move |_| {
info!("Registering save request for inventory");
sh.dispatch(cx, Message::SaveState(None));
}) { "Save" } " "

Some files were not shown because too many files have changed in this diff Show More