Compare commits

...

81 Commits

Author SHA1 Message Date
876fa8fb33 wip: flake updates 2025-07-21 19:28:15 -04:00
d4fce72333 chore: update some cert stuff 2025-07-21 19:28:09 -04:00
e59eff8ae7 fix: plan creation was broken 2025-06-02 17:30:48 -04:00
cda5b02a37 fix: bad lorri hook eval 2024-10-08 19:28:43 -04:00
cc66c1f4f5 dev: refactor out into more composable pieces 2024-09-28 10:01:03 -04:00
4ed0b6f8fb feat: Allow restoring deleted inventory items 2024-09-28 10:01:03 -04:00
c77fa24515 refactor: recipe conversion from recipe_entry 2024-09-23 20:10:20 -04:00
6a5046d3c0 maid: formatting 2024-09-23 20:09:46 -04:00
263abda17b fix: various nix build issues 2024-09-23 20:05:43 -04:00
df88c2b7bd fix: busy_timeout to play nice with litestream 2024-09-23 15:14:04 -04:00
8000b7d317 feat: show serving count in the UI in all the places 2024-07-25 15:50:19 -04:00
aba1e114cf maid: cleanup warnings 2024-07-13 20:11:11 -04:00
9126d434d5 fix: Turns out that indexed db doesn't support Map
Had to modify the wasm-bindgen serialization to use objects instead
of the Map type.
2024-07-12 21:54:26 -04:00
548f336e1a dev: use better recipe keys
Having a dedicated object store makes a number of things simpler.
2024-07-12 19:18:24 -04:00
9f3b11a01f dev: migrate recipes from localstorage to indexeddb 2024-07-12 19:12:15 -04:00
f173204d2d dev: use cursor for the get_all_recipes 2024-07-12 18:48:25 -04:00
1f90cc2ef6 feat: migrate user_data and app_state
from localstorage to indexeddb
2024-07-12 18:26:13 -04:00
ed44e929f4 refactor: cleanup and make our upgrade logic more robust 2024-07-12 18:05:20 -04:00
51d165a50b dev: indexeddb indexes on recipe category and serving_count 2024-07-12 17:51:25 -04:00
84cc2a2713 refactor: have two object stores 2024-07-12 01:27:55 -04:00
f75652befa fix: cannot have database at version 0 2024-07-12 00:45:03 -04:00
b93edd2701 dev: use indexdb instead of local storage in the storage layer 2024-07-12 00:45:03 -04:00
4767115da6 dev: Add indexeddb and a helper method for it 2024-07-12 00:45:03 -04:00
1c55a315b0 build: generate sourcemap info for stacktraces
hopefully anyway
2024-07-12 00:45:03 -04:00
fe181fb102 cleanup: a bunch of warnings 2024-07-11 18:28:45 -04:00
61112c4e64 Merge branch 'sqlx-upgrade' 2024-07-11 18:13:39 -04:00
24fea84a0a maint: cleanup gitignore 2024-07-11 18:05:38 -04:00
113b03016f dev: Handle serving counts in the api 2024-07-11 18:04:48 -04:00
9833e22e42 upgrade: Schema version and sqlx version
It's complicated but I while debugging I upgraded sqlx. Shouldn't
have mixed up changes like that but I'm lazy and don't want to redo
it so it'll all have to just be here.
2024-07-11 18:04:41 -04:00
1f986e6372 fix: sqlx-prepare really only depends on wasm 2024-07-11 18:03:01 -04:00
63463fb5d6 build: More fixes and tweaks to unify the builds 2024-07-02 20:55:40 -05:00
6087d31aad Merge branch 'look_and_feel' 2024-07-01 16:02:37 -05:00
4ffb481634 docs: cleanup readme 2024-07-01 15:55:42 -05:00
6bc9f2ea2e build: unify make and nix builds 2024-07-01 15:55:42 -05:00
ec18d9de97 feat: recipe schema now has a setting for serving count 2024-07-01 15:55:42 -05:00
9249dca202 build: Makefile enhancements 2024-07-01 15:55:42 -05:00
dac4324c8f maint: cleanup some unused nix dependencies 2024-07-01 15:55:42 -05:00
e3c4a01648 refactor: make the wasm builder a little more configurable. 2024-07-01 15:55:42 -05:00
e1735e4243 maint: use resolver "2" 2024-07-01 15:55:42 -05:00
651f0cb264 build: get rid of wasm-pack
It get's doing naughty things with network access.
2024-07-01 15:55:42 -05:00
3e853f51eb build: fix rust-tls resolver issues 2024-07-01 15:55:37 -05:00
251cbfa5c7 build: use rustls 2024-07-01 15:52:37 -05:00
1b6023a03e maint: having the wasm-pack version in the logs is useful 2024-07-01 15:52:37 -05:00
3e675b47f4 fix: Unsafe recursive object use 2024-07-01 15:52:37 -05:00
6f7d44ff83 Alloy models for browser_state 2024-07-01 15:52:37 -05:00
b105ce3f4b Add some models 2024-07-01 15:52:37 -05:00
0ba5f18b22 Display current plan date at the top 2024-07-01 15:52:37 -05:00
a320351041 NOTE comment. 2024-07-01 15:52:37 -05:00
874a5fdb57 cargo fmt 2024-07-01 15:52:37 -05:00
bb092212ac Stop using singular for normalization 2024-07-01 15:52:37 -05:00
9022503e76 Have a packaging unit for measures 2024-07-01 15:52:37 -05:00
94e1987f09 ui: more layout tweaks 2024-07-01 15:52:37 -05:00
a104ef5f47 ui: normalization and font tweaks 2024-07-01 15:52:37 -05:00
dac529e8e8 ui: Typography tweaks 2024-07-01 15:52:37 -05:00
6e0e00c7f3 docs: comments for the event handling in login 2024-07-01 15:52:37 -05:00
8942eb59a5 ui: Menu font sizes 2024-07-01 15:52:37 -05:00
e80953e987 maint: upgrade wasm-bindgen version 2024-07-01 15:52:37 -05:00
c64605f9e7 maint: Use gloo_net directly 2024-07-01 15:52:37 -05:00
d7cea46427 fix: Issue with request blocking occuring on login 2024-07-01 15:52:37 -05:00
45737f24e4 UI: Cleansheet CSS redesign
Initial skeleton and layout is working.
Still needs a bunch of tweaks.
2024-07-01 15:52:37 -05:00
61634cd682 maint: Update rust version in nix flake 2023-12-25 14:19:45 -06:00
0eee2e33bf dev: Update wasm-pack version
Motivated by this bug in v0.11.0:
https://github.com/rustwasm/wasm-pack/issues/1247
2023-11-26 20:35:50 -05:00
cbe7957844 dev: Add some explicit clarity to the wasm-pack step 2023-11-26 20:23:45 -05:00
b999359e95 dev: Nix flake cleanup 2023-11-25 22:22:41 -05:00
c672459ec9 dev: Script for running when not in nix 2023-11-25 22:22:41 -05:00
a399858728 maint: Cleanup a bunch of stuff
:wq
2023-11-25 22:22:18 -05:00
e6b827ca21 Enable debug logs when building with Makefile 2023-11-25 22:20:42 -05:00
1432dcea13 feat: Use a web component
A more ergonomic number spinner on mobile.
A cleaner number spinner interface.
2023-11-25 22:20:42 -05:00
a3aa579fa5 feat: some additional js_lib stuff for logging 2023-11-25 22:20:42 -05:00
50eecf9a7c Bring in web component library 2023-11-25 22:20:42 -05:00
39456bb35d
Merge pull request #28 from durin42/push-wsksktrvyzky
cleanup: remove .DS_Store cruft
2023-11-25 22:20:12 -05:00
45b5c84d7c
Merge pull request #29 from durin42/push-kmpkplpoxlys
cleanup: remove redundant clone
2023-11-25 22:19:52 -05:00
Augie Fackler
e30555aabe cleanup: remove redundant clone
```
warning: call to `.clone()` on a reference in this situation does nothing
   --> web/src/app_state.rs:441:86
    |
441 | ..., &local_store, original.clone()).await
    |                            ^^^^^^^^ help: remove this redundant call
    |
    = note: the type `sycamore::reactive::Signal<AppState>` does not implement `Clone`, so calling `clone` on `&sycamore::reactive::Signal<AppState>` copies the reference, which does not do anything and can be removed
```
2023-11-25 11:17:25 -05:00
Augie Fackler
e78116ff8d cleanup: remove .DS_Store cruft 2023-11-25 10:58:01 -05:00
0b7f513f27 fix: Issue when loading user state from local storage 2023-09-23 13:44:16 -04:00
4cefe42072 Pin serde
This needs to stick around due to https://github.com/serde-rs/serde/issues/2538.
2023-08-19 13:29:11 -04:00
db03d603c3 Store the use_staples setting in localstore 2023-07-24 20:56:30 -04:00
2ea0339ad1 LinearSignal as a helper type 2023-07-24 19:37:31 -04:00
b496cf9568 Store app state atomically as one json blob 2023-07-22 16:14:23 -05:00
806fdd2721 When we select a plan date store that date in the app_state 2023-05-31 15:56:16 -04:00
acc922787d Commit lockfile 2023-04-03 14:56:36 -04:00
107 changed files with 4675 additions and 1816 deletions

BIN
.DS_Store vendored

Binary file not shown.

9
.envrc Normal file
View File

@ -0,0 +1,9 @@
if has lorri; then
eval "$(lorri direnv)"
elif has nix; then
echo "Using flake fallback since lorri isn't installed"
use flake
else
# noop
echo "Unsupported direnv configuration. We need nix flake support and lorri installed"
fi

5
.gitignore vendored
View File

@ -1,11 +1,10 @@
target/ target/
.lsp/ .lsp/
.clj-kondo/
web/dist/ web/dist/
webdist/
nix/*/result nix/*/result
result result
.vscode/ .vscode/
.session_store/ .session_store/
.gitignore/ .gitignore/
.DS_Store/ .DS_Store/
.env

2406
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,13 +1,11 @@
[workspace] [workspace]
members = [ "recipes", "kitchen", "web", "api" ] members = [ "recipes", "kitchen", "web", "api" ]
resolver = "2"
[patch.crates-io] [patch.crates-io]
# TODO(jwall): When the fix for RcSignal Binding is released we can drop this patch. # TODO(jwall): When the fix for RcSignal Binding is released we can drop this patch.
sycamore = { git = "https://github.com/sycamore-rs/sycamore/", rev = "5d49777b4a66fb5730c40898fd2ee8cde15bcdc3" } sycamore = { git = "https://github.com/sycamore-rs/sycamore/", rev = "5d49777b4a66fb5730c40898fd2ee8cde15bcdc3" }
sycamore-router = { git = "https://github.com/sycamore-rs/sycamore/", rev = "5d49777b4a66fb5730c40898fd2ee8cde15bcdc3" } sycamore-router = { git = "https://github.com/sycamore-rs/sycamore/", rev = "5d49777b4a66fb5730c40898fd2ee8cde15bcdc3" }
# NOTE(jwall): We are maintaining a patch to remove the unstable async_std_feature. It breaks in our project on
# Rust v1.64
sqlx = { git = "https://github.com/zaphar/sqlx", branch = "remove_unstable_async_std_feature" }
[profile.release] [profile.release]
lto = true lto = true

View File

@ -14,6 +14,8 @@
mkfile_path := $(abspath $(lastword $(MAKEFILE_LIST))) mkfile_path := $(abspath $(lastword $(MAKEFILE_LIST)))
mkfile_dir := $(dir $(mkfile_path)) mkfile_dir := $(dir $(mkfile_path))
sqlite_url := sqlite://$(mkfile_dir)/.session_store/store.db sqlite_url := sqlite://$(mkfile_dir)/.session_store/store.db
export out := dist
export project := kitchen
kitchen: wasm kitchen/src/*.rs kitchen: wasm kitchen/src/*.rs
cd kitchen; cargo build cd kitchen; cargo build
@ -27,15 +29,19 @@ static-prep: web/index.html web/favicon.ico web/static/*.css
cp -r web/favicon.ico web/dist/ cp -r web/favicon.ico web/dist/
cp -r web/static web/dist/ cp -r web/static web/dist/
wasmrelease: wasmrelease-dist static-prep wasmrelease: wasm-opt static-prep
wasm-opt: wasmrelease-dist
cd web; sh ../scripts/wasm-opt.sh release
wasmrelease-dist: web/src/*.rs web/src/components/*.rs wasmrelease-dist: web/src/*.rs web/src/components/*.rs
cd web; wasm-pack build --mode no-install --release --target web --out-dir dist/ cd web; sh ../scripts/wasm-build.sh release
wasm: wasm-dist static-prep wasm: wasm-dist static-prep
wasm-dist: web/src/*.rs web/src/components/*.rs wasm-dist: web/src/*.rs web/src/components/*.rs
cd web; wasm-pack build --mode no-install --target web --out-dir dist/ cd web; sh ../scripts/wasm-build.sh debug
cd web; sh ../scripts/wasm-sourcemap.sh
clean: clean:
rm -rf web/dist/* rm -rf web/dist/*
@ -50,5 +56,5 @@ sqlx-add-%:
sqlx-revert: sqlx-revert:
cd kitchen; cargo sqlx migrate revert --database-url $(sqlite_url) cd kitchen; cargo sqlx migrate revert --database-url $(sqlite_url)
sqlx-prepare: sqlx-prepare: wasm
cd kitchen; cargo sqlx prepare --database-url $(sqlite_url) cd kitchen; cargo sqlx prepare --database-url $(sqlite_url)

View File

@ -6,10 +6,12 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
serde = "1.0.144"
recipes = { path = "../recipes" } recipes = { path = "../recipes" }
chrono = "0.4.22" chrono = "0.4.22"
[dependencies.serde]
version = "1.0.204"
features = ["derive"]
[dependencies.axum] [dependencies.axum]
version = "0.5.16" version = "0.5.16"
@ -18,4 +20,4 @@ optional = true
[features] [features]
default = [] default = []
server = ["axum"] server = ["axum"]
browser = [] browser = []

103
flake.lock generated
View File

@ -1,5 +1,43 @@
{ {
"nodes": { "nodes": {
"cargo-wasm2map-src": {
"flake": false,
"locked": {
"lastModified": 1693927731,
"narHash": "sha256-oqJ9ZZLvUK57A9Kf6L4pPrW6nHqb+18+JGKj9HfIaaM=",
"owner": "mtolmacs",
"repo": "wasm2map",
"rev": "c7d80748b7f3af37df24770b9330b17aa9599e3e",
"type": "github"
},
"original": {
"owner": "mtolmacs",
"repo": "wasm2map",
"type": "github"
}
},
"fenix": {
"inputs": {
"nixpkgs": [
"naersk",
"nixpkgs"
],
"rust-analyzer-src": "rust-analyzer-src"
},
"locked": {
"lastModified": 1752475459,
"narHash": "sha256-z6QEu4ZFuHiqdOPbYss4/Q8B0BFhacR8ts6jO/F/aOU=",
"owner": "nix-community",
"repo": "fenix",
"rev": "bf0d6f70f4c9a9cf8845f992105652173f4b617f",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "fenix",
"type": "github"
}
},
"flake-compat": { "flake-compat": {
"flake": false, "flake": false,
"locked": { "locked": {
@ -31,21 +69,6 @@
"type": "github" "type": "github"
} }
}, },
"flake-utils_2": {
"locked": {
"lastModified": 1659877975,
"narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"gitignore": { "gitignore": {
"flake": false, "flake": false,
"locked": { "locked": {
@ -64,14 +87,15 @@
}, },
"naersk": { "naersk": {
"inputs": { "inputs": {
"fenix": "fenix",
"nixpkgs": "nixpkgs" "nixpkgs": "nixpkgs"
}, },
"locked": { "locked": {
"lastModified": 1671096816, "lastModified": 1752689277,
"narHash": "sha256-ezQCsNgmpUHdZANDCILm3RvtO1xH8uujk/+EqNvzIOg=", "narHash": "sha256-uldUBFkZe/E7qbvxa3mH1ItrWZyT6w1dBKJQF/3ZSsc=",
"owner": "nix-community", "owner": "nix-community",
"repo": "naersk", "repo": "naersk",
"rev": "d998160d6a076cfe8f9741e56aeec7e267e3e114", "rev": "0e72363d0938b0208d6c646d10649164c43f4d64",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -82,25 +106,27 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1678987615, "lastModified": 1752077645,
"narHash": "sha256-lF4agoB7ysQGNHRXvOqxtSKIZrUZwClA85aASahQlYM=", "narHash": "sha256-HM791ZQtXV93xtCY+ZxG1REzhQenSQO020cu6rHtAPk=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "194c2aa446b2b059886bb68be15ef6736d5a8c31", "rev": "be9e214982e20b8310878ac2baa063a961c1bdf6",
"type": "github" "type": "github"
}, },
"original": { "original": {
"id": "nixpkgs", "owner": "NixOS",
"type": "indirect" "ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
} }
}, },
"nixpkgs_2": { "nixpkgs_2": {
"locked": { "locked": {
"lastModified": 1679174867, "lastModified": 1753135609,
"narHash": "sha256-fFxb8wN3bjOMvHPr63Iyzo3cuHhQzWW03UkckfTeBWU=", "narHash": "sha256-//xMo8MwSw1HoTnIk455J7NIJpsDqwVyD69MOXb7gZM=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "f5ec87b82832736f1624874fd34eb60c0b68bdd6", "rev": "5d9316e7fb2d6395818d506ef997530eba1545b7",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -111,6 +137,7 @@
}, },
"root": { "root": {
"inputs": { "inputs": {
"cargo-wasm2map-src": "cargo-wasm2map-src",
"flake-compat": "flake-compat", "flake-compat": "flake-compat",
"flake-utils": "flake-utils", "flake-utils": "flake-utils",
"gitignore": "gitignore", "gitignore": "gitignore",
@ -119,19 +146,35 @@
"rust-overlay": "rust-overlay" "rust-overlay": "rust-overlay"
} }
}, },
"rust-analyzer-src": {
"flake": false,
"locked": {
"lastModified": 1752428706,
"narHash": "sha256-EJcdxw3aXfP8Ex1Nm3s0awyH9egQvB2Gu+QEnJn2Sfg=",
"owner": "rust-lang",
"repo": "rust-analyzer",
"rev": "591e3b7624be97e4443ea7b5542c191311aa141d",
"type": "github"
},
"original": {
"owner": "rust-lang",
"ref": "nightly",
"repo": "rust-analyzer",
"type": "github"
}
},
"rust-overlay": { "rust-overlay": {
"inputs": { "inputs": {
"flake-utils": "flake-utils_2",
"nixpkgs": [ "nixpkgs": [
"nixpkgs" "nixpkgs"
] ]
}, },
"locked": { "locked": {
"lastModified": 1678397831, "lastModified": 1750964660,
"narHash": "sha256-7xbxSoiht8G+Zgz55R0ILPsTdbnksILCDMIxeg8Buns=", "narHash": "sha256-YQ6EyFetjH1uy5JhdhRdPe6cuNXlYpMAQePFfZj4W7M=",
"owner": "oxalica", "owner": "oxalica",
"repo": "rust-overlay", "repo": "rust-overlay",
"rev": "bdf08e2f43488283eeb25b4a7e7ecba9147a955c", "rev": "04f0fcfb1a50c63529805a798b4b5c21610ff390",
"type": "github" "type": "github"
}, },
"original": { "original": {

View File

@ -2,17 +2,18 @@
description = "kitchen"; description = "kitchen";
# Pin nixpkgs # Pin nixpkgs
inputs = { inputs = {
nixpkgs.url = "github:NixOS/nixpkgs"; nixpkgs.url = "github:NixOS/nixpkgs";
gitignore = { url = "github:hercules-ci/gitignore.nix"; flake = false; }; gitignore = { url = "github:hercules-ci/gitignore.nix"; flake = false; };
flake-utils.url = "github:numtide/flake-utils"; flake-utils.url = "github:numtide/flake-utils";
rust-overlay = { rust-overlay = {
url = "github:oxalica/rust-overlay?ref=stable"; url = "github:oxalica/rust-overlay?ref=stable";
inputs.nixpkgs.follows = "nixpkgs"; inputs.nixpkgs.follows = "nixpkgs";
}; };
naersk.url = "github:nix-community/naersk"; naersk.url = "github:nix-community/naersk";
flake-compat = { url = github:edolstra/flake-compat; flake = false; }; flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
cargo-wasm2map-src = { url = "github:mtolmacs/wasm2map"; flake = false; };
}; };
outputs = {self, nixpkgs, flake-utils, rust-overlay, naersk, gitignore, flake-compat}: outputs = {nixpkgs, flake-utils, rust-overlay, naersk, cargo-wasm2map-src, ...}:
let let
kitchenGen = (import ./nix/kitchen/default.nix); kitchenGen = (import ./nix/kitchen/default.nix);
kitchenWasmGen = (import ./nix/kitchenWasm/default.nix); kitchenWasmGen = (import ./nix/kitchenWasm/default.nix);
@ -25,7 +26,7 @@
let let
overlays = [ rust-overlay.overlays.default ]; overlays = [ rust-overlay.overlays.default ];
pkgs = import nixpkgs { inherit system overlays; }; pkgs = import nixpkgs { inherit system overlays; };
rust-wasm = pkgs.rust-bin.stable."1.68.0".default.override { rust-wasm = pkgs.rust-bin.stable."1.87.0".default.override {
extensions = [ "rust-src" ]; extensions = [ "rust-src" ];
# Add wasm32 as an extra target besides the native target. # Add wasm32 as an extra target besides the native target.
targets = [ "wasm32-unknown-unknown" ]; targets = [ "wasm32-unknown-unknown" ];
@ -42,9 +43,23 @@
wasm-pack = wasm-packGen { wasm-pack = wasm-packGen {
inherit rust-wasm naersk-lib pkgs; inherit rust-wasm naersk-lib pkgs;
}; };
cargo-wasm2map = naersk-lib.buildPackage {
pname = "cargo-wasm2map";
version = "v0.1.0";
build-inputs = [ rust-wasm ];
src = cargo-wasm2map-src;
cargoBuildOptions = opts: opts ++ ["-p" "cargo-wasm2map" ];
};
wasm-bindgen = pkgs.callPackage wasm-bindgenGen { inherit pkgs; }; wasm-bindgen = pkgs.callPackage wasm-bindgenGen { inherit pkgs; };
kitchenWasm = kitchenWasmGen { kitchenWasm = kitchenWasmGen {
inherit pkgs rust-wasm wasm-bindgen version; inherit pkgs rust-wasm wasm-bindgen version cargo-wasm2map;
lockFile = ./Cargo.lock;
outputHashes = {
# I'm maintaining some patches for these so the lockfile hashes are a little
# incorrect. We override those here.
"wasm-web-component-0.2.0" = "sha256-quuPgzGb2F96blHmD3BAUjsWQYbSyJGZl27PVrwL92k=";
"sycamore-0.8.2" = "sha256-D968+8C5EelGGmot9/LkAlULZOf/Cr+1WYXRCMwb1nQ=";
};
}; };
kitchen = (kitchenGen { kitchen = (kitchenGen {
inherit pkgs version naersk-lib kitchenWasm rust-wasm; inherit pkgs version naersk-lib kitchenWasm rust-wasm;
@ -54,8 +69,15 @@
root = ./.; root = ./.;
}); });
kitchenWasmDebug = kitchenWasmGen { kitchenWasmDebug = kitchenWasmGen {
inherit pkgs rust-wasm wasm-bindgen version; inherit pkgs rust-wasm wasm-bindgen version cargo-wasm2map;
features = "--features debug_logs"; lockFile = ./Cargo.lock;
outputHashes = {
# I'm maintaining some patches for these so the lockfile hashes are a little
# incorrect. We override those here.
"wasm-web-component-0.2.0" = "sha256-quuPgzGb2F96blHmD3BAUjsWQYbSyJGZl27PVrwL92k=";
"sycamore-0.8.2" = "sha256-D968+8C5EelGGmot9/LkAlULZOf/Cr+1WYXRCMwb1nQ=";
};
#features = "--features debug_logs";
}; };
kitchenDebug = (kitchenGen { kitchenDebug = (kitchenGen {
inherit pkgs version naersk-lib rust-wasm; inherit pkgs version naersk-lib rust-wasm;
@ -82,9 +104,9 @@
program = "${kitchen}/bin/kitchen"; program = "${kitchen}/bin/kitchen";
}; };
devShell = pkgs.callPackage ./nix/devShell/default.nix { devShell = pkgs.callPackage ./nix/devShell/default.nix {
inherit rust-wasm wasm-bindgen; inherit rust-wasm wasm-bindgen cargo-wasm2map;
wasm-pack-hermetic = wasm-pack; wasm-pack-hermetic = wasm-pack;
}; };
} }
); );
} }

View File

@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "select recipe_id, recipe_text, category, serving_count from recipes where user_id = ?",
"describe": {
"columns": [
{
"name": "recipe_id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "recipe_text",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "category",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "serving_count",
"ordinal": 3,
"type_info": "Int64"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true,
true,
true
]
},
"hash": "01018c919131848f8fa907a1356a1356b2aa6ca0912de8a296f5fef3486b5ff9"
}

View File

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "select password_hashed from users where id = ?",
"describe": {
"columns": [
{
"name": "password_hashed",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "104f07472670436d3eee1733578bbf0c92dc4f965d3d13f9bf4bfbc92958c5b6"
}

View File

@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "with latest_dates as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes\n where user_id = ?\n group by user_id\n)\n\nselect\n extra_items.name,\n extra_items.amt\nfrom latest_dates\ninner join extra_items on\n latest_dates.user_id = extra_items.user_id\n and latest_dates.plan_date = extra_items.plan_date",
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "amt",
"ordinal": 1,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false
]
},
"hash": "10de1e9950d7d3ae7f017b9175a1cee4ff7fcbc7403a39ea02930c75b4b9160a"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from modified_amts where user_id = ? and plan_date = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "10e1c111a16d647a106a3147f4e61e34b0176860ca99cb62cb43dc72550ad990"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into filtered_ingredients(user_id, name, form, measure_type, plan_date)\n values (?, ?, ?, ?, date()) on conflict(user_id, name, form, measure_type, plan_date) DO NOTHING",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "160a9dfccf2e91a37d81f75eba21ec73105a7453c4f1fe76a430d04e525bc6cd"
}

View File

@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "select plan_date as \"plan_date: NaiveDate\", recipe_id, count\nfrom plan_recipes\nwhere\n user_id = ?\n and date(plan_date) > ?\norder by user_id, plan_date",
"describe": {
"columns": [
{
"name": "plan_date: NaiveDate",
"ordinal": 0,
"type_info": "Date"
},
{
"name": "recipe_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "count",
"ordinal": 2,
"type_info": "Int64"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
false,
false
]
},
"hash": "19832e3582c05ed49c676fde33cde64274379a83a8dd130f6eec96c1d7250909"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into staples (user_id, content) values (?, ?)\n on conflict(user_id) do update set content = excluded.content",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "1b4a7250e451991ee7e642c6389656814e0dd00c94e59383c02af6313bc76213"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into modified_amts(user_id, name, form, measure_type, amt, plan_date)\n values (?, ?, ?, ?, ?, ?) on conflict (user_id, name, form, measure_type, plan_date) do update set amt=excluded.amt",
"describe": {
"columns": [],
"parameters": {
"Right": 6
},
"nullable": []
},
"hash": "1b6fd91460bef61cf02f210404a4ca57b520c969d1f9613e7101ee6aa7a9962a"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from filtered_ingredients where user_id = ? and plan_date = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "23beb05e40cf011170182d4e98cdf1faa3d8df6e5956e471245e666f32e56962"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into category_mappings\n (user_id, ingredient_name, category_name)\n values (?, ?, ?)\n on conflict (user_id, ingredient_name)\n do update set category_name=excluded.category_name\n",
"describe": {
"columns": [],
"parameters": {
"Right": 3
},
"nullable": []
},
"hash": "2582522f8ca9f12eccc70a3b339d9030aee0f52e62d6674cfd3862de2a68a177"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from plan_table where user_id = ? and plan_date = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "27aa0a21f534cdf580841fa111136fc26cf1a0ca4ddb308c12f3f8f5a62d6178"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into plan_table (user_id, plan_date) values (?, ?)\n on conflict (user_id, plan_date) do nothing;",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "288535e7b9e1f02ad1b677e3dddc85f38c0766ce16d26fc1bdd2bf90ab9a7f7c"
}

View File

@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "select plan_date as \"plan_date: NaiveDate\", recipe_id, count\n from plan_recipes\nwhere\n user_id = ?\n and plan_date = ?",
"describe": {
"columns": [
{
"name": "plan_date: NaiveDate",
"ordinal": 0,
"type_info": "Date"
},
{
"name": "recipe_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "count",
"ordinal": 2,
"type_info": "Int64"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
false,
false
]
},
"hash": "2e076acd2405d234daaa866e5a2ac1e10989fc8d2820f90aa722464a7b17db6b"
}

View File

@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "select ingredient_name, category_name from category_mappings where user_id = ?",
"describe": {
"columns": [
{
"name": "ingredient_name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "category_name",
"ordinal": 1,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false
]
},
"hash": "37f382be1b53efd2f79a0d59ae6a8717f88a86908a7a4128d5ed7339147ca59d"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into extra_items (user_id, name, plan_date, amt)\nvalues (?, ?, date(), ?)\non conflict (user_id, name, plan_date) do update set amt=excluded.amt",
"describe": {
"columns": [],
"parameters": {
"Right": 3
},
"nullable": []
},
"hash": "3caefb86073c47b5dd5d05f639ddef2f7ed2d1fd80f224457d1ec34243cc56c7"
}

View File

@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "with latest_dates as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes\n where user_id = ?\n group by user_id\n)\n\nselect\n modified_amts.name,\n modified_amts.form,\n modified_amts.measure_type,\n modified_amts.amt\nfrom latest_dates\ninner join modified_amts on\n latest_dates.user_id = modified_amts.user_id\n and latest_dates.plan_date = modified_amts.plan_date",
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "form",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "measure_type",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "amt",
"ordinal": 3,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "3e43f06f5c2e959f66587c8d74696d6db27d89fd2f7d7e1ed6fa5016b4bd1a91"
}

View File

@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "select\n name,\n amt\nfrom extra_items\nwhere\n user_id = ?\n and plan_date = ?",
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "amt",
"ordinal": 1,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
false
]
},
"hash": "4237ff804f254c122a36a14135b90434c6576f48d3a83245503d702552ea9f30"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into filtered_ingredients(user_id, name, form, measure_type, plan_date)\n values (?, ?, ?, ?, ?) on conflict(user_id, name, form, measure_type, plan_date) DO NOTHING",
"describe": {
"columns": [],
"parameters": {
"Right": 5
},
"nullable": []
},
"hash": "5883c4a57def93cca45f8f9d81c8bba849547758217cd250e7ab28cc166ab42b"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into users (id, password_hashed) values (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "5d743897fb0d8fd54c3708f1b1c6e416346201faa9e28823c1ba5a421472b1fa"
}

View File

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "select content from staples where user_id = ?",
"describe": {
"columns": [
{
"name": "content",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "64af3f713eb4c61ac02cab2dfea83d0ed197e602e99079d4d32cb38d677edf2e"
}

View File

@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "select\n modified_amts.name,\n modified_amts.form,\n modified_amts.measure_type,\n modified_amts.amt\nfrom modified_amts\nwhere\n user_id = ?\n and plan_date = ?",
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "form",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "measure_type",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "amt",
"ordinal": 3,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "699ff0f0d4d4c6e26a21c1922a5b5249d89ed1677680a2276899a7f8b26344ee"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from recipes where user_id = ? and recipe_id = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "6c43908d90f229b32ed8b1b076be9b452a995e1b42ba2554e947c515b031831a"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into modified_amts(user_id, name, form, measure_type, amt, plan_date)\n values (?, ?, ?, ?, ?, date()) on conflict (user_id, name, form, measure_type, plan_date) do update set amt=excluded.amt",
"describe": {
"columns": [],
"parameters": {
"Right": 5
},
"nullable": []
},
"hash": "6e28698330e42fd6c87ba1e6f1deb664c0d3995caa2b937ceac8c908e98aded6"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from extra_items where user_id = ? and plan_date = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "6f11d90875a6230766a5f9bd1d67665dc4d00c13d7e81b0d18d60baa67987da9"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from sessions where id = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "7578157607967a6a4c60f12408c5d9900d15b429a49681a4cae4e02d31c524ec"
}

View File

@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "select\n filtered_ingredients.name,\n filtered_ingredients.form,\n filtered_ingredients.measure_type\nfrom filtered_ingredients\nwhere\n user_id = ?\n and plan_date = ?",
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "form",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "measure_type",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
false,
false
]
},
"hash": "7695a0602395006f9b76ecd4d0cb5ecd5dee419b71b3b0b9ea4f47a83f3df41a"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into plan_recipes (user_id, plan_date, recipe_id, count) values (?, ?, ?, ?)\n on conflict (user_id, plan_date, recipe_id) do update set count=excluded.count;",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "83824ea638cb64c524f5c8984ef6ef28dfe781f0abf168abc4ae9a51e6e0ae88"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into categories (user_id, category_text) values (?, ?)\n on conflict(user_id) do update set category_text=excluded.category_text",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "8490e1bb40879caed62ac1c38cb9af48246f3451b6f7f1e1f33850f1dbe25f58"
}

View File

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "select session_value from sessions where id = ?",
"describe": {
"columns": [
{
"name": "session_value",
"ordinal": 0,
"type_info": "Blob"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "928a479ca0f765ec7715bf8784c5490e214486edbf5b78fd501823feb328375b"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from plan_recipes where user_id = ? and plan_date = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "93af0c367a0913d49c92aa69022fa30fc0564bd4dbab7f3ae78673a01439cd6e"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into sessions (id, session_value) values (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "9ad4acd9b9d32c9f9f441276aa71a17674fe4d65698848044778bd4aef77d42d"
}

View File

@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "with max_date as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes group by user_id\n)\n\nselect plan_recipes.plan_date as \"plan_date: NaiveDate\", plan_recipes.recipe_id, plan_recipes.count\n from plan_recipes\n inner join max_date on plan_recipes.user_id = max_date.user_id\nwhere\n plan_recipes.user_id = ?\n and plan_recipes.plan_date = max_date.plan_date",
"describe": {
"columns": [
{
"name": "plan_date: NaiveDate",
"ordinal": 0,
"type_info": "Date"
},
{
"name": "recipe_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "count",
"ordinal": 2,
"type_info": "Int64"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false
]
},
"hash": "ad3408cd773dd8f9308255ec2800171638a1aeda9817c57fb8360f97115f8e97"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into extra_items (user_id, name, amt, plan_date)\nvalues (?, ?, ?, ?)\non conflict (user_id, name, plan_date) do update set amt=excluded.amt",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "ba07658eb11f9d6cfdb5dbee4496b2573f1e51f4b4d9ae760eca3b977649b5c7"
}

View File

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "select category_text from categories where user_id = ?",
"describe": {
"columns": [
{
"name": "category_text",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true
]
},
"hash": "c988364f9f83f4fa8bd0e594bab432ee7c9ec47ca40f4d16e5e2a8763653f377"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "delete from sessions",
"describe": {
"columns": [],
"parameters": {
"Right": 0
},
"nullable": []
},
"hash": "d84685a82585c5e4ae72c86ba1fe6e4a7241c4c3c9e948213e5849d956132bad"
}

View File

@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "with latest_dates as (\n select user_id, max(date(plan_date)) as plan_date from plan_recipes\n where user_id = ?\n group by user_id\n)\n\nselect\n filtered_ingredients.name,\n filtered_ingredients.form,\n filtered_ingredients.measure_type\nfrom latest_dates\ninner join filtered_ingredients on\n latest_dates.user_id = filtered_ingredients.user_id\n and latest_dates.plan_date = filtered_ingredients.plan_date",
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "form",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "measure_type",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false
]
},
"hash": "e38183e2e16afa308672044e5d314296d7cd84c1ffedcbfe790743547dc62de8"
}

View File

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "insert into recipes (user_id, recipe_id, recipe_text, category, serving_count) values (?, ?, ?, ?, ?)\n on conflict(user_id, recipe_id) do update set recipe_text=excluded.recipe_text, category=excluded.category",
"describe": {
"columns": [],
"parameters": {
"Right": 5
},
"nullable": []
},
"hash": "eb99a37e18009e0dd46caccacea57ba0b25510d80a4e4a282a5ac2be50bba81c"
}

View File

@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "select recipe_id, recipe_text, category, serving_count from recipes where user_id = ? and recipe_id = ?",
"describe": {
"columns": [
{
"name": "recipe_id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "recipe_text",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "category",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "serving_count",
"ordinal": 3,
"type_info": "Int64"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
true,
true
]
},
"hash": "ee0491c7d1a31ef80d7abe6ea4c9a8b0618dba58a0a8bceef7bdafec98ccd543"
}

View File

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "select distinct plan_date as \"plan_date: NaiveDate\" from plan_table\nwhere user_id = ?",
"describe": {
"columns": [
{
"name": "plan_date: NaiveDate",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "fd818a6b1c800c2014b5cfe8a923ac9228832b11d7575585cf7930fbf91306d1"
}

View File

@ -18,14 +18,19 @@ async-trait = "0.1.57"
async-session = "3.0.0" async-session = "3.0.0"
ciborium = "0.2.0" ciborium = "0.2.0"
tower = "0.4.13" tower = "0.4.13"
serde = "1.0.144"
cookie = "0.17.0" cookie = "0.17.0"
chrono = "0.4.22"
metrics = "0.20.1" metrics = "0.20.1"
metrics-exporter-prometheus = "0.11.0" metrics-exporter-prometheus = "0.11.0"
futures = "0.3" futures = "0.3"
metrics-process = "1.0.8" metrics-process = "1.0.8"
[dependencies.chrono]
version = "0.4.22"
features = ["serde"]
[dependencies.serde]
version = "1.0.204"
[dependencies.argon2] [dependencies.argon2]
version = "0.5.0" version = "0.5.0"
@ -62,5 +67,5 @@ version = "1.12.0"
features = ["tokio1"] features = ["tokio1"]
[dependencies.sqlx] [dependencies.sqlx]
version = "0.6.2" version = "0.7"
features = ["sqlite", "runtime-async-std-rustls", "offline", "chrono"] features = ["sqlite", "runtime-async-std", "tls-rustls", "chrono"]

View File

@ -2,4 +2,4 @@
fn main() { fn main() {
// trigger recompilation when a new migration is added // trigger recompilation when a new migration is added
println!("cargo:rerun-if-changed=migrations"); println!("cargo:rerun-if-changed=migrations");
} }

View File

@ -0,0 +1,2 @@
-- Add down migration script here
ALTER TABLE recipes DROP COLUMN serving_count;

View File

@ -0,0 +1,2 @@
-- Add up migration script here
ALTER TABLE recipes ADD COLUMN serving_count INT;

View File

@ -1,4 +1,3 @@
use std::collections::BTreeMap;
// Copyright 2022 Jeremy Wall // Copyright 2022 Jeremy Wall
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +11,7 @@ use std::collections::BTreeMap;
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
use std::collections::BTreeMap;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use std::{collections::BTreeSet, net::SocketAddr}; use std::{collections::BTreeSet, net::SocketAddr};

View File

@ -22,6 +22,7 @@ use tracing::{debug, instrument};
use super::RecipeEntry; use super::RecipeEntry;
#[allow(dead_code)]
#[derive(Debug)] #[derive(Debug)]
pub struct Error(String); pub struct Error(String);
@ -98,7 +99,7 @@ impl AsyncFileStore {
let file_name = entry.file_name().to_string_lossy().to_string(); let file_name = entry.file_name().to_string_lossy().to_string();
debug!("adding recipe file {}", file_name); debug!("adding recipe file {}", file_name);
let recipe_contents = read_to_string(entry.path()).await?; let recipe_contents = read_to_string(entry.path()).await?;
entry_vec.push(RecipeEntry(file_name, recipe_contents, None)); entry_vec.push(RecipeEntry::new(file_name, recipe_contents));
} else { } else {
warn!( warn!(
file = %entry.path().to_string_lossy(), file = %entry.path().to_string_lossy(),
@ -118,11 +119,12 @@ impl AsyncFileStore {
if recipe_path.exists().await && recipe_path.is_file().await { if recipe_path.exists().await && recipe_path.is_file().await {
debug!("Found recipe file {}", recipe_path.to_string_lossy()); debug!("Found recipe file {}", recipe_path.to_string_lossy());
let recipe_contents = read_to_string(recipe_path).await?; let recipe_contents = read_to_string(recipe_path).await?;
return Ok(Some(RecipeEntry( return Ok(Some(RecipeEntry {
id.as_ref().to_owned(), id: id.as_ref().to_owned(),
recipe_contents, text: recipe_contents,
None, category: None,
))); serving_count: None,
}));
} else { } else {
return Ok(None); return Ok(None);
} }

View File

@ -14,6 +14,7 @@
use async_std::sync::Arc; use async_std::sync::Arc;
use std::collections::BTreeSet; use std::collections::BTreeSet;
use std::str::FromStr; use std::str::FromStr;
use std::time::Duration;
use std::{collections::BTreeMap, path::Path}; use std::{collections::BTreeMap, path::Path};
use argon2::{ use argon2::{
@ -259,6 +260,7 @@ impl SqliteStore {
std::fs::create_dir_all(&path)?; std::fs::create_dir_all(&path)?;
let url = format!("sqlite://{}/store.db", path.as_ref().to_string_lossy()); let url = format!("sqlite://{}/store.db", path.as_ref().to_string_lossy());
let options = SqliteConnectOptions::from_str(&url)? let options = SqliteConnectOptions::from_str(&url)?
.busy_timeout(Duration::from_secs(5))
.journal_mode(SqliteJournalMode::Wal) .journal_mode(SqliteJournalMode::Wal)
.create_if_missing(true); .create_if_missing(true);
info!(?options, "Connecting to sqlite db"); info!(?options, "Connecting to sqlite db");
@ -429,20 +431,10 @@ impl APIStore for SqliteStore {
user_id: S, user_id: S,
id: S, id: S,
) -> Result<Option<RecipeEntry>> { ) -> Result<Option<RecipeEntry>> {
// NOTE(jwall): We allow dead code becaue Rust can't figure out that
// this code is actually constructed but it's done via the query_as
// macro.
#[allow(dead_code)]
struct RecipeRow {
pub recipe_id: String,
pub recipe_text: Option<String>,
pub category: Option<String>,
}
let id = id.as_ref(); let id = id.as_ref();
let user_id = user_id.as_ref(); let user_id = user_id.as_ref();
let entry = sqlx::query_as!( let entry = sqlx::query!(
RecipeRow, "select recipe_id, recipe_text, category, serving_count from recipes where user_id = ? and recipe_id = ?",
"select recipe_id, recipe_text, category from recipes where user_id = ? and recipe_id = ?",
user_id, user_id,
id, id,
) )
@ -450,40 +442,32 @@ impl APIStore for SqliteStore {
.await? .await?
.iter() .iter()
.map(|row| { .map(|row| {
RecipeEntry( RecipeEntry {
row.recipe_id.clone(), id: row.recipe_id.clone(),
row.recipe_text.clone().unwrap_or_else(|| String::new()), text: row.recipe_text.clone().unwrap_or_else(|| String::new()),
row.category.clone() category: row.category.clone(),
) serving_count: row.serving_count.clone(),
}
}) })
.nth(0); .nth(0);
Ok(entry) Ok(entry)
} }
async fn get_recipes_for_user(&self, user_id: &str) -> Result<Option<Vec<RecipeEntry>>> { async fn get_recipes_for_user(&self, user_id: &str) -> Result<Option<Vec<RecipeEntry>>> {
// NOTE(jwall): We allow dead code becaue Rust can't figure out that let rows = sqlx::query!(
// this code is actually constructed but it's done via the query_as "select recipe_id, recipe_text, category, serving_count from recipes where user_id = ?",
// macro.
#[allow(dead_code)]
struct RecipeRow {
pub recipe_id: String,
pub recipe_text: Option<String>,
pub category: Option<String>,
}
let rows = sqlx::query_as!(
RecipeRow,
"select recipe_id, recipe_text, category from recipes where user_id = ?",
user_id, user_id,
) )
.fetch_all(self.pool.as_ref()) .fetch_all(self.pool.as_ref())
.await? .await?
.iter() .iter()
.map(|row| { .map(|row| {
RecipeEntry( RecipeEntry {
row.recipe_id.clone(), id: row.recipe_id.clone(),
row.recipe_text.clone().unwrap_or_else(|| String::new()), text: row.recipe_text.clone().unwrap_or_else(|| String::new()),
row.category.clone(), category: row.category.clone(),
) serving_count: row.serving_count.clone(),
}
}) })
.collect(); .collect();
Ok(Some(rows)) Ok(Some(rows))
@ -498,13 +482,15 @@ impl APIStore for SqliteStore {
let recipe_id = entry.recipe_id().to_owned(); let recipe_id = entry.recipe_id().to_owned();
let recipe_text = entry.recipe_text().to_owned(); let recipe_text = entry.recipe_text().to_owned();
let category = entry.category(); let category = entry.category();
let serving_count = entry.serving_count();
sqlx::query!( sqlx::query!(
"insert into recipes (user_id, recipe_id, recipe_text, category) values (?, ?, ?, ?) "insert into recipes (user_id, recipe_id, recipe_text, category, serving_count) values (?, ?, ?, ?, ?)
on conflict(user_id, recipe_id) do update set recipe_text=excluded.recipe_text, category=excluded.category", on conflict(user_id, recipe_id) do update set recipe_text=excluded.recipe_text, category=excluded.category",
user_id, user_id,
recipe_id, recipe_id,
recipe_text, recipe_text,
category, category,
serving_count,
) )
.execute(self.pool.as_ref()) .execute(self.pool.as_ref())
.await?; .await?;
@ -520,7 +506,7 @@ impl APIStore for SqliteStore {
user_id, user_id,
recipe_id, recipe_id,
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
} }
transaction.commit().await?; transaction.commit().await?;
@ -552,10 +538,10 @@ impl APIStore for SqliteStore {
user_id, user_id,
date, date,
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
sqlx::query_file!("src/web/storage/init_meal_plan.sql", user_id, date) sqlx::query_file!("src/web/storage/init_meal_plan.sql", user_id, date)
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
for (id, count) in recipe_counts { for (id, count) in recipe_counts {
sqlx::query_file!( sqlx::query_file!(
@ -565,7 +551,7 @@ impl APIStore for SqliteStore {
id, id,
count count
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
} }
transaction.commit().await?; transaction.commit().await?;
@ -645,35 +631,35 @@ impl APIStore for SqliteStore {
user_id, user_id,
date date
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
"delete from plan_recipes where user_id = ? and plan_date = ?", "delete from plan_recipes where user_id = ? and plan_date = ?",
user_id, user_id,
date date
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
"delete from filtered_ingredients where user_id = ? and plan_date = ?", "delete from filtered_ingredients where user_id = ? and plan_date = ?",
user_id, user_id,
date date
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
"delete from modified_amts where user_id = ? and plan_date = ?", "delete from modified_amts where user_id = ? and plan_date = ?",
user_id, user_id,
date date
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
"delete from extra_items where user_id = ? and plan_date = ?", "delete from extra_items where user_id = ? and plan_date = ?",
user_id, user_id,
date date
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
transaction.commit().await?; transaction.commit().await?;
Ok(()) Ok(())
@ -921,7 +907,7 @@ impl APIStore for SqliteStore {
user_id, user_id,
date date
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
for key in filtered_ingredients { for key in filtered_ingredients {
let name = key.name(); let name = key.name();
@ -935,7 +921,7 @@ impl APIStore for SqliteStore {
measure_type, measure_type,
date, date,
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
} }
sqlx::query!( sqlx::query!(
@ -943,7 +929,7 @@ impl APIStore for SqliteStore {
user_id, user_id,
date date
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
// store the modified amts // store the modified amts
for (key, amt) in modified_amts { for (key, amt) in modified_amts {
@ -960,7 +946,7 @@ impl APIStore for SqliteStore {
amt, amt,
date, date,
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
} }
sqlx::query!( sqlx::query!(
@ -968,7 +954,7 @@ impl APIStore for SqliteStore {
user_id, user_id,
date date
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
// Store the extra items // Store the extra items
for (name, amt) in extra_items { for (name, amt) in extra_items {
@ -979,7 +965,7 @@ impl APIStore for SqliteStore {
amt, amt,
date date
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
} }
transaction.commit().await?; transaction.commit().await?;
@ -1007,7 +993,7 @@ impl APIStore for SqliteStore {
form, form,
measure_type, measure_type,
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
} }
// store the modified amts // store the modified amts
@ -1024,13 +1010,13 @@ impl APIStore for SqliteStore {
measure_type, measure_type,
amt, amt,
) )
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
} }
// Store the extra items // Store the extra items
for (name, amt) in extra_items { for (name, amt) in extra_items {
sqlx::query_file!("src/web/storage/store_extra_items.sql", user_id, name, amt) sqlx::query_file!("src/web/storage/store_extra_items.sql", user_id, name, amt)
.execute(&mut transaction) .execute(&mut *transaction)
.await?; .await?;
} }
transaction.commit().await?; transaction.commit().await?;

51
models/browser_state.als Normal file
View File

@ -0,0 +1,51 @@
sig Id {}
sig Text {}
sig Recipe {
, id: one Id
, text: one Text
}
fact {
no r1, r2: Recipe | (r1.id = r2.id) and (r1.text != r2.text)
no r1, r2: Recipe | (r1 != r2) and (r1.id = r2.id)
}
sig Ingredient {}
sig Modifier {}
sig Amt {}
sig ModifiedInventory {
, ingredient: one Ingredient
, modifier: lone Modifier
, amt: one Amt
}
fact {
no mi1, mi2: ModifiedInventory | mi1 != mi2 && (mi1.ingredient = mi2.ingredient) and (mi1.modifier = mi2.modifier)
}
sig DeletedInventory {
, ingredient: one Ingredient
, modifier: lone Modifier
}
fact {
no mi1, mi2: DeletedInventory | mi1 != mi2 && (mi1.ingredient = mi2.ingredient) and (mi1.modifier = mi2.modifier)
}
sig ExtraItems {
, ingredient: one Ingredient
, amt: one Amt
}
sig State {
, recipes: some Recipe
, modified: set ModifiedInventory
, deleted: set DeletedInventory
, extras: set ExtraItems
} {
no rs: Recipe | rs not in recipes
}
run { } for 3 but exactly 2 State, 2 Modifier, exactly 3 ModifiedInventory, exactly 9 Ingredient

17
models/planning.d2 Normal file
View File

@ -0,0 +1,17 @@
Meal Planning: {
shape: sequence_diagram
user: Cook; client: Kitchen frontend; kitchen: Kitchen backend
user -> client: Start new meal Plan
client -> kitchen: new plan created
user -> client: Add recipe to meal plan
client -> kitchen: Update meal plan with recipe
client -> client: cache updated meal plan
user -> client: Do inventory
client -> kitchen: Store inventory mutations
client -> client: cache inventory mutations
user -> client: Undo mutation
client -> kitchen: Store inventory mutations
client -> client: cache inventory mutations
user -> user: Cook recipes
}

125
models/planning.svg Normal file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 94 KiB

View File

@ -1,8 +1,5 @@
let { pkgs, rust-wasm, wasm-pack-hermetic, wasm-bindgen, cargo-wasm2map }:
lib = import ../lib/lib.nix;
in
{ pkgs, rust-wasm, wasm-pack-hermetic, wasm-bindgen }:
with pkgs; with pkgs;
mkShell { mkShell {
buildInputs = (lib.darwin-sdk pkgs) ++ (with pkgs; [wasm-bindgen wasm-pack-hermetic llvm clang rust-wasm]); buildInputs = (with pkgs; [wasm-bindgen wasm-pack-hermetic llvm clang rust-wasm binaryen cargo-wasm2map]);
} }

View File

@ -1,6 +1,3 @@
let
lib = import ../lib/lib.nix;
in
{pkgs ? (import <nixpkgs>) {}, {pkgs ? (import <nixpkgs>) {},
# Because it's a workspace we need the other crates available as source # Because it's a workspace we need the other crates available as source
root, root,
@ -15,7 +12,7 @@ with pkgs;
inherit version; inherit version;
buildInputs = [ rust-wasm libclang ]; buildInputs = [ rust-wasm libclang ];
# However the crate we are building has it's root in specific crate. # However the crate we are building has it's root in specific crate.
nativeBuildInputs = (lib.darwin-sdk pkgs) ++ [llvm clang rust-bindgen]; nativeBuildInputs = [llvm clang rust-bindgen];
src = root; src = root;
cargoBuildOptions = opts: opts ++ ["-p" "${pname}" ]; cargoBuildOptions = opts: opts ++ ["-p" "${pname}" ];
postPatch = '' postPatch = ''
@ -25,4 +22,4 @@ with pkgs;
''; '';
# We have to tell libproc where the libclang.dylib lives # We have to tell libproc where the libclang.dylib lives
LIBCLANG_PATH="${libclang.lib}/lib/"; LIBCLANG_PATH="${libclang.lib}/lib/";
}) })

View File

@ -1,29 +1,25 @@
{pkgs? (import <nixpkgs>) {}, {pkgs? (import <nixpkgs>) {},
version, version,
features ? "",
rust-wasm, rust-wasm,
wasm-bindgen, wasm-bindgen,
lockFile,
outputHashes,
cargo-wasm2map,
}: }:
with pkgs; with pkgs;
let let
pname = "kitchen-wasm"; pname = "kitchen-wasm";
src = ./../..; src = ./../..;
lockFile = ./../../Cargo.lock;
# NOTE(jwall): Because we use wasm-pack directly below we need # NOTE(jwall): Because we use wasm-pack directly below we need
# the cargo dependencies to already be installed. # the cargo dependencies to already be installed.
cargoDeps = (pkgs.rustPlatform.importCargoLock { inherit lockFile; outputHashes = { cargoDeps = (pkgs.rustPlatform.importCargoLock { inherit lockFile outputHashes; });
# I'm maintaining some patches for these so the lockfile hashes are a little
# incorrect. We override those here.
"sycamore-0.8.2" = "sha256-D968+8C5EelGGmot9/LkAlULZOf/Cr+1WYXRCMwb1nQ=";
"sqlx-0.6.2" = "sha256-X/LFvtzRfiOIEZJiVzmFvvULPpjhqvI99pSwH7a//GM=";
};
});
in in
# TODO(zaphar): I should actually be leveraging naersklib.buildPackage with a postInstall for the optimization and bindgen
stdenv.mkDerivation { stdenv.mkDerivation {
inherit src pname; inherit src pname;
version = version; version = version;
# we need wasmb-bindgen v0.2.83 exactly # we need wasmb-bindgen v0.2.83 exactly
buildInputs = [ rust-wasm wasm-bindgen wasm-pack binaryen]; buildInputs = [ rust-wasm wasm-bindgen wasm-pack binaryen cargo-wasm2map];
propagatedBuildInputs = [ rust-wasm wasm-bindgen wasm-pack binaryen]; propagatedBuildInputs = [ rust-wasm wasm-bindgen wasm-pack binaryen];
phases = [ "postUnpackPhase" "buildPhase"]; phases = [ "postUnpackPhase" "buildPhase"];
postUnpackPhase = '' postUnpackPhase = ''
@ -31,14 +27,17 @@ stdenv.mkDerivation {
cp -r ./cargo-vendor-dir/.cargo ./ cp -r ./cargo-vendor-dir/.cargo ./
cp -r $src/* ./ cp -r $src/* ./
''; '';
# TODO(jwall): Build this from the root rather than the src. # TODO(jwall): Use the makefile for as much of this as possible.
buildPhase = '' buildPhase = ''
echo building with wasm-pack
mkdir -p $out mkdir -p $out
cd web cd web
cp -r static $out cp -r static $out
RUST_LOG=info wasm-pack build --mode no-install --release --target web --out-dir $out ${features}; export project=kitchen
sh ../scripts/wasm-build.sh release
sh ../scripts/wasm-sourcemap.sh
cp -r index.html $out cp -r index.html $out
cp -r favicon.ico $out cp -r favicon.ico $out
rm -rf $out/release
rm -rf $out/wasm32-unknown-unknown
''; '';
} }

View File

@ -1,7 +0,0 @@
{
darwin-sdk = pkgs: with pkgs; (if stdenv.isDarwin then (with darwin.apple_sdk.frameworks; [
xcbuild
Security
fixDarwinDylibNames
]) else [ ]);
}

View File

@ -1,6 +1,3 @@
let
my-lib = import ../lib/lib.nix;
in
{ pkgs { pkgs
, lib , lib
, rustPlatform , rustPlatform
@ -8,9 +5,7 @@ in
, nodejs , nodejs
, pkg-config , pkg-config
, openssl , openssl
, stdenv
, curl , curl
, runCommand
}: }:
# This package is special so we don't use the naersk infrastructure to build it. # This package is special so we don't use the naersk infrastructure to build it.
@ -20,21 +15,22 @@ rustPlatform.buildRustPackage rec {
pname = "wasm-bindgen-cli"; pname = "wasm-bindgen-cli";
# NOTE(jwall): This must exactly match the version of the wasm-bindgen crate # NOTE(jwall): This must exactly match the version of the wasm-bindgen crate
# we are using. # we are using.
version = "0.2.84"; version = "0.2.89";
src = fetchCrate { src = fetchCrate {
inherit pname version; inherit pname version;
sha256 = "sha256-0rK+Yx4/Jy44Fw5VwJ3tG243ZsyOIBBehYU54XP/JGk="; sha256 = "sha256-IPxP68xtNSpwJjV2yNMeepAS0anzGl02hYlSTvPocz8=";
}; };
cargoSha256 = "sha256-vcpxcRlW1OKoD64owFF6mkxSqmNrvY+y3Ckn5UwEQ50="; cargoHash = "sha256-EsGFW1f9+E5NnMadP/0rRzFCxVJQb0mlTLz/3zYQ5Ac=";
nativeBuildInputs = [ pkg-config ]; nativeBuildInputs = [ pkg-config ];
buildInputs = [ openssl curl ] ++ (my-lib.darwin-sdk pkgs); buildInputs = [ openssl curl ];
nativeCheckInputs = [ nodejs ]; nativeCheckInputs = [ nodejs ];
# other tests require it to be ran in the wasm-bindgen monorepo # other tests require it to be ran in the wasm-bindgen monorepo
cargoTestFlags = [ "--test=interface-types" ]; #cargoTestFlags = [ "--test=reference" ];
} doCheck = false;
}

View File

@ -1,6 +1,3 @@
let
my-lib = import ../lib/lib.nix;
in
{pkgs, {pkgs,
naersk-lib, naersk-lib,
rust-wasm, rust-wasm,
@ -8,9 +5,9 @@ in
with pkgs; with pkgs;
(naersk-lib.buildPackage rec { (naersk-lib.buildPackage rec {
pname = "wasm-pack"; pname = "wasm-pack";
version = "v0.11.0"; version = "v0.12.1";
buildInputs = [ rust-wasm pkgs.openssl curl]; buildInputs = [ rust-wasm pkgs.openssl curl];
nativeBuildInputs = (my-lib.darwin-sdk pkgs) ++ [llvm clang pkg-config]; nativeBuildInputs =[llvm clang pkg-config];
OPENSSL_NO_VENDOR=1; OPENSSL_NO_VENDOR=1;
# The checks use network so disable them here # The checks use network so disable them here
doCheck = false; doCheck = false;
@ -18,7 +15,7 @@ with pkgs;
owner = "rustwasm"; owner = "rustwasm";
repo = "wasm-pack"; repo = "wasm-pack";
rev = version; rev = version;
sha256 = "sha256-3iwXoYnmrZsbwFUR41uI/4jnCF0OjeRO7UqVDaGJJbQ="; hash = "sha256-L4mCgUPG4cgTUpCoaIUOTONBOggXn5vMyPKj48B3MMk=";
}; };
cargoBuildOptions = opts: opts ++ ["-p" "${pname}" ]; cargoBuildOptions = opts: opts ++ ["-p" "${pname}" ];
}) })

View File

@ -6,10 +6,6 @@ A web assembly experiment in Meal Planning and Shopping List management.
Ensure you have rust installed with support for the web assembly target. You can see instructions here: [Rust wasm book](https://rustwasm.github.io/docs/book/game-of-life/setup.html). Ensure you have rust installed with support for the web assembly target. You can see instructions here: [Rust wasm book](https://rustwasm.github.io/docs/book/game-of-life/setup.html).
You will also want to have trunk installed. You can see instructions for that here: [trunk](https://trunkrs.dev/)
Then obtain the source. We do not at this time publish kitchen on [crates.io](https://crates.io/).
```sh ```sh
git clone https://github.com/zaphar/kitchen git clone https://github.com/zaphar/kitchen
cd kitchen cd kitchen
@ -23,7 +19,7 @@ make release
# Hacking on kitchen # Hacking on kitchen
If you want to hack on kitchen, then you may find it useful to use trunk in dev mode. The run script will run build the app and run trunk with it watching for changes and reloading on demand in your browser. The run script will run build the app and run it for you.
```sh ```sh
./run.sh ./run.sh
@ -37,4 +33,4 @@ If all of the above looks like too much work, and you already use the nix packag
```sh ```sh
nix run github:zaphar/kitchen nix run github:zaphar/kitchen
``` ```

View File

@ -8,8 +8,14 @@ edition = "2021"
[dependencies] [dependencies]
abortable_parser = "~0.2.6" abortable_parser = "~0.2.6"
chrono = "~0.4"
serde = "1.0.144" [dependencies.chrono]
version = "0.4.22"
features = ["serde"]
[dependencies.serde]
version = "1.0.204"
features = ["derive"]
[dependencies.num-rational] [dependencies.num-rational]
version = "~0.4.0" version = "~0.4.0"

View File

@ -50,35 +50,49 @@ impl Mealplan {
} }
#[derive(Serialize, Deserialize, Clone, Debug)] #[derive(Serialize, Deserialize, Clone, Debug)]
pub struct RecipeEntry(pub String, pub String, pub Option<String>); pub struct RecipeEntry {
pub id: String,
pub text: String,
pub category: Option<String>,
pub serving_count: Option<i64>,
}
impl RecipeEntry { impl RecipeEntry {
pub fn new<IS: Into<String>, TS: Into<String>>(recipe_id: IS, text: TS) -> Self { pub fn new<IS: Into<String>, TS: Into<String>>(recipe_id: IS, text: TS) -> Self {
Self(recipe_id.into(), text.into(), None) Self {
id: recipe_id.into(),
text: text.into(),
category: None,
serving_count: None,
}
} }
pub fn set_recipe_id<S: Into<String>>(&mut self, id: S) { pub fn set_recipe_id<S: Into<String>>(&mut self, id: S) {
self.0 = id.into(); self.id = id.into();
} }
pub fn recipe_id(&self) -> &str { pub fn recipe_id(&self) -> &str {
self.0.as_str() self.id.as_str()
} }
pub fn set_recipe_text<S: Into<String>>(&mut self, text: S) { pub fn set_recipe_text<S: Into<String>>(&mut self, text: S) {
self.1 = text.into(); self.text = text.into();
} }
pub fn recipe_text(&self) -> &str { pub fn recipe_text(&self) -> &str {
self.1.as_str() self.text.as_str()
} }
pub fn set_category<S: Into<String>>(&mut self, cat: S) { pub fn set_category<S: Into<String>>(&mut self, cat: S) {
self.2 = Some(cat.into()); self.category = Some(cat.into());
} }
pub fn category(&self) -> Option<&String> { pub fn category(&self) -> Option<&String> {
self.2.as_ref() self.category.as_ref()
}
pub fn serving_count(&self) -> Option<i64> {
self.serving_count.clone()
} }
} }
@ -87,6 +101,7 @@ impl RecipeEntry {
pub struct Recipe { pub struct Recipe {
pub title: String, pub title: String,
pub desc: Option<String>, pub desc: Option<String>,
pub serving_count: Option<i64>,
pub steps: Vec<Step>, pub steps: Vec<Step>,
} }
@ -96,6 +111,7 @@ impl Recipe {
title: title.into(), title: title.into(),
desc: desc.map(|s| s.into()), desc: desc.map(|s| s.into()),
steps: Vec::new(), steps: Vec::new(),
serving_count: Default::default(),
} }
} }
@ -132,6 +148,16 @@ impl Recipe {
} }
} }
impl TryFrom<&RecipeEntry> for Recipe {
type Error = String;
fn try_from(value: &RecipeEntry) -> Result<Self, Self::Error> {
let mut parsed = parse::as_recipe(&value.text)?;
parsed.serving_count = value.serving_count.clone();
Ok(parsed)
}
}
pub struct IngredientAccumulator { pub struct IngredientAccumulator {
inner: BTreeMap<IngredientKey, (Ingredient, BTreeSet<String>)>, inner: BTreeMap<IngredientKey, (Ingredient, BTreeSet<String>)>,
} }
@ -156,16 +182,28 @@ impl IngredientAccumulator {
set.insert(recipe_title.clone()); set.insert(recipe_title.clone());
self.inner.insert(key, (i.clone(), set)); self.inner.insert(key, (i.clone(), set));
} else { } else {
let amt = match (self.inner[&key].0.amt, i.amt) { let amts = match (&self.inner[&key].0.amt, &i.amt) {
(Volume(rvm), Volume(lvm)) => Volume(lvm + rvm), (Volume(rvm), Volume(lvm)) => vec![Volume(lvm + rvm)],
(Count(lqty), Count(rqty)) => Count(lqty + rqty), (Count(lqty), Count(rqty)) => vec![Count(lqty + rqty)],
(Weight(lqty), Weight(rqty)) => Weight(lqty + rqty), (Weight(lqty), Weight(rqty)) => vec![Weight(lqty + rqty)],
(Package(lnm, lqty), Package(rnm, rqty)) => {
if lnm == rnm {
vec![Package(lnm.clone(), lqty + rqty)]
} else {
vec![
Package(lnm.clone(), lqty.clone()),
Package(rnm.clone(), rqty.clone()),
]
}
}
_ => unreachable!(), _ => unreachable!(),
}; };
self.inner.get_mut(&key).map(|(i, set)| { for amt in amts {
i.amt = amt; self.inner.get_mut(&key).map(|(i, set)| {
set.insert(recipe_title.clone()); i.amt = amt;
}); set.insert(recipe_title.clone());
});
}
} }
} }
} }
@ -194,7 +232,7 @@ pub struct Step {
impl Step { impl Step {
pub fn new<S: Into<String>>(prep_time: Option<std::time::Duration>, instructions: S) -> Self { pub fn new<S: Into<String>>(prep_time: Option<std::time::Duration>, instructions: S) -> Self {
Self { Self {
prep_time: prep_time, prep_time,
instructions: instructions.into(), instructions: instructions.into(),
ingredients: Vec::new(), ingredients: Vec::new(),
} }

View File

@ -334,7 +334,14 @@ make_fn!(unit<StrIter, String>,
text_token!("kg"), text_token!("kg"),
text_token!("grams"), text_token!("grams"),
text_token!("gram"), text_token!("gram"),
text_token!("g")), text_token!("g"),
text_token!("pkg"),
text_token!("package"),
text_token!("bottle"),
text_token!("bot"),
text_token!("bag"),
text_token!("can")
),
_ => ws, _ => ws,
(u.to_lowercase().to_singular()) (u.to_lowercase().to_singular())
) )
@ -393,6 +400,7 @@ pub fn measure(i: StrIter) -> abortable_parser::Result<StrIter, Measure> {
"oz" => Weight(Oz(qty)), "oz" => Weight(Oz(qty)),
"kg" | "kilogram" => Weight(Kilogram(qty)), "kg" | "kilogram" => Weight(Kilogram(qty)),
"g" | "gram" => Weight(Gram(qty)), "g" | "gram" => Weight(Gram(qty)),
"pkg" | "package" | "can" | "bag" | "bottle" | "bot" => Measure::pkg(s, qty),
_u => { _u => {
eprintln!("Invalid unit: {}", _u); eprintln!("Invalid unit: {}", _u);
unreachable!() unreachable!()
@ -418,9 +426,8 @@ pub fn normalize_name(name: &str) -> String {
// NOTE(jwall): The below unwrap is safe because of the length // NOTE(jwall): The below unwrap is safe because of the length
// check above. // check above.
let last = parts.last().unwrap(); let last = parts.last().unwrap();
let normalized = last.to_singular();
prefix.push(' '); prefix.push(' ');
prefix.push_str(&normalized); prefix.push_str(&last.to_string());
return prefix; return prefix;
} }
return name.trim().to_lowercase().to_owned(); return name.trim().to_lowercase().to_owned();

View File

@ -235,32 +235,30 @@ fn test_ingredient_name_parse() {
#[test] #[test]
fn test_ingredient_parse() { fn test_ingredient_parse() {
for (i, expected) in vec![ for (i, expected) in vec![
//( (
// "1 cup flour ", "1 cup flour ",
// Ingredient::new("flour", None, Volume(Cup(Quantity::Whole(1))), ""), Ingredient::new("flour", None, Volume(Cup(Quantity::Whole(1)))),
//), ),
//( (
// "\t1 cup flour ", "\t1 cup flour ",
// Ingredient::new("flour", None, Volume(Cup(Quantity::Whole(1))), ""), Ingredient::new("flour", None, Volume(Cup(Quantity::Whole(1)))),
//), ),
//( (
// "1 cup apple (chopped)", "1 cup apple (chopped)",
// Ingredient::new( Ingredient::new(
// "apple", "apple",
// Some("chopped".to_owned()), Some("chopped".to_owned()),
// Volume(Cup(Quantity::Whole(1))), Volume(Cup(Quantity::Whole(1))),
// "", ),
// ), ),
//), (
//( "1 cup apple (chopped) ",
// "1 cup apple (chopped) ", Ingredient::new(
// Ingredient::new( "apple",
// "apple", Some("chopped".to_owned()),
// Some("chopped".to_owned()), Volume(Cup(Quantity::Whole(1))),
// Volume(Cup(Quantity::Whole(1))), ),
// "", ),
// ),
//),
( (
"1 green bell pepper (chopped) ", "1 green bell pepper (chopped) ",
Ingredient::new( Ingredient::new(
@ -269,6 +267,46 @@ fn test_ingredient_parse() {
Count(Quantity::Whole(1)), Count(Quantity::Whole(1)),
), ),
), ),
(
"1 pkg green onion",
Ingredient::new(
"green onion",
None,
Package("pkg".into(), Quantity::Whole(1)),
),
),
(
"1 bottle green onion",
Ingredient::new(
"green onion",
None,
Package("bottle".into(), Quantity::Whole(1)),
),
),
(
"1 bot green onion",
Ingredient::new(
"green onion",
None,
Package("bot".into(), Quantity::Whole(1)),
),
),
(
"1 bag green onion",
Ingredient::new(
"green onion",
None,
Package("bag".into(), Quantity::Whole(1)),
),
),
(
"1 can baked beans",
Ingredient::new(
"baked beans",
None,
Package("can".into(), Quantity::Whole(1)),
),
),
] { ] {
match parse::ingredient(StrIter::new(i)) { match parse::ingredient(StrIter::new(i)) {
ParseResult::Complete(_, ing) => assert_eq!(ing, expected), ParseResult::Complete(_, ing) => assert_eq!(ing, expected),

View File

@ -22,6 +22,7 @@ use std::{
convert::TryFrom, convert::TryFrom,
fmt::Display, fmt::Display,
ops::{Add, Div, Mul, Sub}, ops::{Add, Div, Mul, Sub},
rc::Rc,
}; };
use num_rational::Ratio; use num_rational::Ratio;
@ -179,6 +180,20 @@ impl VolumeMeasure {
macro_rules! volume_op { macro_rules! volume_op {
($trait:ident, $method:ident) => { ($trait:ident, $method:ident) => {
impl $trait for &VolumeMeasure {
type Output = VolumeMeasure;
fn $method(self, lhs: Self) -> Self::Output {
let (l, r) = (self.get_ml(), lhs.get_ml());
let result = ML($trait::$method(l, r));
if self.metric() {
result.normalize()
} else {
result.into_tsp().normalize()
}
}
}
impl $trait for VolumeMeasure { impl $trait for VolumeMeasure {
type Output = Self; type Output = Self;
@ -293,6 +308,20 @@ impl WeightMeasure {
macro_rules! weight_op { macro_rules! weight_op {
($trait:ident, $method:ident) => { ($trait:ident, $method:ident) => {
impl $trait for &WeightMeasure {
type Output = WeightMeasure;
fn $method(self, lhs: Self) -> Self::Output {
let (l, r) = (self.get_grams(), lhs.get_grams());
let result = WeightMeasure::Gram($trait::$method(l, r));
if self.metric() {
result.normalize()
} else {
result.into_oz().normalize()
}
}
}
impl $trait for WeightMeasure { impl $trait for WeightMeasure {
type Output = Self; type Output = Self;
@ -335,18 +364,19 @@ impl Display for WeightMeasure {
use WeightMeasure::{Gram, Kilogram, Oz, Pound}; use WeightMeasure::{Gram, Kilogram, Oz, Pound};
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord)] #[derive(Clone, Debug, PartialEq, PartialOrd, Eq, Ord)]
/// Measurements in a Recipe with associated units for them. /// Measurements in a Recipe with associated units for them.
pub enum Measure { pub enum Measure {
/// Volume measurements as meter cubed base unit /// Volume measurements as meter cubed base unit
Volume(VolumeMeasure), Volume(VolumeMeasure),
/// Simple count of items /// Simple count of items
Count(Quantity), Count(Quantity),
Package(Rc<str>, Quantity),
/// Weight measure as Grams base unit /// Weight measure as Grams base unit
Weight(WeightMeasure), Weight(WeightMeasure),
} }
use Measure::{Count, Volume, Weight}; use Measure::{Count, Package, Volume, Weight};
impl Measure { impl Measure {
pub fn tsp(qty: Quantity) -> Self { pub fn tsp(qty: Quantity) -> Self {
@ -407,11 +437,16 @@ impl Measure {
Weight(Oz(qty)) Weight(Oz(qty))
} }
pub fn pkg<S: Into<Rc<str>>>(name: S, qty: Quantity) -> Self {
Package(name.into(), qty)
}
pub fn measure_type(&self) -> String { pub fn measure_type(&self) -> String {
match self { match self {
Volume(_) => "Volume", Volume(_) => "Volume",
Count(_) => "Count", Count(_) => "Count",
Weight(_) => "Weight", Weight(_) => "Weight",
Package(_, _) => "Package",
} }
.to_owned() .to_owned()
} }
@ -421,6 +456,7 @@ impl Measure {
Volume(vm) => vm.plural(), Volume(vm) => vm.plural(),
Count(qty) => qty.plural(), Count(qty) => qty.plural(),
Weight(wm) => wm.plural(), Weight(wm) => wm.plural(),
Package(_, qty) => qty.plural(),
} }
} }
@ -429,6 +465,7 @@ impl Measure {
Volume(vm) => Volume(vm.normalize()), Volume(vm) => Volume(vm.normalize()),
Count(qty) => Count(qty.clone()), Count(qty) => Count(qty.clone()),
Weight(wm) => Weight(wm.normalize()), Weight(wm) => Weight(wm.normalize()),
Package(nm, qty) => Package(nm.clone(), qty.clone()),
} }
} }
} }
@ -439,6 +476,7 @@ impl Display for Measure {
Volume(vm) => write!(w, "{}", vm), Volume(vm) => write!(w, "{}", vm),
Count(qty) => write!(w, "{}", qty), Count(qty) => write!(w, "{}", qty),
Weight(wm) => write!(w, "{}", wm), Weight(wm) => write!(w, "{}", wm),
Package(nm, qty) => write!(w, "{} {}", qty, nm),
} }
} }
} }
@ -533,6 +571,26 @@ impl TryFrom<f32> for Quantity {
macro_rules! quantity_op { macro_rules! quantity_op {
($trait:ident, $method:ident) => { ($trait:ident, $method:ident) => {
impl $trait for &Quantity {
type Output = Quantity;
fn $method(self, lhs: Self) -> Self::Output {
match (self, lhs) {
(Whole(rhs), Whole(lhs)) => Frac($trait::$method(
Ratio::from_integer(*rhs),
Ratio::from_integer(*lhs),
)),
(Frac(rhs), Frac(lhs)) => Frac($trait::$method(rhs, lhs)),
(Whole(rhs), Frac(lhs)) => {
Frac($trait::$method(Ratio::from_integer(*rhs), lhs))
}
(Frac(rhs), Whole(lhs)) => {
Frac($trait::$method(rhs, Ratio::from_integer(*lhs)))
}
}
}
}
impl $trait for Quantity { impl $trait for Quantity {
type Output = Self; type Output = Self;

19
run-non-nix.sh Executable file
View File

@ -0,0 +1,19 @@
# Copyright 2022 Jeremy Wall
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
EXAMPLES=${EXAMPLES:-../examples}
echo Starting server serving ${EXAMPLES}
mkdir -p .session_store
make kitchen
./target/debug/kitchen --verbose debug serve --listen 127.0.0.1:3030 --session_dir .session_store --dir ${EXAMPLES} --tls --cert ~/tls-certs/localhost+1.pem --cert_key ~/tls-certs/localhost+1-key.pem $@
# This is ghetto but I'm doing it anyway

2
run.sh
View File

@ -14,5 +14,5 @@
EXAMPLES=${EXAMPLES:-../examples} EXAMPLES=${EXAMPLES:-../examples}
echo Starting server serving ${EXAMPLES} echo Starting server serving ${EXAMPLES}
mkdir .session_store mkdir .session_store
nix run .\#kitchenDebug -- --verbose debug serve --session_dir .session_store --dir ${EXAMPLES} --tls --cert ~/tls-certs/localhost+2.pem --cert_key ~/tls-certs/localhost+2-key.pem $@ nix run .\#kitchenDebug -- --verbose debug serve --session_dir .session_store --dir ${EXAMPLES} --tls --cert ~/tls-certs/localhost+1.pem --cert_key ~/tls-certs/localhost+1-key.pem $@
# This is ghetto but I'm doing it anyway # This is ghetto but I'm doing it anyway

11
scripts/wasm-build.sh Normal file
View File

@ -0,0 +1,11 @@
set -x
buildtype=$1;
mkdir -p $out
if [ ${buildtype} = "release" ]; then
buildtype_flag="--release"
fi
cargo build --lib ${buildtype_flag} --target wasm32-unknown-unknown --target-dir $out --features debug_logs
wasm-bindgen $out/wasm32-unknown-unknown/${buildtype}/${project}_wasm.wasm --out-dir $out --typescript --target web

6
scripts/wasm-opt.sh Normal file
View File

@ -0,0 +1,6 @@
set -x
buildtype=$1;
wasm-opt $out/wasm32-unknown-unknown/${buildtype}/${project}_wasm.wasm --output $out/${project}_wasm_bg-opt.wasm -O
rm -f $out/${project}_wasm_bg.wasm
mv $out/${project}_wasm_bg-opt.wasm $out/${project}_wasm_bg.wasm

View File

@ -0,0 +1,3 @@
set -x
cargo-wasm2map wasm2map --patch $out/${project}_wasm_bg.wasm --base-url=http://localhost:3030

View File

@ -1,11 +0,0 @@
let
lock = builtins.fromJSON (builtins.readFile ./flake.lock);
in
(import (
fetchTarball {
url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
sha256 = lock.nodes.flake-compat.locked.narHash;
}
) {
src = ./.;
}).devShell

View File

@ -25,6 +25,15 @@ async-trait = "0.1.57"
base64 = "0.21.0" base64 = "0.21.0"
sycamore-router = "0.8" sycamore-router = "0.8"
js-sys = "0.3.60" js-sys = "0.3.60"
wasm-web-component = { git = "https://github.com/zaphar/wasm-web-components.git", rev = "v0.3.0" }
maud = "*"
indexed-db = "0.4.1"
anyhow = "1.0.86"
serde-wasm-bindgen = "0.6.5"
[dependencies.serde]
version = "1.0.204"
features = ["derive"]
[dependencies.tracing-subscriber] [dependencies.tracing-subscriber]
version = "0.3.16" version = "0.3.16"
@ -37,20 +46,26 @@ features = ["fmt", "time"]
version = "0.4.22" version = "0.4.22"
features = ["serde"] features = ["serde"]
[dependencies.reqwasm] [dependencies.gloo-net]
version = "0.5.0" version = "0.4.0"
[dependencies.wasm-bindgen] [dependencies.wasm-bindgen]
# we need wasm-bindgen v0.2.84 exactly version = "= 0.2.89"
version = "= 0.2.84"
[dependencies.web-sys] [dependencies.web-sys]
version = "0.3" version = "0.3"
features = [ features = [
"Event", "Event",
"InputEvent",
"CustomEvent",
"CustomEventInit",
"EventTarget", "EventTarget",
"History", "History",
"HtmlAnchorElement", "HtmlAnchorElement",
"HtmlDivElement",
"HtmlSpanElement",
"HtmlInputElement",
"HtmlTextAreaElement",
"HtmlBaseElement", "HtmlBaseElement",
"HtmlDialogElement", "HtmlDialogElement",
"KeyboardEvent", "KeyboardEvent",
@ -58,7 +73,12 @@ features = [
"PopStateEvent", "PopStateEvent",
"Url", "Url",
"Window", "Window",
"Storage" "IdbFactory",
"IdbOpenDbRequest",
"IdbRequest",
"IdbDatabase",
"IdbRequestReadyState",
"Storage",
] ]
[dependencies.sycamore] [dependencies.sycamore]

View File

@ -19,7 +19,7 @@
<head> <head>
<meta content="text/html;charset=utf-8" http-equiv="Content-Type" name="viewport" <meta content="text/html;charset=utf-8" http-equiv="Content-Type" name="viewport"
content="width=device-width, initial-scale=1.0" charset="UTF-8"> content="width=device-width, initial-scale=1.0" charset="UTF-8">
<link rel="stylesheet" href="/ui/static/pico.min.css"> <link rel="stylesheet" href="/ui/static/normalize.css">
<link rel="stylesheet" href="/ui/static/app.css"> <link rel="stylesheet" href="/ui/static/app.css">
</head> </head>
@ -35,4 +35,4 @@
</script> </script>
</body> </body>
</html> </html>

View File

@ -15,18 +15,31 @@ use std::collections::{BTreeMap, BTreeSet};
use base64::{self, Engine}; use base64::{self, Engine};
use chrono::NaiveDate; use chrono::NaiveDate;
use reqwasm; use gloo_net;
use serde_json::{from_str, to_string}; // TODO(jwall): Remove this when we have gone a few migrations past.
use serde_json::from_str;
use sycamore::prelude::*; use sycamore::prelude::*;
use tracing::{debug, error, instrument}; use tracing::{debug, error, instrument};
use anyhow::Result;
use client_api::*; use client_api::*;
use recipes::{IngredientKey, RecipeEntry}; use recipes::{IngredientKey, RecipeEntry};
use serde_wasm_bindgen::{from_value, Serializer};
use wasm_bindgen::JsValue; use wasm_bindgen::JsValue;
// TODO(jwall): Remove this when we have gone a few migrations past.
use web_sys::Storage; use web_sys::Storage;
use crate::{app_state::AppState, js_lib}; fn to_js<T: serde::ser::Serialize>(value: T) -> Result<JsValue, serde_wasm_bindgen::Error> {
let s = Serializer::new().serialize_maps_as_objects(true);
value.serialize(&s)
}
use crate::{
app_state::{parse_recipes, AppState},
js_lib::{self, DBFactory},
};
#[allow(dead_code)]
#[derive(Debug)] #[derive(Debug)]
pub struct Error(String); pub struct Error(String);
@ -66,284 +79,290 @@ impl From<std::string::FromUtf8Error> for Error {
} }
} }
impl From<reqwasm::Error> for Error { impl From<gloo_net::Error> for Error {
fn from(item: reqwasm::Error) -> Self { fn from(item: gloo_net::Error) -> Self {
Error(format!("{:?}", item)) Error(format!("{:?}", item))
} }
} }
fn recipe_key<S: std::fmt::Display>(id: S) -> String {
format!("recipe:{}", id)
}
fn category_key<S: std::fmt::Display>(id: S) -> String {
format!("category:{}", id)
}
fn token68(user: String, pass: String) -> String { fn token68(user: String, pass: String) -> String {
base64::engine::general_purpose::STANDARD.encode(format!("{}:{}", user, pass)) base64::engine::general_purpose::STANDARD.encode(format!("{}:{}", user, pass))
} }
fn convert_to_io_error<V, E>(res: Result<V, E>) -> Result<V, std::io::Error>
where
E: Into<Box<dyn std::error::Error>> + std::fmt::Debug,
{
match res {
Ok(v) => Ok(v),
Err(e) => Err(std::io::Error::new(
std::io::ErrorKind::Other,
format!("{:?}", e),
)),
}
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct LocalStore { pub struct LocalStore {
store: Storage, // TODO(zaphar): Remove this when it's safe to delete the migration
old_store: Storage,
store: DBFactory<'static>,
} }
const APP_STATE_KEY: &'static str = "app-state";
const USER_DATA_KEY: &'static str = "user_data";
impl LocalStore { impl LocalStore {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
store: js_lib::get_storage(), store: DBFactory::default(),
old_store: js_lib::get_storage(),
} }
} }
/// Gets user data from local storage. pub async fn migrate(&self) {
pub fn get_user_data(&self) -> Option<UserData> { // 1. migrate app-state from localstore to indexeddb
self.store debug!("Peforming localstorage migration");
.get("user_data") if let Ok(Some(v)) = self.old_store.get("app_state") {
.map_or(None, |val| val.map(|val| from_str(&val).unwrap_or(None))) if let Ok(Some(local_state)) = from_str::<Option<AppState>>(&v) {
.flatten() self.store_app_state(&local_state).await;
}
}
let _ = self.old_store.remove_item("app_state");
// 2. migrate user-state from localstore to indexeddb
if let Ok(Some(v)) = self.old_store.get(USER_DATA_KEY) {
if let Ok(local_user_data) = from_str::<Option<UserData>>(&v) {
self.set_user_data(local_user_data.as_ref()).await;
}
}
let _ = self.old_store.remove_item(USER_DATA_KEY);
// 3. Recipes
let store_len = self.old_store.length().unwrap();
let mut key_list = Vec::new();
for i in 0..store_len {
let key = self.old_store.key(i).unwrap().unwrap();
if key.starts_with("recipe:") {
key_list.push(key);
}
}
for k in key_list {
if let Ok(Some(recipe)) = self.old_store.get(&k) {
if let Ok(recipe) = from_str::<RecipeEntry>(&recipe) {
self.set_recipe_entry(&recipe).await;
}
}
let _ = self.old_store.delete(&k);
}
} }
#[instrument(skip_all)]
pub async fn store_app_state(&self, state: &AppState) {
let state = match to_js(state) {
Ok(state) => state,
Err(err) => {
error!(?err, ?state, "Error deserializing app_state");
return;
}
};
web_sys::console::log_1(&state);
let key = to_js(APP_STATE_KEY).expect("Failed to serialize key");
self.store
.rw_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
object_store.put_kv(&key, &state).await?;
Ok(())
})
.await
.expect("Failed to store app-state");
}
#[instrument]
pub async fn fetch_app_state(&self) -> Option<AppState> {
debug!("Loading state from local store");
let recipes = parse_recipes(&self.get_recipes().await).expect("Failed to parse recipes");
self.store
.ro_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
let key = convert_to_io_error(to_js(APP_STATE_KEY))?;
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
let mut app_state: AppState = match object_store.get(&key).await? {
Some(s) => convert_to_io_error(from_value(s))?,
None => return Ok(None),
};
if let Some(recipes) = recipes {
debug!("Populating recipes");
for (id, recipe) in recipes {
debug!(id, "Adding recipe from local storage");
app_state.recipes.insert(id, recipe);
}
}
Ok(Some(app_state))
})
.await
.expect("Failed to fetch app-state")
}
#[instrument]
/// Gets user data from local storage.
pub async fn get_user_data(&self) -> Option<UserData> {
self.store
.ro_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
let key = to_js(USER_DATA_KEY).expect("Failed to serialize key");
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
let user_data: UserData = match object_store.get(&key).await? {
Some(s) => convert_to_io_error(from_value(s))?,
None => return Ok(None),
};
Ok(Some(user_data))
})
.await
.expect("Failed to fetch user_data")
}
#[instrument]
// Set's user data to local storage. // Set's user data to local storage.
pub fn set_user_data(&self, data: Option<&UserData>) { pub async fn set_user_data(&self, data: Option<&UserData>) {
let key = to_js(USER_DATA_KEY).expect("Failed to serialize key");
if let Some(data) = data { if let Some(data) = data {
let data = data.clone();
self.store self.store
.set( .rw_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
"user_data", let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
&to_string(data).expect("Failed to desrialize user_data"), object_store
) .put_kv(&key, &convert_to_io_error(to_js(&data))?)
.await?;
Ok(())
})
.await
.expect("Failed to set user_data"); .expect("Failed to set user_data");
} else { } else {
self.store self.store
.delete("user_data") .rw_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
object_store.delete(&key).await?;
Ok(())
})
.await
.expect("Failed to delete user_data"); .expect("Failed to delete user_data");
} }
} }
/// Gets categories from local storage. #[instrument]
pub fn get_categories(&self) -> Option<Vec<(String, String)>> { async fn get_recipe_keys(&self) -> impl Iterator<Item = String> {
let mut mappings = Vec::new(); self.store
for k in self.get_category_keys() { .ro_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
if let Some(mut cat_map) = self let mut keys = Vec::new();
.store let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
.get(&k) let key_vec = object_store.get_all_keys(None).await?;
.expect(&format!("Failed to get category key {}", k)) for k in key_vec {
.map(|v| { if let Ok(v) = from_value(k) {
from_str::<Vec<(String, String)>>(&v) keys.push(v);
.expect(&format!("Failed to parse category key {}", k))
})
{
mappings.extend(cat_map.drain(0..));
}
}
if mappings.is_empty() {
None
} else {
Some(mappings)
}
}
/// Set the categories to the given string.
pub fn set_categories(&self, mappings: Option<&Vec<(String, String)>>) {
if let Some(mappings) = mappings {
for (i, cat) in mappings.iter() {
self.store
.set(
&category_key(i),
&to_string(&(i, cat)).expect("Failed to serialize category mapping"),
)
.expect("Failed to store category mapping");
}
}
}
fn get_storage_keys(&self) -> Vec<String> {
let mut keys = Vec::new();
for idx in 0..self.store.length().unwrap() {
if let Some(k) = self.store.key(idx).expect("Failed to get storage key") {
keys.push(k)
}
}
keys
}
fn get_category_keys(&self) -> impl Iterator<Item = String> {
self.get_storage_keys()
.into_iter()
.filter(|k| k.starts_with("category:"))
}
fn get_recipe_keys(&self) -> impl Iterator<Item = String> {
self.get_storage_keys()
.into_iter()
.filter(|k| k.starts_with("recipe:"))
}
/// Gets all the recipes from local storage.
pub fn get_recipes(&self) -> Option<Vec<RecipeEntry>> {
let mut recipe_list = Vec::new();
for recipe_key in self.get_recipe_keys() {
if let Some(entry) = self
.store
.get(&recipe_key)
.expect(&format!("Failed to get recipe: {}", recipe_key))
{
match from_str(&entry) {
Ok(entry) => {
recipe_list.push(entry);
}
Err(e) => {
error!(recipe_key, err = ?e, "Failed to parse recipe entry");
} }
} }
} Ok(keys)
} })
if recipe_list.is_empty() { .await
return None; .expect("Failed to get storage keys")
} .into_iter()
Some(recipe_list)
} }
pub fn get_recipe_entry(&self, id: &str) -> Option<RecipeEntry> { #[instrument]
let key = recipe_key(id); /// Gets all the recipes from local storage.
pub async fn get_recipes(&self) -> Option<Vec<RecipeEntry>> {
self.store self.store
.get(&key) .ro_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
.expect(&format!("Failed to get recipe {}", key)) let mut recipe_list = Vec::new();
.map(|entry| from_str(&entry).expect(&format!("Failed to get recipe {}", key))) let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
let mut c = object_store.cursor().open().await?;
while let Some(value) = c.value() {
recipe_list.push(convert_to_io_error(from_value(value))?);
c.advance(1).await?;
}
if recipe_list.is_empty() {
return Ok(None);
}
Ok(Some(recipe_list))
})
.await
.expect("Failed to get recipes")
} }
#[instrument]
pub async fn get_recipe_entry(&self, id: &str) -> Option<RecipeEntry> {
let key = to_js(id).expect("Failed to serialize key");
self.store
.ro_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
let entry: Option<RecipeEntry> = match object_store.get(&key).await? {
Some(v) => convert_to_io_error(from_value(v))?,
None => None,
};
Ok(entry)
})
.await
.expect("Failed to get recipes")
}
#[instrument]
/// Sets the set of recipes to the entries passed in. Deletes any recipes not /// Sets the set of recipes to the entries passed in. Deletes any recipes not
/// in the list. /// in the list.
pub fn set_all_recipes(&self, entries: &Vec<RecipeEntry>) { pub async fn set_all_recipes(&self, entries: &Vec<RecipeEntry>) {
for recipe_key in self.get_recipe_keys() { for recipe_key in self.get_recipe_keys().await {
let key = to_js(&recipe_key).expect("Failed to serialize key");
self.store self.store
.delete(&recipe_key) .rw_transaction(&[js_lib::STATE_STORE_NAME], |trx| async move {
.expect(&format!("Failed to get recipe {}", recipe_key)); let object_store = trx.object_store(js_lib::STATE_STORE_NAME)?;
object_store.delete(&key).await?;
Ok(())
})
.await
.expect("Failed to delete user_data");
} }
for entry in entries { for entry in entries {
self.set_recipe_entry(entry); let entry = entry.clone();
let key = to_js(entry.recipe_id()).expect("Failed to serialize recipe key");
self.store
.rw_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
object_store
.put_kv(&key, &convert_to_io_error(to_js(&entry))?)
.await?;
Ok(())
})
.await
.expect("Failed to store recipe entry");
} }
} }
#[instrument]
/// Set recipe entry in local storage. /// Set recipe entry in local storage.
pub fn set_recipe_entry(&self, entry: &RecipeEntry) { pub async fn set_recipe_entry(&self, entry: &RecipeEntry) {
let entry = entry.clone();
let key = to_js(entry.recipe_id()).expect("Failed to serialize recipe key");
self.store self.store
.set( .rw_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
&recipe_key(entry.recipe_id()), let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
&to_string(&entry).expect(&format!("Failed to get recipe {}", entry.recipe_id())), object_store
) .put_kv(&key, &convert_to_io_error(to_js(&entry))?)
.expect(&format!("Failed to store recipe {}", entry.recipe_id())) .await?;
Ok(())
})
.await
.expect("Failed to store recipe entry");
} }
#[instrument]
/// Delete recipe entry from local storage. /// Delete recipe entry from local storage.
pub fn delete_recipe_entry(&self, recipe_id: &str) { pub async fn delete_recipe_entry(&self, recipe_id: &str) {
let key = to_js(recipe_id).expect("Failed to serialize key");
self.store self.store
.delete(&recipe_key(recipe_id)) .rw_transaction(&[js_lib::RECIPE_STORE_NAME], |trx| async move {
.expect(&format!("Failed to delete recipe {}", recipe_id)) let object_store = trx.object_store(js_lib::RECIPE_STORE_NAME)?;
} object_store.delete(&key).await?;
Ok(())
/// Save working plan to local storage. })
pub fn store_plan(&self, plan: &Vec<(String, i32)>) { .await
self.store .expect("Failed to delete user_data");
.set("plan", &to_string(&plan).expect("Failed to serialize plan"))
.expect("Failed to store plan'");
}
pub fn get_plan(&self) -> Option<Vec<(String, i32)>> {
if let Some(plan) = self.store.get("plan").expect("Failed to store plan") {
Some(from_str(&plan).expect("Failed to deserialize plan"))
} else {
None
}
}
pub fn delete_plan(&self) {
self.store.delete("plan").expect("Failed to delete plan");
self.store
.delete("inventory")
.expect("Failed to delete inventory data");
}
pub fn set_plan_date(&self, date: &NaiveDate) {
self.store
.set(
"plan:date",
&to_string(&date).expect("Failed to serialize plan:date"),
)
.expect("Failed to store plan:date");
}
pub fn get_plan_date(&self) -> Option<NaiveDate> {
if let Some(date) = self
.store
.get("plan:date")
.expect("Failed to get plan date")
{
Some(from_str(&date).expect("Failed to deserialize plan_date"))
} else {
None
}
}
pub fn get_inventory_data(
&self,
) -> Option<(
BTreeSet<IngredientKey>,
BTreeMap<IngredientKey, String>,
Vec<(String, String)>,
)> {
if let Some(inventory) = self
.store
.get("inventory")
.expect("Failed to retrieve inventory data")
{
let (filtered, modified, extras): (
BTreeSet<IngredientKey>,
Vec<(IngredientKey, String)>,
Vec<(String, String)>,
) = from_str(&inventory).expect("Failed to deserialize inventory");
return Some((filtered, BTreeMap::from_iter(modified), extras));
}
return None;
}
pub fn set_inventory_data(
&self,
inventory: (
&BTreeSet<IngredientKey>,
&BTreeMap<IngredientKey, String>,
&Vec<(String, String)>,
),
) {
let filtered = inventory.0;
let modified_amts = inventory
.1
.iter()
.map(|(k, amt)| (k.clone(), amt.clone()))
.collect::<Vec<(IngredientKey, String)>>();
let extras = inventory.2;
let inventory_data = (filtered, &modified_amts, extras);
self.store
.set(
"inventory",
&to_string(&inventory_data).expect(&format!(
"Failed to serialize inventory {:?}",
inventory_data
)),
)
.expect("Failed to set inventory");
}
pub fn set_staples(&self, content: &String) {
self.store
.set("staples", content)
.expect("Failed to set staples in local store");
}
pub fn get_staples(&self) -> Option<String> {
self.store
.get("staples")
.expect("Failed to retreive staples from local store")
} }
} }
@ -381,13 +400,17 @@ impl HttpStore {
debug!("attempting login request against api."); debug!("attempting login request against api.");
let mut path = self.v2_path(); let mut path = self.v2_path();
path.push_str("/auth"); path.push_str("/auth");
let result = reqwasm::http::Request::get(&path) let request = gloo_net::http::Request::get(&path)
.header( .header(
"Authorization", "authorization",
format!("Basic {}", token68(user, pass)).as_str(), format!("Basic {}", token68(user, pass)).as_str(),
) )
.send() .mode(web_sys::RequestMode::SameOrigin)
.await; .credentials(web_sys::RequestCredentials::SameOrigin)
.build()
.expect("Failed to build request");
debug!(?request, "Sending auth request");
let result = request.send().await;
if let Ok(resp) = &result { if let Ok(resp) = &result {
if resp.status() == 200 { if resp.status() == 200 {
let user_data = resp let user_data = resp
@ -409,7 +432,7 @@ impl HttpStore {
debug!("Retrieving User Account data"); debug!("Retrieving User Account data");
let mut path = self.v2_path(); let mut path = self.v2_path();
path.push_str("/account"); path.push_str("/account");
let result = reqwasm::http::Request::get(&path).send().await; let result = gloo_net::http::Request::get(&path).send().await;
if let Ok(resp) = &result { if let Ok(resp) = &result {
if resp.status() == 200 { if resp.status() == 200 {
let user_data = resp let user_data = resp
@ -430,11 +453,11 @@ impl HttpStore {
pub async fn fetch_categories(&self) -> Result<Option<Vec<(String, String)>>, Error> { pub async fn fetch_categories(&self) -> Result<Option<Vec<(String, String)>>, Error> {
let mut path = self.v2_path(); let mut path = self.v2_path();
path.push_str("/category_map"); path.push_str("/category_map");
let resp = match reqwasm::http::Request::get(&path).send().await { let resp = match gloo_net::http::Request::get(&path).send().await {
Ok(resp) => resp, Ok(resp) => resp,
Err(reqwasm::Error::JsError(err)) => { Err(gloo_net::Error::JsError(err)) => {
error!(path, ?err, "Error hitting api"); error!(path, ?err, "Error hitting api");
return Ok(self.local_store.get_categories()); return Ok(None);
} }
Err(err) => { Err(err) => {
return Err(err)?; return Err(err)?;
@ -460,11 +483,11 @@ impl HttpStore {
pub async fn fetch_recipes(&self) -> Result<Option<Vec<RecipeEntry>>, Error> { pub async fn fetch_recipes(&self) -> Result<Option<Vec<RecipeEntry>>, Error> {
let mut path = self.v2_path(); let mut path = self.v2_path();
path.push_str("/recipes"); path.push_str("/recipes");
let resp = match reqwasm::http::Request::get(&path).send().await { let resp = match gloo_net::http::Request::get(&path).send().await {
Ok(resp) => resp, Ok(resp) => resp,
Err(reqwasm::Error::JsError(err)) => { Err(gloo_net::Error::JsError(err)) => {
error!(path, ?err, "Error hitting api"); error!(path, ?err, "Error hitting api");
return Ok(self.local_store.get_recipes()); return Ok(self.local_store.get_recipes().await);
} }
Err(err) => { Err(err) => {
return Err(err)?; return Err(err)?;
@ -490,11 +513,11 @@ impl HttpStore {
let mut path = self.v2_path(); let mut path = self.v2_path();
path.push_str("/recipe/"); path.push_str("/recipe/");
path.push_str(id.as_ref()); path.push_str(id.as_ref());
let resp = match reqwasm::http::Request::get(&path).send().await { let resp = match gloo_net::http::Request::get(&path).send().await {
Ok(resp) => resp, Ok(resp) => resp,
Err(reqwasm::Error::JsError(err)) => { Err(gloo_net::Error::JsError(err)) => {
error!(path, ?err, "Error hitting api"); error!(path, ?err, "Error hitting api");
return Ok(self.local_store.get_recipe_entry(id.as_ref())); return Ok(self.local_store.get_recipe_entry(id.as_ref()).await);
} }
Err(err) => { Err(err) => {
return Err(err)?; return Err(err)?;
@ -514,7 +537,7 @@ impl HttpStore {
.as_success() .as_success()
.unwrap(); .unwrap();
if let Some(ref entry) = entry { if let Some(ref entry) = entry {
self.local_store.set_recipe_entry(entry); self.local_store.set_recipe_entry(entry).await;
} }
Ok(entry) Ok(entry)
} }
@ -528,7 +551,7 @@ impl HttpStore {
let mut path = self.v2_path(); let mut path = self.v2_path();
path.push_str("/recipe"); path.push_str("/recipe");
path.push_str(&format!("/{}", recipe.as_ref())); path.push_str(&format!("/{}", recipe.as_ref()));
let resp = reqwasm::http::Request::delete(&path).send().await?; let resp = gloo_net::http::Request::delete(&path).send().await?;
if resp.status() != 200 { if resp.status() != 200 {
Err(format!("Status: {}", resp.status()).into()) Err(format!("Status: {}", resp.status()).into())
} else { } else {
@ -546,10 +569,9 @@ impl HttpStore {
return Err("Recipe Ids can not be empty".into()); return Err("Recipe Ids can not be empty".into());
} }
} }
let serialized = to_string(&recipes).expect("Unable to serialize recipe entries"); let resp = gloo_net::http::Request::post(&path)
let resp = reqwasm::http::Request::post(&path) .json(&recipes)
.body(&serialized) .expect("Failed to set body")
.header("content-type", "application/json")
.send() .send()
.await?; .await?;
if resp.status() != 200 { if resp.status() != 200 {
@ -564,9 +586,9 @@ impl HttpStore {
pub async fn store_categories(&self, categories: &Vec<(String, String)>) -> Result<(), Error> { pub async fn store_categories(&self, categories: &Vec<(String, String)>) -> Result<(), Error> {
let mut path = self.v2_path(); let mut path = self.v2_path();
path.push_str("/category_map"); path.push_str("/category_map");
let resp = reqwasm::http::Request::post(&path) let resp = gloo_net::http::Request::post(&path)
.body(to_string(&categories).expect("Unable to encode categories as json")) .json(&categories)
.header("content-type", "application/json") .expect("Failed to set body")
.send() .send()
.await?; .await?;
if resp.status() != 200 { if resp.status() != 200 {
@ -618,9 +640,9 @@ impl HttpStore {
pub async fn store_plan(&self, plan: Vec<(String, i32)>) -> Result<(), Error> { pub async fn store_plan(&self, plan: Vec<(String, i32)>) -> Result<(), Error> {
let mut path = self.v2_path(); let mut path = self.v2_path();
path.push_str("/plan"); path.push_str("/plan");
let resp = reqwasm::http::Request::post(&path) let resp = gloo_net::http::Request::post(&path)
.body(to_string(&plan).expect("Unable to encode plan as json")) .json(&plan)
.header("content-type", "application/json") .expect("Failed to set body")
.send() .send()
.await?; .await?;
if resp.status() != 200 { if resp.status() != 200 {
@ -640,9 +662,9 @@ impl HttpStore {
path.push_str("/plan"); path.push_str("/plan");
path.push_str("/at"); path.push_str("/at");
path.push_str(&format!("/{}", date)); path.push_str(&format!("/{}", date));
let resp = reqwasm::http::Request::post(&path) let resp = gloo_net::http::Request::post(&path)
.body(to_string(&plan).expect("Unable to encode plan as json")) .json(&plan)
.header("content-type", "application/json") .expect("Failed to set body")
.send() .send()
.await?; .await?;
if resp.status() != 200 { if resp.status() != 200 {
@ -657,7 +679,7 @@ impl HttpStore {
let mut path = self.v2_path(); let mut path = self.v2_path();
path.push_str("/plan"); path.push_str("/plan");
path.push_str("/all"); path.push_str("/all");
let resp = reqwasm::http::Request::get(&path).send().await?; let resp = gloo_net::http::Request::get(&path).send().await?;
if resp.status() != 200 { if resp.status() != 200 {
Err(format!("Status: {}", resp.status()).into()) Err(format!("Status: {}", resp.status()).into())
} else { } else {
@ -676,7 +698,7 @@ impl HttpStore {
path.push_str("/plan"); path.push_str("/plan");
path.push_str("/at"); path.push_str("/at");
path.push_str(&format!("/{}", date)); path.push_str(&format!("/{}", date));
let resp = reqwasm::http::Request::delete(&path).send().await?; let resp = gloo_net::http::Request::delete(&path).send().await?;
if resp.status() != 200 { if resp.status() != 200 {
Err(format!("Status: {}", resp.status()).into()) Err(format!("Status: {}", resp.status()).into())
} else { } else {
@ -692,7 +714,7 @@ impl HttpStore {
path.push_str("/plan"); path.push_str("/plan");
path.push_str("/at"); path.push_str("/at");
path.push_str(&format!("/{}", date)); path.push_str(&format!("/{}", date));
let resp = reqwasm::http::Request::get(&path).send().await?; let resp = gloo_net::http::Request::get(&path).send().await?;
if resp.status() != 200 { if resp.status() != 200 {
Err(format!("Status: {}", resp.status()).into()) Err(format!("Status: {}", resp.status()).into())
} else { } else {
@ -706,22 +728,22 @@ impl HttpStore {
} }
} }
pub async fn fetch_plan(&self) -> Result<Option<Vec<(String, i32)>>, Error> { //pub async fn fetch_plan(&self) -> Result<Option<Vec<(String, i32)>>, Error> {
let mut path = self.v2_path(); // let mut path = self.v2_path();
path.push_str("/plan"); // path.push_str("/plan");
let resp = reqwasm::http::Request::get(&path).send().await?; // let resp = gloo_net::http::Request::get(&path).send().await?;
if resp.status() != 200 { // if resp.status() != 200 {
Err(format!("Status: {}", resp.status()).into()) // Err(format!("Status: {}", resp.status()).into())
} else { // } else {
debug!("We got a valid response back"); // debug!("We got a valid response back");
let plan = resp // let plan = resp
.json::<PlanDataResponse>() // .json::<PlanDataResponse>()
.await // .await
.map_err(|e| format!("{}", e))? // .map_err(|e| format!("{}", e))?
.as_success(); // .as_success();
Ok(plan) // Ok(plan)
} // }
} //}
pub async fn fetch_inventory_for_date( pub async fn fetch_inventory_for_date(
&self, &self,
@ -738,13 +760,9 @@ impl HttpStore {
path.push_str("/inventory"); path.push_str("/inventory");
path.push_str("/at"); path.push_str("/at");
path.push_str(&format!("/{}", date)); path.push_str(&format!("/{}", date));
let resp = reqwasm::http::Request::get(&path).send().await?; let resp = gloo_net::http::Request::get(&path).send().await?;
if resp.status() != 200 { if resp.status() != 200 {
let err = Err(format!("Status: {}", resp.status()).into()); Err(format!("Status: {}", resp.status()).into())
Ok(match self.local_store.get_inventory_data() {
Some(val) => val,
None => return err,
})
} else { } else {
debug!("We got a valid response back"); debug!("We got a valid response back");
let InventoryData { let InventoryData {
@ -777,13 +795,9 @@ impl HttpStore {
> { > {
let mut path = self.v2_path(); let mut path = self.v2_path();
path.push_str("/inventory"); path.push_str("/inventory");
let resp = reqwasm::http::Request::get(&path).send().await?; let resp = gloo_net::http::Request::get(&path).send().await?;
if resp.status() != 200 { if resp.status() != 200 {
let err = Err(format!("Status: {}", resp.status()).into()); Err(format!("Status: {}", resp.status()).into())
Ok(match self.local_store.get_inventory_data() {
Some(val) => val,
None => return err,
})
} else { } else {
debug!("We got a valid response back"); debug!("We got a valid response back");
let InventoryData { let InventoryData {
@ -818,13 +832,10 @@ impl HttpStore {
path.push_str(&format!("/{}", date)); path.push_str(&format!("/{}", date));
let filtered_ingredients: Vec<IngredientKey> = filtered_ingredients.into_iter().collect(); let filtered_ingredients: Vec<IngredientKey> = filtered_ingredients.into_iter().collect();
let modified_amts: Vec<(IngredientKey, String)> = modified_amts.into_iter().collect(); let modified_amts: Vec<(IngredientKey, String)> = modified_amts.into_iter().collect();
debug!("Storing inventory data in cache");
let serialized_inventory = to_string(&(filtered_ingredients, modified_amts, extra_items))
.expect("Unable to encode plan as json");
debug!("Storing inventory data via API"); debug!("Storing inventory data via API");
let resp = reqwasm::http::Request::post(&path) let resp = gloo_net::http::Request::post(&path)
.body(&serialized_inventory) .json(&(filtered_ingredients, modified_amts, extra_items))
.header("content-type", "application/json") .expect("Failed to set body")
.send() .send()
.await?; .await?;
if resp.status() != 200 { if resp.status() != 200 {
@ -847,13 +858,10 @@ impl HttpStore {
path.push_str("/inventory"); path.push_str("/inventory");
let filtered_ingredients: Vec<IngredientKey> = filtered_ingredients.into_iter().collect(); let filtered_ingredients: Vec<IngredientKey> = filtered_ingredients.into_iter().collect();
let modified_amts: Vec<(IngredientKey, String)> = modified_amts.into_iter().collect(); let modified_amts: Vec<(IngredientKey, String)> = modified_amts.into_iter().collect();
debug!("Storing inventory data in cache");
let serialized_inventory = to_string(&(filtered_ingredients, modified_amts, extra_items))
.expect("Unable to encode plan as json");
debug!("Storing inventory data via API"); debug!("Storing inventory data via API");
let resp = reqwasm::http::Request::post(&path) let resp = gloo_net::http::Request::post(&path)
.body(&serialized_inventory) .json(&(filtered_ingredients, modified_amts, extra_items))
.header("content-type", "application/json") .expect("Failed to set body")
.send() .send()
.await?; .await?;
if resp.status() != 200 { if resp.status() != 200 {
@ -868,7 +876,7 @@ impl HttpStore {
pub async fn fetch_staples(&self) -> Result<Option<String>, Error> { pub async fn fetch_staples(&self) -> Result<Option<String>, Error> {
let mut path = self.v2_path(); let mut path = self.v2_path();
path.push_str("/staples"); path.push_str("/staples");
let resp = reqwasm::http::Request::get(&path).send().await?; let resp = gloo_net::http::Request::get(&path).send().await?;
if resp.status() != 200 { if resp.status() != 200 {
debug!("Invalid response back"); debug!("Invalid response back");
Err(format!("Status: {}", resp.status()).into()) Err(format!("Status: {}", resp.status()).into())
@ -882,15 +890,15 @@ impl HttpStore {
} }
} }
pub async fn store_staples<S: AsRef<str>>(&self, content: S) -> Result<(), Error> { pub async fn store_staples<S: AsRef<str> + serde::Serialize>(
&self,
content: S,
) -> Result<(), Error> {
let mut path = self.v2_path(); let mut path = self.v2_path();
path.push_str("/staples"); path.push_str("/staples");
let serialized_staples: String = let resp = gloo_net::http::Request::post(&path)
to_string(content.as_ref()).expect("Failed to serialize staples to json"); .json(&content)
.expect("Failed to set body")
let resp = reqwasm::http::Request::post(&path)
.body(&serialized_staples)
.header("content-type", "application/json")
.send() .send()
.await?; .await?;
if resp.status() != 200 { if resp.status() != 200 {

View File

@ -19,6 +19,7 @@ use std::{
use chrono::NaiveDate; use chrono::NaiveDate;
use client_api::UserData; use client_api::UserData;
use recipes::{parse, Ingredient, IngredientKey, Recipe, RecipeEntry}; use recipes::{parse, Ingredient, IngredientKey, Recipe, RecipeEntry};
use serde::{Deserialize, Serialize};
use sycamore::futures::spawn_local_scoped; use sycamore::futures::spawn_local_scoped;
use sycamore::prelude::*; use sycamore::prelude::*;
use sycamore_state::{Handler, MessageMapper}; use sycamore_state::{Handler, MessageMapper};
@ -27,15 +28,23 @@ use wasm_bindgen::throw_str;
use crate::{ use crate::{
api::{HttpStore, LocalStore}, api::{HttpStore, LocalStore},
components, linear::LinearSignal,
}; };
#[derive(Debug, Clone, PartialEq)] fn bool_true() -> bool {
true
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AppState { pub struct AppState {
pub recipe_counts: BTreeMap<String, usize>, pub recipe_counts: BTreeMap<String, u32>,
pub recipe_categories: BTreeMap<String, String>, pub recipe_categories: BTreeMap<String, String>,
pub extras: Vec<(String, String)>, pub extras: Vec<(String, String)>,
// FIXME(jwall): This should really be storable I think?
#[serde(skip_deserializing, skip_serializing)]
pub staples: Option<BTreeSet<Ingredient>>, pub staples: Option<BTreeSet<Ingredient>>,
// FIXME(jwall): This should really be storable I think?
#[serde(skip_deserializing, skip_serializing)]
pub recipes: BTreeMap<String, Recipe>, pub recipes: BTreeMap<String, Recipe>,
pub category_map: BTreeMap<String, String>, pub category_map: BTreeMap<String, String>,
pub filtered_ingredients: BTreeSet<IngredientKey>, pub filtered_ingredients: BTreeSet<IngredientKey>,
@ -43,6 +52,8 @@ pub struct AppState {
pub auth: Option<UserData>, pub auth: Option<UserData>,
pub plan_dates: BTreeSet<NaiveDate>, pub plan_dates: BTreeSet<NaiveDate>,
pub selected_plan_date: Option<NaiveDate>, pub selected_plan_date: Option<NaiveDate>,
#[serde(default = "bool_true")]
pub use_staples: bool,
} }
impl AppState { impl AppState {
@ -59,13 +70,14 @@ impl AppState {
auth: None, auth: None,
plan_dates: BTreeSet::new(), plan_dates: BTreeSet::new(),
selected_plan_date: None, selected_plan_date: None,
use_staples: true,
} }
} }
} }
pub enum Message { pub enum Message {
ResetRecipeCounts, ResetRecipeCounts,
UpdateRecipeCount(String, usize), UpdateRecipeCount(String, u32),
AddExtra(String, String), AddExtra(String, String),
RemoveExtra(usize), RemoveExtra(usize),
UpdateExtra(usize, String, String), UpdateExtra(usize, String, String),
@ -74,6 +86,7 @@ pub enum Message {
UpdateCategory(String, String, Option<Box<dyn FnOnce()>>), UpdateCategory(String, String, Option<Box<dyn FnOnce()>>),
ResetInventory, ResetInventory,
AddFilteredIngredient(IngredientKey), AddFilteredIngredient(IngredientKey),
RemoveFilteredIngredient(IngredientKey),
UpdateAmt(IngredientKey, String), UpdateAmt(IngredientKey, String),
SetUserData(UserData), SetUserData(UserData),
SaveState(Option<Box<dyn FnOnce()>>), SaveState(Option<Box<dyn FnOnce()>>),
@ -81,6 +94,7 @@ pub enum Message {
UpdateStaples(String, Option<Box<dyn FnOnce()>>), UpdateStaples(String, Option<Box<dyn FnOnce()>>),
DeletePlan(NaiveDate, Option<Box<dyn FnOnce()>>), DeletePlan(NaiveDate, Option<Box<dyn FnOnce()>>),
SelectPlanDate(NaiveDate, Option<Box<dyn FnOnce()>>), SelectPlanDate(NaiveDate, Option<Box<dyn FnOnce()>>),
UpdateUseStaples(bool), // TODO(jwall): Should this just be various settings?
} }
impl Debug for Message { impl Debug for Message {
@ -111,6 +125,9 @@ impl Debug for Message {
Self::AddFilteredIngredient(arg0) => { Self::AddFilteredIngredient(arg0) => {
f.debug_tuple("AddFilteredIngredient").field(arg0).finish() f.debug_tuple("AddFilteredIngredient").field(arg0).finish()
} }
Self::RemoveFilteredIngredient(arg0) => {
f.debug_tuple("RemoveFilteredIngredient").field(arg0).finish()
}
Self::UpdateAmt(arg0, arg1) => { Self::UpdateAmt(arg0, arg1) => {
f.debug_tuple("UpdateAmt").field(arg0).field(arg1).finish() f.debug_tuple("UpdateAmt").field(arg0).field(arg1).finish()
} }
@ -118,6 +135,7 @@ impl Debug for Message {
Self::SaveState(_) => write!(f, "SaveState"), Self::SaveState(_) => write!(f, "SaveState"),
Self::LoadState(_) => write!(f, "LoadState"), Self::LoadState(_) => write!(f, "LoadState"),
Self::UpdateStaples(arg, _) => f.debug_tuple("UpdateStaples").field(arg).finish(), Self::UpdateStaples(arg, _) => f.debug_tuple("UpdateStaples").field(arg).finish(),
Self::UpdateUseStaples(arg) => f.debug_tuple("UpdateUseStaples").field(arg).finish(),
Self::SelectPlanDate(arg, _) => f.debug_tuple("SelectPlanDate").field(arg).finish(), Self::SelectPlanDate(arg, _) => f.debug_tuple("SelectPlanDate").field(arg).finish(),
Self::DeletePlan(arg, _) => f.debug_tuple("DeletePlan").field(arg).finish(), Self::DeletePlan(arg, _) => f.debug_tuple("DeletePlan").field(arg).finish(),
} }
@ -130,14 +148,14 @@ pub struct StateMachine {
} }
#[instrument] #[instrument]
fn parse_recipes( pub fn parse_recipes(
recipe_entries: &Option<Vec<RecipeEntry>>, recipe_entries: &Option<Vec<RecipeEntry>>,
) -> Result<Option<BTreeMap<String, Recipe>>, String> { ) -> Result<Option<BTreeMap<String, Recipe>>, String> {
match recipe_entries { match recipe_entries {
Some(parsed) => { Some(parsed) => {
let mut parsed_map = BTreeMap::new(); let mut parsed_map = BTreeMap::new();
for r in parsed { for r in parsed {
let recipe = match parse::as_recipe(&r.recipe_text()) { let recipe = match r.try_into() {
Ok(r) => r, Ok(r) => r,
Err(e) => { Err(e) => {
error!("Error parsing recipe {}", e); error!("Error parsing recipe {}", e);
@ -157,40 +175,44 @@ impl StateMachine {
Self { store, local_store } Self { store, local_store }
} }
#[instrument(skip_all)]
async fn load_state( async fn load_state(
store: &HttpStore, store: &HttpStore,
local_store: &LocalStore, local_store: &LocalStore,
original: &Signal<AppState>, original: &Signal<AppState>,
) -> Result<(), crate::api::Error> { ) -> Result<(), crate::api::Error> {
// NOTE(jwall): We use a linear Signal in here to ensure that we only
// call set on the signal once. When the LinearSignal get's dropped it
// will call set on the contained Signal.
let mut original: LinearSignal<AppState> = original.into();
if let Some(state) = local_store.fetch_app_state().await {
original = original.update(state);
}
let mut state = original.get().as_ref().clone(); let mut state = original.get().as_ref().clone();
info!("Synchronizing Recipes"); info!("Synchronizing Recipes");
let recipe_entries = &store.fetch_recipes().await?; let recipe_entries = &store.fetch_recipes().await?;
let recipes = parse_recipes(&recipe_entries)?; let recipes = parse_recipes(&recipe_entries)?;
debug!(?recipes, "Parsed Recipes");
if let Some(recipes) = recipes { if let Some(recipes) = recipes {
state.recipes = recipes; state.recipes = recipes;
}; };
info!("Synchronizing staples"); info!("Synchronizing staples");
state.staples = if let Some(content) = store.fetch_staples().await? { state.staples = if let Some(content) = store.fetch_staples().await? {
local_store.set_staples(&content);
// now we need to parse staples as ingredients // now we need to parse staples as ingredients
let mut staples = parse::as_ingredient_list(&content)?; let mut staples = parse::as_ingredient_list(&content)?;
Some(staples.drain(0..).collect()) Some(staples.drain(0..).collect())
} else { } else {
if let Some(content) = local_store.get_staples() { Some(BTreeSet::new())
let mut staples = parse::as_ingredient_list(&content)?;
Some(staples.drain(0..).collect())
} else {
None
}
}; };
info!("Synchronizing recipe");
if let Some(recipe_entries) = recipe_entries { if let Some(recipe_entries) = recipe_entries {
local_store.set_all_recipes(recipe_entries); local_store.set_all_recipes(recipe_entries).await;
state.recipe_categories = recipe_entries state.recipe_categories = recipe_entries
.iter() .iter()
.map(|entry| { .map(|entry| {
debug!(recipe_entry=?entry, "Getting recipe category");
( (
entry.recipe_id().to_owned(), entry.recipe_id().to_owned(),
entry entry
@ -203,25 +225,25 @@ impl StateMachine {
} }
info!("Fetching meal plan list"); info!("Fetching meal plan list");
let plan_dates = store.fetch_plan_dates().await?; if let Some(mut plan_dates) = store.fetch_plan_dates().await? {
if let Some(mut plan_dates) = plan_dates {
debug!(?plan_dates, "meal plan list"); debug!(?plan_dates, "meal plan list");
state.plan_dates = BTreeSet::from_iter(plan_dates.drain(0..)); state.plan_dates = BTreeSet::from_iter(plan_dates.drain(0..));
} }
info!("Synchronizing meal plan"); info!("Synchronizing meal plan");
let plan = if let Some(cached_plan_date) = local_store.get_plan_date() { let plan = if let Some(ref cached_plan_date) = state.selected_plan_date {
let plan = store.fetch_plan_for_date(&cached_plan_date).await?; store
state.selected_plan_date = Some(cached_plan_date); .fetch_plan_for_date(cached_plan_date)
plan .await?
.or_else(|| Some(Vec::new()))
} else { } else {
store.fetch_plan().await? None
}; };
if let Some(plan) = plan { if let Some(plan) = plan {
// set the counts. // set the counts.
let mut plan_map = BTreeMap::new(); let mut plan_map = BTreeMap::new();
for (id, count) in plan { for (id, count) in plan {
plan_map.insert(id, count as usize); plan_map.insert(id, count as u32);
} }
state.recipe_counts = plan_map; state.recipe_counts = plan_map;
for (id, _) in state.recipes.iter() { for (id, _) in state.recipes.iter() {
@ -230,44 +252,32 @@ impl StateMachine {
} }
} }
} else { } else {
if let Some(plan) = local_store.get_plan() { // Initialize things to zero.
state.recipe_counts = plan.iter().map(|(k, v)| (k.clone(), *v as usize)).collect(); if let Some(rs) = recipe_entries {
} else { for r in rs {
// Initialize things to zero. state.recipe_counts.insert(r.recipe_id().to_owned(), 0);
if let Some(rs) = recipe_entries {
for r in rs {
state.recipe_counts.insert(r.recipe_id().to_owned(), 0);
}
} }
} }
} }
let plan = state
.recipe_counts
.iter()
.map(|(k, v)| (k.clone(), *v as i32))
.collect::<Vec<(String, i32)>>();
local_store.store_plan(&plan);
info!("Checking for user account data"); info!("Checking for user account data");
if let Some(user_data) = store.fetch_user_data().await { if let Some(user_data) = store.fetch_user_data().await {
debug!("Successfully got account data from server"); debug!("Successfully got account data from server");
local_store.set_user_data(Some(&user_data)); local_store.set_user_data(Some(&user_data)).await;
state.auth = Some(user_data); state.auth = Some(user_data);
} else { } else {
debug!("Using account data from local store"); debug!("Using account data from local store");
let user_data = local_store.get_user_data(); let user_data = local_store.get_user_data().await;
state.auth = user_data; state.auth = user_data;
} }
info!("Synchronizing categories"); info!("Synchronizing categories");
match store.fetch_categories().await { match store.fetch_categories().await {
Ok(Some(mut categories_content)) => { Ok(Some(mut categories_content)) => {
debug!(categories=?categories_content); debug!(categories=?categories_content);
local_store.set_categories(Some(&categories_content));
let category_map = BTreeMap::from_iter(categories_content.drain(0..)); let category_map = BTreeMap::from_iter(categories_content.drain(0..));
state.category_map = category_map; state.category_map = category_map;
} }
Ok(None) => { Ok(None) => {
warn!("There is no category file"); warn!("There is no category file");
local_store.set_categories(None);
} }
Err(e) => { Err(e) => {
error!("{:?}", e); error!("{:?}", e);
@ -281,11 +291,6 @@ impl StateMachine {
info!("Synchronizing inventory data"); info!("Synchronizing inventory data");
match inventory_data { match inventory_data {
Ok((filtered_ingredients, modified_amts, extra_items)) => { Ok((filtered_ingredients, modified_amts, extra_items)) => {
local_store.set_inventory_data((
&filtered_ingredients,
&modified_amts,
&extra_items,
));
state.modified_amts = modified_amts; state.modified_amts = modified_amts;
state.filtered_ingredients = filtered_ingredients; state.filtered_ingredients = filtered_ingredients;
state.extras = extra_items; state.extras = extra_items;
@ -294,7 +299,9 @@ impl StateMachine {
error!("{:?}", e); error!("{:?}", e);
} }
} }
original.set(state); // Finally we store all of this app state back to our localstore
local_store.store_app_state(&state).await;
original.update(state);
Ok(()) Ok(())
} }
} }
@ -310,80 +317,49 @@ impl MessageMapper<Message, AppState> for StateMachine {
for (id, _) in original_copy.recipes.iter() { for (id, _) in original_copy.recipes.iter() {
map.insert(id.clone(), 0); map.insert(id.clone(), 0);
} }
let plan: Vec<(String, i32)> =
map.iter().map(|(s, i)| (s.clone(), *i as i32)).collect();
self.local_store.store_plan(&plan);
original_copy.recipe_counts = map; original_copy.recipe_counts = map;
} }
Message::UpdateRecipeCount(id, count) => { Message::UpdateRecipeCount(id, count) => {
original_copy.recipe_counts.insert(id, count); original_copy.recipe_counts.insert(id, count);
let plan: Vec<(String, i32)> = original_copy
.recipe_counts
.iter()
.map(|(s, i)| (s.clone(), *i as i32))
.collect();
self.local_store.store_plan(&plan);
} }
Message::AddExtra(amt, name) => { Message::AddExtra(amt, name) => {
original_copy.extras.push((amt, name)); original_copy.extras.push((amt, name));
self.local_store.set_inventory_data((
&original_copy.filtered_ingredients,
&original_copy.modified_amts,
&original_copy.extras,
))
} }
Message::RemoveExtra(idx) => { Message::RemoveExtra(idx) => {
original_copy.extras.remove(idx); original_copy.extras.remove(idx);
self.local_store.set_inventory_data((
&original_copy.filtered_ingredients,
&original_copy.modified_amts,
&original_copy.extras,
))
} }
Message::UpdateExtra(idx, amt, name) => { Message::UpdateExtra(idx, amt, name) => match original_copy.extras.get_mut(idx) {
match original_copy.extras.get_mut(idx) { Some(extra) => {
Some(extra) => { extra.0 = amt;
extra.0 = amt; extra.1 = name;
extra.1 = name;
}
None => {
throw_str("Attempted to remove extra that didn't exist");
}
} }
self.local_store.set_inventory_data(( None => {
&original_copy.filtered_ingredients, throw_str("Attempted to remove extra that didn't exist");
&original_copy.modified_amts, }
&original_copy.extras, },
))
}
Message::SaveRecipe(entry, callback) => { Message::SaveRecipe(entry, callback) => {
let recipe = let recipe_id = entry.recipe_id().to_owned();
parse::as_recipe(entry.recipe_text()).expect("Failed to parse RecipeEntry"); let recipe: Recipe = (&entry).try_into().expect("Failed to parse RecipeEntry");
original_copy original_copy.recipes.insert(recipe_id.clone(), recipe);
.recipes
.insert(entry.recipe_id().to_owned(), recipe);
if !original_copy.recipe_counts.contains_key(entry.recipe_id()) { if !original_copy.recipe_counts.contains_key(entry.recipe_id()) {
original_copy original_copy.recipe_counts.insert(recipe_id.clone(), 0);
.recipe_counts
.insert(entry.recipe_id().to_owned(), 0);
} }
if let Some(cat) = entry.category().cloned() { if let Some(cat) = entry.category().cloned() {
original_copy original_copy
.recipe_categories .recipe_categories
.entry(entry.recipe_id().to_owned()) .entry(recipe_id.clone())
.and_modify(|c| *c = cat.clone()) .and_modify(|c| *c = cat.clone())
.or_insert(cat); .or_insert(cat);
} }
let store = self.store.clone(); let store = self.store.clone();
self.local_store.set_recipe_entry(&entry); let local_store = self.local_store.clone();
spawn_local_scoped(cx, async move { spawn_local_scoped(cx, async move {
local_store.set_recipe_entry(&entry).await;
if let Err(e) = store.store_recipes(vec![entry]).await { if let Err(e) = store.store_recipes(vec![entry]).await {
// FIXME(jwall): We should have a global way to trigger error messages // FIXME(jwall): We should have a global way to trigger error messages
error!(err=?e, "Unable to save Recipe"); error!(err=?e, "Unable to save Recipe");
// FIXME(jwall): This should be an error message // FIXME(jwall): This should be an error message
components::toast::error_message(cx, "Failed to save Recipe", None);
} else { } else {
components::toast::message(cx, "Saved Recipe", None);
} }
callback.map(|f| f()); callback.map(|f| f());
}); });
@ -391,21 +367,17 @@ impl MessageMapper<Message, AppState> for StateMachine {
Message::RemoveRecipe(recipe, callback) => { Message::RemoveRecipe(recipe, callback) => {
original_copy.recipe_counts.remove(&recipe); original_copy.recipe_counts.remove(&recipe);
original_copy.recipes.remove(&recipe); original_copy.recipes.remove(&recipe);
self.local_store.delete_recipe_entry(&recipe);
let store = self.store.clone(); let store = self.store.clone();
let local_store = self.local_store.clone();
spawn_local_scoped(cx, async move { spawn_local_scoped(cx, async move {
local_store.delete_recipe_entry(&recipe).await;
if let Err(err) = store.delete_recipe(&recipe).await { if let Err(err) = store.delete_recipe(&recipe).await {
error!(?err, "Failed to delete recipe"); error!(?err, "Failed to delete recipe");
components::toast::error_message(cx, "Unable to delete recipe", None);
} else {
components::toast::message(cx, "Deleted Recipe", None);
} }
callback.map(|f| f()); callback.map(|f| f());
}); });
} }
Message::UpdateCategory(ingredient, category, callback) => { Message::UpdateCategory(ingredient, category, callback) => {
self.local_store
.set_categories(Some(&vec![(ingredient.clone(), category.clone())]));
original_copy original_copy
.category_map .category_map
.insert(ingredient.clone(), category.clone()); .insert(ingredient.clone(), category.clone());
@ -421,49 +393,42 @@ impl MessageMapper<Message, AppState> for StateMachine {
original_copy.filtered_ingredients = BTreeSet::new(); original_copy.filtered_ingredients = BTreeSet::new();
original_copy.modified_amts = BTreeMap::new(); original_copy.modified_amts = BTreeMap::new();
original_copy.extras = Vec::new(); original_copy.extras = Vec::new();
self.local_store.set_inventory_data((
&original_copy.filtered_ingredients,
&original_copy.modified_amts,
&original_copy.extras,
));
components::toast::message(cx, "Reset Inventory", None);
} }
Message::AddFilteredIngredient(key) => { Message::AddFilteredIngredient(key) => {
original_copy.filtered_ingredients.insert(key); original_copy.filtered_ingredients.insert(key);
self.local_store.set_inventory_data(( }
&original_copy.filtered_ingredients, Message::RemoveFilteredIngredient(key) => {
&original_copy.modified_amts, original_copy.filtered_ingredients.remove(&key);
&original_copy.extras,
));
} }
Message::UpdateAmt(key, amt) => { Message::UpdateAmt(key, amt) => {
original_copy.modified_amts.insert(key, amt); original_copy.modified_amts.insert(key, amt);
self.local_store.set_inventory_data((
&original_copy.filtered_ingredients,
&original_copy.modified_amts,
&original_copy.extras,
));
} }
Message::SetUserData(user_data) => { Message::SetUserData(user_data) => {
self.local_store.set_user_data(Some(&user_data)); let local_store = self.local_store.clone();
original_copy.auth = Some(user_data); original_copy.auth = Some(user_data.clone());
spawn_local_scoped(cx, async move {
local_store.set_user_data(Some(&user_data)).await;
});
} }
Message::SaveState(f) => { Message::SaveState(f) => {
let mut original_copy = original_copy.clone(); let mut original_copy = original_copy.clone();
let store = self.store.clone(); let store = self.store.clone();
let local_store = self.local_store.clone();
spawn_local_scoped(cx, async move { spawn_local_scoped(cx, async move {
if original_copy.selected_plan_date.is_none() { if original_copy.selected_plan_date.is_none() {
original_copy.selected_plan_date = Some(chrono::Local::now().date_naive()); original_copy.selected_plan_date = Some(chrono::Local::now().date_naive());
} }
original_copy original_copy.plan_dates.insert(
.plan_dates original_copy
.insert(original_copy.selected_plan_date.map(|d| d.clone()).unwrap()); .selected_plan_date
.as_ref()
.map(|d| d.clone())
.unwrap(),
);
if let Err(e) = store.store_app_state(&original_copy).await { if let Err(e) = store.store_app_state(&original_copy).await {
error!(err=?e, "Error saving app state"); error!(err=?e, "Error saving app state");
components::toast::error_message(cx, "Failed to save user state", None);
} else {
components::toast::message(cx, "Saved user state", None);
}; };
local_store.store_app_state(&original_copy).await;
original.set(original_copy); original.set(original_copy);
f.map(|f| f()); f.map(|f| f());
}); });
@ -474,17 +439,10 @@ impl MessageMapper<Message, AppState> for StateMachine {
Message::LoadState(f) => { Message::LoadState(f) => {
let store = self.store.clone(); let store = self.store.clone();
let local_store = self.local_store.clone(); let local_store = self.local_store.clone();
debug!("Loading user state.");
spawn_local_scoped(cx, async move { spawn_local_scoped(cx, async move {
if let Err(err) = Self::load_state(&store, &local_store, original).await { if let Err(err) = Self::load_state(&store, &local_store, original).await {
error!(?err, "Failed to load user state"); error!(?err, "Failed to load user state");
components::toast::error_message(cx, "Failed to load_state.", None);
} else {
components::toast::message(cx, "Loaded user state", None);
local_store.set_inventory_data((
&original.get().filtered_ingredients,
&original.get().modified_amts,
&original.get().extras,
));
} }
f.map(|f| f()); f.map(|f| f());
}); });
@ -492,47 +450,47 @@ impl MessageMapper<Message, AppState> for StateMachine {
} }
Message::UpdateStaples(content, callback) => { Message::UpdateStaples(content, callback) => {
let store = self.store.clone(); let store = self.store.clone();
let local_store = self.local_store.clone();
spawn_local_scoped(cx, async move { spawn_local_scoped(cx, async move {
local_store.set_staples(&content);
if let Err(err) = store.store_staples(content).await { if let Err(err) = store.store_staples(content).await {
error!(?err, "Failed to store staples"); error!(?err, "Failed to store staples");
components::toast::error_message(cx, "Failed to store staples", None);
} else { } else {
components::toast::message(cx, "Updated staples", None);
callback.map(|f| f()); callback.map(|f| f());
} }
}); });
return; return;
} }
Message::UpdateUseStaples(value) => {
original_copy.use_staples = value;
}
Message::SelectPlanDate(date, callback) => { Message::SelectPlanDate(date, callback) => {
let store = self.store.clone(); let store = self.store.clone();
let local_store = self.local_store.clone(); let local_store = self.local_store.clone();
spawn_local_scoped(cx, async move { spawn_local_scoped(cx, async move {
if let Some(mut plan) = store if let Ok(Some(mut plan)) = store
.fetch_plan_for_date(&date) .fetch_plan_for_date(&date)
.await .await
.expect("Failed to fetch plan for date")
{ {
// Note(jwall): This is a little unusual but because this // Note(jwall): This is a little unusual but because this
// is async code we can't rely on the set below. // is async code we can't rely on the set below.
original_copy.recipe_counts = original_copy.recipe_counts =
BTreeMap::from_iter(plan.drain(0..).map(|(k, v)| (k, v as usize))); BTreeMap::from_iter(plan.drain(0..).map(|(k, v)| (k, v as u32)));
let (filtered, modified, extras) = store
.fetch_inventory_for_date(&date)
.await
.expect("Failed to fetch inventory_data for date");
original_copy.modified_amts = modified;
original_copy.filtered_ingredients = filtered;
original_copy.extras = extras;
} else {
store.store_plan_for_date(Vec::new(), &date).await.expect("failed to set plan on server");
} }
let (filtered, modified, extras) = store
.fetch_inventory_for_date(&date)
.await
.expect("Failed to fetch inventory_data for date");
original_copy.plan_dates.insert(date.clone()); original_copy.plan_dates.insert(date.clone());
original_copy.modified_amts = modified; original_copy.selected_plan_date = Some(date.clone());
original_copy.filtered_ingredients = filtered;
original_copy.extras = extras;
local_store.set_plan_date(&date);
store store
.store_plan_for_date(vec![], &date) .store_plan_for_date(vec![], &date)
.await .await
.expect("Failed to init meal plan for date"); .expect("Failed to init meal plan for date");
local_store.store_app_state(&original_copy).await;
original.set(original_copy); original.set(original_copy);
callback.map(|f| f()); callback.map(|f| f());
@ -547,31 +505,33 @@ impl MessageMapper<Message, AppState> for StateMachine {
let local_store = self.local_store.clone(); let local_store = self.local_store.clone();
spawn_local_scoped(cx, async move { spawn_local_scoped(cx, async move {
if let Err(err) = store.delete_plan_for_date(&date).await { if let Err(err) = store.delete_plan_for_date(&date).await {
components::toast::error_message(
cx,
"Failed to delete meal plan for date",
None,
);
error!(?err, "Error deleting plan"); error!(?err, "Error deleting plan");
} else { } else {
local_store.delete_plan();
original_copy.plan_dates.remove(&date); original_copy.plan_dates.remove(&date);
// Reset all meal planning state; // Reset all meal planning state;
let _ = original_copy.recipe_counts.iter_mut().map(|(_, v)| *v = 0); let _ = original_copy.recipe_counts.iter_mut().map(|(_, v)| *v = 0);
original_copy.filtered_ingredients = BTreeSet::new(); original_copy.filtered_ingredients = BTreeSet::new();
original_copy.modified_amts = BTreeMap::new(); original_copy.modified_amts = BTreeMap::new();
original_copy.extras = Vec::new(); original_copy.extras = Vec::new();
local_store.store_app_state(&original_copy).await;
original.set(original_copy); original.set(original_copy);
components::toast::message(cx, "Deleted Plan", None);
callback.map(|f| f()); callback.map(|f| f());
} }
}); });
// NOTE(jwall): Because we do our signal set above in the async block
// we have to return here to avoid lifetime issues and double setting
// the original signal.
return; return;
} }
} }
original.set(original_copy); spawn_local_scoped(cx, {
let local_store = self.local_store.clone();
async move {
local_store.store_app_state(&original_copy).await;
original.set(original_copy);
}
});
} }
} }

View File

@ -42,18 +42,19 @@ pub fn AddRecipe<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View
} else { } else {
Some(category) Some(category)
}; };
RecipeEntry( RecipeEntry {
recipe_title id: recipe_title
.get() .get()
.as_ref() .as_ref()
.to_lowercase() .to_lowercase()
.replace(" ", "_") .replace(" ", "_")
.replace("\n", ""), .replace("\n", ""),
STARTER_RECIPE text: STARTER_RECIPE
.replace("TITLE_PLACEHOLDER", recipe_title.get().as_str()) .replace("TITLE_PLACEHOLDER", recipe_title.get().as_str())
.replace("\r", ""), .replace("\r", ""),
category, category,
) serving_count: None,
}
}); });
view! {cx, view! {cx,

View File

@ -49,7 +49,7 @@ fn CategoryRow<'ctx, G: Html>(cx: Scope<'ctx>, props: CategoryRowProps<'ctx>) ->
}); });
view! {cx, view! {cx,
tr() { tr() {
td() { td(class="margin-bot-1 border-bottom") {
(ingredient_clone) br() (ingredient_clone) br()
Indexed( Indexed(
iterable=recipes, iterable=recipes,

View File

@ -17,8 +17,8 @@ use sycamore::prelude::*;
#[component] #[component]
pub fn Footer<G: Html>(cx: Scope) -> View<G> { pub fn Footer<G: Html>(cx: Scope) -> View<G> {
view! {cx, view! {cx,
nav(class="no-print") { nav(class="no-print menu-font") {
ul { ul(class="no-list") {
li { a(href="https://github.com/zaphar/kitchen") { "On Github" } } li { a(href="https://github.com/zaphar/kitchen") { "On Github" } }
} }
} }

View File

@ -23,9 +23,9 @@ pub fn Header<'ctx, G: Html>(cx: Scope<'ctx>, h: StateHandler<'ctx>) -> View<G>
None => "Login".to_owned(), None => "Login".to_owned(),
}); });
view! {cx, view! {cx,
nav(class="no-print") { nav(class="no-print row-flex align-center header-bg heavy-bottom-border menu-font") {
h1(class="title") { "Kitchen" } h1(class="title") { "Kitchen" }
ul { ul(class="row-flex align-center no-list") {
li { a(href="/ui/planning/select") { "MealPlan" } } li { a(href="/ui/planning/select") { "MealPlan" } }
li { a(href="/ui/manage/ingredients") { "Manage" } } li { a(href="/ui/manage/ingredients") { "Manage" } }
li { a(href="/ui/login") { (login.get()) } } li { a(href="/ui/login") { (login.get()) } }

View File

@ -24,18 +24,7 @@ pub mod recipe_selection;
pub mod shopping_list; pub mod shopping_list;
pub mod staples; pub mod staples;
pub mod tabs; pub mod tabs;
pub mod toast;
pub use add_recipe::*;
pub use categories::*;
pub use footer::*;
pub use header::*; pub use header::*;
pub use number_field::*; pub use number_field::*;
pub use plan_list::*; pub use plan_list::*;
pub use recipe::*;
pub use recipe_list::*;
pub use recipe_plan::*;
pub use recipe_selection::*;
pub use shopping_list::*;
pub use staples::*;
pub use tabs::*;

View File

@ -11,18 +11,209 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
use maud::html;
use sycamore::prelude::*; use sycamore::prelude::*;
use tracing::debug; use tracing::{debug, error};
use web_sys::{Event, HtmlInputElement}; use wasm_bindgen::{JsCast, JsValue};
use wasm_web_component::{web_component, WebComponentBinding};
use web_sys::{CustomEvent, CustomEventInit, Event, HtmlElement, InputEvent, ShadowRoot};
use crate::js_lib; #[web_component(
observed_attrs = "['val', 'min', 'max', 'step']",
observed_events = "['change', 'click', 'input']"
)]
pub struct NumberSpinner {
root: Option<ShadowRoot>,
min: i32,
max: i32,
step: i32,
value: i32,
}
impl NumberSpinner {
fn get_input_el(&self) -> HtmlElement {
self.root
.as_ref()
.unwrap()
.get_element_by_id("nval")
.unwrap()
.dyn_into()
.unwrap()
}
}
impl WebComponentBinding for NumberSpinner {
fn init_mut(&mut self, element: &web_sys::HtmlElement) {
(self.min, self.max, self.step, self.value) = (0, 99, 1, 0);
debug!("Initializing element instance");
let root = html! {
span {
link rel="stylesheet" href="/ui/static/app.css" { };
style {
r#"
span { display: block; }
span.button {
font-size: 2em; font-weight: bold;
}
.number-input {
border-width: var(--border-width);
border-style: inset;
padding: 3pt;
border-radius: 10px;
width: 3em;
}
"#
};
span class="button" id="inc" { "+" }; " "
// TODO(jwall): plaintext-only would be nice but I can't actually do that yet.
span id="nval" class="number-input" contenteditable="true" { "0" } " "
span class="button" id="dec" { "-" };
};
};
self.attach_shadow(element, &root.into_string());
self.root = element.shadow_root();
}
fn connected_mut(&mut self, element: &HtmlElement) {
debug!("COUNTS: connecting to DOM");
let val = element.get_attribute("val").unwrap_or_else(|| "0".into());
let min = element.get_attribute("min").unwrap_or_else(|| "0".into());
let max = element.get_attribute("max").unwrap_or_else(|| "99".into());
let step = element.get_attribute("step").unwrap_or_else(|| "1".into());
debug!(?val, ?min, ?max, ?step, "connecting to DOM");
let nval_el = self.get_input_el();
if let Ok(parsed) = val.parse::<i32>() {
self.value = parsed;
nval_el.set_inner_text(&val);
}
if let Ok(parsed) = min.parse::<i32>() {
self.min = parsed;
}
if let Ok(parsed) = max.parse::<i32>() {
self.max = parsed;
}
if let Ok(parsed) = step.parse::<i32>() {
self.step = parsed;
}
}
fn handle_event_mut(&mut self, element: &web_sys::HtmlElement, event: &Event) {
let target: HtmlElement = event.target().unwrap().dyn_into().unwrap();
let id = target.get_attribute("id");
let event_type = event.type_();
let nval_el = self.get_input_el();
debug!(?id, ?event_type, "saw event");
match (id.as_ref().map(|s| s.as_str()), event_type.as_str()) {
(Some("inc"), "click") => {
if self.value < self.max {
self.value += 1;
nval_el.set_inner_text(&format!("{}", self.value));
}
}
(Some("dec"), "click") => {
if self.value > self.min {
self.value -= 1;
nval_el.set_inner_text(&format!("{}", self.value));
}
}
(Some("nval"), "input") => {
let input_event = event.dyn_ref::<InputEvent>().unwrap();
if let Some(data) = input_event.data() {
// We only allow numeric input data here.
debug!(data, input_type=?input_event.input_type() , "got input");
if data.chars().filter(|c| !c.is_numeric()).count() > 0 {
nval_el.set_inner_text(&format!("{}", self.value));
}
} else {
nval_el.set_inner_text(&format!("{}{}", nval_el.inner_text(), self.value));
}
}
_ => {
debug!("Ignoring event");
return;
}
};
let mut event_dict = CustomEventInit::new();
event_dict.detail(&JsValue::from_f64(self.value as f64));
element
.dispatch_event(&CustomEvent::new_with_event_init_dict("updated", &event_dict).unwrap())
.unwrap();
debug!("Dispatched updated event");
}
fn attribute_changed_mut(
&mut self,
_element: &web_sys::HtmlElement,
name: JsValue,
old_value: JsValue,
new_value: JsValue,
) {
let nval_el = self.get_input_el();
let name = name.as_string().unwrap();
debug!(
?name,
?old_value,
?new_value,
"COUNTS: handling attribute change"
);
match name.as_str() {
"val" => {
debug!("COUNTS: got an updated value");
if let Some(val) = new_value.as_string() {
debug!(val, "COUNTS: got an updated value");
if let Ok(val) = val.parse::<i32>() {
self.value = val;
nval_el.set_inner_text(format!("{}", self.value).as_str());
} else {
error!(?new_value, "COUNTS: Not a valid f64 value");
}
}
}
"min" => {
if let Some(val) = new_value.as_string() {
debug!(val, "COUNTS: got an updated value");
if let Ok(val) = val.parse::<i32>() {
self.min = val;
} else {
error!(?new_value, "COUNTS: Not a valid f64 value");
}
}
}
"max" => {
if let Some(val) = new_value.as_string() {
debug!(val, "COUNTS: got an updated value");
if let Ok(val) = val.parse::<i32>() {
self.max = val;
} else {
error!(?new_value, "COUNTS: Not a valid f64 value");
}
}
}
"step" => {
if let Some(val) = new_value.as_string() {
debug!(val, "COUNTS: got an updated value");
if let Ok(val) = val.parse::<i32>() {
self.step = val;
} else {
error!(?new_value, "COUNTS: Not a valid f64 value");
}
}
}
_ => {
debug!("Ignoring Attribute Change");
return;
}
}
}
}
#[derive(Props)] #[derive(Props)]
pub struct NumberProps<'ctx, F> pub struct NumberProps<'ctx, F>
where where
F: Fn(Event), F: Fn(CustomEvent),
{ {
name: String, name: String,
class: String,
on_change: Option<F>, on_change: Option<F>,
min: f64, min: f64,
counter: &'ctx Signal<f64>, counter: &'ctx Signal<f64>,
@ -31,44 +222,27 @@ where
#[component] #[component]
pub fn NumberField<'ctx, F, G: Html>(cx: Scope<'ctx>, props: NumberProps<'ctx, F>) -> View<G> pub fn NumberField<'ctx, F, G: Html>(cx: Scope<'ctx>, props: NumberProps<'ctx, F>) -> View<G>
where where
F: Fn(web_sys::Event) + 'ctx, F: Fn(CustomEvent) + 'ctx,
{ {
let NumberProps { let NumberProps {
name, name,
class,
on_change, on_change,
min, min,
counter, counter,
} = props; } = props;
NumberSpinner::define_once();
// TODO(jwall): I'm pretty sure this triggers: https://github.com/sycamore-rs/sycamore/issues/602
// Which means I probably have to wait till v0.9.0 drops or switch to leptos.
let id = name.clone(); let id = name.clone();
let inc_target_id = id.clone(); let initial_count = *counter.get();
let dec_target_id = id.clone();
let min_field = format!("{}", min);
view! {cx, view! {cx,
div() { number-spinner(id=id, class=(class), val=(initial_count), min=min, on:updated=move |evt: Event| {
input(type="number", id=id, name=name, class="item-count-sel", min=min_field, max="99", step="1", bind:valueAsNumber=counter, on:input=move |evt| { let event = evt.unchecked_into::<CustomEvent>();
on_change.as_ref().map(|f| f(evt)); let val: f64 = event.detail().as_f64().unwrap();
}) counter.set(val);
span(class="item-count-inc-dec", on:click=move |_| { on_change.as_ref().map(|f| f(event));
let i = *counter.get_untracked(); debug!(counter=%(counter.get_untracked()), "set counter to new value");
let target = js_lib::get_element_by_id::<HtmlInputElement>(&inc_target_id).unwrap().expect(&format!("No such element with id {}", inc_target_id)); })
counter.set(i+1.0);
debug!(counter=%(counter.get_untracked()), "set counter to new value");
// We force an input event to get triggered for our target.
target.dispatch_event(&web_sys::Event::new("input").expect("Failed to create new event")).expect("Failed to dispatch event to target");
}) { "" }
" "
span(class="item-count-inc-dec", on:click=move |_| {
let i = *counter.get_untracked();
let target = js_lib::get_element_by_id::<HtmlInputElement>(&dec_target_id).unwrap().expect(&format!("No such element with id {}", dec_target_id));
if i > min {
counter.set(i-1.0);
debug!(counter=%(counter.get_untracked()), "set counter to new value");
// We force an input event to get triggered for our target.
target.dispatch_event(&web_sys::Event::new("input").expect("Failed to create new event")).expect("Failed to dispatch event to target");
}
}) { "" }
}
} }
} }

View File

@ -1,4 +1,3 @@
use chrono::NaiveDate;
// Copyright 2023 Jeremy Wall (Jeremy@marzhilsltudios.com) // Copyright 2023 Jeremy Wall (Jeremy@marzhilsltudios.com)
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
@ -12,6 +11,7 @@ use chrono::NaiveDate;
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
use chrono::NaiveDate;
use sycamore::prelude::*; use sycamore::prelude::*;
use crate::app_state::{Message, StateHandler}; use crate::app_state::{Message, StateHandler};
@ -23,30 +23,25 @@ pub struct PlanListProps<'ctx> {
list: &'ctx ReadSignal<Vec<NaiveDate>>, list: &'ctx ReadSignal<Vec<NaiveDate>>,
} }
// TODO(jwall): We also need a "new plan button"
#[instrument(skip_all, fields(dates=?props.list))] #[instrument(skip_all, fields(dates=?props.list))]
#[component] #[component]
pub fn PlanList<'ctx, G: Html>(cx: Scope<'ctx>, props: PlanListProps<'ctx>) -> View<G> { pub fn PlanList<'ctx, G: Html>(cx: Scope<'ctx>, props: PlanListProps<'ctx>) -> View<G> {
let PlanListProps { sh, list } = props; let PlanListProps { sh, list } = props;
view! {cx, view! {cx,
div() { div() {
table() { div(class="column-flex") {
Indexed( Indexed(
iterable=list, iterable=list,
view=move |cx, date| { view=move |cx, date| {
let date_display = format!("{}", date); let date_display = format!("{}", date);
view!{cx, view!{cx,
tr() { div(class="row-flex margin-bot-half") {
td() { button(class="outline margin-right-1", on:click=move |_| {
span(role="button", class="outline", on:click=move |_| { sh.dispatch(cx, Message::SelectPlanDate(date, None))
sh.dispatch(cx, Message::SelectPlanDate(date, None)) }) { (date_display) }
}) { (date_display) } button(class="destructive", on:click=move |_| {
} sh.dispatch(cx, Message::DeletePlan(date, None))
td() { }) { "Delete Plan" }
span(role="button", class="destructive", on:click=move |_| {
sh.dispatch(cx, Message::DeletePlan(date, None))
}) { "Delete Plan" }
}
} }
} }
}, },

View File

@ -49,7 +49,15 @@ pub fn Editor<'ctx, G: Html>(cx: Scope<'ctx>, props: RecipeComponentProps<'ctx>)
let store = crate::api::HttpStore::get_from_context(cx); let store = crate::api::HttpStore::get_from_context(cx);
let recipe: &Signal<RecipeEntry> = let recipe: &Signal<RecipeEntry> =
create_signal(cx, RecipeEntry::new(&recipe_id, String::new())); create_signal(cx, RecipeEntry::new(&recipe_id, String::new()));
let text = create_signal(cx, String::new()); let text = create_signal(cx, String::from("0"));
let serving_count_str = create_signal(cx, String::new());
let serving_count = create_memo(cx, || {
if let Ok(count) = serving_count_str.get().parse::<i64>() {
count
} else {
0
}
});
let error_text = create_signal(cx, String::from("Parse results...")); let error_text = create_signal(cx, String::from("Parse results..."));
let aria_hint = create_signal(cx, "false"); let aria_hint = create_signal(cx, "false");
let category = create_signal(cx, "Entree".to_owned()); let category = create_signal(cx, "Entree".to_owned());
@ -79,12 +87,18 @@ pub fn Editor<'ctx, G: Html>(cx: Scope<'ctx>, props: RecipeComponentProps<'ctx>)
debug!("creating editor view"); debug!("creating editor view");
view! {cx, view! {cx,
label(for="recipe_category") { "Category" } div {
input(name="recipe_category", bind:value=category, on:change=move |_| dirty.set(true)) label(for="recipe_category") { "Category" }
div(class="grid") { input(name="recipe_category", bind:value=category, on:change=move |_| dirty.set(true))
div { }
label(for="recipe_text") { "Recipe" } div {
textarea(name="recipe_text", bind:value=text, aria-invalid=aria_hint.get(), rows=20, on:change=move |_| { label(for="serving_count") { "Serving Count" }
input(name="serving_count", bind:value=serving_count_str, on:change=move |_| dirty.set(true))
}
div {
div(class="row-flex") {
label(for="recipe_text", class="block align-stretch expand-height") { "Recipe: " }
textarea(class="width-third", name="recipe_text", bind:value=text, aria-invalid=aria_hint.get(), cols="50", rows=20, on:change=move |_| {
dirty.set(true); dirty.set(true);
check_recipe_parses(text.get_untracked().as_str(), error_text, aria_hint); check_recipe_parses(text.get_untracked().as_str(), error_text, aria_hint);
}, on:input=move |_| { }, on:input=move |_| {
@ -97,34 +111,37 @@ pub fn Editor<'ctx, G: Html>(cx: Scope<'ctx>, props: RecipeComponentProps<'ctx>)
} }
div(class="parse") { (error_text.get()) } div(class="parse") { (error_text.get()) }
} }
span(role="button", on:click=move |_| { div {
let unparsed = text.get_untracked(); button(on:click=move |_| {
if check_recipe_parses(unparsed.as_str(), error_text, aria_hint) { let unparsed = text.get_untracked();
debug!("triggering a save"); if check_recipe_parses(unparsed.as_str(), error_text, aria_hint) {
if !*dirty.get_untracked() { debug!("triggering a save");
debug!("Recipe text is unchanged"); if !*dirty.get_untracked() {
return; debug!("Recipe text is unchanged");
return;
}
debug!("Recipe text is changed");
let category = category.get_untracked();
let category = if category.is_empty() {
None
} else {
Some(category.as_ref().clone())
};
let recipe_entry = RecipeEntry {
id: id.get_untracked().as_ref().clone(),
text: text.get_untracked().as_ref().clone(),
category,
serving_count: Some(*serving_count.get()),
};
sh.dispatch(cx, Message::SaveRecipe(recipe_entry, None));
dirty.set(false);
} }
debug!("Recipe text is changed"); // TODO(jwall): Show error message if trying to save when recipe doesn't parse.
let category = category.get_untracked(); }) { "Save" } " "
let category = if category.is_empty() { button(on:click=move |_| {
None sh.dispatch(cx, Message::RemoveRecipe(id.get_untracked().as_ref().to_owned(), Some(Box::new(|| sycamore_router::navigate("/ui/planning/plan")))));
} else { }) { "delete" } " "
Some(category.as_ref().clone()) }
};
let recipe_entry = RecipeEntry(
id.get_untracked().as_ref().clone(),
text.get_untracked().as_ref().clone(),
category,
);
sh.dispatch(cx, Message::SaveRecipe(recipe_entry, None));
dirty.set(false);
}
// TODO(jwall): Show error message if trying to save when recipe doesn't parse.
}) { "Save" } " "
span(role="button", on:click=move |_| {
sh.dispatch(cx, Message::RemoveRecipe(id.get_untracked().as_ref().to_owned(), Some(Box::new(|| sycamore_router::navigate("/ui/planning/plan")))));
}) { "delete" } " "
} }
} }
@ -142,7 +159,7 @@ fn Steps<G: Html>(cx: Scope, steps: Vec<recipes::Step>) -> View<G> {
view! {cx, view! {cx,
div { div {
h3 { "Step " (idx + 1) } h3 { "Step " (idx + 1) }
ul(class="ingredients") { ul(class="ingredients no-list") {
(ingredient_fragments) (ingredient_fragments)
} }
div(class="instructions") { div(class="instructions") {
@ -166,18 +183,22 @@ pub fn Viewer<'ctx, G: Html>(cx: Scope<'ctx>, props: RecipeComponentProps<'ctx>)
let recipe_signal = sh.get_selector(cx, move |state| { let recipe_signal = sh.get_selector(cx, move |state| {
if let Some(recipe) = state.get().recipes.get(&recipe_id) { if let Some(recipe) = state.get().recipes.get(&recipe_id) {
let title = recipe.title.clone(); let title = recipe.title.clone();
let serving_count = recipe.serving_count.clone();
let desc = recipe.desc.clone().unwrap_or_else(|| String::new()); let desc = recipe.desc.clone().unwrap_or_else(|| String::new());
let steps = recipe.steps.clone(); let steps = recipe.steps.clone();
Some((title, desc, steps)) Some((title, serving_count, desc, steps))
} else { } else {
None None
} }
}); });
if let Some((title, desc, steps)) = recipe_signal.get().as_ref().clone() { if let Some((title, serving_count, desc, steps)) = recipe_signal.get().as_ref().clone() {
debug!("Viewing recipe."); debug!("Viewing recipe.");
view.set(view! {cx, view.set(view! {cx,
div(class="recipe") { div(class="recipe") {
h1(class="recipe_title") { (title) } h1(class="recipe_title") { (title) }
div(class="serving_count") {
"Serving Count: " (serving_count.map(|v| format!("{}", v)).unwrap_or_else(|| "Unconfigured".to_owned()))
}
div(class="recipe_description") { div(class="recipe_description") {
(desc) (desc)
} }

View File

@ -52,20 +52,26 @@ pub fn CategoryGroup<'ctx, G: Html>(
}); });
view! {cx, view! {cx,
h2 { (category) } h2 { (category) }
table(class="recipe_selector no-print") { div(class="no-print row-flex flex-wrap-start align-stretch") {
(View::new_fragment( (View::new_fragment(
rows.get().iter().cloned().map(|r| { rows.get().iter().cloned().map(|r| {
view ! {cx, view ! {cx,
tr { Keyed( Keyed(
iterable=r, iterable=r,
view=move |cx, sig| { view=move |cx, sig| {
let title = create_memo(cx, move || sig.get().1.title.clone()); let title = create_memo(cx, move || sig.get().1.title.clone());
let serving_count = create_memo(cx, move || sig.get().1.serving_count.clone());
view! {cx, view! {cx,
td { RecipeSelection(i=sig.get().0.to_owned(), title=title, sh=sh) } div(class="cell column-flex justify-end align-stretch") {
RecipeSelection(
i=sig.get().0.to_owned(),
title=title, sh=sh,
serving_count=serving_count,
) }
} }
}, },
key=|sig| sig.get().0.to_owned(), key=|sig| sig.get().0.to_owned(),
)} )
} }
}).collect() }).collect()
)) ))
@ -89,7 +95,7 @@ pub fn RecipePlan<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> Vie
.get() .get()
.recipes .recipes
.get(r) .get(r)
.expect("Failed to find recipe") .expect(&format!("Failed to find recipe {}", r))
.clone(), .clone(),
)); ));
map map
@ -108,13 +114,13 @@ pub fn RecipePlan<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> Vie
}, },
key=|(ref cat, _)| cat.clone(), key=|(ref cat, _)| cat.clone(),
) )
span(role="button", on:click=move |_| { button(on:click=move |_| {
sh.dispatch(cx, Message::LoadState(None)); sh.dispatch(cx, Message::LoadState(None));
}) { "Reset" } " " }) { "Reset" } " "
span(role="button", on:click=move |_| { button(on:click=move |_| {
sh.dispatch(cx, Message::ResetRecipeCounts); sh.dispatch(cx, Message::ResetRecipeCounts);
}) { "Clear All" } " " }) { "Clear All" } " "
span(role="button", on:click=move |_| { button(on:click=move |_| {
// Poor man's click event signaling. // Poor man's click event signaling.
sh.dispatch(cx, Message::SaveState(None)); sh.dispatch(cx, Message::SaveState(None));
}) { "Save Plan" } " " }) { "Save Plan" } " "

View File

@ -23,6 +23,7 @@ use crate::components::NumberField;
pub struct RecipeCheckBoxProps<'ctx> { pub struct RecipeCheckBoxProps<'ctx> {
pub i: String, pub i: String,
pub title: &'ctx ReadSignal<String>, pub title: &'ctx ReadSignal<String>,
pub serving_count: &'ctx ReadSignal<Option<i64>>,
pub sh: StateHandler<'ctx>, pub sh: StateHandler<'ctx>,
} }
@ -35,7 +36,7 @@ pub fn RecipeSelection<'ctx, G: Html>(
cx: Scope<'ctx>, cx: Scope<'ctx>,
props: RecipeCheckBoxProps<'ctx>, props: RecipeCheckBoxProps<'ctx>,
) -> View<G> { ) -> View<G> {
let RecipeCheckBoxProps { i, title, sh } = props; let RecipeCheckBoxProps { i, title, sh, serving_count, } = props;
let id = Rc::new(i); let id = Rc::new(i);
let id_for_count = id.clone(); let id_for_count = id.clone();
// NOTE(jwall): The below get's a little tricky. We need a separate signal to bind for the // NOTE(jwall): The below get's a little tricky. We need a separate signal to bind for the
@ -65,12 +66,13 @@ pub fn RecipeSelection<'ctx, G: Html>(
let name = format!("recipe_id:{}", id); let name = format!("recipe_id:{}", id);
let for_id = name.clone(); let for_id = name.clone();
view! {cx, view! {cx,
div() { label(for=for_id, class="flex-item-grow") { a(href=href) { (*title) } }
label(for=for_id) { a(href=href) { (*title) } } div {
NumberField(name=name, counter=count, min=0.0, on_change=Some(move |_| { "Serves: " (serving_count.get().map(|v| v.to_string()).unwrap_or("Unconfigured".to_owned()))
debug!(idx=%id, count=%(*count.get_untracked()), "setting recipe count");
sh.dispatch(cx, Message::UpdateRecipeCount(id.as_ref().clone(), *count.get_untracked() as usize));
}))
} }
NumberField(name=name, class="flex-item-shrink".to_string(), counter=count, min=0.0, on_change=Some(move |_| {
debug!(idx=%id, count=%(*count.get_untracked()), "setting recipe count");
sh.dispatch(cx, Message::UpdateRecipeCount(id.as_ref().clone(), *count.get_untracked() as u32));
}))
} }
} }

View File

@ -19,6 +19,115 @@ use tracing::{debug, info, instrument};
use crate::app_state::{Message, StateHandler}; use crate::app_state::{Message, StateHandler};
#[instrument(skip_all)]
fn make_deleted_ingredients_rows<'ctx, G: Html>(
cx: Scope<'ctx>,
sh: StateHandler<'ctx>,
show_staples: &'ctx ReadSignal<bool>,
) -> View<G> {
debug!("Making ingredients rows");
let ingredients = sh.get_selector(cx, move |state| {
let state = state.get();
let category_map = &state.category_map;
debug!("building ingredient list from state");
let mut acc = IngredientAccumulator::new();
for (id, count) in state.recipe_counts.iter() {
for _ in 0..(*count) {
acc.accumulate_from(
state
.recipes
.get(id)
.expect(&format!("No such recipe id exists: {}", id)),
);
}
}
if *show_staples.get() {
if let Some(staples) = &state.staples {
acc.accumulate_ingredients_for("Staples", staples.iter());
}
}
let mut ingredients = acc
.ingredients()
.into_iter()
// First we filter out any filtered ingredients
.filter(|(i, _)| state.filtered_ingredients.contains(i))
// Then we take into account our modified amts
.map(|(k, (i, rs))| {
let category = category_map
.get(&i.name)
.cloned()
.unwrap_or_else(|| String::new());
if state.modified_amts.contains_key(&k) {
(
k.clone(),
(
i.name,
i.form,
category,
state.modified_amts.get(&k).unwrap().clone(),
rs,
),
)
} else {
(
k.clone(),
(
i.name,
i.form,
category,
format!("{}", i.amt.normalize()),
rs,
),
)
}
})
.collect::<Vec<(
IngredientKey,
(String, Option<String>, String, String, BTreeSet<String>),
)>>();
ingredients.sort_by(|tpl1, tpl2| (&tpl1.1 .2, &tpl1.1 .0).cmp(&(&tpl2.1 .2, &tpl2.1 .0)));
ingredients
});
view!(
cx,
Indexed(
iterable = ingredients,
view = move |cx, (k, (name, form, category, amt, rs))| {
let category = if category == "" {
"other".to_owned()
} else {
category
};
let amt_signal = create_signal(cx, amt);
let k_clone = k.clone();
let form = form.map(|form| format!("({})", form)).unwrap_or_default();
let recipes = rs
.iter()
.fold(String::new(), |acc, s| format!("{}{},", acc, s))
.trim_end_matches(",")
.to_owned();
view! {cx,
tr {
td {
input(bind:value=amt_signal, class="width-5", type="text", on:change=move |_| {
sh.dispatch(cx, Message::UpdateAmt(k_clone.clone(), amt_signal.get_untracked().as_ref().clone()));
})
}
td {
input(type="button", class="fit-content no-print", value="Undo", on:click={
move |_| {
sh.dispatch(cx, Message::RemoveFilteredIngredient(k.clone()));
}})
}
td { (name) " " (form) "" br {} "" (category) "" }
td { (recipes) }
}
}
}
)
)
}
#[instrument(skip_all)] #[instrument(skip_all)]
fn make_ingredients_rows<'ctx, G: Html>( fn make_ingredients_rows<'ctx, G: Html>(
cx: Scope<'ctx>, cx: Scope<'ctx>,
@ -109,12 +218,12 @@ fn make_ingredients_rows<'ctx, G: Html>(
view! {cx, view! {cx,
tr { tr {
td { td {
input(bind:value=amt_signal, type="text", on:change=move |_| { input(bind:value=amt_signal, class="width-5", type="text", on:change=move |_| {
sh.dispatch(cx, Message::UpdateAmt(k_clone.clone(), amt_signal.get_untracked().as_ref().clone())); sh.dispatch(cx, Message::UpdateAmt(k_clone.clone(), amt_signal.get_untracked().as_ref().clone()));
}) })
} }
td { td {
input(type="button", class="no-print destructive", value="X", on:click={ input(type="button", class="fit-content no-print destructive", value="X", on:click={
move |_| { move |_| {
sh.dispatch(cx, Message::AddFilteredIngredient(k.clone())); sh.dispatch(cx, Message::AddFilteredIngredient(k.clone()));
}}) }})
@ -143,14 +252,14 @@ fn make_extras_rows<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> V
view! {cx, view! {cx,
tr { tr {
td { td {
input(bind:value=amt_signal, type="text", on:change=move |_| { input(bind:value=amt_signal, class="width-5", type="text", on:change=move |_| {
sh.dispatch(cx, Message::UpdateExtra(idx, sh.dispatch(cx, Message::UpdateExtra(idx,
amt_signal.get_untracked().as_ref().clone(), amt_signal.get_untracked().as_ref().clone(),
name_signal.get_untracked().as_ref().clone())); name_signal.get_untracked().as_ref().clone()));
}) })
} }
td { td {
input(type="button", class="no-print destructive", value="X", on:click=move |_| { input(type="button", class="fit-content no-print destructive", value="X", on:click=move |_| {
sh.dispatch(cx, Message::RemoveExtra(idx)); sh.dispatch(cx, Message::RemoveExtra(idx));
}) })
} }
@ -191,24 +300,49 @@ fn make_shopping_table<'ctx, G: Html>(
} }
} }
/// Renders the "Deleted Items" section of the shopping-list page.
///
/// Layout mirrors the main shopping table (Quantity / Delete / Ingredient /
/// Recipes columns) so the two tables line up visually. Row content comes
/// from `make_deleted_ingredients_rows`, which also receives the
/// `show_staples` signal so staple filtering applies to deleted rows too.
fn make_deleted_items_table<'ctx, G: Html>(
    cx: Scope<'ctx>,
    sh: StateHandler<'ctx>,
    // Reactive flag controlling whether staple ingredients are shown;
    // only forwarded to the row builder, not read directly here.
    show_staples: &'ctx ReadSignal<bool>,
) -> View<G> {
    view! {cx,
        h2 { "Deleted Items" }
        // `page-breaker` presumably forces a print page break before this
        // table — TODO confirm against the stylesheet.
        table(class="pad-top shopping-list page-breaker container-fluid", role="grid") {
            tr {
                th { " Quantity " }
                th { " Delete " }
                th { " Ingredient " }
                th { " Recipes " }
            }
            tbody {
                (make_deleted_ingredients_rows(cx, sh, show_staples))
            }
        }
    }
}
#[instrument(skip_all)] #[instrument(skip_all)]
#[component] #[component]
pub fn ShoppingList<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View<G> { pub fn ShoppingList<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View<G> {
let show_staples = create_signal(cx, true); let show_staples = sh.get_selector(cx, |state| state.get().use_staples);
view! {cx, view! {cx,
h1 { "Shopping List " } h1 { "Shopping List " }
label(for="show_staples_cb") { "Show staples" } label(for="show_staples_cb") { "Show staples" }
input(id="show_staples_cb", type="checkbox", bind:checked=show_staples) input(id="show_staples_cb", type="checkbox", checked=*show_staples.get(), on:change=move|_| {
let value = !*show_staples.get_untracked();
sh.dispatch(cx, Message::UpdateUseStaples(value));
})
(make_shopping_table(cx, sh, show_staples)) (make_shopping_table(cx, sh, show_staples))
span(role="button", class="no-print", on:click=move |_| { (make_deleted_items_table(cx, sh, show_staples))
button(class="no-print", on:click=move |_| {
info!("Registering add item request for inventory"); info!("Registering add item request for inventory");
sh.dispatch(cx, Message::AddExtra(String::new(), String::new())); sh.dispatch(cx, Message::AddExtra(String::new(), String::new()));
}) { "Add Item" } " " }) { "Add Item" } " "
span(role="button", class="no-print", on:click=move |_| { button(class="no-print", on:click=move |_| {
info!("Registering reset request for inventory"); info!("Registering reset request for inventory");
sh.dispatch(cx, Message::ResetInventory); sh.dispatch(cx, Message::ResetInventory);
}) { "Reset" } " " }) { "Reset" } " "
span(role="button", class="no-print", on:click=move |_| { button(class="no-print", on:click=move |_| {
info!("Registering save request for inventory"); info!("Registering save request for inventory");
sh.dispatch(cx, Message::SaveState(None)); sh.dispatch(cx, Message::SaveState(None));
}) { "Save" } " " }) { "Save" } " "

View File

@ -72,8 +72,8 @@ pub fn IngredientsEditor<'ctx, G: Html>(
debug!("creating editor view"); debug!("creating editor view");
view! {cx, view! {cx,
div(class="grid") { div {
textarea(bind:value=text, aria-invalid=aria_hint.get(), rows=20, on:change=move |_| { textarea(class="width-third", bind:value=text, aria-invalid=aria_hint.get(), rows=20, on:change=move |_| {
dirty.set(true); dirty.set(true);
}, on:input=move |_| { }, on:input=move |_| {
let current_ts = js_lib::get_ms_timestamp(); let current_ts = js_lib::get_ms_timestamp();
@ -84,7 +84,7 @@ pub fn IngredientsEditor<'ctx, G: Html>(
}) })
div(class="parse") { (error_text.get()) } div(class="parse") { (error_text.get()) }
} }
span(role="button", on:click=move |_| { button(on:click=move |_| {
let unparsed = text.get(); let unparsed = text.get();
if !*dirty.get_untracked() { if !*dirty.get_untracked() {
debug!("Staples text is unchanged"); debug!("Staples text is unchanged");

View File

@ -47,12 +47,12 @@ pub fn TabbedView<'a, G: Html>(cx: Scope<'a>, state: TabState<'a, G>) -> View<G>
.collect(), .collect(),
); );
view! {cx, view! {cx,
nav { nav(class="menu-bg menu-font-2 flex-item-shrink") {
ul(class="tabs") { ul(class="tabs pad-left no-list row-flex align-center") {
(menu) (menu)
} }
} }
main(class=".conatiner-fluid") { main(class="flex-item-grow content-font") {
(children) (children)
} }
} }

View File

@ -1,79 +0,0 @@
// Copyright 2023 Jeremy Wall (Jeremy@marzhilsltudios.com)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.\
use sycamore::{easing, motion, prelude::*};
use tracing::debug;
use wasm_bindgen::UnwrapThrowExt;
// DOM id of the section that toast messages are appended into; looked up
// by `show_toast` when inserting and removing toast elements.
const SECTION_ID: &'static str = "toast-container";

/// Empty container component that toasts are dynamically appended to.
/// Mount this once somewhere in the page so `show_toast` can find it by id.
#[component]
pub fn Container<'a, G: Html>(cx: Scope<'a>) -> View<G> {
    view! {cx,
        section(id=SECTION_ID) { }
    }
}
/// Builds an `<output role="status">` element containing `msg` as a text
/// node, with the supplied CSS `class` applied.
///
/// Panics (via `expect`/`unwrap_throw`) if no window/document is available
/// or if any DOM mutation fails — acceptable in a wasm browser context.
pub fn create_output_element(msg: &str, class: &str) -> web_sys::Element {
    let document = web_sys::window()
        .expect("No window present")
        .document()
        .expect("No document in window");
    let element = document.create_element("output").unwrap_throw();
    // role="status" makes screen readers announce the toast politely.
    element.set_attribute("role", "status").unwrap_throw();
    element.set_attribute("class", class).unwrap_throw();
    let text_node = document.create_text_node(msg);
    element.append_child(&text_node).unwrap_throw();
    element
}
/// Appends a toast message to the `#toast-container` section and schedules
/// removal of the oldest toast after `timeout` (default: 3 seconds).
///
/// Panics if the toast container element is missing from the document.
fn show_toast<'a>(cx: Scope<'a>, msg: &str, class: &str, timeout: Option<chrono::Duration>) {
    let timeout = timeout.unwrap_or_else(|| chrono::Duration::seconds(3));
    // Insert a toast output element into the container.
    // The tweened signal is used purely as a timer: it animates 0.0 -> 1.0
    // over `timeout`, and the effect below fires once tweening finishes.
    let tweened = motion::create_tweened_signal(
        cx,
        0.0 as f32,
        timeout
            .to_std()
            .expect("Failed to convert timeout duration."),
        easing::quad_in,
    );
    tweened.set(1.0);
    create_effect_scoped(cx, move |_cx| {
        if !tweened.is_tweening() {
            debug!("Detected message timeout.");
            let container = crate::js_lib::get_element_by_id::<web_sys::HtmlElement>(SECTION_ID)
                .expect("Failed to get toast-container")
                .expect("No toast-container");
            // NOTE(review): removal is FIFO — the first child is removed, which
            // is assumed to be the oldest toast since new ones append at the end.
            if let Some(node_to_remove) = container.first_element_child() {
                // Always remove the first child if there is one.
                container.remove_child(&node_to_remove).unwrap_throw();
            }
        }
    });
    let output_element = create_output_element(msg, class);
    crate::js_lib::get_element_by_id::<web_sys::HtmlElement>(SECTION_ID)
        .expect("Failed to get toast-container")
        .expect("No toast-container")
        // Always append after the last child.
        .append_child(&output_element)
        .unwrap_throw();
}
/// Shows an informational toast (`class="toast"`) for `timeout`
/// (defaults to 3 seconds when `None`).
pub fn message<'a>(cx: Scope<'a>, msg: &str, timeout: Option<chrono::Duration>) {
    show_toast(cx, msg, "toast", timeout);
}
/// Shows an error-styled toast (`class="toast error"`) for `timeout`
/// (defaults to 3 seconds when `None`).
pub fn error_message<'a>(cx: Scope<'a>, msg: &str, timeout: Option<chrono::Duration>) {
    show_toast(cx, msg, "toast error", timeout);
}

View File

@ -11,33 +11,142 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
use anyhow::{Context, Result};
use indexed_db::{self, Database, Factory, Transaction};
use js_sys::Date; use js_sys::Date;
use wasm_bindgen::JsCast; use std::collections::HashSet;
use web_sys::{window, Element, Storage}; use std::future::Future;
use tracing::error;
use web_sys::{window, Window};
pub fn get_storage() -> Storage { pub fn get_storage() -> web_sys::Storage {
window() get_window()
.expect("No Window Present")
.local_storage() .local_storage()
.expect("Failed to get storage") .expect("Failed to get storage")
.expect("No storage available") .expect("No storage available")
} }
// Object store (and, via `DBFactory::default`, database) name for serialized
// app/user state. This store uses out-of-line keys (see `version1_setup`).
pub const STATE_STORE_NAME: &'static str = "state-store";
// Object store holding recipes, with secondary indexes on category and
// serving count (see `version1_setup`).
pub const RECIPE_STORE_NAME: &'static str = "recipe-store";
// Index name over the recipe store's `serving_count` field.
pub const SERVING_COUNT_IDX: &'static str = "recipe-serving-count";
// Index name over the recipe store's `category` field.
pub const CATEGORY_IDX: &'static str = "recipe-category";
// Current IndexedDB schema version requested when opening the database.
pub const DB_VERSION: u32 = 1;
/// Factory for opening a named IndexedDB database at a specific schema
/// version, with typed read-only / read-write transaction helpers.
#[derive(Clone, Debug)]
pub struct DBFactory<'name> {
    // Database name passed to IndexedDB `open`.
    name: &'name str,
    // Requested schema version; `None` falls back to a default at open time.
    version: Option<u32>,
}
impl Default for DBFactory<'static> {
    /// Factory for the application's primary database at the current
    /// schema version.
    // NOTE(review): the database name reuses STATE_STORE_NAME ("state-store"),
    // i.e. the db shares its name with one of its object stores — presumably
    // intentional; confirm before renaming either constant.
    fn default() -> Self {
        Self {
            name: STATE_STORE_NAME,
            version: Some(DB_VERSION),
        }
    }
}
/// Creates the version-1 object stores and their indexes.
///
/// `stores` holds the names of object stores that already exist in the
/// database, so this setup is idempotent: each store is only created when
/// it is not already present.
async fn version1_setup<'db>(
    stores: &HashSet<String>,
    db: &'db Database<std::io::Error>,
) -> Result<(), indexed_db::Error<std::io::Error>> {
    // We use out of line keys for this object store
    if !stores.contains(STATE_STORE_NAME) {
        db.build_object_store(STATE_STORE_NAME).create()?;
    }
    // The recipe store gets secondary indexes over its `category` and
    // `serving_count` fields.
    if !stores.contains(RECIPE_STORE_NAME) {
        let store = db.build_object_store(RECIPE_STORE_NAME).create()?;
        store.build_index(CATEGORY_IDX, "category").create()?;
        store.build_index(SERVING_COUNT_IDX, "serving_count").create()?;
    }
    Ok(())
}
impl<'name> DBFactory<'name> {
pub async fn get_indexed_db(&self) -> Result<Database<std::io::Error>> {
let factory = Factory::<std::io::Error>::get().context("opening IndexedDB")?;
let db = factory
.open(self.name, self.version.unwrap_or(0), |evt| async move {
// NOTE(zaphar): This is the on upgradeneeded handler. It get's called on new databases or
// databases with an older version than the one we requested to build.
let db = evt.database();
let stores = db
.object_store_names()
.into_iter()
.collect::<HashSet<String>>();
// NOTE(jwall): This needs to be somewhat clever in handling version upgrades.
if db.version() > 0 {
version1_setup(&stores, db).await?;
}
Ok(())
})
.await
.context(format!("Opening or creating the database {}", self.name))?;
Ok(db)
}
pub async fn rw_transaction<Fun, RetFut, Ret>(
&self,
stores: &[&str],
transaction: Fun,
) -> indexed_db::Result<Ret, std::io::Error>
where
Fun: 'static + FnOnce(Transaction<std::io::Error>) -> RetFut,
RetFut: 'static + Future<Output = indexed_db::Result<Ret, std::io::Error>>,
Ret: 'static,
{
self.get_indexed_db()
.await
.expect("Failed to open database")
.transaction(stores)
.rw()
.run(transaction)
.await
}
pub async fn ro_transaction<Fun, RetFut, Ret>(
&self,
stores: &[&str],
transaction: Fun,
) -> indexed_db::Result<Ret, std::io::Error>
where
Fun: 'static + FnOnce(Transaction<std::io::Error>) -> RetFut,
RetFut: 'static + Future<Output = indexed_db::Result<Ret, std::io::Error>>,
Ret: 'static,
{
self.get_indexed_db()
.await
.expect("Failed to open database")
.transaction(stores)
.run(transaction)
.await
}
}
pub fn get_ms_timestamp() -> u32 { pub fn get_ms_timestamp() -> u32 {
Date::new_0().get_milliseconds() Date::new_0().get_milliseconds()
} }
pub fn get_element_by_id<E>(id: &str) -> Result<Option<E>, Element> pub fn get_window() -> Window {
window().expect("No window present")
}
pub trait LogFailures<V, E> {
fn swallow_and_log(self);
}
impl<E> LogFailures<(), E> for Result<(), E>
where where
E: JsCast, E: std::fmt::Debug,
{ {
match window() fn swallow_and_log(self) {
.expect("No window present") if let Err(e) = self {
.document() error!(err = ?e, "Error: ");
.expect("No document in window") }
.get_element_by_id(id)
{
Some(e) => e.dyn_into::<E>().map(|e| Some(e)),
None => Ok(None),
} }
} }

View File

@ -15,6 +15,7 @@ mod api;
mod app_state; mod app_state;
mod components; mod components;
mod js_lib; mod js_lib;
mod linear;
mod pages; mod pages;
mod routing; mod routing;
mod web; mod web;

54
web/src/linear.rs Normal file
View File

@ -0,0 +1,54 @@
// Copyright 2022 Jeremy Wall
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::convert::Into;
use std::ops::Drop;
use std::rc::Rc;
use sycamore::prelude::*;
/// Wrapper around a Sycamore `Signal` that stages at most one new value and
/// only publishes it to the signal when the wrapper is dropped (see the
/// `Drop` impl), giving an update-then-commit-on-scope-exit pattern.
pub struct LinearSignal<'ctx, Payload> {
    pub signal: &'ctx Signal<Payload>,
    // Pending value staged by `update`; committed to `signal` on drop.
    nv: Option<Payload>,
}
/// Wraps a `Signal` reference with no pending value staged.
///
/// Implemented as `From` rather than a hand-written `Into` (clippy's
/// `from_over_into` idiom): the standard blanket impl still gives callers
/// `signal.into()`, and `LinearSignal::from(signal)` also becomes available.
impl<'ctx, Payload> From<&'ctx Signal<Payload>> for LinearSignal<'ctx, Payload> {
    fn from(signal: &'ctx Signal<Payload>) -> Self {
        LinearSignal { signal, nv: None }
    }
}
impl<'ctx, Payload> LinearSignal<'ctx, Payload> {
    /// Stages `payload` as the pending value; it is published to the
    /// underlying signal when this wrapper is dropped. Consumes and
    /// returns `self` so calls can be chained.
    pub fn update(mut self, payload: Payload) -> Self {
        self.nv = Some(payload);
        // Idiomatic tail expression instead of an explicit `return`.
        self
    }

    /// Reads the signal's current, already-published value. A value staged
    /// via `update` is NOT visible here until the wrapper is dropped.
    pub fn get(&'ctx self) -> Rc<Payload> {
        self.signal.get()
    }
}
impl<'ctx, Payload> Drop for LinearSignal<'ctx, Payload> {
    /// Publishes the staged value (if any) to the wrapped signal.
    fn drop(&mut self) {
        // `Option::take` moves the pending value out and leaves `None`,
        // replacing the original manual `mem::swap` dance with the same
        // behavior in one idiomatic call.
        if let Some(payload) = self.nv.take() {
            self.signal.set(payload);
        }
    }
}

View File

@ -27,9 +27,13 @@ pub fn LoginForm<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View
input(type="text", id="username", bind:value=username) input(type="text", id="username", bind:value=username)
label(for="password") { "Password" } label(for="password") { "Password" }
input(type="password", bind:value=password) input(type="password", bind:value=password)
span(role="button", on:click=move |_| { button(on:click=move |evt: web_sys::Event| {
info!("Attempting login request"); info!("Attempting login request");
let (username, password) = ((*username.get_untracked()).clone(), (*password.get_untracked()).clone()); let (username, password) = ((*username.get_untracked()).clone(), (*password.get_untracked()).clone());
// NOTE(jwall): This is required if we want to keep the below auth request from
// failing to send with blocked by browser. This is because it's on a click and
// the form tries to do a submit event and aborts our network request.
evt.prevent_default();
if username != "" && password != "" { if username != "" && password != "" {
spawn_local_scoped(cx, async move { spawn_local_scoped(cx, async move {
let store = crate::api::HttpStore::get_from_context(cx); let store = crate::api::HttpStore::get_from_context(cx);

View File

@ -18,9 +18,13 @@ use crate::{app_state::StateHandler, components::recipe_list::*};
#[component] #[component]
pub fn CookPage<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View<G> { pub fn CookPage<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View<G> {
let current_plan = sh.get_selector(cx, |state| {
state.get().selected_plan_date
});
view! {cx, view! {cx,
PlanningPage( PlanningPage(
selected=Some("Cook".to_owned()), selected=Some("Cook".to_owned()),
plan_date = current_plan,
) { RecipeList(sh) } ) { RecipeList(sh) }
} }
} }

View File

@ -18,9 +18,13 @@ use crate::{app_state::StateHandler, components::shopping_list::*};
#[component] #[component]
pub fn InventoryPage<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View<G> { pub fn InventoryPage<'ctx, G: Html>(cx: Scope<'ctx>, sh: StateHandler<'ctx>) -> View<G> {
let current_plan = sh.get_selector(cx, |state| {
state.get().selected_plan_date
});
view! {cx, view! {cx,
PlanningPage( PlanningPage(
selected=Some("Inventory".to_owned()), selected=Some("Inventory".to_owned()),
plan_date = current_plan,
) { ShoppingList(sh) } ) { ShoppingList(sh) }
} }
} }

Some files were not shown because too many files have changed in this diff Show More