From 2221dca277bb40ca69bf57ff3528d681e3126655 Mon Sep 17 00:00:00 2001 From: Jeremy Wall Date: Wed, 23 Jul 2025 16:50:46 -0400 Subject: [PATCH] wip: indexeddb implementation --- Cargo.lock | 139 +++++ Makefile | 17 + offline-web-storage/Cargo.toml | 16 +- offline-web-storage/src/indexeddb.rs | 557 ++++++++++++++++++ .../src/integration_tests/indexeddb.rs | 51 ++ .../mod.rs} | 63 +- .../src/integration_tests/sqlite.rs | 67 +++ offline-web-storage/src/lib.rs | 29 +- offline-web-storage/src/sqlite.rs | 137 ++--- 9 files changed, 951 insertions(+), 125 deletions(-) create mode 100644 offline-web-storage/src/integration_tests/indexeddb.rs rename offline-web-storage/src/{integration_tests.rs => integration_tests/mod.rs} (74%) create mode 100644 offline-web-storage/src/integration_tests/sqlite.rs diff --git a/Cargo.lock b/Cargo.lock index 7c06982..d0d5593 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -57,6 +57,28 @@ version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +[[package]] +name = "async-stream" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + [[package]] name = "async-trait" version = "0.1.88" @@ -891,6 +913,20 @@ dependencies = [ "zerovec", ] +[[package]] +name = "idb" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3afe8830d5802f769dc0be20a87f9f116798c896650cb6266eb5c19a3c109eed" +dependencies = [ + "js-sys", + "num-traits", + "thiserror 1.0.69", + "tokio", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "idna" version = "1.0.3" @@ -1032,6 +1068,16 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" +[[package]] +name = "minicov" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f27fe9f1cc3c22e1687f9446c2083c4c5fc7f0bcf1c7a86bdbded14985895b4b" +dependencies = [ + "cc", + "walkdir", +] + [[package]] name = "minimal-lexical" version = "0.2.1" @@ -1153,14 +1199,19 @@ dependencies = [ "anyhow", "blake2", "chrono", + "idb", "offline-web-model", "serde", "serde_json", "sqlx", "thiserror 2.0.12", "tokio", + "tokio-test", "uuid", "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-bindgen-test", + "web-sys", ] [[package]] @@ -1470,6 +1521,15 @@ version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + [[package]] name = "scopeguard" version = "1.2.0" @@ -2026,6 +2086,19 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-test" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7" +dependencies = [ + "async-stream", + "bytes", + "futures-core", + "tokio", + "tokio-stream", +] + [[package]] name = "tokio-tungstenite" version = "0.24.0" @@ -2219,6 +2292,16 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -2266,6 +2349,19 @@ dependencies = [ "wasm-bindgen-shared", ] +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "wasm-bindgen-macro" version = "0.2.100" @@ -2298,6 +2394,40 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-bindgen-test" +version = "0.3.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66c8d5e33ca3b6d9fa3b4676d774c5778031d27a578c2b007f905acf816152c3" +dependencies = [ + "js-sys", + "minicov", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-bindgen-test-macro", +] + +[[package]] +name = "wasm-bindgen-test-macro" +version = "0.3.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17d5042cc5fa009658f9a7333ef24291b1291a25b6382dd68862a7f3b969f69b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "web-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "webpki-roots" version = "0.25.4" @@ -2314,6 +2444,15 @@ dependencies = [ "wasite", ] +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + [[package]] name = "windows-core" version = "0.61.2" diff --git a/Makefile b/Makefile index 6f5c810..16fc7c2 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,5 @@ +all: build test + build: wasm native native: model-native storage-native @@ -14,3 +16,18 @@ model-wasm: cargo build --target=wasm32-unknown-unknown --features wasm -p offline-web-model --verbose storage-wasm: cargo build --target=wasm32-unknown-unknown --features wasm -p offline-web-storage --verbose + +test: test-native test-wasm + +test-native: test-model-native test-storage-native + +test-model-native: + cargo test --features native -p offline-web-model + +test-storage-native: + cargo test --features native -p offline-web-storage + +test-wasm: test-storage-wasm + +test-storage-wasm: + cd offline-web-storage && wasm-pack test --headless --firefox --features wasm diff --git a/offline-web-storage/Cargo.toml b/offline-web-storage/Cargo.toml index 7431420..86d8455 100644 --- a/offline-web-storage/Cargo.toml +++ b/offline-web-storage/Cargo.toml @@ -3,14 +3,11 @@ name = "offline-web-storage" version = "0.1.0" edition = "2021" -[features] -native = ["sqlx", 
"tokio"] -wasm = ["offline-web-model/wasm", "uuid/js"] - [dependencies] offline-web-model = { path = "../offline-web-model" } sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "sqlite", "uuid", "chrono"], optional=true } tokio = { version = "1.0", features = ["full"], optional=true } +tokio-test = { version = "0.4", optional = true } serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" anyhow = "1.0" @@ -19,5 +16,16 @@ chrono = { version = "0.4", features = ["serde"] } blake2 = "0.10" thiserror = "2.0.12" wasm-bindgen = { version = "0.2.100", optional=true } +idb = { version = "0.6.1", optional=true } +web-sys = { version = "0.3", features = ["console"], optional=true } [dev-dependencies] +wasm-bindgen-test = "0.3" +wasm-bindgen-futures = "0.4" + +[lib] +crate-type = ["cdylib", "rlib"] + +[features] +native = ["sqlx", "tokio", "tokio-test"] +wasm = ["offline-web-model/wasm", "uuid/js", "idb", "wasm-bindgen", "web-sys"] diff --git a/offline-web-storage/src/indexeddb.rs b/offline-web-storage/src/indexeddb.rs index e69de29..f79670c 100644 --- a/offline-web-storage/src/indexeddb.rs +++ b/offline-web-storage/src/indexeddb.rs @@ -0,0 +1,557 @@ +use std::collections::{HashSet, VecDeque}; +use std::sync::Arc; + +use idb::{ + Database, DatabaseEvent, Factory, KeyPath, ObjectStore, ObjectStoreParams, Query, TransactionMode, +}; +use offline_web_model::Reference; +use serde::{Deserialize, Serialize}; +use wasm_bindgen::JsValue; + +use crate::ReferenceStore; +use crate::StoreError; + +const DB_NAME: &str = "offline_web_store"; +const DB_VERSION: u32 = 1; +const REF_ENTRIES_STORE: &str = "ref_entries"; +const REF_DEPENDENCIES_STORE: &str = "ref_dependencies"; +const CONTENT_STORE: &str = "content_store"; + +#[derive(Serialize, Deserialize, Clone)] +struct RefEntry { + id: String, + content_address: Option, + name: String, +} + +#[derive(Serialize, Deserialize, Clone)] +struct RefDependency { + parent_id: String, + dependent_id: String, +} + +#[derive(Serialize, Deserialize, Clone)] +struct ContentEntry { + content_address: String, + content: Vec, +} + +pub struct IndexedDbReferenceStore { + db: Database, +} + +impl IndexedDbReferenceStore { + pub async fn new() -> Result { + let factory = Factory::new()?; + + let mut open_request = factory.open(DB_NAME, Some(DB_VERSION))?; + + // Set up database upgrade handler + open_request.on_upgrade_needed(|event| { + let db = event.database().expect("Failed to get indexeddb database"); + + // Create ref_entries object store + if !db + .store_names() + .iter() + .any(|name| name == REF_ENTRIES_STORE) + { + let ref_store = db.create_object_store(REF_ENTRIES_STORE, ObjectStoreParams::new()) + .expect(&format!( + "Failed to create ref_entries store: {:?}", + REF_ENTRIES_STORE + )); + + // Create name index for get_graph method + ref_store.create_index("name", KeyPath::new_single("name"), None) + .expect("Failed to create name index"); + } + + // Create ref_dependencies object store + if !db + .store_names() + .iter() + .any(|name| name == REF_DEPENDENCIES_STORE) + { + db.create_object_store(REF_DEPENDENCIES_STORE, ObjectStoreParams::new()) + .expect(&format!( + "Failed to create ref_dependencies store: {:?}", + REF_DEPENDENCIES_STORE + )); + } + + // Create content_store object store + if !db.store_names().iter().any(|name| name == CONTENT_STORE) { + db.create_object_store(CONTENT_STORE, ObjectStoreParams::new()) + .expect(&format!( + "Failed to create content_store: {:?}", + CONTENT_STORE + )); + } + }); + + let db = open_request.await?; 
+
+        Ok(Self { db })
+    }
+
+
+    async fn clear_dependencies_sync(
+        &self,
+        dep_store: &ObjectStore,
+        parent_id: &str,
+    ) -> Result<(), StoreError> {
+        let cursor_request = dep_store.open_cursor(None, None)?;
+        let mut keys_to_delete: Vec<String> = Vec::new();
+
+        if let Ok(Some(cursor)) = cursor_request.await {
+            loop {
+                let should_continue = match cursor.value() {
+                    Ok(value) => {
+                        if let Some(dep_str) = value.as_string() {
+                            if let Ok(dep_entry) = serde_json::from_str::<RefDependency>(&dep_str) {
+                                if dep_entry.parent_id == parent_id {
+                                    if let Ok(key) = cursor.key() {
+                                        if let Some(key_str) = key.as_string() {
+                                            keys_to_delete.push(key_str);
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                        true
+                    }
+                    Err(_) => false,
+                };
+
+                if !should_continue {
+                    break;
+                }
+
+                // Try to continue to next item
+                match cursor.advance(1) {
+                    Ok(advance_request) => {
+                        if advance_request.await.is_err() {
+                            break;
+                        }
+                        if cursor.value().is_err() {
+                            break;
+                        }
+                    }
+                    Err(_) => break,
+                }
+            }
+        }
+
+        // Delete the found keys
+        for key in keys_to_delete {
+            let delete_request = dep_store.delete(JsValue::from_str(&key))?;
+            delete_request.await?;
+        }
+
+        Ok(())
+    }
+
+    async fn get_dependents(&self, parent_id: &str) -> Result<Vec<Arc<Reference>>, StoreError> {
+        // Use a completely iterative approach to build the dependency tree
+        let mut all_refs = std::collections::HashMap::new();
+        let mut dependency_map = std::collections::HashMap::<String, Vec<String>>::new();
+        let mut to_process = std::collections::VecDeque::new();
+        let mut processed = std::collections::HashSet::new();
+
+        // Start with direct children of the parent
+        let direct_children = self.get_direct_children(parent_id).await?;
+        for child_id in direct_children.clone() {
+            to_process.push_back(child_id);
+        }
+
+        // Process all references iteratively to build both refs map and dependency map
+        while let Some(ref_id) = to_process.pop_front() {
+            if processed.contains(&ref_id) {
+                continue;
+            }
+            processed.insert(ref_id.clone());
+
+            // Get the reference without dependents first
+            if let Ok(reference) = self.get_reference_without_dependents(&ref_id).await {
+                all_refs.insert(ref_id.clone(), reference);
+
+                // Get its direct children and add them to the processing queue
+                if let Ok(children) = self.get_direct_children(&ref_id).await {
+                    dependency_map.insert(ref_id.clone(), children.clone());
+                    for child_id in children {
+                        to_process.push_back(child_id);
+                    }
+                } else {
+                    dependency_map.insert(ref_id.clone(), Vec::new());
+                }
+            }
+        }
+
+        // Now build the dependency tree iteratively using the maps
+        let mut result = Vec::new();
+        for child_id in direct_children {
+            if let Some(reference) = self.build_full_reference(&child_id, &all_refs, &dependency_map) {
+                result.push(Arc::new(reference));
+            }
+        }
+
+        Ok(result)
+    }
+
+    fn build_full_reference(
+        &self,
+        target_id: &str,
+        all_refs: &std::collections::HashMap<String, Reference>,
+        dependency_map: &std::collections::HashMap<String, Vec<String>>
+    ) -> Option<Reference> {
+        let mut built_refs = std::collections::HashMap::<String, Reference>::new();
+        let mut to_build = std::collections::VecDeque::new();
+        let mut processed = std::collections::HashSet::new();
+
+        // Start from leaves and work up
+        to_build.push_back(target_id.to_string());
+
+        while let Some(ref_id) = to_build.pop_back() {
+            if processed.contains(&ref_id) {
+                continue;
+            }
+
+            let base_ref = all_refs.get(&ref_id)?;
+            let children = dependency_map.get(&ref_id).cloned().unwrap_or_default();
+
+            // Check if all children are already built
+            let all_children_built = children.iter().all(|child_id| built_refs.contains_key(child_id));
+
+            if all_children_built {
+                // Build this reference with
its dependents
+                let mut dependents = Vec::new();
+                for child_id in &children {
+                    if let Some(child_ref) = built_refs.get(child_id) {
+                        dependents.push(Arc::new(child_ref.clone()));
+                    }
+                }
+
+                let full_ref = Reference {
+                    id: base_ref.id.clone(),
+                    content_address: base_ref.content_address.clone(),
+                    name: base_ref.name.clone(),
+                    dependents,
+                };
+
+                built_refs.insert(ref_id.clone(), full_ref);
+                processed.insert(ref_id);
+            } else {
+                // Add children to be built first, then re-add this ref
+                to_build.push_front(ref_id);
+                for child_id in children {
+                    if !processed.contains(&child_id) && !built_refs.contains_key(&child_id) {
+                        to_build.push_back(child_id);
+                    }
+                }
+            }
+        }
+
+        built_refs.remove(target_id)
+    }
+
+    async fn find_reference_by_name(&self, name: &str) -> Result<Option<RefEntry>, StoreError> {
+        let transaction = self
+            .db
+            .transaction(&[REF_ENTRIES_STORE], TransactionMode::ReadOnly)?;
+
+        let ref_store = transaction.object_store(REF_ENTRIES_STORE)?;
+
+        // Use cursor to search through all references
+        let cursor_request = ref_store.open_cursor(None, None)?;
+
+        if let Ok(Some(cursor)) = cursor_request.await {
+            loop {
+                let should_continue = match cursor.value() {
+                    Ok(value) => {
+                        if let Some(ref_str) = value.as_string() {
+                            if let Ok(ref_entry) = serde_json::from_str::<RefEntry>(&ref_str) {
+                                if ref_entry.name == name {
+                                    return Ok(Some(ref_entry));
+                                }
+                            }
+                        }
+                        true
+                    }
+                    Err(_) => false,
+                };
+
+                if !should_continue {
+                    break;
+                }
+
+                // Try to continue to next item
+                match cursor.advance(1) {
+                    Ok(advance_request) => {
+                        if advance_request.await.is_err() {
+                            break;
+                        }
+                        if cursor.value().is_err() {
+                            break;
+                        }
+                    }
+                    Err(_) => break,
+                }
+            }
+        }
+
+        Ok(None)
+    }
+
+    async fn get_reference_without_dependents(&self, id: &str) -> Result<Reference, StoreError> {
+        let transaction = self
+            .db
+            .transaction(&[REF_ENTRIES_STORE], TransactionMode::ReadOnly)?;
+
+        let ref_store = transaction.object_store(REF_ENTRIES_STORE)?;
+
+        let request = ref_store.get(Query::Key(JsValue::from_str(id)))?;
+        let value = request.await?;
+
+        if let Some(value) = value {
+            if let Some(ref_str) = value.as_string() {
+                if let Ok(ref_entry) = serde_json::from_str::<RefEntry>(&ref_str) {
+                    return Ok(Reference {
+                        id: ref_entry.id,
+                        content_address: ref_entry.content_address,
+                        name: ref_entry.name,
+                        dependents: Vec::new(), // No dependents in this method
+                    });
+                }
+            }
+        }
+
+        Err(StoreError::NoSuchReference)
+    }
+
+    async fn get_direct_children(&self, parent_id: &str) -> Result<Vec<String>, StoreError> {
+        let transaction = self
+            .db
+            .transaction(&[REF_DEPENDENCIES_STORE], TransactionMode::ReadOnly)?;
+
+        let dep_store = transaction.object_store(REF_DEPENDENCIES_STORE)?;
+
+        let mut children = Vec::new();
+        let cursor_request = dep_store.open_cursor(None, None)?;
+
+        if let Ok(Some(cursor)) = cursor_request.await {
+            loop {
+                let should_continue = match cursor.value() {
+                    Ok(value) => {
+                        if let Some(dep_str) = value.as_string() {
+                            if let Ok(dep_entry) = serde_json::from_str::<RefDependency>(&dep_str) {
+                                if dep_entry.parent_id == parent_id {
+                                    children.push(dep_entry.dependent_id);
+                                }
+                            }
+                        }
+                        true
+                    }
+                    Err(_) => false,
+                };
+
+                if !should_continue {
+                    break;
+                }
+
+                // Try to continue to next item
+                match cursor.advance(1) {
+                    Ok(advance_request) => {
+                        // Wait for the advance operation, but if it fails, we're done
+                        if advance_request.await.is_err() {
+                            break;
+                        }
+                        // Check if cursor is still valid
+                        if cursor.value().is_err() {
+                            break;
+                        }
+                    }
+                    Err(_) => break,
+                }
+            }
+        }
+
+        Ok(children)
+    }
+}
+
+impl ReferenceStore for IndexedDbReferenceStore {
+    async fn
get_reference(&self, id: &str) -> Result<Reference, StoreError> {
+        let mut reference = self.get_reference_without_dependents(id).await?;
+        reference.dependents = self.get_dependents(id).await?;
+        Ok(reference)
+    }
+
+    async fn get_content_for_reference(&self, reference: Reference) -> Result<String, StoreError> {
+        if let Some(content_address) = &reference.content_address {
+            let transaction = self
+                .db
+                .transaction(&[CONTENT_STORE], TransactionMode::ReadOnly)
+                .map_err(|e| {
+                    StoreError::StorageError(Box::new(std::io::Error::new(
+                        std::io::ErrorKind::Other,
+                        format!("Failed to create transaction: {:?}", e),
+                    )))
+                })?;
+
+            let content_store = transaction.object_store(CONTENT_STORE).map_err(|e| {
+                StoreError::StorageError(Box::new(std::io::Error::new(
+                    std::io::ErrorKind::Other,
+                    format!("Failed to get content_store: {:?}", e),
+                )))
+            })?;
+
+            let request = content_store
+                .get(Query::Key(JsValue::from_str(content_address)))
+                .map_err(|e| {
+                    StoreError::StorageError(Box::new(std::io::Error::new(
+                        std::io::ErrorKind::Other,
+                        format!("Failed to get content: {:?}", e),
+                    )))
+                })?;
+
+            let value = request.await.map_err(|e| {
+                StoreError::StorageError(Box::new(std::io::Error::new(
+                    std::io::ErrorKind::Other,
+                    format!("Failed to await content: {:?}", e),
+                )))
+            })?;
+
+            if let Some(value) = value {
+                if let Some(content_str) = value.as_string() {
+                    if let Ok(content_entry) = serde_json::from_str::<ContentEntry>(&content_str) {
+                        return String::from_utf8(content_entry.content)
+                            .map_err(|e| StoreError::StorageError(Box::new(e)));
+                    }
+                }
+            }
+
+            Err(StoreError::NoSuchContentAddress)
+        } else {
+            Err(StoreError::NoSuchContentAddress)
+        }
+    }
+
+    async fn get_graph(&self, root_name: &str) -> Result<Vec<Reference>, StoreError> {
+        let mut visited = HashSet::new();
+        let mut result = Vec::new();
+        let mut queue = VecDeque::new();
+
+        queue.push_back(root_name.to_string());
+
+        while let Some(current_name) = queue.pop_front() {
+            if visited.contains(&current_name) {
+                continue;
+            }
+
+            visited.insert(current_name.clone());
+
+            // Find reference by name - use a cursor to search through all references
+            if let Ok(reference_opt) = self.find_reference_by_name(&current_name).await {
+                if let Some(ref_entry) = reference_opt {
+                    let dependents = self.get_dependents(&ref_entry.id).await?;
+
+                    let reference = Reference {
+                        id: ref_entry.id,
+                        content_address: ref_entry.content_address,
+                        name: ref_entry.name,
+                        dependents: dependents.clone(),
+                    };
+
+                    result.push(reference);
+
+                    // Add dependent names to queue
+                    for dependent in dependents {
+                        if !visited.contains(&dependent.name) {
+                            queue.push_back(dependent.name.clone());
+                        }
+                    }
+                }
+            }
+        }
+
+        Ok(result)
+    }
+
+    async fn store_reference(&self, reference: &Reference) -> Result<(), StoreError> {
+        let transaction = self
+            .db
+            .transaction(
+                &[REF_ENTRIES_STORE, REF_DEPENDENCIES_STORE],
+                TransactionMode::ReadWrite,
+            )?;
+
+        let ref_store = transaction.object_store(REF_ENTRIES_STORE)?;
+        let dep_store = transaction.object_store(REF_DEPENDENCIES_STORE)?;
+
+        // Store the reference entry
+        let ref_entry = RefEntry {
+            id: reference.id.clone(),
+            content_address: reference.content_address.clone(),
+            name: reference.name.clone(),
+        };
+
+        let ref_value = serde_json::to_string(&ref_entry)?;
+        let ref_request = ref_store.put(
+            &JsValue::from_str(&ref_value),
+            Some(&JsValue::from_str(&reference.id)),
+        )?;
+        ref_request.await?;
+
+        // Clear existing dependencies for this reference
+        self.clear_dependencies_sync(&dep_store, &reference.id).await?;
+
+        // Store new dependencies
+        for dependent in &reference.dependents {
+
let dep_entry = RefDependency { + parent_id: reference.id.clone(), + dependent_id: dependent.id.clone(), + }; + + let dep_value = serde_json::to_string(&dep_entry)?; + let dep_key = format!("{}:{}", reference.id, dependent.id); + + let dep_request = dep_store.put( + &JsValue::from_str(&dep_value), + Some(&JsValue::from_str(&dep_key)), + )?; + dep_request.await?; + } + + let commit_request = transaction.commit()?; + commit_request.await?; + + Ok(()) + } + + async fn store_content(&self, content_address: &str, content: &[u8]) -> Result<(), StoreError> { + let transaction = self + .db + .transaction(&[CONTENT_STORE], TransactionMode::ReadWrite)?; + + let content_store = transaction.object_store(CONTENT_STORE)?; + + let content_entry = ContentEntry { + content_address: content_address.to_string(), + content: content.to_vec(), + }; + + let content_value = serde_json::to_string(&content_entry)?; + let put_request = content_store.put( + &JsValue::from_str(&content_value), + Some(&JsValue::from_str(content_address)), + )?; + put_request.await?; + + let commit_request = transaction.commit()?; + commit_request.await?; + + Ok(()) + } +} + diff --git a/offline-web-storage/src/integration_tests/indexeddb.rs b/offline-web-storage/src/integration_tests/indexeddb.rs new file mode 100644 index 0000000..eff2f1f --- /dev/null +++ b/offline-web-storage/src/integration_tests/indexeddb.rs @@ -0,0 +1,51 @@ +use super::*; +use crate::IndexedDbReferenceStore; +use wasm_bindgen_test::*; + +wasm_bindgen_test_configure!(run_in_browser); + +async fn create_indexeddb_store() -> IndexedDbReferenceStore { + IndexedDbReferenceStore::new().await.unwrap() +} + +#[wasm_bindgen_test] +async fn test_store_and_retrieve_reference() { + let store = create_indexeddb_store().await; + test_store_and_retrieve_reference_impl(&store).await; +} + +#[wasm_bindgen_test] +async fn test_store_and_retrieve_content() { + let store = create_indexeddb_store().await; + test_store_and_retrieve_content_impl(&store).await; +} + +#[wasm_bindgen_test] +async fn test_reference_with_dependents() { + let store = create_indexeddb_store().await; + test_reference_with_dependents_impl(&store).await; +} + +#[wasm_bindgen_test] +async fn test_get_graph() { + let store = create_indexeddb_store().await; + test_get_graph_impl(&store).await; +} + +#[wasm_bindgen_test] +async fn test_nonexistent_reference() { + let store = create_indexeddb_store().await; + test_nonexistent_reference_impl(&store).await; +} + +#[wasm_bindgen_test] +async fn test_nonexistent_content() { + let store = create_indexeddb_store().await; + test_nonexistent_content_impl(&store).await; +} + +#[wasm_bindgen_test] +async fn test_reference_without_content_address() { + let store = create_indexeddb_store().await; + test_reference_without_content_address_impl(&store).await; +} diff --git a/offline-web-storage/src/integration_tests.rs b/offline-web-storage/src/integration_tests/mod.rs similarity index 74% rename from offline-web-storage/src/integration_tests.rs rename to offline-web-storage/src/integration_tests/mod.rs index d09343c..db5c69c 100644 --- a/offline-web-storage/src/integration_tests.rs +++ b/offline-web-storage/src/integration_tests/mod.rs @@ -1,16 +1,9 @@ use std::sync::Arc; use offline_web_model::Reference; -use super::{ReferenceStore, SqliteReferenceStore, StoreError}; +use crate::{ReferenceStore, StoreError}; -async fn create_test_store() -> SqliteReferenceStore { - SqliteReferenceStore::new("sqlite::memory:").await.unwrap() -} - -#[tokio::test] -async fn 
test_store_and_retrieve_reference() {
-    let store = create_test_store().await;
-
+pub async fn test_store_and_retrieve_reference_impl<T: ReferenceStore>(store: &T) {
     // Create a test reference
     let reference = Reference::new(
         Some("test_content_address".to_string()),
@@ -30,10 +23,7 @@ async fn test_store_and_retrieve_reference() {
     assert_eq!(retrieved.dependents.len(), reference.dependents.len());
 }
 
-#[tokio::test]
-async fn test_store_and_retrieve_content() {
-    let store = create_test_store().await;
-
+pub async fn test_store_and_retrieve_content_impl<T: ReferenceStore>(store: &T) {
     let content = b"Hello, World!";
     let content_address = "test_content_address";
 
@@ -53,10 +43,7 @@ async fn test_store_and_retrieve_content() {
     assert_eq!(retrieved_content, String::from_utf8(content.to_vec()).unwrap());
 }
 
-#[tokio::test]
-async fn test_reference_with_dependents() {
-    let store = create_test_store().await;
-
+pub async fn test_reference_with_dependents_impl<T: ReferenceStore>(store: &T) {
     // Create a leaf reference (no dependents)
     let leaf_ref = Reference::new(
         Some("leaf_content_address".to_string()),
@@ -83,10 +70,7 @@ async fn test_reference_with_dependents() {
     assert_eq!(retrieved_parent.dependents[0].name, leaf_ref.name);
 }
 
-#[tokio::test]
-async fn test_get_graph() {
-    let store = create_test_store().await;
-
+pub async fn test_get_graph_impl<T: ReferenceStore>(store: &T) {
     // Create a hierarchy of references
     let leaf1 = Reference::new(
         Some("leaf1_content".to_string()),
@@ -130,10 +114,7 @@ async fn test_get_graph() {
     assert!(names.contains(&&"leaf2".to_string()));
 }
 
-#[tokio::test]
-async fn test_nonexistent_reference() {
-    let store = create_test_store().await;
-
+pub async fn test_nonexistent_reference_impl<T: ReferenceStore>(store: &T) {
     // Try to retrieve a reference that doesn't exist
     let result = store.get_reference("nonexistent_id").await;
 
@@ -141,10 +122,7 @@ async fn test_nonexistent_reference() {
     assert!(matches!(result, Err(StoreError::NoSuchReference)));
 }
 
-#[tokio::test]
-async fn test_nonexistent_content() {
-    let store = create_test_store().await;
-
+pub async fn test_nonexistent_content_impl<T: ReferenceStore>(store: &T) {
     // Create a reference with a content address that doesn't exist
     let reference = Reference::new(
         Some("nonexistent_content_address".to_string()),
@@ -158,10 +136,7 @@ async fn test_nonexistent_content() {
     assert!(matches!(result, Err(StoreError::NoSuchContentAddress)));
 }
 
-#[tokio::test]
-async fn test_reference_without_content_address() {
-    let store = create_test_store().await;
-
+pub async fn test_reference_without_content_address_impl<T: ReferenceStore>(store: &T) {
     // Create a reference without a content address
     let reference = Reference::new(None, "test_reference".to_string());
 
@@ -172,22 +147,8 @@ async fn test_reference_without_content_address() {
     assert!(matches!(result, Err(StoreError::NoSuchContentAddress)));
 }
 
-#[tokio::test]
-async fn test_schema_version_management() {
-    let store = create_test_store().await;
-
-    // Verify the schema version is correctly set
-    let version = store.get_current_schema_version().await.unwrap();
-    assert_eq!(version, 1, "Schema version should be 1");
-
-    // Verify we can still perform basic operations
-    let reference = Reference::new(
-        Some("test_content".to_string()),
-        "test_schema_version".to_string(),
-    );
-
-    store.store_reference(&reference).await.unwrap();
-    let retrieved = store.get_reference(&reference.id).await.unwrap();
-    assert_eq!(retrieved.name, reference.name);
-}
+#[cfg(all(test, feature="native"))]
+mod sqlite;
+
+#[cfg(all(test, feature="wasm"))]
+mod indexeddb;
diff --git a/offline-web-storage/src/integration_tests/sqlite.rs
b/offline-web-storage/src/integration_tests/sqlite.rs
new file mode 100644
index 0000000..84ac0e9
--- /dev/null
+++ b/offline-web-storage/src/integration_tests/sqlite.rs
@@ -0,0 +1,67 @@
+use super::*;
+use crate::SqliteReferenceStore;
+
+async fn create_sqlite_store() -> SqliteReferenceStore {
+    SqliteReferenceStore::new("sqlite::memory:").await.unwrap()
+}
+
+#[tokio::test]
+async fn test_store_and_retrieve_reference() {
+    let store = create_sqlite_store().await;
+    test_store_and_retrieve_reference_impl(&store).await;
+}
+
+#[tokio::test]
+async fn test_store_and_retrieve_content() {
+    let store = create_sqlite_store().await;
+    test_store_and_retrieve_content_impl(&store).await;
+}
+
+#[tokio::test]
+async fn test_reference_with_dependents() {
+    let store = create_sqlite_store().await;
+    test_reference_with_dependents_impl(&store).await;
+}
+
+#[tokio::test]
+async fn test_get_graph() {
+    let store = create_sqlite_store().await;
+    test_get_graph_impl(&store).await;
+}
+
+#[tokio::test]
+async fn test_nonexistent_reference() {
+    let store = create_sqlite_store().await;
+    test_nonexistent_reference_impl(&store).await;
+}
+
+#[tokio::test]
+async fn test_nonexistent_content() {
+    let store = create_sqlite_store().await;
+    test_nonexistent_content_impl(&store).await;
+}
+
+#[tokio::test]
+async fn test_reference_without_content_address() {
+    let store = create_sqlite_store().await;
+    test_reference_without_content_address_impl(&store).await;
+}
+
+#[tokio::test]
+async fn test_schema_version_management() {
+    let store = create_sqlite_store().await;
+
+    // Verify the schema version is correctly set
+    let version = store.get_current_schema_version().await.unwrap();
+    assert_eq!(version, 1, "Schema version should be 1");
+
+    // Verify we can still perform basic operations
+    let reference = Reference::new(
+        Some("test_content".to_string()),
+        "test_schema_version".to_string(),
+    );
+
+    store.store_reference(&reference).await.unwrap();
+    let retrieved = store.get_reference(&reference.id).await.unwrap();
+    assert_eq!(retrieved.name, reference.name);
+}
diff --git a/offline-web-storage/src/lib.rs b/offline-web-storage/src/lib.rs
index f8d3fd9..fc3dc21 100644
--- a/offline-web-storage/src/lib.rs
+++ b/offline-web-storage/src/lib.rs
@@ -5,12 +5,31 @@ use offline_web_model::Reference;
 pub enum StoreError {
     #[error("No such reference")]
     NoSuchReference,
+    #[error("Invalid reference")]
+    InvalidReference,
     #[error("No such content address")]
     NoSuchContentAddress,
+    #[error("Serialization Error: {0:?}")]
+    SerializationError(Box<dyn std::error::Error>),
     #[error("Unknown Storage Error: {0:?}")]
     StorageError(Box<dyn std::error::Error>),
 }
+
+#[cfg(feature="wasm")]
+impl From<idb::Error> for StoreError {
+    fn from(value: idb::Error) -> Self {
+        // TODO(jwall): We can probably be more helpful in our error message here.
+        StoreError::StorageError(Box::new(value))
+    }
+}
+
+#[cfg(feature="wasm")]
+impl From<serde_json::Error> for StoreError {
+    fn from(value: serde_json::Error) -> Self {
+        StoreError::SerializationError(Box::new(value))
+    }
+}
+
 #[allow(async_fn_in_trait)]
 pub trait ReferenceStore {
     async fn get_reference(&self, id: &str) -> Result<Reference, StoreError>;
@@ -18,6 +37,10 @@ pub trait ReferenceStore {
     async fn get_content_for_reference(&self, reference: Reference) -> Result<String, StoreError>;
 
     async fn get_graph(&self, root_name: &str) -> Result<Vec<Reference>, StoreError>;
+
+    async fn store_reference(&self, reference: &Reference) -> Result<(), StoreError>;
+
+    async fn store_content(&self, content_address: &str, content: &[u8]) -> Result<(), StoreError>;
 }
 
 #[cfg(feature="native")]
@@ -26,8 +49,10 @@ mod sqlite;
 #[cfg(feature="native")]
 pub use sqlite::SqliteReferenceStore;
 
-#[cfg(feature="native")]
+#[cfg(feature="wasm")]
 mod indexeddb;
 
-#[cfg(all(test, feature="native"))]
+#[cfg(feature="wasm")]
+pub use indexeddb::IndexedDbReferenceStore;
+
 mod integration_tests;
diff --git a/offline-web-storage/src/sqlite.rs b/offline-web-storage/src/sqlite.rs
index 4ed1dde..5eb7205 100644
--- a/offline-web-storage/src/sqlite.rs
+++ b/offline-web-storage/src/sqlite.rs
@@ -166,74 +166,6 @@ impl SqliteReferenceStore {
         Ok(())
     }
 
-    pub async fn store_reference(&self, reference: &Reference) -> Result<(), StoreError> {
-        // Use a transaction to ensure atomicity
-        let mut tx = self.pool.begin().await
-            .map_err(|e| StoreError::StorageError(Box::new(e)))?;
-
-        // Insert or update the reference
-        sqlx::query(
-            r#"
-            INSERT OR REPLACE INTO ref_entries (id, content_address, name)
-            VALUES (?, ?, ?)
-            "#,
-        )
-        .bind(&reference.id)
-        .bind(&reference.content_address)
-        .bind(&reference.name)
-        .execute(&mut *tx)
-        .await
-        .map_err(|e| StoreError::StorageError(Box::new(e)))?;
-
-        // Delete existing dependencies for this reference
-        sqlx::query(
-            r#"
-            DELETE FROM ref_dependencies
-            WHERE parent_id = ?
-            "#,
-        )
-        .bind(&reference.id)
-        .execute(&mut *tx)
-        .await
-        .map_err(|e| StoreError::StorageError(Box::new(e)))?;
-
-        // Insert new dependencies
-        for dependent in &reference.dependents {
-            sqlx::query(
-                r#"
-                INSERT INTO ref_dependencies (parent_id, dependent_id)
-                VALUES (?, ?)
-                "#,
-            )
-            .bind(&reference.id)
-            .bind(&dependent.id)
-            .execute(&mut *tx)
-            .await
-            .map_err(|e| StoreError::StorageError(Box::new(e)))?;
-        }
-
-        // Commit the transaction
-        tx.commit().await
-            .map_err(|e| StoreError::StorageError(Box::new(e)))?;
-
-        Ok(())
-    }
-
-    pub async fn store_content(&self, content_address: &str, content: &[u8]) -> Result<(), StoreError> {
-        sqlx::query(
-            r#"
-            INSERT OR REPLACE INTO content_store (content_address, content)
-            VALUES (?, ?)
-            "#,
-        )
-        .bind(content_address)
-        .bind(content)
-        .execute(&self.pool)
-        .await
-        .map_err(|e| StoreError::StorageError(Box::new(e)))?;
-
-        Ok(())
-    }
 }
 
 impl ReferenceStore for SqliteReferenceStore {
@@ -354,6 +286,75 @@ impl ReferenceStore for SqliteReferenceStore {
 
         Ok(result)
     }
+
+    async fn store_reference(&self, reference: &Reference) -> Result<(), StoreError> {
+        // Use a transaction to ensure atomicity
+        let mut tx = self.pool.begin().await
+            .map_err(|e| StoreError::StorageError(Box::new(e)))?;
+
+        // Insert or update the reference
+        sqlx::query(
+            r#"
+            INSERT OR REPLACE INTO ref_entries (id, content_address, name)
+            VALUES (?, ?, ?)
+ "#, + ) + .bind(&reference.id) + .bind(&reference.content_address) + .bind(&reference.name) + .execute(&mut *tx) + .await + .map_err(|e| StoreError::StorageError(Box::new(e)))?; + + // Delete existing dependencies for this reference + sqlx::query( + r#" + DELETE FROM ref_dependencies + WHERE parent_id = ? + "#, + ) + .bind(&reference.id) + .execute(&mut *tx) + .await + .map_err(|e| StoreError::StorageError(Box::new(e)))?; + + // Insert new dependencies + for dependent in &reference.dependents { + sqlx::query( + r#" + INSERT INTO ref_dependencies (parent_id, dependent_id) + VALUES (?, ?) + "#, + ) + .bind(&reference.id) + .bind(&dependent.id) + .execute(&mut *tx) + .await + .map_err(|e| StoreError::StorageError(Box::new(e)))?; + } + + // Commit the transaction + tx.commit().await + .map_err(|e| StoreError::StorageError(Box::new(e)))?; + + Ok(()) + } + + async fn store_content(&self, content_address: &str, content: &[u8]) -> Result<(), StoreError> { + sqlx::query( + r#" + INSERT OR REPLACE INTO content_store (content_address, content) + VALUES (?, ?) + "#, + ) + .bind(content_address) + .bind(content) + .execute(&self.pool) + .await + .map_err(|e| StoreError::StorageError(Box::new(e)))?; + + Ok(()) + } } impl SqliteReferenceStore {