Compare commits

...

2 Commits

3 changed files with 78 additions and 38 deletions

View File

@@ -10,7 +10,7 @@ fn get_deterministic_candidate(graph: &Graph) -> Arc<Reference> {
         .refs
         .values()
         .filter(|r| r.name != graph.root.name && r.is_leaf())
-        .map(|r| r.clone())
+        .cloned()
         .collect();
 
     // Sort by name to ensure deterministic ordering
@@ -57,7 +57,7 @@ fn test_root_not_a_dependency() {
     let root_name = graph.root.name.clone();
 
     // Check all references to ensure none have the root as a dependent
-    for (_, reference) in graph.refs.as_ref() {
+    for reference in graph.refs.as_ref().values() {
         for dep in &reference.dependents {
             assert_ne!(
                 dep.name, root_name,
@@ -88,7 +88,7 @@ fn test_all_nodes_connected_to_root() {
     collect_reachable(&graph.root, &mut reachable);
 
     // Check that all nodes in the graph are reachable from the root
-    for (name, _) in graph.refs.as_ref() {
+    for name in graph.refs.as_ref().keys() {
         assert!(
             reachable.contains(name),
             "All nodes should be reachable from the root: {}",
@@ -273,12 +273,12 @@ fn test_reference_ids_stable_regardless_of_dependency_order() {
     let base_ref = Reference::new(Some(String::from("base_content")), String::from("/base"));
 
     // Test multiple different orders of adding the same dependencies
-    let orders = vec![
+    let orders = [
         vec![dep_a.clone(), dep_b.clone(), dep_c.clone(), dep_d.clone()], // alphabetical
         vec![dep_d.clone(), dep_c.clone(), dep_b.clone(), dep_a.clone()], // reverse alphabetical
         vec![dep_b.clone(), dep_d.clone(), dep_a.clone(), dep_c.clone()], // random order 1
         vec![dep_c.clone(), dep_a.clone(), dep_d.clone(), dep_b.clone()], // random order 2
-        vec![dep_d.clone(), dep_a.clone(), dep_b.clone(), dep_c.clone()], // random order 3
+        vec![dep_d.clone(), dep_a.clone(), dep_b.clone(), dep_c.clone()],
     ];
 
     let mut all_ids = Vec::new();
@@ -325,7 +325,7 @@ fn test_dependencies_lexicographically_ordered() {
     let graph = create_test_graph();
 
     // Check all references to ensure their dependents are lexicographically ordered
-    for (_, reference) in graph.refs.as_ref() {
+    for reference in graph.refs.as_ref().values() {
        if reference.dependents.len() > 1 {
            // Check that dependents are ordered by name
            for i in 0..reference.dependents.len() - 1 {
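The test changes above are mechanical cleanups: iterating a map only to discard the key or the value now goes through .values() / .keys(), and .map(|r| r.clone()) becomes .cloned(). The following standalone sketch is not part of this change and uses an illustrative HashMap<String, Arc<String>> rather than the project's Graph type; it only demonstrates the same idioms.

use std::collections::HashMap;
use std::sync::Arc;

fn main() {
    let mut refs: HashMap<String, Arc<String>> = HashMap::new();
    refs.insert("a".into(), Arc::new("/a".into()));
    refs.insert("b".into(), Arc::new("/b".into()));

    // Instead of `for (_, r) in &refs`, iterate the values directly.
    for r in refs.values() {
        println!("{r}");
    }

    // Instead of `for (k, _) in &refs`, iterate the keys directly.
    for k in refs.keys() {
        println!("{k}");
    }

    // Instead of `.map(|r| r.clone())`, call `.cloned()` on the iterator of references.
    let all: Vec<Arc<String>> = refs.values().cloned().collect();
    assert_eq!(all.len(), 2);
}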

View File

@@ -145,44 +145,85 @@ impl ReferenceStore {
         }))
     }
 
-    fn get_dependents<'a>(
-        &'a self,
-        parent_id: &'a str,
-    ) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<Vec<Arc<Reference>>>> + Send + 'a>>
-    {
-        Box::pin(async move {
-            let rows = sqlx::query(
-                r#"
-                SELECT r.id, r.content_address, r.name
-                FROM refs r
-                JOIN reference_dependencies rd ON r.id = rd.dependent_id
-                WHERE rd.parent_id = ?
-                ORDER BY r.name
-                "#,
-            )
-            .bind(parent_id)
-            .fetch_all(&self.pool)
-            .await?;
-
-            let mut dependents = Vec::new();
-            for row in rows {
-                let id: String = row.get("id");
-                let content_address: Option<String> = row.get("content_address");
-                let name: String = row.get("name");
-
-                // Recursively get dependents for each dependent
-                let nested_dependents = self.get_dependents(&id).await?;
-
-                dependents.push(Arc::new(Reference {
-                    id,
-                    content_address,
-                    name,
-                    dependents: nested_dependents,
-                }));
-            }
-
-            Ok(dependents)
-        })
-    }
+    async fn get_dependents(&self, parent_id: &str) -> Result<Vec<Arc<Reference>>> {
+        // Get all dependents at once using a recursive CTE (Common Table Expression)
+        let rows = sqlx::query(
+            r#"
+            WITH RECURSIVE dependent_tree AS (
+                -- Base case: direct dependents
+                SELECT r.id, r.content_address, r.name, rd.parent_id, 0 as level
+                FROM refs r
+                JOIN reference_dependencies rd ON r.id = rd.dependent_id
+                WHERE rd.parent_id = ?
+
+                UNION ALL
+
+                -- Recursive case: dependents of dependents
+                SELECT r.id, r.content_address, r.name, rd.parent_id, dt.level + 1
+                FROM refs r
+                JOIN reference_dependencies rd ON r.id = rd.dependent_id
+                JOIN dependent_tree dt ON rd.parent_id = dt.id
+            )
+            SELECT id, content_address, name, parent_id, level
+            FROM dependent_tree
+            ORDER BY level, name
+            "#,
+        )
+        .bind(parent_id)
+        .fetch_all(&self.pool)
+        .await?;
+
+        // Build the tree structure from the flattened results
+        let mut refs_map: std::collections::HashMap<String, Arc<Reference>> = std::collections::HashMap::new();
+        let mut children_map: std::collections::HashMap<String, Vec<String>> = std::collections::HashMap::new();
+
+        // First pass: create all references and track relationships
+        for row in &rows {
+            let id: String = row.get("id");
+            let content_address: Option<String> = row.get("content_address");
+            let name: String = row.get("name");
+            let parent_id: String = row.get("parent_id");
+
+            // Create reference without dependents first
+            let reference = Arc::new(Reference {
+                id: id.clone(),
+                content_address,
+                name,
+                dependents: Vec::new(),
+            });
+
+            refs_map.insert(id.clone(), reference);
+            children_map.entry(parent_id).or_default().push(id);
+        }
+
+        // Second pass: build the tree by adding dependents to each reference
+        fn build_dependents(
+            ref_id: &str,
+            refs_map: &std::collections::HashMap<String, Arc<Reference>>,
+            children_map: &std::collections::HashMap<String, Vec<String>>,
+        ) -> Vec<Arc<Reference>> {
+            if let Some(child_ids) = children_map.get(ref_id) {
+                let mut dependents = Vec::new();
+                for child_id in child_ids {
+                    if let Some(child_ref) = refs_map.get(child_id) {
+                        let nested_dependents = build_dependents(child_id, refs_map, children_map);
+                        let updated_child = Arc::new(Reference {
+                            id: child_ref.id.clone(),
+                            content_address: child_ref.content_address.clone(),
+                            name: child_ref.name.clone(),
+                            dependents: nested_dependents,
+                        });
+                        dependents.push(updated_child);
+                    }
+                }
+                dependents.sort_by(|a, b| a.name.cmp(&b.name));
+                dependents
+            } else {
+                Vec::new()
+            }
+        }
+
+        Ok(build_dependents(parent_id, &refs_map, &children_map))
+    }
 
     pub async fn get_references_by_name(&self, name: &str) -> Result<Vec<Reference>> {
@@ -253,7 +253,7 @@ impl ReferenceStore {
     ) -> Result<()> {
        let mut tx = self.pool.begin().await?;
 
-        for (_, reference) in updated_references {
+        for reference in updated_references.values() {
            // Update the reference
            sqlx::query(

View File

@@ -4,8 +4,7 @@ use offline_web_model::Reference;
 use offline_web_storage::ReferenceStore;
 
 async fn create_test_store() -> ReferenceStore {
-    let store = ReferenceStore::new("sqlite::memory:").await.unwrap();
-    store
+    ReferenceStore::new("sqlite::memory:").await.unwrap()
 }
 
 #[tokio::test]