use std::{collections::HashMap, sync::Arc};

use blake2::{Blake2b512, Digest};
use rand::Rng;
use serde::{Deserialize, Serialize};

// NOTE: (de)serializing the `Arc<Reference>` dependents requires serde's "rc" feature.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Reference {
    pub object_id: String,
    pub content_address: String,
    pub path: String,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub dependents: Vec<Arc<Reference>>,
}

impl Reference {
    pub fn new(object_id: String, content_address: String, path: String) -> Self {
        Self {
            object_id,
            content_address,
            path,
            dependents: Vec::new(),
        }
    }

    pub fn add_dep(mut self, dep: Arc<Reference>) -> Self {
        self.dependents.push(dep);
        self
    }

    pub fn to_arc(self) -> Arc<Reference> {
        Arc::new(self)
    }

    pub fn is_leaf(&self) -> bool {
        self.dependents.is_empty()
    }

    /// Calculates a content address hash based on dependent references
    pub fn calculate_content_address(&self) -> String {
        if self.is_leaf() {
            return self.content_address.clone();
        }
        let mut hasher = Blake2b512::new();
        for dep in &self.dependents {
            hasher.update(&dep.content_address);
        }
        format!("{:x}", hasher.finalize())
    }
}

pub struct Graph {
    pub root: Arc<Reference>,
    pub refs: Arc<HashMap<String, Arc<Reference>>>,
    pub objects: Arc<HashMap<String, String>>,
}

impl Graph {
    /// Gets a reference by its path
    pub fn get_reference(&self, path: &str) -> Option<Arc<Reference>> {
        self.refs.get(path).cloned()
    }

    /// Gets an object by its content address
    pub fn get_object(&self, content_address: &str) -> Option<&String> {
        self.objects.get(content_address)
    }
}

/// Generates a random alphanumeric string and returns its Blake2b-512 hash
/// alongside the string itself.
pub fn random_object() -> (String, String) {
    let mut rng = rand::rng();
    let random_size = rng.random_range(50..=4096);
    let random_string: String = (0..random_size)
        .map(|_| rng.sample(rand::distr::Alphanumeric) as char)
        .collect();
    let mut hasher = Blake2b512::new();
    hasher.update(&random_string);
    let hash = format!("{:x}", hasher.finalize());
    (hash, random_string)
}
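// --- Example (not part of the original source): a minimal sketch of how
// --- `calculate_content_address` is expected to behave, assuming the types
// --- above. The test names and the sample ids/paths/addresses ("item:1",
// --- "/a", "addr-a", ...) are hypothetical and purely illustrative.
#[cfg(test)]
mod reference_tests {
    use super::*;

    #[test]
    fn leaf_returns_its_stored_address() {
        let leaf = Reference::new(
            "item:1".to_string(),
            "abc123".to_string(),
            "/item/1".to_string(),
        );
        // A leaf has no dependents, so its stored address is returned as-is.
        assert!(leaf.is_leaf());
        assert_eq!(leaf.calculate_content_address(), "abc123");
    }

    #[test]
    fn parent_hashes_its_dependents_addresses() {
        let leaf_a = Reference::new("a".into(), "addr-a".into(), "/a".into()).to_arc();
        let leaf_b = Reference::new("b".into(), "addr-b".into(), "/b".into()).to_arc();
        let parent = Reference::new("p".into(), String::new(), "/p".into())
            .add_dep(leaf_a.clone())
            .add_dep(leaf_b.clone());

        // Recompute the expected digest the same way the method does:
        // hash the dependents' content addresses in insertion order.
        let mut hasher = Blake2b512::new();
        hasher.update(&leaf_a.content_address);
        hasher.update(&leaf_b.content_address);
        let expected = format!("{:x}", hasher.finalize());

        assert_eq!(parent.calculate_content_address(), expected);
    }
}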
impl Graph {
    /// Updates a reference to point to a new object, recalculating content addresses
    /// for all affected references in the graph
    pub fn update_reference(
        &mut self,
        path: &str,
        new_object_id: String,
        new_content: String,
    ) -> Result<(), String> {
        // Create mutable copies of the shared maps
        let mut refs: HashMap<String, Arc<Reference>> = self.refs.as_ref().clone();
        let mut objects: HashMap<String, String> = self.objects.as_ref().clone();

        // Find the reference to update
        let ref_to_update = refs
            .get(path)
            .ok_or_else(|| format!("Reference with path {} not found", path))?;

        // Calculate the hash for the new content
        let mut hasher = Blake2b512::new();
        hasher.update(&new_content);
        let new_address = format!("{:x}", hasher.finalize());

        // Create the updated reference
        let updated_ref = Arc::new(Reference {
            object_id: new_object_id,
            content_address: new_address.clone(),
            path: path.to_string(),
            dependents: ref_to_update.dependents.clone(),
        });

        // Store the new content and the new reference
        objects.insert(new_address, new_content);
        refs.insert(path.to_string(), updated_ref);

        // Find and update all parent references that contain this reference
        self.update_parent_references(&mut refs, path)?;

        // Refresh the root: it may have been replaced while propagating
        // content-address changes up the graph, not only when `path` is the root
        if let Some(new_root) = refs.get(&self.root.path) {
            self.root = new_root.clone();
        }

        // Swap in the updated maps
        self.refs = Arc::new(refs);
        self.objects = Arc::new(objects);

        Ok(())
    }

    /// Recursively updates parent references when a child reference changes
    fn update_parent_references(
        &self,
        refs: &mut HashMap<String, Arc<Reference>>,
        updated_path: &str,
    ) -> Result<(), String> {
        // Find all references that have the updated reference as a dependent
        let parent_paths: Vec<String> = refs
            .iter()
            .filter(|(_, r)| r.dependents.iter().any(|dep| dep.path == updated_path))
            .map(|(path, _)| path.clone())
            .collect();

        for parent_path in parent_paths {
            if let Some(parent_ref) = refs.get(&parent_path) {
                // Rebuild the dependent list, swapping in the updated reference
                let mut new_dependents = Vec::new();
                for dep in &parent_ref.dependents {
                    if dep.path == updated_path {
                        // Use the updated reference
                        new_dependents.push(refs.get(updated_path).unwrap().clone());
                    } else {
                        // Keep the existing dependent
                        new_dependents.push(dep.clone());
                    }
                }

                // Calculate the new content address from the updated dependents
                let mut hasher = Blake2b512::new();
                for dep in &new_dependents {
                    hasher.update(&dep.content_address);
                }
                let new_address = format!("{:x}", hasher.finalize());

                // Create the updated parent reference
                let updated_parent = Arc::new(Reference {
                    object_id: parent_ref.object_id.clone(),
                    content_address: new_address,
                    path: parent_ref.path.clone(),
                    dependents: new_dependents,
                });

                // Replace the parent in the references map
                refs.insert(parent_path.clone(), updated_parent);

                // Recursively update the parents of this parent
                self.update_parent_references(refs, &parent_path)?;
            }
        }

        Ok(())
    }

    /// Builds a three-level graph: one root, ten items, and ten random leaf
    /// objects per item.
    pub fn random_graph() -> Graph {
        let path_root = String::from("ref/0");
        let mut objects = HashMap::new();
        let mut refs = HashMap::new();

        let mut root_ref = Reference::new(
            "username:0".to_string(),
            String::from("0"),
            path_root.clone(),
        );
        let mut root_hasher = Blake2b512::new();

        for i in 1..=10 {
            let mut item_ref = Reference::new(
                format!("item:{}", i),
                format!("0:{}", i),
                format!("/item/{}", i),
            );
            let mut hasher = Blake2b512::new();

            for j in 1..=10 {
                let (address, content) = random_object();
                hasher.update(&content);

                let leaf_ref = Reference::new(
                    format!("item:{}:subitem:{}", i, j),
                    address.clone(),
                    format!("/item/{}/subitem/{}", i, j),
                )
                .to_arc();

                item_ref = item_ref.add_dep(leaf_ref.clone());
                objects.insert(address, content);
                hasher.update(&leaf_ref.content_address);
                refs.insert(leaf_ref.path.clone(), leaf_ref);
            }

            let hash = format!("{:x}", hasher.finalize());
            item_ref.content_address = hash;
            root_hasher.update(&item_ref.content_address);

            let rc_ref = item_ref.to_arc();
            root_ref = root_ref.add_dep(rc_ref.clone());
            refs.insert(rc_ref.path.clone(), rc_ref);
        }

        root_ref.content_address = format!("{:x}", root_hasher.finalize());
        let rc_root = root_ref.to_arc();
        refs.insert(rc_root.path.clone(), rc_root.clone());

        Graph {
            root: rc_root,
            refs: Arc::new(refs),
            objects: Arc::new(objects),
        }
    }
}
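// --- Example (not part of the original source): a hedged sketch of how
// --- `random_graph` and `update_reference` might be exercised together.
// --- The test name, the chosen paths ("/item/1/subitem/1", "/item/1"), and
// --- the object id passed to `update_reference` assume the layout produced
// --- by `random_graph` above; they are illustrative only.
#[cfg(test)]
mod graph_update_tests {
    use super::*;

    #[test]
    fn updating_a_leaf_propagates_new_addresses_upward() {
        let mut graph = Graph::random_graph();

        let leaf_path = "/item/1/subitem/1".to_string();
        let parent_path = "/item/1".to_string();

        let old_leaf = graph.get_reference(&leaf_path).expect("leaf exists");
        let old_parent = graph.get_reference(&parent_path).expect("parent exists");

        // Swap the leaf's object for fresh random content.
        let (_, new_content) = random_object();
        graph
            .update_reference(
                &leaf_path,
                "item:1:subitem:1".to_string(),
                new_content.clone(),
            )
            .expect("update succeeds");

        // The leaf's content address changes (with overwhelming probability,
        // since the random contents differ), the new object is stored under
        // that address, and the parent's address is recalculated from its
        // dependents' addresses.
        let new_leaf = graph.get_reference(&leaf_path).unwrap();
        let new_parent = graph.get_reference(&parent_path).unwrap();
        assert_ne!(old_leaf.content_address, new_leaf.content_address);
        assert_eq!(graph.get_object(&new_leaf.content_address), Some(&new_content));
        assert_ne!(old_parent.content_address, new_parent.content_address);
    }
}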