wip: mutatable graph

commit fe372b96fd (parent 0d3d634672)

Cargo.lock (generated)
@@ -535,7 +535,7 @@ dependencies = [
  "axum 0.8.3",
  "blake2",
  "offline-web-model",
- "rand 0.9.0",
+ "rand 0.9.1",
  "serde",
  "tokio",
 ]
@@ -544,6 +544,8 @@ dependencies = [
 name = "offline-web-model"
 version = "0.1.0"
 dependencies = [
+ "blake2",
+ "rand 0.9.1",
  "serde",
 ]
 
@@ -555,7 +557,7 @@ dependencies = [
  "axum-typed-websockets",
  "blake2",
  "offline-web-model",
- "rand 0.9.0",
+ "rand 0.9.1",
  "serde",
  "tokio",
 ]
@@ -630,13 +632,12 @@ dependencies = [
 
 [[package]]
 name = "rand"
-version = "0.9.0"
+version = "0.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
+checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
 dependencies = [
  "rand_chacha 0.9.0",
  "rand_core 0.9.3",
- "zerocopy",
 ]
 
 [[package]]
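
The lockfile bump from rand 0.9.0 to 0.9.1 tracks the random-data generator that this commit moves into the offline-web-model crate. As a reference point only, here is a standalone sketch of the rand 0.9 calls that generator relies on (rand::rng(), Rng::random_range, distr::Alphanumeric); it is not part of the commit itself:

    // Standalone sketch of the rand 0.9 API used by the generator code in
    // this commit; compiles against rand = "0.9" on its own.
    use rand::Rng;

    fn random_string() -> String {
        let mut rng = rand::rng();
        let len = rng.random_range(50..=4096);
        (0..len)
            .map(|_| rng.sample(rand::distr::Alphanumeric) as char)
            .collect()
    }

    fn main() {
        println!("{}", random_string());
    }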

@@ -1,77 +1,8 @@
 use std::{collections::HashMap, sync::Arc};
 
 use axum::{extract::Path, http, response::{Html, IntoResponse}, routing::get, Json, Router};
-use blake2::{Blake2b512, Digest};
-use rand::Rng;
 
-use offline_web_model::Reference;
+use offline_web_model::{Graph, Reference};
 
-#[derive(Debug)]
-pub struct AddressableObject {
-    pub address: String,
-    pub content: String,
-}
-
-fn random_object() -> AddressableObject {
-    let mut rng = rand::rng();
-    let random_size = rng.random_range(50..=4096);
-    let random_string: String = (0..random_size)
-        .map(|_| rng.sample(rand::distr::Alphanumeric) as char)
-        .collect();
-
-    let mut hasher = Blake2b512::new();
-    hasher.update(&random_string);
-    let hash = format!("{:x}", hasher.finalize());
-
-    AddressableObject {
-        address: hash,
-        content: random_string,
-    }
-}
-
-fn random_references_and_objects() -> (Arc<Reference>, Arc<HashMap<String, Arc<Reference>>>, Arc<HashMap<String, AddressableObject>>) {
-    let path_root = String::from("ref/0");
-    let mut objects = HashMap::new();
-    let mut refs = HashMap::new();
-    let mut root_ref = Reference::new(
-        "username:0".to_string(),
-        String::from("0"),
-        path_root.clone(),
-    );
-    let mut root_hasher = Blake2b512::new();
-    for i in 1..=10 {
-        let mut item_ref = Reference::new(
-            format!("item:{}", i),
-            format!("0:{}", i),
-            format!("/item/{}", i),
-        );
-        let mut hasher = Blake2b512::new();
-        for j in 1..=10 {
-            let object = random_object();
-            hasher.update(&object.content);
-            let leaf_ref = Reference::new(
-                format!("item:{}:subitem:{}", i, j),
-                format!("{}", object.address),
-                format!("/item/{}/subitem/{}", i, j),
-            ).to_arc();
-            item_ref = item_ref.add_dep(leaf_ref.clone());
-            objects.insert(object.address.clone(), object);
-            hasher.update(&leaf_ref.content_address);
-            refs.insert(leaf_ref.path.clone(), leaf_ref);
-        }
-        let hash = format!("{:x}", hasher.finalize());
-        item_ref.content_address = hash;
-        root_hasher.update(&item_ref.content_address);
-        let rc_ref = item_ref.to_arc();
-        root_ref = root_ref.add_dep(rc_ref.clone());
-        refs.insert(rc_ref.path.clone(), rc_ref);
-    }
-    root_ref.content_address = format!("{:x}", root_hasher.finalize());
-    let rc_root = root_ref.to_arc();
-    refs.insert(rc_root.path.clone(), rc_root.clone());
-    dbg!(&objects);
-    (rc_root, Arc::new(refs), Arc::new(objects))
-}
-
 // TODO(jeremy): Allow this to autoexpand the content_addresses?
 async fn all_references(root_ref: Arc<Reference>) -> Json<Arc<Reference>> {
@@ -86,10 +17,10 @@ async fn ref_path(refs: Arc<HashMap<String, Arc<Reference>>>, Path(path): Path<S
     }
 }
 
-async fn object_path(objects: Arc<HashMap<String, AddressableObject>>, Path(addr): Path<String>) -> String {
+async fn object_path(objects: Arc<HashMap<String, String>>, Path(addr): Path<String>) -> String {
     dbg!(&addr);
     match objects.get(&addr) {
-        Some(o) => o.content.clone(),
+        Some(o) => o.clone(),
         None => todo!("Return a 404?"),
     }
 }
@@ -101,25 +32,25 @@ async fn get_client_js() -> impl IntoResponse {
     )
 }
 
-pub fn endpoints(root_ref: Arc<Reference>, refs: Arc<HashMap<String, Arc<Reference>>>, objects: Arc<HashMap<String, AddressableObject>>) -> Router {
+pub fn endpoints(graph: Graph) -> Router {
     Router::new().nest(
         "/api/v1",
         Router::new().nest(
             "/ref",
             Router::new()
                 .route("/all/username", get({
-                    let state = root_ref.clone();
+                    let state = graph.root.clone();
                     move || all_references(state)
                 }))
                 .route("/item/{*path}", get({
-                    let refs = refs.clone();
+                    let refs = graph.refs.clone();
                     move |path| ref_path(refs, path)
                 }))
         ).nest(
             "/object",
            Router::new()
                .route("/{addr}", get({
-                    let objects = objects.clone();
+                    let objects = graph.objects.clone();
                    move |addr| object_path(objects, addr)
                }))
        ),
@@ -131,10 +62,11 @@ pub fn endpoints(root_ref: Arc<Reference>, refs: Arc<HashMap<String, Arc<Referen
 // TODO(jwall): Javascript test script
 pub async fn serve() {
     // run our app with hyper, listening globally on port 3000
-    let (root_ref, refs, objects) = random_references_and_objects();
+    let graph = Graph::random_graph();
+
     let listener = tokio::net::TcpListener::bind("127.0.0.1:3000")
         .await
         .unwrap();
     println!("Server ui starting on http://127.0.0.1:3000/ui/");
-    axum::serve(listener, endpoints(root_ref, refs, objects)).await.unwrap();
+    axum::serve(listener, endpoints(graph)).await.unwrap();
 }
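
The HTTP server above now builds its state with Graph::random_graph() and passes a single Graph into endpoints(), instead of threading three separate Arc maps through every route. Purely as an illustration of the routes it wires up, here is a hypothetical client call against a locally running serve(); reqwest is not a dependency of this repo and is used only for the sketch:

    // Hypothetical smoke test against a running serve(); route paths are
    // taken from the router above, everything else is assumed.
    #[tokio::main]
    async fn main() -> Result<(), reqwest::Error> {
        let base = "http://127.0.0.1:3000/api/v1";

        // The root Reference for the hard-coded "username" route, as JSON.
        let root = reqwest::get(format!("{base}/ref/all/username"))
            .await?
            .text()
            .await?;
        println!("{root}");
        Ok(())
    }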

@@ -1,4 +1,4 @@
-use std::{collections::HashMap, sync::Arc};
+use std::sync::Arc;
 
 use axum::{
     http,
@@ -7,83 +7,9 @@ use axum::{
     Router,
 };
 use axum_typed_websockets::{Message, WebSocket, WebSocketUpgrade};
-use blake2::{Blake2b512, Digest};
-use rand::Rng;
 use serde::{Deserialize, Serialize};
 
-use offline_web_model::Reference;
+use offline_web_model::{Graph, Reference};
 
-#[derive(Debug)]
-pub struct AddressableObject {
-    pub address: String,
-    pub content: String,
-}
-
-fn random_object() -> AddressableObject {
-    let mut rng = rand::rng();
-    let random_size = rng.random_range(50..=4096);
-    let random_string: String = (0..random_size)
-        .map(|_| rng.sample(rand::distr::Alphanumeric) as char)
-        .collect();
-
-    let mut hasher = Blake2b512::new();
-    hasher.update(&random_string);
-    let hash = format!("{:x}", hasher.finalize());
-
-    AddressableObject {
-        address: hash,
-        content: random_string,
-    }
-}
-
-fn random_references_and_objects() -> (
-    Arc<Reference>,
-    Arc<HashMap<String, Arc<Reference>>>,
-    Arc<HashMap<String, AddressableObject>>,
-) {
-    let path_root = String::from("ref/0");
-    let mut objects = HashMap::new();
-    let mut refs = HashMap::new();
-    let mut root_ref = Reference::new(
-        "username:0".to_string(),
-        String::from("0"),
-        path_root.clone(),
-    );
-    let mut root_hasher = Blake2b512::new();
-    for i in 1..=10 {
-        let mut item_ref = Reference::new(
-            format!("item:{}", i),
-            format!("0:{}", i),
-            format!("/item/{}", i),
-        );
-        let mut hasher = Blake2b512::new();
-        for j in 1..=10 {
-            let object = random_object();
-            hasher.update(&object.content);
-            let leaf_ref = Reference::new(
-                format!("item:{}:subitem:{}", i, j),
-                format!("{}", object.address),
-                format!("/item/{}/subitem/{}", i, j),
-            )
-            .to_arc();
-            item_ref = item_ref.add_dep(leaf_ref.clone());
-            objects.insert(object.address.clone(), object);
-            hasher.update(&leaf_ref.content_address);
-            refs.insert(leaf_ref.path.clone(), leaf_ref);
-        }
-        let hash = format!("{:x}", hasher.finalize());
-        item_ref.content_address = hash;
-        root_hasher.update(&item_ref.content_address);
-        let rc_ref = item_ref.to_arc();
-        root_ref = root_ref.add_dep(rc_ref.clone());
-        refs.insert(rc_ref.path.clone(), rc_ref);
-    }
-    root_ref.content_address = format!("{:x}", root_hasher.finalize());
-    let rc_root = root_ref.to_arc();
-    refs.insert(rc_root.path.clone(), rc_root.clone());
-    dbg!(&objects);
-    (rc_root, Arc::new(refs), Arc::new(objects))
-}
-
 async fn get_client_js() -> impl IntoResponse {
     (
@@ -107,19 +33,12 @@ enum ClientMsg {
 
 async fn handle_websocket(
     ws: WebSocketUpgrade<ServerMsg, ClientMsg>,
-    root_ref: Arc<Reference>,
-    refs: Arc<HashMap<String, Arc<Reference>>>,
-    objects: Arc<HashMap<String, AddressableObject>>,
+    graph: Arc<Graph>,
 ) -> impl IntoResponse {
-    ws.on_upgrade(|socket| handle_socket(socket, root_ref, refs, objects))
+    ws.on_upgrade(|socket| handle_socket(socket, graph))
 }
 
-async fn handle_socket(
-    mut socket: WebSocket<ServerMsg, ClientMsg>,
-    root_ref: Arc<Reference>,
-    refs: Arc<HashMap<String, Arc<Reference>>>,
-    objects: Arc<HashMap<String, AddressableObject>>,
-) {
+async fn handle_socket(mut socket: WebSocket<ServerMsg, ClientMsg>, graph: Arc<Graph>) {
     // Send initial data to the client
 
     // Handle incoming messages
@@ -129,7 +48,7 @@ async fn handle_socket(
             Message::Item(ClientMsg::Bootstrap) => {
                 if let Err(e) = socket
                     .send(Message::Item(ServerMsg::Reference(
-                        root_ref.as_ref().clone(),
+                        graph.root.as_ref().clone(),
                     )))
                     .await
                 {
@@ -138,7 +57,7 @@ async fn handle_socket(
                }
            }
            Message::Item(ClientMsg::GetReference(path)) => {
-                if let Some(reference) = refs.get(&path) {
+                if let Some(reference) = graph.refs.get(&path) {
                    if let Err(e) = socket
                        .send(Message::Item(ServerMsg::Reference((**reference).clone())))
                        .await
@@ -151,9 +70,9 @@ async fn handle_socket(
                }
            }
            Message::Item(ClientMsg::GetObject(address)) => {
-                if let Some(object) = objects.get(&address) {
+                if let Some(object) = graph.get_object(&address) {
                    if let Err(e) = socket
-                        .send(Message::Item(ServerMsg::Object(object.content.clone())))
+                        .send(Message::Item(ServerMsg::Object(object.clone())))
                        .await
                    {
                        println!("Error sending object: {:?}", e);
@@ -179,19 +98,16 @@ async fn handle_socket(
     println!("WebSocket connection closed");
 }
 
-pub fn endpoints(
-    root_ref: Arc<Reference>,
-    refs: Arc<HashMap<String, Arc<Reference>>>,
-    objects: Arc<HashMap<String, AddressableObject>>,
-) -> Router {
+pub fn endpoints(graph: Graph) -> Router {
     // WebSocket route now implemented
+    let graph = Arc::new(graph);
     Router::new()
         .nest(
             "/api/v1",
             Router::new().route(
                 "/ws",
                 get(|ws: WebSocketUpgrade<ServerMsg, ClientMsg>| async move {
-                    handle_websocket(ws, root_ref.clone(), refs.clone(), objects.clone()).await
+                    handle_websocket(ws, graph.clone()).await
                 }),
             ),
         )
@@ -205,13 +121,11 @@ pub fn endpoints(
 // TODO(jwall): Javascript test script
 pub async fn serve() {
     // run our app with hyper, listening globally on port 3000
-    let (root_ref, refs, objects) = random_references_and_objects();
+    let graph = Graph::random_graph();
     let listener = tokio::net::TcpListener::bind("127.0.0.1:3000")
         .await
         .unwrap();
     println!("Server ui starting on http://127.0.0.1:3000/ui/");
     println!("WebSocket endpoint available at ws://127.0.0.1:3000/api/v1/ws");
-    axum::serve(listener, endpoints(root_ref, refs, objects))
-        .await
-        .unwrap();
+    axum::serve(listener, endpoints(graph)).await.unwrap();
 }
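
The WebSocket handler above answers three client requests out of the shared Graph: Bootstrap returns the root reference, GetReference looks a path up in graph.refs, and GetObject resolves a content address via graph.get_object(). The ClientMsg/ServerMsg definitions sit outside the captured hunks, so the following is only a sketch of the shape implied by those match arms; the payload types and derives are assumptions:

    // Assumed shape of the typed WebSocket protocol used by handle_socket();
    // reconstructed from the match arms, not copied from the repo.
    use offline_web_model::Reference;
    use serde::{Deserialize, Serialize};

    #[derive(Serialize, Deserialize)]
    enum ClientMsg {
        Bootstrap,            // ask for the root reference
        GetReference(String), // a reference path, e.g. "/item/1"
        GetObject(String),    // a content address
    }

    #[derive(Serialize, Deserialize)]
    enum ServerMsg {
        Reference(Reference), // one node of the reference graph
        Object(String),       // the raw object content
    }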

@@ -4,4 +4,6 @@ version = "0.1.0"
 edition = "2024"
 
 [dependencies]
+blake2 = "0.10.6"
+rand = "0.9.1"
 serde = { version = "1.0.219", features = ["derive", "rc"] }

@@ -1,6 +1,8 @@
-use std::sync::Arc;
+use std::{collections::HashMap, sync::Arc};
 
-use serde::{Serialize, Deserialize};
+use blake2::{Blake2b512, Digest};
+use rand::Rng;
+use serde::{Deserialize, Serialize};
 
 #[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct Reference {
@@ -33,4 +35,200 @@ impl Reference {
     pub fn is_leaf(&self) -> bool {
         return self.dependents.is_empty();
     }
+
+    /// Calculates a content address hash based on dependent references
+    pub fn calculate_content_address(&self) -> String {
+        if self.is_leaf() {
+            return self.content_address.clone();
+        }
+
+        let mut hasher = Blake2b512::new();
+        for dep in &self.dependents {
+            hasher.update(&dep.content_address);
+        }
+        format!("{:x}", hasher.finalize())
+    }
+}
+
+pub struct Graph {
+    pub root: Arc<Reference>,
+    pub refs: Arc<HashMap<String, Arc<Reference>>>,
+    pub objects: Arc<HashMap<String, String>>,
+}
+
+impl Graph {
+    /// Gets a reference by its path
+    pub fn get_reference(&self, path: &str) -> Option<Arc<Reference>> {
+        self.refs.get(path).cloned()
+    }
+
+    /// Gets an object by its content address
+    pub fn get_object(&self, content_address: &str) -> Option<&String> {
+        self.objects.get(content_address)
+    }
+}
+
+pub fn random_object() -> (String, String) {
+    let mut rng = rand::rng();
+    let random_size = rng.random_range(50..=4096);
+    let random_string: String = (0..random_size)
+        .map(|_| rng.sample(rand::distr::Alphanumeric) as char)
+        .collect();
+
+    let mut hasher = Blake2b512::new();
+    hasher.update(&random_string);
+    let hash = format!("{:x}", hasher.finalize());
+
+    (hash, random_string)
+}
+
+impl Graph {
+    /// Updates a reference to point to a new object, recalculating content addresses
+    /// for all affected references in the graph
+    pub fn update_reference(&mut self, path: &String, new_object_id: String, new_content: String) -> Result<(), String> {
+        // Create a mutable copy of our maps
+        let mut refs = HashMap::new();
+        for (k, v) in self.refs.as_ref() {
+            refs.insert(k.clone(), v.clone());
+        }
+
+        let mut objects = HashMap::new();
+        for (k, v) in self.objects.as_ref() {
+            objects.insert(k.clone(), v.clone());
+        }
+
+        // Find the reference to update
+        let ref_to_update = refs.get(path).ok_or_else(|| format!("Reference with path {} not found", path))?;
+
+        // Calculate hash for the new content
+        let mut hasher = Blake2b512::new();
+        hasher.update(&new_content);
+        let new_address = format!("{:x}", hasher.finalize());
+
+        // Create updated reference
+        let updated_ref = Arc::new(Reference {
+            object_id: new_object_id,
+            content_address: new_address.clone(),
+            path: path.to_string(),
+            dependents: ref_to_update.dependents.clone(),
+        });
+
+        // Update objects map with new content
+        objects.insert(new_address.clone(), new_content);
+
+        // Update references map with new reference
+        refs.insert(path.to_string(), updated_ref.clone());
+
+        // Find and update all parent references that contain this reference
+        self.update_parent_references(&mut refs, path)?;
+
+        // Update the root reference if needed
+        if path == &self.root.path {
+            self.root = refs.get(path).unwrap().clone();
+        }
+
+        // Update the Arc maps
+        self.refs = Arc::new(refs);
+        self.objects = Arc::new(objects);
+
+        Ok(())
+    }
+
+    /// Recursively updates parent references when a child reference changes
+    fn update_parent_references(&self, refs: &mut HashMap<String, Arc<Reference>>, updated_path: &str) -> Result<(), String> {
+        // Find all references that have the updated reference as a dependent
+        let parent_paths: Vec<String> = refs
+            .iter()
+            .filter(|(_, r)| r.dependents.iter().any(|dep| dep.path == updated_path))
+            .map(|(path, _)| path.clone())
+            .collect();
+
+        for parent_path in parent_paths {
+            if let Some(parent_ref) = refs.get(&parent_path) {
+                // Create a new list of dependents with the updated reference
+                let mut new_dependents = Vec::new();
+                for dep in &parent_ref.dependents {
+                    if dep.path == updated_path {
+                        // Use the updated reference
+                        new_dependents.push(refs.get(updated_path).unwrap().clone());
+                    } else {
+                        // Keep the existing dependent
+                        new_dependents.push(dep.clone());
+                    }
+                }
+
+                // Calculate new content address based on updated dependents
+                let mut hasher = Blake2b512::new();
+                for dep in &new_dependents {
+                    hasher.update(&dep.content_address);
+                }
+                let new_address = format!("{:x}", hasher.finalize());
+
+                // Create updated parent reference
+                let updated_parent = Arc::new(Reference {
+                    object_id: parent_ref.object_id.clone(),
+                    content_address: new_address,
+                    path: parent_ref.path.clone(),
+                    dependents: new_dependents,
+                });
+
+                // Update the references map
+                refs.insert(parent_path.clone(), updated_parent);
+
+                // Recursively update parents of this parent
+                self.update_parent_references(refs, &parent_path)?;
+            }
+        }
+
+        Ok(())
+    }
+
+    pub fn random_graph() -> Graph {
+        let path_root = String::from("ref/0");
+        let mut objects = HashMap::new();
+        let mut refs = HashMap::new();
+        let mut root_ref = Reference::new(
+            "username:0".to_string(),
+            String::from("0"),
+            path_root.clone(),
+        );
+        let mut root_hasher = Blake2b512::new();
+        for i in 1..=10 {
+            let mut item_ref = Reference::new(
+                format!("item:{}", i),
+                format!("0:{}", i),
+                format!("/item/{}", i),
+            );
+            let mut hasher = Blake2b512::new();
+            for j in 1..=10 {
+                let (address, content) = random_object();
+                hasher.update(&content);
+                let leaf_ref = Reference::new(
+                    format!("item:{}:subitem:{}", i, j),
+                    format!("{}", address),
+                    format!("/item/{}/subitem/{}", i, j),
+                )
+                .to_arc();
+                item_ref = item_ref.add_dep(leaf_ref.clone());
+                objects.insert(address.clone(), content);
+                hasher.update(&leaf_ref.content_address);
+                refs.insert(leaf_ref.path.clone(), leaf_ref);
+            }
+            let hash = format!("{:x}", hasher.finalize());
+            item_ref.content_address = hash;
+            root_hasher.update(&item_ref.content_address);
+            let rc_ref = item_ref.to_arc();
+            root_ref = root_ref.add_dep(rc_ref.clone());
+            refs.insert(rc_ref.path.clone(), rc_ref);
+        }
+        root_ref.content_address = format!("{:x}", root_hasher.finalize());
+        let rc_root = root_ref.to_arc();
+        refs.insert(rc_root.path.clone(), rc_root.clone());
+        dbg!(&objects);
+        Graph {
+            root: rc_root,
+            refs: Arc::new(refs),
+            objects: Arc::new(objects),
+        }
+    }
 }
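
With this change the model crate owns both graph construction and mutation: update_reference() re-hashes the changed object with Blake2b and update_parent_references() walks every ancestor, giving each a recomputed content address. A minimal usage sketch, assuming the crate is consumed as offline_web_model and that a leaf produced by random_graph() is updated (the path and object id below are illustrative values, not fixtures from the repo):

    // Minimal sketch of the new mutable Graph API; not part of the commit.
    use offline_web_model::Graph;

    fn main() -> Result<(), String> {
        let mut graph = Graph::random_graph();

        // random_graph() creates leaves at "/item/{i}/subitem/{j}" paths.
        let path = String::from("/item/1/subitem/1");

        // Point the leaf at new content: the content is hashed into a new
        // address, stored in the object map, and every parent reference has
        // its content address recomputed.
        graph.update_reference(
            &path,
            "item:1:subitem:1".to_string(), // object id, illustrative
            "updated content".to_string(),
        )?;

        let leaf = graph.get_reference(&path).expect("leaf should still exist");
        assert!(graph.get_object(&leaf.content_address).is_some());
        Ok(())
    }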