Mirror of https://github.com/zaphar/merkle-dag.git (synced 2025-07-23 11:09:51 -04:00)
Unit tests for properties around adding a node

commit ce3fd62f52 (parent 58fcee69f5)
@@ -45,3 +45,12 @@ where
         self.finish().to_le_bytes()
     }
 }
+
+impl<S> ByteEncoder for S
+where
+    S: ToString,
+{
+    fn bytes(&self) -> Vec<u8> {
+        self.to_string().into_bytes()
+    }
+}
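
The hunk above adds a blanket ByteEncoder implementation: any payload type that implements ToString gets a byte encoding for free, which is what lets the tests below use plain &str payloads. A minimal standalone sketch of the same pattern, using a simplified stand-in for the crate's ByteEncoder trait (the real one lives alongside HashWriter in the hash module):

    // Simplified stand-in for the crate's ByteEncoder trait.
    trait ByteEncoder {
        fn bytes(&self) -> Vec<u8>;
    }

    // Blanket impl: anything that can render itself as a string
    // encodes to the bytes of that string.
    impl<S> ByteEncoder for S
    where
        S: ToString,
    {
        fn bytes(&self) -> Vec<u8> {
            self.to_string().into_bytes()
        }
    }

    fn main() {
        // u64 and &str both implement ToString, so both pick up the impl.
        assert_eq!(ByteEncoder::bytes(&42u64), b"42".to_vec());
        assert_eq!(ByteEncoder::bytes(&"quax"), b"quax".to_vec());
    }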
src/lib.rs (33 lines changed)
@@ -19,6 +19,7 @@ use node::Node;
 mod hash;
 mod node;
 
+#[derive(Debug)]
 pub enum EdgeError {
     NoSuchDependents,
 }
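
The only change here is the Debug derive on EdgeError. It matters for the new tests: Result::unwrap() is only callable when the error type implements Debug, so the derive is what allows dag.add_node(...).unwrap() in src/test.rs below. A small illustrative sketch (might_fail is a made-up helper):

    // Result::unwrap() requires the error type to implement Debug so the
    // error can be formatted if the unwrap panics.
    #[derive(Debug)]
    enum EdgeError {
        NoSuchDependents,
    }

    fn might_fail(fail: bool) -> Result<u8, EdgeError> {
        if fail {
            Err(EdgeError::NoSuchDependents)
        } else {
            Ok(42)
        }
    }

    fn main() {
        // unwrap() compiles only because EdgeError implements Debug.
        assert_eq!(might_fail(false).unwrap(), 42);
        // The derive also makes the error printable; this prints "NoSuchDependents".
        println!("{:?}", might_fail(true).unwrap_err());
    }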
@@ -56,23 +57,30 @@ where
     /// Add a new payload with a required set of dependency_ids. This method will construct a new node
     /// and add it to the DAG with the given payload item and dependency id set. It is idempotent for any
     /// given set of inputs.
-    pub fn add_node(
-        &mut self,
+    pub fn add_node<'a>(
+        &'a mut self,
         item: N,
         dependency_ids: BTreeSet<[u8; HASH_LEN]>,
-    ) -> Result<(), EdgeError> {
+    ) -> Result<[u8; HASH_LEN], EdgeError> {
         let node = Node::<N, HW, HASH_LEN>::new(item, dependency_ids.clone());
-        let id = node.id();
-        if self.roots.contains(id) {
+        let id = node.id().clone();
+        if self.nodes.contains_key(&id) {
             // We've already added this node so there is nothing left to do.
-            return Ok(());
+            return Ok(id);
         }
         for dep_id in dependency_ids.iter() {
             if !self.nodes.contains_key(dep_id) {
                 return Err(EdgeError::NoSuchDependents);
             }
+            // If any of our dependencies is in the roots pointer list then
+            // it is time to remove it from there.
+            if self.roots.contains(dep_id) {
+                self.roots.remove(dep_id);
+            }
         }
-        Ok(())
+        self.roots.insert(id.clone());
+        self.nodes.insert(id.clone(), node);
+        Ok(id)
     }
 
     /// Get a node from the DAG by it's hash identifier if it exists.
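
Summing up the hunk above: add_node now returns the new node's content hash, rejects unknown dependency ids, and keeps the root pointer set (exposed by the get_roots() accessor added in the next hunk) in sync as nodes gain dependents. A hedged usage sketch, written as if it sat inside the crate's own test module so that use super::*; resolves DAG, Node, and EdgeError exactly as in src/test.rs; the payload strings and the function name are illustrative only:

    use std::collections::{hash_map::DefaultHasher, BTreeSet};

    use super::*;

    fn add_node_walkthrough() -> Result<(), EdgeError> {
        let mut dag = DAG::<&str, DefaultHasher, 8>::new();

        // add_node now hands back the node's hash id instead of ().
        let leaf_id = dag.add_node("leaf", BTreeSet::new())?;
        assert!(dag.get_roots().contains(&leaf_id));

        // Depending on "leaf" demotes it from the root pointer set.
        let mut deps = BTreeSet::new();
        deps.insert(leaf_id);
        let parent_id = dag.add_node("parent", deps)?;
        assert!(dag.get_roots().contains(&parent_id));
        assert!(!dag.get_roots().contains(&leaf_id));

        // Dependencies must already be present in the DAG.
        let missing = Node::<&str, DefaultHasher, 8>::new("missing", BTreeSet::new());
        let mut bogus = BTreeSet::new();
        bogus.insert(*missing.id());
        assert!(matches!(
            dag.add_node("orphan", bogus),
            Err(EdgeError::NoSuchDependents)
        ));
        Ok(())
    }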
@@ -80,6 +88,14 @@ where
         self.nodes.get(id)
     }
 
+    pub fn get_roots(&self) -> &BTreeSet<[u8; HASH_LEN]> {
+        &self.roots
+    }
+
+    pub fn get_nodes(&self) -> &BTreeMap<[u8; HASH_LEN], Node<N, HW, HASH_LEN>> {
+        &self.nodes
+    }
+
     // TODO(jwall): How to specify a partial ordering for nodes in a graph?
 }
 
@@ -95,3 +111,6 @@ where
         }
     }
 }
+
+#[cfg(test)]
+mod test;
@@ -25,6 +25,7 @@ use crate::hash::{ByteEncoder, HashWriter};
 /// Nodes are tied to a specific implementation of the HashWriter trait which is itself tied
 /// to the DAG they are stored in guaranteeing that the same Hashing implementation is used
 /// for each node in the DAG.
+#[derive(Debug, PartialEq)]
 pub struct Node<N, HW, const HASH_LEN: usize>
 where
     N: ByteEncoder,
@@ -70,7 +71,7 @@ where
         }
     }
 
-    pub fn id(&self) -> &[u8] {
+    pub fn id(&self) -> &[u8; HASH_LEN] {
         &self.id
     }
 
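
The sharper return type on id() is what the new tests lean on: a fixed-size [u8; HASH_LEN] array is Copy and Ord, so *node.id() (or .clone()) yields an owned id that can be inserted into the BTreeSet of dependency ids, something the old &[u8] slice return could not provide without a conversion. A tiny standalone sketch of that property (the literal id bytes are made up):

    use std::collections::BTreeSet;

    fn main() {
        // A fixed-size byte array stands in for a node id.
        let id_ref: &[u8; 8] = &[1, 2, 3, 4, 5, 6, 7, 8];

        // [u8; 8] is Copy and Ord, so dereferencing the borrow yields an
        // owned value that can live in a BTreeSet<[u8; 8]>.
        let mut deps: BTreeSet<[u8; 8]> = BTreeSet::new();
        deps.insert(*id_ref);

        assert!(deps.contains(id_ref));
    }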
src/test.rs (new file, 63 lines)
@@ -0,0 +1,63 @@
+// Copyright 2022 Jeremy Wall (Jeremy@marzhilsltudios.com)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+use std::collections::hash_map::DefaultHasher;
+
+use super::*;
+
+#[test]
+fn test_root_pointer_hygiene() {
+    let mut dag = DAG::<&str, DefaultHasher, 8>::new();
+    let quax_node_id = dag.add_node("quax", BTreeSet::new()).unwrap();
+    assert_eq!(
+        quax_node_id,
+        *dag.get_node_by_id(&quax_node_id).unwrap().id()
+    );
+    assert!(dag.get_roots().contains(&quax_node_id));
+    let mut dep_set = BTreeSet::new();
+    dep_set.insert(quax_node_id);
+    let quux_node_id = dag.add_node("quux", dep_set).unwrap();
+    assert!(!dag.get_roots().contains(&quax_node_id));
+    assert!(dag.get_roots().contains(&quux_node_id));
+    assert_eq!(
+        quux_node_id,
+        *dag.get_node_by_id(&quux_node_id).unwrap().id()
+    );
+}
+
+#[test]
+fn test_insert_no_such_dependents_error() {
+    let missing_dependent = Node::<&str, DefaultHasher, 8>::new("missing", BTreeSet::new());
+    let mut dag = DAG::<&str, DefaultHasher, 8>::new();
+    let mut dep_set = BTreeSet::new();
+    dep_set.insert(*missing_dependent.id());
+    assert!(dag.add_node("foo", dep_set).is_err());
+    assert!(dag.get_roots().is_empty());
+    assert!(dag.get_nodes().is_empty());
+}
+
+#[test]
+fn test_adding_nodes_is_idempotent() {
+    let mut dag = DAG::<&str, DefaultHasher, 8>::new();
+    let quax_node_id = dag.add_node("quax", BTreeSet::new()).unwrap();
+    assert_eq!(
+        quax_node_id,
+        *dag.get_node_by_id(&quax_node_id).unwrap().id()
+    );
+    assert!(dag.get_roots().contains(&quax_node_id));
+    let root_size = dag.get_roots().len();
+    let nodes_size = dag.get_nodes().len();
+    dag.add_node("quax", BTreeSet::new()).unwrap();
+    assert_eq!(root_size, dag.get_roots().len());
+    assert_eq!(nodes_size, dag.get_nodes().len());
+}
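
These three tests only compile in test builds: src/test.rs is pulled in through the #[cfg(test)] mod test; declaration added to src/lib.rs above, so a plain cargo test run exercises all of the properties this commit pins down (root pointer hygiene, missing-dependency errors, and idempotent insertion).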