From d723d427e6e88c6852f547968baf1cca48f89ca6 Mon Sep 17 00:00:00 2001
From: Jeremy Wall
Date: Tue, 23 Aug 2022 22:03:48 -0400
Subject: [PATCH] Tests for Node Deserialization

---
 .vscode/settings.json |  3 +-
 src/node.rs           | 91 +++++++++++++------------------------------
 src/test.rs           | 43 ++++++++++++++++++++
 3 files changed, 71 insertions(+), 66 deletions(-)

diff --git a/.vscode/settings.json b/.vscode/settings.json
index f97ed39..5b2916e 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,3 +1,4 @@
 {
-    "rust-analyzer.checkOnSave.features": "all"
+    "rust-analyzer.checkOnSave.features": "all",
+    "rust-analyzer.cargo.features": "all"
 }
\ No newline at end of file
diff --git a/src/node.rs b/src/node.rs
index a63e852..7291890 100644
--- a/src/node.rs
+++ b/src/node.rs
@@ -27,7 +27,7 @@ use crate::hash::HashWriter;
 /// Nodes are tied to a specific implementation of the HashWriter trait which is itself tied
 /// to the DAG they are stored in guaranteeing that the same Hashing implementation is used
 /// for each node in the DAG.
-#[derive(Debug, PartialEq)]
+#[derive(Debug, PartialEq, Eq)]
 pub struct Node<HW, const HASH_LEN: usize>
 where
     HW: HashWriter<HASH_LEN>,
@@ -54,6 +54,16 @@ where
     }
 }
 
+fn coerce_non_const_generic_set<const HASH_LEN: usize>(
+    set: &BTreeSet<[u8; HASH_LEN]>,
+) -> BTreeSet<&[u8]> {
+    let mut coerced_item = BTreeSet::new();
+    for arr in set {
+        coerced_item.insert(arr.as_slice());
+    }
+    coerced_item
+}
+
 impl<HW, const HASH_LEN: usize> Serialize for Node<HW, HASH_LEN>
 where
     HW: HashWriter<HASH_LEN>,
@@ -63,15 +73,18 @@ where
         S: Serializer,
     {
         let mut structor = serializer.serialize_struct("Node", 4)?;
-        structor.serialize_field("id", self.id.as_slice())?;
         structor.serialize_field("item", &self.item)?;
-        structor.serialize_field("item_id", self.item_id.as_slice())?;
-        // TODO(jwall): structor.serialize_field("dependency_ids", &self.dependency_ids)?;
+        structor.serialize_field(
+            "dependency_ids",
+            &coerce_non_const_generic_set(&self.dependency_ids),
+        )?;
         structor.end()
     }
 }
 
-fn coerce_array<const HASH_LEN: usize>(slice: &[u8]) -> Result<[u8; HASH_LEN], String> {
+fn coerce_const_generic_array<const HASH_LEN: usize>(
+    slice: &[u8],
+) -> Result<[u8; HASH_LEN], String> {
     let mut coerced_item: [u8; HASH_LEN] = [0; HASH_LEN];
     if slice.len() > coerced_item.len() {
         return Err(format!(
@@ -85,12 +98,12 @@ fn coerce_array<const HASH_LEN: usize>(slice: &[u8]) -> Result<[u8; HASH_LEN], S
     Ok(coerced_item)
 }
 
-fn coerce_set<const HASH_LEN: usize>(
+fn coerce_const_generic_set<const HASH_LEN: usize>(
     set: BTreeSet<&[u8]>,
 ) -> Result<BTreeSet<[u8; HASH_LEN]>, String> {
     let mut coerced_item = BTreeSet::new();
     for slice in set {
-        coerced_item.insert(coerce_array(slice)?);
+        coerced_item.insert(coerce_const_generic_array(slice)?);
     }
     Ok(coerced_item)
 }
@@ -107,9 +120,7 @@ where
         #[serde(field_identifier, rename_all = "lowercase")]
         #[allow(non_camel_case_types)]
         enum Field {
-            Id,
             Item,
-            Item_Id,
             Dependency_Ids,
         }
 
@@ -129,53 +140,25 @@ where
             where
                 A: serde::de::SeqAccess<'de>,
             {
-                let id: [u8; HASH_LEN] = coerce_array(
-                    seq.next_element::<&[u8]>()?
-                        .ok_or_else(|| serde::de::Error::invalid_length(0, &self))?,
-                )
-                .map_err(serde::de::Error::custom)?;
                 let item = seq
                     .next_element::<Vec<u8>>()?
                     .ok_or_else(|| serde::de::Error::invalid_length(1, &self))?;
-                let item_id: [u8; HASH_LEN] = coerce_array(
-                    seq.next_element::<&[u8]>()?
-                        .ok_or_else(|| serde::de::Error::invalid_length(0, &self))?,
-                )
-                .map_err(serde::de::Error::custom)?;
-                let dependency_ids: BTreeSet<[u8; HASH_LEN]> = coerce_set(
+                let dependency_ids: BTreeSet<[u8; HASH_LEN]> = coerce_const_generic_set(
                     seq.next_element::<BTreeSet<&[u8]>>()?
                        .ok_or_else(|| serde::de::Error::invalid_length(3, &self))?,
                 )
                 .map_err(serde::de::Error::custom)?;
-                Ok(Self::Value {
-                    id,
-                    item,
-                    item_id,
-                    dependency_ids,
-                    _phantom: PhantomData,
-                })
+                Ok(Self::Value::new(item, dependency_ids))
             }
 
             fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
             where
                 A: serde::de::MapAccess<'de>,
             {
-                let mut id: Option<[u8; HASH_LEN]> = None;
                 let mut item: Option<Vec<u8>> = None;
-                let mut item_id: Option<[u8; HASH_LEN]> = None;
                 let mut dependency_ids: Option<BTreeSet<[u8; HASH_LEN]>> = None;
                 while let Some(key) = map.next_key()? {
                     match key {
-                        Field::Id => {
-                            if id.is_some() {
-                                return Err(serde::de::Error::duplicate_field("id"));
-                            } else {
-                                id = Some(
-                                    coerce_array(map.next_value()?)
-                                        .map_err(serde::de::Error::custom)?,
-                                );
-                            }
-                        }
                         Field::Item => {
                             if item.is_some() {
                                 return Err(serde::de::Error::duplicate_field("item"));
@@ -183,50 +166,28 @@ where
                                 item = Some(map.next_value()?);
                             }
                         }
-                        Field::Item_Id => {
-                            if item_id.is_some() {
-                                return Err(serde::de::Error::duplicate_field("item_id"));
-                            } else {
-                                item_id = Some(
-                                    coerce_array(map.next_value()?)
-                                        .map_err(serde::de::Error::custom)?,
-                                );
-                            }
-                        }
                         Field::Dependency_Ids => {
                             if dependency_ids.is_some() {
                                 return Err(serde::de::Error::duplicate_field("dependency_ids"));
                             } else {
                                 dependency_ids = Some(
-                                    coerce_set(map.next_value()?)
+                                    coerce_const_generic_set(map.next_value()?)
                                         .map_err(serde::de::Error::custom)?,
                                 );
                             }
                         }
                     }
                 }
-                let id = id.ok_or_else(|| serde::de::Error::missing_field("id"))?;
                 let item = item.ok_or_else(|| serde::de::Error::missing_field("item"))?;
-                let item_id = item_id.ok_or_else(|| serde::de::Error::missing_field("item_id"))?;
                 let dependency_ids = dependency_ids
                     .ok_or_else(|| serde::de::Error::missing_field("dependency_ids"))?;
-                Ok(Self::Value {
-                    id,
-                    item,
-                    item_id,
-                    dependency_ids,
-                    _phantom: PhantomData,
-                })
+                Ok(Self::Value::new(item, dependency_ids))
             }
         }
 
-        const FIELDS: &'static [&'static str] = &["id", "item", "item_id", "dependency_ids"];
-        deserializer.deserialize_struct(
-            "Duration",
-            FIELDS,
-            NodeVisitor::<HW, HASH_LEN>(PhantomData),
-        )
+        const FIELDS: &'static [&'static str] = &["item", "dependency_ids"];
+        deserializer.deserialize_struct("Node", FIELDS, NodeVisitor::<HW, HASH_LEN>(PhantomData))
     }
 }
diff --git a/src/test.rs b/src/test.rs
index 82b6e4b..143ade3 100644
--- a/src/test.rs
+++ b/src/test.rs
@@ -161,3 +161,46 @@ fn test_node_comparison_no_shared_graph() {
         NodeCompare::Uncomparable
     );
 }
+
+#[cfg(feature = "cbor")]
+mod cbor_serialization_tests {
+    use super::TestDag;
+    use crate::prelude::*;
+    use ciborium::{de::from_reader, ser::into_writer};
+    use std::collections::{hash_map::DefaultHasher, BTreeSet};
+
+    #[test]
+    fn test_node_deserialization() {
+        let mut dag = TestDag::new();
+        let simple_node_id = dag.add_node("simple", BTreeSet::new()).unwrap();
+        let mut dep_set = BTreeSet::new();
+        dep_set.insert(simple_node_id);
+        let root_node_id = dag.add_node("root", dep_set).unwrap();
+
+        let simple_node_to_serialize = dag.get_node_by_id(&simple_node_id).unwrap().unwrap();
+        let root_node_to_serialize = dag.get_node_by_id(&root_node_id).unwrap().unwrap();
+
+        let mut simple_node_vec: Vec<u8> = Vec::new();
+        let mut root_node_vec: Vec<u8> = Vec::new();
+        into_writer(&simple_node_to_serialize, &mut simple_node_vec).unwrap();
+        into_writer(&root_node_to_serialize, &mut root_node_vec).unwrap();
+
+        let simple_node_de: Node<DefaultHasher, 8> =
+            from_reader(simple_node_vec.as_slice()).unwrap();
+        let root_node_de: Node<DefaultHasher, 8> = from_reader(root_node_vec.as_slice()).unwrap();
+        assert_eq!(simple_node_to_serialize.id(), simple_node_de.id());
+        assert_eq!(simple_node_to_serialize.item_id(), simple_node_de.item_id());
+        assert_eq!(simple_node_to_serialize.item(), simple_node_de.item());
+        assert_eq!(
+            simple_node_to_serialize.dependency_ids(),
+            simple_node_de.dependency_ids()
+        );
+        assert_eq!(root_node_to_serialize.id(), root_node_de.id());
+        assert_eq!(root_node_to_serialize.item_id(), root_node_de.item_id());
+        assert_eq!(root_node_to_serialize.item(), root_node_de.item());
+        assert_eq!(
+            root_node_to_serialize.dependency_ids(),
+            root_node_de.dependency_ids()
+        );
+    }
+}
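
A short note on the resulting wire format, with an illustrative sketch (not part of the patch): only "item" and "dependency_ids" are serialized now; "id" and "item_id" are dropped from the encoded form because the visitor rebuilds the node with Self::Value::new(item, dependency_ids), which is expected to recompute both hashes (the new test asserts exactly that). The coerce_* helpers exist because the ids are const-generic arrays ([u8; HASH_LEN]) and serde's built-in impls do not cover deserializing arrays of arbitrary const-generic length, so the id sets are round-tripped through plain &[u8] slices. The sketch below shows that coercion in isolation; the 8-byte length and every name in it are illustrative, not taken from the crate:

    use std::collections::BTreeSet;

    fn main() {
        // Hypothetical fixed-size ids, standing in for [u8; HASH_LEN].
        let mut ids: BTreeSet<[u8; 8]> = BTreeSet::new();
        ids.insert([1, 2, 3, 4, 5, 6, 7, 8]);

        // Serialize side: borrow each array as a plain byte slice,
        // the same shape coerce_non_const_generic_set produces.
        let as_slices: BTreeSet<&[u8]> = ids.iter().map(|a| a.as_slice()).collect();

        // Deserialize side: copy each slice back into a fixed-size array,
        // mirroring coerce_const_generic_array / coerce_const_generic_set.
        let restored: BTreeSet<[u8; 8]> = as_slices
            .iter()
            .copied()
            .map(|slice| {
                let mut arr = [0u8; 8];
                arr[..slice.len()].copy_from_slice(slice);
                arr
            })
            .collect();

        assert_eq!(ids, restored);
    }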