diff --git a/rust/catalyst-types/src/catalyst_id/mod.rs b/rust/catalyst-types/src/catalyst_id/mod.rs index 090f2f0e50..1612cec4a1 100644 --- a/rust/catalyst-types/src/catalyst_id/mod.rs +++ b/rust/catalyst-types/src/catalyst_id/mod.rs @@ -676,6 +676,15 @@ impl TryFrom<&[u8]> for CatalystId { } } +impl minicbor::Encode<()> for CatalystId { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.bytes(self.to_string().into_bytes().as_slice())?; + Ok(()) + } +} + #[cfg(test)] mod tests { use chrono::{DateTime, Utc}; diff --git a/rust/catalyst-types/src/uuid/mod.rs b/rust/catalyst-types/src/uuid/mod.rs index 3e25737e1d..c2df1c4795 100644 --- a/rust/catalyst-types/src/uuid/mod.rs +++ b/rust/catalyst-types/src/uuid/mod.rs @@ -15,8 +15,7 @@ use minicbor::data::Tag; pub const INVALID_UUID: uuid::Uuid = uuid::Uuid::from_bytes([0x00; 16]); /// UUID CBOR tag . -#[allow(dead_code)] -const UUID_CBOR_TAG: u64 = 37; +pub const UUID_CBOR_TAG: u64 = 37; /// Uuid validation errors, which could occur during decoding or converting to /// `UuidV4` or `UuidV7` types. @@ -29,6 +28,9 @@ pub enum UuidError { /// `UUIDv7` invalid error #[error("'{0}' is not a valid UUIDv7")] InvalidUuidV7(uuid::Uuid), + /// Invalid string conversion + #[error("Invalid string conversion: {0}")] + StringConversion(String), } /// Context for `CBOR` encoding and decoding diff --git a/rust/catalyst-types/src/uuid/uuid_v4.rs b/rust/catalyst-types/src/uuid/uuid_v4.rs index c7e2dbb814..a7baf46248 100644 --- a/rust/catalyst-types/src/uuid/uuid_v4.rs +++ b/rust/catalyst-types/src/uuid/uuid_v4.rs @@ -1,5 +1,8 @@ //! `UUIDv4` Type. -use std::fmt::{Display, Formatter}; +use std::{ + fmt::{Display, Formatter}, + str::FromStr, +}; use minicbor::{Decode, Decoder, Encode}; use uuid::Uuid; @@ -7,7 +10,7 @@ use uuid::Uuid; use super::{decode_cbor_uuid, encode_cbor_uuid, CborContext, UuidError, INVALID_UUID}; /// Type representing a `UUIDv4`. 
-#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, serde::Serialize)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, serde::Serialize)] pub struct UuidV4(Uuid); impl UuidV4 { @@ -106,6 +109,15 @@ impl<'de> serde::Deserialize<'de> for UuidV4 { } } +impl FromStr for UuidV4 { + type Err = UuidError; + + fn from_str(s: &str) -> Result { + let uuid = Uuid::parse_str(s).map_err(|_| UuidError::StringConversion(s.to_string()))?; + UuidV4::try_from(uuid).map_err(|_| UuidError::InvalidUuidV4(uuid)) + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/rust/catalyst-types/src/uuid/uuid_v7.rs b/rust/catalyst-types/src/uuid/uuid_v7.rs index 98fbd8cda6..f77accc7aa 100644 --- a/rust/catalyst-types/src/uuid/uuid_v7.rs +++ b/rust/catalyst-types/src/uuid/uuid_v7.rs @@ -1,5 +1,8 @@ //! `UUIDv7` Type. -use std::fmt::{Display, Formatter}; +use std::{ + fmt::{Display, Formatter}, + str::FromStr, +}; use minicbor::{Decode, Decoder, Encode}; use uuid::Uuid; @@ -7,7 +10,7 @@ use uuid::Uuid; use super::{decode_cbor_uuid, encode_cbor_uuid, CborContext, UuidError, INVALID_UUID}; /// Type representing a `UUIDv7`. -#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, serde::Serialize)] +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Hash, serde::Serialize)] pub struct UuidV7(Uuid); impl UuidV7 { @@ -106,6 +109,15 @@ impl<'de> serde::Deserialize<'de> for UuidV7 { } } +impl FromStr for UuidV7 { + type Err = UuidError; + + fn from_str(s: &str) -> Result { + let uuid = Uuid::parse_str(s).map_err(|_| UuidError::StringConversion(s.to_string()))?; + UuidV7::try_from(uuid).map_err(|_| UuidError::InvalidUuidV7(uuid)) + } +} + #[cfg(test)] mod tests { use uuid::Uuid; diff --git a/rust/cbork-utils/src/deterministic_helper.rs b/rust/cbork-utils/src/deterministic_helper.rs new file mode 100644 index 0000000000..b2855c9e6a --- /dev/null +++ b/rust/cbork-utils/src/deterministic_helper.rs @@ -0,0 +1,895 @@ +//! 
CBOR decoding helper functions with deterministic encoding validation. +//! +//! Based on RFC 8949 Section 4.2 "Deterministically Encoded CBOR" +//! Rules for deterministic encoding: +//! 1. Integers must use the smallest possible encoding +//! 2. Lengths of arrays, maps, strings must use the smallest possible encoding +//! 3. Indefinite-length items are not allowed +//! 4. Keys in every map must be sorted in lexicographic order +//! 5. Duplicate keys in maps are not allowed +//! 6. Floating point values must use smallest possible encoding +//! 7. Non-finite floating point values are not allowed (NaN, infinite) + +use std::cmp::Ordering; + +use minicbor::Decoder; + +/// Major type indicator for CBOR maps (major type 5: 101 in top 3 bits) +/// As per RFC 8949 Section 4.2.3, maps in deterministic encoding must: +/// - Have keys sorted by length first, then byte wise lexicographically +/// - Contain no duplicate keys +const CBOR_MAJOR_TYPE_MAP: u8 = 5 << 5; + +/// Maximum value that can be encoded in a 5-bit additional info field +/// RFC 8949 Section 4.2.1: "0 to 23 must be expressed in the same byte as the major type" +/// Values 0-23 are encoded directly in the additional info field of the initial byte +const CBOR_MAX_TINY_VALUE: u64 = 23; + +/// Initial byte for a CBOR map whose length is encoded as an 8-bit unsigned integer +/// (uint8). +/// +/// This value combines the map major type (5) with the additional information value (24) +/// that indicates a uint8 length follows. The resulting byte is: +/// - High 3 bits: 101 (major type 5 for map) +/// - Low 5 bits: 24 (indicates uint8 length follows) +/// +/// Used when encoding CBOR maps with lengths between 24 and 255 elements. +const CBOR_MAP_LENGTH_UINT8: u8 = CBOR_MAJOR_TYPE_MAP | 24; // For uint8 length encoding + +/// Represents a CBOR map key-value pair where the key must be deterministically encoded +/// according to RFC 8949 Section 4.2.3. 
+/// +/// This type stores the raw bytes of both key and value to enable: +/// 1. Length-first ordering of keys (shorter keys before longer ones) +/// 2. Lexicographic comparison of equal-length keys +/// 3. Preservation of the original encoded form +#[derive(Clone, Eq, PartialEq, Debug)] +pub struct MapEntry { + /// Raw bytes of the encoded key, used for deterministic ordering + pub key_bytes: Vec, + /// Raw bytes of the encoded value + pub value: Vec, +} + +impl PartialOrd for MapEntry { + /// Compare map entries according to RFC 8949 Section 4.2.3 rules: + /// 1. Compare by length of encoded key + /// 2. If lengths equal, compare byte wise lexicographically + /// + /// Returns Some(ordering) since comparison is always defined for these types + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for MapEntry { + /// Compare map entries according to RFC 8949 Section 4.2.3 rules: + /// 1. Compare by length of encoded key + /// 2. If lengths equal, compare byte wise lexicographically + fn cmp(&self, other: &Self) -> Ordering { + self.key_bytes + .len() + .cmp(&other.key_bytes.len()) + .then_with(|| self.key_bytes.cmp(&other.key_bytes)) + } +} + +/// Decodes a CBOR map with deterministic encoding validation (RFC 8949 Section 4.2.3) +/// Returns the raw bytes of the map if it passes all deterministic validation rules. +/// +/// From RFC 8949 Section 4.2.3: +/// "The keys in every map must be sorted in the following order: +/// 1. If two keys have different lengths, the shorter one sorts earlier; +/// 2. If two keys have the same length, the one with the lower value in (byte-wise) +/// lexical order sorts earlier." 
+/// +/// Additionally: +/// - Map lengths must use minimal encoding (Section 4.2.1) +/// - Indefinite-length maps are not allowed (Section 4.2.2) +/// - No two keys may be equal (Section 4.2.3) +/// - The keys themselves must be deterministically encoded +/// +/// # Errors +/// +/// Returns `DeterministicError` if: +/// - Input is empty (`UnexpectedEof`) +/// - Map uses indefinite-length encoding (`IndefiniteLength`) +/// - Map length is not encoded minimally (`NonMinimalInt`) +/// - Map keys are not properly sorted (`UnorderedMapKeys`) +/// - Duplicate keys are found (`DuplicateMapKey`) +/// - Map key or value decoding fails (`DecoderError`) +pub fn decode_map_deterministically(d: &mut Decoder) -> Result, minicbor::decode::Error> { + validate_input_not_empty(d)?; + + // Store the starting position BEFORE consuming the map header + let map_start = d.position(); + + // From RFC 8949 Section 4.2.2: + // "Indefinite-length items must be made definite-length items." + // The specification explicitly prohibits indefinite-length items in + // deterministic encoding to ensure consistent representation. + let map_len = d.map()?.ok_or_else(|| { + minicbor::decode::Error::message( + "Indefinite-length items must be made definite-length items", + ) + })?; + + let header_end_pos = d.position(); + + check_map_minimal_length(d, header_end_pos, map_len)?; + + // Decode entries to validate them + let entries = decode_map_entries(d, map_len)?; + + validate_map_ordering(&entries)?; + + // Get the ending position after validation + let map_end = d.position(); + + get_bytes(d, map_start, map_end) +} + +/// Extracts the raw bytes of a CBOR map from a decoder based on specified positions. +/// This function retrieves the raw byte representation of a CBOR map between the given +/// start and end positions from the decoder's underlying buffer. 
+fn get_bytes(
+    d: &Decoder<'_>, map_start: usize, map_end: usize,
+) -> Result<Vec<u8>, minicbor::decode::Error> {
+    d.input()
+        .get(map_start..map_end)
+        .ok_or_else(|| {
+            minicbor::decode::Error::message("Invalid map byte range: indices out of bounds")
+        })
+        .map(<[u8]>::to_vec)
+}
+
+/// Decodes all key-value pairs in the map
+fn decode_map_entries(
+    d: &mut Decoder, length: u64,
+) -> Result<Vec<MapEntry>, minicbor::decode::Error> {
{ + let header_size = get_cbor_header_size(key_bytes)?; + let actual_content_size = key_bytes.len().checked_sub(header_size).ok_or_else(|| { + minicbor::decode::Error::message("Integer overflow in content size calculation") + })?; + + if key_declared_length != actual_content_size { + minicbor::decode::Error::message( + "Declared length does not match the actual length. Non deterministic map key.", + ); + } + } + Ok(()) +} + +/// Extracts the declared length from a CBOR data item according to RFC 8949 encoding +/// rules. +/// +/// This function analyzes the major type and additional information in the CBOR initial +/// byte to determine if the data item has a declared length and what that length is. +/// +/// ## CBOR Major Types and Length Semantics (RFC 8949 Section 3): +/// +/// - **Major Type 0/1 (Unsigned/Negative Integers)**: No length concept - the value IS +/// the data +/// - **Major Type 2 (Byte String)**: Length indicates number of bytes in the string +/// - **Major Type 3 (Text String)**: Length indicates number of bytes in UTF-8 encoding +/// - **Major Type 4 (Array)**: Length indicates number of data items (elements) in the +/// array +/// - **Major Type 5 (Map)**: Length indicates number of key-value pairs in the map +/// - **Major Type 6 (Semantic Tag)**: Tags the following data item, length from tagged +/// content +/// - **Major Type 7 (Primitives)**: No length for simple values, floats, etc. 
+/// +/// ## Errors +pub fn get_declared_length(bytes: &[u8]) -> Result, minicbor::decode::Error> { + let mut decoder = minicbor::Decoder::new(bytes); + + // Extract major type from high 3 bits of initial byte (RFC 8949 Section 3.1) + match bytes.first().map(|&b| b >> 5) { + Some(7 | 0 | 1 | 4 | 5 | 6) => Ok(None), + Some(2) => { + // Read length for byte string header + let len = decoder.bytes()?; + Ok(Some(len.len())) + }, + Some(3) => { + // Read length for text string header + let len = decoder.str()?; + Ok(Some(len.len())) + }, + + _ => Err(minicbor::decode::Error::message("Invalid type")), + } +} + +/// Returns the size of the CBOR header in bytes, based on RFC 8949 encoding rules. +/// +/// CBOR encodes data items with a header that consists of: +/// 1. An initial byte containing: +/// - Major type (3 most significant bits) +/// - Additional information (5 least significant bits) +/// 2. Optional following bytes based on the additional information value +/// +/// This function calculates only the size of the header itself, not including +/// any data that follows the header. It works with all CBOR major types: +/// - 0: Unsigned integer +/// - 1: Negative integer +/// - 2: Byte string +/// - 3: Text string +/// - 4: Array +/// - 5: Map +/// - 6: Tag +/// - 7: Simple/floating-point values +/// +/// For deterministically encoded CBOR (as specified in RFC 8949 Section 4.2), +/// indefinite length items are not allowed, so this function will return an error +/// when encountering additional information value 31. 
+/// +/// # Arguments +/// * `bytes` - A byte slice containing CBOR-encoded data +/// +/// # Returns +/// * `Ok(usize)` - The size of the CBOR header in bytes +/// * `Err(DeterministicError)` - If the input is invalid or uses indefinite length +/// encoding +/// +/// # Errors +/// Returns an error if: +/// - The input is empty +/// - The input uses indefinite length encoding (additional info = 31) +/// - The additional information value is invalid +pub fn get_cbor_header_size(bytes: &[u8]) -> Result { + // Check if input is empty, which is invalid CBOR + if bytes.is_empty() { + minicbor::decode::Error::message("Empty cbor bytes"); + } + + // Extract the first byte which contains both major type and additional info + let first_byte = bytes + .first() + .copied() + .ok_or_else(|| minicbor::decode::Error::message("Empty cbor data"))?; + // Major type is in the high 3 bits (not used in this function but noted for clarity) + // let major_type = first_byte >> 5; + // Additional info is in the low 5 bits and determines header size + let additional_info = first_byte & 0b0001_1111; + + // Calculate header size based on additional info value + match additional_info { + // Values 0-23 are encoded directly in the additional info bits + // Header is just the initial byte + 0..=23 => Ok(1), + + // Value 24 means the actual value is in the next 1 byte + // Header is 2 bytes (initial byte + 1 byte) + 24 => Ok(2), + + // Value 25 means the actual value is in the next 2 bytes + // Header is 3 bytes (initial byte + 2 bytes) + 25 => Ok(3), + + // Value 26 means the actual value is in the next 4 bytes + // Header is 5 bytes (initial byte + 4 bytes) + 26 => Ok(5), + + // Value 27 means the actual value is in the next 8 bytes + // Header is 9 bytes (initial byte + 8 bytes) + 27 => Ok(9), + // Value 31 indicates indefinite length, which is not allowed in + // deterministic encoding per RFC 8949 section 4.2.1 + 31 => { + Err(minicbor::decode::Error::message( + "Cannot determine size of 
indefinite length item", + )) + }, + + // Values 28-30 are reserved in RFC 8949 and not valid in current CBOR + _ => { + Err(minicbor::decode::Error::message( + "Invalid additional info in CBOR header", + )) + }, + } +} + +/// Validates map keys are properly ordered according to RFC 8949 Section 4.2.3 +/// and checks for duplicate keys +fn validate_map_ordering(entries: &[MapEntry]) -> Result<(), minicbor::decode::Error> { + let mut iter = entries.iter(); + + // Get the first element if it exists + let Some(mut current) = iter.next() else { + // Empty slice is valid + return Ok(()); + }; + + // Compare each adjacent pair + for next in iter { + check_pair_ordering(current, next)?; + current = next; + } + + Ok(()) +} + +/// Checks if two adjacent map entries are in the correct order: +/// - Keys must be in ascending order (current < next) +/// - Duplicate keys are not allowed (current != next) +fn check_pair_ordering(current: &MapEntry, next: &MapEntry) -> Result<(), minicbor::decode::Error> { + match current.cmp(next) { + Ordering::Less => Ok(()), // Valid: keys are in ascending order + Ordering::Equal => Err(minicbor::decode::Error::message("Duplicate map key found")), + Ordering::Greater => { + Err(minicbor::decode::Error::message( + "Map keys not in canonical order", + )) + }, + } +} + +/// Validates that the decoder's input buffer is not empty. +fn validate_input_not_empty(d: &Decoder) -> Result<(), minicbor::decode::Error> { + if d.input().is_empty() { + return Err(minicbor::decode::Error::end_of_input()); + } + Ok(()) +} + +/// Validates that a CBOR map's length is encoded using the minimal number of bytes as +/// required by RFC 8949's deterministic encoding rules. 
+/// +/// According to the deterministic encoding requirements: +/// - The length of a map MUST be encoded using the smallest possible CBOR additional +/// information value +/// - For values 0 through 23, the additional info byte is used directly +/// - For values that fit in 8, 16, 32, or 64 bits, the appropriate multi-byte encoding +/// must be used +/// +/// # Specification Reference +/// This implementation follows RFC 8949 Section 4.2.1 which requires that: +/// "The length of arrays, maps, and strings MUST be encoded using the smallest possible +/// CBOR additional information value." +fn check_map_minimal_length( + decoder: &Decoder, position: usize, value: u64, +) -> Result<(), minicbor::decode::Error> { + // For zero length, 0xA0 is always the minimal encoding + if value == 0 { + return Ok(()); + } + + let initial_byte = decoder.input().get(position).copied().ok_or_else(|| { + minicbor::decode::Error::message(minicbor::decode::Error::message( + "Cannot read initial byte for minimality check", + )) + })?; + // Only check minimality for map length encodings using uint8 + // Immediate values (0-23) are already minimal by definition + if initial_byte == CBOR_MAP_LENGTH_UINT8 && value <= CBOR_MAX_TINY_VALUE { + minicbor::decode::Error::message(minicbor::decode::Error::message( + "map minimal length failure", + )); + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + /// Test the deterministic map validation rules from RFC 8949 Section 4.2.3. + /// + /// The RFC mandates: + /// 1. Keys must be sorted by length first + /// 2. Equal length keys must be sorted lexicographically + /// 3. 
No duplicate keys are allowed + #[test] + fn test_map_validation() { + // Test case 1: Valid ordering - shorter key before longer key + let valid_map = vec![ + 0xA2, // Map with 2 pairs + 0x42, 0x01, 0x02, // Key 1: 2-byte string + 0x41, 0x01, // Value 1: 1-byte string + 0x43, 0x01, 0x02, 0x03, // Key 2: 3-byte string + 0x41, 0x02, // Value 2: 1-byte string + ]; + let mut decoder = Decoder::new(&valid_map); + assert!(decode_map_deterministically(&mut decoder).is_ok()); + + // Test case 2: Invalid ordering - longer key before shorter key + let invalid_map = vec![ + 0xA2, // Map with 2 pairs + 0x43, 0x01, 0x02, 0x03, // Key 1: 3-byte string (longer first - invalid) + 0x41, 0x01, // Value 1: 1-byte string + 0x42, 0x01, 0x02, // Key 2: 2-byte string + 0x41, 0x02, // Value 2: 1-byte string + ]; + let mut decoder = Decoder::new(&invalid_map); + match decode_map_deterministically(&mut decoder) { + Ok(_) => (), + Err(err) => { + assert_eq!( + "decode error: Map keys not in canonical order", + err.to_string() + ); + }, + } + + // Test case 3: Duplicate keys + let duplicate_map = vec![ + 0xA2, // Map with 2 pairs + 0x42, 0x01, 0x02, // Key 1: 2-byte string + 0x41, 0x01, // Value 1: 1-byte string + 0x42, 0x01, 0x02, // Key 2: same as Key 1 (duplicate - invalid) + 0x41, 0x02, // Value 2: 1-byte string + ]; + let mut decoder = Decoder::new(&duplicate_map); + match decode_map_deterministically(&mut decoder) { + Ok(_) => (), + Err(err) => assert_eq!("decode error: Duplicate map key found", err.to_string()), + } + } + + #[test] + fn test_map_keys_are_deterministically_encoded() { + // bad encoding for keys + let valid_map = vec![ + 0xA4, 0x42, 0x01, 0x02, // Key 1: 2-byte string + 0x41, 0x01, // Value 1: 1-byte string + 0x43, 0x01, 0x02, 0x03, // Key 2: 3-byte string + 0x41, 0x02, // Value 2: 1-byte string + ]; + let mut decoder = Decoder::new(&valid_map); + assert!(decode_map_deterministically(&mut decoder).is_err()); + } + + #[test] + // Ensures that encoding and decoding a map 
preserves: + /// - The byte wise lexicographic ordering of keys + /// - The exact byte representation of values + /// - The definite length encoding format + fn test_map_bytes_roundtrip() { + // Create a valid deterministic map encoding + let valid_map = vec![ + 0xA2, // Map with 2 pairs + 0x42, 0x01, 0x02, // Key 1: 2-byte string + 0x41, 0x01, // Value 1: 1-byte string + 0x43, 0x01, 0x02, 0x03, // Key 2: 3-byte string + 0x41, 0x02, // Value 2: 1-byte string + ]; + + let mut decoder = Decoder::new(&valid_map); + let result = decode_map_deterministically(&mut decoder).unwrap(); + + // Verify we got back exactly the same bytes + assert_eq!(result, valid_map); + } + + /// Test cases for lexicographic ordering of map keys as specified in RFC 8949 Section + /// 4.2.3. + /// + /// From RFC 8949 Section 4.2.3: + /// "The keys in every map must be sorted in the following order: + /// 1. If two keys have different lengths, the shorter one sorts earlier; + /// 2. If two keys have the same length, the one with the lower value in (byte-wise) + /// lexical order sorts earlier." 
+ #[test] + fn test_map_lexicographic_ordering() { + // Test case: Equal length keys must be sorted lexicographically + // This follows rule 2 from RFC 8949 Section 4.2.3 for same-length keys + let valid_map = vec![ + 0xA2, // Map with 2 pairs + 0x42, 0x01, 0x02, // Key 1: [0x01, 0x02] + 0x41, 0x01, // Value 1 + 0x42, 0x01, 0x03, // Key 2: [0x01, 0x03] (lexicographically larger) + 0x41, 0x02, // Value 2 + ]; + let mut decoder = Decoder::new(&valid_map); + assert!(decode_map_deterministically(&mut decoder).is_ok()); + + // Invalid ordering - violates RFC 8949 Section 4.2.3 rule 2: + // "If two keys have the same length, the one with the lower value in + // (byte-wise) lexical order sorts earlier" + let invalid_map = vec![ + 0xA2, // Map with 2 pairs + 0x42, 0x01, 0x03, // Key 1: [0x01, 0x03] + 0x41, 0x01, // Value 1 + 0x42, 0x01, 0x02, // Key 2: [0x01, 0x02] (should come first) + 0x41, 0x02, // Value 2 + ]; + let mut decoder = Decoder::new(&invalid_map); + match decode_map_deterministically(&mut decoder) { + Ok(_) => (), + Err(err) => { + assert_eq!( + "decode error: Map keys not in canonical order", + err.to_string() + ); + }, + } + } + + /// Test empty map handling - special case mentioned in RFC 8949. + /// An empty map is valid and must still follow length encoding rules + /// from Section 4.2.1. + #[test] + fn test_empty_map() { + let empty_map = vec![ + 0xA0, // Map with 0 pairs - encoded with immediate value as per Section 4.2.1 + ]; + let mut decoder = Decoder::new(&empty_map); + assert!(decode_map_deterministically(&mut decoder).is_ok()); + } + + /// Test minimal length encoding rules for maps as specified in RFC 8949 Section 4.2.1 + /// + /// From RFC 8949 Section 4.2.1 "Integer Encoding": + /// "The following must be encoded only with the shortest form that can represent + /// the value: + /// 1. Integer values in items that use integer encoding + /// 2. 
The length of arrays, maps, strings, and byte strings + /// + /// Specifically for integers: + /// * 0 to 23 must be expressed in the same byte as the major type + /// * 24 to 255 must be expressed only with an additional `uint8_t` + /// * 256 to 65535 must be expressed only with an additional `uint16_t` + /// * 65536 to 4294967295 must be expressed only with an additional `uint32_t`" + /// + /// For maps (major type 5), the length must follow these rules. This ensures + /// a deterministic encoding where the same length is always encoded the same way. + /// Test minimal length encoding rules for maps as specified in RFC 8949 Section 4.2.1 + /// + /// From RFC 8949 Section 4.2.1: + /// "The length of arrays, maps, strings, and byte strings must be encoded in the + /// smallest possible way. For maps (major type 5), lengths 0-23 must be encoded + /// in the initial byte." + #[test] + fn test_map_minimal_length_encoding() { + // Test case 1: Valid minimal encoding (length = 1) + let valid_small = vec![ + 0xA1, // Map, length 1 (major type 5 with immediate value 1) + 0x01, // Key: unsigned int 1 + 0x02, // Value: unsigned int 2 + ]; + let mut decoder = Decoder::new(&valid_small); + + assert!(decode_map_deterministically(&mut decoder).is_ok()); + + // Test case 2: Invalid non-minimal encoding (using additional info 24 for length 1) + let invalid_small = vec![ + 0xB8, // Map with additional info = 24 (0xa0 | 0x18) + 0x01, // Length encoded as uint8 = 1 + 0x01, // Key: unsigned int 1 + 0x02, // Value: unsigned int 2 + ]; + let mut decoder = Decoder::new(&invalid_small); + + match decode_map_deterministically(&mut decoder) { + Ok(_) => (), + Err(err) => assert!(matches!("", "{:?}"), "{}", err.to_string()), + } + } + + /// Test handling of complex key structures while maintaining canonical ordering + /// + /// RFC 8949 Section 4.2.3 requires correct ordering regardless of key complexity: + /// "The keys in every map must be sorted [...] 
Note that this rule allows maps + /// to be deterministically ordered regardless of the specific data model of + /// the key values." + #[test] + fn test_map_complex_keys() { + // Test nested structures in keys while maintaining order + // Following RFC 8949 Section 4.2.3 length-first rule + let valid_complex = vec![ + 0xA2, // Map with 2 pairs + 0x42, 0x01, 0x02, // Key 1: simple 2-byte string (shorter, so comes first) + 0x41, 0x01, // Value 1 + 0x44, 0x01, 0x02, 0x03, 0x04, // Key 2: 4-byte string (longer, so comes second) + 0x41, 0x02, // Value 2 + ]; + let mut decoder = Decoder::new(&valid_complex); + assert!(decode_map_deterministically(&mut decoder).is_ok()); + } + + /// Test edge cases for map encoding while maintaining compliance with RFC 8949 + /// + /// These cases test boundary conditions that must still follow all rules from + /// Section 4.2: + /// - Minimal length encoding (4.2.1) + /// - No indefinite lengths (4.2.2) + /// - Canonical ordering (4.2.3) + #[test] + fn test_map_edge_cases() { + // Single entry map - must still follow minimal length encoding rules + let single_entry = vec![ + 0xA1, // Map with 1 pair (using immediate value as per Section 4.2.1) + 0x41, 0x01, // Key: 1-byte string + 0x41, 0x02, // Value: 1-byte string + ]; + let mut decoder = Decoder::new(&single_entry); + assert!(decode_map_deterministically(&mut decoder).is_ok()); + + // Map with zero-length string key - tests smallest possible key case + // Still must follow sorting rules from Section 4.2.3 + let zero_length_key = vec![ + 0xA1, // Map with 1 pair + 0x40, // Key: 0-byte string (smallest possible key length) + 0x41, 0x01, // Value: 1-byte string + ]; + let mut decoder = Decoder::new(&zero_length_key); + assert!(decode_map_deterministically(&mut decoder).is_ok()); + } + + /// Test duplicate key detection as required by RFC 8949 Section 4.2.3 + /// + /// From RFC 8949 Section 4.2.3: + /// "The keys in every map must be sorted [...] 
Note that this rule + /// automatically implies that no two keys in a map can be equal (have + /// the same length and the same value)." + #[test] + fn test_duplicate_keys() { + let map_with_duplicates = vec![ + 0xA2, // Map with 2 pairs + 0x41, 0x01, // Key 1: 1-byte string [0x01] + 0x41, 0x02, // Value 1 + 0x41, 0x01, // Key 2: same as Key 1 (duplicate - invalid) + 0x41, 0x03, // Value 2 + ]; + let mut decoder = Decoder::new(&map_with_duplicates); + match decode_map_deterministically(&mut decoder) { + Ok(_) => (), + Err(err) => assert_eq!("decode error: Duplicate map key found", err.to_string()), + } + } + + /// Test `get_declared_length` for all CBOR major types per RFC 8949 + #[test] + fn test_get_declared_length() { + // Example 1: Empty byte string + // Encoding: [0x40] + // - 0x40 = 0b010_00000 (major type 2, additional info 0) + // - Length: 0 bytes + // - Content: none + let empty_bytes = vec![0x40]; + + let declared_length = get_declared_length(&empty_bytes).unwrap().unwrap(); + + assert_eq!(declared_length, 0); + + // Example 2: 2-byte string with immediate length encoding + // Encoding: [0x42, 0x01, 0x02] + // - 0x42 = 0b010_00010 (major type 2, additional info 2) + // - Length: 2 bytes (encoded immediately in additional info) + // - Content: [0x01, 0x02] + let short_bytes = vec![0x42, 0x01, 0x02]; + + let declared_length = get_declared_length(&short_bytes).unwrap().unwrap(); + + assert_eq!(declared_length, 2); + + // Example 3: 24-byte string requiring uint8 length encoding + // Encoding: [0x58, 0x18, 0x01, 0x02, ..., 0x18] + // - 0x58 = 0b010_11000 (major type 2, additional info 24) + // - Length: 24 (encoded as uint8 in next byte: 0x18 = 24) + // - Content: 24 bytes [0x01, 0x02, ..., 0x18] + let mut medium_bytes = vec![0x58, 0x18]; // Header: byte string, uint8 length 24 + medium_bytes.extend((1..=24).collect::>()); // Content: 24 bytes + + let declared_length = get_declared_length(&medium_bytes).unwrap().unwrap(); + assert_eq!(declared_length, 24); 
+ + // Example 4: 256-byte string requiring uint16 length encoding + // Encoding: [0x59, 0x01, 0x00, 0x00, 0x00, ..., 0xFF] + // - 0x59 = 0b010_11001 (major type 2, additional info 25) + // - Length: 256 (encoded as uint16 big-endian: [0x01, 0x00]) + // - Content: 256 bytes [0x00, 0x00, ..., 0xFF] + let mut large_bytes = vec![0x59, 0x01, 0x00]; // Header: byte string, uint16 length 256 + large_bytes.extend(vec![0x00; 256]); // Content: 256 zero bytes + + let declared_length = get_declared_length(&large_bytes).unwrap().unwrap(); + assert_eq!(declared_length, 256); + } + + #[test] + fn test_get_cbor_header_size() { + // Test direct values (additional info 0-23) + assert_eq!(get_cbor_header_size(&[0b000_00000]).unwrap(), 1); // Major type 0, value 0 + assert_eq!(get_cbor_header_size(&[0b001_10111]).unwrap(), 1); // Major type 1, value 23 + + // Test 1-byte uint (additional info 24) + assert_eq!(get_cbor_header_size(&[0b010_11000, 0x42]).unwrap(), 2); // Major type 2 + + // Test 2-byte uint (additional info 25) + assert_eq!(get_cbor_header_size(&[0b011_11001, 0x12, 0x34]).unwrap(), 3); // Major type 3 + + // Test 4-byte uint (additional info 26) + assert_eq!( + get_cbor_header_size(&[0b100_11010, 0x12, 0x34, 0x56, 0x78]).unwrap(), + 5 + ); // Major type 4 + + // Test 8-byte uint (additional info 27) + assert_eq!( + get_cbor_header_size(&[0b101_11011, 0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF]) + .unwrap(), + 9 + ); // Major type 5 + + // Error cases + // Empty input + assert!(get_cbor_header_size(&[]).is_err()); + + // Indefinite length (additional info 31) + let result = get_cbor_header_size(&[0b110_11111]); + assert!(result.is_err()); + + // Small map (size 1) - additional info 1 + assert_eq!(get_cbor_header_size(&[0b101_00001]).unwrap(), 1); // Map with 1 pair + + // Large map (size 65535) - additional info 25 (2-byte uint follows) + assert_eq!(get_cbor_header_size(&[0b101_11001, 0xFF, 0xFF]).unwrap(), 3); // Map with 65535 pairs + + // Reserved values 
(additional info 28-30) + assert!(get_cbor_header_size(&[0b111_11100]).is_err()); // Major type 7, value 28 + } + + #[test] + fn test_map_entry_ord_comprehensive() { + // Test 1: Length-first ordering + // According to RFC 8949, shorter keys must come before longer keys + // regardless of their actual byte values + let short_key = MapEntry { + key_bytes: vec![0x41], // Single byte key + value: vec![0x01], + }; + let long_key = MapEntry { + key_bytes: vec![0x41, 0x42, 0x43], // Three byte key (longer) + value: vec![0x01], + }; + // Even though both start with 0x41, the shorter one comes first + assert!(short_key < long_key); + assert!(long_key > short_key); + + // Test 2: Lexicographic ordering for equal-length keys + // When keys have the same length, they are compared byte by byte + // lexicographically (like dictionary ordering) + let key_a = MapEntry { + key_bytes: vec![0x41, 0x41], // Represents "AA" in ASCII + value: vec![0x01], + }; + let key_b = MapEntry { + key_bytes: vec![0x41, 0x42], // Represents "AB" in ASCII + value: vec![0x01], + }; + // "AA" comes before "AB" lexicographically + assert!(key_a < key_b); + assert!(key_b > key_a); + assert!(key_a == key_a); + + // Test 3: Identical entries (same key AND value) + // Complete MapEntry equality requires both key and value to be identical + let entry1 = MapEntry { + key_bytes: vec![0x41, 0x42], + value: vec![0x01], + }; + let entry2 = MapEntry { + key_bytes: vec![0x41, 0x42], + value: vec![0x01], // Same value as entry1 + }; + // These are truly identical entries + assert_eq!(entry1, entry2); + + // Test 4: Same key, different values - these are NOT equal + // In CBOR maps, this would represent duplicate keys (invalid) + let entry_v1 = MapEntry { + key_bytes: vec![0x41, 0x42], + value: vec![0x01], + }; + let entry_v2 = MapEntry { + key_bytes: vec![0x41, 0x42], + value: vec![0x02], // Different value + }; + // These entries are NOT equal (different values) + assert_ne!(entry_v1, entry_v2); + // But they have 
the same ordering position (same key) + assert_eq!(entry_v1.cmp(&entry_v2), std::cmp::Ordering::Equal); + + // Test 5: Empty key vs non-empty key + // Empty keys should come before any non-empty key (shortest length rule) + let empty_key = MapEntry { + key_bytes: vec![], // Empty key (length 0) + value: vec![0x01], + }; + let non_empty_key = MapEntry { + key_bytes: vec![0x00], // Single null byte (length 1) + value: vec![0x01], + }; + // Empty key (length 0) comes before single byte key (length 1) + assert!(empty_key < non_empty_key); + + // Test 6: Numerical byte value ordering + // Test that individual byte values are compared correctly (0x00 < 0xFF) + let key_0 = MapEntry { + key_bytes: vec![0x00], // Null byte + value: vec![0x01], + }; + let key_255 = MapEntry { + key_bytes: vec![0xFF], // Maximum byte value + value: vec![0x01], + }; + // 0x00 is numerically less than 0xFF + assert!(key_0 < key_255); + + // Test 7: Complex multi-byte lexicographic comparison + // Test lexicographic ordering when keys differ in later bytes + let key_complex1 = MapEntry { + key_bytes: vec![0x01, 0x02, 0x03], // Differs in last byte (0x03) + value: vec![0x01], + }; + let key_complex2 = MapEntry { + key_bytes: vec![0x01, 0x02, 0x04], // Differs in last byte (0x04) + value: vec![0x01], + }; + // First two bytes are identical (0x01, 0x02), so compare third byte: 0x03 < 0x04 + assert!(key_complex1 < key_complex2); + } + /// An edge case where slice [`Ord`] isn't minimal length byte-wise lexicographic. + #[test] + fn test_map_entry_ord_len_edge_case() { + // Shorter length key with greater first byte. + let lhs = MapEntry { + key_bytes: minicbor::to_vec("a").unwrap(), + value: vec![], + }; + assert_eq!(lhs.key_bytes, &[97, 97]); + + // Longer length key with lesser first byte. + let rhs = MapEntry { + key_bytes: minicbor::to_vec(65535u32).unwrap(), + value: vec![], + }; + assert_eq!(rhs.key_bytes, &[25, 255, 255]); + + // Shorter must go first. 
+ assert!(lhs < rhs); + } +} diff --git a/rust/cbork-utils/src/lib.rs b/rust/cbork-utils/src/lib.rs index 1b29a48aaf..d7456a84b2 100644 --- a/rust/cbork-utils/src/lib.rs +++ b/rust/cbork-utils/src/lib.rs @@ -1,3 +1,4 @@ //! CBOR utility modules. pub mod decode_helper; +pub mod deterministic_helper; diff --git a/rust/signed_doc/Cargo.toml b/rust/signed_doc/Cargo.toml index 62c4c6b8f7..151de0a34f 100644 --- a/rust/signed_doc/Cargo.toml +++ b/rust/signed_doc/Cargo.toml @@ -15,19 +15,20 @@ catalyst-types = { version = "0.0.3", path = "../catalyst-types" } anyhow = "1.0.95" serde = { version = "1.0.217", features = ["derive"] } -serde_json = "1.0.134" +serde_json = { version = "1.0.134", features = ["raw_value"] } coset = "0.3.8" minicbor = { version = "0.25.1", features = ["half"] } brotli = "7.0.0" ed25519-dalek = { version = "2.1.1", features = ["rand_core", "pem"] } hex = "0.4.3" -strum = { version = "0.26.3", features = ["derive"] } +strum = { version = "0.27.1", features = ["derive"] } clap = { version = "4.5.23", features = ["derive", "env"] } jsonschema = "0.28.3" jsonpath-rust = "0.7.5" futures = "0.3.31" ed25519-bip32 = "0.4.1" # used by the `mk_signed_doc` cli tool - +tracing = "0.1.40" +thiserror = "2.0.11" [dev-dependencies] base64-url = "3.0.0" diff --git a/rust/signed_doc/bins/mk_signed_doc.rs b/rust/signed_doc/bins/mk_signed_doc.rs index a22b1ddf91..dd024f8c12 100644 --- a/rust/signed_doc/bins/mk_signed_doc.rs +++ b/rust/signed_doc/bins/mk_signed_doc.rs @@ -63,13 +63,11 @@ impl Cli { println!("{metadata}"); // Load Document from JSON file let json_doc: serde_json::Value = load_json_from_file(&doc)?; - // Possibly encode if Metadata has an encoding set. - let payload = serde_json::to_vec(&json_doc)?; // Start with no signatures. let signed_doc = Builder::new() - .with_decoded_content(payload) .with_json_metadata(metadata)? - .build(); + .with_json_content(&json_doc)? + .build()?; println!( "report {}", serde_json::to_string(&signed_doc.problem_report())? 
@@ -83,8 +81,11 @@ impl Cli { let new_signed_doc = signed_doc .into_builder() - .add_signature(|message| sk.sign::<()>(&message).to_bytes().to_vec(), &kid)? - .build(); + .add_signature( + |message| sk.sign::<()>(&message).to_bytes().to_vec(), + kid.clone(), + )? + .build()?; save_signed_doc(new_signed_doc, &doc)?; }, Self::Inspect { path } => { diff --git a/rust/signed_doc/src/builder.rs b/rust/signed_doc/src/builder.rs index 6efdca778d..b04d917d57 100644 --- a/rust/signed_doc/src/builder.rs +++ b/rust/signed_doc/src/builder.rs @@ -1,33 +1,43 @@ //! Catalyst Signed Document Builder. -use catalyst_types::{catalyst_id::CatalystId, problem_report::ProblemReport}; +use catalyst_types::catalyst_id::CatalystId; use crate::{ - signature::Signature, CatalystSignedDocument, Content, InnerCatalystSignedDocument, Metadata, - Signatures, PROBLEM_REPORT_CTX, + signature::{tbs_data, Signature}, + CatalystSignedDocument, Content, ContentType, Metadata, Signatures, }; /// Catalyst Signed Document Builder. 
-#[derive(Debug)] -pub struct Builder(InnerCatalystSignedDocument); +/// It's a type safe state machine which iterates type safely during different stages of +/// the Catalyst Signed Document build process: +/// Setting Metadata -> Setting Content -> Setting Signatures +pub type Builder = MetadataBuilder; -impl Default for Builder { - fn default() -> Self { - Self::new() - } +/// Only `metadata` builder part +pub struct MetadataBuilder(BuilderInner); + +/// Only `content` builder part +pub struct ContentBuilder(BuilderInner); + +/// Only `Signatures` builder part +pub struct SignaturesBuilder(BuilderInner); + +/// Inner state of the Catalyst Signed Documents `Builder` +#[derive(Default)] +pub struct BuilderInner { + /// metadata + metadata: Metadata, + /// content + content: Content, + /// signatures + signatures: Signatures, } -impl Builder { +impl MetadataBuilder { /// Start building a signed document #[must_use] + #[allow(clippy::new_without_default)] pub fn new() -> Self { - let report = ProblemReport::new(PROBLEM_REPORT_CTX); - Self(InnerCatalystSignedDocument { - report, - metadata: Metadata::default(), - content: Content::default(), - signatures: Signatures::default(), - raw_bytes: None, - }) + Self(BuilderInner::default()) } /// Set document metadata in JSON format @@ -35,19 +45,44 @@ impl Builder { /// /// # Errors /// - Fails if it is invalid metadata fields JSON object.
- pub fn with_json_metadata(mut self, json: serde_json::Value) -> anyhow::Result { - let metadata = serde_json::from_value(json)?; - self.0.metadata = Metadata::from_metadata_fields(metadata, &self.0.report); - Ok(self) + pub fn with_json_metadata(mut self, json: serde_json::Value) -> anyhow::Result { + self.0.metadata = Metadata::from_json(json)?; + Ok(ContentBuilder(self.0)) } +} - /// Set decoded (original) document content bytes - #[must_use] - pub fn with_decoded_content(mut self, content: Vec) -> Self { - self.0.content = Content::from_decoded(content); - self +impl ContentBuilder { + /// Sets an empty content + pub fn empty_content(self) -> SignaturesBuilder { + SignaturesBuilder(self.0) } + /// Set the provided JSON content, applying already set `content-encoding`. + /// + /// # Errors + /// - Verifies that `content-type` field is set to JSON + /// - Cannot serialize provided JSON + /// - Compression failure + pub fn with_json_content( + mut self, json: &serde_json::Value, + ) -> anyhow::Result { + anyhow::ensure!( + self.0.metadata.content_type()? == ContentType::Json, + "Already set metadata field `content-type` is not JSON value" + ); + + let content = serde_json::to_vec(&json)?; + if let Some(encoding) = self.0.metadata.content_encoding() { + self.0.content = encoding.encode(&content)?.into(); + } else { + self.0.content = content.into(); + } + + Ok(SignaturesBuilder(self.0)) + } +} + +impl SignaturesBuilder { /// Add a signature to the document /// /// # Errors @@ -56,43 +91,109 @@ impl Builder { /// content, due to malformed data, or when the signed document cannot be /// converted into `coset::CoseSign`. 
pub fn add_signature( - mut self, sign_fn: impl FnOnce(Vec) -> Vec, kid: &CatalystId, + mut self, sign_fn: impl FnOnce(Vec) -> Vec, kid: CatalystId, ) -> anyhow::Result { - let cose_sign = self - .0 - .as_cose_sign() - .map_err(|e| anyhow::anyhow!("Failed to sign: {e}"))?; - - let protected_header = coset::HeaderBuilder::new().key_id(kid.to_string().into_bytes()); - - let mut signature = coset::CoseSignatureBuilder::new() - .protected(protected_header.build()) - .build(); - let data_to_sign = cose_sign.tbs_data(&[], &signature); - signature.signature = sign_fn(data_to_sign); - if let Some(sign) = Signature::from_cose_sig(signature, &self.0.report) { - self.0.signatures.push(sign); + if kid.is_id() { + anyhow::bail!("Provided kid should be in a uri format, kid: {kid}"); } + let data_to_sign = tbs_data(&kid, &self.0.metadata, &self.0.content)?; + let sign_bytes = sign_fn(data_to_sign); + self.0.signatures.push(Signature::new(kid, sign_bytes)); Ok(self) } - /// Build a signed document with the collected error report. - /// Could provide an invalid document. - #[must_use] - pub fn build(self) -> CatalystSignedDocument { - self.0.into() + /// Builds a document from the set `metadata`, `content` and `signatures`. + /// + /// # Errors: + /// - CBOR encoding/decoding failures + pub fn build(self) -> anyhow::Result { + let doc = build_document(&self.0.metadata, &self.0.content, &self.0.signatures)?; + Ok(doc) } } -impl From<&CatalystSignedDocument> for Builder { +/// Build document from the provided `metadata`, `content` and `signatures`, performs all +/// the decoding validation and collects a problem report. 
+fn build_document( + metadata: &Metadata, content: &Content, signatures: &Signatures, +) -> anyhow::Result { + let mut e = minicbor::Encoder::new(Vec::new()); + // COSE_Sign tag + // + e.tag(minicbor::data::Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + e.bytes(minicbor::to_vec(metadata)?.as_slice())?; + // empty unprotected headers + e.map(0)?; + // content + e.encode(content)?; + // signatures + e.encode(signatures)?; + CatalystSignedDocument::try_from(e.into_writer().as_slice()) +} + +impl From<&CatalystSignedDocument> for SignaturesBuilder { fn from(value: &CatalystSignedDocument) -> Self { - Self(InnerCatalystSignedDocument { + Self(BuilderInner { metadata: value.inner.metadata.clone(), content: value.inner.content.clone(), signatures: value.inner.signatures.clone(), - report: value.inner.report.clone(), - raw_bytes: None, }) } } + +#[cfg(test)] +pub(crate) mod tests { + use crate::builder::SignaturesBuilder; + + /// A test version of the builder, which allows to build a not fully valid catalyst + /// signed document + pub(crate) struct Builder(super::BuilderInner); + + impl Default for Builder { + fn default() -> Self { + Self::new() + } + } + + impl Builder { + /// Start building a signed document + #[must_use] + pub(crate) fn new() -> Self { + Self(super::BuilderInner::default()) + } + + /// Add provided `SupportedField` into the `Metadata`. + pub(crate) fn with_metadata_field( + mut self, field: crate::metadata::SupportedField, + ) -> Self { + self.0.metadata.add_field(field); + self + } + + /// Set the content (COSE payload) to the document builder. + /// It will set the content as its provided, make sure by yourself that + /// `content-type` and `content-encoding` fields are aligned with the + /// provided content bytes. 
+ pub(crate) fn with_content(mut self, content: Vec) -> Self { + self.0.content = content.into(); + self + } + + /// Add a signature to the document + pub(crate) fn add_signature( + mut self, sign_fn: impl FnOnce(Vec) -> Vec, kid: super::CatalystId, + ) -> anyhow::Result { + self.0 = SignaturesBuilder(self.0).add_signature(sign_fn, kid)?.0; + Ok(self) + } + + /// Build a signed document with the collected error report. + /// Could provide an invalid document. + pub(crate) fn build(self) -> super::CatalystSignedDocument { + super::build_document(&self.0.metadata, &self.0.content, &self.0.signatures).unwrap() + } + } +} diff --git a/rust/signed_doc/src/content.rs b/rust/signed_doc/src/content.rs index 506ba1f7da..30ed78174e 100644 --- a/rust/signed_doc/src/content.rs +++ b/rust/signed_doc/src/content.rs @@ -1,78 +1,38 @@ //! Catalyst Signed Document Content Payload -use anyhow::Context; -use catalyst_types::problem_report::ProblemReport; - -use crate::metadata::ContentEncoding; - -/// Decompressed Document Content type bytes. +/// Document Content bytes (COSE payload). #[derive(Debug, Clone, PartialEq, Default)] -pub struct Content { - /// Original Decompressed Document's data bytes - data: Option>, -} +pub struct Content(Vec); impl Content { - /// Creates a new `Content` value, from the encoded data. - /// verifies a Document's content, that it is correctly encoded and it corresponds and - /// parsed to the specified type - pub(crate) fn from_encoded( - mut data: Vec, content_encoding: Option, report: &ProblemReport, - ) -> Self { - if let Some(content_encoding) = content_encoding { - if let Ok(decoded_data) = content_encoding.decode(&data) { - data = decoded_data; - } else { - report.invalid_value( - "payload", - &hex::encode(&data), - &format!("Invalid Document content, should {content_encoding} encodable"), - "Invalid Document content type.", - ); - return Self::default(); - } - } - Self::from_decoded(data) + /// Return content bytes. 
+ #[must_use] + pub fn bytes(&self) -> &[u8] { + self.0.as_slice() } - /// Creates a new `Content` value, from the decoded (original) data. - pub(crate) fn from_decoded(data: Vec) -> Self { - Self { data: Some(data) } + /// Return content byte size. + #[must_use] + pub fn size(&self) -> usize { + self.0.len() } +} - /// Return an decoded (original) content bytes. - /// - /// # Errors - /// - Missing Document content - pub fn decoded_bytes(&self) -> anyhow::Result<&[u8]> { - self.data - .as_deref() - .ok_or(anyhow::anyhow!("Missing Document content")) +impl From> for Content { + fn from(value: Vec) -> Self { + Self(value) } +} - /// Return an encoded content bytes, - /// by the provided `content_encoding` provided field. - /// - /// # Errors - /// - Missing Document content - /// - Failed to encode content. - pub(crate) fn encoded_bytes( - &self, content_encoding: Option, - ) -> anyhow::Result> { - let content = self.decoded_bytes()?; - if let Some(content_encoding) = content_encoding { - content_encoding - .encode(content) - .context(format!("Failed to encode {content_encoding} content")) +impl minicbor::Encode<()> for Content { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + if self.0.is_empty() { + e.null()?; } else { - Ok(content.to_vec()) + e.bytes(self.0.as_slice())?; } - } - - /// Return content byte size. - /// If content is empty returns `0`. - #[must_use] - pub fn size(&self) -> usize { - self.data.as_ref().map(Vec::len).unwrap_or_default() + Ok(()) } } diff --git a/rust/signed_doc/src/decode_context.rs b/rust/signed_doc/src/decode_context.rs new file mode 100644 index 0000000000..6c85e9c247 --- /dev/null +++ b/rust/signed_doc/src/decode_context.rs @@ -0,0 +1,25 @@ +//! Context used to pass in decoder for additional information. 
+ +use catalyst_types::problem_report::ProblemReport; + +/// Compatibility policy +#[allow(dead_code)] +#[derive(Copy, Clone)] +pub(crate) enum CompatibilityPolicy { + /// Silently allow obsoleted type conversions or non deterministic encoding. + Accept, + /// Allow but log Warnings for all obsoleted type conversions or non deterministic + /// encoding. + Warn, + /// Fail and update problem report when an obsolete type is encountered or the data is + /// not deterministically encoded. + Fail, +} + +/// A context use to pass to decoder. +pub(crate) struct DecodeContext<'r> { + /// Compatibility policy. + pub compatibility_policy: CompatibilityPolicy, + /// Problem report. + pub report: &'r mut ProblemReport, +} diff --git a/rust/signed_doc/src/doc_types/mod.rs b/rust/signed_doc/src/doc_types/mod.rs index 683e36db2f..79939755b3 100644 --- a/rust/signed_doc/src/doc_types/mod.rs +++ b/rust/signed_doc/src/doc_types/mod.rs @@ -1,53 +1,179 @@ //! An implementation of different defined document types //! +use std::sync::LazyLock; + use catalyst_types::uuid::Uuid; -/// Proposal document `UuidV4` type. -pub const PROPOSAL_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0x7808_D2BA_D511_40AF_84E8_C0D1_625F_DFDC); -/// Proposal template `UuidV4` type. -pub const PROPOSAL_TEMPLATE_UUID_TYPE: Uuid = - Uuid::from_u128(0x0CE8_AB38_9258_4FBC_A62E_7FAA_6E58_318F); -/// Comment document `UuidV4` type. -pub const COMMENT_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0xB679_DED3_0E7C_41BA_89F8_DA62_A178_98EA); -/// Comment template `UuidV4` type. -pub const COMMENT_TEMPLATE_UUID_TYPE: Uuid = - Uuid::from_u128(0x0B84_24D4_EBFD_46E3_9577_1775_A69D_290C); -/// Review document `UuidV4` type. -pub const REVIEW_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0xE4CA_F5F0_098B_45FD_94F3_0702_A457_3DB5); -/// Review template `UuidV4` type. -pub const REVIEW_TEMPLATE_UUID_TYPE: Uuid = - Uuid::from_u128(0xEBE5_D0BF_5D86_4577_AF4D_008F_DDBE_2EDC); -/// Category document `UuidV4` type. 
-pub const CATEGORY_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0x48C2_0109_362A_4D32_9BBA_E0A9_CF8B_45BE); -/// Category template `UuidV4` type. -pub const CATEGORY_TEMPLATE_UUID_TYPE: Uuid = - Uuid::from_u128(0x65B1_E8B0_51F1_46A5_9970_72CD_F268_84BE); -/// Campaign parameters document `UuidV4` type. -pub const CAMPAIGN_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0x0110_EA96_A555_47CE_8408_36EF_E6ED_6F7C); -/// Campaign parameters template `UuidV4` type. -pub const CAMPAIGN_TEMPLATE_UUID_TYPE: Uuid = - Uuid::from_u128(0x7E8F_5FA2_44CE_49C8_BFD5_02AF_42C1_79A3); -/// Brand parameters document `UuidV4` type. -pub const BRAND_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0x3E48_08CC_C86E_467B_9702_D60B_AA9D_1FCA); -/// Brand parameters template `UuidV4` type. -pub const BRAND_TEMPLATE_UUID_TYPE: Uuid = - Uuid::from_u128(0xFD3C_1735_80B1_4EEA_8D63_5F43_6D97_EA31); -/// Proposal action document `UuidV4` type. -pub const PROPOSAL_ACTION_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0x5E60_E623_AD02_4A1B_A1AC_406D_B978_EE48); -/// Public vote transaction v2 `UuidV4` type. -pub const PUBLIC_VOTE_TX_V2_UUID_TYPE: Uuid = - Uuid::from_u128(0x8DE5_586C_E998_4B95_8742_7BE3_C859_2803); -/// Private vote transaction v2 `UuidV4` type. -pub const PRIVATE_VOTE_TX_V2_UUID_TYPE: Uuid = - Uuid::from_u128(0xE78E_E18D_F380_44C1_A852_80AA_6ECB_07FE); -/// Immutable ledger block `UuidV4` type. -pub const IMMUTABLE_LEDGER_BLOCK_UUID_TYPE: Uuid = - Uuid::from_u128(0xD9E7_E6CE_2401_4D7D_9492_F4F7_C642_41C3); +use crate::DocType; + +/// -------------- Document Types -------------- +/// Brand document type. +#[allow(clippy::expect_used)] +pub static BRAND_PARAMETERS: LazyLock = LazyLock::new(|| { + let ids = &[BRAND_BASE_TYPE]; + ids.to_vec() + .try_into() + .expect("Failed to convert brand base types Uuid to DocType") +}); + +/// Campaign Parameters document type. 
+#[allow(clippy::expect_used)] +pub static CAMPAIGN_PARAMETERS: LazyLock = LazyLock::new(|| { + let ids = &[CAMPAIGN_BASE_TYPE]; + ids.to_vec() + .try_into() + .expect("Failed to convert campaign base types Uuid to DocType") +}); + +/// Category Parameters document type. +#[allow(clippy::expect_used)] +pub static CATEGORY_PARAMETERS: LazyLock = LazyLock::new(|| { + let ids = &[CATEGORY_BASE_TYPE]; + ids.to_vec() + .try_into() + .expect("Failed to convert category base types Uuid to DocType") +}); + +/// Proposal document type. +#[allow(clippy::expect_used)] +pub static PROPOSAL: LazyLock = LazyLock::new(|| { + let ids = &[PROPOSAL_BASE_TYPE]; + ids.to_vec() + .try_into() + .expect("Failed to convert proposal document Uuid to DocType") +}); + +/// Proposal comment document type. +#[allow(clippy::expect_used)] +pub static PROPOSAL_COMMENT: LazyLock = LazyLock::new(|| { + let ids = &[COMMENT_BASE_TYPE, PROPOSAL_BASE_TYPE]; + ids.to_vec() + .try_into() + .expect("Failed to convert proposal comment document Uuid to DocType") +}); + +/// Proposal action document type. +#[allow(clippy::expect_used)] +pub static PROPOSAL_SUBMISSION_ACTION: LazyLock = LazyLock::new(|| { + let ids = &[ + ACTION_BASE_TYPE, + PROPOSAL_BASE_TYPE, + SUBMISSION_ACTION_BASE_TYPE, + ]; + ids.to_vec() + .try_into() + .expect("Failed to convert proposal action document Uuid to DocType") +}); + +/// Proposal Comment Meta Template document type. +#[allow(clippy::expect_used)] +pub static PROPOSAL_COMMENT_META_TEMPLATE: LazyLock = LazyLock::new(|| { + let ids = &[ + TEMPLATE_BASE_TYPE, + TEMPLATE_BASE_TYPE, + COMMENT_BASE_TYPE, + PROPOSAL_BASE_TYPE, + ]; + ids.to_vec() + .try_into() + .expect("Failed to convert proposal comment meta template document Uuid to DocType") +}); + +/// Proposal Comment Template document type. 
+#[allow(clippy::expect_used)] +pub static PROPOSAL_COMMENT_TEMPLATE: LazyLock = LazyLock::new(|| { + let ids = &[TEMPLATE_BASE_TYPE, COMMENT_BASE_TYPE, PROPOSAL_BASE_TYPE]; + ids.to_vec() + .try_into() + .expect("Failed to convert proposal comment template document Uuid to DocType") +}); + +/// Proposal Template document type. +#[allow(clippy::expect_used)] +pub static PROPOSAL_TEMPLATE: LazyLock = LazyLock::new(|| { + let ids = &[TEMPLATE_BASE_TYPE, PROPOSAL_BASE_TYPE]; + ids.to_vec() + .try_into() + .expect("Failed to convert proposal template document Uuid to DocType") +}); + +/// -------------- Base Types -------------- +/// Action UUID base type. +pub const ACTION_BASE_TYPE: Uuid = Uuid::from_u128(0x5E60_E623_AD02_4A1B_A1AC_406D_B978_EE48); +/// Brand UUID base type. +pub const BRAND_BASE_TYPE: Uuid = Uuid::from_u128(0xEBCA_BEEB_5BC5_4F95_91E8_CAB8_CA72_4172); +/// Campaign UUID base type. +pub const CAMPAIGN_BASE_TYPE: Uuid = Uuid::from_u128(0x5EF3_2D5D_F240_462C_A7A4_BA4A_F221_FA23); +/// Category UUID base type. +pub const CATEGORY_BASE_TYPE: Uuid = Uuid::from_u128(0x8189_38C3_3139_4DAA_AFE6_974C_7848_8E95); +/// Comment UUID base type. +pub const COMMENT_BASE_TYPE: Uuid = Uuid::from_u128(0xB679_DED3_0E7C_41BA_89F8_DA62_A178_98EA); +/// Decision UUID base type. +pub const DECISION_BASE_TYPE: Uuid = Uuid::from_u128(0x788F_F4C6_D65A_451F_BB33_575F_E056_B411); +/// Moderation Action UUID base type. +pub const MODERATION_ACTION_BASE_TYPE: Uuid = + Uuid::from_u128(0xA5D2_32B8_5E03_4117_9AFD_BE32_B878_FCDD); +/// Proposal UUID base type. +pub const PROPOSAL_BASE_TYPE: Uuid = Uuid::from_u128(0x7808_D2BA_D511_40AF_84E8_C0D1_625F_DFDC); +/// Submission Action UUID base type. +pub const SUBMISSION_ACTION_BASE_TYPE: Uuid = + Uuid::from_u128(0x7892_7329_CFD9_4EA1_9C71_0E01_9B12_6A65); +/// Template UUID base type. +pub const TEMPLATE_BASE_TYPE: Uuid = Uuid::from_u128(0x0CE8_AB38_9258_4FBC_A62E_7FAA_6E58_318F); + +/// Document type which will be deprecated. 
+pub mod deprecated { + use catalyst_types::uuid::Uuid; + + /// Proposal document `UuidV4` type. + pub const PROPOSAL_DOCUMENT_UUID_TYPE: Uuid = + Uuid::from_u128(0x7808_D2BA_D511_40AF_84E8_C0D1_625F_DFDC); + /// Proposal template `UuidV4` type. + pub const PROPOSAL_TEMPLATE_UUID_TYPE: Uuid = + Uuid::from_u128(0x0CE8_AB38_9258_4FBC_A62E_7FAA_6E58_318F); + /// Comment document `UuidV4` type. + pub const COMMENT_DOCUMENT_UUID_TYPE: Uuid = + Uuid::from_u128(0xB679_DED3_0E7C_41BA_89F8_DA62_A178_98EA); + /// Comment template `UuidV4` type. + pub const COMMENT_TEMPLATE_UUID_TYPE: Uuid = + Uuid::from_u128(0x0B84_24D4_EBFD_46E3_9577_1775_A69D_290C); + /// Review document `UuidV4` type. + pub const REVIEW_DOCUMENT_UUID_TYPE: Uuid = + Uuid::from_u128(0xE4CA_F5F0_098B_45FD_94F3_0702_A457_3DB5); + /// Review template `UuidV4` type. + pub const REVIEW_TEMPLATE_UUID_TYPE: Uuid = + Uuid::from_u128(0xEBE5_D0BF_5D86_4577_AF4D_008F_DDBE_2EDC); + /// Category document `UuidV4` type. + pub const CATEGORY_DOCUMENT_UUID_TYPE: Uuid = + Uuid::from_u128(0x48C2_0109_362A_4D32_9BBA_E0A9_CF8B_45BE); + /// Category template `UuidV4` type. + pub const CATEGORY_TEMPLATE_UUID_TYPE: Uuid = + Uuid::from_u128(0x65B1_E8B0_51F1_46A5_9970_72CD_F268_84BE); + /// Campaign parameters document `UuidV4` type. + pub const CAMPAIGN_DOCUMENT_UUID_TYPE: Uuid = + Uuid::from_u128(0x0110_EA96_A555_47CE_8408_36EF_E6ED_6F7C); + /// Campaign parameters template `UuidV4` type. + pub const CAMPAIGN_TEMPLATE_UUID_TYPE: Uuid = + Uuid::from_u128(0x7E8F_5FA2_44CE_49C8_BFD5_02AF_42C1_79A3); + /// Brand parameters document `UuidV4` type. + pub const BRAND_DOCUMENT_UUID_TYPE: Uuid = + Uuid::from_u128(0x3E48_08CC_C86E_467B_9702_D60B_AA9D_1FCA); + /// Brand parameters template `UuidV4` type. + pub const BRAND_TEMPLATE_UUID_TYPE: Uuid = + Uuid::from_u128(0xFD3C_1735_80B1_4EEA_8D63_5F43_6D97_EA31); + /// Proposal action document `UuidV4` type. 
+ pub const PROPOSAL_ACTION_DOCUMENT_UUID_TYPE: Uuid = + Uuid::from_u128(0x5E60_E623_AD02_4A1B_A1AC_406D_B978_EE48); + /// Public vote transaction v2 `UuidV4` type. + pub const PUBLIC_VOTE_TX_V2_UUID_TYPE: Uuid = + Uuid::from_u128(0x8DE5_586C_E998_4B95_8742_7BE3_C859_2803); + /// Private vote transaction v2 `UuidV4` type. + pub const PRIVATE_VOTE_TX_V2_UUID_TYPE: Uuid = + Uuid::from_u128(0xE78E_E18D_F380_44C1_A852_80AA_6ECB_07FE); + /// Immutable ledger block `UuidV4` type. + pub const IMMUTABLE_LEDGER_BLOCK_UUID_TYPE: Uuid = + Uuid::from_u128(0xD9E7_E6CE_2401_4D7D_9492_F4F7_C642_41C3); + /// Submission Action `UuidV4` type. + pub const SUBMISSION_ACTION: Uuid = Uuid::from_u128(0x7892_7329_CFD9_4EA1_9C71_0E01_9B12_6A65); +} diff --git a/rust/signed_doc/src/lib.rs b/rust/signed_doc/src/lib.rs index c5d5779733..f9b67abd58 100644 --- a/rust/signed_doc/src/lib.rs +++ b/rust/signed_doc/src/lib.rs @@ -2,6 +2,7 @@ mod builder; mod content; +mod decode_context; pub mod doc_types; mod metadata; pub mod providers; @@ -14,18 +15,22 @@ use std::{ sync::Arc, }; -use anyhow::Context; pub use builder::Builder; pub use catalyst_types::{ problem_report::ProblemReport, uuid::{Uuid, UuidV4, UuidV7}, }; pub use content::Content; -use coset::{CborSerializable, Header, TaggedCborSerializable}; -pub use metadata::{ContentEncoding, ContentType, DocumentRef, ExtraFields, Metadata, Section}; +use coset::{CborSerializable, TaggedCborSerializable}; +use decode_context::{CompatibilityPolicy, DecodeContext}; +pub use metadata::{ + ContentEncoding, ContentType, DocLocator, DocType, DocumentRef, DocumentRefs, Metadata, Section, +}; use minicbor::{decode, encode, Decode, Decoder, Encode}; pub use signature::{CatalystId, Signatures}; +use crate::builder::SignaturesBuilder; + /// A problem report content string const PROBLEM_REPORT_CTX: &str = "Catalyst Signed Document"; @@ -45,7 +50,7 @@ struct InnerCatalystSignedDocument { /// raw CBOR bytes of the `CatalystSignedDocument` object. 
/// It is important to keep them to have a consistency what comes from the decoding /// process, so we would return the same data again - raw_bytes: Option>, + raw_bytes: Vec, } /// Keep all the contents private. @@ -66,7 +71,7 @@ impl Display for CatalystSignedDocument { if self.inner.signatures.is_empty() { writeln!(f, " This document is unsigned.")?; } else { - for kid in &self.inner.signatures.kids() { + for kid in &self.kids() { writeln!(f, " Signature Key ID: {kid}")?; } } @@ -85,11 +90,11 @@ impl From for CatalystSignedDocument { impl CatalystSignedDocument { // A bunch of getters to access the contents, or reason through the document, such as. - /// Return Document Type `UUIDv4`. + /// Return Document Type `DocType` - List of `UUIDv4`. /// /// # Errors /// - Missing 'type' field. - pub fn doc_type(&self) -> anyhow::Result { + pub fn doc_type(&self) -> anyhow::Result<&DocType> { self.inner.metadata.doc_type() } @@ -109,12 +114,30 @@ impl CatalystSignedDocument { self.inner.metadata.doc_ver() } - /// Return document `Content`. + /// Return document content object. #[must_use] - pub fn doc_content(&self) -> &Content { + pub(crate) fn content(&self) -> &Content { &self.inner.content } + /// Return document decoded (original/non compressed) content bytes. + /// + /// # Errors + /// - Decompression failure + pub fn decoded_content(&self) -> anyhow::Result> { + if let Some(encoding) = self.doc_content_encoding() { + encoding.decode(self.encoded_content()) + } else { + Ok(self.encoded_content().to_vec()) + } + } + + /// Return document encoded (compressed) content bytes. + #[must_use] + pub fn encoded_content(&self) -> &[u8] { + self.content().bytes() + } + /// Return document `ContentType`. /// /// # Errors @@ -130,9 +153,10 @@ impl CatalystSignedDocument { } /// Return document metadata content. + // TODO: remove this and provide getters from metadata like the rest of its fields have. 
#[must_use] - pub fn doc_meta(&self) -> &ExtraFields { - self.inner.metadata.extra() + pub fn doc_meta(&self) -> &Metadata { + &self.inner.metadata } /// Return a Document's signatures @@ -144,13 +168,21 @@ impl CatalystSignedDocument { /// Return a list of Document's Catalyst IDs. #[must_use] pub fn kids(&self) -> Vec { - self.inner.signatures.kids() + self.inner + .signatures + .iter() + .map(|s| s.kid().clone()) + .collect() } /// Return a list of Document's author IDs (short form of Catalyst IDs). #[must_use] pub fn authors(&self) -> Vec { - self.inner.signatures.authors() + self.inner + .signatures + .iter() + .map(|s| s.kid().as_short_id()) + .collect() } /// Returns a collected problem report for the document. @@ -170,55 +202,14 @@ impl CatalystSignedDocument { &self.inner.report } - /// Convert Catalyst Signed Document into `coset::CoseSign` - /// - /// # Errors - /// Could fails if the `CatalystSignedDocument` object is not valid. - pub(crate) fn as_cose_sign(&self) -> anyhow::Result { - self.inner.as_cose_sign() - } - /// Returns a signed document `Builder` pre-loaded with the current signed document's /// data. #[must_use] - pub fn into_builder(&self) -> Builder { + pub fn into_builder(&self) -> SignaturesBuilder { self.into() } } -impl InnerCatalystSignedDocument { - /// Convert Catalyst Signed Document into `coset::CoseSign` - /// - /// # Errors - /// Could fails if the `CatalystSignedDocument` object is not valid. 
- fn as_cose_sign(&self) -> anyhow::Result { - if let Some(raw_bytes) = self.raw_bytes.clone() { - let cose_sign = coset::CoseSign::from_tagged_slice(raw_bytes.as_slice()) - .or_else(|_| coset::CoseSign::from_slice(raw_bytes.as_slice())) - .map_err(|e| { - minicbor::decode::Error::message(format!("Invalid COSE Sign document: {e}")) - })?; - Ok(cose_sign) - } else { - let protected_header = - Header::try_from(&self.metadata).context("Failed to encode Document Metadata")?; - - let content = self - .content - .encoded_bytes(self.metadata.content_encoding())?; - - let mut builder = coset::CoseSignBuilder::new() - .protected(protected_header) - .payload(content); - - for signature in self.signatures.cose_signatures() { - builder = builder.add_signature(signature); - } - Ok(builder.build()) - } - } -} - impl Decode<'_, ()> for CatalystSignedDocument { fn decode(d: &mut Decoder<'_>, _ctx: &mut ()) -> Result { let start = d.position(); @@ -235,12 +226,16 @@ impl Decode<'_, ()> for CatalystSignedDocument { minicbor::decode::Error::message(format!("Invalid COSE Sign document: {e}")) })?; - let report = ProblemReport::new(PROBLEM_REPORT_CTX); - let metadata = Metadata::from_protected_header(&cose_sign.protected, &report); + let mut report = ProblemReport::new(PROBLEM_REPORT_CTX); + let mut ctx = DecodeContext { + compatibility_policy: CompatibilityPolicy::Accept, + report: &mut report, + }; + let metadata = Metadata::from_protected_header(&cose_sign.protected, &mut ctx); let signatures = Signatures::from_cose_sig_list(&cose_sign.signatures, &report); let content = if let Some(payload) = cose_sign.payload { - Content::from_encoded(payload, metadata.content_encoding(), &report) + payload.into() } else { report.missing_field("COSE Sign Payload", "Missing document content (payload)"); Content::default() @@ -251,24 +246,21 @@ impl Decode<'_, ()> for CatalystSignedDocument { content, signatures, report, - raw_bytes: Some(cose_bytes.to_vec()), + raw_bytes: cose_bytes.to_vec(), } 
.into()) } } -impl Encode<()> for CatalystSignedDocument { +impl Encode for CatalystSignedDocument { fn encode( - &self, e: &mut encode::Encoder, _ctx: &mut (), + &self, e: &mut encode::Encoder, _ctx: &mut C, ) -> Result<(), encode::Error> { - let cose_sign = self.as_cose_sign().map_err(encode::Error::message)?; - let cose_bytes = cose_sign.to_tagged_vec().map_err(|e| { - minicbor::encode::Error::message(format!("Failed to encode COSE Sign document: {e}")) - })?; - + let raw_bytes = &self.inner.raw_bytes; e.writer_mut() - .write_all(&cose_bytes) - .map_err(|_| minicbor::encode::Error::message("Failed to encode to CBOR")) + .write_all(raw_bytes) + .map_err(minicbor::encode::Error::write)?; + Ok(()) } } diff --git a/rust/signed_doc/src/metadata/content_encoding.rs b/rust/signed_doc/src/metadata/content_encoding.rs index d47f696e7f..e736dc74d6 100644 --- a/rust/signed_doc/src/metadata/content_encoding.rs +++ b/rust/signed_doc/src/metadata/content_encoding.rs @@ -84,3 +84,12 @@ impl TryFrom<&coset::cbor::Value> for ContentEncoding { } } } + +impl minicbor::Encode<()> for ContentEncoding { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.str(self.to_string().as_str())?; + Ok(()) + } +} diff --git a/rust/signed_doc/src/metadata/content_type.rs b/rust/signed_doc/src/metadata/content_type.rs index b72cb4b9c2..260c0196b5 100644 --- a/rust/signed_doc/src/metadata/content_type.rs +++ b/rust/signed_doc/src/metadata/content_type.rs @@ -18,25 +18,6 @@ pub enum ContentType { Json, } -impl ContentType { - /// Validates the provided `content` bytes to be a defined `ContentType`. 
- pub(crate) fn validate(self, content: &[u8]) -> anyhow::Result<()> { - match self { - Self::Json => { - if let Err(e) = serde_json::from_slice::(content) { - anyhow::bail!("Invalid {self} content: {e}") - } - }, - Self::Cbor => { - if let Err(e) = minicbor::decode::(content) { - anyhow::bail!("Invalid {self} content: {e}") - } - }, - } - Ok(()) - } -} - impl Display for ContentType { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { match self { @@ -74,33 +55,34 @@ impl<'de> Deserialize<'de> for ContentType { } } -impl From for CoapContentFormat { - fn from(value: ContentType) -> Self { - match value { - ContentType::Cbor => Self::Cbor, - ContentType::Json => Self::Json, - } - } -} - impl TryFrom<&coset::ContentType> for ContentType { type Error = anyhow::Error; fn try_from(value: &coset::ContentType) -> Result { - let content_type = match value { - coset::ContentType::Assigned(CoapContentFormat::Json) => ContentType::Json, - coset::ContentType::Assigned(CoapContentFormat::Cbor) => ContentType::Cbor, - _ => { + match value { + coset::ContentType::Assigned(CoapContentFormat::Json) => Ok(ContentType::Json), + coset::ContentType::Assigned(CoapContentFormat::Cbor) => Ok(ContentType::Cbor), + coset::ContentType::Text(str) => str.parse(), + coset::RegisteredLabel::Assigned(_) => { anyhow::bail!( - "Unsupported Content Type {value:?}, Supported only: {:?}", + "Unsupported Content Type: {value:?}, Supported only: {:?}", ContentType::VARIANTS .iter() .map(ToString::to_string) .collect::>() ) }, - }; - Ok(content_type) + } + } +} + +impl minicbor::Encode<()> for ContentType { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + // encode as media types, not in CoAP Content-Formats + e.str(self.to_string().as_str())?; + Ok(()) } } @@ -108,17 +90,6 @@ impl TryFrom<&coset::ContentType> for ContentType { mod tests { use super::*; - #[test] - fn content_type_validate_test() { - let json_bytes = 
serde_json::to_vec(&serde_json::Value::Null).unwrap(); - assert!(ContentType::Json.validate(&json_bytes).is_ok()); - assert!(ContentType::Cbor.validate(&json_bytes).is_err()); - - let cbor_bytes = minicbor::to_vec(minicbor::data::Token::Null).unwrap(); - assert!(ContentType::Json.validate(&cbor_bytes).is_err()); - assert!(ContentType::Cbor.validate(&cbor_bytes).is_ok()); - } - #[test] fn content_type_string_test() { assert_eq!( diff --git a/rust/signed_doc/src/metadata/doc_type.rs b/rust/signed_doc/src/metadata/doc_type.rs new file mode 100644 index 0000000000..c55a45be5e --- /dev/null +++ b/rust/signed_doc/src/metadata/doc_type.rs @@ -0,0 +1,512 @@ +//! Document Type. + +use std::{ + fmt::{Display, Formatter}, + hash::{Hash, Hasher}, +}; + +use catalyst_types::uuid::{CborContext, Uuid, UuidV4, UUID_CBOR_TAG}; +use coset::cbor::Value; +use minicbor::{Decode, Decoder, Encode}; +use serde::{Deserialize, Deserializer}; +use tracing::warn; + +use crate::{ + decode_context::{CompatibilityPolicy, DecodeContext}, + doc_types::{deprecated, PROPOSAL, PROPOSAL_COMMENT, PROPOSAL_SUBMISSION_ACTION}, +}; + +/// List of `UUIDv4` document type. +#[derive(Clone, Debug, serde::Serialize, Eq)] +pub struct DocType(Vec); + +/// `DocType` Errors. +#[derive(Debug, Clone, thiserror::Error)] +pub enum DocTypeError { + /// Invalid UUID. + #[error("Invalid UUID: {0}")] + InvalidUuid(Uuid), + /// `DocType` cannot be empty. + #[error("DocType cannot be empty")] + Empty, + /// Invalid string conversion + #[error("Invalid string conversion: {0}")] + StringConversion(String), +} + +impl DocType { + /// Get a list of `UUIDv4` document types. 
+ #[must_use] + pub fn doc_types(&self) -> &Vec { + &self.0 + } +} + +impl Hash for DocType { + fn hash(&self, state: &mut H) { + let list = self + .0 + .iter() + .map(std::string::ToString::to_string) + .collect::>(); + list.hash(state); + } +} + +impl From for DocType { + fn from(value: UuidV4) -> Self { + DocType(vec![value]) + } +} + +impl TryFrom for DocType { + type Error = DocTypeError; + + fn try_from(value: Uuid) -> Result { + let uuid_v4 = UuidV4::try_from(value).map_err(|_| DocTypeError::InvalidUuid(value))?; + Ok(DocType(vec![uuid_v4])) + } +} + +impl TryFrom> for DocType { + type Error = DocTypeError; + + fn try_from(value: Vec) -> Result { + if value.is_empty() { + return Err(DocTypeError::Empty); + } + + let converted = value + .into_iter() + .map(|u| UuidV4::try_from(u).map_err(|_| DocTypeError::InvalidUuid(u))) + .collect::, DocTypeError>>()?; + + DocType::try_from(converted) + } +} + +impl From for Vec { + fn from(value: DocType) -> Vec { + value.0.into_iter().map(Uuid::from).collect() + } +} + +impl From for Vec { + fn from(val: DocType) -> Self { + val.0.into_iter().map(|uuid| uuid.to_string()).collect() + } +} + +impl TryFrom> for DocType { + type Error = DocTypeError; + + fn try_from(value: Vec) -> Result { + if value.is_empty() { + return Err(DocTypeError::Empty); + } + Ok(DocType(value)) + } +} + +impl TryFrom> for DocType { + type Error = DocTypeError; + + fn try_from(value: Vec) -> Result { + if value.is_empty() { + return Err(DocTypeError::Empty); + } + let converted = value + .into_iter() + .map(|s| { + s.parse::() + .map_err(|_| DocTypeError::StringConversion(s)) + }) + .collect::, _>>()?; + + Ok(DocType(converted)) + } +} + +impl Display for DocType { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { + write!( + f, + "[{}]", + self.0 + .iter() + .map(UuidV4::to_string) + .collect::>() + .join(", ") + ) + } +} + +// ; Document Type +// document_type = [ 1* uuid_v4 ] +// ; UUIDv4 +// uuid_v4 = #6.37(bytes .size 16) 
+impl Decode<'_, DecodeContext<'_>> for DocType { + fn decode( + d: &mut Decoder, decode_context: &mut DecodeContext, + ) -> Result { + const CONTEXT: &str = "DocType decoding"; + let parse_uuid = |d: &mut Decoder| UuidV4::decode(d, &mut CborContext::Tagged); + + match d.datatype()? { + minicbor::data::Type::Array => { + let len = d.array()?.ok_or_else(|| { + decode_context + .report + .other("Unable to decode array length", CONTEXT); + minicbor::decode::Error::message(format!( + "{CONTEXT}: Unable to decode array length" + )) + })?; + + if len == 0 { + decode_context.report.invalid_value( + "array length", + "0", + "must contain at least one UUIDv4", + CONTEXT, + ); + return Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: empty array" + ))); + } + + (0..len) + .map(|_| parse_uuid(d)) + .collect::, _>>() + .map(Self) + .map_err(|e| { + decode_context + .report + .other(&format!("Invalid UUIDv4 in array: {e}"), CONTEXT); + minicbor::decode::Error::message(format!( + "{CONTEXT}: Invalid UUIDv4 in array: {e}" + )) + }) + }, + minicbor::data::Type::Tag => { + // Handle single tagged UUID + match decode_context.compatibility_policy { + CompatibilityPolicy::Accept | CompatibilityPolicy::Warn => { + if matches!( + decode_context.compatibility_policy, + CompatibilityPolicy::Warn + ) { + warn!("{CONTEXT}: Conversion of document type single UUID to type DocType"); + } + + let uuid = parse_uuid(d).map_err(|e| { + let msg = format!("Cannot decode single UUIDv4: {e}"); + decode_context.report.invalid_value( + "Decode single UUIDv4", + &e.to_string(), + &msg, + CONTEXT, + ); + minicbor::decode::Error::message(format!("{CONTEXT}: {msg}")) + })?; + + Ok(map_doc_type(uuid)) + }, + + CompatibilityPolicy::Fail => { + let msg = "Conversion of document type single UUID to type DocType is not allowed"; + decode_context.report.other(msg, CONTEXT); + Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: {msg}" + ))) + }, + } + }, + other => { + 
decode_context.report.invalid_value(
+                    "decoding type",
+                    &format!("{other:?}"),
+                    "array or tag cbor",
+                    CONTEXT,
+                );
+                Err(minicbor::decode::Error::message(format!(
+                    "{CONTEXT}: expected array of UUID or tagged UUIDv4, got {other:?}",
+                )))
+            },
+        }
+    }
+}
+
+/// Map single UUID doc type to new list of doc types
+///
+fn map_doc_type(uuid: UuidV4) -> DocType {
+    match uuid {
+        id if Uuid::from(id) == deprecated::PROPOSAL_DOCUMENT_UUID_TYPE => PROPOSAL.clone(),
+        id if Uuid::from(id) == deprecated::COMMENT_DOCUMENT_UUID_TYPE => PROPOSAL_COMMENT.clone(),
+        id if Uuid::from(id) == deprecated::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE => {
+            PROPOSAL_SUBMISSION_ACTION.clone()
+        },
+        id => DocType(vec![id]),
+    }
+}
+
+impl<C> Encode<C> for DocType {
+    fn encode<W: minicbor::encode::Write>(
+        &self, e: &mut minicbor::Encoder<W>, _ctx: &mut C,
+    ) -> Result<(), minicbor::encode::Error<W::Error>> {
+        e.array(
+            self.0
+                .len()
+                .try_into()
+                .map_err(minicbor::encode::Error::message)?,
+        )?;
+
+        for id in &self.0 {
+            id.encode(e, &mut CborContext::Tagged)?;
+        }
+        Ok(())
+    }
+}
+
+impl<'de> Deserialize<'de> for DocType {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where D: Deserializer<'de> {
+        #[derive(Deserialize)]
+        #[serde(untagged)]
+        enum DocTypeInput {
+            /// Single UUID string.
+            Single(String),
+            /// List of UUID strings.
+ Multiple(Vec), + } + + let input = DocTypeInput::deserialize(deserializer)?; + let dt = match input { + DocTypeInput::Single(s) => { + let uuid = s.parse().map_err(|_| { + serde::de::Error::custom(DocTypeError::StringConversion(s.clone())) + })?; + // If there is a map from old (single uuid) to new use that list, else convert that + // single uuid to [uuid] - of type DocType + map_doc_type(uuid) + }, + DocTypeInput::Multiple(v) => v.try_into().map_err(serde::de::Error::custom)?, + }; + Ok(dt) + } +} + +impl From for Value { + fn from(value: DocType) -> Self { + Value::Array( + value + .0 + .iter() + .map(|uuidv4| { + Value::Tag( + UUID_CBOR_TAG, + Box::new(Value::Bytes(uuidv4.uuid().as_bytes().to_vec())), + ) + }) + .collect(), + ) + } +} + +// This is needed to preserve backward compatibility with the old solution. +impl PartialEq for DocType { + fn eq(&self, other: &Self) -> bool { + // List of special-case (single UUID) -> new DocType + // The old one should equal to the new one + let special_cases = [ + (deprecated::PROPOSAL_DOCUMENT_UUID_TYPE, &*PROPOSAL), + (deprecated::COMMENT_DOCUMENT_UUID_TYPE, &*PROPOSAL_COMMENT), + ( + deprecated::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, + &*PROPOSAL_SUBMISSION_ACTION, + ), + ]; + for (uuid, expected) in special_cases { + match DocType::try_from(uuid) { + Ok(single) => { + if (self.0 == single.0 && other.0 == expected.0) + || (other.0 == single.0 && self.0 == expected.0) + { + return true; + } + }, + Err(_) => return false, + } + } + self.0 == other.0 + } +} + +#[cfg(test)] +mod tests { + use catalyst_types::problem_report::ProblemReport; + use minicbor::Encoder; + use serde_json::json; + + use super::*; + + // + // Proposal Submission Action = 37(h'5e60e623ad024a1ba1ac406db978ee48') should map to + // [37(h'5e60e623ad024a1ba1ac406db978ee48'), 37(h'7808d2bad51140af84e8c0d1625fdfdc'), + // 37(h'78927329cfd94ea19c710e019b126a65')] + const PSA: &str = "D825505E60E623AD024A1BA1AC406DB978EE48"; + + #[test] + fn 
test_empty_doc_type_cbor_decode() { + assert!(>>::try_from(vec![]).is_err()); + + let mut report = ProblemReport::new("Test empty doc type"); + let mut decoded_context = DecodeContext { + compatibility_policy: CompatibilityPolicy::Accept, + report: &mut report, + }; + let mut decoder = Decoder::new(&[]); + assert!(DocType::decode(&mut decoder, &mut decoded_context).is_err()); + } + + #[test] + fn test_single_uuid_doc_type_fail_policy_cbor_decode() { + let mut report = ProblemReport::new("Test single uuid doc type - fail"); + let data = hex::decode(PSA).unwrap(); + let decoder = Decoder::new(&data); + let mut decoded_context = DecodeContext { + compatibility_policy: CompatibilityPolicy::Fail, + report: &mut report, + }; + assert!(DocType::decode(&mut decoder.clone(), &mut decoded_context).is_err()); + } + + #[test] + fn test_single_uuid_doc_type_warn_policy_cbor_decode() { + let mut report = ProblemReport::new("Test single uuid doc type - warn"); + let data = hex::decode(PSA).unwrap(); + let decoder = Decoder::new(&data); + let mut decoded_context = DecodeContext { + compatibility_policy: CompatibilityPolicy::Warn, + report: &mut report, + }; + let decoded_doc_type = DocType::decode(&mut decoder.clone(), &mut decoded_context).unwrap(); + assert_eq!(decoded_doc_type.doc_types().len(), 3); + } + + #[test] + fn test_single_uuid_doc_type_accept_policy_cbor_decode() { + let mut report = ProblemReport::new("Test single uuid doc type - accept"); + let data = hex::decode(PSA).unwrap(); + let decoder = Decoder::new(&data); + let mut decoded_context = DecodeContext { + compatibility_policy: CompatibilityPolicy::Accept, + report: &mut report, + }; + let decoded_doc_type = DocType::decode(&mut decoder.clone(), &mut decoded_context).unwrap(); + assert_eq!(decoded_doc_type.doc_types().len(), 3); + } + + #[test] + fn test_multi_uuid_doc_type_cbor_decode_encode() { + let uuidv4 = UuidV4::new(); + let mut report = ProblemReport::new("Test multi uuid doc type"); + let doc_type_list: 
DocType = vec![uuidv4, uuidv4].try_into().unwrap(); + let mut buffer = Vec::new(); + let mut encoder = Encoder::new(&mut buffer); + doc_type_list.encode(&mut encoder, &mut report).unwrap(); + let mut decoder = Decoder::new(&buffer); + let mut decoded_context = DecodeContext { + compatibility_policy: CompatibilityPolicy::Accept, + report: &mut report.clone(), + }; + let decoded_doc_type = DocType::decode(&mut decoder, &mut decoded_context).unwrap(); + assert_eq!(decoded_doc_type, doc_type_list); + } + + #[test] + fn test_valid_vec_string() { + let uuid = Uuid::new_v4().to_string(); + let input = vec![uuid.clone()]; + let doc_type = DocType::try_from(input).expect("should succeed"); + + assert_eq!(doc_type.0.len(), 1); + assert_eq!(doc_type.0.first().unwrap().to_string(), uuid); + } + + #[test] + fn test_empty_vec_string_fails() { + let input: Vec = vec![]; + let result = DocType::try_from(input); + assert!(matches!(result, Err(DocTypeError::Empty))); + } + + #[test] + fn test_invalid_uuid_vec_string() { + let input = vec!["not-a-uuid".to_string()]; + let result = DocType::try_from(input); + assert!(matches!(result, Err(DocTypeError::StringConversion(s)) if s == "not-a-uuid")); + } + + #[test] + fn test_doc_type_to_value() { + let uuid = uuid::Uuid::new_v4(); + let doc_type: Value = DocType(vec![UuidV4::try_from(uuid).unwrap()]).into(); + + for d in &doc_type.into_array().unwrap() { + let t = d.clone().into_tag().unwrap(); + assert_eq!(t.0, UUID_CBOR_TAG); + assert_eq!(t.1.as_bytes().unwrap().len(), 16); + } + } + + #[test] + fn test_doctype_equal_special_cases() { + // Direct equal + let uuid = deprecated::PROPOSAL_DOCUMENT_UUID_TYPE; + let dt1 = DocType::try_from(vec![uuid]).unwrap(); + let dt2 = DocType::try_from(vec![uuid]).unwrap(); + assert_eq!(dt1, dt2); + + // single -> special mapped type + let single = DocType::try_from(deprecated::PROPOSAL_DOCUMENT_UUID_TYPE).unwrap(); + assert_eq!(single, *PROPOSAL); + let single = 
DocType::try_from(deprecated::COMMENT_DOCUMENT_UUID_TYPE).unwrap(); + assert_eq!(single, *PROPOSAL_COMMENT); + let single = DocType::try_from(deprecated::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE).unwrap(); + assert_eq!(single, *PROPOSAL_SUBMISSION_ACTION); + } + + #[test] + fn test_deserialize_single_uuid_normal() { + let uuid = uuid::Uuid::new_v4().to_string(); + let json = json!(uuid); + let dt: DocType = serde_json::from_value(json).unwrap(); + + assert_eq!(dt.0.len(), 1); + assert_eq!(dt.0.first().unwrap().to_string(), uuid); + } + + #[test] + fn test_deserialize_multiple_uuids() { + let uuid1 = uuid::Uuid::new_v4().to_string(); + let uuid2 = uuid::Uuid::new_v4().to_string(); + let json = json!([uuid1.clone(), uuid2.clone()]); + + let dt: DocType = serde_json::from_value(json).unwrap(); + let actual = + dt.0.iter() + .map(std::string::ToString::to_string) + .collect::>(); + assert_eq!(actual, vec![uuid1, uuid2]); + } + + #[test] + fn test_deserialize_special_case() { + let uuid = deprecated::PROPOSAL_DOCUMENT_UUID_TYPE.to_string(); + let json = json!(uuid); + let dt: DocType = serde_json::from_value(json).unwrap(); + + assert_eq!(dt, *PROPOSAL); + } +} diff --git a/rust/signed_doc/src/metadata/document_ref.rs b/rust/signed_doc/src/metadata/document_ref.rs deleted file mode 100644 index 00e0bba241..0000000000 --- a/rust/signed_doc/src/metadata/document_ref.rs +++ /dev/null @@ -1,55 +0,0 @@ -//! Catalyst Signed Document Metadata. - -use std::fmt::Display; - -use coset::cbor::Value; - -use super::{utils::CborUuidV7, UuidV7}; - -/// Reference to a Document. 
-#[derive(Copy, Clone, Debug, PartialEq, serde::Serialize, serde::Deserialize)] -pub struct DocumentRef { - /// Reference to the Document Id - pub id: UuidV7, - /// Reference to the Document Ver - pub ver: UuidV7, -} - -impl Display for DocumentRef { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "id: {}, ver: {}", self.id, self.ver) - } -} - -impl TryFrom for Value { - type Error = anyhow::Error; - - fn try_from(value: DocumentRef) -> Result { - Ok(Value::Array(vec![ - Value::try_from(CborUuidV7(value.id))?, - Value::try_from(CborUuidV7(value.ver))?, - ])) - } -} - -impl TryFrom<&Value> for DocumentRef { - type Error = anyhow::Error; - - #[allow(clippy::indexing_slicing)] - fn try_from(val: &Value) -> anyhow::Result { - let Some(array) = val.as_array() else { - anyhow::bail!("Document Reference must be either a single UUID or an array of two"); - }; - anyhow::ensure!( - array.len() == 2, - "Document Reference array of two UUIDs was expected" - ); - let CborUuidV7(id) = CborUuidV7::try_from(&array[0])?; - let CborUuidV7(ver) = CborUuidV7::try_from(&array[1])?; - anyhow::ensure!( - ver >= id, - "Document Reference Version can never be smaller than its ID" - ); - Ok(DocumentRef { id, ver }) - } -} diff --git a/rust/signed_doc/src/metadata/document_refs/doc_locator.rs b/rust/signed_doc/src/metadata/document_refs/doc_locator.rs new file mode 100644 index 0000000000..61e6bac229 --- /dev/null +++ b/rust/signed_doc/src/metadata/document_refs/doc_locator.rs @@ -0,0 +1,175 @@ +//! Document Locator, where a document can be located. +//! A [CBOR Encoded IPLD Content Identifier](https://github.com/ipld/cid-cbor/) +//! or also known as [IPFS CID](https://docs.ipfs.tech/concepts/content-addressing/#what-is-a-cid). + +use std::fmt::Display; + +use catalyst_types::problem_report::ProblemReport; +use coset::cbor::Value; +use minicbor::{Decode, Decoder, Encode}; + +/// CBOR tag of IPLD content identifiers (CIDs). 
+const CID_TAG: u64 = 42; + +/// CID map key. +const CID_MAP_KEY: &str = "cid"; + +/// Document locator number of map item. +const DOC_LOC_MAP_ITEM: u64 = 1; + +/// Document locator, no size limit. +#[derive(Clone, Debug, Default, PartialEq, Hash, Eq, serde::Serialize)] +pub struct DocLocator(Vec); + +impl DocLocator { + #[must_use] + /// Length of the document locator. + pub fn len(&self) -> usize { + self.0.len() + } + + #[must_use] + /// Is the document locator empty. + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } +} + +impl From> for DocLocator { + fn from(value: Vec) -> Self { + DocLocator(value) + } +} + +impl Display for DocLocator { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "cid: 0x{}", hex::encode(self.0.as_slice())) + } +} + +impl From for Value { + fn from(value: DocLocator) -> Self { + Value::Map(vec![( + Value::Text(CID_MAP_KEY.to_string()), + Value::Tag(CID_TAG, Box::new(Value::Bytes(value.0.clone()))), + )]) + } +} + +// document_locator = { "cid" => cid } +impl Decode<'_, ProblemReport> for DocLocator { + fn decode( + d: &mut Decoder, report: &mut ProblemReport, + ) -> Result { + const CONTEXT: &str = "DocLocator decoding"; + + let len = d.map()?.ok_or_else(|| { + report.invalid_value("Map", "Invalid length", "Valid length", CONTEXT); + minicbor::decode::Error::message(format!("{CONTEXT}: expected valid map length")) + })?; + + if len != DOC_LOC_MAP_ITEM { + report.invalid_value( + "Map length", + &len.to_string(), + &DOC_LOC_MAP_ITEM.to_string(), + CONTEXT, + ); + return Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: expected map length {DOC_LOC_MAP_ITEM}, found {len}" + ))); + } + + let key = d.str().map_err(|e| { + report.invalid_value("Key", "Not a string", "String", CONTEXT); + e.with_message(format!("{CONTEXT}: expected string")) + })?; + + if key != "cid" { + report.invalid_value("Key", key, "'cid'", CONTEXT); + return Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: 
expected key 'cid', found '{key}'" + ))); + } + + let tag = d.tag().map_err(|e| { + report.invalid_value("CBOR tag", "Invalid tag", "Valid tag", CONTEXT); + e.with_message(format!("{CONTEXT}: expected tag")) + })?; + + if tag.as_u64() != CID_TAG { + report.invalid_value("CBOR tag", &tag.to_string(), &CID_TAG.to_string(), CONTEXT); + return Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: expected tag {CID_TAG}, found {tag}", + ))); + } + + // No length limit + let cid_bytes = d.bytes().map_err(|e| { + report.invalid_value("CID bytes", "Invalid bytes", "Valid bytes", CONTEXT); + e.with_message(format!("{CONTEXT}: expected bytes")) + })?; + + Ok(DocLocator(cid_bytes.to_vec())) + } +} + +impl Encode<()> for DocLocator { + fn encode( + &self, e: &mut minicbor::Encoder, (): &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.map(DOC_LOC_MAP_ITEM)?; + e.str(CID_MAP_KEY)?; + e.tag(minicbor::data::Tag::new(CID_TAG))?; + e.bytes(&self.0)?; + Ok(()) + } +} + +#[cfg(test)] +mod tests { + + use minicbor::{Decoder, Encoder}; + + use super::*; + + #[test] + fn test_doc_locator_encode_decode() { + let mut report = ProblemReport::new("Test doc locator"); + let locator = DocLocator(vec![1, 2, 3, 4]); + let mut buffer = Vec::new(); + let mut encoder = Encoder::new(&mut buffer); + locator.encode(&mut encoder, &mut ()).unwrap(); + let mut decoder = Decoder::new(&buffer); + let decoded_doc_loc = DocLocator::decode(&mut decoder, &mut report).unwrap(); + assert_eq!(locator, decoded_doc_loc); + } + + // Empty doc locator should not fail + #[test] + fn test_doc_locator_encode_decode_empty() { + let mut report = ProblemReport::new("Test doc locator empty"); + let locator = DocLocator(vec![]); + let mut buffer = Vec::new(); + let mut encoder = Encoder::new(&mut buffer); + locator.encode(&mut encoder, &mut ()).unwrap(); + let mut decoder = Decoder::new(&buffer); + let decoded_doc_loc = DocLocator::decode(&mut decoder, &mut report).unwrap(); + assert_eq!(locator, 
decoded_doc_loc); + } + + #[test] + #[allow(clippy::indexing_slicing)] + fn test_doc_locator_to_value() { + let data = vec![1, 2, 3, 4]; + let locator = DocLocator(data.clone()); + let value: Value = locator.into(); + let map = value.into_map().unwrap(); + assert_eq!(map.len(), usize::try_from(DOC_LOC_MAP_ITEM).unwrap()); + let key = map[0].0.clone().into_text().unwrap(); + assert_eq!(key, CID_MAP_KEY); + let (tag, value) = map[0].1.clone().into_tag().unwrap(); + assert_eq!(tag, CID_TAG); + assert_eq!(value.into_bytes().unwrap(), data); + } +} diff --git a/rust/signed_doc/src/metadata/document_refs/doc_ref.rs b/rust/signed_doc/src/metadata/document_refs/doc_ref.rs new file mode 100644 index 0000000000..2339fc6450 --- /dev/null +++ b/rust/signed_doc/src/metadata/document_refs/doc_ref.rs @@ -0,0 +1,171 @@ +//! Document reference. + +use std::fmt::Display; + +use catalyst_types::uuid::{CborContext, UuidV7}; +use coset::cbor::Value; +use minicbor::{Decode, Decoder, Encode}; + +use super::{doc_locator::DocLocator, DocRefError}; +use crate::{metadata::utils::CborUuidV7, DecodeContext}; + +/// Number of item that should be in each document reference instance. +const DOC_REF_ARR_ITEM: u64 = 3; + +/// Reference to a Document. +#[derive(Clone, Debug, PartialEq, Hash, Eq, serde::Serialize)] +pub struct DocumentRef { + /// Reference to the Document Id + id: UuidV7, + /// Reference to the Document Ver + ver: UuidV7, + /// Document locator + doc_locator: DocLocator, +} + +impl DocumentRef { + /// Create a new instance of document reference. + #[must_use] + pub fn new(id: UuidV7, ver: UuidV7, doc_locator: DocLocator) -> Self { + Self { + id, + ver, + doc_locator, + } + } + + /// Get Document Id. + #[must_use] + pub fn id(&self) -> &UuidV7 { + &self.id + } + + /// Get Document Ver. + #[must_use] + pub fn ver(&self) -> &UuidV7 { + &self.ver + } + + /// Get Document Locator. 
+ #[must_use] + pub fn doc_locator(&self) -> &DocLocator { + &self.doc_locator + } +} + +impl Display for DocumentRef { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "id: {}, ver: {}, document_locator: {}", + self.id, self.ver, self.doc_locator + ) + } +} + +impl TryFrom for Value { + type Error = DocRefError; + + fn try_from(value: DocumentRef) -> Result { + let id = Value::try_from(CborUuidV7(value.id)) + .map_err(|_| DocRefError::InvalidUuidV7(value.id, "id".to_string()))?; + + let ver = Value::try_from(CborUuidV7(value.ver)) + .map_err(|_| DocRefError::InvalidUuidV7(value.ver, "ver".to_string()))?; + + let locator = value.doc_locator.clone().into(); + + Ok(Value::Array(vec![id, ver, locator])) + } +} + +impl Decode<'_, DecodeContext<'_>> for DocumentRef { + fn decode( + d: &mut minicbor::Decoder<'_>, decode_context: &mut DecodeContext<'_>, + ) -> Result { + const CONTEXT: &str = "DocumentRef decoding"; + let parse_uuid = |d: &mut Decoder| UuidV7::decode(d, &mut CborContext::Tagged); + + let arr = d.array()?.ok_or_else(|| { + decode_context + .report + .other("Unable to decode array length", CONTEXT); + minicbor::decode::Error::message(format!("{CONTEXT}: Unable to decode array length")) + })?; + if arr != DOC_REF_ARR_ITEM { + decode_context.report.invalid_value( + "Array length", + &arr.to_string(), + &DOC_REF_ARR_ITEM.to_string(), + CONTEXT, + ); + return Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: expected {DOC_REF_ARR_ITEM} items, found {arr}" + ))); + } + let id = parse_uuid(d).map_err(|e| { + decode_context + .report + .other(&format!("Invalid ID UUIDv7: {e}"), CONTEXT); + e.with_message("Invalid ID UUIDv7") + })?; + + let ver = parse_uuid(d).map_err(|e| { + decode_context + .report + .other(&format!("Invalid Ver UUIDv7: {e}"), CONTEXT); + e.with_message("Invalid Ver UUIDv7") + })?; + + let locator = DocLocator::decode(d, decode_context.report).map_err(|e| { + decode_context + .report + 
.other(&format!("Failed to decode locator {e}"), CONTEXT); + e.with_message("Failed to decode locator") + })?; + + Ok(DocumentRef { + id, + ver, + doc_locator: locator, + }) + } +} + +impl Encode<()> for DocumentRef { + fn encode( + &self, e: &mut minicbor::Encoder, ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.array(DOC_REF_ARR_ITEM)?; + self.id.encode(e, &mut CborContext::Tagged)?; + self.ver.encode(e, &mut CborContext::Tagged)?; + self.doc_locator.encode(e, ctx)?; + Ok(()) + } +} + +#[cfg(test)] +mod test { + use catalyst_types::uuid::{UuidV7, UUID_CBOR_TAG}; + use coset::cbor::Value; + + use crate::metadata::document_refs::{doc_ref::DOC_REF_ARR_ITEM, DocumentRef}; + + #[test] + #[allow(clippy::indexing_slicing)] + fn test_doc_refs_to_value() { + let uuidv7 = UuidV7::new(); + let doc_ref = DocumentRef::new(uuidv7, uuidv7, vec![1, 2, 3].into()); + let value: Value = doc_ref.try_into().unwrap(); + let arr = value.into_array().unwrap(); + assert_eq!(arr.len(), usize::try_from(DOC_REF_ARR_ITEM).unwrap()); + let (id_tag, value) = arr[0].clone().into_tag().unwrap(); + assert_eq!(id_tag, UUID_CBOR_TAG); + assert_eq!(value.as_bytes().unwrap().len(), 16); + let (ver_tag, value) = arr[1].clone().into_tag().unwrap(); + assert_eq!(ver_tag, UUID_CBOR_TAG); + assert_eq!(value.as_bytes().unwrap().len(), 16); + let map = arr[2].clone().into_map().unwrap(); + assert_eq!(map.len(), 1); + } +} diff --git a/rust/signed_doc/src/metadata/document_refs/mod.rs b/rust/signed_doc/src/metadata/document_refs/mod.rs new file mode 100644 index 0000000000..3c3cf6704f --- /dev/null +++ b/rust/signed_doc/src/metadata/document_refs/mod.rs @@ -0,0 +1,413 @@ +//! Document references. 
+ +mod doc_locator; +mod doc_ref; +use std::{fmt::Display, str::FromStr}; + +use catalyst_types::uuid::{CborContext, UuidV7}; +use coset::cbor::Value; +pub use doc_locator::DocLocator; +pub use doc_ref::DocumentRef; +use minicbor::{Decode, Decoder, Encode}; +use serde::{Deserialize, Deserializer}; +use tracing::warn; + +use crate::{CompatibilityPolicy, DecodeContext}; + +/// List of document reference instance. +#[derive(Clone, Debug, PartialEq, Hash, Eq, serde::Serialize)] +pub struct DocumentRefs(Vec); + +/// Document reference error. +#[derive(Debug, Clone, thiserror::Error)] +pub enum DocRefError { + /// Invalid `UUIDv7`. + #[error("Invalid UUID: {0} for field {1}")] + InvalidUuidV7(UuidV7, String), + /// `DocRef` cannot be empty. + #[error("DocType cannot be empty")] + Empty, + /// Invalid string conversion + #[error("Invalid string conversion: {0}")] + StringConversion(String), + /// Cannot decode hex. + #[error("Cannot decode hex: {0}")] + HexDecode(String), +} + +impl DocumentRefs { + /// Get a list of document reference instance. 
+ #[must_use] + pub fn doc_refs(&self) -> &Vec { + &self.0 + } +} + +impl From> for DocumentRefs { + fn from(value: Vec) -> Self { + DocumentRefs(value) + } +} + +impl Display for DocumentRefs { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let items = self + .0 + .iter() + .map(|inner| format!("{inner}")) + .collect::>() + .join(", "); + write!(f, "[{items}]") + } +} + +impl Decode<'_, DecodeContext<'_>> for DocumentRefs { + fn decode( + d: &mut minicbor::Decoder<'_>, decode_context: &mut DecodeContext<'_>, + ) -> Result { + const CONTEXT: &str = "DocumentRefs decoding"; + let parse_uuid = |d: &mut Decoder| UuidV7::decode(d, &mut CborContext::Tagged); + + // Old: [id, ver] + // New: [ 1* [id, ver, locator] ] + let outer_arr = d.array()?.ok_or_else(|| { + decode_context.report.invalid_value( + "Array", + "Invalid array length", + "Valid array length", + CONTEXT, + ); + minicbor::decode::Error::message(format!("{CONTEXT}: expected valid array length")) + })?; + + match d.datatype()? 
{ + // New structure inner part [id, ver, locator] + minicbor::data::Type::Array => { + let mut doc_refs = vec![]; + for _ in 0..outer_arr { + let doc_ref = DocumentRef::decode(d, decode_context)?; + doc_refs.push(doc_ref); + } + Ok(DocumentRefs(doc_refs)) + }, + // Old structure [id, ver] + minicbor::data::Type::Tag => { + match decode_context.compatibility_policy { + CompatibilityPolicy::Accept | CompatibilityPolicy::Warn => { + if matches!( + decode_context.compatibility_policy, + CompatibilityPolicy::Warn + ) { + warn!("{CONTEXT}: Conversion of document reference, id and version, to list of document reference with doc locator"); + } + let id = parse_uuid(d).map_err(|e| { + decode_context + .report + .other(&format!("Invalid ID UUIDv7: {e}"), CONTEXT); + e.with_message("Invalid ID UUIDv7") + })?; + let ver = parse_uuid(d).map_err(|e| { + decode_context + .report + .other(&format!("Invalid Ver UUIDv7: {e}"), CONTEXT); + e.with_message("Invalid Ver UUIDv7") + })?; + + Ok(DocumentRefs(vec![DocumentRef::new( + id, + ver, + // If old implementation is used, the locator will be empty + DocLocator::default(), + )])) + }, + CompatibilityPolicy::Fail => { + let msg = "Conversion of document reference id and version to list of document reference with doc locator is not allowed"; + decode_context.report.other(msg, CONTEXT); + Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: {msg}" + ))) + }, + } + }, + other => { + decode_context.report.invalid_value( + "Decoding type", + &other.to_string(), + "Array or tag", + CONTEXT, + ); + Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: Expected array of document reference, or tag of version and id, found {other}" + ))) + }, + } + } +} + +impl Encode<()> for DocumentRefs { + fn encode( + &self, e: &mut minicbor::Encoder, ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + const CONTEXT: &str = "DocumentRefs encoding"; + if self.0.is_empty() { + return Err(minicbor::encode::Error::message(format!( + 
"{CONTEXT}: DocumentRefs cannot be empty" + ))); + } + e.array( + self.0 + .len() + .try_into() + .map_err(|e| minicbor::encode::Error::message(format!("{CONTEXT}, {e}")))?, + )?; + + for doc_ref in &self.0 { + doc_ref.encode(e, ctx)?; + } + Ok(()) + } +} + +impl TryFrom for Value { + type Error = DocRefError; + + fn try_from(value: DocumentRefs) -> Result { + if value.0.is_empty() { + return Err(DocRefError::Empty); + } + + let array_values: Result, Self::Error> = value + .0 + .iter() + .map(|inner| Value::try_from(inner.to_owned())) + .collect(); + + Ok(Value::Array(array_values?)) + } +} + +impl TryFrom<&DocumentRefs> for Value { + type Error = DocRefError; + + fn try_from(value: &DocumentRefs) -> Result { + value.clone().try_into() + } +} + +impl<'de> Deserialize<'de> for DocumentRefs { + fn deserialize(deserializer: D) -> Result + where D: Deserializer<'de> { + /// Old structure deserialize as map {id, ver} + #[derive(Deserialize)] + struct OldRef { + /// "id": "uuidv7 + id: String, + /// "ver": "uuidv7" + ver: String, + } + + /// New structure as deserialize as map {id, ver, cid} + #[derive(Deserialize)] + struct NewRef { + /// "id": "uuidv7" + id: String, + /// "ver": "uuidv7" + ver: String, + /// "cid": "0x..." + cid: String, + } + + #[derive(Deserialize)] + #[serde(untagged)] + enum DocRefInput { + /// Old structure of document reference. + Old(OldRef), + /// New structure of document reference. 
+ New(Vec), + } + + let input = DocRefInput::deserialize(deserializer)?; + let dr = match input { + DocRefInput::Old(value) => { + let id = UuidV7::from_str(&value.id).map_err(|_| { + serde::de::Error::custom(DocRefError::StringConversion(value.id.clone())) + })?; + let ver = UuidV7::from_str(&value.ver).map_err(|_| { + serde::de::Error::custom(DocRefError::StringConversion(value.ver.clone())) + })?; + + DocumentRefs(vec![DocumentRef::new(id, ver, DocLocator::default())]) + }, + DocRefInput::New(value) => { + let mut dr = vec![]; + for v in value { + let id = UuidV7::from_str(&v.id).map_err(|_| { + serde::de::Error::custom(DocRefError::StringConversion(v.id.clone())) + })?; + let ver = UuidV7::from_str(&v.ver).map_err(|_| { + serde::de::Error::custom(DocRefError::StringConversion(v.ver.clone())) + })?; + let cid = &v.cid.strip_prefix("0x").unwrap_or(&v.cid); + let locator = hex::decode(cid).map_err(|_| { + serde::de::Error::custom(DocRefError::HexDecode(v.cid.clone())) + })?; + dr.push(DocumentRef::new(id, ver, locator.into())); + } + DocumentRefs(dr) + }, + }; + + Ok(dr) + } +} + +#[cfg(test)] +mod tests { + + use catalyst_types::problem_report::ProblemReport; + use minicbor::Encoder; + use serde_json::json; + + use super::*; + + #[allow(clippy::unwrap_used)] + fn gen_old_doc_ref(id: UuidV7, ver: UuidV7) -> Vec { + let mut buffer = Vec::new(); + let mut encoder = Encoder::new(&mut buffer); + encoder.array(2).unwrap(); + id.encode(&mut encoder, &mut CborContext::Tagged).unwrap(); + ver.encode(&mut encoder, &mut CborContext::Tagged).unwrap(); + buffer + } + + #[test] + fn test_old_doc_refs_fail_policy_cbor_decode() { + let mut report = ProblemReport::new("Test doc ref fail policy"); + let mut decoded_context = DecodeContext { + compatibility_policy: CompatibilityPolicy::Fail, + report: &mut report, + }; + let uuidv7 = UuidV7::new(); + let old_doc_ref = gen_old_doc_ref(uuidv7, uuidv7); + let decoder = Decoder::new(&old_doc_ref); + assert!(DocumentRefs::decode(&mut 
decoder.clone(), &mut decoded_context).is_err()); + } + + #[test] + fn test_old_doc_refs_warn_policy_cbor_decode() { + let mut report = ProblemReport::new("Test doc ref warn policy"); + let mut decoded_context = DecodeContext { + compatibility_policy: CompatibilityPolicy::Warn, + report: &mut report, + }; + let uuidv7 = UuidV7::new(); + let old_doc_ref = gen_old_doc_ref(uuidv7, uuidv7); + let decoder = Decoder::new(&old_doc_ref); + let decoded_doc_ref = + DocumentRefs::decode(&mut decoder.clone(), &mut decoded_context).unwrap(); + assert_eq!(decoded_doc_ref.doc_refs().len(), 1); + assert_eq!( + decoded_doc_ref + .doc_refs() + .first() + .unwrap() + .doc_locator() + .len(), + 0 + ); + } + + #[test] + fn test_old_doc_refs_accept_policy_cbor_decode() { + let mut report = ProblemReport::new("Test doc ref accept policy"); + let mut decoded_context = DecodeContext { + compatibility_policy: CompatibilityPolicy::Accept, + report: &mut report, + }; + let uuidv7 = UuidV7::new(); + let old_doc_ref = gen_old_doc_ref(uuidv7, uuidv7); + let decoder = Decoder::new(&old_doc_ref); + let decoded_doc_ref = + DocumentRefs::decode(&mut decoder.clone(), &mut decoded_context).unwrap(); + assert_eq!(decoded_doc_ref.doc_refs().len(), 1); + assert_eq!( + decoded_doc_ref + .doc_refs() + .first() + .unwrap() + .doc_locator() + .len(), + 0 + ); + } + + #[test] + fn test_doc_refs_cbor_encode_decode() { + let mut report = ProblemReport::new("Test doc refs"); + + let uuidv7 = UuidV7::new(); + let doc_ref = DocumentRef::new(uuidv7, uuidv7, vec![1, 2, 3, 4].into()); + let doc_refs = DocumentRefs(vec![doc_ref.clone(), doc_ref]); + let mut buffer = Vec::new(); + let mut encoder = Encoder::new(&mut buffer); + doc_refs.encode(&mut encoder, &mut ()).unwrap(); + let mut decoder = Decoder::new(&buffer); + let mut decoded_context = DecodeContext { + compatibility_policy: CompatibilityPolicy::Accept, + report: &mut report, + }; + let decoded_doc_refs = DocumentRefs::decode(&mut decoder, &mut 
decoded_context).unwrap(); + assert_eq!(decoded_doc_refs, doc_refs); + } + + #[test] + fn test_doc_refs_to_value() { + let uuidv7 = UuidV7::new(); + let doc_ref = DocumentRef::new(uuidv7, uuidv7, vec![1, 2, 3].into()); + let doc_ref = DocumentRefs(vec![doc_ref.clone(), doc_ref]); + let value: Value = doc_ref.try_into().unwrap(); + assert_eq!(value.as_array().unwrap().len(), 2); + } + + #[test] + fn test_deserialize_old_doc_ref() { + let uuidv7 = UuidV7::new(); + let json = json!( + { + "id": uuidv7.to_string(), + "ver": uuidv7.to_string(), + } + ); + let doc_ref: DocumentRefs = serde_json::from_value(json).unwrap(); + let dr = doc_ref.doc_refs().first().unwrap(); + assert_eq!(*dr.id(), uuidv7); + assert_eq!(*dr.ver(), uuidv7); + assert_eq!(dr.doc_locator().len(), 0); + } + + #[test] + fn test_deserialize_new_doc_ref() { + let uuidv7 = UuidV7::new(); + let data = vec![1, 2, 3, 4]; + let hex_data = format!("0x{}", hex::encode(data.clone())); + let json = json!( + [{ + "id": uuidv7.to_string(), + "ver": uuidv7.to_string(), + "cid": hex_data, + }, + { + "id": uuidv7.to_string(), + "ver": uuidv7.to_string(), + "cid": hex_data, + }, + ] + ); + let doc_ref: DocumentRefs = serde_json::from_value(json).unwrap(); + assert!(doc_ref.doc_refs().len() == 2); + let dr = doc_ref.doc_refs().first().unwrap(); + assert_eq!(*dr.id(), uuidv7); + assert_eq!(*dr.ver(), uuidv7); + assert_eq!(*dr.doc_locator(), data.into()); + } +} diff --git a/rust/signed_doc/src/metadata/extra_fields.rs b/rust/signed_doc/src/metadata/extra_fields.rs deleted file mode 100644 index 5decc4a784..0000000000 --- a/rust/signed_doc/src/metadata/extra_fields.rs +++ /dev/null @@ -1,239 +0,0 @@ -//! Catalyst Signed Document Extra Fields. 
- -use catalyst_types::problem_report::ProblemReport; -use coset::{cbor::Value, Label, ProtectedHeader}; - -use super::{ - cose_protected_header_find, utils::decode_document_field_from_protected_header, DocumentRef, - Section, -}; - -/// `ref` field COSE key value -const REF_KEY: &str = "ref"; -/// `template` field COSE key value -const TEMPLATE_KEY: &str = "template"; -/// `reply` field COSE key value -const REPLY_KEY: &str = "reply"; -/// `section` field COSE key value -const SECTION_KEY: &str = "section"; -/// `collabs` field COSE key value -const COLLABS_KEY: &str = "collabs"; -/// `parameters` field COSE key value -const PARAMETERS_KEY: &str = "parameters"; -/// `brand_id` field COSE key value (alias of the `parameters` field) -const BRAND_ID_KEY: &str = "brand_id"; -/// `campaign_id` field COSE key value (alias of the `parameters` field) -const CAMPAIGN_ID_KEY: &str = "campaign_id"; -/// `category_id` field COSE key value (alias of the `parameters` field) -const CATEGORY_ID_KEY: &str = "category_id"; - -/// Extra Metadata Fields. -/// -/// These values are extracted from the COSE Sign protected header labels. -#[derive(Clone, Default, Debug, PartialEq, serde::Serialize, serde::Deserialize)] -pub struct ExtraFields { - /// Reference to the latest document. - #[serde(rename = "ref", skip_serializing_if = "Option::is_none")] - doc_ref: Option, - /// Reference to the document template. - #[serde(skip_serializing_if = "Option::is_none")] - template: Option, - /// Reference to the document reply. - #[serde(skip_serializing_if = "Option::is_none")] - reply: Option, - /// Reference to the document section. - #[serde(skip_serializing_if = "Option::is_none")] - section: Option
, - /// Reference to the document collaborators. Collaborator type is TBD. - #[serde(default = "Vec::new", skip_serializing_if = "Vec::is_empty")] - collabs: Vec, - /// Reference to the parameters document. - #[serde(skip_serializing_if = "Option::is_none")] - parameters: Option, -} - -impl ExtraFields { - /// Return `ref` field. - #[must_use] - pub fn doc_ref(&self) -> Option { - self.doc_ref - } - - /// Return `template` field. - #[must_use] - pub fn template(&self) -> Option { - self.template - } - - /// Return `reply` field. - #[must_use] - pub fn reply(&self) -> Option { - self.reply - } - - /// Return `section` field. - #[must_use] - pub fn section(&self) -> Option<&Section> { - self.section.as_ref() - } - - /// Return `collabs` field. - #[must_use] - pub fn collabs(&self) -> &Vec { - &self.collabs - } - - /// Return `parameters` field. - #[must_use] - pub fn parameters(&self) -> Option { - self.parameters - } - - /// Fill the COSE header `ExtraFields` data into the header builder. 
- pub(super) fn fill_cose_header_fields( - &self, mut builder: coset::HeaderBuilder, - ) -> anyhow::Result { - if let Some(doc_ref) = &self.doc_ref { - builder = builder.text_value(REF_KEY.to_string(), Value::try_from(*doc_ref)?); - } - if let Some(template) = &self.template { - builder = builder.text_value(TEMPLATE_KEY.to_string(), Value::try_from(*template)?); - } - if let Some(reply) = &self.reply { - builder = builder.text_value(REPLY_KEY.to_string(), Value::try_from(*reply)?); - } - - if let Some(section) = &self.section { - builder = builder.text_value(SECTION_KEY.to_string(), Value::from(section.clone())); - } - - if !self.collabs.is_empty() { - builder = builder.text_value( - COLLABS_KEY.to_string(), - Value::Array(self.collabs.iter().cloned().map(Value::Text).collect()), - ); - } - - if let Some(parameters) = &self.parameters { - builder = builder.text_value(PARAMETERS_KEY.to_string(), Value::try_from(*parameters)?); - } - - Ok(builder) - } - - /// Converting COSE Protected Header to `ExtraFields`. - pub(crate) fn from_protected_header( - protected: &ProtectedHeader, error_report: &ProblemReport, - ) -> Self { - /// Context for problem report messages during decoding from COSE protected - /// header. 
- const COSE_DECODING_CONTEXT: &str = "COSE ProtectedHeader to ExtraFields"; - - let doc_ref = decode_document_field_from_protected_header( - protected, - REF_KEY, - COSE_DECODING_CONTEXT, - error_report, - ); - let template = decode_document_field_from_protected_header( - protected, - TEMPLATE_KEY, - COSE_DECODING_CONTEXT, - error_report, - ); - let reply = decode_document_field_from_protected_header( - protected, - REPLY_KEY, - COSE_DECODING_CONTEXT, - error_report, - ); - let section = decode_document_field_from_protected_header( - protected, - SECTION_KEY, - COSE_DECODING_CONTEXT, - error_report, - ); - - // process `parameters` field and all its aliases - let (parameters, has_multiple_fields) = [ - PARAMETERS_KEY, - BRAND_ID_KEY, - CAMPAIGN_ID_KEY, - CATEGORY_ID_KEY, - ] - .iter() - .filter_map(|field_name| -> Option { - decode_document_field_from_protected_header( - protected, - field_name, - COSE_DECODING_CONTEXT, - error_report, - ) - }) - .fold((None, false), |(res, _), v| (Some(v), res.is_some())); - if has_multiple_fields { - error_report.duplicate_field( - "brand_id, campaign_id, category_id", - "Only value at the same time is allowed parameters, brand_id, campaign_id, category_id", - "Validation of parameters field aliases" - ); - } - - let mut extra = ExtraFields { - doc_ref, - template, - reply, - section, - parameters, - ..Default::default() - }; - - if let Some(cbor_doc_collabs) = cose_protected_header_find(protected, |key| { - key == &Label::Text(COLLABS_KEY.to_string()) - }) { - if let Ok(collabs) = cbor_doc_collabs.clone().into_array() { - let mut c = Vec::new(); - for (ids, collaborator) in collabs.iter().cloned().enumerate() { - match collaborator.clone().into_text() { - Ok(collaborator) => { - c.push(collaborator); - }, - Err(_) => { - error_report.conversion_error( - &format!("COSE protected header collaborator index {ids}"), - &format!("{collaborator:?}"), - "Expected a CBOR String", - &format!( - "{COSE_DECODING_CONTEXT}, converting 
collaborator to String", - ), - ); - }, - } - } - extra.collabs = c; - } else { - error_report.conversion_error( - "CBOR COSE protected header collaborators", - &format!("{cbor_doc_collabs:?}"), - "Expected a CBOR Array", - &format!("{COSE_DECODING_CONTEXT}, converting collaborators to Array",), - ); - }; - } - - extra - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn empty_extra_fields_json_serde_test() { - let extra = ExtraFields::default(); - - let json = serde_json::to_value(extra).unwrap(); - assert_eq!(json, serde_json::json!({})); - } -} diff --git a/rust/signed_doc/src/metadata/mod.rs b/rust/signed_doc/src/metadata/mod.rs index bbbdb1677d..878e9e72aa 100644 --- a/rust/signed_doc/src/metadata/mod.rs +++ b/rust/signed_doc/src/metadata/mod.rs @@ -1,26 +1,30 @@ //! Catalyst Signed Document Metadata. -use std::fmt::{Display, Formatter}; +use std::{ + collections::{btree_map, BTreeMap}, + error::Error, + fmt::{Display, Formatter}, +}; mod content_encoding; mod content_type; -mod document_ref; -mod extra_fields; +pub(crate) mod doc_type; +mod document_refs; mod section; +mod supported_field; pub(crate) mod utils; -use catalyst_types::{ - problem_report::ProblemReport, - uuid::{UuidV4, UuidV7}, -}; +use catalyst_types::{problem_report::ProblemReport, uuid::UuidV7}; pub use content_encoding::ContentEncoding; pub use content_type::ContentType; -use coset::{cbor::Value, iana::CoapContentFormat}; -pub use document_ref::DocumentRef; -pub use extra_fields::ExtraFields; +pub use doc_type::DocType; +pub use document_refs::{DocLocator, DocumentRef, DocumentRefs}; +use minicbor::Decoder; pub use section::Section; -use utils::{ - cose_protected_header_find, decode_document_field_from_protected_header, CborUuidV4, CborUuidV7, -}; +use strum::IntoDiscriminant as _; +use utils::{cose_protected_header_find, decode_document_field_from_protected_header, CborUuidV7}; + +pub(crate) use crate::metadata::supported_field::{SupportedField, SupportedLabel}; +use 
crate::{decode_context::DecodeContext, metadata::utils::decode_cose_protected_header_value}; /// `content_encoding` field COSE key value const CONTENT_ENCODING_KEY: &str = "Content-Encoding"; @@ -31,41 +35,40 @@ const ID_KEY: &str = "id"; /// `ver` field COSE key value const VER_KEY: &str = "ver"; +/// `ref` field COSE key value +const REF_KEY: &str = "ref"; +/// `template` field COSE key value +const TEMPLATE_KEY: &str = "template"; +/// `reply` field COSE key value +const REPLY_KEY: &str = "reply"; +/// `section` field COSE key value +const SECTION_KEY: &str = "section"; +/// `collabs` field COSE key value +const COLLABS_KEY: &str = "collabs"; +/// `parameters` field COSE key value +const PARAMETERS_KEY: &str = "parameters"; +/// `brand_id` field COSE key value (alias of the `parameters` field) +const BRAND_ID_KEY: &str = "brand_id"; +/// `campaign_id` field COSE key value (alias of the `parameters` field) +const CAMPAIGN_ID_KEY: &str = "campaign_id"; +/// `category_id` field COSE key value (alias of the `parameters` field) +const CATEGORY_ID_KEY: &str = "category_id"; + /// Document Metadata. /// /// These values are extracted from the COSE Sign protected header. #[derive(Clone, Debug, PartialEq, Default)] -pub struct Metadata(InnerMetadata); - -/// An actual representation of all metadata fields. -#[derive(Clone, Debug, PartialEq, serde::Deserialize, Default)] -pub(crate) struct InnerMetadata { - /// Document Type `UUIDv4`. - #[serde(rename = "type")] - doc_type: Option, - /// Document ID `UUIDv7`. - id: Option, - /// Document Version `UUIDv7`. - ver: Option, - /// Document Payload Content Type. - #[serde(rename = "content-type")] - content_type: Option, - /// Document Payload Content Encoding. - #[serde(rename = "content-encoding")] - content_encoding: Option, - /// Additional Metadata Fields. - #[serde(flatten)] - extra: ExtraFields, -} +pub struct Metadata(BTreeMap); impl Metadata { - /// Return Document Type `UUIDv4`. 
+ /// Return Document Type `DocType` - a list of `UUIDv4`. /// /// # Errors /// - Missing 'type' field. - pub fn doc_type(&self) -> anyhow::Result { + pub fn doc_type(&self) -> anyhow::Result<&DocType> { self.0 - .doc_type + .get(&SupportedLabel::Type) + .and_then(SupportedField::try_as_type_ref) .ok_or(anyhow::anyhow!("Missing 'type' field")) } @@ -74,7 +77,11 @@ impl Metadata { /// # Errors /// - Missing 'id' field. pub fn doc_id(&self) -> anyhow::Result { - self.0.id.ok_or(anyhow::anyhow!("Missing 'id' field")) + self.0 + .get(&SupportedLabel::Id) + .and_then(SupportedField::try_as_id_ref) + .copied() + .ok_or(anyhow::anyhow!("Missing 'id' field")) } /// Return Document Version `UUIDv7`. @@ -82,7 +89,11 @@ impl Metadata { /// # Errors /// - Missing 'ver' field. pub fn doc_ver(&self) -> anyhow::Result { - self.0.ver.ok_or(anyhow::anyhow!("Missing 'ver' field")) + self.0 + .get(&SupportedLabel::Ver) + .and_then(SupportedField::try_as_ver_ref) + .copied() + .ok_or(anyhow::anyhow!("Missing 'ver' field")) } /// Returns the Document Content Type, if any. @@ -91,74 +102,145 @@ impl Metadata { /// - Missing 'content-type' field. pub fn content_type(&self) -> anyhow::Result { self.0 - .content_type + .get(&SupportedLabel::ContentType) + .and_then(SupportedField::try_as_content_type_ref) + .copied() .ok_or(anyhow::anyhow!("Missing 'content-type' field")) } /// Returns the Document Content Encoding, if any. #[must_use] pub fn content_encoding(&self) -> Option { - self.0.content_encoding + self.0 + .get(&SupportedLabel::ContentEncoding) + .and_then(SupportedField::try_as_content_encoding_ref) + .copied() + } + + /// Return `ref` field. + #[must_use] + pub fn doc_ref(&self) -> Option<&DocumentRefs> { + self.0 + .get(&SupportedLabel::Ref) + .and_then(SupportedField::try_as_ref_ref) + } + + /// Return `template` field. 
+ #[must_use] + pub fn template(&self) -> Option<&DocumentRefs> { + self.0 + .get(&SupportedLabel::Template) + .and_then(SupportedField::try_as_template_ref) + } + + /// Return `reply` field. + #[must_use] + pub fn reply(&self) -> Option<&DocumentRefs> { + self.0 + .get(&SupportedLabel::Reply) + .and_then(SupportedField::try_as_reply_ref) + } + + /// Return `section` field. + #[must_use] + pub fn section(&self) -> Option<&Section> { + self.0 + .get(&SupportedLabel::Section) + .and_then(SupportedField::try_as_section_ref) } - /// Return reference to additional metadata fields. + /// Return `collabs` field. #[must_use] - pub fn extra(&self) -> &ExtraFields { - &self.0.extra + pub fn collabs(&self) -> &[String] { + self.0 + .get(&SupportedLabel::Collabs) + .and_then(SupportedField::try_as_collabs_ref) + .map_or(&[], Vec::as_slice) + } + + /// Return `parameters` field. + #[must_use] + pub fn parameters(&self) -> Option<&DocumentRefs> { + self.0 + .get(&SupportedLabel::Parameters) + .and_then(SupportedField::try_as_parameters_ref) + } + + /// Add `SupportedField` into the `Metadata`. + /// + /// # Warning + /// + /// Building metadata by-field with this function doesn't ensure the presence of + /// required fields. Use [`Self::from_fields`] or [`Self::from_json`] if it's + /// important for metadata to be valid. + #[cfg(test)] + pub(crate) fn add_field(&mut self, field: SupportedField) { + self.0.insert(field.discriminant(), field); } /// Build `Metadata` object from the metadata fields, doing all necessary validation. 
- pub(crate) fn from_metadata_fields(metadata: InnerMetadata, report: &ProblemReport) -> Self { - if metadata.doc_type.is_none() { - report.missing_field("type", "Missing type field in COSE protected header"); + pub(crate) fn from_fields(fields: Vec, report: &ProblemReport) -> Self { + const REPORT_CONTEXT: &str = "Metadata building"; + + let mut metadata = Metadata(BTreeMap::new()); + for v in fields { + let k = v.discriminant(); + if metadata.0.insert(k, v).is_some() { + report.duplicate_field( + &k.to_string(), + "Duplicate metadata fields are not allowed", + REPORT_CONTEXT, + ); + } } - if metadata.id.is_none() { - report.missing_field("id", "Missing id field in COSE protected header"); + + if metadata.doc_type().is_err() { + report.missing_field("type", REPORT_CONTEXT); } - if metadata.ver.is_none() { - report.missing_field("ver", "Missing ver field in COSE protected header"); + if metadata.doc_id().is_err() { + report.missing_field("id", REPORT_CONTEXT); } - - if metadata.content_type.is_none() { - report.missing_field( - "content type", - "Missing content_type field in COSE protected header", - ); + if metadata.doc_ver().is_err() { + report.missing_field("ver", REPORT_CONTEXT); + } + if metadata.content_type().is_err() { + report.missing_field("content-type", REPORT_CONTEXT); } - Self(metadata) + metadata } - /// Converting COSE Protected Header to Metadata. - pub(crate) fn from_protected_header( - protected: &coset::ProtectedHeader, report: &ProblemReport, - ) -> Self { - let metadata = InnerMetadata::from_protected_header(protected, report); - Self::from_metadata_fields(metadata, report) + /// Build `Metadata` object from the metadata fields, doing all necessary validation. 
+ pub(crate) fn from_json(fields: serde_json::Value) -> anyhow::Result { + let fields = serde::Deserializer::deserialize_map(fields, MetadataDeserializeVisitor)?; + let report = ProblemReport::new("Deserializing metadata from json"); + let metadata = Self::from_fields(fields, &report); + anyhow::ensure!(!report.is_problematic(), "{:?}", report); + Ok(metadata) } } -impl InnerMetadata { +impl Metadata { /// Converting COSE Protected Header to Metadata fields, collecting decoding report /// issues. + #[allow( + clippy::too_many_lines, + reason = "This is a compilation of `coset` decoding and should be replaced once migrated to `minicbor`." + )] pub(crate) fn from_protected_header( - protected: &coset::ProtectedHeader, report: &ProblemReport, + protected: &coset::ProtectedHeader, context: &mut DecodeContext, ) -> Self { /// Context for problem report messages during decoding from COSE protected /// header. const COSE_DECODING_CONTEXT: &str = "COSE Protected Header to Metadata"; - let extra = ExtraFields::from_protected_header(protected, report); - let mut metadata = Self { - extra, - ..Self::default() - }; + let mut metadata_fields = vec![]; if let Some(value) = protected.header.content_type.as_ref() { match ContentType::try_from(value) { - Ok(ct) => metadata.content_type = Some(ct), + Ok(ct) => metadata_fields.push(SupportedField::ContentType(ct)), Err(e) => { - report.conversion_error( + context.report.conversion_error( "COSE protected header content type", &format!("{value:?}"), &format!("Expected ContentType: {e}"), @@ -173,9 +255,9 @@ impl InnerMetadata { |key| matches!(key, coset::Label::Text(label) if label.eq_ignore_ascii_case(CONTENT_ENCODING_KEY)), ) { match ContentEncoding::try_from(value) { - Ok(ce) => metadata.content_encoding = Some(ce), + Ok(ce) => metadata_fields.push(SupportedField::ContentEncoding(ce)), Err(e) => { - report.conversion_error( + context.report.conversion_error( "COSE protected header content encoding", &format!("{value:?}"), 
&format!("Expected ContentEncoding: {e}"), @@ -185,77 +267,270 @@ impl InnerMetadata { } } - metadata.doc_type = decode_document_field_from_protected_header::( + if let Some(value) = decode_document_field_from_protected_header::( protected, - TYPE_KEY, + ID_KEY, COSE_DECODING_CONTEXT, - report, + context.report, ) - .map(|v| v.0); + .map(|v| v.0) + { + metadata_fields.push(SupportedField::Id(value)); + } - metadata.id = decode_document_field_from_protected_header::( + if let Some(value) = decode_document_field_from_protected_header::( protected, - ID_KEY, + VER_KEY, COSE_DECODING_CONTEXT, - report, + context.report, ) - .map(|v| v.0); + .map(|v| v.0) + { + metadata_fields.push(SupportedField::Ver(value)); + } - metadata.ver = decode_document_field_from_protected_header::( + // DocType and DocRef now using minicbor decoding. + if let Some(value) = decode_cose_protected_header_value::( + protected, context, TYPE_KEY, + ) { + metadata_fields.push(SupportedField::Type(value)); + }; + if let Some(value) = decode_cose_protected_header_value::( + protected, context, REF_KEY, + ) { + metadata_fields.push(SupportedField::Ref(value)); + }; + if let Some(value) = decode_cose_protected_header_value::( protected, - VER_KEY, + context, + TEMPLATE_KEY, + ) { + metadata_fields.push(SupportedField::Template(value)); + } + if let Some(value) = decode_cose_protected_header_value::( + protected, context, REPLY_KEY, + ) { + metadata_fields.push(SupportedField::Reply(value)); + } + + if let Some(value) = decode_document_field_from_protected_header( + protected, + SECTION_KEY, COSE_DECODING_CONTEXT, - report, - ) - .map(|v| v.0); + context.report, + ) { + metadata_fields.push(SupportedField::Section(value)); + } - metadata + // process `parameters` field and all its aliases + let (parameters, has_multiple_fields) = [ + PARAMETERS_KEY, + BRAND_ID_KEY, + CAMPAIGN_ID_KEY, + CATEGORY_ID_KEY, + ] + .iter() + .filter_map(|field_name| -> Option { + decode_cose_protected_header_value(protected, 
context, field_name) + }) + .fold((None, false), |(res, _), v| (Some(v), res.is_some())); + if has_multiple_fields { + context.report.duplicate_field( + "Parameters field", + "Only one parameter can be used at a time: either brand_id, campaign_id, category_id", + COSE_DECODING_CONTEXT + ); + } + if let Some(value) = parameters { + metadata_fields.push(SupportedField::Parameters(value)); + } + + if let Some(cbor_doc_collabs) = cose_protected_header_find(protected, |key| { + key == &coset::Label::Text(COLLABS_KEY.to_string()) + }) { + if let Ok(collabs) = cbor_doc_collabs.clone().into_array() { + let mut c = Vec::new(); + for (ids, collaborator) in collabs.iter().cloned().enumerate() { + match collaborator.clone().into_text() { + Ok(collaborator) => { + c.push(collaborator); + }, + Err(_) => { + context.report.conversion_error( + &format!("COSE protected header collaborator index {ids}"), + &format!("{collaborator:?}"), + "Expected a CBOR String", + &format!( + "{COSE_DECODING_CONTEXT}, converting collaborator to String", + ), + ); + }, + } + } + if !c.is_empty() { + metadata_fields.push(SupportedField::Collabs(c)); + } + } else { + context.report.conversion_error( + "CBOR COSE protected header collaborators", + &format!("{cbor_doc_collabs:?}"), + "Expected a CBOR Array", + &format!("{COSE_DECODING_CONTEXT}, converting collaborators to Array",), + ); + }; + } + + Self::from_fields(metadata_fields, context.report) } } impl Display for Metadata { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { writeln!(f, "Metadata {{")?; - writeln!(f, " type: {:?},", self.0.doc_type)?; - writeln!(f, " id: {:?},", self.0.id)?; - writeln!(f, " ver: {:?},", self.0.ver)?; - writeln!(f, " content_type: {:?}", self.0.content_type)?; - writeln!(f, " content_encoding: {:?}", self.0.content_encoding)?; - writeln!(f, " additional_fields: {:?},", self.0.extra)?; + writeln!(f, " type: {:?},", self.doc_type().ok())?; + writeln!(f, " id: {:?},", self.doc_id().ok())?; + 
writeln!(f, " ver: {:?},", self.doc_ver().ok())?; + writeln!(f, " content_type: {:?},", self.content_type().ok())?; + writeln!(f, " content_encoding: {:?},", self.content_encoding())?; + writeln!(f, " additional_fields: {{")?; + writeln!(f, " ref: {:?}", self.doc_ref())?; + writeln!(f, " template: {:?},", self.template())?; + writeln!(f, " reply: {:?},", self.reply())?; + writeln!(f, " section: {:?},", self.section())?; + writeln!(f, " collabs: {:?},", self.collabs())?; + writeln!(f, " parameters: {:?},", self.parameters())?; + writeln!(f, " }},")?; writeln!(f, "}}") } } -impl TryFrom<&Metadata> for coset::Header { - type Error = anyhow::Error; +impl minicbor::Encode<()> for Metadata { + /// Encode as a CBOR map. + /// + /// Note that to put it in an [RFC 8152] protected header. + /// The header must be then encoded as a binary string. + /// + /// Also note that this won't check the presence of the required fields, + /// so the checks must be done elsewhere. + /// + /// [RFC 8152]: https://datatracker.ietf.org/doc/html/rfc8152#autoid-8 + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.map( + self.0 + .len() + .try_into() + .map_err(minicbor::encode::Error::message)?, + )?; + self.0 + .values() + .try_fold(e, |e, field| e.encode(field))? + .ok() + } +} + +/// An error that's been reported, but doesn't affect the further decoding. +/// [`minicbor::Decoder`] should be assumed to be in a correct state and advanced towards +/// the next item. +/// +/// The wrapped error can be returned up the call stack. +#[derive(Debug, thiserror::Error)] +#[error(transparent)] +pub struct TransientDecodeError(pub minicbor::decode::Error); + +/// Creates a [`TransientDecodeError`] and wraps it in a +/// [`minicbor::decode::Error::custom`]. 
+fn custom_transient_decode_error( + message: &str, position: Option, +) -> minicbor::decode::Error { + let mut inner = minicbor::decode::Error::message(message); + if let Some(pos) = position { + inner = inner.at(pos); + } + minicbor::decode::Error::custom(TransientDecodeError(inner)) +} + +impl minicbor::Decode<'_, crate::decode_context::DecodeContext<'_>> for Metadata { + /// Decode from a CBOR map. + /// + /// Note that this won't decode an [RFC 8152] protected header as is. + /// The header must be first decoded as a binary string. + /// + /// Also note that this won't check the absence of the required fields, + /// so the checks must be done elsewhere. + /// + /// [RFC 8152]: https://datatracker.ietf.org/doc/html/rfc8152#autoid-8 + fn decode( + d: &mut Decoder<'_>, ctx: &mut crate::decode_context::DecodeContext<'_>, + ) -> Result { + const REPORT_CONTEXT: &str = "Metadata decoding"; + + let Some(len) = d.map()? else { + return Err(minicbor::decode::Error::message( + "Indefinite map is not supported", + )); + }; - fn try_from(meta: &Metadata) -> Result { - let mut builder = coset::HeaderBuilder::new() - .content_format(CoapContentFormat::from(meta.content_type()?)); + // TODO: verify key order. + // TODO: use helpers from once it's merged. - if let Some(content_encoding) = meta.content_encoding() { - builder = builder.text_value( - CONTENT_ENCODING_KEY.to_string(), - format!("{content_encoding}").into(), - ); + let mut metadata_map = BTreeMap::new(); + let mut first_err = None; + + // This will return an error on the end of input. 
+ for _ in 0..len { + let entry_pos = d.position(); + match d.decode_with::<_, SupportedField>(ctx) { + Ok(field) => { + let label = field.discriminant(); + let entry = metadata_map.entry(label); + if let btree_map::Entry::Vacant(entry) = entry { + entry.insert(field); + } else { + ctx.report.duplicate_field( + &label.to_string(), + "Duplicate metadata fields are not allowed", + REPORT_CONTEXT, + ); + first_err.get_or_insert(custom_transient_decode_error( + "Duplicate fields", + Some(entry_pos), + )); + } + }, + Err(err) + if err + .source() + .is_some_and(::is::) => + { + first_err.get_or_insert(err); + }, + Err(err) => return Err(err), + } } - builder = builder - .text_value( - TYPE_KEY.to_string(), - Value::try_from(CborUuidV4(meta.doc_type()?))?, - ) - .text_value( - ID_KEY.to_string(), - Value::try_from(CborUuidV7(meta.doc_id()?))?, - ) - .text_value( - VER_KEY.to_string(), - Value::try_from(CborUuidV7(meta.doc_ver()?))?, - ); - - builder = meta.0.extra.fill_cose_header_fields(builder)?; - - Ok(builder.build()) + first_err.map_or(Ok(Self(metadata_map)), Err) + } +} + +/// Implements [`serde::de::Visitor`], so that [`Metadata`] can be +/// deserialized by [`serde::Deserializer::deserialize_map`]. +struct MetadataDeserializeVisitor; + +impl<'de> serde::de::Visitor<'de> for MetadataDeserializeVisitor { + type Value = Vec; + + fn expecting(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.write_str("Catalyst Signed Document metadata key-value pairs") + } + + fn visit_map>(self, mut d: A) -> Result { + let mut res = Vec::with_capacity(d.size_hint().unwrap_or(0)); + while let Some(k) = d.next_key::()? { + let v = d.next_value_seed(k)?; + res.push(v); + } + Ok(res) } } diff --git a/rust/signed_doc/src/metadata/section.rs b/rust/signed_doc/src/metadata/section.rs index 01e6a02a1b..f4d4834415 100644 --- a/rust/signed_doc/src/metadata/section.rs +++ b/rust/signed_doc/src/metadata/section.rs @@ -40,12 +40,6 @@ impl FromStr for Section { } } -impl From
for Value { - fn from(value: Section) -> Self { - Value::Text(value.to_string()) - } -} - impl TryFrom<&Value> for Section { type Error = anyhow::Error; @@ -56,3 +50,12 @@ impl TryFrom<&Value> for Section { Self::from_str(str) } } + +impl minicbor::Encode<()> for Section { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.str(self.to_string().as_str())?; + Ok(()) + } +} diff --git a/rust/signed_doc/src/metadata/supported_field.rs b/rust/signed_doc/src/metadata/supported_field.rs new file mode 100644 index 0000000000..99572c4d7d --- /dev/null +++ b/rust/signed_doc/src/metadata/supported_field.rs @@ -0,0 +1,333 @@ +//! Catalyst Signed Document unified metadata field. + +use std::fmt::{self, Display}; +#[cfg(test)] +use std::{cmp, convert::Infallible}; + +use catalyst_types::uuid::UuidV7; +use serde::Deserialize; +use strum::{EnumDiscriminants, EnumTryAs, IntoDiscriminant as _}; + +use crate::{ + metadata::custom_transient_decode_error, ContentEncoding, ContentType, DocType, DocumentRefs, + Section, +}; + +/// COSE label. May be either a signed integer or a string. +#[derive(Copy, Clone, Eq, PartialEq)] +enum Label<'a> { + /// Integer label. + /// + /// Note that COSE isn't strictly limited to 8 bits for a label, but in practice it + /// fits. + /// + /// If for any reason wider bounds would be necessary, + /// then additional variants could be added to the [`Label`]. + U8(u8), + /// Text label. + Str(&'a str), +} + +impl minicbor::Encode<()> for Label<'_> { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + match self { + &Label::U8(u) => e.u8(u), + Label::Str(s) => e.str(s), + }? + .ok() + } +} + +impl<'a, C> minicbor::Decode<'a, C> for Label<'a> { + fn decode(d: &mut minicbor::Decoder<'a>, _: &mut C) -> Result { + match d.datatype()? 
{ + minicbor::data::Type::U8 => d.u8().map(Self::U8), + minicbor::data::Type::String => d.str().map(Self::Str), + _ => { + Err(minicbor::decode::Error::message( + "Datatype is neither 8bit unsigned integer nor text", + ) + .at(d.position())) + }, + } + } +} + +#[cfg(test)] +impl Label<'_> { + /// Compare by [RFC 8949 section 4.2.1] specification. + /// + /// [RFC 8949 section 4.2.1]: https://www.rfc-editor.org/rfc/rfc8949.html#section-4.2.1 + fn rfc8949_cmp( + &self, other: &Self, + ) -> Result> { + let lhs = minicbor::to_vec(self)?; + let rhs = minicbor::to_vec(other)?; + let ord = lhs.len().cmp(&rhs.len()).then_with(|| lhs.cmp(&rhs)); + Ok(ord) + } +} + +impl Display for Label<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Label::U8(u) => write!(f, "{u}"), + Label::Str(s) => f.write_str(s), + } + } +} + +/// Catalyst Signed Document metadata field. +/// Fields are assigned discriminants based on deterministic ordering (see [RFC 8949 +/// section 4.2.1]). +/// +/// Note that [`PartialEq`] implementation compares both keys and values. +/// +/// [RFC 8949 section 4.2.1]: https://www.rfc-editor.org/rfc/rfc8949.html#section-4.2.1 +#[derive(Clone, Debug, PartialEq, EnumDiscriminants, EnumTryAs)] +#[strum_discriminants( + name(SupportedLabel), + derive(Ord, PartialOrd, serde::Deserialize), + serde(rename_all = "kebab-case"), + cfg_attr(test, derive(strum::VariantArray)) +)] +#[non_exhaustive] +#[repr(usize)] +pub(crate) enum SupportedField { + /// `content-type` field. In COSE it's represented as the signed integer `3` (see [RFC + /// 8949 section 3.1]). + /// + /// [RFC 8949 section 3.1]: https://datatracker.ietf.org/doc/html/rfc8152#section-3.1 + ContentType(ContentType) = 0, + /// `id` field. + Id(UuidV7) = 1, + /// `ref` field. + Ref(DocumentRefs) = 2, + /// `ver` field. + Ver(UuidV7) = 3, + /// `type` field. + Type(DocType) = 4, + /// `reply` field. + Reply(DocumentRefs) = 5, + /// `collabs` field. 
+ Collabs(Vec) = 7, + /// `section` field. + Section(Section) = 8, + /// `template` field. + Template(DocumentRefs) = 9, + /// `parameters` field. + Parameters(DocumentRefs) = 10, + /// `Content-Encoding` field. + ContentEncoding(ContentEncoding) = 11, +} + +impl SupportedLabel { + /// Try to convert from an arbitrary COSE [`Label`]. + /// This doesn't allow any aliases. + fn from_cose(label: Label<'_>) -> Option { + match label { + Label::U8(3) => Some(Self::ContentType), + Label::Str("id") => Some(Self::Id), + Label::Str("ref") => Some(Self::Ref), + Label::Str("ver") => Some(Self::Ver), + Label::Str("type") => Some(Self::Type), + Label::Str("reply") => Some(Self::Reply), + Label::Str("collabs") => Some(Self::Collabs), + Label::Str("section") => Some(Self::Section), + Label::Str("template") => Some(Self::Template), + Label::Str("parameters" | "brand_id" | "campaign_id" | "category_id") => { + Some(Self::Parameters) + }, + Label::Str(s) if s.eq_ignore_ascii_case("content-encoding") => { + Some(Self::ContentEncoding) + }, + _ => None, + } + } + + /// Convert to the corresponding COSE [`Label`]. 
+ fn to_cose(self) -> Label<'static> { + match self { + Self::ContentType => Label::U8(3), + Self::Id => Label::Str("id"), + Self::Ref => Label::Str("ref"), + Self::Ver => Label::Str("ver"), + Self::Type => Label::Str("type"), + Self::Reply => Label::Str("reply"), + Self::Collabs => Label::Str("collabs"), + Self::Section => Label::Str("section"), + Self::Template => Label::Str("template"), + Self::Parameters => Label::Str("parameters"), + Self::ContentEncoding => Label::Str("content-encoding"), + } + } +} + +impl Display for SupportedLabel { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Display::fmt(&self.to_cose(), f) + } +} + +impl<'de> serde::de::DeserializeSeed<'de> for SupportedLabel { + type Value = SupportedField; + + fn deserialize>(self, d: D) -> Result { + match self { + SupportedLabel::ContentType => { + Deserialize::deserialize(d).map(SupportedField::ContentType) + }, + SupportedLabel::Id => Deserialize::deserialize(d).map(SupportedField::Id), + SupportedLabel::Ref => Deserialize::deserialize(d).map(SupportedField::Ref), + SupportedLabel::Ver => Deserialize::deserialize(d).map(SupportedField::Ver), + SupportedLabel::Type => Deserialize::deserialize(d).map(SupportedField::Type), + SupportedLabel::Reply => Deserialize::deserialize(d).map(SupportedField::Reply), + SupportedLabel::Collabs => Deserialize::deserialize(d).map(SupportedField::Collabs), + SupportedLabel::Section => Deserialize::deserialize(d).map(SupportedField::Section), + SupportedLabel::Template => Deserialize::deserialize(d).map(SupportedField::Template), + SupportedLabel::Parameters => { + Deserialize::deserialize(d).map(SupportedField::Parameters) + }, + SupportedLabel::ContentEncoding => { + Deserialize::deserialize(d).map(SupportedField::ContentEncoding) + }, + } + } +} + +impl minicbor::Decode<'_, crate::decode_context::DecodeContext<'_>> for SupportedField { + #[allow(clippy::todo, reason = "Not migrated to `minicbor` yet.")] + fn decode( + d: &mut 
minicbor::Decoder<'_>, ctx: &mut crate::decode_context::DecodeContext<'_>, + ) -> Result { + const REPORT_CONTEXT: &str = "Metadata field decoding"; + + let label_pos = d.position(); + let label = Label::decode(d, &mut ())?; + let Some(key) = SupportedLabel::from_cose(label) else { + let value_start = d.position(); + d.skip()?; + let value_end = d.position(); + // Since the high level type isn't know, the value CBOR is tokenized and reported as + // such. + let value = minicbor::decode::Tokenizer::new( + d.input().get(value_start..value_end).unwrap_or_default(), + ) + .to_string(); + ctx.report + .unknown_field(&label.to_string(), &value, REPORT_CONTEXT); + return Err(custom_transient_decode_error( + "Not a supported key", + Some(label_pos), + )); + }; + + let field = match key { + SupportedLabel::ContentType => todo!(), + SupportedLabel::Id => { + d.decode_with(&mut catalyst_types::uuid::CborContext::Tagged) + .map(Self::Id) + }, + SupportedLabel::Ref => d.decode_with(ctx).map(Self::Ref), + SupportedLabel::Ver => { + d.decode_with(&mut catalyst_types::uuid::CborContext::Tagged) + .map(Self::Ver) + }, + SupportedLabel::Type => d.decode_with(ctx).map(Self::Type), + SupportedLabel::Reply => d.decode_with(ctx).map(Self::Reply), + SupportedLabel::Collabs => todo!(), + SupportedLabel::Section => todo!(), + SupportedLabel::Template => d.decode_with(ctx).map(Self::Template), + SupportedLabel::Parameters => d.decode_with(ctx).map(Self::Parameters), + SupportedLabel::ContentEncoding => todo!(), + }?; + + Ok(field) + } +} + +impl minicbor::Encode<()> for SupportedField { + fn encode( + &self, e: &mut minicbor::Encoder, ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + let key = self.discriminant().to_cose(); + e.encode(key)?; + + match self { + SupportedField::ContentType(content_type) => content_type.encode(e, ctx), + SupportedField::Id(uuid_v7) | SupportedField::Ver(uuid_v7) => { + uuid_v7.encode(e, &mut catalyst_types::uuid::CborContext::Tagged) + }, + 
SupportedField::Ref(document_ref) + | SupportedField::Reply(document_ref) + | SupportedField::Template(document_ref) + | SupportedField::Parameters(document_ref) => document_ref.encode(e, ctx), + SupportedField::Type(doc_type) => doc_type.encode(e, ctx), + SupportedField::Collabs(collabs) => { + if !collabs.is_empty() { + e.array( + collabs + .len() + .try_into() + .map_err(minicbor::encode::Error::message)?, + )?; + for c in collabs { + e.str(c)?; + } + } + Ok(()) + }, + SupportedField::Section(section) => section.encode(e, ctx), + SupportedField::ContentEncoding(content_encoding) => content_encoding.encode(e, ctx), + } + } +} + +#[cfg(test)] +mod tests { + use strum::VariantArray as _; + + use super::*; + + /// Checks that [`Label::rfc8949_cmp`] ordering is compliant with the RFC. + #[test] + fn label_rfc8949_cmp() { + assert_eq!( + Label::Str("a").rfc8949_cmp(&Label::Str("a")).unwrap(), + cmp::Ordering::Equal + ); + assert_eq!( + Label::Str("a").rfc8949_cmp(&Label::Str("aa")).unwrap(), + cmp::Ordering::Less + ); + assert_eq!( + Label::Str("a").rfc8949_cmp(&Label::Str("b")).unwrap(), + cmp::Ordering::Less + ); + assert_eq!( + Label::Str("aa").rfc8949_cmp(&Label::Str("b")).unwrap(), + cmp::Ordering::Greater + ); + assert_eq!( + Label::U8(3).rfc8949_cmp(&Label::Str("id")).unwrap(), + cmp::Ordering::Less + ); + } + + /// Checks that [`SupportedLabel`] enum integer values correspond to + /// [`Label::rfc8949_cmp`] ordering. + #[test] + fn supported_label_rfc8949_ord() { + let mut enum_ord = SupportedLabel::VARIANTS.to_vec(); + // Sorting by the Rust enum representation. + enum_ord.sort_unstable(); + + let mut cose_ord = SupportedLabel::VARIANTS.to_vec(); + // Sorting by the corresponding COSE labels. 
+ cose_ord.sort_unstable_by(|lhs, rhs| lhs.to_cose().rfc8949_cmp(&rhs.to_cose()).unwrap()); + + assert_eq!(enum_ord, cose_ord); + } +} diff --git a/rust/signed_doc/src/metadata/utils.rs b/rust/signed_doc/src/metadata/utils.rs index 0e54f10c43..f2df23eb81 100644 --- a/rust/signed_doc/src/metadata/utils.rs +++ b/rust/signed_doc/src/metadata/utils.rs @@ -2,9 +2,22 @@ use catalyst_types::{ problem_report::ProblemReport, - uuid::{CborContext, UuidV4, UuidV7}, + uuid::{CborContext, UuidV7}, }; use coset::{CborSerializable, Label, ProtectedHeader}; +use minicbor::{Decode, Decoder}; + +/// Decode cose protected header value using minicbor decoder. +pub(crate) fn decode_cose_protected_header_value( + protected: &ProtectedHeader, context: &mut C, label: &str, +) -> Option +where T: for<'a> Decode<'a, C> { + cose_protected_header_find(protected, |key| matches!(key, Label::Text(l) if l == label)) + .and_then(|value| { + let bytes = value.clone().to_vec().unwrap_or_default(); + Decoder::new(&bytes).decode_with(context).ok() + }) +} /// Find a value for a predicate in the protected header. pub(crate) fn cose_protected_header_find( @@ -39,24 +52,6 @@ where T: for<'a> TryFrom<&'a coset::cbor::Value> { None } -/// A convenient wrapper over the `UuidV4` type, to implement -/// `TryFrom` and `TryFrom for coset::cbor::Value` traits. -pub(crate) struct CborUuidV4(pub(crate) UuidV4); -impl TryFrom<&coset::cbor::Value> for CborUuidV4 { - type Error = anyhow::Error; - - fn try_from(value: &coset::cbor::Value) -> Result { - Ok(Self(decode_cbor_uuid(value)?)) - } -} -impl TryFrom for coset::cbor::Value { - type Error = anyhow::Error; - - fn try_from(value: CborUuidV4) -> Result { - encode_cbor_uuid(value.0) - } -} - /// A convenient wrapper over the `UuidV7` type, to implement /// `TryFrom` and `TryFrom for coset::cbor::Value` traits. 
pub(crate) struct CborUuidV7(pub(crate) UuidV7); diff --git a/rust/signed_doc/src/providers.rs b/rust/signed_doc/src/providers.rs index 9fd41d1c63..b839c8166e 100644 --- a/rust/signed_doc/src/providers.rs +++ b/rust/signed_doc/src/providers.rs @@ -17,7 +17,7 @@ pub trait VerifyingKeyProvider { /// `CatalystSignedDocument` Provider trait pub trait CatalystSignedDocumentProvider: Send + Sync { - /// Try to get `CatalystSignedDocument` + /// Try to get `CatalystSignedDocument`from document reference fn try_get_doc( &self, doc_ref: &DocumentRef, ) -> impl Future>> + Send; @@ -38,24 +38,34 @@ pub mod tests { use std::{collections::HashMap, time::Duration}; - use catalyst_types::uuid::Uuid; - use super::{ - CatalystId, CatalystSignedDocument, CatalystSignedDocumentProvider, DocumentRef, - VerifyingKey, VerifyingKeyProvider, + CatalystId, CatalystSignedDocument, CatalystSignedDocumentProvider, VerifyingKey, + VerifyingKeyProvider, }; + use crate::{DocLocator, DocumentRef}; /// Simple testing implementation of `CatalystSignedDocumentProvider` - #[derive(Default)] - pub struct TestCatalystSignedDocumentProvider(HashMap); + #[derive(Default, Debug)] + + pub struct TestCatalystSignedDocumentProvider(HashMap); impl TestCatalystSignedDocumentProvider { - /// Inserts document into the `TestCatalystSignedDocumentProvider` + /// Inserts document into the `TestCatalystSignedDocumentProvider` where + /// if document reference is provided use that value. + /// if not use the id and version of the provided doc. /// /// # Errors - /// - Missing document id - pub fn add_document(&mut self, doc: CatalystSignedDocument) -> anyhow::Result<()> { - self.0.insert(doc.doc_id()?.uuid(), doc); + /// Returns error if document reference is not provided and its fail to create one + /// from the given doc. 
+ pub fn add_document( + &mut self, doc_ref: Option, doc: &CatalystSignedDocument, + ) -> anyhow::Result<()> { + if let Some(dr) = doc_ref { + self.0.insert(dr, doc.clone()); + } else { + let dr = DocumentRef::new(doc.doc_id()?, doc.doc_ver()?, DocLocator::default()); + self.0.insert(dr, doc.clone()); + } Ok(()) } } @@ -64,7 +74,7 @@ pub mod tests { async fn try_get_doc( &self, doc_ref: &DocumentRef, ) -> anyhow::Result> { - Ok(self.0.get(&doc_ref.id.uuid()).cloned()) + Ok(self.0.get(doc_ref).cloned()) } fn future_threshold(&self) -> Option { diff --git a/rust/signed_doc/src/signature/mod.rs b/rust/signed_doc/src/signature/mod.rs index 31dec15b51..5452707ffb 100644 --- a/rust/signed_doc/src/signature/mod.rs +++ b/rust/signed_doc/src/signature/mod.rs @@ -4,20 +4,37 @@ pub use catalyst_types::catalyst_id::CatalystId; use catalyst_types::problem_report::ProblemReport; use coset::CoseSignature; +use crate::{Content, Metadata}; + /// Catalyst Signed Document COSE Signature. #[derive(Debug, Clone)] pub struct Signature { /// Key ID kid: CatalystId, - /// COSE Signature - signature: CoseSignature, + /// Raw signature data + signature: Vec, } impl Signature { + /// Creates a `Signature` object from `kid` and raw `signature` bytes + pub(crate) fn new(kid: CatalystId, signature: Vec) -> Self { + Self { kid, signature } + } + + /// Return `kid` field (`CatalystId`), identifier who made a signature + pub fn kid(&self) -> &CatalystId { + &self.kid + } + + /// Return raw signature bytes itself + pub fn signature(&self) -> &[u8] { + &self.signature + } + /// Convert COSE Signature to `Signature`. 
pub(crate) fn from_cose_sig(signature: CoseSignature, report: &ProblemReport) -> Option { match CatalystId::try_from(signature.protected.header.key_id.as_ref()) { - Ok(kid) if kid.is_uri() => Some(Self { kid, signature }), + Ok(kid) if kid.is_uri() => Some(Self::new(kid, signature.signature)), Ok(kid) => { report.invalid_value( "COSE signature protected header key ID", @@ -47,28 +64,9 @@ impl Signature { pub struct Signatures(Vec); impl Signatures { - /// Return a list of author IDs (short form of Catalyst IDs). - #[must_use] - pub(crate) fn authors(&self) -> Vec { - self.kids().into_iter().map(|k| k.as_short_id()).collect() - } - - /// Return a list of Document's Catalyst IDs. - #[must_use] - pub(crate) fn kids(&self) -> Vec { - self.0.iter().map(|sig| sig.kid.clone()).collect() - } - - /// Iterator of COSE signatures object with kids. - pub(crate) fn cose_signatures_with_kids( - &self, - ) -> impl Iterator + use<'_> { - self.0.iter().map(|sig| (&sig.signature, &sig.kid)) - } - - /// List of COSE signatures object. - pub(crate) fn cose_signatures(&self) -> impl Iterator + use<'_> { - self.0.iter().map(|sig| sig.signature.clone()) + /// Return an iterator over the signatures + pub fn iter(&self) -> impl Iterator + use<'_> { + self.0.iter() } /// Add a `Signature` object into the list @@ -105,3 +103,63 @@ impl Signatures { Self(res) } } + +/// Create a binary blob that will be signed. No support for unprotected headers. +/// +/// Described in [section 2 of RFC 8152](https://datatracker.ietf.org/doc/html/rfc8152#section-2). +pub(crate) fn tbs_data( + kid: &CatalystId, metadata: &Metadata, content: &Content, +) -> anyhow::Result> { + Ok(minicbor::to_vec(( + // The context string as per [RFC 8152 section 4.4](https://datatracker.ietf.org/doc/html/rfc8152#section-4.4). + "Signature", + ::from(minicbor::to_vec(metadata)?), + ::from(protected_header_bytes(kid)?), + minicbor::bytes::ByteArray::from([]), + content, + ))?) 
+} + +impl minicbor::Encode<()> for Signature { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.array(3)?; + e.bytes( + protected_header_bytes(&self.kid) + .map_err(minicbor::encode::Error::message)? + .as_slice(), + )?; + // empty unprotected headers + e.map(0)?; + e.bytes(&self.signature)?; + Ok(()) + } +} + +impl minicbor::Encode<()> for Signatures { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.array( + self.0 + .len() + .try_into() + .map_err(minicbor::encode::Error::message)?, + )?; + for sign in self.iter() { + e.encode(sign)?; + } + Ok(()) + } +} + +/// Signatures protected header bytes +/// +/// Described in [section 3.1 of RFC 8152](https://datatracker.ietf.org/doc/html/rfc8152#section-3.1). +fn protected_header_bytes(kid: &CatalystId) -> anyhow::Result> { + let mut p_headers = minicbor::Encoder::new(Vec::new()); + // protected headers (kid field) + p_headers.map(1)?.u8(4)?.encode(kid)?; + Ok(p_headers.into_writer()) +} diff --git a/rust/signed_doc/src/validator/mod.rs b/rust/signed_doc/src/validator/mod.rs index 0c755bdcb5..f2535f0344 100644 --- a/rust/signed_doc/src/validator/mod.rs +++ b/rust/signed_doc/src/validator/mod.rs @@ -5,50 +5,55 @@ pub(crate) mod utils; use std::{ collections::HashMap, - fmt, - sync::LazyLock, + sync::{Arc, LazyLock}, time::{Duration, SystemTime}, }; use anyhow::Context; -use catalyst_types::{ - catalyst_id::{role_index::RoleId, CatalystId}, - problem_report::ProblemReport, - uuid::{Uuid, UuidV4}, -}; -use coset::{CoseSign, CoseSignature}; +use catalyst_types::{catalyst_id::role_index::RoleId, problem_report::ProblemReport}; use rules::{ - ContentEncodingRule, ContentRule, ContentSchema, ContentTypeRule, ParametersRule, RefRule, - ReplyRule, Rules, SectionRule, SignatureKidRule, + ContentEncodingRule, ContentRule, ContentSchema, ContentTypeRule, LinkField, + ParameterLinkRefRule, ParametersRule, 
RefRule, ReplyRule, Rules, SectionRule, SignatureKidRule, }; use crate::{ doc_types::{ - CATEGORY_DOCUMENT_UUID_TYPE, COMMENT_DOCUMENT_UUID_TYPE, COMMENT_TEMPLATE_UUID_TYPE, - PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, PROPOSAL_DOCUMENT_UUID_TYPE, - PROPOSAL_TEMPLATE_UUID_TYPE, + deprecated::{self}, + BRAND_PARAMETERS, CAMPAIGN_PARAMETERS, CATEGORY_PARAMETERS, PROPOSAL, PROPOSAL_COMMENT, + PROPOSAL_COMMENT_TEMPLATE, PROPOSAL_SUBMISSION_ACTION, PROPOSAL_TEMPLATE, }, + metadata::DocType, providers::{CatalystSignedDocumentProvider, VerifyingKeyProvider}, + signature::{tbs_data, Signature}, CatalystSignedDocument, ContentEncoding, ContentType, }; /// A table representing a full set or validation rules per document id. -static DOCUMENT_RULES: LazyLock> = LazyLock::new(document_rules_init); +static DOCUMENT_RULES: LazyLock>> = LazyLock::new(document_rules_init); -/// Returns an [`UuidV4`] from the provided argument, panicking if the argument is -/// invalid. +/// Returns an `DocType` from the provided argument. +/// Reduce redundant conversion. +/// This function should be used for hardcoded values, panic if conversion fail. 
#[allow(clippy::expect_used)] -fn expect_uuidv4(t: T) -> UuidV4 -where T: TryInto { - t.try_into().expect("Must be a valid UUID V4") +pub(crate) fn expect_doc_type(t: T) -> DocType +where + T: TryInto, + T::Error: std::fmt::Debug, +{ + t.try_into().expect("Failed to convert to DocType") } -/// `DOCUMENT_RULES` initialization function -#[allow(clippy::expect_used)] -fn document_rules_init() -> HashMap { - let mut document_rules_map = HashMap::new(); - - let proposal_document_rules = Rules { +/// Proposal +/// Require field: type, id, ver, template, parameters +/// +fn proposal_rule() -> Rules { + // Parameter can be either brand, campaign or category + let parameters = vec![ + BRAND_PARAMETERS.clone(), + CAMPAIGN_PARAMETERS.clone(), + CATEGORY_PARAMETERS.clone(), + ]; + Rules { content_type: ContentTypeRule { exp: ContentType::Json, }, @@ -57,11 +62,11 @@ fn document_rules_init() -> HashMap { optional: false, }, content: ContentRule::Templated { - exp_template_type: expect_uuidv4(PROPOSAL_TEMPLATE_UUID_TYPE), + exp_template_type: PROPOSAL_TEMPLATE.clone(), }, parameters: ParametersRule::Specified { - exp_parameters_type: expect_uuidv4(CATEGORY_DOCUMENT_UUID_TYPE), - optional: true, + exp_parameters_type: parameters.clone(), + optional: false, }, doc_ref: RefRule::NotSpecified, reply: ReplyRule::NotSpecified, @@ -69,11 +74,23 @@ fn document_rules_init() -> HashMap { kid: SignatureKidRule { exp: &[RoleId::Proposer], }, - }; - - document_rules_map.insert(PROPOSAL_DOCUMENT_UUID_TYPE, proposal_document_rules); + param_link_ref: ParameterLinkRefRule::Specified { + field: LinkField::Template, + }, + } +} - let comment_document_rules = Rules { +/// Proposal Comment +/// Require field: type, id, ver, ref, template, parameters +/// +fn proposal_comment_rule() -> Rules { + // Parameter can be either brand, campaign or category + let parameters = vec![ + BRAND_PARAMETERS.clone(), + CAMPAIGN_PARAMETERS.clone(), + CATEGORY_PARAMETERS.clone(), + ]; + Rules { content_type: 
ContentTypeRule { exp: ContentType::Json, }, @@ -82,34 +99,54 @@ fn document_rules_init() -> HashMap { optional: false, }, content: ContentRule::Templated { - exp_template_type: expect_uuidv4(COMMENT_TEMPLATE_UUID_TYPE), + exp_template_type: PROPOSAL_COMMENT_TEMPLATE.clone(), }, doc_ref: RefRule::Specified { - exp_ref_type: expect_uuidv4(PROPOSAL_DOCUMENT_UUID_TYPE), + exp_ref_type: PROPOSAL.clone(), optional: false, }, reply: ReplyRule::Specified { - exp_reply_type: expect_uuidv4(COMMENT_DOCUMENT_UUID_TYPE), + exp_reply_type: PROPOSAL_COMMENT.clone(), optional: true, }, - section: SectionRule::Specified { optional: true }, - parameters: ParametersRule::NotSpecified, + section: SectionRule::NotSpecified, + parameters: ParametersRule::Specified { + exp_parameters_type: parameters.clone(), + optional: false, + }, kid: SignatureKidRule { exp: &[RoleId::Role0], }, - }; - document_rules_map.insert(COMMENT_DOCUMENT_UUID_TYPE, comment_document_rules); + // Link field can be either template or ref + param_link_ref: ParameterLinkRefRule::Specified { + field: LinkField::Template, + }, + } +} + +/// Proposal Submission Action +/// Require fields: type, id, ver, ref, parameters +/// +#[allow(clippy::expect_used)] +fn proposal_submission_action_rule() -> Rules { + // Parameter can be either brand, campaign or category + let parameters = vec![ + BRAND_PARAMETERS.clone(), + CAMPAIGN_PARAMETERS.clone(), + CATEGORY_PARAMETERS.clone(), + ]; let proposal_action_json_schema = jsonschema::options() - .with_draft(jsonschema::Draft::Draft7) - .build( - &serde_json::from_str(include_str!( - "./../../../../specs/signed_docs/docs/payload_schemas/proposal_submission_action.schema.json" - )) - .expect("Must be a valid json file"), - ) - .expect("Must be a valid json scheme file"); - let proposal_submission_action_rules = Rules { + .with_draft(jsonschema::Draft::Draft7) + .build( + &serde_json::from_str(include_str!( + 
"./../../../../specs/signed_docs/docs/payload_schemas/proposal_submission_action.schema.json" + )) + .expect("Must be a valid json file"), + ) + .expect("Must be a valid json scheme file"); + + Rules { content_type: ContentTypeRule { exp: ContentType::Json, }, @@ -119,11 +156,11 @@ fn document_rules_init() -> HashMap { }, content: ContentRule::Static(ContentSchema::Json(proposal_action_json_schema)), parameters: ParametersRule::Specified { - exp_parameters_type: expect_uuidv4(CATEGORY_DOCUMENT_UUID_TYPE), - optional: true, + exp_parameters_type: parameters, + optional: false, }, doc_ref: RefRule::Specified { - exp_ref_type: expect_uuidv4(PROPOSAL_DOCUMENT_UUID_TYPE), + exp_ref_type: PROPOSAL.clone(), optional: false, }, reply: ReplyRule::NotSpecified, @@ -131,11 +168,35 @@ fn document_rules_init() -> HashMap { kid: SignatureKidRule { exp: &[RoleId::Proposer], }, - }; + param_link_ref: ParameterLinkRefRule::Specified { + field: LinkField::Ref, + }, + } +} + +/// `DOCUMENT_RULES` initialization function +fn document_rules_init() -> HashMap> { + let mut document_rules_map = HashMap::new(); + + let proposal_rules = Arc::new(proposal_rule()); + let comment_rules = Arc::new(proposal_comment_rule()); + let action_rules = Arc::new(proposal_submission_action_rule()); + + document_rules_map.insert(PROPOSAL.clone(), Arc::clone(&proposal_rules)); + document_rules_map.insert(PROPOSAL_COMMENT.clone(), Arc::clone(&comment_rules)); + document_rules_map.insert( + PROPOSAL_SUBMISSION_ACTION.clone(), + Arc::clone(&action_rules), + ); + // Insert old rules (for backward compatibility) + document_rules_map.insert( + expect_doc_type(deprecated::COMMENT_DOCUMENT_UUID_TYPE), + Arc::clone(&comment_rules), + ); document_rules_map.insert( - PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, - proposal_submission_action_rules, + expect_doc_type(deprecated::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE), + Arc::clone(&action_rules), ); document_rules_map @@ -164,7 +225,7 @@ where Provider: 
CatalystSignedDocumentProvider { return Ok(false); } - let Some(rules) = DOCUMENT_RULES.get(&doc_type.uuid()) else { + let Some(rules) = DOCUMENT_RULES.get(doc_type) else { doc.report().invalid_value( "`type`", &doc.doc_type()?.to_string(), @@ -281,14 +342,6 @@ where Provider: CatalystSignedDocumentProvider { pub async fn validate_signatures( doc: &CatalystSignedDocument, provider: &impl VerifyingKeyProvider, ) -> anyhow::Result { - let Ok(cose_sign) = doc.as_cose_sign() else { - doc.report().other( - "Cannot build a COSE sign object", - "During encoding signed document as COSE SIGN", - ); - return Ok(false); - }; - if doc.signatures().is_empty() { doc.report().other( "Catalyst Signed Document is unsigned", @@ -299,10 +352,8 @@ pub async fn validate_signatures( let sign_rules = doc .signatures() - .cose_signatures_with_kids() - .map(|(signature, kid)| { - validate_signature(&cose_sign, signature, kid, provider, doc.report()) - }); + .iter() + .map(|sign| validate_signature(doc, sign, provider, doc.report())); let res = futures::future::join_all(sign_rules) .await @@ -316,12 +367,11 @@ pub async fn validate_signatures( /// A single signature validation function async fn validate_signature( - cose_sign: &CoseSign, signature: &CoseSignature, kid: &CatalystId, provider: &Provider, - report: &ProblemReport, + doc: &CatalystSignedDocument, sign: &Signature, provider: &Provider, report: &ProblemReport, ) -> anyhow::Result -where - Provider: VerifyingKeyProvider, -{ +where Provider: VerifyingKeyProvider { + let kid = sign.kid(); + let Some(pk) = provider.try_get_key(kid).await? 
else { report.other( &format!("Missing public key for {kid}."), @@ -330,11 +380,18 @@ where return Ok(false); }; - let tbs_data = cose_sign.tbs_data(&[], signature); - let Ok(signature_bytes) = signature.signature.as_slice().try_into() else { + let Ok(tbs_data) = tbs_data(kid, doc.doc_meta(), doc.content()) else { + doc.report().other( + "Cannot build a COSE to be signed data", + "During creating COSE to be signed data", + ); + return Ok(false); + }; + + let Ok(signature_bytes) = sign.signature().try_into() else { report.invalid_value( "cose signature", - &format!("{}", signature.signature.len()), + &format!("{}", sign.signature().len()), &format!("must be {}", ed25519_dalek::Signature::BYTE_SIZE), "During encoding cose signature to bytes", ); @@ -360,9 +417,11 @@ mod tests { use uuid::{Timestamp, Uuid}; use crate::{ + builder::tests::Builder, + metadata::SupportedField, providers::{tests::TestCatalystSignedDocumentProvider, CatalystSignedDocumentProvider}, validator::{document_rules_init, validate_id_and_ver}, - Builder, UuidV7, + UuidV7, }; #[test] @@ -375,25 +434,23 @@ mod tests { let uuid_v7 = UuidV7::new(); let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string() - })) - .unwrap() + .with_metadata_field(SupportedField::Id(uuid_v7)) + .with_metadata_field(SupportedField::Ver(uuid_v7)) .build(); let is_valid = validate_id_and_ver(&doc, &provider).unwrap(); assert!(is_valid); - let ver = Uuid::new_v7(Timestamp::from_unix_time(now - 1, 0, 0, 0)); - let id = Uuid::new_v7(Timestamp::from_unix_time(now + 1, 0, 0, 0)); + let ver = Uuid::new_v7(Timestamp::from_unix_time(now - 1, 0, 0, 0)) + .try_into() + .unwrap(); + let id = Uuid::new_v7(Timestamp::from_unix_time(now + 1, 0, 0, 0)) + .try_into() + .unwrap(); assert!(ver < id); let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": id.to_string(), - "ver": ver.to_string() - })) - .unwrap() + 
.with_metadata_field(SupportedField::Id(id)) + .with_metadata_field(SupportedField::Ver(ver)) .build(); let is_valid = validate_id_and_ver(&doc, &provider).unwrap(); @@ -404,13 +461,12 @@ mod tests { 0, 0, 0, - )); + )) + .try_into() + .unwrap(); let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": to_far_in_past.to_string(), - "ver": to_far_in_past.to_string() - })) - .unwrap() + .with_metadata_field(SupportedField::Id(to_far_in_past)) + .with_metadata_field(SupportedField::Ver(to_far_in_past)) .build(); let is_valid = validate_id_and_ver(&doc, &provider).unwrap(); @@ -421,13 +477,12 @@ mod tests { 0, 0, 0, - )); + )) + .try_into() + .unwrap(); let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": to_far_in_future.to_string(), - "ver": to_far_in_future.to_string() - })) - .unwrap() + .with_metadata_field(SupportedField::Id(to_far_in_future)) + .with_metadata_field(SupportedField::Ver(to_far_in_future)) .build(); let is_valid = validate_id_and_ver(&doc, &provider).unwrap(); diff --git a/rust/signed_doc/src/validator/rules/content_encoding.rs b/rust/signed_doc/src/validator/rules/content_encoding.rs index f75bcf81e3..4860ae7106 100644 --- a/rust/signed_doc/src/validator/rules/content_encoding.rs +++ b/rust/signed_doc/src/validator/rules/content_encoding.rs @@ -24,6 +24,17 @@ impl ContentEncodingRule { ); return Ok(false); } + if content_encoding.decode(doc.encoded_content()).is_err() { + doc.report().invalid_value( + "payload", + &hex::encode(doc.encoded_content()), + &format!( + "Document content (payload) must decodable by the set content encoding type: {content_encoding}" + ), + "Invalid Document content value", + ); + return Ok(false); + } } else if !self.optional { doc.report().missing_field( "content-encoding", @@ -38,7 +49,7 @@ impl ContentEncodingRule { #[cfg(test)] mod tests { use super::*; - use crate::Builder; + use crate::{builder::tests::Builder, metadata::SupportedField}; #[tokio::test] async fn 
content_encoding_rule_test() { @@ -50,17 +61,18 @@ mod tests { }; let doc = Builder::new() - .with_json_metadata( - serde_json::json!({"content-encoding": content_encoding.to_string() }), - ) - .unwrap() + .with_metadata_field(SupportedField::ContentEncoding(content_encoding)) + .with_content(content_encoding.encode(&[1, 2, 3]).unwrap()) .build(); assert!(rule.check(&doc).await.unwrap()); + // empty content (empty bytes) could not be brotli decoded let doc = Builder::new() - .with_json_metadata(serde_json::json!({})) - .unwrap() + .with_metadata_field(SupportedField::ContentEncoding(content_encoding)) .build(); + assert!(!rule.check(&doc).await.unwrap()); + + let doc = Builder::new().build(); assert!(rule.check(&doc).await.unwrap()); rule.optional = false; diff --git a/rust/signed_doc/src/validator/rules/content_type.rs b/rust/signed_doc/src/validator/rules/content_type.rs index 26aa702fa7..ee6355edb3 100644 --- a/rust/signed_doc/src/validator/rules/content_type.rs +++ b/rust/signed_doc/src/validator/rules/content_type.rs @@ -29,14 +29,14 @@ impl ContentTypeRule { ); return Ok(false); } - let Ok(content) = doc.doc_content().decoded_bytes() else { - doc.report().missing_field( - "payload", + let Ok(content) = doc.decoded_content() else { + doc.report().functional_validation( + "Invalid Document content, cannot get decoded bytes", "Cannot get a document content during the content type field validation", ); return Ok(false); }; - if content_type.validate(content).is_err() { + if self.validate(&content).is_err() { doc.report().invalid_value( "payload", &hex::encode(content), @@ -48,47 +48,137 @@ impl ContentTypeRule { Ok(true) } + + /// Validates the provided `content` bytes to be a defined `ContentType`. 
+ fn validate(&self, content: &[u8]) -> anyhow::Result<()> { + match self.exp { + ContentType::Json => { + if let Err(e) = serde_json::from_slice::<&serde_json::value::RawValue>(content) { + anyhow::bail!("Invalid {} content: {e}", self.exp) + } + }, + ContentType::Cbor => { + let mut decoder = minicbor::Decoder::new(content); + + decoder.skip()?; + + if decoder.position() != content.len() { + anyhow::bail!("Unused bytes remain in the input after decoding") + } + }, + } + Ok(()) + } } #[cfg(test)] mod tests { use super::*; - use crate::Builder; + use crate::{builder::tests::Builder, metadata::SupportedField}; #[tokio::test] - async fn content_type_rule_test() { - let content_type = ContentType::Json; + async fn cbor_with_trailing_bytes_test() { + // valid cbor: {1: 2} but with trailing 0xff + let mut buf = Vec::new(); + let mut enc = minicbor::Encoder::new(&mut buf); + enc.map(1).unwrap().u8(1).unwrap().u8(2).unwrap(); + buf.push(0xFF); // extra byte + + let cbor_rule = ContentTypeRule { + exp: ContentType::Cbor, + }; + + let doc = Builder::new() + .with_metadata_field(SupportedField::ContentType(cbor_rule.exp)) + .with_content(buf) + .build(); + + assert!(matches!(cbor_rule.check(&doc).await, Ok(false))); + } + + #[tokio::test] + async fn malformed_cbor_bytes_test() { + // 0xa2 means a map with 2 key-value pairs, but we only give 1 key + let invalid_bytes = &[0xA2, 0x01]; + + let cbor_rule = ContentTypeRule { + exp: ContentType::Cbor, + }; + + let doc = Builder::new() + .with_metadata_field(SupportedField::ContentType(cbor_rule.exp)) + .with_content(invalid_bytes.into()) + .build(); - let rule = ContentTypeRule { exp: content_type }; + assert!(matches!(cbor_rule.check(&doc).await, Ok(false))); + } + + #[tokio::test] + async fn content_type_cbor_rule_test() { + let cbor_rule = ContentTypeRule { + exp: ContentType::Cbor, + }; + + // with json bytes + let doc = Builder::new() + .with_metadata_field(SupportedField::ContentType(cbor_rule.exp)) + 
.with_content(serde_json::to_vec(&serde_json::json!({})).unwrap()) + .build(); + assert!(matches!(cbor_rule.check(&doc).await, Ok(false))); + + // with cbor bytes + let doc = Builder::new() + .with_metadata_field(SupportedField::ContentType(cbor_rule.exp)) + .with_content(minicbor::to_vec(minicbor::data::Token::Null).unwrap()) + .build(); + assert!(matches!(cbor_rule.check(&doc).await, Ok(true))); + + // without content + let doc = Builder::new() + .with_metadata_field(SupportedField::ContentType(cbor_rule.exp)) + .build(); + assert!(matches!(cbor_rule.check(&doc).await, Ok(false))); + + // with empty content + let doc = Builder::new() + .with_metadata_field(SupportedField::ContentType(cbor_rule.exp)) + .build(); + assert!(matches!(cbor_rule.check(&doc).await, Ok(false))); + } + + #[tokio::test] + async fn content_type_json_rule_test() { + let json_rule = ContentTypeRule { + exp: ContentType::Json, + }; + // with json bytes let doc = Builder::new() - .with_json_metadata(serde_json::json!({"content-type": content_type.to_string() })) - .unwrap() - .with_decoded_content(serde_json::to_vec(&serde_json::json!({})).unwrap()) + .with_metadata_field(SupportedField::ContentType(json_rule.exp)) + .with_content(serde_json::to_vec(&serde_json::json!({})).unwrap()) .build(); - assert!(rule.check(&doc).await.unwrap()); + assert!(matches!(json_rule.check(&doc).await, Ok(true))); + // with cbor bytes let doc = Builder::new() - .with_json_metadata(serde_json::json!({"content-type": ContentType::Cbor.to_string() })) - .unwrap() - .with_decoded_content(serde_json::to_vec(&serde_json::json!({})).unwrap()) + .with_metadata_field(SupportedField::ContentType(json_rule.exp)) + .with_content(minicbor::to_vec(minicbor::data::Token::Null).unwrap()) .build(); - assert!(!rule.check(&doc).await.unwrap()); + assert!(matches!(json_rule.check(&doc).await, Ok(false))); + // without content let doc = Builder::new() - .with_json_metadata(serde_json::json!({"content-type": content_type.to_string() 
})) - .unwrap() + .with_metadata_field(SupportedField::ContentType(json_rule.exp)) .build(); - assert!(!rule.check(&doc).await.unwrap()); + assert!(matches!(json_rule.check(&doc).await, Ok(false))); + // with empty content let doc = Builder::new() - .with_json_metadata(serde_json::json!({"content-type": content_type.to_string() })) - .unwrap() - .with_decoded_content(vec![]) + .with_metadata_field(SupportedField::ContentType(json_rule.exp)) .build(); - assert!(!rule.check(&doc).await.unwrap()); + assert!(matches!(json_rule.check(&doc).await, Ok(false))); let doc = Builder::new().build(); - assert!(!rule.check(&doc).await.unwrap()); + assert!(matches!(json_rule.check(&doc).await, Ok(false))); } } diff --git a/rust/signed_doc/src/validator/rules/doc_ref.rs b/rust/signed_doc/src/validator/rules/doc_ref.rs index 53fec6825f..a3ad0f5932 100644 --- a/rust/signed_doc/src/validator/rules/doc_ref.rs +++ b/rust/signed_doc/src/validator/rules/doc_ref.rs @@ -1,13 +1,10 @@ //! `ref` rule type impl. -use catalyst_types::{ - problem_report::ProblemReport, - uuid::{Uuid, UuidV4}, -}; +use catalyst_types::problem_report::ProblemReport; use crate::{ - providers::CatalystSignedDocumentProvider, validator::utils::validate_provided_doc, - CatalystSignedDocument, + providers::CatalystSignedDocumentProvider, validator::utils::validate_doc_refs, + CatalystSignedDocument, DocType, }; /// `ref` field validation rule @@ -16,7 +13,7 @@ pub(crate) enum RefRule { /// Is 'ref' specified Specified { /// expected `type` field of the referenced doc - exp_ref_type: UuidV4, + exp_ref_type: DocType, /// optional flag for the `ref` field optional: bool, }, @@ -29,6 +26,7 @@ impl RefRule { &self, doc: &CatalystSignedDocument, provider: &Provider, ) -> anyhow::Result where Provider: CatalystSignedDocumentProvider { + let context: &str = "Ref rule check"; if let Self::Specified { exp_ref_type, optional, @@ -36,13 +34,12 @@ impl RefRule { { if let Some(doc_ref) = doc.doc_meta().doc_ref() { let ref_validator 
= |ref_doc: CatalystSignedDocument| { - referenced_doc_check(&ref_doc, exp_ref_type.uuid(), "ref", doc.report()) + referenced_doc_check(&ref_doc, exp_ref_type, "ref", doc.report()) }; - return validate_provided_doc(&doc_ref, provider, doc.report(), ref_validator) - .await; + return validate_doc_refs(doc_ref, provider, doc.report(), ref_validator).await; } else if !optional { doc.report() - .missing_field("ref", "Document must have a ref field"); + .missing_field("ref", &format!("{context}, document must have ref field")); return Ok(false); } } @@ -51,7 +48,7 @@ impl RefRule { doc.report().unknown_field( "ref", &doc_ref.to_string(), - "Document does not expect to have a ref field", + &format!("{context}, document does not expect to have a ref field"), ); return Ok(false); } @@ -63,17 +60,19 @@ impl RefRule { /// A generic implementation of the referenced document validation. pub(crate) fn referenced_doc_check( - ref_doc: &CatalystSignedDocument, exp_ref_type: Uuid, field_name: &str, report: &ProblemReport, + ref_doc: &CatalystSignedDocument, exp_ref_type: &DocType, field_name: &str, + report: &ProblemReport, ) -> bool { let Ok(ref_doc_type) = ref_doc.doc_type() else { report.missing_field("type", "Referenced document must have type field"); return false; }; - if ref_doc_type.uuid() != exp_ref_type { + + if ref_doc_type != exp_ref_type { report.invalid_value( field_name, - ref_doc_type.to_string().as_str(), - exp_ref_type.to_string().as_str(), + &ref_doc_type.to_string(), + &exp_ref_type.to_string(), "Invalid referenced document type", ); return false; @@ -82,11 +81,15 @@ pub(crate) fn referenced_doc_check( } #[cfg(test)] +#[allow(clippy::similar_names, clippy::too_many_lines)] mod tests { - use catalyst_types::uuid::UuidV7; + use catalyst_types::uuid::{UuidV4, UuidV7}; use super::*; - use crate::{providers::tests::TestCatalystSignedDocumentProvider, Builder}; + use crate::{ + builder::tests::Builder, metadata::SupportedField, + 
providers::tests::TestCatalystSignedDocumentProvider, DocLocator, DocumentRef, + }; #[tokio::test] async fn ref_rule_specified_test() { @@ -96,98 +99,152 @@ mod tests { let valid_referenced_doc_id = UuidV7::new(); let valid_referenced_doc_ver = UuidV7::new(); + let different_id_and_ver_referenced_doc_id = UuidV7::new(); + let different_id_and_ver_referenced_doc_ver = UuidV7::new(); let another_type_referenced_doc_id = UuidV7::new(); let another_type_referenced_doc_ver = UuidV7::new(); let missing_type_referenced_doc_id = UuidV7::new(); let missing_type_referenced_doc_ver = UuidV7::new(); - // prepare replied documents + // Prepare provider documents { - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": valid_referenced_doc_id.to_string(), - "ver": valid_referenced_doc_ver.to_string(), - "type": exp_ref_type.to_string() - })) - .unwrap() + // Valid one + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(valid_referenced_doc_id)) + .with_metadata_field(SupportedField::Ver(valid_referenced_doc_ver)) + .with_metadata_field(SupportedField::Type(exp_ref_type.into())) .build(); - provider.add_document(ref_doc).unwrap(); - - // reply doc with other `type` field - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": another_type_referenced_doc_id.to_string(), - "ver": another_type_referenced_doc_ver.to_string(), - "type": UuidV4::new().to_string() - })) - .unwrap() + provider.add_document(None, &doc).unwrap(); + + // Having different id and ver in registered reference + let doc_ref = DocumentRef::new(UuidV7::new(), UuidV7::new(), DocLocator::default()); + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(different_id_and_ver_referenced_doc_id)) + .with_metadata_field(SupportedField::Ver(different_id_and_ver_referenced_doc_ver)) + .with_metadata_field(SupportedField::Type(exp_ref_type.into())) .build(); - provider.add_document(ref_doc).unwrap(); - - // missing `type` field in the 
referenced document - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": missing_type_referenced_doc_id.to_string(), - "ver": missing_type_referenced_doc_ver.to_string(), - })) - .unwrap() + provider.add_document(Some(doc_ref), &doc).unwrap(); + + // Having another `type` field + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(another_type_referenced_doc_id)) + .with_metadata_field(SupportedField::Ver(another_type_referenced_doc_id)) + .with_metadata_field(SupportedField::Type(UuidV4::new().into())) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); + + // Missing `type` field in the referenced document + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(missing_type_referenced_doc_id)) + .with_metadata_field(SupportedField::Ver(missing_type_referenced_doc_ver)) + .build(); + provider.add_document(None, &doc).unwrap(); } - // all correct + // Create a document where `ref` field is required and referencing a valid document in + // provider. Using doc ref of new implementation. let rule = RefRule::Specified { - exp_ref_type, + exp_ref_type: exp_ref_type.into(), optional: false, }; let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": {"id": valid_referenced_doc_id.to_string(), "ver": valid_referenced_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + valid_referenced_doc_id, + valid_referenced_doc_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(rule.check(&doc, &provider).await.unwrap()); - // all correct, `ref` field is missing, but its optional + // Having multiple refs, where one ref doc is not found. 
+ // Checking match all of + let doc = Builder::new() + .with_metadata_field(SupportedField::Ref( + vec![ + DocumentRef::new( + valid_referenced_doc_id, + valid_referenced_doc_ver, + DocLocator::default(), + ), + DocumentRef::new( + different_id_and_ver_referenced_doc_id, + different_id_and_ver_referenced_doc_ver, + DocLocator::default(), + ), + ] + .into(), + )) + .build(); + assert!(!rule.check(&doc, &provider).await.unwrap()); + + // Invalid the ref doc id and ver doesn't match the id and ver in ref doc ref + let doc = Builder::new() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + different_id_and_ver_referenced_doc_id, + different_id_and_ver_referenced_doc_ver, + DocLocator::default(), + )] + .into(), + )) + .build(); + assert!(!rule.check(&doc, &provider).await.unwrap()); + + // All correct, `ref` field is missing, but its optional let rule = RefRule::Specified { - exp_ref_type, + exp_ref_type: exp_ref_type.into(), optional: true, }; let doc = Builder::new().build(); assert!(rule.check(&doc, &provider).await.unwrap()); - // missing `ref` field, but its required + // Missing `ref` field, but its required let rule = RefRule::Specified { - exp_ref_type, + exp_ref_type: exp_ref_type.into(), optional: false, }; let doc = Builder::new().build(); assert!(!rule.check(&doc, &provider).await.unwrap()); - // reference to the document with another `type` field + // Reference to the document with another `type` field let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": {"id": another_type_referenced_doc_id.to_string(), "ver": another_type_referenced_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + another_type_referenced_doc_id, + another_type_referenced_doc_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); - // missing `type` field in the referenced document + // Missing `type` field in the referenced 
document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": {"id": missing_type_referenced_doc_id.to_string(), "ver": missing_type_referenced_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + missing_type_referenced_doc_id, + missing_type_referenced_doc_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // cannot find a referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": {"id": UuidV7::new().to_string(), "ver": UuidV7::new().to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + UuidV7::new(), + UuidV7::new(), + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } @@ -203,8 +260,9 @@ mod tests { let ref_id = UuidV7::new(); let ref_ver = UuidV7::new(); let doc = Builder::new() - .with_json_metadata(serde_json::json!({"ref": {"id": ref_id.to_string(), "ver": ref_ver.to_string() } })) - .unwrap() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new(ref_id, ref_ver, DocLocator::default())].into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } diff --git a/rust/signed_doc/src/validator/rules/mod.rs b/rust/signed_doc/src/validator/rules/mod.rs index 165dcb043a..47fafa5cb7 100644 --- a/rust/signed_doc/src/validator/rules/mod.rs +++ b/rust/signed_doc/src/validator/rules/mod.rs @@ -8,6 +8,7 @@ use crate::{providers::CatalystSignedDocumentProvider, CatalystSignedDocument}; mod content_encoding; mod content_type; mod doc_ref; +mod param_link_ref; mod parameters; mod reply; mod section; @@ -17,6 +18,7 @@ mod template; pub(crate) use content_encoding::ContentEncodingRule; pub(crate) use content_type::ContentTypeRule; pub(crate) use doc_ref::RefRule; +pub(crate) use param_link_ref::{LinkField, ParameterLinkRefRule}; pub(crate) use 
parameters::ParametersRule; pub(crate) use reply::ReplyRule; pub(crate) use section::SectionRule; @@ -41,6 +43,8 @@ pub(crate) struct Rules { pub(crate) parameters: ParametersRule, /// `kid` field validation rule pub(crate) kid: SignatureKidRule, + /// Link reference rule + pub(crate) param_link_ref: ParameterLinkRefRule, } impl Rules { @@ -52,12 +56,13 @@ impl Rules { let rules = [ self.content_type.check(doc).boxed(), self.content_encoding.check(doc).boxed(), - self.doc_ref.check(doc, provider).boxed(), self.content.check(doc, provider).boxed(), + self.doc_ref.check(doc, provider).boxed(), self.reply.check(doc, provider).boxed(), self.section.check(doc).boxed(), self.parameters.check(doc, provider).boxed(), self.kid.check(doc).boxed(), + self.param_link_ref.check(doc, provider).boxed(), ]; let res = futures::future::join_all(rules) diff --git a/rust/signed_doc/src/validator/rules/param_link_ref.rs b/rust/signed_doc/src/validator/rules/param_link_ref.rs new file mode 100644 index 0000000000..8c9206d617 --- /dev/null +++ b/rust/signed_doc/src/validator/rules/param_link_ref.rs @@ -0,0 +1,187 @@ +//! Parameter linked reference rule impl. 
+ +use crate::{ + providers::CatalystSignedDocumentProvider, validator::utils::validate_doc_refs, + CatalystSignedDocument, +}; + +/// Filed that is being used for linked ref +pub(crate) enum LinkField { + /// Ref field + Ref, + /// Template field + Template, +} + +/// Parameter Link reference validation rule +pub(crate) enum ParameterLinkRefRule { + /// Link ref specified + Specified { + /// Filed that is being used for linked ref + field: LinkField, + }, + /// Link ref is not specified + #[allow(dead_code)] + NotSpecified, +} + +impl ParameterLinkRefRule { + /// Validation rule + pub(crate) async fn check( + &self, doc: &CatalystSignedDocument, provider: &Provider, + ) -> anyhow::Result + where Provider: CatalystSignedDocumentProvider { + let context: &str = "Parameter link ref rule check"; + if let Self::Specified { field } = self { + let param_link_ref_validator = |ref_doc: CatalystSignedDocument| { + // The parameters MUST be the same, if not record the error + if doc.doc_meta().parameters() != ref_doc.doc_meta().parameters() { + doc.report().invalid_value( + "parameters", + &format!("Reference doc param: {:?}", ref_doc.doc_meta().parameters()), + &format!("Doc param: {:?}", doc.doc_meta().parameters()), + &format!("{context}, parameters must be the same"), + ); + return false; + } + true + }; + // Which field is use for linked reference + let param_link_ref = match field { + LinkField::Ref => doc.doc_meta().doc_ref(), + LinkField::Template => doc.doc_meta().template(), + }; + + let Some(param_link_ref) = param_link_ref else { + doc.report() + .missing_field("Link ref", &format!("{context}: Invalid link reference")); + return Ok(false); + }; + + return validate_doc_refs( + param_link_ref, + provider, + doc.report(), + param_link_ref_validator, + ) + .await; + } + Ok(true) + } +} + +#[cfg(test)] +mod tests { + use catalyst_types::uuid::{UuidV4, UuidV7}; + + use crate::{ + builder::tests::Builder, + metadata::SupportedField, + 
providers::tests::TestCatalystSignedDocumentProvider, + validator::rules::param_link_ref::{LinkField, ParameterLinkRefRule}, + DocLocator, DocumentRef, + }; + #[tokio::test] + async fn param_link_ref_specified_test() { + let mut provider = TestCatalystSignedDocumentProvider::default(); + + let doc1_id = UuidV7::new(); + let doc1_ver = UuidV7::new(); + let doc2_id = UuidV7::new(); + let doc2_ver = UuidV7::new(); + + let doc_type = UuidV4::new(); + + let category_id = UuidV7::new(); + let category_ver = UuidV7::new(); + let category_type = UuidV4::new(); + + let campaign_id = UuidV7::new(); + let campaign_ver = UuidV7::new(); + let campaign_type = UuidV4::new(); + + // Prepare provider documents + { + // Doc being referenced - parameter MUST match + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(doc1_id)) + .with_metadata_field(SupportedField::Ver(doc1_ver)) + .with_metadata_field(SupportedField::Type(doc_type.into())) + .with_metadata_field(SupportedField::Parameters( + vec![ + DocumentRef::new(category_id, category_ver, DocLocator::default()), + DocumentRef::new(campaign_id, campaign_ver, DocLocator::default()), + ] + .into(), + )) + .build(); + provider.add_document(None, &doc).unwrap(); + + // Doc being referenced - parameter does not match + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(doc2_id)) + .with_metadata_field(SupportedField::Ver(doc2_ver)) + .with_metadata_field(SupportedField::Type(doc_type.into())) + .with_metadata_field(SupportedField::Parameters( + vec![DocumentRef::new( + campaign_id, + campaign_ver, + DocLocator::default(), + )] + .into(), + )) + .build(); + provider.add_document(None, &doc).unwrap(); + + // Category doc + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(category_id)) + .with_metadata_field(SupportedField::Ver(category_ver)) + .with_metadata_field(SupportedField::Type(category_type.into())) + .build(); + provider.add_document(None, &doc).unwrap(); + + // Campaign doc + 
let doc = Builder::new() + .with_metadata_field(SupportedField::Id(campaign_id)) + .with_metadata_field(SupportedField::Ver(campaign_ver)) + .with_metadata_field(SupportedField::Type(campaign_type.into())) + .build(); + provider.add_document(None, &doc).unwrap(); + } + + // Use Ref as a linked reference + let rule = ParameterLinkRefRule::Specified { + field: LinkField::Ref, + }; + // Parameter must match + let doc = Builder::new() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new(doc1_id, doc1_ver, DocLocator::default())].into(), + )) + .with_metadata_field(SupportedField::Parameters( + vec![ + DocumentRef::new(category_id, category_ver, DocLocator::default()), + DocumentRef::new(campaign_id, campaign_ver, DocLocator::default()), + ] + .into(), + )) + .build(); + assert!(rule.check(&doc, &provider).await.unwrap()); + + // Parameter does not match + let doc = Builder::new() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new(doc2_id, doc2_ver, DocLocator::default())].into(), + )) + .with_metadata_field(SupportedField::Parameters( + vec![ + DocumentRef::new(category_id, category_ver, DocLocator::default()), + DocumentRef::new(campaign_id, campaign_ver, DocLocator::default()), + ] + .into(), + )) + .build(); + assert!(!rule.check(&doc, &provider).await.unwrap()); + } +} diff --git a/rust/signed_doc/src/validator/rules/parameters.rs b/rust/signed_doc/src/validator/rules/parameters.rs index 290d158439..80fc9447bd 100644 --- a/rust/signed_doc/src/validator/rules/parameters.rs +++ b/rust/signed_doc/src/validator/rules/parameters.rs @@ -1,11 +1,9 @@ //! `parameters` rule type impl. 
-use catalyst_types::uuid::UuidV4; - use super::doc_ref::referenced_doc_check; use crate::{ - providers::CatalystSignedDocumentProvider, validator::utils::validate_provided_doc, - CatalystSignedDocument, + providers::CatalystSignedDocumentProvider, validator::utils::validate_doc_refs, + CatalystSignedDocument, DocType, }; /// `parameters` field validation rule @@ -14,11 +12,12 @@ pub(crate) enum ParametersRule { /// Is `parameters` specified Specified { /// expected `type` field of the parameter doc - exp_parameters_type: UuidV4, + exp_parameters_type: Vec, /// optional flag for the `parameters` field optional: bool, }, /// `parameters` is not specified + #[allow(unused)] NotSpecified, } @@ -28,30 +27,31 @@ impl ParametersRule { &self, doc: &CatalystSignedDocument, provider: &Provider, ) -> anyhow::Result where Provider: CatalystSignedDocumentProvider { + let context: &str = "Parameter rule check"; if let Self::Specified { exp_parameters_type, optional, } = self { - if let Some(parameters) = doc.doc_meta().parameters() { - let parameters_validator = |replied_doc: CatalystSignedDocument| { - referenced_doc_check( - &replied_doc, - exp_parameters_type.uuid(), - "parameters", - doc.report(), - ) + if let Some(parameters_ref) = doc.doc_meta().parameters() { + let parameters_validator = |ref_doc: CatalystSignedDocument| { + // Check that the type matches one of the expected ones + exp_parameters_type.iter().any(|exp_type| { + referenced_doc_check(&ref_doc, exp_type, "parameters", doc.report()) + }) }; - return validate_provided_doc( - ¶meters, + return validate_doc_refs( + parameters_ref, provider, doc.report(), parameters_validator, ) .await; } else if !optional { - doc.report() - .missing_field("parameters", "Document must have a parameters field"); + doc.report().missing_field( + "parameters", + &format!("{context}, document must have parameters field"), + ); return Ok(false); } } @@ -60,7 +60,7 @@ impl ParametersRule { doc.report().unknown_field( "parameters", 
¶meters.to_string(), - "Document does not expect to have a parameters field", + &format!("{context}, document does not expect to have a parameters field"), ); return Ok(false); } @@ -71,112 +71,179 @@ impl ParametersRule { } #[cfg(test)] +#[allow(clippy::similar_names, clippy::too_many_lines)] mod tests { use catalyst_types::uuid::{UuidV4, UuidV7}; use super::*; - use crate::{providers::tests::TestCatalystSignedDocumentProvider, Builder}; + use crate::{ + builder::tests::Builder, metadata::SupportedField, + providers::tests::TestCatalystSignedDocumentProvider, DocLocator, DocumentRef, + }; #[tokio::test] async fn ref_rule_specified_test() { let mut provider = TestCatalystSignedDocumentProvider::default(); - let exp_parameters_type = UuidV4::new(); + let exp_parameters_cat_type = UuidV4::new(); + let exp_parameters_cam_type = UuidV4::new(); + let exp_parameters_brand_type = UuidV4::new(); + + let exp_param_type: Vec = vec![ + exp_parameters_cat_type.into(), + exp_parameters_cam_type.into(), + exp_parameters_brand_type.into(), + ]; let valid_category_doc_id = UuidV7::new(); let valid_category_doc_ver = UuidV7::new(); + let valid_brand_doc_id = UuidV7::new(); + let valid_brand_doc_ver = UuidV7::new(); let another_type_category_doc_id = UuidV7::new(); let another_type_category_doc_ver = UuidV7::new(); let missing_type_category_doc_id = UuidV7::new(); let missing_type_category_doc_ver = UuidV7::new(); - // prepare replied documents + // Prepare provider documents { - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": valid_category_doc_id.to_string(), - "ver": valid_category_doc_ver.to_string(), - "type": exp_parameters_type.to_string() - })) - .unwrap() + // Category doc + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(valid_category_doc_id)) + .with_metadata_field(SupportedField::Ver(valid_category_doc_ver)) + .with_metadata_field(SupportedField::Type(exp_parameters_cat_type.into())) .build(); - 
provider.add_document(ref_doc).unwrap(); - - // reply doc with other `type` field - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": another_type_category_doc_id.to_string(), - "ver": another_type_category_doc_ver.to_string(), - "type": UuidV4::new().to_string() - })) - .unwrap() + provider.add_document(None, &doc).unwrap(); + + // Brand doc + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(valid_brand_doc_id)) + .with_metadata_field(SupportedField::Ver(valid_brand_doc_ver)) + .with_metadata_field(SupportedField::Type(exp_parameters_cat_type.into())) .build(); - provider.add_document(ref_doc).unwrap(); - - // missing `type` field in the referenced document - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": missing_type_category_doc_id.to_string(), - "ver": missing_type_category_doc_ver.to_string(), - })) - .unwrap() + provider.add_document(None, &doc).unwrap(); + + // Other type + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(another_type_category_doc_id)) + .with_metadata_field(SupportedField::Ver(another_type_category_doc_ver)) + .with_metadata_field(SupportedField::Type(UuidV4::new().into())) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); + + // Missing `type` field in the referenced document + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(missing_type_category_doc_id)) + .with_metadata_field(SupportedField::Ver(missing_type_category_doc_ver)) + .build(); + provider.add_document(None, &doc).unwrap(); } - // all correct + // Create a document where `parameters` field is required and referencing a valid document + // in provider. Using doc ref of new implementation. 
let rule = ParametersRule::Specified { - exp_parameters_type, + exp_parameters_type: exp_param_type.clone(), optional: false, }; let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "parameters": {"id": valid_category_doc_id.to_string(), "ver": valid_category_doc_ver } - })) - .unwrap() + .with_metadata_field(SupportedField::Parameters( + vec![DocumentRef::new( + valid_category_doc_id, + valid_category_doc_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(rule.check(&doc, &provider).await.unwrap()); - // all correct, `parameters` field is missing, but its optional + // Parameters contain multiple ref + let doc = Builder::new() + .with_metadata_field(SupportedField::Parameters( + vec![ + DocumentRef::new( + valid_category_doc_id, + valid_category_doc_ver, + DocLocator::default(), + ), + DocumentRef::new( + valid_brand_doc_id, + valid_brand_doc_ver, + DocLocator::default(), + ), + ] + .into(), + )) + .build(); + assert!(rule.check(&doc, &provider).await.unwrap()); + + // Parameters contain multiple ref, but one of them is invalid (not registered). 
+ let doc = Builder::new() + .with_metadata_field(SupportedField::Parameters( + vec![ + DocumentRef::new( + valid_category_doc_id, + valid_category_doc_ver, + DocLocator::default(), + ), + DocumentRef::new(UuidV7::new(), UuidV7::new(), DocLocator::default()), + ] + .into(), + )) + .build(); + assert!(!rule.check(&doc, &provider).await.unwrap()); + + // All correct, `parameters` field is missing, but its optional let rule = ParametersRule::Specified { - exp_parameters_type, + exp_parameters_type: exp_param_type.clone(), optional: true, }; let doc = Builder::new().build(); assert!(rule.check(&doc, &provider).await.unwrap()); - // missing `parameters` field, but its required + // Missing `parameters` field, but its required let rule = ParametersRule::Specified { - exp_parameters_type, + exp_parameters_type: exp_param_type, optional: false, }; let doc = Builder::new().build(); assert!(!rule.check(&doc, &provider).await.unwrap()); - // reference to the document with another `type` field + // Reference to the document with another `type` field let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "parameters": {"id": another_type_category_doc_id.to_string(), "ver": another_type_category_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Parameters( + vec![DocumentRef::new( + another_type_category_doc_id, + another_type_category_doc_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); - // missing `type` field in the referenced document + // Missing `type` field in the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "parameters": {"id": missing_type_category_doc_id.to_string(), "ver": missing_type_category_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Parameters( + vec![DocumentRef::new( + missing_type_category_doc_id, + missing_type_category_doc_ver, + DocLocator::default(), + )] + .into(), + )) 
.build(); assert!(!rule.check(&doc, &provider).await.unwrap()); - // cannot find a referenced document + // Cannot find a referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "parameters": {"id": UuidV7::new().to_string(), "ver": UuidV7::new().to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Parameters( + vec![DocumentRef::new( + UuidV7::new(), + UuidV7::new(), + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } @@ -192,8 +259,9 @@ mod tests { let ref_id = UuidV7::new(); let ref_ver = UuidV7::new(); let doc = Builder::new() - .with_json_metadata(serde_json::json!({"parameters": {"id": ref_id.to_string(), "ver": ref_ver.to_string() } })) - .unwrap() + .with_metadata_field(SupportedField::Parameters( + vec![DocumentRef::new(ref_id, ref_ver, DocLocator::default())].into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } diff --git a/rust/signed_doc/src/validator/rules/reply.rs b/rust/signed_doc/src/validator/rules/reply.rs index 5ac256667d..43696f2adc 100644 --- a/rust/signed_doc/src/validator/rules/reply.rs +++ b/rust/signed_doc/src/validator/rules/reply.rs @@ -1,11 +1,9 @@ //! `reply` rule type impl. 
-use catalyst_types::uuid::UuidV4; - use super::doc_ref::referenced_doc_check; use crate::{ - providers::CatalystSignedDocumentProvider, validator::utils::validate_provided_doc, - CatalystSignedDocument, + providers::CatalystSignedDocumentProvider, validator::utils::validate_doc_refs, + CatalystSignedDocument, DocType, }; /// `reply` field validation rule @@ -14,7 +12,7 @@ pub(crate) enum ReplyRule { /// Is 'reply' specified Specified { /// expected `type` field of the replied doc - exp_reply_type: UuidV4, + exp_reply_type: DocType, /// optional flag for the `ref` field optional: bool, }, @@ -28,50 +26,50 @@ impl ReplyRule { &self, doc: &CatalystSignedDocument, provider: &Provider, ) -> anyhow::Result where Provider: CatalystSignedDocumentProvider { + let context: &str = "Reply rule check"; if let Self::Specified { exp_reply_type, optional, } = self { - if let Some(reply) = doc.doc_meta().reply() { - let reply_validator = |replied_doc: CatalystSignedDocument| { - if !referenced_doc_check( - &replied_doc, - exp_reply_type.uuid(), - "reply", - doc.report(), - ) { + if let Some(reply_ref) = doc.doc_meta().reply() { + let reply_validator = |ref_doc: CatalystSignedDocument| { + // Validate type + if !referenced_doc_check(&ref_doc, exp_reply_type, "reply", doc.report()) { return false; } - let Some(doc_ref) = doc.doc_meta().doc_ref() else { + + // Get `ref` from both the doc and the ref doc + let Some(ref_doc_dr) = ref_doc.doc_meta().doc_ref() else { doc.report() - .missing_field("ref", "Document must have a ref field"); + .missing_field("Referenced doc `ref` field", context); return false; }; - let Some(replied_doc_ref) = replied_doc.doc_meta().doc_ref() else { - doc.report() - .missing_field("ref", "Referenced document must have ref field"); + let Some(doc_dr) = doc.doc_meta().doc_ref() else { + doc.report().missing_field("Document `ref` field", context); return false; }; - if replied_doc_ref.id != doc_ref.id { + // Checking the ref field of ref doc, it should match 
the ref field of the doc + // If not record the error + if ref_doc_dr != doc_dr { doc.report().invalid_value( - "reply", - doc_ref.id .to_string().as_str(), - replied_doc_ref.id.to_string().as_str(), - "Invalid referenced document. Document ID should aligned with the replied document.", - ); + "ref", + &format!("Reference doc ref: {ref_doc_dr}"), + &format!("Doc ref: {doc_dr}"), + &format!("{context}, ref must be the same"), + ); return false; } - true }; - return validate_provided_doc(&reply, provider, doc.report(), reply_validator) - .await; + return validate_doc_refs(reply_ref, provider, doc.report(), reply_validator).await; } else if !optional { - doc.report() - .missing_field("reply", "Document must have a reply field"); + doc.report().missing_field( + "reply", + &format!("{context}, document must have reply field"), + ); return Ok(false); } } @@ -80,7 +78,7 @@ impl ReplyRule { doc.report().unknown_field( "reply", &reply.to_string(), - "Document does not expect to have a reply field", + &format!("{context}, document does not expect to have a reply field"), ); return Ok(false); } @@ -95,7 +93,10 @@ mod tests { use catalyst_types::uuid::{UuidV4, UuidV7}; use super::*; - use crate::{providers::tests::TestCatalystSignedDocumentProvider, Builder}; + use crate::{ + builder::tests::Builder, metadata::SupportedField, + providers::tests::TestCatalystSignedDocumentProvider, DocLocator, DocumentRef, + }; #[allow(clippy::too_many_lines)] #[tokio::test] @@ -110,76 +111,104 @@ mod tests { let valid_replied_doc_ver = UuidV7::new(); let another_type_replied_doc_ver = UuidV7::new(); let another_type_replied_doc_id = UuidV7::new(); - let missing_ref_replied_doc_ver = UuidV7::new(); let missing_ref_replied_doc_id = UuidV7::new(); + let missing_ref_replied_doc_ver = UuidV7::new(); let missing_type_replied_doc_ver = UuidV7::new(); let missing_type_replied_doc_id = UuidV7::new(); - // prepare replied documents + // Prepare provider documents { - let ref_doc = Builder::new() - 
.with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "id": valid_replied_doc_id.to_string(), - "ver": valid_replied_doc_ver.to_string(), - "type": exp_reply_type.to_string() - })) - .unwrap() + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(valid_replied_doc_id)) + .with_metadata_field(SupportedField::Ver(valid_replied_doc_ver)) + .with_metadata_field(SupportedField::Type(exp_reply_type.into())) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); - // reply doc with other `type` field - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "id": another_type_replied_doc_id.to_string(), - "ver": another_type_replied_doc_ver.to_string(), - "type": UuidV4::new().to_string() - })) - .unwrap() + // Reply doc with other `type` field + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(another_type_replied_doc_id)) + .with_metadata_field(SupportedField::Ver(another_type_replied_doc_ver)) + .with_metadata_field(SupportedField::Type(UuidV4::new().into())) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); - // missing `ref` field in the referenced document - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": missing_ref_replied_doc_id.to_string(), - "ver": missing_ref_replied_doc_ver.to_string(), - "type": exp_reply_type.to_string() - })) - .unwrap() + // Missing `type` field in the referenced document + let doc = Builder::new() + 
.with_metadata_field(SupportedField::Id(missing_type_replied_doc_id)) + .with_metadata_field(SupportedField::Ver(missing_type_replied_doc_ver)) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); - // missing `type` field in the referenced document - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "id": missing_type_replied_doc_id.to_string(), - "ver": missing_type_replied_doc_ver.to_string(), - })) - .unwrap() + // Missing `ref` field in the referenced document + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(missing_ref_replied_doc_id)) + .with_metadata_field(SupportedField::Ver(missing_ref_replied_doc_ver)) + .with_metadata_field(SupportedField::Type(exp_reply_type.into())) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); } - // all correct + // Create a document where `reply` field is required and referencing a valid document in + // provider. let rule = ReplyRule::Specified { - exp_reply_type, + exp_reply_type: exp_reply_type.into(), optional: false, }; + + // common_ref_id ref reply to valid_replied_doc_id. 
common_ref_id ref filed should match + // valid_replied_doc_id ref field let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "reply": { "id": valid_replied_doc_id.to_string(), "ver": valid_replied_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + valid_replied_doc_id, + valid_replied_doc_ver, + DocLocator::default(), + )] + .into(), + )) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); - assert!(rule.check(&doc, &provider).await.unwrap()); + assert!( + rule.check(&doc, &provider).await.unwrap(), + "{:?}", + doc.problem_report() + ); // all correct, `reply` field is missing, but its optional let rule = ReplyRule::Specified { - exp_reply_type, + exp_reply_type: exp_reply_type.into(), optional: true, }; let doc = Builder::new().build(); @@ -187,73 +216,136 @@ mod tests { // missing `reply` field, but its required let rule = ReplyRule::Specified { - exp_reply_type, + exp_reply_type: exp_reply_type.into(), optional: false, }; let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - })) - .unwrap() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing `ref` field let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "reply": { "id": valid_replied_doc_id.to_string(), "ver": valid_replied_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + valid_replied_doc_id, + valid_replied_doc_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, 
&provider).await.unwrap()); // reference to the document with another `type` field let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "reply": { "id": another_type_replied_doc_id.to_string(), "ver": another_type_replied_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + another_type_replied_doc_id, + another_type_replied_doc_ver, + DocLocator::default(), + )] + .into(), + )) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing `ref` field in the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "reply": { "id": missing_ref_replied_doc_id.to_string(), "ver": missing_type_replied_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + missing_ref_replied_doc_id, + missing_ref_replied_doc_ver, + DocLocator::default(), + )] + .into(), + )) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing `type` field in the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "reply": { "id": missing_type_replied_doc_id.to_string(), "ver": missing_type_replied_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + missing_type_replied_doc_id, + missing_type_replied_doc_ver, + DocLocator::default(), + )] + .into(), + )) + 
.with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // `ref` field does not align with the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": UuidV7::new().to_string(), "ver": UuidV7::new().to_string() }, - "reply": { "id": valid_replied_doc_id.to_string(), "ver": valid_replied_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + valid_replied_doc_id, + valid_replied_doc_ver, + DocLocator::default(), + )] + .into(), + )) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + UuidV7::new(), + UuidV7::new(), + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // cannot find a referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "reply": {"id": UuidV7::new().to_string(), "ver": UuidV7::new().to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + UuidV7::new(), + UuidV7::new(), + DocLocator::default(), + )] + .into(), + )) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } @@ -269,8 +361,9 @@ mod tests { let ref_id = UuidV7::new(); let ref_ver = UuidV7::new(); let doc = Builder::new() - .with_json_metadata(serde_json::json!({"reply": {"id": ref_id.to_string(), "ver": ref_ver.to_string() } })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new(ref_id, ref_ver, DocLocator::default())].into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } diff --git 
a/rust/signed_doc/src/validator/rules/section.rs b/rust/signed_doc/src/validator/rules/section.rs index 8720353425..46a51cc9c7 100644 --- a/rust/signed_doc/src/validator/rules/section.rs +++ b/rust/signed_doc/src/validator/rules/section.rs @@ -5,6 +5,7 @@ use crate::CatalystSignedDocument; /// `section` field validation rule pub(crate) enum SectionRule { /// Is 'section' specified + #[allow(dead_code)] Specified { /// optional flag for the `section` field optional: bool, @@ -42,15 +43,12 @@ impl SectionRule { #[cfg(test)] mod tests { use super::*; - use crate::Builder; + use crate::{builder::tests::Builder, metadata::SupportedField}; #[tokio::test] async fn section_rule_specified_test() { let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "section": "$".to_string() - })) - .unwrap() + .with_metadata_field(SupportedField::Section("$".parse().unwrap())) .build(); let rule = SectionRule::Specified { optional: false }; assert!(rule.check(&doc).await.unwrap()); @@ -72,10 +70,7 @@ mod tests { assert!(rule.check(&doc).await.unwrap()); let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "section": "$".to_string() - })) - .unwrap() + .with_metadata_field(SupportedField::Section("$".parse().unwrap())) .build(); assert!(!rule.check(&doc).await.unwrap()); } diff --git a/rust/signed_doc/src/validator/rules/signature_kid.rs b/rust/signed_doc/src/validator/rules/signature_kid.rs index 2e45517b8e..60d8f2e101 100644 --- a/rust/signed_doc/src/validator/rules/signature_kid.rs +++ b/rust/signed_doc/src/validator/rules/signature_kid.rs @@ -47,7 +47,7 @@ mod tests { use ed25519_dalek::ed25519::signature::Signer; use super::*; - use crate::{Builder, ContentType}; + use crate::{builder::tests::Builder, metadata::SupportedField, ContentType}; #[tokio::test] async fn signature_kid_rule_test() { @@ -60,15 +60,12 @@ mod tests { let kid = CatalystId::new("cardano", None, pk).with_role(RoleId::Role0); let doc = Builder::new() - 
.with_decoded_content(serde_json::to_vec(&serde_json::Value::Null).unwrap()) - .with_json_metadata(serde_json::json!({ - "type": UuidV4::new().to_string(), - "id": UuidV7::new().to_string(), - "ver": UuidV7::new().to_string(), - "content-type": ContentType::Json.to_string(), - })) - .unwrap() - .add_signature(|m| sk.sign(&m).to_vec(), &kid) + .with_metadata_field(SupportedField::Id(UuidV7::new())) + .with_metadata_field(SupportedField::Ver(UuidV7::new())) + .with_metadata_field(SupportedField::Type(UuidV4::new().into())) + .with_metadata_field(SupportedField::ContentType(ContentType::Json)) + .with_content(vec![1, 2, 3]) + .add_signature(|m| sk.sign(&m).to_vec(), kid) .unwrap() .build(); diff --git a/rust/signed_doc/src/validator/rules/template.rs b/rust/signed_doc/src/validator/rules/template.rs index 0d5b0c9aaa..17bc432bfb 100644 --- a/rust/signed_doc/src/validator/rules/template.rs +++ b/rust/signed_doc/src/validator/rules/template.rs @@ -2,12 +2,10 @@ use std::fmt::Write; -use catalyst_types::uuid::UuidV4; - use super::doc_ref::referenced_doc_check; use crate::{ metadata::ContentType, providers::CatalystSignedDocumentProvider, - validator::utils::validate_provided_doc, CatalystSignedDocument, + validator::utils::validate_doc_refs, CatalystSignedDocument, DocType, }; /// Enum represents different content schemas, against which documents content would be @@ -21,9 +19,10 @@ pub(crate) enum ContentSchema { /// Document's content validation rule pub(crate) enum ContentRule { /// Based on the 'template' field and loaded corresponding template document + #[allow(dead_code)] Templated { /// expected `type` field of the template - exp_template_type: UuidV4, + exp_template_type: DocType, }, /// Statically defined document's content schema. 
/// `template` field should not been specified @@ -36,31 +35,27 @@ pub(crate) enum ContentRule { impl ContentRule { /// Field validation rule + #[allow(dead_code)] pub(crate) async fn check( &self, doc: &CatalystSignedDocument, provider: &Provider, ) -> anyhow::Result where Provider: CatalystSignedDocumentProvider { + let context = "Content/Template rule check"; if let Self::Templated { exp_template_type } = self { let Some(template_ref) = doc.doc_meta().template() else { doc.report() - .missing_field("template", "Document must have a template field"); + .missing_field("template", &format!("{context}, doc")); return Ok(false); }; - let template_validator = |template_doc: CatalystSignedDocument| { - if !referenced_doc_check( - &template_doc, - exp_template_type.uuid(), - "template", - doc.report(), - ) { + if !referenced_doc_check(&template_doc, exp_template_type, "template", doc.report()) + { return false; } - let Ok(template_content_type) = template_doc.doc_content_type() else { doc.report().missing_field( "content-type", - "Referenced template document must have a content-type field", + &format!("{context}, referenced document must have a content-type field"), ); return false; }; @@ -72,20 +67,15 @@ impl ContentRule { }, } }; - return validate_provided_doc( - &template_ref, - provider, - doc.report(), - template_validator, - ) - .await; + return validate_doc_refs(template_ref, provider, doc.report(), template_validator) + .await; } if let Self::Static(content_schema) = self { if let Some(template) = doc.doc_meta().template() { doc.report().unknown_field( "template", &template.to_string(), - "Document does not expect to have a template field", + &format!("{context} Static, Document does not expect to have a template field",) ); return Ok(false); } @@ -97,7 +87,7 @@ impl ContentRule { doc.report().unknown_field( "template", &template.to_string(), - "Document does not expect to have a template field", + &format!("{context} Not Specified, Document does not expect to 
have a template field",) ); return Ok(false); } @@ -112,14 +102,14 @@ impl ContentRule { fn templated_json_schema_check( doc: &CatalystSignedDocument, template_doc: &CatalystSignedDocument, ) -> bool { - let Ok(template_content) = template_doc.doc_content().decoded_bytes() else { - doc.report().missing_field( - "payload", - "Referenced template document must have a content", + let Ok(template_content) = template_doc.decoded_content() else { + doc.report().functional_validation( + "Invalid document content, cannot get decoded bytes", + "Cannot get a referenced template document content during the templated validation", ); return false; }; - let Ok(template_json_schema) = serde_json::from_slice(template_content) else { + let Ok(template_json_schema) = serde_json::from_slice(&template_content) else { doc.report().functional_validation( "Template document content must be json encoded", "Invalid referenced template document content", @@ -139,15 +129,22 @@ fn templated_json_schema_check( content_schema_check(doc, &ContentSchema::Json(schema_validator)) } - +#[allow(dead_code)] /// Validating the document's content against the provided schema fn content_schema_check(doc: &CatalystSignedDocument, schema: &ContentSchema) -> bool { - let Ok(doc_content) = doc.doc_content().decoded_bytes() else { + let Ok(doc_content) = doc.decoded_content() else { + doc.report().functional_validation( + "Invalid Document content, cannot get decoded bytes", + "Cannot get a document content during the templated validation", + ); + return false; + }; + if doc_content.is_empty() { doc.report() .missing_field("payload", "Document must have a content"); return false; }; - let Ok(doc_json) = serde_json::from_slice(doc_content) else { + let Ok(doc_json) = serde_json::from_slice(&doc_content) else { doc.report().functional_validation( "Document content must be json encoded", "Invalid referenced template document content", @@ -181,10 +178,13 @@ fn content_schema_check(doc: &CatalystSignedDocument, 
schema: &ContentSchema) -> #[cfg(test)] mod tests { - use catalyst_types::uuid::UuidV7; + use catalyst_types::uuid::{UuidV4, UuidV7}; use super::*; - use crate::{providers::tests::TestCatalystSignedDocumentProvider, Builder}; + use crate::{ + builder::tests::Builder, metadata::SupportedField, + providers::tests::TestCatalystSignedDocumentProvider, DocLocator, DocumentRef, + }; #[allow(clippy::too_many_lines)] #[tokio::test] @@ -203,181 +203,204 @@ mod tests { let missing_content_template_doc_id = UuidV7::new(); let invalid_content_template_doc_id = UuidV7::new(); - // prepare replied documents + // Prepare provider documents { - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": valid_template_doc_id.to_string(), - "ver": valid_template_doc_id.to_string(), - "type": exp_template_type.to_string(), - "content-type": content_type.to_string(), - })) - .unwrap() - .with_decoded_content(json_schema.clone()) + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(valid_template_doc_id)) + .with_metadata_field(SupportedField::Ver(valid_template_doc_id)) + .with_metadata_field(SupportedField::Type(exp_template_type.into())) + .with_metadata_field(SupportedField::ContentType(content_type)) + .with_content(json_schema.clone()) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); // reply doc with other `type` field let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": another_type_template_doc_id.to_string(), - "ver": another_type_template_doc_id.to_string(), - "type": UuidV4::new().to_string(), - "content-type": content_type.to_string(), - })) - .unwrap() - .with_decoded_content(json_schema.clone()) + .with_metadata_field(SupportedField::Id(another_type_template_doc_id)) + .with_metadata_field(SupportedField::Ver(another_type_template_doc_id)) + .with_metadata_field(SupportedField::Type(UuidV4::new().into())) + 
.with_metadata_field(SupportedField::ContentType(content_type)) + .with_content(json_schema.clone()) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &ref_doc).unwrap(); // missing `type` field in the referenced document let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": missing_type_template_doc_id.to_string(), - "ver": missing_type_template_doc_id.to_string(), - "content-type": content_type.to_string(), - })) - .unwrap() - .with_decoded_content(json_schema.clone()) + .with_metadata_field(SupportedField::Id(missing_type_template_doc_id)) + .with_metadata_field(SupportedField::Ver(missing_type_template_doc_id)) + .with_metadata_field(SupportedField::ContentType(content_type)) + .with_content(json_schema.clone()) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &ref_doc).unwrap(); // missing `content-type` field in the referenced document let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": missing_content_type_template_doc_id.to_string(), - "ver": missing_content_type_template_doc_id.to_string(), - "type": exp_template_type.to_string(), - })) - .unwrap() - .with_decoded_content(json_schema.clone()) + .with_metadata_field(SupportedField::Id(missing_content_type_template_doc_id)) + .with_metadata_field(SupportedField::Ver(missing_content_type_template_doc_id)) + .with_metadata_field(SupportedField::Type(exp_template_type.into())) + .with_content(json_schema.clone()) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &ref_doc).unwrap(); // missing content let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": missing_content_template_doc_id.to_string(), - "ver": missing_content_template_doc_id.to_string(), - "type": exp_template_type.to_string(), - "content-type": content_type.to_string(), - })) - .unwrap() + .with_metadata_field(SupportedField::Id(missing_content_template_doc_id)) + 
.with_metadata_field(SupportedField::Ver(missing_content_template_doc_id)) + .with_metadata_field(SupportedField::Type(exp_template_type.into())) + .with_metadata_field(SupportedField::ContentType(content_type)) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &ref_doc).unwrap(); // invalid content, must be json encoded let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": invalid_content_template_doc_id.to_string(), - "ver": invalid_content_template_doc_id.to_string(), - "type": exp_template_type.to_string(), - "content-type": content_type.to_string(), - })) - .unwrap() - .with_decoded_content(vec![]) + .with_metadata_field(SupportedField::Id(invalid_content_template_doc_id)) + .with_metadata_field(SupportedField::Ver(invalid_content_template_doc_id)) + .with_metadata_field(SupportedField::Type(exp_template_type.into())) + .with_metadata_field(SupportedField::ContentType(content_type)) + .with_content(vec![]) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &ref_doc).unwrap(); } - // all correct - let rule = ContentRule::Templated { exp_template_type }; + // Create a document where `templates` field is required and referencing a valid document + // in provider. Using doc ref of new implementation. 
+ let rule = ContentRule::Templated { + exp_template_type: exp_template_type.into(), + }; let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": valid_template_doc_id.to_string(), "ver": valid_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + valid_template_doc_id, + valid_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + .with_content(json_content.clone()) .build(); assert!(rule.check(&doc, &provider).await.unwrap()); // missing `template` field, but its required - let doc = Builder::new() - .with_json_metadata(serde_json::json!({})) - .unwrap() - .with_decoded_content(json_content.clone()) - .build(); + let doc = Builder::new().with_content(json_content.clone()).build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing content - let rule = ContentRule::Templated { exp_template_type }; + let rule = ContentRule::Templated { + exp_template_type: exp_template_type.into(), + }; let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": valid_template_doc_id.to_string(), "ver": valid_template_doc_id.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + valid_template_doc_id, + valid_template_doc_id, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // content not a json encoded - let rule = ContentRule::Templated { exp_template_type }; + let rule = ContentRule::Templated { + exp_template_type: exp_template_type.into(), + }; let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": valid_template_doc_id.to_string(), "ver": valid_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(vec![]) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + valid_template_doc_id, + 
valid_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + .with_content(vec![1, 2, 3]) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // reference to the document with another `type` field let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": another_type_template_doc_id.to_string(), "ver": another_type_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + another_type_template_doc_id, + another_type_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + .with_content(json_content.clone()) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing `type` field in the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": missing_type_template_doc_id.to_string(), "ver": missing_type_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + missing_type_template_doc_id, + missing_type_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + .with_content(json_content.clone()) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing `content-type` field in the referenced doc - let rule = ContentRule::Templated { exp_template_type }; + let rule = ContentRule::Templated { + exp_template_type: exp_template_type.into(), + }; let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": missing_content_type_template_doc_id.to_string(), "ver": missing_content_type_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + missing_content_type_template_doc_id, + missing_content_type_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + 
.with_content(json_content.clone()) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing content in the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": missing_content_template_doc_id.to_string(), "ver": missing_content_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + missing_content_template_doc_id, + missing_content_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + .with_content(json_content.clone()) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // content not a json encoded in the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": invalid_content_template_doc_id.to_string(), "ver": invalid_content_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + invalid_content_template_doc_id, + invalid_content_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + .with_content(json_content.clone()) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // cannot find a referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": UuidV7::new().to_string(), "ver": UuidV7::new().to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + UuidV7::new(), + UuidV7::new(), + DocLocator::default(), + )] + .into(), + )) + .with_content(json_content.clone()) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } @@ -397,9 +420,7 @@ mod tests { // all correct let rule = ContentRule::Static(json_schema); - let doc = Builder::new() - .with_decoded_content(json_content.clone()) - .build(); + let doc = 
Builder::new().with_content(json_content.clone()).build(); assert!(rule.check(&doc, &provider).await.unwrap()); // missing content @@ -407,15 +428,17 @@ mod tests { assert!(!rule.check(&doc, &provider).await.unwrap()); // content not a json encoded - let doc = Builder::new().with_decoded_content(vec![]).build(); + let doc = Builder::new().with_content(vec![1, 2, 3]).build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // defined `template` field which should be absent let ref_id = UuidV7::new(); let ref_ver = UuidV7::new(); - let doc = Builder::new().with_decoded_content(json_content) - .with_json_metadata(serde_json::json!({"template": {"id": ref_id.to_string(), "ver": ref_ver.to_string() } })) - .unwrap() + let doc = Builder::new() + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new(ref_id, ref_ver, DocLocator::default())].into(), + )) + .with_content(json_content) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } @@ -432,8 +455,9 @@ mod tests { let ref_id = UuidV7::new(); let ref_ver = UuidV7::new(); let doc = Builder::new() - .with_json_metadata(serde_json::json!({"template": {"id": ref_id.to_string(), "ver": ref_ver.to_string() } })) - .unwrap() + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new(ref_id, ref_ver, DocLocator::default())].into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } diff --git a/rust/signed_doc/src/validator/utils.rs b/rust/signed_doc/src/validator/utils.rs index 4b25a9bbfa..1dc2a06cb3 100644 --- a/rust/signed_doc/src/validator/utils.rs +++ b/rust/signed_doc/src/validator/utils.rs @@ -2,7 +2,9 @@ use catalyst_types::problem_report::ProblemReport; -use crate::{providers::CatalystSignedDocumentProvider, CatalystSignedDocument, DocumentRef}; +use crate::{ + providers::CatalystSignedDocumentProvider, CatalystSignedDocument, DocumentRef, DocumentRefs, +}; /// A helper validation document function, which validates a document from the /// 
`ValidationDataProvider`. @@ -13,13 +15,56 @@ where Provider: CatalystSignedDocumentProvider, Validator: Fn(CatalystSignedDocument) -> bool, { + const CONTEXT: &str = "Validation data provider"; + + // General check for document ref + + // Getting the Signed Document instance from a doc ref. + // The reference document must exist if let Some(doc) = provider.try_get_doc(doc_ref).await? { + let id = doc + .doc_id() + .inspect_err(|_| report.missing_field("id", CONTEXT))?; + + let ver = doc + .doc_ver() + .inspect_err(|_| report.missing_field("ver", CONTEXT))?; + // id and version must match the values in ref doc + if &id != doc_ref.id() && &ver != doc_ref.ver() { + report.invalid_value( + "id and version", + &format!("id: {id}, ver: {ver}"), + &format!("id: {}, ver: {}", doc_ref.id(), doc_ref.ver()), + CONTEXT, + ); + return Ok(false); + } Ok(validator(doc)) } else { report.functional_validation( format!("Cannot retrieve a document {doc_ref}").as_str(), - "Validation data provider could not return a corresponding document.", + CONTEXT, ); Ok(false) } } + +/// Validate the document references +/// Document all possible error in doc report (no fail fast) +pub(crate) async fn validate_doc_refs( + doc_refs: &DocumentRefs, provider: &Provider, report: &ProblemReport, validator: Validator, +) -> anyhow::Result +where + Provider: CatalystSignedDocumentProvider, + Validator: Fn(CatalystSignedDocument) -> bool, +{ + let mut all_valid = true; + + for dr in doc_refs.doc_refs() { + let is_valid = validate_provided_doc(dr, provider, report, &validator).await?; + if !is_valid { + all_valid = false; + } + } + Ok(all_valid) +} diff --git a/rust/signed_doc/tests/comment.rs b/rust/signed_doc/tests/comment.rs index 1c746e589c..16f9364694 100644 --- a/rust/signed_doc/tests/comment.rs +++ b/rust/signed_doc/tests/comment.rs @@ -1,143 +1,319 @@ -//! Integration test for comment document validation part. +//! Test for Proposal Comment document. +//! 
Require fields: type, id, ver, ref, template, parameters +//! -use catalyst_signed_doc::{providers::tests::TestCatalystSignedDocumentProvider, *}; -use catalyst_types::catalyst_id::role_index::RoleId; +use std::sync::LazyLock; -mod common; +use catalyst_signed_doc::{ + doc_types::deprecated, providers::tests::TestCatalystSignedDocumentProvider, *, +}; +#[allow(clippy::unwrap_used)] +static DUMMY_PROPOSAL_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "type": doc_types::PROPOSAL.clone(), + })) + .unwrap() + .empty_content() + .build() + .unwrap() +}); + +#[allow(clippy::unwrap_used)] +static DUMMY_BRAND_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "type": doc_types::BRAND_PARAMETERS.clone(), + })) + .unwrap() + .empty_content() + .build() + .unwrap() +}); + +#[allow(clippy::unwrap_used)] +static COMMENT_TEMPLATE_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "content-encoding": ContentEncoding::Brotli.to_string(), + "type": doc_types::PROPOSAL_COMMENT_TEMPLATE.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": {}, + "required": [], + "additionalProperties": false + })) + .unwrap() + .build() + .unwrap() +}); + +#[allow(clippy::unwrap_used)] +static COMMENT_REF_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "content-encoding": 
ContentEncoding::Brotli.to_string(), + "type": doc_types::PROPOSAL_COMMENT.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "ref": { + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + }, + "template": { + "id": COMMENT_TEMPLATE_DOC.doc_id().unwrap(), + "ver": COMMENT_TEMPLATE_DOC.doc_ver().unwrap(), + }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap() +}); + +// Given a proposal comment document `doc`: +// +// - Parameters: +// The `parameters` field in `doc` points to a brand document. +// The parameter rule defines the link reference as `template`, This mean the document +// that `ref` field in `doc` points to (in this case = template_doc), must have the same +// `parameters` value as `doc`. +// +// - Reply: +// The `reply` field in `doc` points to another comment (`ref_doc`). +// The rule requires that the `ref` field in `ref_doc` must match the `ref` field in `doc` #[tokio::test] async fn test_valid_comment_doc() { - let (proposal_doc, proposal_doc_id, proposal_doc_ver) = - common::create_dummy_doc(doc_types::PROPOSAL_DOCUMENT_UUID_TYPE).unwrap(); - let (template_doc, template_doc_id, template_doc_ver) = - common::create_dummy_doc(doc_types::COMMENT_TEMPLATE_UUID_TYPE).unwrap(); - - let uuid_v7 = UuidV7::new(); - let (doc, ..) 
= common::create_dummy_signed_doc( - serde_json::json!({ + // Create a main comment doc, contain all fields mention in the document (except + // revocations and section) + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::COMMENT_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL_COMMENT.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "ref": { + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + }, "template": { - "id": template_doc_id, - "ver": template_doc_ver + "id": COMMENT_TEMPLATE_DOC.doc_id().unwrap(), + "ver": COMMENT_TEMPLATE_DOC.doc_ver().unwrap(), }, - "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver + "reply": { + "id": COMMENT_REF_DOC.doc_id().unwrap(), + "ver": COMMENT_REF_DOC.doc_ver().unwrap() + }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), } - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Role0, - ) - .unwrap(); + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(template_doc).unwrap(); - provider.add_document(proposal_doc).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &COMMENT_REF_DOC).unwrap(); + provider.add_document(None, &COMMENT_TEMPLATE_DOC).unwrap(); let is_valid = validator::validate(&doc, &provider).await.unwrap(); - - assert!(is_valid); + assert!(is_valid, "{:?}", doc.problem_report()); } +// The same as above but test with the old type #[tokio::test] -async fn test_valid_comment_doc_with_reply() { - let empty_json = 
serde_json::to_vec(&serde_json::json!({})).unwrap(); - - let (proposal_doc, proposal_doc_id, proposal_doc_ver) = - common::create_dummy_doc(doc_types::PROPOSAL_DOCUMENT_UUID_TYPE).unwrap(); - let (template_doc, template_doc_id, template_doc_ver) = - common::create_dummy_doc(doc_types::COMMENT_TEMPLATE_UUID_TYPE).unwrap(); - - let comment_doc_id = UuidV7::new(); - let comment_doc_ver = UuidV7::new(); - let comment_doc = Builder::new() +async fn test_valid_comment_doc_old_type() { + let doc = Builder::new() .with_json_metadata(serde_json::json!({ - "id": comment_doc_id, - "ver": comment_doc_ver, - "type": doc_types::COMMENT_DOCUMENT_UUID_TYPE, "content-type": ContentType::Json.to_string(), - "template": { "id": template_doc_id.to_string(), "ver": template_doc_ver.to_string() }, + "content-encoding": ContentEncoding::Brotli.to_string(), + "type": deprecated::COMMENT_DOCUMENT_UUID_TYPE, + "id": UuidV7::new(), + "ver": UuidV7::new(), "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + }, + "template": { + "id": COMMENT_TEMPLATE_DOC.doc_id().unwrap(), + "ver": COMMENT_TEMPLATE_DOC.doc_ver().unwrap(), }, + "reply": { + "id": COMMENT_REF_DOC.doc_id().unwrap(), + "ver": COMMENT_REF_DOC.doc_ver().unwrap() + }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } })) .unwrap() - .with_decoded_content(empty_json.clone()) - .build(); + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); + + let mut provider = TestCatalystSignedDocumentProvider::default(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &COMMENT_REF_DOC).unwrap(); + provider.add_document(None, &COMMENT_TEMPLATE_DOC).unwrap(); + + let is_valid = validator::validate(&doc, &provider).await.unwrap(); + assert!(is_valid, "{:?}", 
doc.problem_report()); +} - let uuid_v7 = UuidV7::new(); - let (doc, ..) = common::create_dummy_signed_doc( - serde_json::json!({ +#[tokio::test] +async fn test_invalid_comment_doc_missing_parameters() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::COMMENT_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL_COMMENT.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "ref": { + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + }, "template": { - "id": template_doc_id, - "ver": template_doc_ver + "id": COMMENT_TEMPLATE_DOC.doc_id().unwrap(), + "ver": COMMENT_TEMPLATE_DOC.doc_ver().unwrap(), + }, + "reply": { + "id": COMMENT_REF_DOC.doc_id().unwrap(), + "ver": COMMENT_REF_DOC.doc_ver().unwrap() }, + // "parameters": { + // "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + // "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + // } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); + + let mut provider = TestCatalystSignedDocumentProvider::default(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &COMMENT_REF_DOC).unwrap(); + provider.add_document(None, &COMMENT_TEMPLATE_DOC).unwrap(); + + let is_valid = validator::validate(&doc, &provider).await.unwrap(); + assert!(!is_valid); +} + +#[tokio::test] +async fn test_invalid_comment_doc_missing_template() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "content-encoding": ContentEncoding::Brotli.to_string(), + "type": doc_types::PROPOSAL_COMMENT.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver + 
"id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), }, + // "template": { + // "id": COMMENT_TEMPLATE_DOC.doc_id().unwrap(), + // "ver": COMMENT_TEMPLATE_DOC.doc_ver().unwrap(), + // }, "reply": { - "id": comment_doc_id, - "ver": comment_doc_ver + "id": COMMENT_REF_DOC.doc_id().unwrap(), + "ver": COMMENT_REF_DOC.doc_ver().unwrap() + }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), } - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Role0, - ) - .unwrap(); + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(template_doc).unwrap(); - provider.add_document(proposal_doc).unwrap(); - provider.add_document(comment_doc).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &COMMENT_REF_DOC).unwrap(); + provider.add_document(None, &COMMENT_TEMPLATE_DOC).unwrap(); let is_valid = validator::validate(&doc, &provider).await.unwrap(); - - assert!(is_valid); + assert!(!is_valid); } #[tokio::test] -async fn test_invalid_comment_doc() { - let (proposal_doc, ..) = - common::create_dummy_doc(doc_types::PROPOSAL_DOCUMENT_UUID_TYPE).unwrap(); - let (template_doc, template_doc_id, template_doc_ver) = - common::create_dummy_doc(doc_types::COMMENT_TEMPLATE_UUID_TYPE).unwrap(); - - let uuid_v7 = UuidV7::new(); - let (doc, ..) 
= common::create_dummy_signed_doc( - serde_json::json!({ +async fn test_invalid_comment_doc_missing_ref() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::COMMENT_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL_COMMENT.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + // "ref": { + // "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + // "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + // }, "template": { - "id": template_doc_id, - "ver": template_doc_ver + "id": COMMENT_TEMPLATE_DOC.doc_id().unwrap(), + "ver": COMMENT_TEMPLATE_DOC.doc_ver().unwrap(), + }, + "reply": { + "id": COMMENT_REF_DOC.doc_id().unwrap(), + "ver": COMMENT_REF_DOC.doc_ver().unwrap() }, - // without ref - "ref": serde_json::Value::Null - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Role0, - ) - .unwrap(); + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(template_doc).unwrap(); - provider.add_document(proposal_doc).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &COMMENT_REF_DOC).unwrap(); + provider.add_document(None, &COMMENT_TEMPLATE_DOC).unwrap(); let is_valid = validator::validate(&doc, &provider).await.unwrap(); - assert!(!is_valid); } diff --git a/rust/signed_doc/tests/common/mod.rs b/rust/signed_doc/tests/common/mod.rs index d7ea84150b..e7a52b25f1 100644 --- a/rust/signed_doc/tests/common/mod.rs +++ b/rust/signed_doc/tests/common/mod.rs @@ -4,28 +4,6 @@ use std::str::FromStr; use 
catalyst_signed_doc::*; use catalyst_types::catalyst_id::role_index::RoleId; -use ed25519_dalek::ed25519::signature::Signer; - -pub fn test_metadata() -> (UuidV7, UuidV4, serde_json::Value) { - let uuid_v7 = UuidV7::new(); - let uuid_v4 = UuidV4::new(); - - let metadata_fields = serde_json::json!({ - "content-type": ContentType::Json.to_string(), - "content-encoding": ContentEncoding::Brotli.to_string(), - "type": uuid_v4.to_string(), - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), - "ref": {"id": uuid_v7.to_string(), "ver": uuid_v7.to_string()}, - "reply": {"id": uuid_v7.to_string(), "ver": uuid_v7.to_string()}, - "template": {"id": uuid_v7.to_string(), "ver": uuid_v7.to_string()}, - "section": "$".to_string(), - "collabs": vec!["Alex1".to_string(), "Alex2".to_string()], - "parameters": {"id": uuid_v7.to_string(), "ver": uuid_v7.to_string()}, - }); - - (uuid_v7, uuid_v4, metadata_fields) -} pub fn create_dummy_key_pair( role_index: RoleId, @@ -44,47 +22,7 @@ pub fn create_dummy_key_pair( Ok((sk, pk, kid)) } -pub fn create_dummy_doc( - doc_type_id: Uuid, -) -> anyhow::Result<(CatalystSignedDocument, UuidV7, UuidV7)> { - let empty_json = serde_json::to_vec(&serde_json::json!({}))?; - - let doc_id = UuidV7::new(); - let doc_ver = UuidV7::new(); - - let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "content-type": ContentType::Json.to_string(), - "type": doc_type_id, - "id": doc_id, - "ver": doc_ver, - "template": { "id": doc_id.to_string(), "ver": doc_ver.to_string() } - }))? 
- .with_decoded_content(empty_json.clone()) - .build(); - - Ok((doc, doc_id, doc_ver)) -} - pub fn create_signing_key() -> ed25519_dalek::SigningKey { let mut csprng = rand::rngs::OsRng; ed25519_dalek::SigningKey::generate(&mut csprng) } - -pub fn create_dummy_signed_doc( - metadata: serde_json::Value, content: Vec, with_role_index: RoleId, -) -> anyhow::Result<( - CatalystSignedDocument, - ed25519_dalek::VerifyingKey, - CatalystId, -)> { - let (sk, pk, kid) = create_dummy_key_pair(with_role_index)?; - - let signed_doc = Builder::new() - .with_decoded_content(content) - .with_json_metadata(metadata)? - .add_signature(|m| sk.sign(&m).to_vec(), &kid)? - .build(); - - Ok((signed_doc, pk, kid)) -} diff --git a/rust/signed_doc/tests/decoding.rs b/rust/signed_doc/tests/decoding.rs index c1f632f84a..04da22a063 100644 --- a/rust/signed_doc/tests/decoding.rs +++ b/rust/signed_doc/tests/decoding.rs @@ -1,183 +1,140 @@ //! Integration test for COSE decoding part. -use catalyst_signed_doc::{providers::tests::TestVerifyingKeyProvider, *}; +use catalyst_signed_doc::*; use catalyst_types::catalyst_id::role_index::RoleId; use common::create_dummy_key_pair; -use coset::TaggedCborSerializable; -use ed25519_dalek::ed25519::signature::Signer; +use minicbor::{data::Tag, Encoder}; mod common; -#[test] -fn catalyst_signed_doc_cbor_roundtrip_test() { - let (uuid_v7, uuid_v4, metadata_fields) = common::test_metadata(); - let (sk, _, kid) = create_dummy_key_pair(RoleId::Role0).unwrap(); - - let content = serde_json::to_vec(&serde_json::Value::Null).unwrap(); - - let doc = Builder::new() - .with_json_metadata(metadata_fields.clone()) - .unwrap() - .with_decoded_content(content.clone()) - .add_signature(|m| sk.sign(&m).to_vec(), &kid) - .unwrap() - .build(); - - assert!(!doc.problem_report().is_problematic()); - - let bytes: Vec = doc.try_into().unwrap(); - let decoded: CatalystSignedDocument = bytes.as_slice().try_into().unwrap(); - let extra_fields: ExtraFields = 
serde_json::from_value(metadata_fields).unwrap(); - - assert_eq!(decoded.doc_type().unwrap(), uuid_v4); - assert_eq!(decoded.doc_id().unwrap(), uuid_v7); - assert_eq!(decoded.doc_ver().unwrap(), uuid_v7); - assert_eq!(decoded.doc_content().decoded_bytes().unwrap(), &content); - assert_eq!(decoded.doc_meta(), &extra_fields); +type PostCheck = dyn Fn(&CatalystSignedDocument) -> anyhow::Result<()>; + +struct TestCase { + name: &'static str, + bytes_gen: Box anyhow::Result>>>, + // If the provided bytes can be even decoded without error (valid COSE or not). + // If set to `false` all further checks will not even happen. + can_decode: bool, + // If the decoded doc is a valid `CatalystSignedDocument`, underlying problem report is empty. + valid_doc: bool, + post_checks: Option>, } -#[test] -fn catalyst_signed_doc_cbor_roundtrip_kid_as_id_test() { - let (_, _, metadata_fields) = common::test_metadata(); - let (sk, _, kid) = create_dummy_key_pair(RoleId::Role0).unwrap(); - // transform Catalyst ID URI form to the ID form - let kid = kid.as_id(); - - let content = serde_json::to_vec(&serde_json::Value::Null).unwrap(); - - let doc = Builder::new() - .with_json_metadata(metadata_fields.clone()) - .unwrap() - .with_decoded_content(content.clone()) - .add_signature(|m| sk.sign(&m).to_vec(), &kid) - .unwrap() - .build(); - - assert!(doc.problem_report().is_problematic()); +fn decoding_empty_bytes_case() -> TestCase { + TestCase { + name: "Decoding empty bytes", + bytes_gen: Box::new(|| Ok(Encoder::new(Vec::new()))), + can_decode: false, + valid_doc: false, + post_checks: None, + } } -#[tokio::test] -#[allow(clippy::too_many_lines)] -async fn catalyst_signed_doc_parameters_aliases_test() { - let (_, _, metadata_fields) = common::test_metadata(); - let (sk, pk, kid) = common::create_dummy_key_pair(RoleId::Role0).unwrap(); - let mut provider = TestVerifyingKeyProvider::default(); - provider.add_pk(kid.clone(), pk); - - let content = 
serde_json::to_vec(&serde_json::Value::Null).unwrap(); - - let doc = Builder::new() - .with_json_metadata(metadata_fields.clone()) - .unwrap() - .with_decoded_content(content.clone()) - .build(); - assert!(!doc.problem_report().is_problematic()); - - let parameters_val = doc.doc_meta().parameters().unwrap(); - let parameters_val_cbor: coset::cbor::Value = parameters_val.try_into().unwrap(); - // replace parameters with the alias values `category_id`, `brand_id`, `campaign_id`. - let bytes: Vec = doc.try_into().unwrap(); - let mut cose = coset::CoseSign::from_tagged_slice(bytes.as_slice()).unwrap(); - cose.protected.original_data = None; - cose.protected - .header - .rest - .retain(|(l, _)| l != &coset::Label::Text("parameters".to_string())); - - let doc: CatalystSignedDocument = cose - .clone() - .to_tagged_vec() - .unwrap() - .as_slice() - .try_into() - .unwrap(); - assert!(!doc.problem_report().is_problematic()); - assert!(doc.doc_meta().parameters().is_none()); - - // case: `category_id`. - let mut cose_with_category_id = cose.clone(); - cose_with_category_id.protected.header.rest.push(( - coset::Label::Text("category_id".to_string()), - parameters_val_cbor.clone(), - )); - - let cbor_bytes = cose_with_category_id.to_tagged_vec().unwrap(); - let doc: CatalystSignedDocument = cbor_bytes.as_slice().try_into().unwrap(); - let doc = doc - .into_builder() - .add_signature(|m| sk.sign(&m).to_vec(), &kid) - .unwrap() - .build(); - assert!(!doc.problem_report().is_problematic()); - assert!(doc.doc_meta().parameters().is_some()); - assert!(validator::validate_signatures(&doc, &provider) - .await - .unwrap()); - - // case: `brand_id`. 
- let mut cose_with_brand_id = cose.clone(); - cose_with_brand_id.protected.header.rest.push(( - coset::Label::Text("brand_id".to_string()), - parameters_val_cbor.clone(), - )); - - let cbor_bytes = cose_with_brand_id.to_tagged_vec().unwrap(); - let doc: CatalystSignedDocument = cbor_bytes.as_slice().try_into().unwrap(); - let doc = doc - .into_builder() - .add_signature(|m| sk.sign(&m).to_vec(), &kid) - .unwrap() - .build(); - assert!(!doc.problem_report().is_problematic()); - assert!(doc.doc_meta().parameters().is_some()); - assert!(validator::validate_signatures(&doc, &provider) - .await - .unwrap()); - - // case: `campaign_id`. - let mut cose_with_campaign_id = cose.clone(); - cose_with_campaign_id.protected.header.rest.push(( - coset::Label::Text("campaign_id".to_string()), - parameters_val_cbor.clone(), - )); - - let cbor_bytes = cose_with_campaign_id.to_tagged_vec().unwrap(); - let doc: CatalystSignedDocument = cbor_bytes.as_slice().try_into().unwrap(); - let doc = doc - .into_builder() - .add_signature(|m| sk.sign(&m).to_vec(), &kid) - .unwrap() - .build(); - assert!(!doc.problem_report().is_problematic()); - assert!(doc.doc_meta().parameters().is_some()); - assert!(validator::validate_signatures(&doc, &provider) - .await - .unwrap()); - - // `parameters` value along with its aliases are not allowed to be present at the - let mut cose_with_category_id = cose.clone(); - cose_with_category_id.protected.header.rest.push(( - coset::Label::Text("parameters".to_string()), - parameters_val_cbor.clone(), - )); - cose_with_category_id.protected.header.rest.push(( - coset::Label::Text("category_id".to_string()), - parameters_val_cbor.clone(), - )); - cose_with_category_id.protected.header.rest.push(( - coset::Label::Text("brand_id".to_string()), - parameters_val_cbor.clone(), - )); - cose_with_category_id.protected.header.rest.push(( - coset::Label::Text("campaign_id".to_string()), - parameters_val_cbor.clone(), - )); +fn signed_doc_with_all_fields_case() -> TestCase 
{ + let uuid_v7 = UuidV7::new(); + let uuid_v4 = UuidV4::new(); + + TestCase { + name: "Catalyst Signed Doc with minimally defined metadata fields, signed (one signature), CBOR tagged.", + bytes_gen: Box::new({ + move || { + let (_, _, kid) = create_dummy_key_pair(RoleId::Role0)?; + + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + let mut p_headers = Encoder::new(Vec::new()); + + p_headers.map(4)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode_with(uuid_v4, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers.str("id")?.encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers.str("ver")?.encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + + e.bytes(p_headers.into_writer().as_slice())?; + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // signatures + // one signature + e.array(1)?; + e.array(3)?; + // protected headers (kid field) + let mut p_headers = minicbor::Encoder::new(Vec::new()); + p_headers.map(1)?.u8(4)?.encode(kid)?; + e.bytes(p_headers.into_writer().as_slice())?; + e.map(0)?; + e.bytes(&[1,2,3])?; + Ok(e) + } + }), + can_decode: true, + valid_doc: true, + post_checks: Some(Box::new({ + move |doc| { + anyhow::ensure!(doc.doc_type()? == &DocType::from(uuid_v4)); + anyhow::ensure!(doc.doc_id()? == uuid_v7); + anyhow::ensure!(doc.doc_ver()? == uuid_v7); + anyhow::ensure!(doc.doc_content_type()? 
== ContentType::Json); + anyhow::ensure!(doc.encoded_content() == serde_json::to_vec(&serde_json::Value::Null)?); + anyhow::ensure!(doc.kids().len() == 1); + Ok(()) + } + })), + } +} - let doc: CatalystSignedDocument = cose_with_category_id - .to_tagged_vec() - .unwrap() - .as_slice() - .try_into() - .unwrap(); - assert!(doc.problem_report().is_problematic()); +#[test] +fn catalyst_signed_doc_decoding_test() { + let test_cases = [ + decoding_empty_bytes_case(), + signed_doc_with_all_fields_case(), + ]; + + for case in test_cases { + let bytes_res = case.bytes_gen.as_ref()(); + assert!( + bytes_res.is_ok(), + "Case: [{}], error: {:?}", + case.name, + bytes_res.err() + ); + let bytes = bytes_res.unwrap().into_writer(); + let doc_res = CatalystSignedDocument::try_from(bytes.as_slice()); + assert_eq!( + doc_res.is_ok(), + case.can_decode, + "Case: [{}], error: {:?}", + case.name, + doc_res.err() + ); + if let Ok(doc) = doc_res { + assert_eq!( + !doc.problem_report().is_problematic(), + case.valid_doc, + "Case: [{}]. Problem report: {:?}", + case.name, + doc.problem_report() + ); + + if let Some(post_checks) = &case.post_checks { + let post_checks_res = post_checks(&doc); + assert!( + post_checks_res.is_ok(), + "Case: [{}]. Post checks fails: {:?}", + case.name, + post_checks_res.err() + ); + } + + assert_eq!( + bytes, + Vec::::try_from(doc).unwrap(), + "Case: [{}]. Asymmetric encoding and decoding procedure", + case.name + ); + } + } } diff --git a/rust/signed_doc/tests/proposal.rs b/rust/signed_doc/tests/proposal.rs index 50ce1799e4..e2f129dca2 100644 --- a/rust/signed_doc/tests/proposal.rs +++ b/rust/signed_doc/tests/proposal.rs @@ -1,93 +1,192 @@ //! Integration test for proposal document validation part. +//! Require fields: type, id, ver, template, parameters +//! 
-use catalyst_signed_doc::{providers::tests::TestCatalystSignedDocumentProvider, *}; -use catalyst_types::catalyst_id::role_index::RoleId; +use std::sync::LazyLock; -mod common; +use catalyst_signed_doc::{ + doc_types::deprecated, providers::tests::TestCatalystSignedDocumentProvider, *, +}; +#[allow(clippy::unwrap_used)] +static DUMMY_BRAND_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "type": doc_types::BRAND_PARAMETERS.clone(), + })) + .unwrap() + .empty_content() + .build() + .unwrap() +}); + +#[allow(clippy::unwrap_used)] +static PROPOSAL_TEMPLATE_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "content-encoding": ContentEncoding::Brotli.to_string(), + "type": doc_types::PROPOSAL_TEMPLATE.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": {}, + "required": [], + "additionalProperties": false + })) + .unwrap() + .build() + .unwrap() +}); + +// Given a proposal document `doc`: +// +// - Parameters: +// The `parameters` field in `doc` points to a brand document. +// The parameter rule defines the link reference as `template`, This mean the document +// that `ref` field in `doc` points to (in this case = `template_doc`), must have the same +// `parameters` value as `doc`. #[tokio::test] async fn test_valid_proposal_doc() { - let (template_doc, template_doc_id, template_doc_ver) = - common::create_dummy_doc(doc_types::PROPOSAL_TEMPLATE_UUID_TYPE).unwrap(); - - let uuid_v7 = UuidV7::new(); - let (doc, ..) 
= common::create_dummy_signed_doc( - serde_json::json!({ + // Create a main proposal doc, contain all fields mention in the document (except + // collaborations and revocations) + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), "template": { - "id": template_doc_id, - "ver": template_doc_ver + "id": PROPOSAL_TEMPLATE_DOC.doc_id().unwrap(), + "ver": PROPOSAL_TEMPLATE_DOC.doc_ver().unwrap(), }, - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Proposer, - ) - .unwrap(); + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(template_doc).unwrap(); - let is_valid = validator::validate(&doc, &provider).await.unwrap(); + provider.add_document(None, &PROPOSAL_TEMPLATE_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(is_valid); } #[tokio::test] -async fn test_valid_proposal_doc_with_empty_provider() { - // dummy template doc to dummy provider - let template_doc_id = UuidV7::new(); - let template_doc_ver = UuidV7::new(); - - let uuid_v7 = UuidV7::new(); - let (doc, ..) 
= common::create_dummy_signed_doc( - serde_json::json!({ +async fn test_valid_proposal_doc_old_type() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": deprecated::PROPOSAL_DOCUMENT_UUID_TYPE, + "id": UuidV7::new(), + "ver": UuidV7::new(), "template": { - "id": template_doc_id, - "ver": template_doc_ver + "id": PROPOSAL_TEMPLATE_DOC.doc_id().unwrap(), + "ver": PROPOSAL_TEMPLATE_DOC.doc_ver().unwrap(), }, - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Proposer, - ) - .unwrap(); + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); - let provider = TestCatalystSignedDocumentProvider::default(); + let mut provider = TestCatalystSignedDocumentProvider::default(); + + provider.add_document(None, &PROPOSAL_TEMPLATE_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); let is_valid = validator::validate(&doc, &provider).await.unwrap(); + assert!(is_valid); +} + +#[tokio::test] +async fn test_invalid_proposal_doc_missing_template() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "content-encoding": ContentEncoding::Brotli.to_string(), + "type": deprecated::PROPOSAL_DOCUMENT_UUID_TYPE, + "id": UuidV7::new(), + "ver": UuidV7::new(), + // "template": { + // "id": PROPOSAL_TEMPLATE_DOC.doc_id().unwrap(), + // "ver": PROPOSAL_TEMPLATE_DOC.doc_ver().unwrap(), + // }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + 
.unwrap(); + + let mut provider = TestCatalystSignedDocumentProvider::default(); + + provider.add_document(None, &PROPOSAL_TEMPLATE_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(!is_valid); } #[tokio::test] -async fn test_invalid_proposal_doc() { - let uuid_v7 = UuidV7::new(); - let (doc, ..) = common::create_dummy_signed_doc( - serde_json::json!({ +async fn test_invalid_proposal_doc_missing_parameters() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), - // without specifying template id - "template": serde_json::Value::Null, - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Proposer, - ) - .unwrap(); - - let provider = TestCatalystSignedDocumentProvider::default(); + "type": deprecated::PROPOSAL_DOCUMENT_UUID_TYPE, + "id": UuidV7::new(), + "ver": UuidV7::new(), + "template": { + "id": PROPOSAL_TEMPLATE_DOC.doc_id().unwrap(), + "ver": PROPOSAL_TEMPLATE_DOC.doc_ver().unwrap(), + }, + // "parameters": { + // "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + // "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + // } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); - let is_valid = validator::validate(&doc, &provider).await.unwrap(); + let mut provider = TestCatalystSignedDocumentProvider::default(); + + provider.add_document(None, &PROPOSAL_TEMPLATE_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(!is_valid); } diff --git a/rust/signed_doc/tests/signature.rs b/rust/signed_doc/tests/signature.rs index 5c93ec25bb..b675b19947 100644 --- a/rust/signed_doc/tests/signature.rs +++ 
b/rust/signed_doc/tests/signature.rs @@ -2,20 +2,42 @@ use catalyst_signed_doc::{providers::tests::TestVerifyingKeyProvider, *}; use catalyst_types::catalyst_id::role_index::RoleId; -use common::test_metadata; use ed25519_dalek::ed25519::signature::Signer; +use crate::common::create_dummy_key_pair; + mod common; +fn metadata() -> serde_json::Value { + serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "content-encoding": ContentEncoding::Brotli.to_string(), + "type": UuidV4::new(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "ref": {"id": UuidV7::new(), "ver": UuidV7::new()}, + "reply": {"id": UuidV7::new(), "ver": UuidV7::new()}, + "template": {"id": UuidV7::new(), "ver": UuidV7::new()}, + "section": "$", + "collabs": vec!["Alex1", "Alex2"], + "parameters": {"id": UuidV7::new(), "ver": UuidV7::new()}, + }) +} + #[tokio::test] async fn single_signature_validation_test() { - let (_, _, metadata) = test_metadata(); - let (signed_doc, pk, kid) = common::create_dummy_signed_doc( - metadata, - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Role0, - ) - .unwrap(); + let (sk, pk, kid) = create_dummy_key_pair(RoleId::Role0).unwrap(); + + let signed_doc = Builder::new() + .with_json_metadata(metadata()) + .unwrap() + .with_json_content(&serde_json::Value::Null) + .unwrap() + .add_signature(|m| sk.sign(&m).to_vec(), kid.clone()) + .unwrap() + .build() + .unwrap(); + assert!(!signed_doc.problem_report().is_problematic()); // case: has key @@ -32,8 +54,31 @@ async fn single_signature_validation_test() { .unwrap() ); + // case: signed with different key + let (another_sk, ..) = create_dummy_key_pair(RoleId::Role0).unwrap(); + let invalid_doc = signed_doc + .into_builder() + .add_signature(|m| another_sk.sign(&m).to_vec(), kid.clone()) + .unwrap() + .build() + .unwrap(); + assert!(!validator::validate_signatures(&invalid_doc, &provider) + .await + .unwrap()); + // case: missing signatures - let (unsigned_doc, ..) 
= common::create_dummy_doc(UuidV4::new().into()).unwrap(); + let unsigned_doc = Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "type": UuidV4::new(), + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); assert!(!validator::validate_signatures(&unsigned_doc, &provider) .await .unwrap()); @@ -47,16 +92,18 @@ async fn multiple_signatures_validation_test() { let (_, pk_n, kid_n) = common::create_dummy_key_pair(RoleId::Role0).unwrap(); let signed_doc = Builder::new() - .with_decoded_content(serde_json::to_vec(&serde_json::Value::Null).unwrap()) - .with_json_metadata(common::test_metadata().2) + .with_json_metadata(metadata()) .unwrap() - .add_signature(|m| sk1.sign(&m).to_vec(), &kid1) + .with_json_content(&serde_json::Value::Null) .unwrap() - .add_signature(|m| sk2.sign(&m).to_vec(), &kid2) + .add_signature(|m| sk1.sign(&m).to_vec(), kid1.clone()) .unwrap() - .add_signature(|m| sk3.sign(&m).to_vec(), &kid3) + .add_signature(|m| sk2.sign(&m).to_vec(), kid2.clone()) .unwrap() - .build(); + .add_signature(|m| sk3.sign(&m).to_vec(), kid3.clone()) + .unwrap() + .build() + .unwrap(); assert!(!signed_doc.problem_report().is_problematic()); @@ -67,27 +114,27 @@ async fn multiple_signatures_validation_test() { provider.add_pk(kid3.clone(), pk3); assert!(validator::validate_signatures(&signed_doc, &provider) .await - .is_ok_and(|v| v)); + .unwrap()); // case: partially available signatures let mut provider = TestVerifyingKeyProvider::default(); provider.add_pk(kid1.clone(), pk1); provider.add_pk(kid2.clone(), pk2); - assert!(validator::validate_signatures(&signed_doc, &provider) + assert!(!validator::validate_signatures(&signed_doc, &provider) .await - .is_ok_and(|v| !v)); + .unwrap()); // case: with unrecognized provider let mut provider = TestVerifyingKeyProvider::default(); provider.add_pk(kid_n.clone(), pk_n); - 
assert!(validator::validate_signatures(&signed_doc, &provider) + assert!(!validator::validate_signatures(&signed_doc, &provider) .await - .is_ok_and(|v| !v)); + .unwrap()); // case: no valid signatures available assert!( - validator::validate_signatures(&signed_doc, &TestVerifyingKeyProvider::default()) + !validator::validate_signatures(&signed_doc, &TestVerifyingKeyProvider::default()) .await - .is_ok_and(|v| !v) + .unwrap() ); } diff --git a/rust/signed_doc/tests/submission.rs b/rust/signed_doc/tests/submission.rs index d10c6c3952..a4199d064a 100644 --- a/rust/signed_doc/tests/submission.rs +++ b/rust/signed_doc/tests/submission.rs @@ -1,150 +1,224 @@ -//! Test for proposal submission action. +//! Test for Proposal Submission Action. +//! Require fields: type, id, ver, ref, parameters +//! -use catalyst_signed_doc::{providers::tests::TestCatalystSignedDocumentProvider, *}; -use catalyst_types::catalyst_id::role_index::RoleId; +use std::sync::LazyLock; -mod common; +use catalyst_signed_doc::{ + doc_types::deprecated, providers::tests::TestCatalystSignedDocumentProvider, *, +}; +#[allow(clippy::unwrap_used)] +static DUMMY_PROPOSAL_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "type": doc_types::PROPOSAL.clone(), + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .empty_content() + .build() + .unwrap() +}); + +#[allow(clippy::unwrap_used)] +static DUMMY_BRAND_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "type": doc_types::BRAND_PARAMETERS.clone(), + })) + .unwrap() + .empty_content() + .build() + .unwrap() +}); + +// Given a proposal comment document `doc`: +// +// - Parameters: +// The `parameters` 
field in `doc` points to a brand document. +// The parameter rule defines the link reference as `ref`, This mean the document that +// `ref` field in `doc` points to (in this case = `proposal_doc`), must have the same +// `parameters` value as `doc`. #[tokio::test] async fn test_valid_submission_action() { - let (proposal_doc, proposal_doc_id, proposal_doc_ver) = - common::create_dummy_doc(doc_types::PROPOSAL_DOCUMENT_UUID_TYPE).unwrap(); - - let uuid_v7 = UuidV7::new(); - let (doc, ..) = common::create_dummy_signed_doc( - serde_json::json!({ + // Create a main proposal submission doc, contain all fields mention in the document + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL_SUBMISSION_ACTION.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), }, - }), - serde_json::to_vec(&serde_json::json!({ + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({ "action": "final" })) - .unwrap(), - RoleId::Proposer, - ) - .unwrap(); + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(proposal_doc).unwrap(); + + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(is_valid, "{:?}", doc.problem_report()); } #[tokio::test] -async fn test_valid_submission_action_with_empty_provider() { - let proposal_doc_id = UuidV7::new(); - let 
proposal_doc_ver = UuidV7::new(); - - let uuid_v7 = UuidV7::new(); - let (doc, ..) = common::create_dummy_signed_doc( - serde_json::json!({ +async fn test_valid_submission_action_old_type() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": deprecated::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, + "id": UuidV7::new(), + "ver": UuidV7::new(), "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), }, - }), - serde_json::to_vec(&serde_json::json!({ + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({ "action": "final" })) - .unwrap(), - RoleId::Proposer, - ) - .unwrap(); + .unwrap() + .build() + .unwrap(); - let provider = TestCatalystSignedDocumentProvider::default(); + let mut provider = TestCatalystSignedDocumentProvider::default(); - let is_valid = validator::validate(&doc, &provider).await.unwrap(); + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); - assert!(!is_valid); + let is_valid = validator::validate(&doc, &provider).await.unwrap(); + assert!(is_valid, "{:?}", doc.problem_report()); } #[tokio::test] -async fn test_invalid_submission_action() { - let uuid_v7 = UuidV7::new(); - // missing `ref` field - let (doc, ..) 
= common::create_dummy_signed_doc( - serde_json::json!({ +async fn test_invalid_submission_action_corrupted_json() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), - // without specifying ref - "ref": serde_json::Value::Null, - }), - serde_json::to_vec(&serde_json::json!({ - "action": "final" + "type": doc_types::PROPOSAL_SUBMISSION_ACTION.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "ref": { + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } })) - .unwrap(), - RoleId::Proposer, - ) - .unwrap(); + .unwrap() + .with_json_content(&serde_json::Value::Null) + .unwrap() + .build() + .unwrap(); + + let mut provider = TestCatalystSignedDocumentProvider::default(); + + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); - let provider = TestCatalystSignedDocumentProvider::default(); let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(!is_valid); +} - // corrupted JSON - let (proposal_doc, proposal_doc_id, proposal_doc_ver) = - common::create_dummy_doc(doc_types::PROPOSAL_DOCUMENT_UUID_TYPE).unwrap(); - let uuid_v7 = UuidV7::new(); - let (doc, ..) 
= common::create_dummy_signed_doc( - serde_json::json!({ +#[tokio::test] +async fn test_invalid_submission_action_missing_ref() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), - "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver - }, - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Proposer, - ) - .unwrap(); + "type": doc_types::PROPOSAL_SUBMISSION_ACTION.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + // "ref": { + // "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + // "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + // }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({ + "action": "final" + })) + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(proposal_doc).unwrap(); + + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(!is_valid); +} - // empty content - let (proposal_doc, proposal_doc_id, proposal_doc_ver) = - common::create_dummy_doc(doc_types::PROPOSAL_DOCUMENT_UUID_TYPE).unwrap(); - let uuid_v7 = UuidV7::new(); - let (doc, ..) 
= common::create_dummy_signed_doc( - serde_json::json!({ +#[tokio::test] +async fn test_invalid_submission_action_missing_parameters() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL_SUBMISSION_ACTION.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), }, - }), - vec![], - RoleId::Proposer, - ) - .unwrap(); + // "parameters": { + // "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + // "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + // } + })) + .unwrap() + .with_json_content(&serde_json::json!({ + "action": "final" + })) + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(proposal_doc).unwrap(); + + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(!is_valid); }