diff --git a/README.md b/README.md index 026c1f8c..27c3edf9 100644 --- a/README.md +++ b/README.md @@ -38,7 +38,7 @@ cargo build ``` Install the Trustchain CLI with: ```shell -cargo install --path trustchain-ion +cargo install --path trustchain-cli ``` ## Usage Guide diff --git a/trustchain-core/Cargo.toml b/trustchain-core/Cargo.toml index b8eb3a71..8c5f6333 100644 --- a/trustchain-core/Cargo.toml +++ b/trustchain-core/Cargo.toml @@ -15,7 +15,7 @@ petgraph = {version = "0.6"} serde = { version = "1.0", features = ["derive"] } serde_jcs = "0.1.0" serde_json = "1.0" -sha2 = "0.10" +sha2 = "0.10.7" ssi = { version = "0.4", features = ["http-did", "secp256k1"] } tempfile = { version = "3.3" } thiserror = "1.0" diff --git a/trustchain-core/src/commitment.rs b/trustchain-core/src/commitment.rs index 59bcfac1..8529cb94 100644 --- a/trustchain-core/src/commitment.rs +++ b/trustchain-core/src/commitment.rs @@ -1,12 +1,13 @@ //! Commitment scheme API with default implementation. -use crate::utils::{json_contains, type_of, HasEndpoints, HasKeys}; +use crate::utils::{json_contains, HasEndpoints, HasKeys}; use crate::verifier::Timestamp; +use serde::Serialize; use serde_json::{json, Value}; use ssi::{ did::{Document, ServiceEndpoint}, jwk::JWK, }; -use std::convert::TryInto; +use std::fmt::Display; use thiserror::Error; /// Type for commitment result. @@ -25,7 +26,7 @@ pub enum CommitmentError { #[error("Failed to compute hash: {0}")] FailedToComputeHash(String), /// Failed hash verification. - #[error("Failed hash verification. Computed hash not equal to target.")] + #[error("Failed hash verification.")] FailedHashVerification(String), /// Failed content verification. #[error("Failed content verification. Expected data {0} not found in candidate: {1}.")] @@ -48,7 +49,7 @@ impl From for CommitmentError { } /// A cryptographic commitment with no expected data content. -pub trait TrivialCommitment { +pub trait TrivialCommitment { /// Gets the hasher (as a function pointer). fn hasher(&self) -> fn(&[u8]) -> CommitmentResult; /// Gets the candidate data. @@ -89,13 +90,16 @@ pub trait TrivialCommitment { } // See https://users.rust-lang.org/t/is-there-a-way-to-move-a-trait-object/707 for Box hint. /// Converts this TrivialCommitment to a Commitment. - fn to_commitment(self: Box, expected_data: serde_json::Value) -> Box; + fn to_commitment(self: Box, expected_data: T) -> Box>; } /// A cryptographic commitment with expected data content. -pub trait Commitment: TrivialCommitment { +pub trait Commitment: TrivialCommitment +where + T: Serialize + Display, +{ /// Gets the expected data. - fn expected_data(&self) -> &serde_json::Value; + fn expected_data(&self) -> &T; /// Verifies that the expected data is found in the candidate data. fn verify_content(&self) -> CommitmentResult<()> { @@ -103,7 +107,9 @@ pub trait Commitment: TrivialCommitment { let candidate_data = self.commitment_content()?; // Verify the content. - if !json_contains(&candidate_data, self.expected_data()) { + // Note the call `json!(self.expected_data())` acts as the identity function when called on + // a `Value` type as it is simply serialized by the underlying methods. + if !json_contains(&candidate_data, &json!(self.expected_data())) { return Err(CommitmentError::FailedContentVerification( self.expected_data().to_string(), candidate_data.to_string(), @@ -118,7 +124,9 @@ pub trait Commitment: TrivialCommitment { self.verify_content()?; // Verify the target by comparing with the computed hash. 
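The note on `verify_content` above observes that `json!(self.expected_data())` acts as the identity on a `Value` while serializing any other expected-data type (such as a `Timestamp`). A minimal standalone sketch of that behaviour, outside the patch, using only `serde_json`:

```rust
use serde_json::{json, Value};

fn main() {
    // Applied to a Value, json! simply re-serializes it to an equal Value.
    let value: Value = json!({"serviceEndpoint": "https://example.com"});
    assert_eq!(json!(value.clone()), value);

    // Applied to a plain type such as a u32 Unix timestamp, json! produces the
    // corresponding JSON number, so the same json_contains-style check applies.
    let timestamp: u32 = 1666265405;
    assert_eq!(json!(timestamp), Value::from(1666265405u32));
}
```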
if self.hash()?.ne(target) { - return Err(CommitmentError::FailedHashVerification(type_of(&self))); + return Err(CommitmentError::FailedHashVerification( + "Computed hash not equal to target.".to_string(), + )); } Ok(()) } @@ -264,79 +272,13 @@ pub trait DIDCommitment: Commitment { fn candidate_endpoints(&self) -> Option> { self.did_document().get_endpoints() } - /// Get the candidate data in which we expect to find a timestamp. - fn timestamp_candidate_data(&self) -> CommitmentResult<&[u8]>; - /// Gets the decoder (function) for the timestamp candidate data. - fn decode_timestamp_candidate_data( - &self, - ) -> CommitmentResult CommitmentResult>; -} - -/// A Commitment whose expected data is a Unix time and hasher -/// and candidate data are obtained from a given DIDCommitment. -pub struct TimestampCommitment { - expected_data: serde_json::Value, - hasher: fn(&[u8]) -> CommitmentResult, - candidate_data: Vec, - decode_candidate_data: fn(&[u8]) -> CommitmentResult, + fn as_any(&self) -> &dyn std::any::Any; } -impl TimestampCommitment { - /// Constructs a TimestampCommitment from a given DIDCommitment, with a Unix - /// timestamp as expected data. - pub fn new( - did_commitment: &dyn DIDCommitment, - expected_data: Timestamp, - ) -> CommitmentResult { - // Note the expected data in the TimestampCommitment is the timestamp, but the - // hasher & candidate data are identical to those in the DIDCommitment. Therefore, - // by verifying both the DIDCommitment and the TimestampCommitment we confirm - // that the *same* hash commits to *both* the DID Document data and the timestamp. - // - // The decoded candidate data must contain the timestamp such that it is found - // by the json_contains function, otherwise the content verification will fail. - Ok(Self { - expected_data: json!(expected_data), - hasher: did_commitment.hasher(), - candidate_data: did_commitment.timestamp_candidate_data()?.to_vec(), - decode_candidate_data: did_commitment.decode_timestamp_candidate_data()?, - }) - } - +/// A Commitment whose expected data is a Unix time. +pub trait TimestampCommitment: Commitment { /// Gets the timestamp as a Unix time. 
- pub fn timestamp(&self) -> Timestamp { - self.expected_data - .as_u64() - .unwrap() - .try_into() - .expect("Construction guarantees u32.") - } -} - -impl TrivialCommitment for TimestampCommitment { - fn hasher(&self) -> fn(&[u8]) -> CommitmentResult { - self.hasher - } - - fn candidate_data(&self) -> &[u8] { - &self.candidate_data - } - - fn decode_candidate_data(&self) -> fn(&[u8]) -> CommitmentResult { - self.decode_candidate_data - } - - fn to_commitment(self: Box, expected_data: serde_json::Value) -> Box { - if !expected_data.eq(self.expected_data()) { - panic!("Attempted modification of expected timestamp data not permitted."); - } - self - } -} - -impl Commitment for TimestampCommitment { - fn expected_data(&self) -> &serde_json::Value { - // Safe to unwrap as a complete commitment must have expected data - &self.expected_data + fn timestamp(&self) -> Timestamp { + self.expected_data().to_owned() } } diff --git a/trustchain-core/src/verifier.rs b/trustchain-core/src/verifier.rs index 10e7ee56..756d21e5 100644 --- a/trustchain-core/src/verifier.rs +++ b/trustchain-core/src/verifier.rs @@ -2,11 +2,9 @@ use std::error::Error; use crate::chain::{Chain, ChainError, DIDChain}; -use crate::commitment::{Commitment, CommitmentError, DIDCommitment, TimestampCommitment}; +use crate::commitment::{CommitmentError, DIDCommitment, TimestampCommitment}; use crate::resolver::{Resolver, ResolverError}; -use crate::utils::{json_contains, HasEndpoints, HasKeys}; use async_trait::async_trait; -use serde_json::json; use ssi::did_resolve::DIDResolver; use thiserror::Error; @@ -20,8 +18,11 @@ pub enum VerifierError { #[error("Invalid signature for proof in dDID: {0}.")] InvalidSignature(String), /// Invalid root DID after self-controller reached in path. - #[error("Invalid root DID: {0}.")] - InvalidRoot(String), + #[error("Invalid root DID error: {0}")] + InvalidRoot(Box), + /// Invalid root with error: + #[error("Invalid root DID ({0}) with timestamp: {1}.")] + InvalidRootTimestamp(String, Timestamp), /// Failed to build DID chain. #[error("Failed to build chain: {0}.")] ChainBuildFailure(String), @@ -129,7 +130,7 @@ pub enum VerifierError { #[error("Verification material not yet fetched for DID: {0}.")] VerificationMaterialNotYetFetched(String), /// Wrapped commitment error. - #[error("A commitment error during verification.")] + #[error("A commitment error during verification: {0}")] CommitmentFailure(CommitmentError), /// Wrapped resolver error. #[error("A resolver error during verification.")] @@ -169,78 +170,24 @@ impl From for VerifierError { /// A Unix timestamp. pub type Timestamp = u32; -/// A verifiably timestamped DID Document. -pub struct VerifiableTimestamp { - did_commitment: Box, - timestamp: Timestamp, -} - -impl VerifiableTimestamp { - fn new(did_commitment: Box, expected_timestamp: Timestamp) -> Self { - Self { - did_commitment, - timestamp: expected_timestamp, - } - } - - /// Gets the DID Commitment. - fn did_commitment(&self) -> &dyn DIDCommitment { - self.did_commitment.as_ref() - } - - /// Gets a Timestamp Commitment with hash, hasher and candidate data identical to the - /// owned DID Commitment, and with the expected timestamp as expected data. - pub fn timestamp_commitment(&self) -> Result { - Ok(TimestampCommitment::new( - self.did_commitment(), - self.timestamp, - )?) - } - - /// Gets the hash (PoW) commitment. - pub fn hash(&self) -> Result { - Ok(self.did_commitment.hash()?) - } - - /// Gets the timestamp as a Unix time. 
- pub fn timestamp(&self) -> Timestamp { - self.timestamp +/// A verifiably-timestamped DID. +pub trait VerifiableTimestamp { + /// Gets the wrapped DIDCommitment. + fn did_commitment(&self) -> &dyn DIDCommitment; + /// Gets the wrapped TimestampCommitment. + fn timestamp_commitment(&self) -> &dyn TimestampCommitment; + /// Gets the Timestamp. + fn timestamp(&self) -> Timestamp { + self.timestamp_commitment().timestamp() } - - /// Verifies that the DID Document data (public keys & endpoints) - /// are contained as expected data in the DID Commitment. - fn verify_content(&self) -> Result<(), VerifierError> { - // Check each expected key is found in the vector of verified keys. - if let Some(expected_keys) = self.did_commitment().did_document().get_keys() { - if let Some(candidate_keys) = self.did_commitment().candidate_keys() { - if !expected_keys.iter().all(|key| candidate_keys.contains(key)) { - return Err(VerifierError::KeyNotFoundInVerifiedContent( - self.did_commitment().did().to_string(), - )); - } - } else { - return Err(VerifierError::NoKeysFoundInVerifiedContent( - self.did_commitment().did().to_string(), - )); - } - } - // Check each expected endpoint is found in the vector of verified endpoints. - if let Some(expected_endpoints) = self.did_commitment().did_document().get_endpoints() { - if let Some(candidate_endpoints) = self.did_commitment().candidate_endpoints() { - if !expected_endpoints - .iter() - .all(|uri| candidate_endpoints.contains(uri)) - { - return Err(VerifierError::EndpointNotFoundInVerifiedContent( - self.did_commitment().did().to_string(), - )); - } - } else { - return Err(VerifierError::NoEndpointsFoundInVerifiedContent( - self.did_commitment().did().to_string(), - )); - } - } + /// Verifies both the DIDCommitment and the TimestampCommitment against the same target. + fn verify(&self, target: &str) -> Result<(), CommitmentError> { + // The expected data in the TimestampCommitment is the timestamp, while in the + // DIDCommitment the expected data are the public keys & service endpoints. + // By verifying both commitments using the same target, we confirm that the *same* + // hash commits to *both* the DID Document data and the timestamp. + self.did_commitment().verify(target)?; + self.timestamp_commitment().verify(target)?; Ok(()) } } @@ -263,69 +210,42 @@ pub trait Verifier { // Verify the root timestamp. let root = chain.root(); - let verifiable_timestamp = self.verifiable_timestamp(root).await?; - self.verify_timestamp(&verifiable_timestamp)?; - // At this point we know that the same PoW commits to both the timestamp - // in verifiable_timestamp and the data (keys & endpoints) in the root DID Document. - // It only remains to check that the verified timestamp matches the expected root timestamp. + let verifiable_timestamp = self.verifiable_timestamp(root, root_timestamp).await?; + + // Verify that the root DID content (keys & endpoints) and the timestamp share a common + // commitment target. + verifiable_timestamp.verify(&verifiable_timestamp.timestamp_commitment().hash()?)?; + + // Validate the PoW on the common target hash. + self.validate_pow_hash(&verifiable_timestamp.timestamp_commitment().hash()?)?; + + // Verify explicitly that the return value from the timestamp method equals the expected + // root timestamp (in case the default timestamp method implementation has been overridden). 
if !verifiable_timestamp.timestamp().eq(&root_timestamp) { - Err(VerifierError::InvalidRoot(root.to_string())) + Err(VerifierError::InvalidRootTimestamp( + root.to_string(), + verifiable_timestamp.timestamp(), + )) } else { Ok(chain) } } /// Constructs a verifiable timestamp for the given DID, including an expected - /// value for the timestamp retreived from a local PoW network node. - async fn verifiable_timestamp(&self, did: &str) -> Result { - // Get the DID Commitment. - let did_commitment = self.did_commitment(did).await?; - // Hash the DID commitment - let hash = did_commitment.hash()?; - // Get the expected timestamp for the PoW hash by querying a *local* node. - let expected_timestamp = self.expected_timestamp(&hash)?; - Ok(VerifiableTimestamp::new(did_commitment, expected_timestamp)) - } - - /// Verifies a given verifiable timestamp. - fn verify_timestamp( + /// value for the timestamp retrieved from a local PoW network node. + async fn verifiable_timestamp( &self, - verifiable_timestamp: &VerifiableTimestamp, - ) -> Result<(), VerifierError> { - // Verify that the DID Commitment commits to the DID Document data. - verifiable_timestamp.verify_content()?; - - let did_commitment = verifiable_timestamp.did_commitment(); - let timestamp_commitment = verifiable_timestamp.timestamp_commitment()?; - - // Verify that the expected data in the Timestamp Commitment matches the timestamp itself. - if !json_contains( - timestamp_commitment.expected_data(), - &json!(verifiable_timestamp.timestamp()), - ) { - return Err(VerifierError::TimestampVerificationError( - did_commitment.did().to_string(), - )); - } - - // Verify both the commitments with the *same* target hash, thereby confirming - // that the same PoW commits to both the DID Document data & the timestamp. - let hash = verifiable_timestamp.hash()?; - did_commitment.verify(&hash)?; - timestamp_commitment.verify(&hash)?; - - Ok(()) - } - - /// Queries a local PoW node to get the expected timestamp for a given PoW hash. - fn expected_timestamp(&self, hash: &str) -> Result; + did: &str, + expected_timestamp: Timestamp, + ) -> Result, VerifierError>; /// Gets a block hash (PoW) Commitment for the given DID. - /// The mutable reference to self enables a newly-fetched Commitment - /// to be stored locally for faster subsequent retrieval. async fn did_commitment(&self, did: &str) -> Result, VerifierError>; + /// Queries a local PoW node to get the expected timestamp for a given PoW hash. + fn validate_pow_hash(&self, hash: &str) -> Result<(), VerifierError>; + /// Gets the resolver used for DID verification. 
fn resolver(&self) -> &Resolver; } diff --git a/trustchain-http/src/errors.rs b/trustchain-http/src/errors.rs index 78589960..a13b460a 100644 --- a/trustchain-http/src/errors.rs +++ b/trustchain-http/src/errors.rs @@ -62,6 +62,9 @@ impl IntoResponse for TrustchainHTTPError { err @ TrustchainHTTPError::VerifierError(VerifierError::InvalidRoot(_)) => { (StatusCode::OK, err.to_string()) } + err @ TrustchainHTTPError::VerifierError(VerifierError::CommitmentFailure(_)) => { + (StatusCode::OK, err.to_string()) + } err @ TrustchainHTTPError::VerifierError(_) => { (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()) } diff --git a/trustchain-http/src/resolver.rs b/trustchain-http/src/resolver.rs index 13fb1bac..22c1777c 100644 --- a/trustchain-http/src/resolver.rs +++ b/trustchain-http/src/resolver.rs @@ -15,7 +15,7 @@ use ssi::{ use std::sync::Arc; use trustchain_core::chain::{Chain, DIDChain}; use trustchain_core::resolver::Resolver; -use trustchain_core::verifier::{Timestamp, Verifier}; +use trustchain_core::verifier::{Timestamp, Verifier, VerifierError}; use trustchain_ion::verifier::{IONVerifier, VerificationBundle}; /// A HTTP API for resolving DID documents, chains, and verification bundles. @@ -67,7 +67,14 @@ impl TrustchainHTTP for TrustchainHTTPHandler { root_event_time: Timestamp, ) -> Result { debug!("Verifying..."); - let chain = verifier.verify(did, root_event_time).await?; + let chain = verifier + .verify(did, root_event_time) + .await + // Any commitment error implies invalid root + .map_err(|err| match err { + err @ VerifierError::CommitmentFailure(_) => VerifierError::InvalidRoot(err.into()), + err => err, + })?; debug!("Verified did..."); Ok(DIDChainResolutionResult::new(&chain)) } @@ -210,7 +217,8 @@ mod tests { #[ignore = "requires ION, MongoDB, IPFS and Bitcoin RPC"] async fn test_resolve_chain() { let app = TrustchainRouter::from(HTTPConfig::default()).into_router(); - let uri = "/did/chain/did:ion:test:EiAtHHKFJWAk5AsM3tgCut3OiBY4ekHTf66AAjoysXL65Q?root_event_time=1666265405".to_string(); + let root_event_time = 1666265405; + let uri = format!("/did/chain/did:ion:test:EiAtHHKFJWAk5AsM3tgCut3OiBY4ekHTf66AAjoysXL65Q?root_event_time={root_event_time}"); let client = TestClient::new(app); let response = client.get(&uri).send().await; assert_eq!(response.status(), StatusCode::OK); @@ -221,17 +229,19 @@ mod tests { // Test for case where incorrect root_event_time for the root of the given DID, expected to // return Ok but with a JSON containing the wrapped Trustchain error. 
- let incorrect_root_event_time = "001234500"; + let incorrect_root_event_time = 1234500; let uri_incorrect_root = format!( "/did/chain/did:ion:test:EiAtHHKFJWAk5AsM3tgCut3OiBY4ekHTf66AAjoysXL65Q?root_event_time={incorrect_root_event_time}" ) .to_string(); let response = client.get(&uri_incorrect_root).send().await; assert_eq!(response.status(), StatusCode::OK); - assert_eq!( - response.text().await, - r#"{"error":"Trustchain Verifier error: Invalid root DID: did:ion:test:EiCClfEdkTv_aM3UnBBhlOV89LlGhpQAbfeZLFdFxVFkEg."}"#.to_string() - ) + // A wrapped CommitmentError is now returned here mapped to VerifierError::InvalidRoot + // println!("{}", response.text().await); + assert!(response + .text() + .await + .starts_with(r#"{"error":"Trustchain Verifier error: Invalid root DID error:"#),) } #[tokio::test] @@ -268,8 +278,10 @@ mod tests { let listener = TcpListener::bind("127.0.0.1:0").expect("Could not bind ephemeral socket"); let addr = listener.local_addr().unwrap(); let port = addr.port(); - let mut http_config = HTTPConfig::default(); - http_config.port = port; + let http_config = HTTPConfig { + port, + ..Default::default() + }; assert_eq!(http_config.host.to_string(), addr.ip().to_string()); // Run server @@ -281,12 +293,22 @@ mod tests { }); // Make a verifier instance and fetch bundle from server bundle endpoint - let verifier = IONVerifier::new(get_ion_resolver("http://localhost:3000/")); + let verifier = IONVerifier::with_endpoint( + get_ion_resolver("http://localhost:3000/"), + format!("http://127.0.0.1:{}/did/bundle/", port), + ); let did = "did:ion:test:EiBcLZcELCKKtmun_CUImSlb2wcxK5eM8YXSq3MrqNe5wA"; - let result = verifier - .fetch_bundle(did, Some(format!("http://127.0.0.1:{}/did/bundle/", port))) - .await; - - assert!(result.is_ok()); + // Check verification + let root_event_time = 1666971942; + verifier.verify(did, root_event_time).await.unwrap(); + // Check verification for another root + let root_event_time = 1666265405; + verifier + .verify( + "did:ion:test:EiAtHHKFJWAk5AsM3tgCut3OiBY4ekHTf66AAjoysXL65Q", + root_event_time, + ) + .await + .unwrap(); } } diff --git a/trustchain-ion/Cargo.toml b/trustchain-ion/Cargo.toml index f48005c8..8ee38ff8 100644 --- a/trustchain-ion/Cargo.toml +++ b/trustchain-ion/Cargo.toml @@ -24,7 +24,7 @@ reqwest = "0.11" serde = { version = "1.0", features = ["derive"] } serde_jcs = "0.1.0" serde_json = "1.0" -sha256 = "1.1.1" +sha2 = "0.10.7" ssi = { version = "0.4", features = ["http-did", "secp256k1"] } thiserror = "1.0" toml="0.7.2" diff --git a/trustchain-ion/src/commitment.rs b/trustchain-ion/src/commitment.rs index 6b509b2d..84d81402 100644 --- a/trustchain-ion/src/commitment.rs +++ b/trustchain-ion/src/commitment.rs @@ -4,17 +4,22 @@ use bitcoin::MerkleBlock; use bitcoin::Transaction; use ipfs_hasher::IpfsHasher; use serde_json::{json, Value}; +use sha2::{Digest, Sha256}; use ssi::did::Document; use std::convert::TryInto; use std::marker::PhantomData; +use trustchain_core::commitment::TimestampCommitment; use trustchain_core::commitment::{ChainedCommitment, CommitmentChain, CommitmentResult}; use trustchain_core::commitment::{Commitment, CommitmentError}; use trustchain_core::commitment::{DIDCommitment, TrivialCommitment}; use trustchain_core::utils::{HasEndpoints, HasKeys}; +use trustchain_core::verifier::Timestamp; use crate::sidetree::CoreIndexFile; use crate::utils::tx_to_op_return_cid; use crate::utils::{decode_block_header, decode_ipfs_content, reverse_endianness}; +use crate::MERKLE_ROOT_KEY; +use crate::TIMESTAMP_KEY; const 
CID_KEY: &str = "cid"; const DELTAS_KEY: &str = "deltas"; @@ -27,6 +32,40 @@ fn ipfs_decode_candidate_data() -> fn(&[u8]) -> CommitmentResult { |x| decode_ipfs_content(x).map_err(|_| CommitmentError::DataDecodingFailure) } +fn block_header_hasher() -> fn(&[u8]) -> CommitmentResult { + // Candidate data the block header bytes. + |x| { + // Bitcoin block hash is a double SHA256 hash of the block header. + // We use a generic sha2 crate to avoid trust in rust-bitcoin. + let double_hash_hex = hex::encode(Sha256::digest(Sha256::digest(x))); + // For leading (not trailing) zeros, convert the hex to big-endian. + Ok(reverse_endianness(&double_hash_hex).unwrap()) + } +} + +fn block_header_decoder() -> fn(&[u8]) -> CommitmentResult { + |x| { + if x.len() != 80 { + return Err(CommitmentError::DataDecodingError( + "Error: Bitcoin block header must be 80 bytes.".to_string(), + )); + }; + let decoded_header = decode_block_header(x.try_into().map_err(|err| { + CommitmentError::DataDecodingError(format!( + "Error: Bitcoin block header must be 80 bytes with error: {err}" + )) + })?); + + match decoded_header { + Ok(x) => Ok(x), + Err(e) => Err(CommitmentError::DataDecodingError(format!( + "Error decoding Bitcoin block header: {}.", + e + ))), + } + } +} + /// Unit struct for incomplete commitments. pub struct Incomplete; /// Unit struct for complete commitments. @@ -363,18 +402,10 @@ impl BlockHashCommitment { } } } + impl TrivialCommitment for BlockHashCommitment { fn hasher(&self) -> fn(&[u8]) -> CommitmentResult { - // Candidate data the block header bytes. - |x| { - // Bitcoin block hash is a double SHA256 hash of the block header. - // We use a generic SHA256 library to avoid trust in rust-bitcoin. - let hash1_hex = sha256::digest(x); - let hash1_bytes = hex::decode(hash1_hex).unwrap(); - let hash2_hex = sha256::digest(&*hash1_bytes); - // For leading (not trailing) zeros, convert the hex to big-endian. - Ok(reverse_endianness(&hash2_hex).unwrap()) - } + block_header_hasher() } fn candidate_data(&self) -> &[u8] { @@ -383,26 +414,21 @@ impl TrivialCommitment for BlockHashCommitment { /// Deserializes the candidate data into a Block header (as JSON). fn decode_candidate_data(&self) -> fn(&[u8]) -> CommitmentResult { - |x| { - if x.len() != 80 { - return Err(CommitmentError::DataDecodingError( - "Error: Bitcoin block header must be 80 bytes.".to_string(), - )); - }; - let decoded_header = decode_block_header(x.try_into().map_err(|err| { - CommitmentError::DataDecodingError(format!( - "Error: Bitcoin block header must be 80 bytes with error: {err}" - )) - })?); - - match decoded_header { - Ok(x) => Ok(x), - Err(e) => Err(CommitmentError::DataDecodingError(format!( - "Error decoding Bitcoin block header: {}.", - e - ))), + block_header_decoder() + } + + /// Override the filter method to ensure only the Merkle root content is considered. + fn filter(&self) -> Option CommitmentResult>> { + Some(Box::new(move |value| { + if let Value::Object(map) = value { + match map.get(MERKLE_ROOT_KEY) { + Some(Value::String(str)) => Ok(Value::String(str.clone())), + _ => Err(CommitmentError::DataDecodingFailure), + } + } else { + Err(CommitmentError::DataDecodingFailure) } - } + })) } fn to_commitment(self: Box, expected_data: serde_json::Value) -> Box { @@ -480,6 +506,10 @@ impl IONCommitment { chained_commitment: iterated_commitment, }) } + + pub fn chained_commitment(&self) -> &ChainedCommitment { + &self.chained_commitment + } } // Delegate all Commitment trait methods to the wrapped ChainedCommitment. 
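The `block_header_hasher` factored out above double-SHA256-hashes the 80-byte block header and reverses the hex so that the proof-of-work zeros lead rather than trail. A minimal sketch of the same computation, assuming the `sha2` and `hex` crates and reversing the digest bytes directly instead of calling `reverse_endianness`:

```rust
use sha2::{Digest, Sha256};

/// Bitcoin block hash: double SHA-256 of the serialized 80-byte header,
/// hex-encoded in big-endian form so the PoW zeros appear as a prefix.
fn block_hash_hex(header: &[u8; 80]) -> String {
    let digest = Sha256::digest(Sha256::digest(header));
    let mut bytes = digest.to_vec();
    bytes.reverse();
    hex::encode(bytes)
}
```

For the testnet header used in the tests (`TEST_BLOCK_HEADER_HEX`), this reproduces the target hash `000000000000000eaa9e43748768cd8bf34f43aaa03abd9036c463010a0c6e7f`.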
@@ -528,45 +558,129 @@ impl DIDCommitment for IONCommitment { &self.did_doc } - fn timestamp_candidate_data(&self) -> CommitmentResult<&[u8]> { - // The candidate data for the timestamp is the Bitcoin block header, - // which is the candidate data in the last commitment in the chain - // (i.e. the BlockHashCommitment). - if let Some(commitment) = self.chained_commitment.commitments().last() { - return Ok(commitment.candidate_data()); - } - Err(CommitmentError::EmptyChainedCommitment) + fn as_any(&self) -> &dyn std::any::Any { + self } +} - fn decode_timestamp_candidate_data( - &self, - ) -> CommitmentResult CommitmentResult> { - // The required candidate data decoder (function) is the one for the - // Bitcoin block header, which is the decoder in the last commitment - // in the chain (i.e. the BlockHashCommitment). - if let Some(commitment) = self.chained_commitment.commitments().last() { - return Ok(commitment.decode_candidate_data()); - } - Err(CommitmentError::EmptyChainedCommitment) +/// A Commitment whose expected data is a Unix time and hasher +/// and candidate data are obtained from a given DIDCommitment. +pub struct BlockTimestampCommitment { + candidate_data: Vec, + expected_data: Timestamp, +} + +impl BlockTimestampCommitment { + pub fn new(candidate_data: Vec, expected_data: Timestamp) -> CommitmentResult { + // The decoded candidate data must contain the timestamp such that it is found + // by the json_contains function, otherwise the content verification will fail. + Ok(Self { + candidate_data, + expected_data, + }) } } -#[cfg(test)] -mod tests { +impl TrivialCommitment for BlockTimestampCommitment { + fn hasher(&self) -> fn(&[u8]) -> CommitmentResult { + block_header_hasher() + } - use std::str::FromStr; + fn candidate_data(&self) -> &[u8] { + &self.candidate_data + } + /// Deserializes the candidate data into a Block header (as JSON). + fn decode_candidate_data(&self) -> fn(&[u8]) -> CommitmentResult { + block_header_decoder() + } + + /// Override the filter method to ensure only timestamp content is considered. 
+ fn filter(&self) -> Option CommitmentResult>> { + Some(Box::new(move |value| { + if let Value::Object(map) = value { + match map.get(TIMESTAMP_KEY) { + Some(Value::Number(timestamp)) => Ok(Value::Number(timestamp.clone())), + _ => Err(CommitmentError::DataDecodingFailure), + } + } else { + Err(CommitmentError::DataDecodingFailure) + } + })) + } + + fn to_commitment(self: Box, _: Timestamp) -> Box> { + self + } +} + +impl Commitment for BlockTimestampCommitment { + fn expected_data(&self) -> &Timestamp { + &self.expected_data + } +} + +impl TimestampCommitment for BlockTimestampCommitment {} + +#[cfg(test)] +mod tests { use bitcoin::util::psbt::serialize::Serialize; use bitcoin::BlockHash; use ipfs_api_backend_hyper::IpfsClient; + use std::str::FromStr; use trustchain_core::{data::TEST_ROOT_DOCUMENT, utils::json_contains}; use super::*; use crate::{ + data::TEST_BLOCK_HEADER_HEX, utils::{block_header, merkle_proof, query_ipfs, transaction}, - MERKLE_ROOT_KEY, TIMESTAMP_KEY, }; + #[test] + fn test_block_timestamp_commitment() { + let expected_data: Timestamp = 1666265405; + let candidate_data = hex::decode(TEST_BLOCK_HEADER_HEX).unwrap(); + let target = BlockTimestampCommitment::new(candidate_data.clone(), expected_data).unwrap(); + target.verify_content().unwrap(); + let pow_hash = "000000000000000eaa9e43748768cd8bf34f43aaa03abd9036c463010a0c6e7f"; + target.verify(pow_hash).unwrap(); + + // Both calls should instead error with incorrect timestamp + let bad_expected_data: Timestamp = 1666265406; + let target = BlockTimestampCommitment::new(candidate_data, bad_expected_data).unwrap(); + match target.verify_content() { + Err(CommitmentError::FailedContentVerification(s1, s2)) => { + assert_eq!( + (s1, s2), + (format!("{bad_expected_data}"), format!("{expected_data}")) + ) + } + _ => panic!(), + }; + match target.verify(pow_hash) { + Err(CommitmentError::FailedContentVerification(s1, s2)) => { + assert_eq!( + (s1, s2), + (format!("{bad_expected_data}"), format!("{expected_data}")) + ) + } + _ => panic!(), + }; + } + + #[test] + fn test_block_hash_commitment_filter() { + // The expected data is the Merkle root inside the block header. + // For the testnet block at height 2377445, the Merkle root is: + let expected_data = + json!("7dce795209d4b5051da3f5f5293ac97c2ec677687098062044654111529cad69"); + let candidate_data = hex::decode(TEST_BLOCK_HEADER_HEX).unwrap(); + let target = BlockHashCommitment::::new(candidate_data, expected_data); + target.verify_content().unwrap(); + let pow_hash = "000000000000000eaa9e43748768cd8bf34f43aaa03abd9036c463010a0c6e7f"; + target.verify(pow_hash).unwrap(); + } + #[tokio::test] #[ignore = "Integration test requires IPFS"] async fn test_extract_suffix_idx() { @@ -716,21 +830,14 @@ mod tests { // We expect to find the Merkle root in the block header. // For the testnet block at height 2377445, the Merkle root is: let merkle_root_str = "7dce795209d4b5051da3f5f5293ac97c2ec677687098062044654111529cad69"; - let expected_str = format!(r#"{{"{}":"{}"}}"#, MERKLE_ROOT_KEY, merkle_root_str); - let expected_data: serde_json::Value = serde_json::from_str(&expected_str).unwrap(); + let expected_data = json!(merkle_root_str); // The candidate data is the serialized block header. 
let block_header = block_header(&block_hash, None).unwrap(); - let candidate_data_ = bitcoin::consensus::serialize(&block_header); - let candidate_data = candidate_data_.clone(); - - let commitment = BlockHashCommitment::::new(candidate_data, expected_data); - assert!(commitment.verify(target).is_ok()); - - // Check the timestamp is a u32 Unix time. - let binding = commitment.commitment_content().unwrap(); - let actual_timestamp = binding.get(TIMESTAMP_KEY).unwrap(); - assert_eq!(actual_timestamp, &json!(1666265405)); + let candidate_data = bitcoin::consensus::serialize(&block_header); + let commitment = + BlockHashCommitment::::new(candidate_data.clone(), expected_data); + commitment.verify(target).unwrap(); // We do *not* expect a different target to succeed. let bad_target = "100000000000000eaa9e43748768cd8bf34f43aaa03abd9036c463010a0c6e7f"; @@ -743,14 +850,31 @@ mod tests { // We do *not* expect to find a different Merkle root. let bad_merkle_root_str = "6dce795209d4b5051da3f5f5293ac97c2ec677687098062044654111529cad69"; - let bad_expected_data = serde_json::json!(bad_merkle_root_str); - let candidate_data = candidate_data_; - let commitment = BlockHashCommitment::::new(candidate_data, bad_expected_data); + let bad_expected_data = json!(bad_merkle_root_str); + let commitment = + BlockHashCommitment::::new(candidate_data.clone(), bad_expected_data); assert!(commitment.verify(target).is_err()); match commitment.verify(target) { Err(CommitmentError::FailedContentVerification(..)) => (), _ => panic!("Expected FailedContentVerification error."), }; + + // We do *not* expect the (correct) timestamp to be valid expected data, + // since the candidate data is filtered to contain only the Merkle root field. + let wrong_expected_data_commitment = + BlockHashCommitment::::new(candidate_data.clone(), json!(1666265405)); + assert!(wrong_expected_data_commitment.verify(target).is_err()); + + // Also test as timestamp commitment + let expected_data = 1666265405; + let commitment = + BlockTimestampCommitment::new(candidate_data.clone(), expected_data).unwrap(); + commitment.verify_content().unwrap(); + commitment.verify(target).unwrap(); + let bad_expected_data = 1666265406; + let commitment = BlockTimestampCommitment::new(candidate_data, bad_expected_data).unwrap(); + assert!(commitment.verify_content().is_err()); + assert!(commitment.verify(target).is_err()); } #[tokio::test] diff --git a/trustchain-ion/src/lib.rs b/trustchain-ion/src/lib.rs index dd2f934a..246dc4dc 100644 --- a/trustchain-ion/src/lib.rs +++ b/trustchain-ion/src/lib.rs @@ -139,3 +139,7 @@ pub const HASH_PREV_BLOCK_KEY: &str = "hash_prev_block"; pub const TIMESTAMP_KEY: &str = "timestamp"; pub const BITS_KEY: &str = "bits"; pub const NONCE_KEY: &str = "nonce"; + +// Minimum number of zeros for PoW block hash of root +// TODO: set differently for mainnet and testnet with features +pub const MIN_POW_ZEROS: usize = 14; diff --git a/trustchain-ion/src/verifier.rs b/trustchain-ion/src/verifier.rs index 22a1cd71..87d51427 100644 --- a/trustchain-ion/src/verifier.rs +++ b/trustchain-ion/src/verifier.rs @@ -1,5 +1,5 @@ //! Implementation of `Verifier` API for ION DID method. 
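The new `MIN_POW_ZEROS` constant above is what the light-client `validate_pow_hash` (further down in this diff) checks against by counting leading hex zeros in the block hash. A small self-contained illustration of that check, using the block hash from the tests:

```rust
/// Count leading '0' hex characters; each one contributes 4 zero bits of work.
fn leading_hex_zeros(hash: &str) -> usize {
    hash.chars().take_while(|&c| c == '0').count()
}

fn main() {
    const MIN_POW_ZEROS: usize = 14; // value added in trustchain-ion/src/lib.rs
    let hash = "000000000000000eaa9e43748768cd8bf34f43aaa03abd9036c463010a0c6e7f";
    assert_eq!(leading_hex_zeros(hash), 15); // 15 >= MIN_POW_ZEROS, so accepted
    assert!(leading_hex_zeros(hash) >= MIN_POW_ZEROS);
}
```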
-use crate::commitment::IONCommitment; +use crate::commitment::{BlockTimestampCommitment, IONCommitment}; use crate::config::ion_config; use crate::sidetree::{ChunkFile, ChunkFileUri, CoreIndexFile, ProvisionalIndexFile}; use crate::utils::{ @@ -20,12 +20,15 @@ use ssi::did::Document; use ssi::did_resolve::{DIDResolver, DocumentMetadata}; use std::collections::HashMap; use std::convert::TryInto; +use std::marker::PhantomData; use std::str::FromStr; use std::sync::{Arc, Mutex}; -use trustchain_core::commitment::{CommitmentError, DIDCommitment}; +use trustchain_core::commitment::{ + CommitmentChain, CommitmentError, DIDCommitment, TimestampCommitment, +}; use trustchain_core::resolver::{Resolver, ResolverError}; use trustchain_core::utils::get_did_suffix; -use trustchain_core::verifier::{Timestamp, Verifier, VerifierError}; +use trustchain_core::verifier::{Timestamp, VerifiableTimestamp, Verifier, VerifierError}; /// Locator for a transaction on the PoW ledger, given by the pair: /// (block_hash, tx_index_within_block). @@ -76,22 +79,30 @@ impl VerificationBundle { } } +/// Full client zero sized type for marker in `IONVerifier`. +pub struct FullClient; +/// Light client zero sized type for marker in `IONVerifier`. +pub struct LightClient; + /// Trustchain Verifier implementation via the ION DID method. -pub struct IONVerifier +pub struct IONVerifier where T: Sync + Send + DIDResolver, { resolver: Resolver, - rpc_client: bitcoincore_rpc::Client, - ipfs_client: IpfsClient, + rpc_client: Option, + ipfs_client: Option, bundles: Mutex>>, + endpoint: Option, + _marker: PhantomData, } -impl IONVerifier +impl IONVerifier where T: Send + Sync + DIDResolver, { /// Constructs a new IONVerifier. + // TODO: refactor to use config struct over direct config file lookup pub fn new(resolver: Resolver) -> Self { // Construct a Bitcoin RPC client to communicate with the ION Bitcoin node. let rpc_client = bitcoincore_rpc::Client::new( @@ -113,82 +124,49 @@ where let bundles = Mutex::new(HashMap::new()); Self { resolver, - rpc_client, - ipfs_client, + rpc_client: Some(rpc_client), + ipfs_client: Some(ipfs_client), bundles, + endpoint: None, + _marker: PhantomData, } } - /// Returns a DID verification bundle. - pub async fn verification_bundle( - &self, - did: &str, - ) -> Result, VerifierError> { - // Fetch (and store) the bundle if it isn't already available. - if !self.bundles.lock().unwrap().contains_key(did) { - self.fetch_bundle(did, None).await?; - } - Ok(self.bundles.lock().unwrap().get(did).cloned().unwrap()) + /// Gets RPC client. + fn rpc_client(&self) -> &bitcoincore_rpc::Client { + self.rpc_client.as_ref().unwrap() + } + + /// Gets IPFS client. + fn ipfs_client(&self) -> &IpfsClient { + self.ipfs_client.as_ref().unwrap() } /// Fetches the data needed to verify the DID's timestamp and stores it as a verification bundle. // TODO: offline functionality will require interfacing with a persistent cache instead of the // in-memory verifier HashMap. - pub async fn fetch_bundle( - &self, - did: &str, - endpoint: Option, - ) -> Result<(), VerifierError> { - let bundle = match endpoint.as_ref() { - // If running on a Trustchain light client, make an API call to a full node to - // request the bundle. 
- Some(endpoint) => { - let response = reqwest::get(format!("{endpoint}{did}")) - .await - .map_err(|e| { - VerifierError::ErrorFetchingVerificationMaterial( - format!("Error requesting bundle from endpoint: {endpoint}"), - e.into(), - ) - })?; - serde_json::from_str::( - &response - .text() - .map_err(|e| { - VerifierError::ErrorFetchingVerificationMaterial( - format!( - "Error extracting bundle response body from endpoint: {endpoint}" - ), - e.into(), - ) - }) - .await?, - )? - } - _ => { - let (did_doc, did_doc_meta) = self.resolve_did(did).await?; - let (block_hash, tx_index) = self.locate_transaction(did).await?; - let tx = self.fetch_transaction(&block_hash, tx_index)?; - let transaction = bitcoin::util::psbt::serialize::Serialize::serialize(&tx); - let cid = self.op_return_cid(&tx)?; - let core_index_file = self.fetch_core_index_file(&cid).await?; - let provisional_index_file = self.fetch_prov_index_file(&core_index_file).await?; - let chunk_file = self.fetch_chunk_file(&provisional_index_file).await?; - let merkle_block = self.fetch_merkle_block(&block_hash, &tx)?; - let block_header = self.fetch_block_header(&block_hash)?; - // TODO: Consider extracting the block header (bytes) from the MerkleBlock to avoid one RPC call. - VerificationBundle::new( - did_doc, - did_doc_meta, - chunk_file, - provisional_index_file, - core_index_file, - transaction, - merkle_block, - block_header, - ) - } - }; + pub async fn fetch_bundle(&self, did: &str) -> Result<(), VerifierError> { + let (did_doc, did_doc_meta) = self.resolve_did(did).await?; + let (block_hash, tx_index) = self.locate_transaction(did).await?; + let tx = self.fetch_transaction(&block_hash, tx_index)?; + let transaction = bitcoin::util::psbt::serialize::Serialize::serialize(&tx); + let cid = self.op_return_cid(&tx)?; + let core_index_file = self.fetch_core_index_file(&cid).await?; + let provisional_index_file = self.fetch_prov_index_file(&core_index_file).await?; + let chunk_file = self.fetch_chunk_file(&provisional_index_file).await?; + let merkle_block = self.fetch_merkle_block(&block_hash, &tx)?; + let block_header = self.fetch_block_header(&block_hash)?; + // TODO: Consider extracting the block header (bytes) from the MerkleBlock to avoid one RPC call. + let bundle = VerificationBundle::new( + did_doc, + did_doc_meta, + chunk_file, + provisional_index_file, + core_index_file, + transaction, + merkle_block, + block_header, + ); // Insert the bundle into the HashMap of bundles, keyed by the DID. self.bundles .lock() @@ -197,25 +175,12 @@ where Ok(()) } - /// Resolves the given DID to obtain the DID Document and Document Metadata. 
- async fn resolve_did(&self, did: &str) -> Result<(Document, DocumentMetadata), VerifierError> { - let (res_meta, doc, doc_meta) = self.resolver.resolve_as_result(did).await?; - if let (Some(doc), Some(doc_meta)) = (doc, doc_meta) { - Ok((doc, doc_meta)) - } else { - Err(VerifierError::DIDResolutionError( - format!("Missing Document and/or DocumentMetadata for DID: {}", did), - ResolverError::FailureWithMetadata(res_meta).into(), - )) - } - } - fn fetch_transaction( &self, block_hash: &BlockHash, tx_index: u32, ) -> Result { - transaction(block_hash, tx_index, Some(&self.rpc_client)).map_err(|e| { + transaction(block_hash, tx_index, Some(self.rpc_client())).map_err(|e| { VerifierError::ErrorFetchingVerificationMaterial( "Failed to fetch transaction.".to_string(), e.into(), @@ -224,7 +189,7 @@ where } async fn fetch_core_index_file(&self, cid: &str) -> Result, VerifierError> { - query_ipfs(cid, &self.ipfs_client) + query_ipfs(cid, self.ipfs_client()) .map_err(|e| { VerifierError::ErrorFetchingVerificationMaterial( "Failed to fetch core index file".to_string(), @@ -249,7 +214,7 @@ where .ok_or(VerifierError::FailureToFetchVerificationMaterial(format!( "Missing provisional index file URI in core index file: {content}." )))?; - query_ipfs(&provisional_index_file_uri, &self.ipfs_client) + query_ipfs(&provisional_index_file_uri, self.ipfs_client()) .map_err(|e| { VerifierError::ErrorFetchingVerificationMaterial( "Failed to fetch ION provisional index file.".to_string(), @@ -286,7 +251,7 @@ where }; // Get Chunk File - query_ipfs(chunk_file_uri, &self.ipfs_client) + query_ipfs(chunk_file_uri, self.ipfs_client()) .map_err(|err| { VerifierError::ErrorFetchingVerificationMaterial( "Failed to fetch ION provisional index file.".to_string(), @@ -302,7 +267,7 @@ where block_hash: &BlockHash, tx: &Transaction, ) -> Result, VerifierError> { - self.rpc_client + self.rpc_client() .get_tx_out_proof(&[tx.txid()], Some(block_hash)) .map_err(|e| { VerifierError::ErrorFetchingVerificationMaterial( @@ -313,7 +278,7 @@ where } fn fetch_block_header(&self, block_hash: &BlockHash) -> Result, VerifierError> { - block_header(block_hash, Some(&self.rpc_client)) + block_header(block_hash, Some(self.rpc_client())) .map_err(|e| { VerifierError::ErrorFetchingVerificationMaterial( "Failed to fetch Bitcoin block header via RPC.".to_string(), @@ -358,19 +323,115 @@ where .map_err(|_| VerifierError::FailureToGetDIDOperation(suffix.to_owned()))?; // If call to get_network_info fails, return error - self.rpc_client + self.rpc_client() .get_network_info() .map_err(|_| VerifierError::LedgerClientError("getblockhash".to_string()))?; // Convert the block height to a block hash. let block_hash = self - .rpc_client + .rpc_client() .get_block_hash(u64::from(block_height)) .map_err(|_| VerifierError::InvalidBlockHeight(block_height.into()))?; Ok((block_hash, tx_index)) } + /// Gets a DID verification bundle, including a fetch if not initially cached. + pub async fn verification_bundle( + &self, + did: &str, + ) -> Result, VerifierError> { + // Fetch (and store) the bundle if it isn't already available. + if !self.bundles.lock().unwrap().contains_key(did) { + self.fetch_bundle(did).await?; + } + Ok(self.bundles.lock().unwrap().get(did).cloned().unwrap()) + } +} +impl IONVerifier +where + T: Send + Sync + DIDResolver, +{ + /// Constructs a new IONVerifier. 
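The `FullClient`/`LightClient` zero-sized types above select the verifier mode at compile time via a `PhantomData` marker, so the full-node RPC/IPFS clients and the light-client HTTP endpoint can each be `Option`al without branching on a runtime mode flag. A stripped-down sketch of the pattern with illustrative names (not the crate's API):

```rust
use std::marker::PhantomData;

struct FullClient;
struct LightClient;

struct Client<U = FullClient> {
    endpoint: Option<String>,
    _marker: PhantomData<U>,
}

impl Client<FullClient> {
    fn new() -> Self {
        Self { endpoint: None, _marker: PhantomData }
    }
}

impl Client<LightClient> {
    fn with_endpoint(endpoint: String) -> Self {
        Self { endpoint: Some(endpoint), _marker: PhantomData }
    }
}

fn main() {
    let _full = Client::new(); // inferred as Client<FullClient>
    let _light = Client::with_endpoint("http://full-node/did/bundle/".to_string());
}
```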
+ pub fn with_endpoint(resolver: Resolver, endpoint: URL) -> Self { + Self { + resolver, + rpc_client: None, + ipfs_client: None, + bundles: Mutex::new(HashMap::new()), + endpoint: Some(endpoint), + _marker: PhantomData, + } + } + /// Gets endpoint of verifier. + fn endpoint(&self) -> &str { + self.endpoint.as_ref().unwrap() + } + /// Fetches the data needed to verify the DID's timestamp and stores it as a verification bundle. + // TODO: offline functionality will require interfacing with a persistent cache instead of the + // in-memory verifier HashMap. + // If running on a Trustchain light client, make an API call to a full node to request the bundle. + pub async fn fetch_bundle(&self, did: &str) -> Result<(), VerifierError> { + let response = reqwest::get(format!("{}{did}", self.endpoint())) + .await + .map_err(|e| { + VerifierError::ErrorFetchingVerificationMaterial( + format!("Error requesting bundle from endpoint: {}", self.endpoint()), + e.into(), + ) + })?; + let bundle: VerificationBundle = serde_json::from_str( + &response + .text() + .map_err(|e| { + VerifierError::ErrorFetchingVerificationMaterial( + format!( + "Error extracting bundle response body from endpoint: {}", + self.endpoint() + ), + e.into(), + ) + }) + .await?, + )?; + // Insert the bundle into the HashMap of bundles, keyed by the DID. + self.bundles + .lock() + .unwrap() + .insert(did.to_string(), Arc::new(bundle)); + Ok(()) + } + + /// Gets a DID verification bundle, including a fetch if not initially cached. + pub async fn verification_bundle( + &self, + did: &str, + ) -> Result, VerifierError> { + // Fetch (and store) the bundle if it isn't already available. + if !self.bundles.lock().unwrap().contains_key(did) { + self.fetch_bundle(did).await?; + } + Ok(self.bundles.lock().unwrap().get(did).cloned().unwrap()) + } +} + +impl IONVerifier +where + T: Send + Sync + DIDResolver, +{ + /// Resolves the given DID to obtain the DID Document and Document Metadata. + async fn resolve_did(&self, did: &str) -> Result<(Document, DocumentMetadata), VerifierError> { + let (res_meta, doc, doc_meta) = self.resolver.resolve_as_result(did).await?; + if let (Some(doc), Some(doc_meta)) = (doc, doc_meta) { + Ok((doc, doc_meta)) + } else { + Err(VerifierError::DIDResolutionError( + format!("Missing Document and/or DocumentMetadata for DID: {}", did), + ResolverError::FailureWithMetadata(res_meta).into(), + )) + } + } + /// Extracts the IPFS content identifier from the ION OP_RETURN data inside a Bitcoin transaction. 
fn op_return_cid(&self, tx: &Transaction) -> Result { tx_to_op_return_cid(tx) @@ -404,17 +465,79 @@ pub fn content_deltas(chunk_file_json: &Value) -> Result, VerifierErr Ok(chunk_file.deltas) } +// TODO: consider whether duplication can be avoided in the LightClient impl #[async_trait] -impl Verifier for IONVerifier +impl Verifier for IONVerifier where T: Sync + Send + DIDResolver, { - fn expected_timestamp(&self, hash: &str) -> Result { + fn validate_pow_hash(&self, hash: &str) -> Result<(), VerifierError> { let block_hash = BlockHash::from_str(hash) .map_err(|_| VerifierError::InvalidProofOfWorkHash(hash.to_string()))?; - let block_header = block_header(&block_hash, Some(&self.rpc_client)) + let _block_header = block_header(&block_hash, Some(self.rpc_client())) .map_err(|_| VerifierError::FailureToGetBlockHeader(hash.to_string()))?; - Ok(block_header.time) + Ok(()) + } + + async fn did_commitment(&self, did: &str) -> Result, VerifierError> { + let bundle = self.verification_bundle(did).await?; + Ok(construct_commitment(bundle).map(Box::new)?) + } + + fn resolver(&self) -> &Resolver { + &self.resolver + } + + async fn verifiable_timestamp( + &self, + did: &str, + expected_timestamp: Timestamp, + ) -> Result, VerifierError> { + let did_commitment = self.did_commitment(did).await?; + // Downcast to IONCommitment to extract data for constructing a TimestampCommitment. + let ion_commitment = did_commitment + .as_any() + .downcast_ref::() + .unwrap(); // Safe because IONCommitment implements DIDCommitment. + let timestamp_commitment = Box::new(BlockTimestampCommitment::new( + ion_commitment + .chained_commitment() + .commitments() + .last() + .expect("Unexpected empty commitment chain.") + .candidate_data() + .to_owned(), + expected_timestamp, + )?); + Ok(Box::new(IONTimestamp::new( + did_commitment, + timestamp_commitment, + ))) + } +} + +#[async_trait] +impl Verifier for IONVerifier +where + T: Sync + Send + DIDResolver, +{ + fn validate_pow_hash(&self, hash: &str) -> Result<(), VerifierError> { + // Check the PoW difficulty of the hash against the configured minimum threshold. + // TODO: update Cargo.toml to use version 0.30.0+ of the bitcoin Rust library + // and specify a minimum work/target in the Trustchain client config, see: + // https://docs.rs/bitcoin/0.30.0/src/bitcoin/pow.rs.html#72-78 + // In the meantime, just check for a minimum number of leading zeros in the hash. + if hash.chars().take_while(|&c| c == '0').count() < crate::MIN_POW_ZEROS { + return Err(VerifierError::InvalidProofOfWorkHash(format!( + "{}, only has {} zeros but MIN_POW_ZEROS is {}", + hash, + hash.chars().take_while(|&c| c == '0').count(), + crate::MIN_POW_ZEROS + ))); + } + + // If the PoW difficulty is satisfied, accept the timestamp in the DID commitment. + Ok(()) } async fn did_commitment(&self, did: &str) -> Result, VerifierError> { @@ -425,6 +548,70 @@ where fn resolver(&self) -> &Resolver { &self.resolver } + + async fn verifiable_timestamp( + &self, + did: &str, + expected_timestamp: Timestamp, + ) -> Result, VerifierError> { + let did_commitment = self.did_commitment(did).await?; + // Downcast to IONCommitment to extract data for constructing a TimestampCommitment. + let ion_commitment = did_commitment + .as_any() + .downcast_ref::() + .unwrap(); // Safe because IONCommitment implements DIDCommitment. 
+ let timestamp_commitment = Box::new(BlockTimestampCommitment::new( + ion_commitment + .chained_commitment() + .commitments() + .last() + .expect("Unexpected empty commitment chain.") + .candidate_data() + .to_owned(), + expected_timestamp, + )?); + Ok(Box::new(IONTimestamp::new( + did_commitment, + timestamp_commitment, + ))) + } +} + +/// Contains the corresponding `DIDCommitment` and `TimestampCommitment` for a given DID. +pub struct IONTimestamp { + did_commitment: Box, + timestamp_commitment: Box, +} + +impl IONTimestamp { + fn new( + did_commitment: Box, + timestamp_commitment: Box, + ) -> Self { + Self { + did_commitment, + timestamp_commitment, + } + } + + /// Gets the DID. + pub fn did(&self) -> &str { + self.did_commitment.did() + } + /// Gets the DID Document. + pub fn did_document(&self) -> &Document { + self.did_commitment.did_document() + } +} + +impl VerifiableTimestamp for IONTimestamp { + fn did_commitment(&self) -> &dyn DIDCommitment { + self.did_commitment.as_ref() + } + + fn timestamp_commitment(&self) -> &dyn TimestampCommitment { + self.timestamp_commitment.as_ref() + } } #[cfg(test)] @@ -504,7 +691,7 @@ mod tests { BlockHash::from_str("000000000000000eaa9e43748768cd8bf34f43aaa03abd9036c463010a0c6e7f") .unwrap(); let tx_index = 3; - let tx = transaction(&block_hash, tx_index, Some(&target.rpc_client)).unwrap(); + let tx = transaction(&block_hash, tx_index, Some(target.rpc_client())).unwrap(); let actual = target.op_return_cid(&tx).unwrap(); assert_eq!(expected, actual); @@ -576,7 +763,7 @@ mod tests { assert!(target.bundles.lock().unwrap().is_empty()); let did = "did:ion:test:EiCClfEdkTv_aM3UnBBhlOV89LlGhpQAbfeZLFdFxVFkEg"; - target.fetch_bundle(did, None).await.unwrap(); + target.fetch_bundle(did).await.unwrap(); assert!(!target.bundles.lock().unwrap().is_empty()); assert_eq!(target.bundles.lock().unwrap().len(), 1); diff --git a/trustchain-ion/tests/verifier.rs b/trustchain-ion/tests/verifier.rs index db8cb7ba..91f243d4 100644 --- a/trustchain-ion/tests/verifier.rs +++ b/trustchain-ion/tests/verifier.rs @@ -1,5 +1,4 @@ -use serde_json::json; -use trustchain_core::commitment::Commitment; +use trustchain_core::utils::type_of; use trustchain_core::verifier::{Timestamp, Verifier}; use trustchain_ion::get_ion_resolver; use trustchain_ion::verifier::IONVerifier; @@ -32,9 +31,10 @@ async fn trustchain_verification() { async fn test_verifiable_timestamp() { let resolver = get_ion_resolver("http://localhost:3000/"); let target = IONVerifier::new(resolver); + let timestamp: Timestamp = 1666265405; let did = "did:ion:test:EiCClfEdkTv_aM3UnBBhlOV89LlGhpQAbfeZLFdFxVFkEg"; - let result = target.verifiable_timestamp(did).await; + let result = target.verifiable_timestamp(did, timestamp).await; assert!(result.is_ok()); @@ -43,24 +43,34 @@ async fn test_verifiable_timestamp() { // Check that the DID commitment is the expected PoW hash. // See https://blockstream.info/testnet/block/000000000000000eaa9e43748768cd8bf34f43aaa03abd9036c463010a0c6e7f let expected_hash = "000000000000000eaa9e43748768cd8bf34f43aaa03abd9036c463010a0c6e7f"; - assert_eq!(verifiable_timestamp.hash().unwrap(), expected_hash); + assert_eq!( + verifiable_timestamp.did_commitment().hash().unwrap(), + expected_hash + ); + assert_eq!( + verifiable_timestamp.timestamp_commitment().hash().unwrap(), + expected_hash + ); // Check that the DID timestamp is correct by comparing to the known header. 
- let timestamp: Timestamp = 1666265405; assert_eq!(verifiable_timestamp.timestamp(), timestamp); // Confirm that the same timestamp is the expected data in the TimestampCommitment. assert_eq!( - verifiable_timestamp - .timestamp_commitment() - .unwrap() - .expected_data(), - &json!(timestamp) + verifiable_timestamp.timestamp_commitment().expected_data(), + ×tamp + ); + assert_eq!( + type_of(verifiable_timestamp.timestamp_commitment().expected_data()), + type_of(×tamp) ); // Verify the timestamp. - let _ = target.verify_timestamp(&verifiable_timestamp); + verifiable_timestamp + .verify(&verifiable_timestamp.timestamp_commitment().hash().unwrap()) + .unwrap(); // Verify a second time to check data is not consumed - let actual = target.verify_timestamp(&verifiable_timestamp); - assert!(actual.is_ok()); + verifiable_timestamp + .verify(&verifiable_timestamp.timestamp_commitment().hash().unwrap()) + .unwrap(); }
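Taken together, the verifier changes give two construction paths that share the same `Verifier::verify` entry point: a full client backed by local Bitcoin RPC and IPFS nodes, and a light client that fetches verification bundles from a full node's HTTP API. A usage sketch based on the values exercised in the tests above (the bundle endpoint URL is illustrative, and both calls require the respective services to be running):

```rust
use trustchain_core::verifier::Verifier;
use trustchain_ion::get_ion_resolver;
use trustchain_ion::verifier::IONVerifier;

#[tokio::main]
async fn main() {
    let did = "did:ion:test:EiCClfEdkTv_aM3UnBBhlOV89LlGhpQAbfeZLFdFxVFkEg";
    let root_event_time = 1666265405;

    // Full client: resolves the DID and assembles the verification bundle
    // itself via ION, Bitcoin RPC and IPFS.
    let full = IONVerifier::new(get_ion_resolver("http://localhost:3000/"));
    full.verify(did, root_event_time).await.unwrap();

    // Light client: requests the verification bundle from a full node's
    // /did/bundle/ endpoint instead of talking to the ledger directly.
    let light = IONVerifier::with_endpoint(
        get_ion_resolver("http://localhost:3000/"),
        "http://127.0.0.1:8081/did/bundle/".to_string(),
    );
    light.verify(did, root_event_time).await.unwrap();
}
```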