diff --git a/.github/workflows/tai64_update_leap_seconds.yml b/.github/workflows/tai64_update_leap_seconds.yml new file mode 100644 index 000000000..df0be3a87 --- /dev/null +++ b/.github/workflows/tai64_update_leap_seconds.yml @@ -0,0 +1,28 @@ +name: Update Leap Seconds +on: + schedule: + - cron: 0 0 * * 1 + +jobs: + update: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Update leap seconds in code + run: | + curl -o leap_seconds_list -L https://data.iana.org/time-zones/data/leap-seconds.list + number=$(grep -v '^#' leap_seconds_list | tail -n1 | awk '{print $2}') + sed -i "s/\(1970-01-01 00:00:\)[0-9]\+ TAI/\1${number} TAI/" tai64/src/lib.rs + sed -i -E 's/(Self\()[0-9]+ \+ \(1 << 62\)\)/\1'"${number}"' + (1 << 62))/' tai64/src/lib.rs + rm leap_seconds_list + - name: Create Pull Request + uses: peter-evans/create-pull-request@v7 + with: + commit-message: update leap seconds in tai64 + title: Update leap seconds in tai64 + body: 'Following this source: https://data.iana.org/time-zones/data/leap-seconds.list, the leap seconds counter has been updated.' + branch: update-leap-seconds + base: master + delete-branch: true diff --git a/Cargo.lock b/Cargo.lock index 588d739c7..e53bfe18e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -265,9 +265,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.7.2" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" +checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" [[package]] name = "cast" @@ -405,10 +405,12 @@ checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" [[package]] name = "const-oid" -version = "0.10.0-rc.2" +version = "0.10.0-rc.3" dependencies = [ "arbitrary", "hex-literal", + "proptest", + "regex", ] [[package]] @@ -852,9 +854,9 @@ dependencies = [ [[package]] name = "hybrid-array" -version = "0.2.0-rc.11" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5a41e5b0754cae5aaf7915f1df1147ba8d316fc6e019cfcc00fbaba96d5e030" +checksum = "45a9a965bb102c1c891fb017c09a05c965186b1265a207640f323ddd009f9deb" dependencies = [ "typenum", "zeroize", @@ -1656,7 +1658,7 @@ dependencies = [ [[package]] name = "tai64" -version = "5.0.0-pre" +version = "4.1.0" dependencies = [ "serde", "zeroize", @@ -1751,9 +1753,9 @@ dependencies = [ [[package]] name = "tokio" -version = "1.40.0" +version = "1.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" +checksum = "145f3413504347a2be84393cc8a7d2fb4d863b375909ea59f2158261aa258bbb" dependencies = [ "backtrace", "pin-project-lite", diff --git a/cms/Cargo.toml b/cms/Cargo.toml index ff8650d40..8758ef7c8 100644 --- a/cms/Cargo.toml +++ b/cms/Cargo.toml @@ -44,7 +44,7 @@ rand = "0.8.5" rsa = { version = "=0.10.0-pre.3", features = ["sha2"] } ecdsa = { version = "=0.17.0-pre.9", features = ["digest", "pem"] } p256 = "=0.14.0-pre.2" -tokio = { version = "1.40.0", features = ["macros", "rt"] } +tokio = { version = "1.41.0", features = ["macros", "rt"] } x509-cert = { version = "=0.3.0-pre.0", features = ["pem"] } [features] diff --git a/const-oid/Cargo.toml b/const-oid/Cargo.toml index 50e7a878b..7d00671f0 100644 --- a/const-oid/Cargo.toml +++ b/const-oid/Cargo.toml @@ -1,6 +1,6 @@ [package] name = 
"const-oid" -version = "0.10.0-rc.2" +version = "0.10.0-rc.3" authors = ["RustCrypto Developers"] license = "Apache-2.0 OR MIT" description = """ @@ -22,6 +22,8 @@ arbitrary = { version = "1.2", optional = true, features = ["derive"] } [dev-dependencies] hex-literal = "0.4" +proptest = "1" +regex = "1" [features] db = [] diff --git a/const-oid/src/arcs.rs b/const-oid/src/arcs.rs index 95cbbe0ef..7e5056cee 100644 --- a/const-oid/src/arcs.rs +++ b/const-oid/src/arcs.rs @@ -1,21 +1,21 @@ //! Arcs are integer values which exist within an OID's hierarchy. use crate::{Error, Result}; -use core::mem::size_of; #[cfg(doc)] use crate::ObjectIdentifier; -/// Type alias used to represent an "arc" (i.e. integer identifier value). +/// Type alias used to represent an "arc", i.e. integer identifier value, where an OID comprises a +/// sequence of arcs. /// -/// X.660 does not define a maximum size of an arc. +/// X.660 does not define a maximum size of an arc. We instead follow Mozilla* conventions for +/// maximum values of an arc, with a maximum value of 2^32-1 (4294967295), a.k.a. [`u32::MAX`] +/// with [`Arc`] being a type alias for [`u32`]. /// -/// The current representation is `u32`, which has been selected as being -/// sufficient to cover the current PKCS/PKIX use cases this library has been -/// used in conjunction with. +/// Note that this means we deliberately do *NOT* support UUIDs used as OIDs. /// -/// Future versions may potentially make it larger if a sufficiently important -/// use case is discovered. +/// *NOTE: please see this study for a survey of how various OID libraries handle maximum arcs: +/// pub type Arc = u32; /// Maximum value of the first arc in an OID. @@ -25,7 +25,10 @@ pub(crate) const ARC_MAX_FIRST: Arc = 2; pub(crate) const ARC_MAX_SECOND: Arc = 39; /// Maximum number of bytes supported in an arc. -const ARC_MAX_BYTES: usize = size_of::(); +/// +/// Note that OIDs are base 128 encoded (with continuation bits), so we must consider how many bytes +/// are required when each byte can only represent 7-bits of the input. +const ARC_MAX_BYTES: usize = (Arc::BITS as usize).div_ceil(7); /// Maximum value of the last byte in an arc. const ARC_MAX_LAST_OCTET: u8 = 0b11110000; // Max bytes of leading 1-bits diff --git a/const-oid/src/checked.rs b/const-oid/src/checked.rs index 7ff16a2a7..d8dacf3c7 100644 --- a/const-oid/src/checked.rs +++ b/const-oid/src/checked.rs @@ -5,7 +5,27 @@ macro_rules! checked_add { ($a:expr, $b:expr) => { match $a.checked_add($b) { Some(n) => n, - None => return Err(Error::Length), + None => return Err(Error::Overflow), + } + }; +} + +/// `const fn`-friendly checked subtraction helper. +macro_rules! checked_sub { + ($a:expr, $b:expr) => { + match $a.checked_sub($b) { + Some(n) => n, + None => return Err(Error::Overflow), + } + }; +} + +/// `const fn`-friendly checked multiplication helper. +macro_rules! checked_mul { + ($a:expr, $b:expr) => { + match $a.checked_mul($b) { + Some(n) => n, + None => return Err(Error::Overflow), } }; } diff --git a/const-oid/src/encoder.rs b/const-oid/src/encoder.rs index 3076a3499..c901e492a 100644 --- a/const-oid/src/encoder.rs +++ b/const-oid/src/encoder.rs @@ -24,7 +24,7 @@ enum State { /// Initial state - no arcs yet encoded. Initial, - /// First arc parsed. + /// First arc has been supplied and stored as the wrapped [`Arc`]. FirstArc(Arc), /// Encoding base 128 body of the OID. 
@@ -61,22 +61,20 @@ impl Encoder { self.state = State::FirstArc(arc); Ok(self) } - // Ensured not to overflow by `ARC_MAX_SECOND` check - #[allow(clippy::arithmetic_side_effects)] State::FirstArc(first_arc) => { if arc > ARC_MAX_SECOND { return Err(Error::ArcInvalid { arc }); } self.state = State::Body; - self.bytes[0] = (first_arc * (ARC_MAX_SECOND + 1)) as u8 + arc as u8; + self.bytes[0] = checked_add!( + checked_mul!(checked_add!(ARC_MAX_SECOND, 1), first_arc), + arc + ) as u8; self.cursor = 1; Ok(self) } - State::Body => { - let nbytes = base128_len(arc); - self.encode_base128(arc, nbytes) - } + State::Body => self.encode_base128(arc), } } @@ -94,56 +92,48 @@ impl Encoder { Ok(ObjectIdentifier { ber }) } - /// Encode a single byte of a Base 128 value. - const fn encode_base128(mut self, n: u32, remaining_len: usize) -> Result { - if self.cursor >= MAX_SIZE { + /// Encode base 128. + const fn encode_base128(mut self, arc: Arc) -> Result { + let nbytes = base128_len(arc); + let end_pos = checked_add!(self.cursor, nbytes); + + if end_pos > MAX_SIZE { return Err(Error::Length); } - let mask = if remaining_len > 0 { 0b10000000 } else { 0 }; - let (hi, lo) = split_high_bits(n); - self.bytes[self.cursor] = hi | mask; - self.cursor = checked_add!(self.cursor, 1); - - match remaining_len.checked_sub(1) { - Some(len) => self.encode_base128(lo, len), - None => Ok(self), + let mut i = 0; + while i < nbytes { + // TODO(tarcieri): use `?` when stable in `const fn` + self.bytes[self.cursor] = match base128_byte(arc, i, nbytes) { + Ok(byte) => byte, + Err(e) => return Err(e), + }; + self.cursor = checked_add!(self.cursor, 1); + i = checked_add!(i, 1); } + + Ok(self) } } -/// Compute the length - 1 of an arc when encoded in base 128. +/// Compute the length of an arc when encoded in base 128. const fn base128_len(arc: Arc) -> usize { match arc { - 0..=0x7f => 0, - 0x80..=0x3fff => 1, - 0x4000..=0x1fffff => 2, - 0x200000..=0x1fffffff => 3, - _ => 4, + 0..=0x7f => 1, + 0x80..=0x3fff => 2, + 0x4000..=0x1fffff => 3, + 0x200000..=0x1fffffff => 4, + _ => 5, } } -/// Split the highest 7-bits of an [`Arc`] from the rest of an arc. -/// -/// Returns: `(hi, lo)` -// TODO(tarcieri): always use checked arithmetic -#[allow(clippy::arithmetic_side_effects)] -const fn split_high_bits(arc: Arc) -> (u8, Arc) { - if arc < 0x80 { - return (arc as u8, 0); - } - - let hi_bit = 32 - arc.leading_zeros(); - let hi_bit_mod7 = hi_bit % 7; - let upper_bit_pos = hi_bit - - if hi_bit > 0 && hi_bit_mod7 == 0 { - 7 - } else { - hi_bit_mod7 - }; - let upper_bits = arc >> upper_bit_pos; - let lower_bits = arc ^ (upper_bits << upper_bit_pos); - (upper_bits as u8, lower_bits) +/// Compute the big endian base 128 encoding of the given [`Arc`] at the given byte. 
+const fn base128_byte(arc: Arc, pos: usize, total: usize) -> Result { + debug_assert!(pos < total); + let last_byte = checked_add!(pos, 1) == total; + let mask = if last_byte { 0 } else { 0b10000000 }; + let shift = checked_sub!(checked_sub!(total, pos), 1) * 7; + Ok(((arc >> shift) & 0b1111111) as u8 | mask) } #[cfg(test)] @@ -155,6 +145,17 @@ mod tests { /// OID `1.2.840.10045.2.1` encoded as ASN.1 BER/DER const EXAMPLE_OID_BER: &[u8] = &hex!("2A8648CE3D0201"); + #[test] + fn base128_byte() { + let example_arc = 0x44332211; + assert_eq!(super::base128_len(example_arc), 5); + assert_eq!(super::base128_byte(example_arc, 0, 5).unwrap(), 0b10000100); + assert_eq!(super::base128_byte(example_arc, 1, 5).unwrap(), 0b10100001); + assert_eq!(super::base128_byte(example_arc, 2, 5).unwrap(), 0b11001100); + assert_eq!(super::base128_byte(example_arc, 3, 5).unwrap(), 0b11000100); + assert_eq!(super::base128_byte(example_arc, 4, 5).unwrap(), 0b10001); + } + #[test] fn encode() { let encoder = Encoder::<7>::new(); diff --git a/const-oid/src/error.rs b/const-oid/src/error.rs index faa01769b..263746c6f 100644 --- a/const-oid/src/error.rs +++ b/const-oid/src/error.rs @@ -37,6 +37,14 @@ pub enum Error { /// OID length is invalid (too short or too long). Length, + /// Arithmetic overflow (or underflow) errors. + /// + /// These generally indicate a bug in the `const-oid` crate. + Overflow, + + /// Repeated `..` characters in input data. + RepeatedDot, + /// Trailing `.` character at end of input. TrailingDot, } @@ -53,6 +61,8 @@ impl Error { Error::DigitExpected { .. } => panic!("OID expected to start with digit"), Error::Empty => panic!("OID value is empty"), Error::Length => panic!("OID length invalid"), + Error::Overflow => panic!("arithmetic calculation overflowed"), + Error::RepeatedDot => panic!("repeated consecutive '..' characters in OID"), Error::TrailingDot => panic!("OID ends with invalid trailing '.'"), } } @@ -69,6 +79,8 @@ impl fmt::Display for Error { } Error::Empty => f.write_str("OID value is empty"), Error::Length => f.write_str("OID length invalid"), + Error::Overflow => f.write_str("arithmetic calculation overflowed"), + Error::RepeatedDot => f.write_str("repeated consecutive '..' characters in OID"), Error::TrailingDot => f.write_str("OID ends with invalid trailing '.'"), } } diff --git a/const-oid/src/parser.rs b/const-oid/src/parser.rs index 41020f037..5b5155b36 100644 --- a/const-oid/src/parser.rs +++ b/const-oid/src/parser.rs @@ -8,7 +8,7 @@ use crate::{encoder::Encoder, Arc, Error, ObjectIdentifier, Result}; #[derive(Debug)] pub(crate) struct Parser { /// Current arc in progress - current_arc: Arc, + current_arc: Option, /// BER/DER encoder encoder: Encoder<{ ObjectIdentifier::MAX_SIZE }>, @@ -25,7 +25,7 @@ impl Parser { match bytes[0] { b'0'..=b'9' => Self { - current_arc: 0, + current_arc: None, encoder: Encoder::new(), } .parse_bytes(bytes), @@ -42,33 +42,51 @@ impl Parser { const fn parse_bytes(mut self, bytes: &[u8]) -> Result { match bytes { // TODO(tarcieri): use `?` when stable in `const fn` - [] => match self.encoder.arc(self.current_arc) { - Ok(encoder) => { - self.encoder = encoder; - Ok(self) - } - Err(err) => Err(err), + [] => match self.current_arc { + Some(arc) => match self.encoder.arc(arc) { + Ok(encoder) => { + self.encoder = encoder; + Ok(self) + } + Err(err) => Err(err), + }, + None => Err(Error::TrailingDot), }, - // TODO(tarcieri): checked arithmetic - #[allow(clippy::arithmetic_side_effects)] [byte @ b'0'..=b'9', remaining @ ..] 
=> { let digit = byte.saturating_sub(b'0'); - self.current_arc = self.current_arc * 10 + digit as Arc; + let arc = match self.current_arc { + Some(arc) => arc, + None => 0, + }; + + // TODO(tarcieri): use `and_then` when const traits are stable + self.current_arc = match arc.checked_mul(10) { + Some(arc) => match arc.checked_add(digit as Arc) { + None => return Err(Error::ArcTooBig), + Some(arc) => Some(arc), + }, + None => return Err(Error::ArcTooBig), + }; self.parse_bytes(remaining) } [b'.', remaining @ ..] => { - if remaining.is_empty() { - return Err(Error::TrailingDot); - } + match self.current_arc { + Some(arc) => { + if remaining.is_empty() { + return Err(Error::TrailingDot); + } - // TODO(tarcieri): use `?` when stable in `const fn` - match self.encoder.arc(self.current_arc) { - Ok(encoder) => { - self.encoder = encoder; - self.current_arc = 0; - self.parse_bytes(remaining) + // TODO(tarcieri): use `?` when stable in `const fn` + match self.encoder.arc(arc) { + Ok(encoder) => { + self.encoder = encoder; + self.current_arc = None; + self.parse_bytes(remaining) + } + Err(err) => Err(err), + } } - Err(err) => Err(err), + None => Err(Error::RepeatedDot), } } [byte, ..] => Err(Error::DigitExpected { actual: *byte }), diff --git a/const-oid/tests/oid.rs b/const-oid/tests/oid.rs index bebbbadf5..92bfc49c4 100644 --- a/const-oid/tests/oid.rs +++ b/const-oid/tests/oid.rs @@ -29,52 +29,85 @@ const EXAMPLE_OID_LARGE_ARC_0: ObjectIdentifier = ObjectIdentifier::new_unwrap(crate::EXAMPLE_OID_LARGE_ARC_0_STR); /// Example OID value with a large arc -const EXAMPLE_OID_LARGE_ARC_1_STR: &str = "0.9.2342.19200300.100.1.1"; -const EXAMPLE_OID_LARGE_ARC_1_BER: &[u8] = &hex!("0992268993F22C640101"); +const EXAMPLE_OID_LARGE_ARC_1_STR: &str = "1.1.1.60817410.1"; +const EXAMPLE_OID_LARGE_ARC_1_BER: &[u8] = &hex!("29019D80800201"); const EXAMPLE_OID_LARGE_ARC_1: ObjectIdentifier = ObjectIdentifier::new_unwrap(EXAMPLE_OID_LARGE_ARC_1_STR); +/// Example OID value with a large arc (namely `u32::MAX`, the edge case) +const EXAMPLE_OID_LARGE_ARC_2_STR: &str = "1.2.4294967295"; +const EXAMPLE_OID_LARGE_ARC_2_BER: &[u8] = &hex!("2A8FFFFFFF7F"); +const EXAMPLE_OID_LARGE_ARC_2: ObjectIdentifier = + ObjectIdentifier::new_unwrap(crate::EXAMPLE_OID_LARGE_ARC_2_STR); + /// Create an OID from a string. 
pub fn oid(s: &str) -> ObjectIdentifier { ObjectIdentifier::new(s).unwrap() } +/// 0.9.2342.19200300.100.1.1 #[test] -fn from_bytes() { - let oid0 = ObjectIdentifier::from_bytes(EXAMPLE_OID_0_BER).unwrap(); - assert_eq!(oid0.arc(0).unwrap(), 0); - assert_eq!(oid0.arc(1).unwrap(), 9); - assert_eq!(oid0.arc(2).unwrap(), 2342); - assert_eq!(oid0, EXAMPLE_OID_0); +fn from_bytes_oid_0() { + let oid = ObjectIdentifier::from_bytes(EXAMPLE_OID_0_BER).unwrap(); + assert_eq!(oid, EXAMPLE_OID_0); + assert_eq!(oid.arc(0).unwrap(), 0); + assert_eq!(oid.arc(1).unwrap(), 9); + assert_eq!(oid.arc(2).unwrap(), 2342); +} - let oid1 = ObjectIdentifier::from_bytes(EXAMPLE_OID_1_BER).unwrap(); - assert_eq!(oid1.arc(0).unwrap(), 1); - assert_eq!(oid1.arc(1).unwrap(), 2); - assert_eq!(oid1.arc(2).unwrap(), 840); - assert_eq!(oid1, EXAMPLE_OID_1); +/// 1.2.840.10045.2.1 +#[test] +fn from_bytes_oid_1() { + let oid = ObjectIdentifier::from_bytes(EXAMPLE_OID_1_BER).unwrap(); + assert_eq!(oid, EXAMPLE_OID_1); + assert_eq!(oid.arc(0).unwrap(), 1); + assert_eq!(oid.arc(1).unwrap(), 2); + assert_eq!(oid.arc(2).unwrap(), 840); +} - let oid2 = ObjectIdentifier::from_bytes(EXAMPLE_OID_2_BER).unwrap(); - assert_eq!(oid2.arc(0).unwrap(), 2); - assert_eq!(oid2.arc(1).unwrap(), 16); - assert_eq!(oid2.arc(2).unwrap(), 840); - assert_eq!(oid2, EXAMPLE_OID_2); +/// 2.16.840.1.101.3.4.1.42 +#[test] +fn from_bytes_oid_2() { + let oid = ObjectIdentifier::from_bytes(EXAMPLE_OID_2_BER).unwrap(); + assert_eq!(oid, EXAMPLE_OID_2); + assert_eq!(oid.arc(0).unwrap(), 2); + assert_eq!(oid.arc(1).unwrap(), 16); + assert_eq!(oid.arc(2).unwrap(), 840); +} - let oid_largearc0 = ObjectIdentifier::from_bytes(EXAMPLE_OID_LARGE_ARC_0_BER).unwrap(); - assert_eq!(oid_largearc0.arc(0).unwrap(), 1); - assert_eq!(oid_largearc0.arc(1).unwrap(), 2); - assert_eq!(oid_largearc0.arc(2).unwrap(), 16384); - assert_eq!(oid_largearc0.arc(3), None); - assert_eq!(oid_largearc0, EXAMPLE_OID_LARGE_ARC_0); +/// 1.2.16384 +#[test] +fn from_bytes_oid_largearc_0() { + let oid = ObjectIdentifier::from_bytes(EXAMPLE_OID_LARGE_ARC_0_BER).unwrap(); + assert_eq!(oid, EXAMPLE_OID_LARGE_ARC_0); + assert_eq!(oid.arc(0).unwrap(), 1); + assert_eq!(oid.arc(1).unwrap(), 2); + assert_eq!(oid.arc(2).unwrap(), 16384); + assert_eq!(oid.arc(3), None); +} - let oid_largearc1 = ObjectIdentifier::from_bytes(EXAMPLE_OID_LARGE_ARC_1_BER).unwrap(); - assert_eq!(oid_largearc1.arc(0).unwrap(), 0); - assert_eq!(oid_largearc1.arc(1).unwrap(), 9); - assert_eq!(oid_largearc1.arc(2).unwrap(), 2342); - assert_eq!(oid_largearc1.arc(3).unwrap(), 19200300); - assert_eq!(oid_largearc1.arc(4).unwrap(), 100); - assert_eq!(oid_largearc1.arc(5).unwrap(), 1); - assert_eq!(oid_largearc1.arc(6).unwrap(), 1); - assert_eq!(oid_largearc1, EXAMPLE_OID_LARGE_ARC_1); +/// 1.1.1.60817410.1 +#[test] +fn from_bytes_oid_largearc_1() { + let oid = ObjectIdentifier::from_bytes(EXAMPLE_OID_LARGE_ARC_1_BER).unwrap(); + assert_eq!(oid, EXAMPLE_OID_LARGE_ARC_1); + assert_eq!(oid.arc(0).unwrap(), 1); + assert_eq!(oid.arc(1).unwrap(), 1); + assert_eq!(oid.arc(2).unwrap(), 1); + assert_eq!(oid.arc(3).unwrap(), 60817410); + assert_eq!(oid.arc(4).unwrap(), 1); + assert_eq!(oid.arc(5), None); +} + +/// 1.2.4294967295 +#[test] +fn from_bytes_oid_largearc_2() { + let oid = ObjectIdentifier::from_bytes(EXAMPLE_OID_LARGE_ARC_2_BER).unwrap(); + assert_eq!(oid, EXAMPLE_OID_LARGE_ARC_2); + assert_eq!(oid.arc(0).unwrap(), 1); + assert_eq!(oid.arc(1).unwrap(), 2); + assert_eq!(oid.arc(2).unwrap(), 4294967295); + assert_eq!(oid.arc(3), None); 
// Empty assert_eq!(ObjectIdentifier::from_bytes(&[]), Err(Error::Empty)); @@ -108,15 +141,21 @@ fn from_str() { let oid_largearc1 = EXAMPLE_OID_LARGE_ARC_1_STR .parse::() .unwrap(); - assert_eq!(oid_largearc1.arc(0).unwrap(), 0); - assert_eq!(oid_largearc1.arc(1).unwrap(), 9); - assert_eq!(oid_largearc1.arc(2).unwrap(), 2342); - assert_eq!(oid_largearc1.arc(3).unwrap(), 19200300); - assert_eq!(oid_largearc1.arc(4).unwrap(), 100); - assert_eq!(oid_largearc1.arc(5).unwrap(), 1); - assert_eq!(oid_largearc1.arc(6).unwrap(), 1); + assert_eq!(oid_largearc1.arc(0).unwrap(), 1); + assert_eq!(oid_largearc1.arc(1).unwrap(), 1); + assert_eq!(oid_largearc1.arc(2).unwrap(), 1); + assert_eq!(oid_largearc1.arc(3).unwrap(), 60817410); + assert_eq!(oid_largearc1.arc(4).unwrap(), 1); assert_eq!(oid_largearc1, EXAMPLE_OID_LARGE_ARC_1); + let oid_largearc2 = EXAMPLE_OID_LARGE_ARC_2_STR + .parse::() + .unwrap(); + assert_eq!(oid_largearc2.arc(0).unwrap(), 1); + assert_eq!(oid_largearc2.arc(1).unwrap(), 2); + assert_eq!(oid_largearc2.arc(2).unwrap(), 4294967295); + assert_eq!(oid_largearc2, EXAMPLE_OID_LARGE_ARC_2); + // Truncated assert_eq!( "1.2.840.10045.2.".parse::(), @@ -141,10 +180,18 @@ fn display() { assert_eq!(EXAMPLE_OID_0.to_string(), EXAMPLE_OID_0_STR); assert_eq!(EXAMPLE_OID_1.to_string(), EXAMPLE_OID_1_STR); assert_eq!(EXAMPLE_OID_2.to_string(), EXAMPLE_OID_2_STR); + assert_eq!( + EXAMPLE_OID_LARGE_ARC_0.to_string(), + EXAMPLE_OID_LARGE_ARC_0_STR + ); assert_eq!( EXAMPLE_OID_LARGE_ARC_1.to_string(), EXAMPLE_OID_LARGE_ARC_1_STR ); + assert_eq!( + EXAMPLE_OID_LARGE_ARC_2.to_string(), + EXAMPLE_OID_LARGE_ARC_2_STR + ); } #[test] @@ -207,6 +254,11 @@ fn parse_invalid_second_arc() { ); } +#[test] +fn parse_invalid_repeat_dots() { + assert_eq!(ObjectIdentifier::new("1.2..3.4"), Err(Error::RepeatedDot)) +} + #[test] fn parent() { let child = oid("1.2.3.4"); diff --git a/const-oid/tests/proptests.proptest-regressions b/const-oid/tests/proptests.proptest-regressions new file mode 100644 index 000000000..805e18453 --- /dev/null +++ b/const-oid/tests/proptests.proptest-regressions @@ -0,0 +1,13 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +cc 1663923d2fb0c804c5b850d10dd0ded1cbfc06dddf3f88faa4abf149b8430831 # shrinks to s = "" +cc 829ba8833ee42816bc33d308b7a186452e36617f0fa0e771edea08fd07d78718 # shrinks to s = "0.40" +cc bc88f232a7e2e45d2b1325d4e02c09742aca7ad31903326fedb36c047533696c # shrinks to s = "0" +cc d90305406041ea5e4cf4d9e7849cad03391db1869d0b1329f60ccbf1fabaee91 # shrinks to s = "0..0" +cc 8ed8dde35d12a2c8e10cdde6d591a8f17f0cd6d6fdf90f1582536401364623bf # shrinks to s = "0.00" +cc ba5e3e3dc1a64870477e82054bbf6d8272f8b0d0c9094115bf7e8b5ff59f3c63 # shrinks to s = "00.1.1" +cc d211e943da9a0e3d0ee5097899b2435f784ca2b3d2f8d4790aae3744823a268a # shrinks to s = "1.1.1.60817410.1" diff --git a/const-oid/tests/proptests.rs b/const-oid/tests/proptests.rs new file mode 100644 index 000000000..5f398fbb5 --- /dev/null +++ b/const-oid/tests/proptests.rs @@ -0,0 +1,56 @@ +//! `proptest`-powered property-based tests. + +use const_oid::{Error, ObjectIdentifier}; +use proptest::prelude::*; +use regex::Regex; + +prop_compose! { + /// Produce a string of digits and dots, i.e. the component parts of OIDs. 
+ /// + /// Note that this can be any permutation of digits-and-dots and does not necessarily + /// represent a valid OID. + fn oid_like_string()(bytes in any::>()) -> String { + // Create a digit or dot from a byte input + fn byte_to_char(byte: u8) -> char { + match byte % 11 { + n @ 0..=9 => (b'0' + n) as char, + 10 => '.', + _ => unreachable!() + } + } + + + let mut ret = String::with_capacity(bytes.len()); + for byte in bytes { + ret.push(byte_to_char(byte)); + } + ret + } +} + +proptest! { + #[test] + fn round_trip(s in oid_like_string()) { + match ObjectIdentifier::new(&s) { + Ok(oid) => { + // Leading zeros won't round trip, so ignore that case + // TODO(tarcieri): disallow leading zeros? + if !s.starts_with("0") && !s.contains(".0") { + let oid_string = oid.to_string(); + prop_assert_eq!(s, oid_string); + } + }, + Err(Error::ArcInvalid { .. }) | Err(Error::ArcTooBig) => (), + Err(e) => { + let re = Regex::new("^([0-2])\\.([0-3]?[0-9])((\\.0)|(\\.[1-9][0-9]*))+$").unwrap(); + + prop_assert!( + re.find(&s).is_none(), + "regex asserts OID `{}` is valid, but `const-oid`failed: {}", + &s, + &e + ); + } + } + } +} diff --git a/sec1/Cargo.toml b/sec1/Cargo.toml index 60d05a128..2b4874ff9 100644 --- a/sec1/Cargo.toml +++ b/sec1/Cargo.toml @@ -19,7 +19,7 @@ rust-version = "1.81" [dependencies] base16ct = { version = "0.2", optional = true, default-features = false } der = { version = "0.8.0-rc.0", optional = true, features = ["oid"] } -hybrid-array = { version = "0.2.0-rc.11", optional = true, default-features = false } +hybrid-array = { version = "0.2.1", optional = true, default-features = false } pkcs8 = { version = "0.11.0-rc.1", optional = true, default-features = false } serdect = { version = "0.3.0-rc.0", optional = true, default-features = false, features = ["alloc"] } subtle = { version = "2", optional = true, default-features = false } diff --git a/tai64/CHANGELOG.md b/tai64/CHANGELOG.md index 6640511d5..38a8ecb69 100644 --- a/tai64/CHANGELOG.md +++ b/tai64/CHANGELOG.md @@ -4,6 +4,17 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## 4.1.0 (2024-10-29) + +### Changed +- MSRV bump from 1.56 to 1.60 ([#802]) + +### Fixed +- fix TAI offset and verify with GH Action ([#1583]) + +[#802]: https://github.com/RustCrypto/formats/pull/802 +[#1583]: https://github.com/RustCrypto/formats/pull/1583 + ## 4.0.0 (2021-11-04) ### Changed - Upgrade to Rust 2021 edition; MSRV 1.56+ diff --git a/tai64/Cargo.toml b/tai64/Cargo.toml index 7f0a1afc9..5ada6e0ce 100644 --- a/tai64/Cargo.toml +++ b/tai64/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "tai64" description = "TAI64 and TAI64N (i.e. Temps Atomique International) timestamp support for Rust" -version = "5.0.0-pre" +version = "4.1.0" authors = ["RustCrypto Developers"] license = "Apache-2.0 OR MIT" homepage = "https://github.com/RustCrypto/formats/tree/master/tai64" diff --git a/tai64/src/lib.rs b/tai64/src/lib.rs index d7f92d584..2ffcfc618 100644 --- a/tai64/src/lib.rs +++ b/tai64/src/lib.rs @@ -36,8 +36,8 @@ const NANOS_PER_SECOND: u32 = 1_000_000_000; pub struct Tai64(pub u64); impl Tai64 { - /// Unix epoch in `TAI64`: 1970-01-01 00:00:10 TAI. - pub const UNIX_EPOCH: Self = Self(10 + (1 << 62)); + /// Unix epoch in `TAI64`: 1970-01-01 00:00:37 TAI. + pub const UNIX_EPOCH: Self = Self(37 + (1 << 62)); /// Length of serialized `TAI64` timestamp in bytes. 
pub const BYTE_SIZE: usize = 8; @@ -151,7 +151,7 @@ impl Zeroize for Tai64N { } impl Tai64N { - /// Unix epoch in `TAI64N`: 1970-01-01 00:00:10 TAI. + /// Unix epoch in `TAI64N`: 1970-01-01 00:00:37 TAI. pub const UNIX_EPOCH: Self = Self(Tai64::UNIX_EPOCH, 0); /// Length of serialized `TAI64N` timestamp. diff --git a/tls_codec/src/primitives.rs b/tls_codec/src/primitives.rs index 2677f6fc9..63e21d931 100644 --- a/tls_codec/src/primitives.rs +++ b/tls_codec/src/primitives.rs @@ -1,6 +1,6 @@ //! Codec implementations for unsigned integer primitives. -use alloc::vec::Vec; +use alloc::{boxed::Box, vec::Vec}; use crate::{DeserializeBytes, SerializeBytes, U24}; @@ -371,3 +371,40 @@ impl SerializeBytes for PhantomData { Ok(vec![]) } } + +impl Size for Box { + #[inline(always)] + fn tls_serialized_len(&self) -> usize { + self.as_ref().tls_serialized_len() + } +} + +impl Serialize for Box { + #[cfg(feature = "std")] + #[inline(always)] + fn tls_serialize(&self, writer: &mut W) -> Result { + self.as_ref().tls_serialize(writer) + } +} + +impl SerializeBytes for Box { + #[inline(always)] + fn tls_serialize(&self) -> Result, Error> { + self.as_ref().tls_serialize() + } +} + +impl Deserialize for Box { + #[cfg(feature = "std")] + #[inline(always)] + fn tls_deserialize(bytes: &mut R) -> Result { + T::tls_deserialize(bytes).map(Box::new) + } +} + +impl DeserializeBytes for Box { + #[inline(always)] + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { + T::tls_deserialize_bytes(bytes).map(|(v, r)| (Box::new(v), r)) + } +} diff --git a/x509-cert/Cargo.toml b/x509-cert/Cargo.toml index 799ad6b50..cab147c91 100644 --- a/x509-cert/Cargo.toml +++ b/x509-cert/Cargo.toml @@ -36,7 +36,7 @@ p256 = "=0.14.0-pre.2" rstest = "0.23" sha2 = { version = "=0.11.0-pre.4", features = ["oid"] } tempfile = "3.5.0" -tokio = { version = "1.40.0", features = ["macros", "rt"] } +tokio = { version = "1.41.0", features = ["macros", "rt"] } x509-cert-test-support = { path = "./test-support" } [features] diff --git a/x509-cert/src/builder.rs b/x509-cert/src/builder.rs index 493cfaf7a..809e1a538 100644 --- a/x509-cert/src/builder.rs +++ b/x509-cert/src/builder.rs @@ -12,8 +12,6 @@ use spki::{ use crate::{ certificate::{Certificate, TbsCertificate, Version}, ext::{AsExtension, Extensions}, - name::Name, - request::{attributes::AsAttribute, CertReq, CertReqInfo, ExtensionReq}, serial_number::SerialNumber, time::Validity, AlgorithmIdentifier, SubjectPublicKeyInfo, @@ -29,7 +27,13 @@ use self::profile::BuilderProfile; )] pub use self::profile::BuilderProfile as Profile; -const NULL_OID: ObjectIdentifier = ObjectIdentifier::new_unwrap("0.0.0"); +#[deprecated( + since = "0.3.0", + note = "please use `x509_cert::request::RequestBuilder` instead" +)] +pub use crate::request::RequestBuilder; + +pub(crate) const NULL_OID: ObjectIdentifier = ObjectIdentifier::new_unwrap("0.0.0"); /// Error type #[derive(Debug)] @@ -212,90 +216,6 @@ where } } -/// Builder for X509 Certificate Requests -/// -/// ``` -/// # use p256::{pkcs8::DecodePrivateKey, NistP256, ecdsa::DerSignature}; -/// # const PKCS8_PRIVATE_KEY_DER: &[u8] = include_bytes!("../tests/examples/p256-priv.der"); -/// # fn ecdsa_signer() -> ecdsa::SigningKey { -/// # let secret_key = p256::SecretKey::from_pkcs8_der(PKCS8_PRIVATE_KEY_DER).unwrap(); -/// # ecdsa::SigningKey::from(secret_key) -/// # } -/// use x509_cert::{ -/// builder::{Builder, RequestBuilder}, -/// ext::pkix::{name::GeneralName, SubjectAltName}, -/// name::Name, -/// }; -/// use std::str::FromStr; -/// 
-/// use std::net::{IpAddr, Ipv4Addr}; -/// let subject = Name::from_str("CN=service.domination.world").unwrap(); -/// -/// let signer = ecdsa_signer(); -/// let mut builder = RequestBuilder::new(subject).expect("Create certificate request"); -/// builder -/// .add_extension(&SubjectAltName(vec![GeneralName::from(IpAddr::V4( -/// Ipv4Addr::new(192, 0, 2, 0), -/// ))])) -/// .unwrap(); -/// -/// let cert_req = builder.build::<_, DerSignature>(&signer).unwrap(); -/// ``` -pub struct RequestBuilder { - info: CertReqInfo, - extension_req: ExtensionReq, -} - -impl RequestBuilder { - /// Creates a new certificate request builder - pub fn new(subject: Name) -> Result { - let version = Default::default(); - - let algorithm = AlgorithmIdentifier { - oid: NULL_OID, - parameters: None, - }; - let public_key = SubjectPublicKeyInfo { - algorithm, - subject_public_key: BitString::from_bytes(&[]).expect("unable to parse empty object"), - }; - - let attributes = Default::default(); - let extension_req = Default::default(); - - Ok(Self { - info: CertReqInfo { - version, - subject, - public_key, - attributes, - }, - extension_req, - }) - } - - /// Add an extension to this certificate request - /// - /// Extensions need to implement [`AsExtension`], examples may be found in - /// in [`AsExtension` documentation](../ext/trait.AsExtension.html#examples) or - /// [the implementors](../ext/trait.AsExtension.html#implementors). - pub fn add_extension(&mut self, extension: &E) -> Result<()> { - let ext = extension.to_extension(&self.info.subject, &self.extension_req.0)?; - - self.extension_req.0.push(ext); - - Ok(()) - } - - /// Add an attribute to this certificate request - pub fn add_attribute(&mut self, attribute: &A) -> Result<()> { - let attr = attribute.to_attribute()?; - - self.info.attributes.insert(attr)?; - Ok(()) - } -} - /// Trait for X509 builders /// /// This trait defines the interface between builder and the signers. @@ -404,40 +324,6 @@ where } } -impl Builder for RequestBuilder { - type Output = CertReq; - - fn finalize(&mut self, signer: &S) -> Result> - where - S: Keypair + DynSignatureAlgorithmIdentifier, - S::VerifyingKey: EncodePublicKey, - { - let verifying_key = signer.verifying_key(); - let public_key = SubjectPublicKeyInfo::from_key(&verifying_key)?; - self.info.public_key = public_key; - - self.info - .attributes - .insert(self.extension_req.clone().try_into()?)?; - - self.info.to_der().map_err(Error::from) - } - - fn assemble(self, signature: BitString, signer: &S) -> Result - where - S: Keypair + DynSignatureAlgorithmIdentifier, - S::VerifyingKey: EncodePublicKey, - { - let algorithm = signer.signature_algorithm_identifier()?; - - Ok(CertReq { - info: self.info, - algorithm, - signature, - }) - } -} - /// Trait for async X509 builders /// /// This trait defines the interface between builder and the signers. diff --git a/x509-cert/src/request.rs b/x509-cert/src/request.rs index 9681be12a..0f1bdb1a1 100644 --- a/x509-cert/src/request.rs +++ b/x509-cert/src/request.rs @@ -19,6 +19,12 @@ use der::{ #[cfg(feature = "pem")] use der::pem::PemLabel; +#[cfg(feature = "builder")] +mod builder; + +#[cfg(feature = "builder")] +pub use self::builder::RequestBuilder; + /// Version identifier for certification request information. 
/// /// (RFC 2986 designates `0` as the only valid version) diff --git a/x509-cert/src/request/builder.rs b/x509-cert/src/request/builder.rs new file mode 100644 index 000000000..a87bd02e5 --- /dev/null +++ b/x509-cert/src/request/builder.rs @@ -0,0 +1,132 @@ +use alloc::vec; + +use der::{asn1::BitString, Encode}; +use signature::Keypair; +use spki::{ + AlgorithmIdentifier, DynSignatureAlgorithmIdentifier, EncodePublicKey, SubjectPublicKeyInfo, +}; + +use crate::{ + builder::{Builder, Error, Result, NULL_OID}, + ext::AsExtension, + name::Name, + request::{attributes::AsAttribute, CertReq, CertReqInfo, ExtensionReq}, +}; + +/// Builder for X509 Certificate Requests (CSR) +/// +/// ``` +/// # use p256::{pkcs8::DecodePrivateKey, NistP256, ecdsa::DerSignature}; +/// # const PKCS8_PRIVATE_KEY_DER: &[u8] = include_bytes!("../../tests/examples/p256-priv.der"); +/// # fn ecdsa_signer() -> ecdsa::SigningKey { +/// # let secret_key = p256::SecretKey::from_pkcs8_der(PKCS8_PRIVATE_KEY_DER).unwrap(); +/// # ecdsa::SigningKey::from(secret_key) +/// # } +/// use x509_cert::{ +/// builder::{Builder, RequestBuilder}, +/// ext::pkix::{name::GeneralName, SubjectAltName}, +/// name::Name, +/// }; +/// use std::str::FromStr; +/// +/// use std::net::{IpAddr, Ipv4Addr}; +/// let subject = Name::from_str("CN=service.domination.world").unwrap(); +/// +/// let signer = ecdsa_signer(); +/// let mut builder = RequestBuilder::new(subject).expect("Create certificate request"); +/// builder +/// .add_extension(&SubjectAltName(vec![GeneralName::from(IpAddr::V4( +/// Ipv4Addr::new(192, 0, 2, 0), +/// ))])) +/// .unwrap(); +/// +/// let cert_req = builder.build::<_, DerSignature>(&signer).unwrap(); +/// ``` +pub struct RequestBuilder { + info: CertReqInfo, + extension_req: ExtensionReq, +} + +impl RequestBuilder { + /// Creates a new certificate request builder + pub fn new(subject: Name) -> Result { + let version = Default::default(); + + let algorithm = AlgorithmIdentifier { + oid: NULL_OID, + parameters: None, + }; + let public_key = SubjectPublicKeyInfo { + algorithm, + subject_public_key: BitString::from_bytes(&[]).expect("unable to parse empty object"), + }; + + let attributes = Default::default(); + let extension_req = Default::default(); + + Ok(Self { + info: CertReqInfo { + version, + subject, + public_key, + attributes, + }, + extension_req, + }) + } + + /// Add an extension to this certificate request + /// + /// Extensions need to implement [`AsExtension`], examples may be found in + /// in [`AsExtension` documentation](../ext/trait.AsExtension.html#examples) or + /// [the implementors](../ext/trait.AsExtension.html#implementors). 
+ pub fn add_extension(&mut self, extension: &E) -> Result<()> { + let ext = extension.to_extension(&self.info.subject, &self.extension_req.0)?; + + self.extension_req.0.push(ext); + + Ok(()) + } + + /// Add an attribute to this certificate request + pub fn add_attribute(&mut self, attribute: &A) -> Result<()> { + let attr = attribute.to_attribute()?; + + self.info.attributes.insert(attr)?; + Ok(()) + } +} + +impl Builder for RequestBuilder { + type Output = CertReq; + + fn finalize(&mut self, signer: &S) -> Result> + where + S: Keypair + DynSignatureAlgorithmIdentifier, + S::VerifyingKey: EncodePublicKey, + { + let verifying_key = signer.verifying_key(); + let public_key = SubjectPublicKeyInfo::from_key(&verifying_key)?; + self.info.public_key = public_key; + + self.info + .attributes + .insert(self.extension_req.clone().try_into()?)?; + + self.info.to_der().map_err(Error::from) + } + + fn assemble(self, signature: BitString, signer: &S) -> Result + where + S: Keypair + DynSignatureAlgorithmIdentifier, + S::VerifyingKey: EncodePublicKey, + { + let algorithm = signer.signature_algorithm_identifier()?; + + Ok(CertReq { + info: self.info, + algorithm, + signature, + }) + } +}
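
Editor's note: the tai64 hunks earlier in this diff pin `Tai64::UNIX_EPOCH` to `37 + (1 << 62)` and add a weekly workflow that rewrites that constant from the second column of IANA's leap-seconds.list (currently 37, the TAI−UTC offset). A small, self-contained illustration of the arithmetic; the `unix_to_tai64_label` helper is hypothetical and not part of the crate's API:

```rust
/// Current TAI-UTC offset in seconds, i.e. the value the workflow scrapes
/// from leap-seconds.list.
const TAI_UTC_OFFSET: u64 = 37;

/// TAI64 label for a Unix timestamp, following the convention used in the diff:
/// labels are offset by 2^62, and the Unix epoch sits at 1970-01-01 00:00:37 TAI.
fn unix_to_tai64_label(unix_secs: u64) -> u64 {
    (1 << 62) + TAI_UTC_OFFSET + unix_secs
}

fn main() {
    // The Unix epoch itself maps to the updated `Tai64::UNIX_EPOCH` value.
    assert_eq!(unix_to_tai64_label(0), 37 + (1 << 62));
}
```
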
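
Editor's note: the `tls_codec/src/primitives.rs` hunk above adds pass-through `Size`/`Serialize`/`Deserialize` (plus the `*Bytes` variants) impls for `Box<T>`, so a boxed value encodes exactly like the value it wraps. A usage sketch of those blanket impls, assuming the crate's default `std` feature (which provides `tls_serialize_detached`):

```rust
use tls_codec::{Deserialize, Serialize};

fn main() -> Result<(), tls_codec::Error> {
    // A Box<u16> serializes exactly like the u16 it wraps (network byte order).
    let boxed: Box<u16> = Box::new(0x1234);
    let bytes = boxed.tls_serialize_detached()?;
    assert_eq!(bytes, [0x12, 0x34]);

    // ...and deserializes back through the new blanket impl as well.
    let decoded = Box::<u16>::tls_deserialize(&mut bytes.as_slice())?;
    assert_eq!(decoded, boxed);
    Ok(())
}
```
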