fix(gateway): sierra class decompress size limit (#482)
cchudant authored Jan 30, 2025
1 parent ee31c57 commit 4f9054c
Showing 3 changed files with 15 additions and 17 deletions.
12 changes: 3 additions & 9 deletions crates/madara/primitives/class/src/compile.rs
@@ -7,10 +7,7 @@ use blockifier::execution::{
 };
 use num_bigint::{BigInt, BigUint, Sign};
 use starknet_types_core::felt::Felt;
-use std::{
-    borrow::Cow,
-    io::{Cursor, Read},
-};
+use std::borrow::Cow;

 #[derive(Debug, thiserror::Error)]
 pub enum ClassCompilationError {
@@ -32,11 +29,8 @@ pub enum ClassCompilationError {

 impl CompressedLegacyContractClass {
     pub fn serialize_to_json(&self) -> Result<String, ClassCompilationError> {
-        let mut decompressor = flate2::read::GzDecoder::new(Cursor::new(&self.program));
-        let mut program = Vec::new();
-        decompressor.read_to_end(&mut program)?;
-
-        let mut program: serde_json::Value = serde_json::from_slice(&program)?;
+        let mut program: serde_json::Value =
+            serde_json::from_reader(crate::convert::gz_decompress_stream(self.program.as_slice()))?;

         let program_object = program.as_object_mut().ok_or(ClassCompilationError::ProgramIsNotAnObject)?;
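
The change above replaces a buffer-then-parse sequence with a single streaming pipeline. A generic illustration of the two shapes (not code from the repository; `gz_decompress_stream` is the crate helper added in convert.rs below):

use std::io::Read;

use flate2::read::GzDecoder;

// Old shape: fully decompress into a Vec, then parse the buffer. The whole
// decompressed payload lives in memory before any size check can apply.
fn parse_buffered(compressed: &[u8]) -> Result<serde_json::Value, Box<dyn std::error::Error>> {
    let mut decompressed = Vec::new();
    GzDecoder::new(compressed).read_to_end(&mut decompressed)?;
    Ok(serde_json::from_slice(&decompressed)?)
}

// New shape: serde_json pulls bytes through the decoder on demand, so a
// size-limiting wrapper (such as gz_decompress_stream) can cut the stream
// off before an oversized payload is ever materialized.
fn parse_streaming(compressed: &[u8]) -> Result<serde_json::Value, Box<dyn std::error::Error>> {
    Ok(serde_json::from_reader(GzDecoder::new(compressed))?)
}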
10 changes: 7 additions & 3 deletions crates/madara/primitives/class/src/convert.rs
@@ -1,4 +1,4 @@
-use flate2::bufread::GzDecoder;
+use flate2::read::GzDecoder;
 use starknet_core::types::LegacyContractEntryPoint;
 use starknet_core::types::{
     contract::legacy::{
@@ -22,13 +22,17 @@ pub enum ParseCompressedLegacyClassError {
 const MiB: u64 = 1024 * 1024;
 const CLASS_SIZE_LIMIT: u64 = 4 * MiB;

+/// Decompress and limits the size of the decompression stream, to avoid potential DoS vectors.
+pub fn gz_decompress_stream(r: impl io::Read) -> impl io::Read {
+    ReadSizeLimiter::new(GzDecoder::new(r), CLASS_SIZE_LIMIT)
+}
+
 /// Attempts to recover a compressed legacy program.
 pub fn parse_compressed_legacy_class(
     class: CompressedLegacyContractClass,
 ) -> Result<LegacyContractClass, ParseCompressedLegacyClassError> {
     // decompress and parse as a single [`Read`] pipeline to avoid having an intermediary buffer here.
-    let program: LegacyProgram =
-        serde_json::from_reader(ReadSizeLimiter::new(GzDecoder::new(class.program.as_slice()), CLASS_SIZE_LIMIT))?;
+    let program: LegacyProgram = serde_json::from_reader(gz_decompress_stream(class.program.as_slice()))?;

     let is_pre_0_11_0 = match &program.compiler_version {
         Some(compiler_version) => {
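
`gz_decompress_stream` relies on a `ReadSizeLimiter` type that already exists in convert.rs and is not touched by this diff. A minimal sketch of what such a size-limiting `Read` wrapper can look like, written as an assumption rather than the actual implementation:

use std::io::{self, Read};

// Hypothetical sketch: forwards reads from the inner reader and errors out
// once more than `limit` bytes have been produced, so a decompression bomb
// cannot inflate without bound. The real ReadSizeLimiter may differ.
pub struct ReadSizeLimiter<R> {
    inner: R,
    limit: u64,
    read_so_far: u64,
}

impl<R: Read> ReadSizeLimiter<R> {
    pub fn new(inner: R, limit: u64) -> Self {
        Self { inner, limit, read_so_far: 0 }
    }
}

impl<R: Read> Read for ReadSizeLimiter<R> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let n = self.inner.read(buf)?;
        self.read_so_far += n as u64;
        if self.read_so_far > self.limit {
            return Err(io::Error::new(io::ErrorKind::InvalidData, "decompressed size limit exceeded"));
        }
        Ok(n)
    }
}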
10 changes: 5 additions & 5 deletions crates/madara/primitives/class/src/lib.rs
@@ -206,11 +206,11 @@ impl TryFrom<CompressedSierraClass> for FlattenedSierraClass {
     type Error = std::io::Error;

     fn try_from(compressed_sierra_class: CompressedSierraClass) -> Result<Self, Self::Error> {
-        let string_reader = std::io::Cursor::new(compressed_sierra_class.sierra_program);
-        let base64_decoder =
-            base64::read::DecoderReader::new(string_reader, &base64::engine::general_purpose::STANDARD);
-        let gzip_decoder = flate2::read::GzDecoder::new(base64_decoder);
-        let sierra_program = serde_json::from_reader(gzip_decoder)?;
+        let s = compressed_sierra_class.sierra_program;
+        // base64 -> gz -> json
+        let sierra_program = serde_json::from_reader(crate::convert::gz_decompress_stream(
+            base64::read::DecoderReader::new(s.as_bytes(), &base64::engine::general_purpose::STANDARD),
+        ))?;

         Ok(Self {
             sierra_program,
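
With the pipeline above, an oversized Sierra or legacy class should now fail while streaming rather than being fully inflated first. An illustrative test sketch (not part of the commit), assuming the crate-internal `gz_decompress_stream` and the 4 MiB `CLASS_SIZE_LIMIT` shown in convert.rs:

use std::io::{Read, Write};

use flate2::{write::GzEncoder, Compression};

#[test]
fn decompression_bomb_is_rejected() {
    // A tiny gzip payload that inflates to ~16 MiB of zeros, well past the
    // 4 MiB CLASS_SIZE_LIMIT.
    let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
    encoder.write_all(&vec![0u8; 16 * 1024 * 1024]).unwrap();
    let bomb = encoder.finish().unwrap();

    // Reading through the limited stream should error instead of allocating
    // the full decompressed buffer.
    let mut limited = crate::convert::gz_decompress_stream(bomb.as_slice());
    let mut out = Vec::new();
    assert!(limited.read_to_end(&mut out).is_err());
}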
