madara/crates/primitives/class/src (3 files changed: +15 -17 lines)

@@ -7,10 +7,7 @@ use blockifier::execution::{
 };
 use num_bigint::{BigInt, BigUint, Sign};
 use starknet_types_core::felt::Felt;
-use std::{
-    borrow::Cow,
-    io::{Cursor, Read},
-};
+use std::borrow::Cow;
 
 #[derive(Debug, thiserror::Error)]
 pub enum ClassCompilationError {
@@ -32,11 +29,8 @@ pub enum ClassCompilationError {
 
 impl CompressedLegacyContractClass {
     pub fn serialize_to_json(&self) -> Result<String, ClassCompilationError> {
-        let mut decompressor = flate2::read::GzDecoder::new(Cursor::new(&self.program));
-        let mut program = Vec::new();
-        decompressor.read_to_end(&mut program)?;
-
-        let mut program: serde_json::Value = serde_json::from_slice(&program)?;
+        let mut program: serde_json::Value =
+            serde_json::from_reader(crate::convert::gz_decompress_stream(self.program.as_slice()))?;
 
         let program_object = program.as_object_mut().ok_or(ClassCompilationError::ProgramIsNotAnObject)?;
 
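The change above replaces the decompress-into-a-buffer-then-parse sequence with a single streaming pipeline: the gzip decoder is handed directly to `serde_json::from_reader`, so the decompressed program is never materialized in an intermediate `Vec<u8>`. A minimal standalone sketch of that pattern, assuming a gzip-compressed JSON input (the `parse_gz_json` helper is hypothetical, for illustration only):

```rust
/// Sketch of the streaming decompress-and-parse pattern, not the crate's actual
/// helper. `gz_json` is assumed to hold a gzip-compressed JSON document.
fn parse_gz_json(gz_json: &[u8]) -> serde_json::Result<serde_json::Value> {
    // `&[u8]` implements `Read`, so the decoder pulls bytes straight from the
    // slice and serde_json pulls decompressed bytes straight from the decoder.
    let decoder = flate2::read::GzDecoder::new(gz_json);
    serde_json::from_reader(decoder)
}
```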
@@ -1,4 +1,4 @@
-use flate2::bufread::GzDecoder;
+use flate2::read::GzDecoder;
 use starknet_core::types::LegacyContractEntryPoint;
 use starknet_core::types::{
     contract::legacy::{
@@ -22,13 +22,17 @@ pub enum ParseCompressedLegacyClassError {
 const MiB: u64 = 1024 * 1024;
 const CLASS_SIZE_LIMIT: u64 = 4 * MiB;
 
+/// Decompresses and limits the size of the decompression stream, to avoid potential DoS vectors.
+pub fn gz_decompress_stream(r: impl io::Read) -> impl io::Read {
+    ReadSizeLimiter::new(GzDecoder::new(r), CLASS_SIZE_LIMIT)
+}
+
 /// Attempts to recover a compressed legacy program.
 pub fn parse_compressed_legacy_class(
     class: CompressedLegacyContractClass,
 ) -> Result<LegacyContractClass, ParseCompressedLegacyClassError> {
     // decompress and parse as a single [`Read`] pipeline to avoid having an intermediary buffer here.
-    let program: LegacyProgram =
-        serde_json::from_reader(ReadSizeLimiter::new(GzDecoder::new(class.program.as_slice()), CLASS_SIZE_LIMIT))?;
+    let program: LegacyProgram = serde_json::from_reader(gz_decompress_stream(class.program.as_slice()))?;
 
     let is_pre_0_11_0 = match &program.compiler_version {
         Some(compiler_version) => {
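The new `gz_decompress_stream` helper wraps the decoder in a `ReadSizeLimiter` capped at `CLASS_SIZE_LIMIT` (4 MiB), which is the DoS guard: a tiny gzip payload can expand to an arbitrarily large output, so the decompressed byte count has to be bounded before it reaches the JSON parser. `ReadSizeLimiter` itself is not shown in this diff; below is a sketch of what such a limiter could look like, under the assumption that it errors out rather than truncating (names and error choices are illustrative, not the crate's implementation):

```rust
use std::io::{self, Read};

/// Illustrative size-limiting reader playing the role of `ReadSizeLimiter`
/// (the real type lives elsewhere in the crate and may differ).
struct SizeLimitedReader<R> {
    inner: R,
    remaining: u64,
}

impl<R: Read> SizeLimitedReader<R> {
    fn new(inner: R, limit: u64) -> Self {
        Self { inner, remaining: limit }
    }
}

impl<R: Read> Read for SizeLimitedReader<R> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        if self.remaining == 0 {
            // Budget exhausted: fail if the inner stream still has data, instead
            // of silently truncating the way `Read::take` would.
            let mut probe = [0u8; 1];
            return if self.inner.read(&mut probe)? == 0 {
                Ok(0) // clean EOF exactly at the limit
            } else {
                Err(io::Error::new(io::ErrorKind::InvalidData, "decompressed size limit exceeded"))
            };
        }
        // Never hand the caller more than the remaining budget in one read.
        let max = self.remaining.min(buf.len() as u64) as usize;
        let n = self.inner.read(&mut buf[..max])?;
        self.remaining -= n as u64;
        Ok(n)
    }
}
```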
@@ -206,11 +206,11 @@ impl TryFrom<CompressedSierraClass> for FlattenedSierraClass {
     type Error = std::io::Error;
 
     fn try_from(compressed_sierra_class: CompressedSierraClass) -> Result<Self, Self::Error> {
-        let string_reader = std::io::Cursor::new(compressed_sierra_class.sierra_program);
-        let base64_decoder =
-            base64::read::DecoderReader::new(string_reader, &base64::engine::general_purpose::STANDARD);
-        let gzip_decoder = flate2::read::GzDecoder::new(base64_decoder);
-        let sierra_program = serde_json::from_reader(gzip_decoder)?;
+        let s = compressed_sierra_class.sierra_program;
+        // base64 -> gz -> json
+        let sierra_program = serde_json::from_reader(crate::convert::gz_decompress_stream(
+            base64::read::DecoderReader::new(s.as_bytes(), &base64::engine::general_purpose::STANDARD),
+        ))?;
 
         Ok(Self {
             sierra_program,
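Here the chain is base64-decode, then gunzip, then JSON parse, all composed as one `Read` pipeline; reusing `crate::convert::gz_decompress_stream` also means the 4 MiB decompression cap now applies to Sierra classes, not just legacy ones. For context, the inverse direction (JSON -> gzip -> base64) could look roughly like the sketch below; `compress_program` and its signature are assumptions for illustration, not code from this repository:

```rust
use base64::Engine;
use flate2::{write::GzEncoder, Compression};
use std::io::Write;

/// Hypothetical inverse of the decoding pipeline above (JSON -> gzip -> base64);
/// not part of the diff or the crate.
fn compress_program(program: &serde_json::Value) -> Result<String, Box<dyn std::error::Error>> {
    // Serialize the JSON, gzip it, then base64-encode the compressed bytes.
    let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
    encoder.write_all(&serde_json::to_vec(program)?)?;
    let gz_bytes = encoder.finish()?;
    Ok(base64::engine::general_purpose::STANDARD.encode(gz_bytes))
}
```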