crates/apollo_central_sync/src (1 file changed, +3 -3 lines)

@@ -901,8 +901,8 @@ impl<
         // Safety: If buffer is too large, flush consecutive blocks to prevent OOM
         // But only if we have enough consecutive blocks to make it worthwhile (maintains batching benefits)
         let buffer_size = self.pending_compilations.len();
-        let max_buffer_size = batch_size * 20; // Allow up to 20 batches worth of buffering (20,000 blocks)
-        let min_partial_batch_size = batch_size / 10; // Only flush partial batches if we have at least 100 consecutive blocks
+        let max_buffer_size = batch_size * 20; // Allow up to 20 batches worth of buffering (2,000 blocks for batch_size=100)
+        let min_partial_batch_size = batch_size / 10; // Only flush partial batches if we have at least 10 consecutive blocks (for batch_size=100)

         if buffer_size > max_buffer_size {
             warn!(
@@ -959,7 +959,7 @@ impl<
             );
         } else {
             // Emergency fallback: If buffer is extremely large (1.5x threshold), flush whatever we have
-            let emergency_threshold = max_buffer_size + (max_buffer_size / 2); // 30,000 blocks
+            let emergency_threshold = max_buffer_size + (max_buffer_size / 2); // 3,000 blocks (for batch_size=100)
             if buffer_size > emergency_threshold {
                 error!(
                     "EMERGENCY: Buffer size ({}) exceeds emergency threshold ({}). Flushing {} consecutive blocks to prevent OOM (batching degraded).",