Skip to content

Commit 7c6a59c

Browse files
committed
Merge branch 'staging' into perf/sp1-verification
2 parents b561071 + 703f766 commit 7c6a59c

7 files changed

Lines changed: 50 additions & 5 deletions

aggregation_mode/src/backend/config.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ pub struct Config {
2323
pub ecdsa: ECDSAConfig,
2424
pub proofs_per_chunk: u16,
2525
pub pre_verification_enabled: bool,
26+
pub total_proofs_limit: u16,
2627
}
2728

2829
impl Config {

aggregation_mode/src/backend/fetcher.rs

Lines changed: 17 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,7 @@ impl ProofsFetcher {
5858
pub async fn fetch(
5959
&mut self,
6060
engine: ZKVMEngine,
61+
limit: u16,
6162
) -> Result<Vec<AlignedProof>, ProofsFetcherError> {
6263
// Get current block
6364
let current_block = self
@@ -89,12 +90,9 @@ impl ProofsFetcher {
8990

9091
info!("Logs collected {}", logs.len());
9192

92-
// Update last processed block after collecting logs
93-
self.last_aggregated_block = current_block;
94-
9593
let mut proofs = vec![];
9694

97-
for (batch, _) in logs {
95+
for (batch, log) in logs {
9896
info!(
9997
"New batch submitted, about to process. Batch merkle root {}...",
10098
batch.batchMerkleRoot
@@ -177,9 +175,24 @@ impl ProofsFetcher {
177175
proofs_to_add.len()
178176
);
179177

178+
if (proofs.len() + proofs_to_add.len()) > (limit as usize) {
179+
let log_block_number = log.block_number.unwrap();
180+
info!(
181+
"Limit of {} proofs reached, stopping at block number {}, which is {} from current block",
182+
limit, log_block_number, current_block - log_block_number
183+
);
184+
// Update last processed block to this log block number
185+
// So the next aggregation starts at this block
186+
self.last_aggregated_block = log_block_number;
187+
return Ok(proofs);
188+
}
189+
180190
proofs.extend(proofs_to_add);
181191
}
182192

193+
// Update last processed block after collecting logs
194+
self.last_aggregated_block = current_block;
195+
183196
Ok(proofs)
184197
}
185198

aggregation_mode/src/backend/mod.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,7 @@ impl ProofAggregator {
9797
) -> Result<(), AggregatedProofSubmissionError> {
9898
let proofs = self
9999
.fetcher
100-
.fetch(self.engine.clone())
100+
.fetch(self.engine.clone(), self.config.total_proofs_limit)
101101
.await
102102
.map_err(AggregatedProofSubmissionError::FetchingProofs)?;
103103

config-files/config-proof-aggregator-ethereum-package.yaml

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,13 @@ max_proofs_in_queue: 1000
66
last_aggregated_block_filepath: config-files/proof-aggregator.last_aggregated_block.json
77
proofs_per_chunk: 512 # Amount of proofs to process per chunk
88
pre_verification_enabled: true
9+
# This number comes from the blob data limit
10+
# Since each blob has a capacity of (4096 * 32) = 131,072 bytes
11+
# But to not exceed the field modulus we pad with a 0x00 byte, so we have (4096 * 31) = 126,976 bytes
12+
# of usable data
13+
# Since each proof commitment takes a 32-byte hash
14+
# We can aggregate as many as 126,976 / 32 = 3968 proofs per blob
15+
total_proofs_limit: 3968
916

1017
ecdsa:
1118
private_key_store_path: "config-files/anvil.proof-aggregator.ecdsa.key.json"

config-files/config-proof-aggregator-mock-ethereum-package.yaml

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,14 @@ max_proofs_in_queue: 1000
66
last_aggregated_block_filepath: config-files/proof-aggregator.last_aggregated_block.json
77
proofs_per_chunk: 512 # Amount of proofs to process per chunk
88
pre_verification_enabled: true
9+
# This number comes from the blob data limit
10+
# Since each blob has a capacity of (4096 * 32) = 131,072 bytes
11+
# But to not exceed the field modulus we pad with a 0x00 byte, so we have (4096 * 31) = 126,976 bytes
12+
# of usable data
13+
# Since each proof commitment takes a 32-byte hash
14+
# We can aggregate as many as 126,976 / 32 = 3968 proofs per blob
15+
total_proofs_limit: 3968
16+
917

1018
ecdsa:
1119
private_key_store_path: "config-files/anvil.proof-aggregator.ecdsa.key.json"

config-files/config-proof-aggregator-mock.yaml

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,14 @@ max_proofs_in_queue: 1000
66
last_aggregated_block_filepath: config-files/proof-aggregator.last_aggregated_block.json
77
proofs_per_chunk: 512 # Amount of proofs to process per chunk
88
pre_verification_enabled: true
9+
# This number comes from the blob data limit
10+
# Since each blob has a capacity of (4096 * 32) = 131,072 bytes
11+
# But to not exceed the field modulus we pad with a 0x00 byte, so we have (4096 * 31) = 126,976 bytes
12+
# of usable data
13+
# Since each proof commitment takes a 32-byte hash
14+
# We can aggregate as many as 126,976 / 32 = 3968 proofs per blob
15+
total_proofs_limit: 3968
16+
917

1018
ecdsa:
1119
private_key_store_path: "config-files/anvil.proof-aggregator.ecdsa.key.json"

config-files/config-proof-aggregator.yaml

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,14 @@ max_proofs_in_queue: 1000
66
last_aggregated_block_filepath: config-files/proof-aggregator.last_aggregated_block.json
77
proofs_per_chunk: 512 # Amount of proofs to process per chunk
88
pre_verification_enabled: true
9+
# This number comes from the blob data limit
10+
# Since each blob has a capacity of (4096 * 32) = 131,072 bytes
11+
# But to not exceed the field modulus we pad with a 0x00 byte, so we have (4096 * 31) = 126,976 bytes
12+
# of usable data
13+
# Since each proof commitment takes a 32-byte hash
14+
# We can aggregate as many as 126,976 / 32 = 3968 proofs per blob
15+
total_proofs_limit: 3968
16+
917

1018
ecdsa:
1119
private_key_store_path: "config-files/anvil.proof-aggregator.ecdsa.key.json"

0 commit comments

Comments
 (0)