Merged
26 changes: 26 additions & 0 deletions API_ENDPOINTS.md
@@ -220,6 +220,32 @@ GET /all_pectra_data_gas?start_timestamp=1640995200&end_timestamp=1641081600
}
```

### 5. Seven-Day Stats (Daily Snapshots)

**Endpoint:** `GET /seven_day_stats`

**Parameters:** none

**Description:**
Returns, for each monitored batcher, the most recent 7 daily snapshot rows produced by the background snapshot loop. Each field is returned as an array whose order corresponds to the `timestamps` array.

**Response:**
```json
{
"batchers": [
{
"batcher_address": "0x5050F69a9786F081509234F1a7F4684b5E5b76C9",
"timestamps": [1759430400, 1759516800, 1759603200, 1759689600, 1759776000, 1759862400, 1759948800],
"total_daily_txs": [221, 270, 390, 130, 532, 421, 1009],
"total_eth_saved_wei": ["123", "456", "789", "..."],
"total_blob_data_gas": [100, 200, 300, 400, 500, 600, 700],
"total_pectra_data_gas": [150, 250, 350, 450, 550, 650, 750]
},
{ "batcher_address": "0x6887246668a3b87F54DeB3b94Ba47a6f63F32985", "timestamps": [...], ... }
]
}
```
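
Since the arrays are index-aligned, a client pairs each metric with its day by zipping against `timestamps`. A minimal consumption sketch, assuming the server is reachable at `http://localhost:3000` and the `reqwest` (with its `json` feature), `serde_json`, and `tokio` crates are available — all assumptions, not defined by this document:

```rust
use serde_json::Value;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Base URL is an assumption; substitute wherever the server actually runs.
    let body: Value = reqwest::get("http://localhost:3000/seven_day_stats")
        .await?
        .json()
        .await?;

    let empty = Vec::new();
    for batcher in body["batchers"].as_array().unwrap_or(&empty) {
        let timestamps = batcher["timestamps"].as_array().cloned().unwrap_or_default();
        let daily_txs = batcher["total_daily_txs"].as_array().cloned().unwrap_or_default();
        println!("batcher {}", batcher["batcher_address"]);
        // Arrays are index-aligned, so zipping pairs each snapshot day with its tx count.
        for (ts, txs) in timestamps.iter().zip(daily_txs.iter()) {
            println!("  day {ts}: {txs} txs");
        }
    }
    Ok(())
}
```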

## Technical Notes

- All timestamps are in Unix timestamp format (seconds since January 1, 1970)
221 changes: 38 additions & 183 deletions examples/fill_test_data.rs
@@ -1,5 +1,7 @@
use pectralizer::server::types::TxAnalysisResponse;
use pectralizer::tracker::database::{Database, SqliteDatabase, TrackedBatch};
use pectralizer::{
server::types::DailyBatcherStats,
tracker::database::{Database, SqliteDatabase},
};
use std::time::{SystemTime, UNIX_EPOCH};

#[tokio::main]
@@ -15,195 +17,48 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
.unwrap()
.as_secs() as i64;

// Use exact addresses from L2_BATCHERS_ADDRESSES in l2_monitor.rs
let base_batcher = "0x5050f69a9786f081509234f1a7f4684b5e5b76c9"; // Base - lowercase hex format
let optimism_batcher = "0x6887246668a3b87f54deb3b94ba47a6f63f32985"; // Optimism - lowercase hex format
// Exact addresses from L2_BATCHERS_ADDRESSES in l2_monitor.rs (lower-case)
let base_batcher = "0x5050f69a9786f081509234f1a7f4684b5e5b76c9"; // Base
let optimism_batcher = "0x6887246668a3b87f54deb3b94ba47a6f63f32985"; // Optimism

// Create proper TxAnalysisResponse structures for test data
let create_analysis_response = |timestamp: u64,
gas_used: u64,
gas_price: u128,
blob_gas_price: u128,
blob_gas_used: u64,
eip_7623_calldata_gas: u64,
legacy_calldata_gas: u64|
-> String {
let response = TxAnalysisResponse {
timestamp,
gas_used,
gas_price,
blob_gas_price: Some(blob_gas_price),
blob_gas_used,
eip_7623_calldata_gas,
legacy_calldata_gas,
blob_data_wei_spent: Some(blob_gas_used as u128 * blob_gas_price),
legacy_calldata_wei_spent: legacy_calldata_gas as u128 * gas_price,
eip_7623_calldata_wei_spent: eip_7623_calldata_gas as u128 * gas_price,
};
serde_json::to_string(&response).unwrap()
};

// Base batcher test data
let base_batches = vec![
TrackedBatch {
id: None,
tx_hash: "0xbase1111111111111111111111111111111111111111111111111111111111111"
.to_string(),
batcher_address: base_batcher.to_string(),
analysis_result: create_analysis_response(
(now - 86400) as u64, // 1 day ago
150000, // gas_used
20_000_000_000, // gas_price (20 gwei)
15_000_000_000, // blob_gas_price (15 gwei)
131072, // blob_gas_used
15000, // eip_7623_calldata_gas
12000, // legacy_calldata_gas
),
timestamp: now - 86400, // 1 day ago
last_analyzed_block: None,
},
TrackedBatch {
id: None,
tx_hash: "0xbase2222222222222222222222222222222222222222222222222222222222222"
.to_string(),
batcher_address: base_batcher.to_string(),
analysis_result: create_analysis_response(
(now - 82800) as u64, // 23 hours ago
200000, // gas_used
25_000_000_000, // gas_price (25 gwei)
18_000_000_000, // blob_gas_price (18 gwei)
262144, // blob_gas_used
25000, // eip_7623_calldata_gas
20000, // legacy_calldata_gas
),
timestamp: now - 82800, // 23 hours ago
last_analyzed_block: None,
},
TrackedBatch {
id: None,
tx_hash: "0xbase3333333333333333333333333333333333333333333333333333333333333"
.to_string(),
batcher_address: base_batcher.to_string(),
analysis_result: create_analysis_response(
(now - 79200) as u64, // 22 hours ago
175000, // gas_used
22_000_000_000, // gas_price (22 gwei)
16_000_000_000, // blob_gas_price (16 gwei)
196608, // blob_gas_used
18000, // eip_7623_calldata_gas
15000, // legacy_calldata_gas
),
timestamp: now - 79200, // 22 hours ago
last_analyzed_block: None,
},
];
// ---------------------------------------------------------------------
// 1. Insert synthetic DAILY SNAPSHOT rows so the /seven_day_stats API
// immediately returns meaningful data without waiting for the
// background snapshot loop.
// ---------------------------------------------------------------------

// Optimism batcher test data
let optimism_batches = vec![
TrackedBatch {
id: None,
tx_hash: "0xop111111111111111111111111111111111111111111111111111111111111111"
.to_string(),
batcher_address: optimism_batcher.to_string(),
analysis_result: create_analysis_response(
(now - 75600) as u64, // 21 hours ago
300000, // gas_used
30_000_000_000, // gas_price (30 gwei)
20_000_000_000, // blob_gas_price (20 gwei)
393216, // blob_gas_used
35000, // eip_7623_calldata_gas
28000, // legacy_calldata_gas
),
timestamp: now - 75600, // 21 hours ago
last_analyzed_block: None,
},
TrackedBatch {
id: None,
tx_hash: "0xop222222222222222222222222222222222222222222222222222222222222222"
.to_string(),
batcher_address: optimism_batcher.to_string(),
analysis_result: create_analysis_response(
(now - 72000) as u64, // 20 hours ago
350000, // gas_used
35_000_000_000, // gas_price (35 gwei)
25_000_000_000, // blob_gas_price (25 gwei)
524288, // blob_gas_used
45000, // eip_7623_calldata_gas
36000, // legacy_calldata_gas
),
timestamp: now - 72000, // 20 hours ago
last_analyzed_block: None,
},
TrackedBatch {
id: None,
tx_hash: "0xop333333333333333333333333333333333333333333333333333333333333333"
.to_string(),
batcher_address: optimism_batcher.to_string(),
analysis_result: create_analysis_response(
(now - 68400) as u64, // 19 hours ago
250000, // gas_used
28_000_000_000, // gas_price (28 gwei)
22_000_000_000, // blob_gas_price (22 gwei)
327680, // blob_gas_used
28000, // eip_7623_calldata_gas
22000, // legacy_calldata_gas
),
timestamp: now - 68400, // 19 hours ago
last_analyzed_block: None,
},
];
let mut snapshot_rows: Vec<DailyBatcherStats> = Vec::new();

// Insert Base batches
println!("📊 Inserting Base batcher data...");
for batch in base_batches {
match db.save_tracked_batch(&batch).await {
Ok(_) => println!(" ✅ Inserted batch: {}", batch.tx_hash),
Err(e) => println!(" ❌ Failed to insert batch {}: {}", batch.tx_hash, e),
}
}
let day_start_ts = (now / 86_400) * 86_400; // midnight UTC of current day

// Insert Optimism batches
println!("📊 Inserting Optimism batcher data...");
for batch in optimism_batches {
match db.save_tracked_batch(&batch).await {
Ok(_) => println!(" ✅ Inserted batch: {}", batch.tx_hash),
Err(e) => println!(" ❌ Failed to insert batch {}: {}", batch.tx_hash, e),
}
}
for i in 1..=7 {
let ts = day_start_ts - (i as i64) * 86_400; // midnight of previous days

// Add some older data for testing different time ranges
println!("📊 Inserting older test data...");
let older_batch = TrackedBatch {
id: None,
tx_hash: "0xold1111111111111111111111111111111111111111111111111111111111111".to_string(),
batcher_address: base_batcher.to_string(),
analysis_result: create_analysis_response(
(now - 604800) as u64, // 7 days ago
120000, // gas_used
15_000_000_000, // gas_price (15 gwei)
12_000_000_000, // blob_gas_price (12 gwei)
131072, // blob_gas_used
12000, // eip_7623_calldata_gas
10000, // legacy_calldata_gas
),
timestamp: now - 604800, // 7 days ago
last_analyzed_block: None,
};
// helper that fabricates deterministic per-day numbers so the series shows some variety
let make_row = |addr: &str, factor: u64| DailyBatcherStats {
batcher_address: addr.to_string(),
snapshot_timestamp: ts,
total_daily_txs: 100 + factor * i, // 101, 102, ... (factor 1) or 102, 104, ... (factor 2)
total_eth_saved_wei: (1_000_000_000_000u128) * (i as u128) * (factor as u128),
total_blob_data_gas: 1_000 * factor * i, // 1000,2000,...
total_pectra_data_gas: 2_000 * factor * i, // 2000,4000,...
};

match db.save_tracked_batch(&older_batch).await {
Ok(_) => println!(" ✅ Inserted older batch: {}", older_batch.tx_hash),
Err(e) => println!(" ❌ Failed to insert older batch: {}", e),
snapshot_rows.push(make_row(base_batcher, 1));
snapshot_rows.push(make_row(optimism_batcher, 2));
}

println!("\n🎉 Test data insertion completed!");
println!("📈 Summary:");
println!(" - 3 Base batcher transactions");
println!(" - 3 Optimism batcher transactions");
println!(" - 1 older transaction for time range testing");
println!("\n🔍 You can now test the API endpoints with:");
println!(" Base batcher: {}", base_batcher);
println!(" Optimism batcher: {}", optimism_batcher);
println!(" Time range: {} to {}", now - 86400, now);
println!(
"📊 Inserting {} synthetic daily snapshot rows...",
snapshot_rows.len()
);
db.insert_daily_batcher_stats(&snapshot_rows).await?;

println!("\n🎉 Test snapshot insertion completed!");
println!(
"Inserted 7 days × 2 batchers = {} rows",
snapshot_rows.len()
);

Ok(())
}
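With these changes, running the example (presumably via `cargo run --example fill_test_data`) seeds seven days of snapshot rows per batcher, so `/seven_day_stats` returns data without waiting for the background snapshot loop.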
11 changes: 9 additions & 2 deletions src/main.rs
@@ -8,7 +8,7 @@ use pectralizer::{
all_blob_data_gas_handler, all_daily_txs_handler, all_eth_saved_handler,
all_pectra_data_gas_handler, blob_data_gas_handler, contract_handler,
daily_txs_handler, eth_saved_handler, pectra_data_gas_handler, root_handler,
tx_handler,
seven_day_stats_handler, tx_handler,
},
},
tracker::{
@@ -35,7 +35,7 @@ async fn run_l2_batches_monitoring_service(app_state: AppState) -> eyre::Result<

// run both monitoring and retry services concurrently
tokio::select! {
res = tracker::l2_monitor::start_monitoring(app_state.db, app_state.provider_state) => {
res = tracker::l2_monitor::start_monitoring(app_state.db.clone(), app_state.provider_state.clone()) => {
if let Err(e) = res {
error!("L2 monitor error: {:?}", e);
}
@@ -45,6 +45,11 @@ async fn run_l2_batches_monitoring_service(app_state: AppState) -> eyre::Result<
error!("Retry handler error: {:?}", e);
}
},
res = tracker::snapshot::start_snapshot_loop(app_state.db.clone()) => {
if let Err(e) = res {
error!("Snapshot loop error: {:?}", e);
}
},
}

Ok(())
@@ -116,6 +121,7 @@ async fn main() -> eyre::Result<()> {
.route("/all_eth_saved", get(all_eth_saved_handler))
.route("/all_blob_data_gas", get(all_blob_data_gas_handler))
.route("/all_pectra_data_gas", get(all_pectra_data_gas_handler))
.route("/seven_day_stats", get(seven_day_stats_handler))
.layer(CorsLayer::permissive())
.with_state(app_state.clone());

@@ -139,6 +145,7 @@ async fn main() -> eyre::Result<()> {
info!(" - GET /all_eth_saved - ETH saved data for all batchers");
info!(" - GET /all_blob_data_gas - Blob data gas for all batchers");
info!(" - GET /all_pectra_data_gas - Pectra data gas for all batchers");
info!(" - GET /seven_day_stats - Last 7-day snapshot series for all batchers");

// run both services concurrently
tokio::select! {
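The `tracker::snapshot::start_snapshot_loop` task wired into the `tokio::select!` above is not part of the visible diff. As a rough, hypothetical sketch only — only `DailyBatcherStats` and `insert_daily_batcher_stats` appear elsewhere in this PR; the signature and aggregation step below are assumptions — such a loop might wake at each UTC day boundary and persist one row per batcher:

```rust
use pectralizer::{
    server::types::DailyBatcherStats,
    tracker::database::{Database, SqliteDatabase},
};
use std::time::{Duration, SystemTime, UNIX_EPOCH};

// Hypothetical sketch, not the PR's implementation: wake at each UTC midnight,
// aggregate the day that just ended, and persist the snapshot rows.
pub async fn start_snapshot_loop(db: SqliteDatabase) -> eyre::Result<()> {
    loop {
        let now = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() as i64;
        let next_midnight = (now / 86_400 + 1) * 86_400;
        tokio::time::sleep(Duration::from_secs((next_midnight - now) as u64)).await;

        // Placeholder for the real per-batcher aggregation over the day that just
        // ended (next_midnight - 86_400 .. next_midnight); this sketch inserts nothing.
        let rows: Vec<DailyBatcherStats> = Vec::new();
        db.insert_daily_batcher_stats(&rows).await?;
    }
}
```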
36 changes: 36 additions & 0 deletions src/server/handlers.rs
@@ -9,13 +9,15 @@ use super::{
};
use crate::{
provider::ProviderState,
server::types::{AllBatchersSevenDayStatsResponse, BatcherSevenDayStats},
utils::{BASE_STIPEND, BYTES_PER_BLOB, compute_calldata_gas, compute_legacy_calldata_gas},
};
use alloy_consensus::{Transaction, Typed2718};
use alloy_primitives::{Address, FixedBytes, hex::FromHex};
use alloy_provider::Provider;
use axum::{Json, extract::Query, extract::State};
use rustc_hash::FxHashSet;
use std::collections::HashMap;

pub async fn root_handler() -> &'static str {
concat!(
@@ -363,3 +365,37 @@ pub async fn all_pectra_data_gas_handler(

Ok(Json(AllPectraDataGasResponse { batchers }))
}

pub async fn seven_day_stats_handler(
State(app_state): State<super::AppState>,
) -> Result<Json<AllBatchersSevenDayStatsResponse>, HandlerError> {
let rows = app_state.db.get_recent_daily_stats(7).await.map_err(|e| {
HandlerError::DatabaseError(format!("Failed to get recent daily stats: {}", e))
})?;

let mut map: HashMap<String, BatcherSevenDayStats> = HashMap::new();

for r in rows {
let entry = map
.entry(r.batcher_address.clone())
.or_insert_with(|| BatcherSevenDayStats {
batcher_address: r.batcher_address.clone(),
timestamps: Vec::new(),
total_daily_txs: Vec::new(),
total_eth_saved_wei: Vec::new(),
total_blob_data_gas: Vec::new(),
total_pectra_data_gas: Vec::new(),
});
entry.timestamps.push(r.snapshot_timestamp);
entry.total_daily_txs.push(r.total_daily_txs);
entry.total_eth_saved_wei.push(r.total_eth_saved_wei);
entry.total_blob_data_gas.push(r.total_blob_data_gas);
entry.total_pectra_data_gas.push(r.total_pectra_data_gas);
}

let mut batchers: Vec<BatcherSevenDayStats> = map.into_values().collect();
// each batcher's vectors are already ascending by timestamp (the query sorts ASC); sort batchers by address for a deterministic response order
batchers.sort_by(|a, b| a.batcher_address.cmp(&b.batcher_address));

Ok(Json(AllBatchersSevenDayStatsResponse { batchers }))
}