Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: BWIP #2258

Merged
merged 69 commits into from
Jul 5, 2024
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
Show all changes
69 commits
Select commit Hold shift + click to select a range
4ce3d7b
initial commit
Artemka374 Jun 7, 2024
4f7f974
Merge branch 'refs/heads/main' into afo/bwip
Artemka374 Jun 17, 2024
69a2828
few more tries to compose data
Artemka374 Jun 18, 2024
205df46
more or less finalize needed data
Artemka374 Jun 18, 2024
2569671
add storage cache and migrations
Artemka374 Jun 20, 2024
a691ca4
Merge branch 'refs/heads/main' into afo/bwip
Artemka374 Jun 20, 2024
5da26bf
update some db stuff
Artemka374 Jun 20, 2024
da3fb98
fix some errors
Artemka374 Jun 21, 2024
0e68fa5
fix core build
Artemka374 Jun 24, 2024
2b1a7ae
fmt
Artemka374 Jun 24, 2024
714f8e3
update data handler and gateway
Artemka374 Jun 24, 2024
d54e5ae
add bwip to node
Artemka374 Jun 24, 2024
46cd1ff
fix some builds
Artemka374 Jun 24, 2024
f9fe7aa
get some data from updates manager
Artemka374 Jun 25, 2024
85dcf5b
use updates manager data everywhere possible
Artemka374 Jun 25, 2024
ecac597
fix build
Artemka374 Jun 26, 2024
639a9af
merge main
Artemka374 Jun 26, 2024
6ea08dc
fix build
Artemka374 Jun 26, 2024
ba40d4a
fix prover build
Artemka374 Jun 26, 2024
11fdfb1
Merge branch 'refs/heads/main' into afo/bwip
Artemka374 Jun 26, 2024
f270ccf
checkout contracts
Artemka374 Jun 26, 2024
f5b593a
fix some bugs
Artemka374 Jun 26, 2024
b0fa3b4
some cleanups
Artemka374 Jun 27, 2024
fcc4a34
allow dead code
Artemka374 Jun 27, 2024
5e1816f
Merge branch 'main' into afo/bwip
Artemka374 Jun 27, 2024
314557a
add docs
Artemka374 Jun 27, 2024
3d1e2ba
Merge remote-tracking branch 'origin/afo/bwip' into afo/bwip
Artemka374 Jun 27, 2024
dc82471
remove redundant query
Artemka374 Jun 27, 2024
da2729a
add tracing
Artemka374 Jun 27, 2024
22fb747
Merge branch 'main' into afo/bwip
Artemka374 Jun 27, 2024
5ba82ae
fix serializing problem(should be fixed better in the future)
Artemka374 Jun 27, 2024
c67c1c1
Merge remote-tracking branch 'origin/afo/bwip' into afo/bwip
Artemka374 Jun 28, 2024
84607e5
fmt
Artemka374 Jun 28, 2024
4c0e270
rework serialization
Artemka374 Jun 28, 2024
f3465e6
fix bootloader code problem
Artemka374 Jun 28, 2024
c6e3e68
fix cached state
Artemka374 Jul 1, 2024
838b589
fix previous batch metadataa
Artemka374 Jul 2, 2024
7e6f236
fix type annotation
Artemka374 Jul 2, 2024
e4e91ae
fmt
Artemka374 Jul 2, 2024
db57909
fix type for BWG
Artemka374 Jul 2, 2024
aad26ba
address some comments
Artemka374 Jul 2, 2024
b8ec09c
merge main
Artemka374 Jul 3, 2024
2c47a86
update contracts
Artemka374 Jul 3, 2024
8cc7ca6
update ident
Artemka374 Jul 3, 2024
0a7cddd
update ident
Artemka374 Jul 3, 2024
8c1f1e7
update ident
Artemka374 Jul 3, 2024
d61acf8
address some comments
Artemka374 Jul 3, 2024
e94ba4b
fix build
Artemka374 Jul 3, 2024
4079e62
add migration for witness inputs
Artemka374 Jul 3, 2024
80fa675
merge main
Artemka374 Jul 3, 2024
bd62133
address comments
Artemka374 Jul 3, 2024
3f04efe
address comments
Artemka374 Jul 3, 2024
bb9865b
undo formatting
Artemka374 Jul 3, 2024
8071326
address comments
Artemka374 Jul 3, 2024
ecd2d10
address comments
Artemka374 Jul 3, 2024
9bb645b
merge main
Artemka374 Jul 3, 2024
aec5bd9
updated query
Artemka374 Jul 3, 2024
8b98295
fix build
Artemka374 Jul 3, 2024
9c415ba
fix lint
Artemka374 Jul 4, 2024
c0cc2d2
fix lint, add vm_runner to CI
Artemka374 Jul 4, 2024
d813bdc
remove bwip from loadtest
Artemka374 Jul 4, 2024
dc860cf
address comments
Artemka374 Jul 4, 2024
cdba732
fix build
Artemka374 Jul 4, 2024
74bb85c
Merge branch 'main' into afo/bwip
Artemka374 Jul 4, 2024
3e8c4c8
fix build
Artemka374 Jul 4, 2024
2e5d56c
Merge remote-tracking branch 'origin/afo/bwip' into afo/bwip
Artemka374 Jul 4, 2024
ec68265
fix build
Artemka374 Jul 4, 2024
419a723
Update core/node/vm_runner/src/impls/bwip.rs
Artemka374 Jul 4, 2024
c39e7a1
Update core/node/vm_runner/src/impls/bwip.rs
Artemka374 Jul 4, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
-- Revert the BWIP migration.

-- Drop the blob-URL column added to `proof_generation_details` by the up migration.
ALTER TABLE proof_generation_details DROP COLUMN IF EXISTS vm_run_data_blob_url;

-- BUGFIX: the paired up migration creates `vm_runner_bwip`, so that is the table
-- this down migration must drop. The original statement dropped
-- `vm_runner_protective_reads`, which belongs to a different migration and must
-- not be touched here (rolling back BWIP would have destroyed unrelated data).
DROP TABLE IF EXISTS vm_runner_bwip;
Artemka374 marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
-- Store the object-store URL of the VM run data blob used for witness generation.
-- BUGFIX: the original statement omitted the column's data type
-- (`ADD COLUMN IF NOT EXISTS vm_run_data_blob_url DEFAULT NULL`), which is a
-- syntax error in PostgreSQL — every ADD COLUMN requires a type. Other
-- `*_blob_url` columns in this schema are text, so TEXT is used here.
ALTER TABLE proof_generation_details
    ADD COLUMN IF NOT EXISTS vm_run_data_blob_url TEXT DEFAULT NULL;

-- Progress table for the BWIP (basic witness input producer) VM runner:
-- one row per L1 batch that has been fully processed.
CREATE TABLE IF NOT EXISTS vm_runner_bwip
(
    l1_batch_number BIGINT NOT NULL PRIMARY KEY,
    created_at TIMESTAMP NOT NULL,
    updated_at TIMESTAMP NOT NULL,
    -- Wall-clock duration of processing the batch; nullable because it is
    -- filled in after the fact.
    time_taken TIME
);
76 changes: 76 additions & 0 deletions core/lib/dal/src/vm_runner_dal.rs
Original file line number Diff line number Diff line change
Expand Up @@ -84,4 +84,80 @@ impl VmRunnerDal<'_, '_> {
.await?;
Ok(())
}

/// Returns the latest L1 batch recorded as processed by the BWIP (basic
/// witness input producer) VM runner, i.e. `MAX(l1_batch_number)` from the
/// `vm_runner_bwip` table.
///
/// If no batch has been processed yet (the table is empty), `default_batch`
/// is returned instead, courtesy of the `COALESCE` in the query.
pub async fn get_bwip_latest_processed_batch(
    &mut self,
    default_batch: L1BatchNumber,
) -> DalResult<L1BatchNumber> {
    let row = sqlx::query!(
        r#"
        SELECT
            COALESCE(MAX(l1_batch_number), $1) AS "last_processed_l1_batch!"
        FROM
            vm_runner_bwip
        "#,
        // NOTE(review): `u32 as i32` wraps for batch numbers above i32::MAX;
        // presumably unreachable in practice, but worth confirming.
        default_batch.0 as i32
    )
    .instrument("get_bwip_latest_processed_batch")
    .report_latency()
    .fetch_one(self.storage)
    .await?;
    // The `!` in the column alias tells sqlx the value is non-null,
    // which the COALESCE above guarantees.
    Ok(L1BatchNumber(row.last_processed_l1_batch as u32))
}

/// Returns the newest L1 batch that the BWIP VM runner may process, computed
/// as `min(newest existing batch, last processed batch + window_size)`.
///
/// The window bounds how far ahead of its last completed batch the runner is
/// allowed to run; `default_batch` substitutes for the last processed batch
/// while `vm_runner_bwip` is still empty. The `FULL JOIN ... ON TRUE` simply
/// combines the two single-row CTE results so `LEAST` can compare them.
pub async fn get_bwip_last_ready_batch(
    &mut self,
    default_batch: L1BatchNumber,
    window_size: u32,
) -> DalResult<L1BatchNumber> {
    let row = sqlx::query!(
        r#"
        WITH
        available_batches AS (
            SELECT
                MAX(number) AS "last_batch"
            FROM
                l1_batches
        ),
        processed_batches AS (
            SELECT
                COALESCE(MAX(l1_batch_number), $1) + $2 AS "last_ready_batch"
            FROM
                vm_runner_bwip
        )
        SELECT
            LEAST(last_batch, last_ready_batch) AS "last_ready_batch!"
        FROM
            available_batches
        FULL JOIN processed_batches ON TRUE
        "#,
        // NOTE(review): both casts wrap for values above i32::MAX — presumably
        // unreachable for batch numbers / window sizes, but worth confirming.
        default_batch.0 as i32,
        window_size as i32
    )
    .instrument("get_bwip_last_ready_batch")
    .report_latency()
    .fetch_one(self.storage)
    .await?;
    Ok(L1BatchNumber(row.last_ready_batch as u32))
}

/// Records `l1_batch_number` as fully processed by the BWIP VM runner by
/// inserting a progress row into `vm_runner_bwip` with the current timestamps.
pub async fn mark_bwip_batch_as_completed(
    &mut self,
    l1_batch_number: L1BatchNumber,
) -> DalResult<()> {
    let batch_number = i64::from(l1_batch_number.0);
    let query = sqlx::query!(
        r#"
        INSERT INTO
            vm_runner_bwip (l1_batch_number, created_at, updated_at)
        VALUES
            ($1, NOW(), NOW())
        "#,
        batch_number,
    );
    query
        .instrument("mark_bwip_batch_as_completed")
        .report_latency()
        .execute(self.storage)
        .await?;
    Ok(())
}
}
1 change: 0 additions & 1 deletion core/lib/object_store/src/objects.rs
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,6 @@ impl StoredObject for SnapshotFactoryDependencies {
.map_err(From::from)
}
}

Artemka374 marked this conversation as resolved.
Show resolved Hide resolved
impl StoredObject for SnapshotStorageLogsChunk {
const BUCKET: Bucket = Bucket::StorageSnapshot;
type Key<'a> = SnapshotStorageLogsStorageKey;
Expand Down
1 change: 1 addition & 0 deletions core/lib/prover_interface/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ categories.workspace = true
[dependencies]
zksync_object_store.workspace = true
zksync_types.workspace = true
zksync_state.workspace = true

# We can use the newest api to send proofs to L1.
circuit_sequencer_api_1_5_0.workspace = true
Expand Down
57 changes: 46 additions & 11 deletions core/lib/prover_interface/src/inputs.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,13 @@
use std::{convert::TryInto, fmt::Debug};
use std::{collections::HashMap, convert::TryInto, fmt::Debug};

use serde::{Deserialize, Serialize};
use serde_with::{serde_as, Bytes};
use zksync_object_store::{serialize_using_bincode, Bucket, StoredObject};
use zksync_types::{L1BatchNumber, H256, U256};
pub use zksync_state::WitnessStorage;
use zksync_types::{
block::L1BatchHeader, commitment::L1BatchWithMetadata, witness_block_state::WitnessBlockState,
L1BatchNumber, ProtocolVersionId, H256, U256,
};

const HASH_LEN: usize = H256::len_bytes();

Expand Down Expand Up @@ -132,16 +136,47 @@ impl PrepareBasicCircuitsJob {
}
}

/// Enriched `PrepareBasicCircuitsJob`. All the other fields are taken from the `l1_batches` table.
#[derive(Debug, Clone)]
pub struct BasicCircuitWitnessGeneratorInput {
pub block_number: L1BatchNumber,
pub previous_block_hash: H256,
pub previous_block_timestamp: u64,
pub block_timestamp: u64,
pub used_bytecodes_hashes: Vec<U256>,
pub struct VMRunWitnessInputData {
pub l1_batch_header: L1BatchHeader,
pub previous_batch_with_metadata: L1BatchWithMetadata,
pub used_bytecodes: HashMap<U256, Vec<[u8; 32]>>,
pub initial_heap_content: Vec<(usize, U256)>,
pub merkle_paths_input: PrepareBasicCircuitsJob,
pub protocol_version: ProtocolVersionId,

pub bootloader_code: Vec<[u8; 32]>,
Artemka374 marked this conversation as resolved.
Show resolved Hide resolved
pub default_account_code_hash: U256,
pub storage_refunds: Vec<u32>,
Artemka374 marked this conversation as resolved.
Show resolved Hide resolved
pub pubdata_costs: Option<Vec<i32>>,
pub witness_block_state: WitnessBlockState,
Artemka374 marked this conversation as resolved.
Show resolved Hide resolved
}

/// Object-store binding for `VMRunWitnessInputData`: stored in the witness
/// input bucket, keyed by L1 batch number, serialized with bincode.
impl StoredObject for VMRunWitnessInputData {
    const BUCKET: Bucket = Bucket::WitnessInput;

    type Key<'a> = L1BatchNumber;

    fn encode_key(key: Self::Key<'_>) -> String {
        // E.g. `vm_run_data_123.bin` for batch #123.
        format!("vm_run_data_{key}.bin")
    }

    serialize_using_bincode!();
}

/// Aggregate input for basic witness generation: the VM run artifacts
/// (`VMRunWitnessInputData`) combined with the Merkle-path job
/// (`PrepareBasicCircuitsJob`).
pub struct WitnessInputData {
    // Data gathered while re-running the batch's VM execution (BWIP output).
    pub vm_run_data: VMRunWitnessInputData,
    // Merkle path inputs for the batch's basic circuits.
    pub merkle_paths: PrepareBasicCircuitsJob,
}

/// Object-store binding for `WitnessInputData`: same bucket and bincode
/// serialization as `VMRunWitnessInputData`, distinguished by the
/// `witness_inputs_` key prefix.
impl StoredObject for WitnessInputData {
    const BUCKET: Bucket = Bucket::WitnessInput;

    type Key<'a> = L1BatchNumber;

    fn encode_key(key: Self::Key<'_>) -> String {
        // E.g. `witness_inputs_123.bin` for batch #123.
        format!("witness_inputs_{key}.bin")
    }

    serialize_using_bincode!();
}

#[cfg(test)]
Expand Down
28 changes: 15 additions & 13 deletions core/lib/state/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,32 +17,34 @@ use zksync_types::{
H256,
};

mod cache;
mod catchup;
mod in_memory;
mod postgres;
mod rocksdb;
mod shadow_storage;
mod storage_factory;
mod storage_view;
#[cfg(test)]
mod test_utils;

pub use self::{
cache::sequential_cache::SequentialCache,
catchup::{AsyncCatchupTask, RocksdbCell},
in_memory::InMemoryStorage,
// Note, that `test_infra` of the bootloader tests relies on this value to be exposed
in_memory::InMemoryStorage,
in_memory::IN_MEMORY_STORAGE_DEFAULT_NETWORK_ID,
postgres::{PostgresStorage, PostgresStorageCaches, PostgresStorageCachesTask},
rocksdb::{
RocksdbStorage, RocksdbStorageBuilder, RocksdbStorageOptions, StateKeeperColumnFamily,
},
shadow_storage::ShadowStorage,
storage_factory::{BatchDiff, PgOrRocksdbStorage, ReadStorageFactory, RocksdbWithMemory},
storage_view::{StorageView, StorageViewMetrics},
storage_view::{StorageView, StorageViewCache, StorageViewMetrics},
witness::WitnessStorage,
};

mod cache;
mod catchup;
mod in_memory;
mod postgres;
mod rocksdb;
mod shadow_storage;
mod storage_factory;
mod storage_view;
#[cfg(test)]
mod test_utils;
mod witness;

/// Functionality to read from the VM storage.
pub trait ReadStorage: fmt::Debug {
/// Read value of the key.
Expand Down
43 changes: 32 additions & 11 deletions core/lib/state/src/storage_view.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,14 +45,33 @@ pub struct StorageView<S> {
storage_handle: S,
// Used for caching and to get the list/count of modified keys
modified_storage_keys: HashMap<StorageKey, StorageValue>,
cache: StorageViewCache,
metrics: StorageViewMetrics,
}

#[derive(Debug, Clone)]
pub struct StorageViewCache {
// Used purely for caching
read_storage_keys: HashMap<StorageKey, StorageValue>,
// Cache for `contains_key()` checks. The cache is only valid within one L1 batch execution.
initial_writes_cache: HashMap<StorageKey, bool>,
metrics: StorageViewMetrics,
initial_writes: HashMap<StorageKey, bool>,
}

impl StorageViewCache {
    /// Returns a copy of the map of storage slots read through the view.
    /// NOTE(review): this clones the entire map on every call — acceptable for
    /// a once-per-batch snapshot, but avoid calling it in hot paths.
    pub fn read_storage_keys(&self) -> HashMap<StorageKey, StorageValue> {
        self.read_storage_keys.clone()
    }

    /// Returns a copy of the cached `is_write_initial()` results; per the
    /// struct's own comment this cache is only valid within one L1 batch
    /// execution.
    /// NOTE(review): also a full clone per call — same caveat as above.
    pub fn initial_writes(&self) -> HashMap<StorageKey, bool> {
        self.initial_writes.clone()
    }
}

impl<S> StorageView<S> {
/// Returns a clone of the view's internal cache (read storage slots plus
/// initial-write flags). Cloning makes the snapshot independent of the view's
/// subsequent reads/writes.
pub fn cache(&self) -> StorageViewCache {
    self.cache.clone()
}

/// Returns the modified storage keys
pub fn modified_storage_keys(&self) -> &HashMap<StorageKey, StorageValue> {
&self.modified_storage_keys
Expand Down Expand Up @@ -90,8 +109,10 @@ impl<S: ReadStorage + fmt::Debug> StorageView<S> {
Self {
storage_handle,
modified_storage_keys: HashMap::new(),
read_storage_keys: HashMap::new(),
initial_writes_cache: HashMap::new(),
cache: StorageViewCache {
read_storage_keys: HashMap::new(),
initial_writes: HashMap::new(),
},
metrics: StorageViewMetrics::default(),
}
}
Expand All @@ -102,10 +123,10 @@ impl<S: ReadStorage + fmt::Debug> StorageView<S> {
let cached_value = self
.modified_storage_keys
.get(key)
.or_else(|| self.read_storage_keys.get(key));
.or_else(|| self.cache.read_storage_keys.get(key));
cached_value.copied().unwrap_or_else(|| {
let value = self.storage_handle.read_value(key);
self.read_storage_keys.insert(*key, value);
self.cache.read_storage_keys.insert(*key, value);
self.metrics.time_spent_on_storage_missed += started_at.elapsed();
self.metrics.storage_invocations_missed += 1;
value
Expand All @@ -114,8 +135,8 @@ impl<S: ReadStorage + fmt::Debug> StorageView<S> {

fn cache_size(&self) -> usize {
self.modified_storage_keys.len() * mem::size_of::<(StorageKey, StorageValue)>()
+ self.initial_writes_cache.len() * mem::size_of::<(StorageKey, bool)>()
+ self.read_storage_keys.len() * mem::size_of::<(StorageKey, StorageValue)>()
+ self.cache.initial_writes.len() * mem::size_of::<(StorageKey, bool)>()
+ self.cache.read_storage_keys.len() * mem::size_of::<(StorageKey, StorageValue)>()
}

/// Returns the current metrics.
Expand Down Expand Up @@ -153,11 +174,11 @@ impl<S: ReadStorage + fmt::Debug> ReadStorage for StorageView<S> {
/// Only keys contained in the underlying storage will return `false`. If a key was
/// inserted using [`Self::set_value()`], it will still return `true`.
fn is_write_initial(&mut self, key: &StorageKey) -> bool {
if let Some(&is_write_initial) = self.initial_writes_cache.get(key) {
if let Some(&is_write_initial) = self.cache.initial_writes.get(key) {
is_write_initial
} else {
let is_write_initial = self.storage_handle.is_write_initial(key);
self.initial_writes_cache.insert(*key, is_write_initial);
self.cache.initial_writes.insert(*key, is_write_initial);
is_write_initial
}
}
Expand All @@ -173,7 +194,7 @@ impl<S: ReadStorage + fmt::Debug> ReadStorage for StorageView<S> {

impl<S: ReadStorage + fmt::Debug> WriteStorage for StorageView<S> {
fn read_storage_keys(&self) -> &HashMap<StorageKey, StorageValue> {
&self.read_storage_keys
&self.cache.read_storage_keys
}

fn set_value(&mut self, key: StorageKey, value: StorageValue) -> StorageValue {
Expand Down
Loading
Loading