diff --git a/core/bin/zksync_tee_prover/src/api_client.rs b/core/bin/zksync_tee_prover/src/api_client.rs
index 1530da971157..0eec6ad4adaa 100644
--- a/core/bin/zksync_tee_prover/src/api_client.rs
+++ b/core/bin/zksync_tee_prover/src/api_client.rs
@@ -74,17 +74,21 @@ impl TeeApiClient {
 
     /// Fetches the next job for the TEE prover to process, verifying and signing it if the
     /// verification is successful.
-    pub async fn get_job(&self) -> Result<Option<Box<TeeVerifierInput>>, TeeProverError> {
-        let request = TeeProofGenerationDataRequest {};
+    pub async fn get_job(
+        &self,
+        tee_type: TeeType,
+    ) -> Result<Option<TeeProofGenerationDataResponse>, TeeProverError> {
+        let request = TeeProofGenerationDataRequest { tee_type };
         let response = self
             .post::<_, TeeProofGenerationDataResponse, _>("/tee/proof_inputs", request)
             .await?;
-        Ok(response.0)
+        Ok(response)
     }
 
     /// Submits the successfully verified proof to the TEE prover interface API.
     pub async fn submit_proof(
         &self,
+        proof_id: i64,
         batch_number: L1BatchNumber,
         signature: Signature,
         pubkey: &PublicKey,
@@ -92,6 +96,7 @@ impl TeeApiClient {
         tee_type: TeeType,
     ) -> Result<(), TeeProverError> {
         let request = SubmitTeeProofRequest(Box::new(L1BatchTeeProofForL1 {
+            proof_id,
             signature: signature.serialize_compact().into(),
             pubkey: pubkey.serialize().into(),
             proof: root_hash.as_bytes().into(),
diff --git a/core/bin/zksync_tee_prover/src/tee_prover.rs b/core/bin/zksync_tee_prover/src/tee_prover.rs
index b14d07b72db6..a3891b0fd7c0 100644
--- a/core/bin/zksync_tee_prover/src/tee_prover.rs
+++ b/core/bin/zksync_tee_prover/src/tee_prover.rs
@@ -112,11 +112,12 @@ impl TeeProver {
     }
 
     async fn step(&self) -> Result<Option<L1BatchNumber>, TeeProverError> {
-        match self.api_client.get_job().await? {
+        match self.api_client.get_job(self.tee_type).await? {
             Some(job) => {
-                let (signature, batch_number, root_hash) = self.verify(*job)?;
+                let (signature, batch_number, root_hash) = self.verify(job.input)?;
                 self.api_client
                     .submit_proof(
+                        job.proof_id.unwrap(), // TODO unwrap() is not safe here
                        batch_number,
                        signature,
                        &self.public_key,
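
The `job.proof_id.unwrap()` above is flagged with a TODO in the diff itself. As an illustrative sketch only (not part of this change), one way to consume the two optional response fields without panicking; `TeeVerifierInput` and `TeeProverError` below are simplified stand-ins for the real zksync types:

// Stand-ins for the real zksync types; only the field shapes matter here.
struct TeeVerifierInput;
#[derive(Debug)]
struct TeeProverError(String);

struct TeeProofGenerationDataResponse {
    proof_id: Option<i64>,
    input: Option<Box<TeeVerifierInput>>,
}

// Treats a half-populated response as a protocol error instead of panicking.
fn next_job(
    response: TeeProofGenerationDataResponse,
) -> Result<Option<(i64, Box<TeeVerifierInput>)>, TeeProverError> {
    match (response.proof_id, response.input) {
        // Both fields present: a job was assigned.
        (Some(proof_id), Some(input)) => Ok(Some((proof_id, input))),
        // Neither present: no job available right now.
        (None, None) => Ok(None),
        // Anything else is an inconsistent response from the server.
        _ => Err(TeeProverError(
            "inconsistent /tee/proof_inputs response".into(),
        )),
    }
}
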
", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Bytea", + "Bytea", + "Bytea", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "1ef09a1df23f8dea9779f34350b38dbd01af9068af4059b53fe79a6f2505b0a5" +} diff --git a/core/lib/dal/.sqlx/query-7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33.json b/core/lib/dal/.sqlx/query-7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33.json new file mode 100644 index 000000000000..fdab20788c36 --- /dev/null +++ b/core/lib/dal/.sqlx/query-7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proofs AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.prover_taken_at IS NULL\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33" +} diff --git a/core/lib/dal/.sqlx/query-94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc.json b/core/lib/dal/.sqlx/query-94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc.json new file mode 100644 index 000000000000..fb5ec089514b --- /dev/null +++ b/core/lib/dal/.sqlx/query-94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc.json @@ -0,0 +1,29 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tee_proofs\n SET\n prover_taken_at = NOW()\n WHERE\n id = (\n SELECT\n proofs.id\n FROM\n tee_proofs AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.tee_type = $1\n AND (\n proofs.prover_taken_at IS NULL\n OR proofs.prover_taken_at < NOW() - $2::INTERVAL\n )\n ORDER BY\n tee_proofs.l1_batch_number ASC,\n proofs.prover_taken_at ASC NULLS FIRST\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proofs.id,\n tee_proofs.l1_batch_number\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Text", + "Interval" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc" +} diff --git a/core/lib/dal/.sqlx/query-eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1.json b/core/lib/dal/.sqlx/query-eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1.json new file mode 100644 index 000000000000..0db3c5ca1b1d --- /dev/null +++ b/core/lib/dal/.sqlx/query-eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n tee_proofs (l1_batch_number, tee_type, created_at)\n VALUES\n ($1, $2, NOW())\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Text" + ] + }, + "nullable": [] + }, + "hash": "eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1" +} diff --git a/core/lib/dal/doc/ProofGenerationDal.md b/core/lib/dal/doc/ProofGenerationDal.md deleted file mode 100644 index 618fdfba13b0..000000000000 --- a/core/lib/dal/doc/ProofGenerationDal.md +++ /dev/null @@ -1,22 +0,0 @@ -# ProofGenerationDal - 
diff --git a/core/lib/dal/doc/ProofGenerationDal.md b/core/lib/dal/doc/ProofGenerationDal.md
deleted file mode 100644
index 618fdfba13b0..000000000000
--- a/core/lib/dal/doc/ProofGenerationDal.md
+++ /dev/null
@@ -1,22 +0,0 @@
-# ProofGenerationDal
-
-## Table Name
-
-proof_generation_details
-
-## `status` Diagram
-
-```mermaid
----
-title: Status Diagram
----
-stateDiagram-v2
-[*] --> ready_to_be_proven : insert_proof_generation_details
-ready_to_be_proven --> picked_by_prover : get_next_block_to_be_proven
-picked_by_prover --> generated : save_proof_artifacts_metadata
-generated --> [*]
-
-[*] --> skipped : mark_proof_generation_job_as_skipped
-skipped --> [*]
-
-```
diff --git a/core/lib/dal/migrations/20240325133100_add_tee_verifier_input_producer_jobs_table.down.sql b/core/lib/dal/migrations/20240325133100_add_tee_verifier_input_producer_jobs_table.down.sql
index 2d4d6a6ad7a7..bccd4f97dcbb 100644
--- a/core/lib/dal/migrations/20240325133100_add_tee_verifier_input_producer_jobs_table.down.sql
+++ b/core/lib/dal/migrations/20240325133100_add_tee_verifier_input_producer_jobs_table.down.sql
@@ -1,4 +1,5 @@
 DROP INDEX IF EXISTS idx_tee_verifier_input_producer_jobs_status_processing_attempts;
+DROP INDEX IF EXISTS idx_tee_verifier_input_producer_jobs_l1_batch_number_status;
 
 DROP TABLE IF EXISTS tee_verifier_input_producer_jobs;
 
diff --git a/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql b/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql
index eb34068f876d..26e7e5a11940 100644
--- a/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql
+++ b/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql
@@ -5,7 +5,6 @@ CREATE TABLE IF NOT EXISTS tee_verifier_input_producer_jobs
     l1_batch_number BIGINT NOT NULL PRIMARY KEY,
     attempts SMALLINT NOT NULL DEFAULT 0,
     status tee_verifier_input_producer_job_status,
-    picked_by TEXT,
     input_blob_url TEXT,
     error TEXT,
     created_at TIMESTAMP NOT NULL,
@@ -16,3 +15,6 @@ CREATE TABLE IF NOT EXISTS tee_verifier_input_producer_jobs
 
 CREATE INDEX IF NOT EXISTS idx_tee_verifier_input_producer_jobs_status_processing_attempts
     ON tee_verifier_input_producer_jobs (status, processing_started_at, attempts);
+
+CREATE INDEX IF NOT EXISTS idx_tee_verifier_input_producer_jobs_l1_batch_number_status
+    ON tee_verifier_input_producer_jobs (l1_batch_number, status);
diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql
index 5b4f9958a8ea..d46bc4aa154d 100644
--- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql
+++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql
@@ -1,4 +1,4 @@
-DROP TABLE IF EXISTS tee_attestations;
-DROP TABLE IF EXISTS tee_proof_generation_details;
+DROP INDEX IF EXISTS idx_proofs_number_per_batch_number_and_tee_type;
 
-DROP INDEX IF EXISTS idx_tee_proof_generation_details_status_prover_taken_at;
+DROP TABLE IF EXISTS tee_attestations;
+DROP TABLE IF EXISTS tee_proofs;
diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql
index 3a249c44346c..013bdcd02b17 100644
--- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql
+++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql
@@ -4,19 +4,18 @@ CREATE TABLE IF NOT EXISTS tee_attestations
     attestation BYTEA
 );
 
-CREATE TABLE IF NOT EXISTS tee_proof_generation_details
+CREATE TABLE IF NOT EXISTS tee_proofs
 (
-    l1_batch_number BIGINT PRIMARY KEY REFERENCES tee_verifier_input_producer_jobs (l1_batch_number) ON DELETE CASCADE,
-    status TEXT NOT NULL,
+    id BIGSERIAL PRIMARY KEY,
+    l1_batch_number BIGINT NOT NULL REFERENCES tee_verifier_input_producer_jobs (l1_batch_number) ON DELETE CASCADE,
+    tee_type TEXT NOT NULL,
+    pubkey BYTEA REFERENCES tee_attestations (pubkey) ON DELETE CASCADE,
     signature BYTEA,
-    pubkey BYTEA REFERENCES tee_attestations (pubkey) ON DELETE SET NULL,
     proof BYTEA,
-    tee_type TEXT,
     created_at TIMESTAMP NOT NULL,
-    updated_at TIMESTAMP NOT NULL,
+    proved_at TIMESTAMP,
     prover_taken_at TIMESTAMP
 );
 
-CREATE INDEX IF NOT EXISTS idx_tee_proof_generation_details_status_prover_taken_at
-    ON tee_proof_generation_details (prover_taken_at)
-    WHERE status = 'picked_by_prover';
+CREATE INDEX IF NOT EXISTS idx_proofs_number_per_batch_number_and_tee_type
+    ON tee_proofs (l1_batch_number, tee_type);
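
Because `tee_proofs` now keys on a surrogate `id` and only carries a non-unique `(l1_batch_number, tee_type)` index, a batch can accumulate several proofs per TEE type. A hypothetical helper (not part of the migration, assuming an sqlx `PgPool`) that fetches the newest proof id per TEE type for one batch could look like this:

// Illustrative only: `DISTINCT ON (tee_type)` keeps the first row per TEE type,
// and the descending `id` order makes that first row the most recently inserted proof.
async fn latest_proof_ids(
    pool: &sqlx::PgPool,
    l1_batch_number: i64,
) -> sqlx::Result<Vec<(String, i64)>> {
    sqlx::query_as::<_, (String, i64)>(
        r#"
        SELECT DISTINCT ON (tee_type) tee_type, id
        FROM tee_proofs
        WHERE l1_batch_number = $1
        ORDER BY tee_type, id DESC
        "#,
    )
    .bind(l1_batch_number)
    .fetch_all(pool)
    .await
}
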
diff --git a/core/lib/dal/src/models/mod.rs b/core/lib/dal/src/models/mod.rs
index 1e852e3f6364..d22541620f2a 100644
--- a/core/lib/dal/src/models/mod.rs
+++ b/core/lib/dal/src/models/mod.rs
@@ -11,6 +11,7 @@ pub mod storage_log;
 pub mod storage_oracle_info;
 pub mod storage_protocol_version;
 pub mod storage_sync;
+pub mod storage_tee_proof;
 pub mod storage_transaction;
 pub mod storage_verification_request;
 pub mod storage_witness_job_info;
diff --git a/core/lib/dal/src/models/storage_tee_proof.rs b/core/lib/dal/src/models/storage_tee_proof.rs
new file mode 100644
index 000000000000..9c55c86eb26d
--- /dev/null
+++ b/core/lib/dal/src/models/storage_tee_proof.rs
@@ -0,0 +1,35 @@
+use serde::{Deserialize, Serialize}; // TODO needed?
+use zksync_types::{tee_types::TeeType, L1BatchNumber};
+
+/// Represents a proof generated within a TEE enclave
+/// TODO move it to core/lib/types/src/api/mod.rs and call TeeProof?
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct StorageTeeProof {
+    // batch number for which the proof was generated
+    pub l1_batch_number: L1BatchNumber,
+    // type of TEE used for attestation
+    pub tee_type: Option<TeeType>,
+    // pubkey used for signature verification; each key pair is attested by the TEE attestation
+    // stored in the db
+    pub pubkey: Option<Vec<u8>>,
+    // signature generated within the TEE enclave, using the privkey corresponding to the pubkey
+    pub signature: Option<Vec<u8>>,
+    // data that was signed
+    pub proof: Option<Vec<u8>>,
+    // attestation quote generated within the TEE enclave
+    pub attestation: Option<Vec<u8>>,
+    // timestamp when the proof was generated
+    pub proved_at: chrono::DateTime<chrono::Utc>,
+}
+
+/// TODO rename it TeeProof once StorageTeeProof is moved to api/mod.rs?
+#[derive(Debug, Clone, sqlx::FromRow)]
+pub struct TmpStorageTeeProof {
+    #[allow(dead_code)]
+    pub id: i64,
+    pub pubkey: Option<Vec<u8>>,
+    pub signature: Option<Vec<u8>>,
+    pub proof: Option<Vec<u8>>,
+    pub proved_at: chrono::DateTime<chrono::Utc>,
+    pub attestation: Option<Vec<u8>>,
+}
diff --git a/core/lib/dal/src/proof_generation_dal.rs b/core/lib/dal/src/proof_generation_dal.rs
index 4e37cc644f8e..0c857d929fe9 100644
--- a/core/lib/dal/src/proof_generation_dal.rs
+++ b/core/lib/dal/src/proof_generation_dal.rs
@@ -1,4 +1,3 @@
-#![doc = include_str!("../doc/ProofGenerationDal.md")]
 use std::time::Duration;
 
 use strum::{Display, EnumString};
diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs
index e93743781f87..f0ebc7720523 100644
--- a/core/lib/dal/src/tee_proof_generation_dal.rs
+++ b/core/lib/dal/src/tee_proof_generation_dal.rs
@@ -1,130 +1,101 @@
 use std::time::Duration;
 
-use serde::{Deserialize, Serialize};
-use strum::{Display, EnumString};
 use zksync_db_connection::{
-    connection::Connection,
-    error::DalResult,
-    instrument::{InstrumentExt, Instrumented},
+    connection::Connection, error::DalResult, instrument::Instrumented,
     utils::pg_interval_from_duration,
 };
 use zksync_types::{tee_types::TeeType, L1BatchNumber};
 
-use crate::Core;
+use crate::{
+    models::storage_tee_proof::{StorageTeeProof, TmpStorageTeeProof},
+    Core,
+};
 
 #[derive(Debug)]
 pub struct TeeProofGenerationDal<'a, 'c> {
     pub(crate) storage: &'a mut Connection<'c, Core>,
 }
 
-#[derive(Debug, Serialize, Deserialize)]
-pub struct TeeProof {
-    // signature generated within the TEE enclave, using the privkey corresponding to the pubkey
-    pub signature: Option<Vec<u8>>,
-    // pubkey used for signature verification; each key pair is attested by the TEE attestation
-    // stored in the db
-    pub pubkey: Option<Vec<u8>>,
-    // data that was signed
-    pub proof: Option<Vec<u8>>,
-    // type of TEE used for attestation
-    pub tee_type: Option<TeeType>,
-}
-
-#[derive(Debug, EnumString, Display)]
-enum TeeProofGenerationJobStatus {
-    #[strum(serialize = "ready_to_be_proven")]
-    ReadyToBeProven,
-    #[strum(serialize = "picked_by_prover")]
-    PickedByProver,
-    #[strum(serialize = "generated")]
-    Generated,
-    #[strum(serialize = "skipped")]
-    Skipped,
-}
-
 impl TeeProofGenerationDal<'_, '_> {
-    pub async fn get_next_block_to_be_proven(
+    pub async fn get_next_batch_to_be_proven(
         &mut self,
+        tee_type: TeeType,
         processing_timeout: Duration,
-    ) -> DalResult<Option<L1BatchNumber>> {
+    ) -> DalResult<Option<(i64, L1BatchNumber)>> {
         let processing_timeout = pg_interval_from_duration(processing_timeout);
         let query = sqlx::query!(
             r#"
-            UPDATE tee_proof_generation_details
+            UPDATE tee_proofs
             SET
-                status = 'picked_by_prover',
-                updated_at = NOW(),
                 prover_taken_at = NOW()
             WHERE
-                l1_batch_number = (
+                id = (
                     SELECT
-                        proofs.l1_batch_number
+                        proofs.id
                     FROM
-                        tee_proof_generation_details AS proofs
+                        tee_proofs AS proofs
                         JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number
                     WHERE
                         inputs.status = 'Successful'
+                        AND proofs.tee_type = $1
                         AND (
-                            proofs.status = 'ready_to_be_proven'
-                            OR (
-                                proofs.status = 'picked_by_prover'
-                                AND proofs.prover_taken_at < NOW() - $1::INTERVAL
-                            )
+                            proofs.prover_taken_at IS NULL
+                            OR proofs.prover_taken_at < NOW() - $2::INTERVAL
                         )
                     ORDER BY
-                        l1_batch_number ASC
+                        tee_proofs.l1_batch_number ASC,
+                        proofs.prover_taken_at ASC NULLS FIRST
                     LIMIT
                         1
                     FOR UPDATE
                         SKIP LOCKED
                 )
            RETURNING
-                tee_proof_generation_details.l1_batch_number
+                tee_proofs.id,
+                tee_proofs.l1_batch_number
            "#,
+            &tee_type.to_string(),
            &processing_timeout,
        );
-        let batch_number = Instrumented::new("get_next_block_to_be_proven")
+        let batch_number = Instrumented::new("get_next_batch_to_be_proven")
+            .with_arg("tee_type", &tee_type)
            .with_arg("processing_timeout", &processing_timeout)
            .with(query)
            .fetch_optional(self.storage)
            .await?
-            .map(|row| L1BatchNumber(row.l1_batch_number as u32));
+            .map(|row| (row.id as i64, L1BatchNumber(row.l1_batch_number as u32)));
 
        Ok(batch_number)
    }
 
    pub async fn save_proof_artifacts_metadata(
        &mut self,
-        block_number: L1BatchNumber,
-        signature: &[u8],
+        proof_id: i64,
        pubkey: &[u8],
+        signature: &[u8],
        proof: &[u8],
-        tee_type: TeeType,
    ) -> DalResult<()> {
        let query = sqlx::query!(
            r#"
-            UPDATE tee_proof_generation_details
+            UPDATE tee_proofs
            SET
-                status = 'generated',
-                signature = $1,
-                pubkey = $2,
+                pubkey = $1,
+                signature = $2,
                proof = $3,
-                tee_type = $4,
-                updated_at = NOW()
+                proved_at = NOW()
            WHERE
-                l1_batch_number = $5
+                id = $4
            "#,
-            signature,
            pubkey,
+            signature,
            proof,
-            tee_type.to_string(),
-            i64::from(block_number.0)
+            proof_id
        );
        let instrumentation = Instrumented::new("save_proof_artifacts_metadata")
-            .with_arg("signature", &signature)
            .with_arg("pubkey", &pubkey)
+            .with_arg("signature", &signature)
            .with_arg("proof", &proof)
-            .with_arg("tee_type", &tee_type);
+            .with_arg("proof_id", &proof_id);
        let result = instrumentation
            .clone()
            .with(query)
@@ -132,7 +103,8 @@ impl TeeProofGenerationDal<'_, '_> {
             .await?;
         if result.rows_affected() == 0 {
             let err = instrumentation.constraint_error(anyhow::anyhow!(
-                "Updating TEE proof for a non-existent batch number is not allowed"
+                "Updating TEE proof for a non-existent ID {} is not allowed",
+                proof_id
             ));
             return Err(err);
         }
@@ -142,39 +114,52 @@ impl TeeProofGenerationDal<'_, '_> {
 
     pub async fn insert_tee_proof_generation_job(
         &mut self,
-        block_number: L1BatchNumber,
+        batch_number: L1BatchNumber,
+        tee_type: TeeType,
     ) -> DalResult<()> {
-        let block_number = i64::from(block_number.0);
-        sqlx::query!(
+        let batch_number = i64::from(batch_number.0);
+        let query = sqlx::query!(
             r#"
             INSERT INTO
-                tee_proof_generation_details (l1_batch_number, status, created_at, updated_at)
+                tee_proofs (l1_batch_number, tee_type, created_at)
             VALUES
-                ($1, 'ready_to_be_proven', NOW(), NOW())
-            ON CONFLICT (l1_batch_number) DO NOTHING
+                ($1, $2, NOW())
             "#,
-            block_number,
-        )
-        .instrument("create_tee_proof_generation_details")
-        .with_arg("l1_batch_number", &block_number)
-        .report_latency()
-        .execute(self.storage)
-        .await?;
+            batch_number,
+            tee_type.to_string(),
+        );
+        let instrumentation = Instrumented::new("insert_tee_proof_generation_job")
+            .with_arg("l1_batch_number", &batch_number)
+            .with_arg("tee_type", &tee_type);
+        let result = instrumentation
+            .clone()
+            .with(query)
+            .execute(self.storage)
+            .await?;
+        if result.rows_affected() == 0 {
+            let err = instrumentation.constraint_error(anyhow::anyhow!(
+                "Unable to insert TEE proof for {}, {}",
+                batch_number,
+                tee_type
+            ));
+            return Err(err);
+        }
 
         Ok(())
     }
 
+    #[cfg(test)]
     pub async fn get_oldest_unpicked_batch(&mut self) -> DalResult<Option<L1BatchNumber>> {
         let query = sqlx::query!(
             r#"
             SELECT
                 proofs.l1_batch_number
             FROM
-                tee_proof_generation_details AS proofs
+                tee_proofs AS proofs
                 JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number
             WHERE
                 inputs.status = 'Successful'
-                AND proofs.status = 'ready_to_be_proven'
+                AND proofs.prover_taken_at IS NULL
             ORDER BY
                 proofs.l1_batch_number ASC
             LIMIT
@@ -220,38 +205,49 @@ impl TeeProofGenerationDal<'_, '_> {
 
         Ok(())
     }
 
-    pub async fn get_tee_proof(
+    pub async fn get_tee_proofs(
         &mut self,
-        block_number: L1BatchNumber,
-    ) -> DalResult<Option<TeeProof>> {
-        let query = sqlx::query!(
+        batch_number: L1BatchNumber,
+        tee_type: Option<TeeType>,
+    ) -> DalResult<Vec<StorageTeeProof>> {
+        let query = format!(
             r#"
             SELECT
-                signature,
-                pubkey,
-                proof,
-                tee_type
+                tp.id,
+                tp.pubkey,
+                tp.signature,
+                tp.proof,
+                tp.proved_at,
+                ta.attestation
             FROM
-                tee_proof_generation_details
+                tee_proofs tp
+            LEFT JOIN
+                tee_attestations ta ON tp.pubkey = ta.pubkey
             WHERE
-                l1_batch_number = $1
+                tp.l1_batch_number = {}
+                {}
+            ORDER BY tp.id
             "#,
-            i64::from(block_number.0)
+            i64::from(batch_number.0),
+            tee_type.map_or_else(String::new, |tt| format!("AND tp.tee_type = '{}'", tt))
        );
-        let proof = Instrumented::new("get_tee_proof")
-            .with_arg("l1_batch_number", &block_number)
-            .with(query)
-            .fetch_optional(self.storage)
-            .await?
-            .map(|row| TeeProof {
-                signature: row.signature,
+
+        let proofs: Vec<StorageTeeProof> = sqlx::query_as(&query)
+            .fetch_all(self.storage.conn())
+            .await
+            .unwrap()
+            .into_iter()
+            .map(|row: TmpStorageTeeProof| StorageTeeProof {
+                l1_batch_number: batch_number,
+                tee_type,
                pubkey: row.pubkey,
+                signature: row.signature,
                proof: row.proof,
-                tee_type: row
-                    .tee_type
-                    .map(|tt| tt.parse::<TeeType>().expect("Invalid TEE type")),
-            });
+                attestation: row.attestation,
+                proved_at: row.proved_at,
+            })
+            .collect();
 
-        Ok(proof)
+        Ok(proofs)
    }
 }
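
The `get_tee_proofs` query above is assembled with `format!`, interpolating the TEE type into the SQL text. As a hedged alternative sketch only (not what this diff does), the optional filter can also be expressed with bound parameters so the statement stays static; `proof_ids_for_batch` and its `PgPool` argument are hypothetical:

// Illustrative only: `$2::TEXT IS NULL OR tp.tee_type = $2` makes the filter optional
// while the SQL text stays constant and the value is passed as a bound parameter.
async fn proof_ids_for_batch(
    pool: &sqlx::PgPool,
    l1_batch_number: i64,
    tee_type: Option<String>,
) -> sqlx::Result<Vec<i64>> {
    sqlx::query_scalar::<_, i64>(
        r#"
        SELECT tp.id
        FROM tee_proofs tp
        WHERE tp.l1_batch_number = $1
          AND ($2::TEXT IS NULL OR tp.tee_type = $2)
        ORDER BY tp.id
        "#,
    )
    .bind(l1_batch_number)
    .bind(tee_type)
    .fetch_all(pool)
    .await
}
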
diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs
index 00ac85a40739..6f937d6ab91b 100644
--- a/core/lib/prover_interface/src/api.rs
+++ b/core/lib/prover_interface/src/api.rs
@@ -4,6 +4,7 @@ use serde::{Deserialize, Serialize};
 use zksync_types::{
     protocol_version::{L1VerifierConfig, ProtocolSemanticVersion},
+    tee_types::TeeType,
     L1BatchNumber,
 };
 
 use crate::{
@@ -29,7 +30,10 @@ pub enum ProofGenerationDataResponse {
 }
 
 #[derive(Debug, Serialize, Deserialize)]
-pub struct TeeProofGenerationDataResponse(pub Option<Box<TeeVerifierInput>>);
+pub struct TeeProofGenerationDataResponse {
+    pub proof_id: Option<i64>,
+    pub input: Option<Box<TeeVerifierInput>>,
+}
 
 #[derive(Debug, Serialize, Deserialize)]
 pub enum SubmitProofResponse {
@@ -52,7 +56,10 @@ pub enum RegisterTeeAttestationResponse {
 #[derive(Debug, Serialize, Deserialize)]
 pub struct ProofGenerationDataRequest {}
 
-pub type TeeProofGenerationDataRequest = ProofGenerationDataRequest;
+#[derive(Debug, Serialize, Deserialize)]
+pub struct TeeProofGenerationDataRequest {
+    pub tee_type: TeeType,
+}
 
 #[derive(Debug, Serialize, Deserialize)]
 pub enum SubmitProofRequest {
diff --git a/core/lib/prover_interface/src/outputs.rs b/core/lib/prover_interface/src/outputs.rs
index 9672bfb2142b..8a4bd8e47405 100644
--- a/core/lib/prover_interface/src/outputs.rs
+++ b/core/lib/prover_interface/src/outputs.rs
@@ -16,6 +16,8 @@ pub struct L1BatchProofForL1 {
 /// A "final" TEE proof that can be sent to the L1 contract.
 #[derive(Clone, PartialEq, Serialize, Deserialize)]
 pub struct L1BatchTeeProofForL1 {
+    // ID of the proof in the database to distinguish between different proofs for the same batch
+    pub proof_id: i64,
     // signature generated within the TEE enclave, using the privkey corresponding to the pubkey
     pub signature: Vec<u8>,
     // pubkey used for signature verification; each key pair is attested by the TEE attestation
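
Replacing the newtype wrapper with a named struct also changes the JSON shape on the wire. A self-contained sketch of that difference with serde, using a stand-in `Input` type instead of the real `TeeVerifierInput`:

use serde::Serialize;

#[derive(Serialize)]
struct Input {
    batch: u32,
}

// Old shape: a newtype serializes as the bare inner value, or `null` when no job exists.
#[derive(Serialize)]
struct OldResponse(Option<Box<Input>>);

// New shape: an object that carries the proof id alongside the input.
#[derive(Serialize)]
struct NewResponse {
    proof_id: Option<i64>,
    input: Option<Box<Input>>,
}

fn main() {
    assert_eq!(
        serde_json::to_string(&OldResponse(Some(Box::new(Input { batch: 1 })))).unwrap(),
        r#"{"batch":1}"#
    );
    assert_eq!(
        serde_json::to_string(&NewResponse {
            proof_id: Some(42),
            input: Some(Box::new(Input { batch: 1 })),
        })
        .unwrap(),
        r#"{"proof_id":42,"input":{"batch":1}}"#
    );
}
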
diff --git a/core/lib/prover_interface/tests/job_serialization.rs b/core/lib/prover_interface/tests/job_serialization.rs
index a2d55a140655..81b969d5d13e 100644
--- a/core/lib/prover_interface/tests/job_serialization.rs
+++ b/core/lib/prover_interface/tests/job_serialization.rs
@@ -167,6 +167,7 @@ fn test_proof_request_serialization() {
 #[test]
 fn test_tee_proof_request_serialization() {
     let tee_proof_str = r#"{
+        "proof_id": 0,
         "signature": [ 0, 1, 2, 3, 4 ],
         "pubkey": [ 5, 6, 7, 8, 9 ],
         "proof": [ 10, 11, 12, 13, 14 ],
@@ -174,6 +175,7 @@ fn test_tee_proof_request_serialization() {
     }"#;
     let tee_proof_result = serde_json::from_str::<SubmitTeeProofRequest>(tee_proof_str).unwrap();
     let tee_proof_expected = SubmitTeeProofRequest(Box::new(L1BatchTeeProofForL1 {
+        proof_id: 0,
         signature: vec![0, 1, 2, 3, 4],
         pubkey: vec![5, 6, 7, 8, 9],
         proof: vec![10, 11, 12, 13, 14],
diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs
index 50643e7add1f..618a786ea658 100644
--- a/core/node/proof_data_handler/src/lib.rs
+++ b/core/node/proof_data_handler/src/lib.rs
@@ -1,11 +1,7 @@
 use std::{net::SocketAddr, sync::Arc};
 
 use anyhow::Context as _;
-use axum::{
-    extract::Path,
-    routing::{get, post},
-    Json, Router,
-};
+use axum::{extract::Path, routing::post, Json, Router};
 use request_processor::RequestProcessor;
 use tee_request_processor::TeeRequestProcessor;
 use tokio::sync::watch;
@@ -95,7 +91,6 @@ fn create_proof_processing_router(
             TeeRequestProcessor::new(blob_store, connection_pool, config.clone());
         let submit_tee_proof_processor = get_tee_proof_gen_processor.clone();
         let register_tee_attestation_processor = get_tee_proof_gen_processor.clone();
-        let get_tee_proof_processor = get_tee_proof_gen_processor.clone();
 
         router = router.route(
             "/tee/proof_inputs",
@@ -126,15 +121,6 @@ fn create_proof_processing_router(
                         .await
                 },
             ),
-        )
-        .route("/tee/get_proof/:l1_batch_number",
-            get(
-                move |l1_batch_number: Path<u32>| async move {
-                    get_tee_proof_processor
-                        .get_proof(l1_batch_number)
-                        .await
-                },
-            )
         );
     }
 
diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs
index 686d500b9eb4..20613ba909e2 100644
--- a/core/node/proof_data_handler/src/tee_request_processor.rs
+++ b/core/node/proof_data_handler/src/tee_request_processor.rs
@@ -2,7 +2,7 @@ use std::sync::Arc;
 
 use axum::{extract::Path, Json};
 use zksync_config::configs::ProofDataHandlerConfig;
-use zksync_dal::{tee_proof_generation_dal::TeeProof, ConnectionPool, Core, CoreDal};
+use zksync_dal::{ConnectionPool, Core, CoreDal};
 use zksync_object_store::ObjectStore;
 use zksync_prover_interface::{
     api::{
@@ -49,12 +49,19 @@ impl TeeRequestProcessor {
 
         let l1_batch_number_result = connection
             .tee_proof_generation_dal()
-            .get_next_block_to_be_proven(self.config.proof_generation_timeout())
+            .get_next_batch_to_be_proven(request.tee_type, self.config.proof_generation_timeout())
             .await
             .map_err(RequestProcessorError::Dal)?;
-        let l1_batch_number = match l1_batch_number_result {
-            Some(number) => number,
-            None => return Ok(Json(TeeProofGenerationDataResponse(None))),
+
+        let (proof_id, l1_batch_number) = match l1_batch_number_result {
+            Some((proof_id, number)) => (proof_id, number),
+            None => {
+                // TODO introduce a proper type
+                return Ok(Json(TeeProofGenerationDataResponse {
+                    proof_id: None,
+                    input: None,
+                }));
+            }
         };
 
         let tee_verifier_input: TeeVerifierInput = self
@@ -63,9 +70,12 @@ impl TeeRequestProcessor {
             .await
             .map_err(RequestProcessorError::ObjectStore)?;
 
-        Ok(Json(TeeProofGenerationDataResponse(Some(Box::new(
-            tee_verifier_input,
-        )))))
+        let response = TeeProofGenerationDataResponse {
+            proof_id: Some(proof_id),
+            input: Some(Box::new(tee_verifier_input)),
+        };
+
+        Ok(Json(response))
     }
 
     pub(crate) async fn submit_proof(
@@ -82,16 +92,15 @@ impl TeeRequestProcessor {
         let mut dal = connection.tee_proof_generation_dal();
 
         tracing::info!(
-            "Received proof {:?} for block number: {:?}",
+            "Received proof {:?} for batch number: {:?}",
             proof,
             l1_batch_number
         );
         dal.save_proof_artifacts_metadata(
-            l1_batch_number,
-            &proof.0.signature,
+            proof.0.proof_id,
             &proof.0.pubkey,
+            &proof.0.signature,
             &proof.0.proof,
-            proof.0.tee_type,
         )
         .await
         .map_err(RequestProcessorError::Dal)?;
@@ -118,23 +127,4 @@ impl TeeRequestProcessor {
 
         Ok(Json(RegisterTeeAttestationResponse::Success))
     }
-
-    pub(crate) async fn get_proof(
-        &self,
-        Path(l1_batch_number): Path<u32>,
-    ) -> Result<Json<TeeProof>, RequestProcessorError> {
-        let mut connection = self
-            .pool
-            .connection()
-            .await
-            .map_err(RequestProcessorError::Dal)?;
-        let mut dal = connection.tee_proof_generation_dal();
-        let l1_batch_number = L1BatchNumber(l1_batch_number);
-        let tee_proof = dal
-            .get_tee_proof(l1_batch_number)
-            .await
-            .map_err(RequestProcessorError::Dal)?;
-
-        Ok(Json(tee_proof.unwrap()))
-    }
 }
diff --git a/core/node/tee_verifier_input_producer/src/lib.rs b/core/node/tee_verifier_input_producer/src/lib.rs
index 0cd28ee5ce79..abd70542a42f 100644
--- a/core/node/tee_verifier_input_producer/src/lib.rs
+++ b/core/node/tee_verifier_input_producer/src/lib.rs
@@ -19,7 +19,7 @@ use zksync_prover_interface::inputs::{
 };
 use zksync_queued_job_processor::JobProcessor;
 use zksync_tee_verifier::Verify;
-use zksync_types::{L1BatchNumber, L2ChainId};
+use zksync_types::{tee_types::TeeType, L1BatchNumber, L2ChainId};
 use zksync_utils::u256_to_h256;
 use zksync_vm_utils::storage::L1BatchParamsProvider;
 
@@ -241,7 +241,7 @@ impl JobProcessor for TeeVerifierInputProducer {
             .context("failed to mark job as successful for TeeVerifierInputProducer")?;
         transaction
             .tee_proof_generation_dal()
-            .insert_tee_proof_generation_job(job_id)
+            .insert_tee_proof_generation_job(job_id, TeeType::Sgx)
             .await?;
         transaction
             .commit()