From f4b058064d829fa9caf9aa9c3035daa4c17dfa7b Mon Sep 17 00:00:00 2001
From: Patrick Beza
Date: Wed, 24 Jul 2024 14:10:25 +0200
Subject: [PATCH 01/20] feat(tee): add tee/get_proof endpoint to TEE Prover Gateway

---
 ...a6f325a6ef0da152c51234b177a78227e2320.json | 40 +++++++++++++++++
 core/lib/dal/src/tee_proof_generation_dal.rs  | 44 +++++++++++++++++++
 core/node/proof_data_handler/src/lib.rs       | 16 ++++++-
 .../src/tee_request_processor.rs              | 21 ++++++++-
 4 files changed, 119 insertions(+), 2 deletions(-)
 create mode 100644 core/lib/dal/.sqlx/query-ef6f778df3f5dbe5014c786c081a6f325a6ef0da152c51234b177a78227e2320.json

diff --git a/core/lib/dal/.sqlx/query-ef6f778df3f5dbe5014c786c081a6f325a6ef0da152c51234b177a78227e2320.json b/core/lib/dal/.sqlx/query-ef6f778df3f5dbe5014c786c081a6f325a6ef0da152c51234b177a78227e2320.json
new file mode 100644
index 000000000000..c1a6d5989555
--- /dev/null
+++ b/core/lib/dal/.sqlx/query-ef6f778df3f5dbe5014c786c081a6f325a6ef0da152c51234b177a78227e2320.json
@@ -0,0 +1,40 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\n SELECT\n signature,\n pubkey,\n proof,\n tee_type\n FROM\n tee_proof_generation_details\n WHERE\n l1_batch_number = $1\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "signature",
+        "type_info": "Bytea"
+      },
+      {
+        "ordinal": 1,
+        "name": "pubkey",
+        "type_info": "Bytea"
+      },
+      {
+        "ordinal": 2,
+        "name": "proof",
+        "type_info": "Bytea"
+      },
+      {
+        "ordinal": 3,
+        "name": "tee_type",
+        "type_info": "Text"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Int8"
+      ]
+    },
+    "nullable": [
+      true,
+      true,
+      true,
+      true
+    ]
+  },
+  "hash": "ef6f778df3f5dbe5014c786c081a6f325a6ef0da152c51234b177a78227e2320"
+}
diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs
index 0ddf36abdbed..4a339b7d044e 100644
--- a/core/lib/dal/src/tee_proof_generation_dal.rs
+++ b/core/lib/dal/src/tee_proof_generation_dal.rs
@@ -1,5 +1,6 @@
 use std::time::Duration;
 
+use serde::{Deserialize, Serialize};
 use strum::{Display, EnumString};
 use zksync_db_connection::{
     connection::Connection,
@@ -16,6 +17,19 @@ pub struct TeeProofGenerationDal<'a, 'c> {
     pub(crate) storage: &'a mut Connection<'c, Core>,
 }
 
+#[derive(Debug, Serialize, Deserialize)]
+pub struct TeeProof {
+    // signature generated within the TEE enclave, using the privkey corresponding to the pubkey
+    pub signature: Option<Vec<u8>>,
+    // pubkey used for signature verification; each key pair is attested by the TEE attestation
+    // stored in the db
+    pub pubkey: Option<Vec<u8>>,
+    // data that was signed
+    pub proof: Option<Vec<u8>>,
+    // type of TEE used for attestation
+    pub tee_type: Option<TeeType>,
+}
+
 #[derive(Debug, EnumString, Display)]
 enum TeeProofGenerationJobStatus {
     #[strum(serialize = "ready_to_be_proven")]
     ReadyToBeProven,
     #[strum(serialize = "picked_by_prover")]
     PickedByProver,
     #[strum(serialize = "generated")]
     Generated,
     #[strum(serialize = "skipped")]
     Skipped,
 }
@@ -202,4 +216,34 @@ impl TeeProofGenerationDal<'_, '_> {
 
         Ok(())
     }
+
+    pub async fn get_proof(&mut self, block_number: L1BatchNumber) -> DalResult<Option<TeeProof>> {
+        let result: Option<TeeProof> = sqlx::query!(
+            r#"
+            SELECT
+                signature,
+                pubkey,
+                proof,
+                tee_type
+            FROM
+                tee_proof_generation_details
+            WHERE
+                l1_batch_number = $1
+            "#,
+            i64::from(block_number.0)
+        )
+        .fetch_optional(self.storage.conn())
+        .await
+        .unwrap()
+        .map(|row| TeeProof {
+            signature: row.signature,
+            pubkey: row.pubkey,
+            proof: row.proof,
+            tee_type: row
+                .tee_type
+                .map(|tt| tt.parse::<TeeType>().expect("Invalid TEE type")),
+        });
+
+        Ok(result)
+    }
 }
diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs
index 618a786ea658..d91ce78ecfca 100644
--- a/core/node/proof_data_handler/src/lib.rs
+++ b/core/node/proof_data_handler/src/lib.rs @@ -1,7 +1,11 @@ use std::{net::SocketAddr, sync::Arc}; use anyhow::Context as _; -use axum::{extract::Path, routing::post, Json, Router}; +use axum::{ + extract::Path, + routing::{get, post}, + Json, Router, +}; use request_processor::RequestProcessor; use tee_request_processor::TeeRequestProcessor; use tokio::sync::watch; @@ -91,6 +95,7 @@ fn create_proof_processing_router( TeeRequestProcessor::new(blob_store, connection_pool, config.clone()); let submit_tee_proof_processor = get_tee_proof_gen_processor.clone(); let register_tee_attestation_processor = get_tee_proof_gen_processor.clone(); + let get_tee_proof_processor = get_tee_proof_gen_processor.clone(); router = router.route( "/tee/proof_inputs", @@ -121,6 +126,15 @@ fn create_proof_processing_router( .await }, ), + ) + .route("tee/get_proof/:l1_batch_number", + get( + move |l1_batch_number: Path| async move { + get_tee_proof_processor + .get_proof(l1_batch_number) + .await + }, + ) ); } diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index 243c9e06cfcc..fce727e22645 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use axum::{extract::Path, Json}; use zksync_config::configs::ProofDataHandlerConfig; -use zksync_dal::{ConnectionPool, Core, CoreDal}; +use zksync_dal::{tee_proof_generation_dal::TeeProof, ConnectionPool, Core, CoreDal}; use zksync_object_store::ObjectStore; use zksync_prover_interface::{ api::{ @@ -118,4 +118,23 @@ impl TeeRequestProcessor { Ok(Json(RegisterTeeAttestationResponse::Success)) } + + pub(crate) async fn get_proof( + &self, + Path(l1_batch_number): Path, + ) -> Result, RequestProcessorError> { + let mut connection = self + .pool + .connection() + .await + .map_err(RequestProcessorError::Dal)?; + let mut dal = connection.tee_proof_generation_dal(); + let l1_batch_number = L1BatchNumber(l1_batch_number); + let tee_proof = dal + .get_proof(l1_batch_number) + .await + .map_err(RequestProcessorError::Dal)?; + + Ok(Json(tee_proof.unwrap())) + } } From 402efe7119c399d72a6faa5a61bd19e2152923ae Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 25 Jul 2024 13:44:29 +0200 Subject: [PATCH 02/20] Get rid of .unwrap() in TEE proof generation DAL --- core/lib/dal/src/tee_proof_generation_dal.rs | 68 +++++++++++-------- .../src/tee_request_processor.rs | 2 +- 2 files changed, 39 insertions(+), 31 deletions(-) diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 4a339b7d044e..e93743781f87 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -48,7 +48,7 @@ impl TeeProofGenerationDal<'_, '_> { processing_timeout: Duration, ) -> DalResult> { let processing_timeout = pg_interval_from_duration(processing_timeout); - let result: Option = sqlx::query!( + let query = sqlx::query!( r#" UPDATE tee_proof_generation_details SET @@ -82,13 +82,15 @@ impl TeeProofGenerationDal<'_, '_> { tee_proof_generation_details.l1_batch_number "#, &processing_timeout, - ) - .fetch_optional(self.storage.conn()) - .await - .unwrap() - .map(|row| L1BatchNumber(row.l1_batch_number as u32)); + ); + let batch_number = Instrumented::new("get_next_block_to_be_proven") + .with_arg("processing_timeout", &processing_timeout) + .with(query) + .fetch_optional(self.storage) + .await? 
+ .map(|row| L1BatchNumber(row.l1_batch_number as u32)); - Ok(result) + Ok(batch_number) } pub async fn save_proof_artifacts_metadata( @@ -163,7 +165,7 @@ impl TeeProofGenerationDal<'_, '_> { } pub async fn get_oldest_unpicked_batch(&mut self) -> DalResult> { - let result: Option = sqlx::query!( + let query = sqlx::query!( r#" SELECT proofs.l1_batch_number @@ -178,13 +180,14 @@ impl TeeProofGenerationDal<'_, '_> { LIMIT 1 "#, - ) - .fetch_optional(self.storage.conn()) - .await - .unwrap() - .map(|row| L1BatchNumber(row.l1_batch_number as u32)); + ); + let batch_number = Instrumented::new("get_oldest_unpicked_batch") + .with(query) + .fetch_optional(self.storage) + .await? + .map(|row| L1BatchNumber(row.l1_batch_number as u32)); - Ok(result) + Ok(batch_number) } pub async fn save_attestation(&mut self, pubkey: &[u8], attestation: &[u8]) -> DalResult<()> { @@ -217,8 +220,11 @@ impl TeeProofGenerationDal<'_, '_> { Ok(()) } - pub async fn get_proof(&mut self, block_number: L1BatchNumber) -> DalResult> { - let result: Option = sqlx::query!( + pub async fn get_tee_proof( + &mut self, + block_number: L1BatchNumber, + ) -> DalResult> { + let query = sqlx::query!( r#" SELECT signature, @@ -231,19 +237,21 @@ impl TeeProofGenerationDal<'_, '_> { l1_batch_number = $1 "#, i64::from(block_number.0) - ) - .fetch_optional(self.storage.conn()) - .await - .unwrap() - .map(|row| TeeProof { - signature: row.signature, - pubkey: row.pubkey, - proof: row.proof, - tee_type: row - .tee_type - .map(|tt| tt.parse::().expect("Invalid TEE type")), - }); - - Ok(result) + ); + let proof = Instrumented::new("get_tee_proof") + .with_arg("l1_batch_number", &block_number) + .with(query) + .fetch_optional(self.storage) + .await? + .map(|row| TeeProof { + signature: row.signature, + pubkey: row.pubkey, + proof: row.proof, + tee_type: row + .tee_type + .map(|tt| tt.parse::().expect("Invalid TEE type")), + }); + + Ok(proof) } } diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index fce727e22645..686d500b9eb4 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -131,7 +131,7 @@ impl TeeRequestProcessor { let mut dal = connection.tee_proof_generation_dal(); let l1_batch_number = L1BatchNumber(l1_batch_number); let tee_proof = dal - .get_proof(l1_batch_number) + .get_tee_proof(l1_batch_number) .await .map_err(RequestProcessorError::Dal)?; From 3e3ef004139fd483b7f39e0953159769c18f7761 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 25 Jul 2024 13:49:21 +0200 Subject: [PATCH 03/20] Fix a typo in the proof_data_handler endpoint --- core/node/proof_data_handler/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index d91ce78ecfca..50643e7add1f 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -127,7 +127,7 @@ fn create_proof_processing_router( }, ), ) - .route("tee/get_proof/:l1_batch_number", + .route("/tee/get_proof/:l1_batch_number", get( move |l1_batch_number: Path| async move { get_tee_proof_processor From 45cd290f976422984d552c586797bd91caea18d8 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Fri, 26 Jul 2024 10:35:56 +0200 Subject: [PATCH 04/20] Add unit test --- core/node/proof_data_handler/src/tests.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git 
a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 1fbe563d2d28..0dac2f960a87 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -180,6 +180,20 @@ async fn submit_tee_proof() { .unwrap(); assert!(oldest_batch_number.is_none()); + + // there should be one TEE proof in the db now + + let proof = proof_db_conn + .tee_proof_generation_dal() + .get_tee_proof(batch_number) + .await + .unwrap() + .unwrap(); + + assert_eq!(proof.signature.unwrap(), tee_proof_request.0.signature); + assert_eq!(proof.pubkey.unwrap(), tee_proof_request.0.pubkey); + assert_eq!(proof.proof.unwrap(), tee_proof_request.0.proof); + assert_eq!(proof.tee_type.unwrap(), tee_proof_request.0.tee_type); } // Mock SQL db with information about the status of the TEE proof generation From cea2cf98860a65a7ffcb34094833e344874be8e9 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 29 Jul 2024 13:47:08 +0200 Subject: [PATCH 05/20] Restructure db tables for TEE proof generation Make them more flexible to allow multiple proofs for each (batch_number, tee_type) pair. --- core/bin/zksync_tee_prover/src/api_client.rs | 12 +- core/bin/zksync_tee_prover/src/tee_prover.rs | 5 +- ...4b4a0c11d0a45774fade80e88cb902104fe28.json | 20 ++ ...38dbd01af9068af4059b53fe79a6f2505b0a5.json | 17 ++ ...304afac6fc7b4c3e5bed2ff4f7da248523e33.json | 20 ++ ...7042743766d7fc2c231bb699bef005b81a7fc.json | 29 +++ ...3521353d37863bad59cc4c364b83d02e63db1.json | 15 ++ core/lib/dal/doc/ProofGenerationDal.md | 22 -- ...erifier_input_producer_jobs_table.down.sql | 1 + ..._verifier_input_producer_jobs_table.up.sql | 4 +- ...ee_proof_generation_details_table.down.sql | 6 +- ..._tee_proof_generation_details_table.up.sql | 17 +- core/lib/dal/src/models/mod.rs | 1 + core/lib/dal/src/models/storage_tee_proof.rs | 35 +++ core/lib/dal/src/proof_generation_dal.rs | 1 - core/lib/dal/src/tee_proof_generation_dal.rs | 200 +++++++++--------- core/lib/prover_interface/src/api.rs | 11 +- core/lib/prover_interface/src/outputs.rs | 2 + .../tests/job_serialization.rs | 2 + core/node/proof_data_handler/src/lib.rs | 16 +- .../src/tee_request_processor.rs | 52 ++--- .../tee_verifier_input_producer/src/lib.rs | 4 +- 22 files changed, 298 insertions(+), 194 deletions(-) create mode 100644 core/lib/dal/.sqlx/query-184573db68b82c15ad5a647b4784b4a0c11d0a45774fade80e88cb902104fe28.json create mode 100644 core/lib/dal/.sqlx/query-1ef09a1df23f8dea9779f34350b38dbd01af9068af4059b53fe79a6f2505b0a5.json create mode 100644 core/lib/dal/.sqlx/query-7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33.json create mode 100644 core/lib/dal/.sqlx/query-94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc.json create mode 100644 core/lib/dal/.sqlx/query-eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1.json delete mode 100644 core/lib/dal/doc/ProofGenerationDal.md create mode 100644 core/lib/dal/src/models/storage_tee_proof.rs diff --git a/core/bin/zksync_tee_prover/src/api_client.rs b/core/bin/zksync_tee_prover/src/api_client.rs index 1530da971157..9e9d1afe5b66 100644 --- a/core/bin/zksync_tee_prover/src/api_client.rs +++ b/core/bin/zksync_tee_prover/src/api_client.rs @@ -8,7 +8,6 @@ use zksync_prover_interface::{ RegisterTeeAttestationRequest, RegisterTeeAttestationResponse, SubmitTeeProofRequest, SubmitTeeProofResponse, TeeProofGenerationDataRequest, TeeProofGenerationDataResponse, }, - inputs::TeeVerifierInput, outputs::L1BatchTeeProofForL1, }; use 
zksync_types::{tee_types::TeeType, L1BatchNumber}; @@ -74,17 +73,21 @@ impl TeeApiClient { /// Fetches the next job for the TEE prover to process, verifying and signing it if the /// verification is successful. - pub async fn get_job(&self) -> Result>, TeeProverError> { - let request = TeeProofGenerationDataRequest {}; + pub async fn get_job( + &self, + tee_type: TeeType, + ) -> Result { + let request = TeeProofGenerationDataRequest { tee_type }; let response = self .post::<_, TeeProofGenerationDataResponse, _>("/tee/proof_inputs", request) .await?; - Ok(response.0) + Ok(response) } /// Submits the successfully verified proof to the TEE prover interface API. pub async fn submit_proof( &self, + proof_id: i64, batch_number: L1BatchNumber, signature: Signature, pubkey: &PublicKey, @@ -92,6 +95,7 @@ impl TeeApiClient { tee_type: TeeType, ) -> Result<(), TeeProverError> { let request = SubmitTeeProofRequest(Box::new(L1BatchTeeProofForL1 { + proof_id, signature: signature.serialize_compact().into(), pubkey: pubkey.serialize().into(), proof: root_hash.as_bytes().into(), diff --git a/core/bin/zksync_tee_prover/src/tee_prover.rs b/core/bin/zksync_tee_prover/src/tee_prover.rs index b14d07b72db6..a3891b0fd7c0 100644 --- a/core/bin/zksync_tee_prover/src/tee_prover.rs +++ b/core/bin/zksync_tee_prover/src/tee_prover.rs @@ -112,11 +112,12 @@ impl TeeProver { } async fn step(&self) -> Result, TeeProverError> { - match self.api_client.get_job().await? { + match self.api_client.get_job(self.tee_type).await? { Some(job) => { - let (signature, batch_number, root_hash) = self.verify(*job)?; + let (signature, batch_number, root_hash) = self.verify(job.input)?; self.api_client .submit_proof( + job.proof_id.unwrap(), // TODO unwrap() is not safe here batch_number, signature, &self.public_key, diff --git a/core/lib/dal/.sqlx/query-184573db68b82c15ad5a647b4784b4a0c11d0a45774fade80e88cb902104fe28.json b/core/lib/dal/.sqlx/query-184573db68b82c15ad5a647b4784b4a0c11d0a45774fade80e88cb902104fe28.json new file mode 100644 index 000000000000..07e25f505a6f --- /dev/null +++ b/core/lib/dal/.sqlx/query-184573db68b82c15ad5a647b4784b4a0c11d0a45774fade80e88cb902104fe28.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proofs AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs\n ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.prover_taken_at IS NULL\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "184573db68b82c15ad5a647b4784b4a0c11d0a45774fade80e88cb902104fe28" +} diff --git a/core/lib/dal/.sqlx/query-1ef09a1df23f8dea9779f34350b38dbd01af9068af4059b53fe79a6f2505b0a5.json b/core/lib/dal/.sqlx/query-1ef09a1df23f8dea9779f34350b38dbd01af9068af4059b53fe79a6f2505b0a5.json new file mode 100644 index 000000000000..3d5bd64e3a57 --- /dev/null +++ b/core/lib/dal/.sqlx/query-1ef09a1df23f8dea9779f34350b38dbd01af9068af4059b53fe79a6f2505b0a5.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tee_proofs\n SET\n pubkey = $1,\n signature = $2,\n proof = $3,\n proved_at = NOW()\n WHERE\n id = $4\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Bytea", + "Bytea", + "Bytea", + "Int8" + ] + }, + "nullable": [] + }, + "hash": 
"1ef09a1df23f8dea9779f34350b38dbd01af9068af4059b53fe79a6f2505b0a5" +} diff --git a/core/lib/dal/.sqlx/query-7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33.json b/core/lib/dal/.sqlx/query-7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33.json new file mode 100644 index 000000000000..fdab20788c36 --- /dev/null +++ b/core/lib/dal/.sqlx/query-7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proofs AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.prover_taken_at IS NULL\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33" +} diff --git a/core/lib/dal/.sqlx/query-94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc.json b/core/lib/dal/.sqlx/query-94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc.json new file mode 100644 index 000000000000..fb5ec089514b --- /dev/null +++ b/core/lib/dal/.sqlx/query-94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc.json @@ -0,0 +1,29 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tee_proofs\n SET\n prover_taken_at = NOW()\n WHERE\n id = (\n SELECT\n proofs.id\n FROM\n tee_proofs AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.tee_type = $1\n AND (\n proofs.prover_taken_at IS NULL\n OR proofs.prover_taken_at < NOW() - $2::INTERVAL\n )\n ORDER BY\n tee_proofs.l1_batch_number ASC,\n proofs.prover_taken_at ASC NULLS FIRST\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proofs.id,\n tee_proofs.l1_batch_number\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Text", + "Interval" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc" +} diff --git a/core/lib/dal/.sqlx/query-eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1.json b/core/lib/dal/.sqlx/query-eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1.json new file mode 100644 index 000000000000..0db3c5ca1b1d --- /dev/null +++ b/core/lib/dal/.sqlx/query-eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n tee_proofs (l1_batch_number, tee_type, created_at)\n VALUES\n ($1, $2, NOW())\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Text" + ] + }, + "nullable": [] + }, + "hash": "eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1" +} diff --git a/core/lib/dal/doc/ProofGenerationDal.md b/core/lib/dal/doc/ProofGenerationDal.md deleted file mode 100644 index 618fdfba13b0..000000000000 --- a/core/lib/dal/doc/ProofGenerationDal.md +++ /dev/null @@ -1,22 +0,0 @@ -# ProofGenerationDal - -## Table Name - -proof_generation_details - -## `status` Diagram - -```mermaid ---- -title: Status Diagram ---- -stateDiagram-v2 -[*] --> 
ready_to_be_proven : insert_proof_generation_details -ready_to_be_proven --> picked_by_prover : get_next_block_to_be_proven -picked_by_prover --> generated : save_proof_artifacts_metadata -generated --> [*] - -[*] --> skipped : mark_proof_generation_job_as_skipped -skipped --> [*] - -``` diff --git a/core/lib/dal/migrations/20240325133100_add_tee_verifier_input_producer_jobs_table.down.sql b/core/lib/dal/migrations/20240325133100_add_tee_verifier_input_producer_jobs_table.down.sql index 2d4d6a6ad7a7..bccd4f97dcbb 100644 --- a/core/lib/dal/migrations/20240325133100_add_tee_verifier_input_producer_jobs_table.down.sql +++ b/core/lib/dal/migrations/20240325133100_add_tee_verifier_input_producer_jobs_table.down.sql @@ -1,4 +1,5 @@ DROP INDEX IF EXISTS idx_tee_verifier_input_producer_jobs_status_processing_attempts; +DROP INDEX IF EXISTS idx_tee_verifier_input_producer_jobs_l1_batch_number_status; DROP TABLE IF EXISTS tee_verifier_input_producer_jobs; diff --git a/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql b/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql index eb34068f876d..26e7e5a11940 100644 --- a/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql +++ b/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql @@ -5,7 +5,6 @@ CREATE TABLE IF NOT EXISTS tee_verifier_input_producer_jobs l1_batch_number BIGINT NOT NULL PRIMARY KEY, attempts SMALLINT NOT NULL DEFAULT 0, status tee_verifier_input_producer_job_status, - picked_by TEXT, input_blob_url TEXT, error TEXT, created_at TIMESTAMP NOT NULL, @@ -16,3 +15,6 @@ CREATE TABLE IF NOT EXISTS tee_verifier_input_producer_jobs CREATE INDEX IF NOT EXISTS idx_tee_verifier_input_producer_jobs_status_processing_attempts ON tee_verifier_input_producer_jobs (status, processing_started_at, attempts); + +CREATE INDEX IF NOT EXISTS idx_tee_verifier_input_producer_jobs_l1_batch_number_status +ON tee_verifier_input_producer_jobs (l1_batch_number, status); diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql index 5b4f9958a8ea..d46bc4aa154d 100644 --- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql @@ -1,4 +1,4 @@ -DROP TABLE IF EXISTS tee_attestations; -DROP TABLE IF EXISTS tee_proof_generation_details; +DROP INDEX IF EXISTS idx_proofs_number_per_batch_number_and_tee_type; -DROP INDEX IF EXISTS idx_tee_proof_generation_details_status_prover_taken_at; +DROP TABLE IF EXISTS tee_attestations; +DROP TABLE IF EXISTS tee_proofs; diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql index 3a249c44346c..013bdcd02b17 100644 --- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql @@ -4,19 +4,18 @@ CREATE TABLE IF NOT EXISTS tee_attestations attestation BYTEA ); -CREATE TABLE IF NOT EXISTS tee_proof_generation_details +CREATE TABLE IF NOT EXISTS tee_proofs ( - l1_batch_number BIGINT PRIMARY KEY REFERENCES tee_verifier_input_producer_jobs (l1_batch_number) ON DELETE CASCADE, - status TEXT NOT NULL, + id 
BIGSERIAL PRIMARY KEY,
+    l1_batch_number BIGINT NOT NULL REFERENCES tee_verifier_input_producer_jobs (l1_batch_number) ON DELETE CASCADE,
+    tee_type TEXT NOT NULL,
+    pubkey BYTEA REFERENCES tee_attestations (pubkey) ON DELETE CASCADE,
     signature BYTEA,
-    pubkey BYTEA REFERENCES tee_attestations (pubkey) ON DELETE SET NULL,
     proof BYTEA,
-    tee_type TEXT,
     created_at TIMESTAMP NOT NULL,
-    updated_at TIMESTAMP NOT NULL,
+    proved_at TIMESTAMP,
     prover_taken_at TIMESTAMP
 );
 
-CREATE INDEX IF NOT EXISTS idx_tee_proof_generation_details_status_prover_taken_at
-    ON tee_proof_generation_details (prover_taken_at)
-    WHERE status = 'picked_by_prover';
+CREATE INDEX IF NOT EXISTS idx_proofs_number_per_batch_number_and_tee_type
+    ON tee_proofs (l1_batch_number, tee_type);
diff --git a/core/lib/dal/src/models/mod.rs b/core/lib/dal/src/models/mod.rs
index 1e852e3f6364..d22541620f2a 100644
--- a/core/lib/dal/src/models/mod.rs
+++ b/core/lib/dal/src/models/mod.rs
@@ -11,6 +11,7 @@ pub mod storage_log;
 pub mod storage_oracle_info;
 pub mod storage_protocol_version;
 pub mod storage_sync;
+pub mod storage_tee_proof;
 pub mod storage_transaction;
 pub mod storage_verification_request;
 pub mod storage_witness_job_info;
diff --git a/core/lib/dal/src/models/storage_tee_proof.rs b/core/lib/dal/src/models/storage_tee_proof.rs
new file mode 100644
index 000000000000..9c55c86eb26d
--- /dev/null
+++ b/core/lib/dal/src/models/storage_tee_proof.rs
@@ -0,0 +1,35 @@
+use serde::{Deserialize, Serialize}; // TODO needed?
+use zksync_types::{tee_types::TeeType, L1BatchNumber};
+
+/// Represents a proof generated within a TEE enclave
+/// TODO move it to core/lib/types/src/api/mod.rs and call TeeProof?
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct StorageTeeProof {
+    // batch number for which the proof was generated
+    pub l1_batch_number: L1BatchNumber,
+    // type of TEE used for attestation
+    pub tee_type: Option<TeeType>,
+    // pubkey used for signature verification; each key pair is attested by the TEE attestation
+    // stored in the db
+    pub pubkey: Option<Vec<u8>>,
+    // signature generated within the TEE enclave, using the privkey corresponding to the pubkey
+    pub signature: Option<Vec<u8>>,
+    // data that was signed
+    pub proof: Option<Vec<u8>>,
+    // attestation quote generated within the TEE enclave
+    pub attestation: Option<Vec<u8>>,
+    // timestamp when the proof was generated
+    pub proved_at: chrono::DateTime<chrono::Utc>,
+}
+
+/// TODO rename it TeeProof once StorageTeeProof is moved to api/mod.rs?
+#[derive(Debug, Clone, sqlx::FromRow)] +pub struct TmpStorageTeeProof { + #[allow(dead_code)] + pub id: i64, + pub pubkey: Option>, + pub signature: Option>, + pub proof: Option>, + pub proved_at: chrono::DateTime, + pub attestation: Option>, +} diff --git a/core/lib/dal/src/proof_generation_dal.rs b/core/lib/dal/src/proof_generation_dal.rs index 4e37cc644f8e..0c857d929fe9 100644 --- a/core/lib/dal/src/proof_generation_dal.rs +++ b/core/lib/dal/src/proof_generation_dal.rs @@ -1,4 +1,3 @@ -#![doc = include_str!("../doc/ProofGenerationDal.md")] use std::time::Duration; use strum::{Display, EnumString}; diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index e93743781f87..f0ebc7720523 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -1,130 +1,101 @@ use std::time::Duration; -use serde::{Deserialize, Serialize}; -use strum::{Display, EnumString}; use zksync_db_connection::{ - connection::Connection, - error::DalResult, - instrument::{InstrumentExt, Instrumented}, + connection::Connection, error::DalResult, instrument::Instrumented, utils::pg_interval_from_duration, }; use zksync_types::{tee_types::TeeType, L1BatchNumber}; -use crate::Core; +use crate::{ + models::storage_tee_proof::{StorageTeeProof, TmpStorageTeeProof}, + Core, +}; #[derive(Debug)] pub struct TeeProofGenerationDal<'a, 'c> { pub(crate) storage: &'a mut Connection<'c, Core>, } -#[derive(Debug, Serialize, Deserialize)] -pub struct TeeProof { - // signature generated within the TEE enclave, using the privkey corresponding to the pubkey - pub signature: Option>, - // pubkey used for signature verification; each key pair is attested by the TEE attestation - // stored in the db - pub pubkey: Option>, - // data that was signed - pub proof: Option>, - // type of TEE used for attestation - pub tee_type: Option, -} - -#[derive(Debug, EnumString, Display)] -enum TeeProofGenerationJobStatus { - #[strum(serialize = "ready_to_be_proven")] - ReadyToBeProven, - #[strum(serialize = "picked_by_prover")] - PickedByProver, - #[strum(serialize = "generated")] - Generated, - #[strum(serialize = "skipped")] - Skipped, -} - impl TeeProofGenerationDal<'_, '_> { - pub async fn get_next_block_to_be_proven( + pub async fn get_next_batch_to_be_proven( &mut self, + tee_type: TeeType, processing_timeout: Duration, - ) -> DalResult> { + ) -> DalResult> { let processing_timeout = pg_interval_from_duration(processing_timeout); let query = sqlx::query!( r#" - UPDATE tee_proof_generation_details + UPDATE tee_proofs SET - status = 'picked_by_prover', - updated_at = NOW(), prover_taken_at = NOW() WHERE - l1_batch_number = ( + id = ( SELECT - proofs.l1_batch_number + proofs.id FROM - tee_proof_generation_details AS proofs + tee_proofs AS proofs JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number WHERE inputs.status = 'Successful' + AND proofs.tee_type = $1 AND ( - proofs.status = 'ready_to_be_proven' - OR ( - proofs.status = 'picked_by_prover' - AND proofs.prover_taken_at < NOW() - $1::INTERVAL - ) + proofs.prover_taken_at IS NULL + OR proofs.prover_taken_at < NOW() - $2::INTERVAL ) ORDER BY - l1_batch_number ASC + tee_proofs.l1_batch_number ASC, + proofs.prover_taken_at ASC NULLS FIRST LIMIT 1 FOR UPDATE SKIP LOCKED ) RETURNING - tee_proof_generation_details.l1_batch_number + tee_proofs.id, + tee_proofs.l1_batch_number "#, + &tee_type.to_string(), &processing_timeout, ); - let batch_number = 
Instrumented::new("get_next_block_to_be_proven") + let batch_number = Instrumented::new("get_next_batch_to_be_proven") + .with_arg("tee_type", &tee_type) .with_arg("processing_timeout", &processing_timeout) .with(query) .fetch_optional(self.storage) .await? - .map(|row| L1BatchNumber(row.l1_batch_number as u32)); + .map(|row| (row.id as i64, L1BatchNumber(row.l1_batch_number as u32))); Ok(batch_number) } pub async fn save_proof_artifacts_metadata( &mut self, - block_number: L1BatchNumber, - signature: &[u8], + proof_id: i64, pubkey: &[u8], + signature: &[u8], proof: &[u8], - tee_type: TeeType, ) -> DalResult<()> { let query = sqlx::query!( r#" - UPDATE tee_proof_generation_details + UPDATE tee_proofs SET - status = 'generated', - signature = $1, - pubkey = $2, + pubkey = $1, + signature = $2, proof = $3, - tee_type = $4, - updated_at = NOW() + proved_at = NOW() WHERE - l1_batch_number = $5 + id = $4 "#, - signature, pubkey, + signature, proof, - tee_type.to_string(), - i64::from(block_number.0) + proof_id ); let instrumentation = Instrumented::new("save_proof_artifacts_metadata") - .with_arg("signature", &signature) .with_arg("pubkey", &pubkey) + .with_arg("signature", &signature) .with_arg("proof", &proof) - .with_arg("tee_type", &tee_type); + .with_arg("proof_id", &proof_id); let result = instrumentation .clone() .with(query) @@ -132,7 +103,8 @@ impl TeeProofGenerationDal<'_, '_> { .await?; if result.rows_affected() == 0 { let err = instrumentation.constraint_error(anyhow::anyhow!( - "Updating TEE proof for a non-existent batch number is not allowed" + "Updating TEE proof for a non-existent ID {} is not allowed", + proof_id )); return Err(err); } @@ -142,39 +114,52 @@ impl TeeProofGenerationDal<'_, '_> { pub async fn insert_tee_proof_generation_job( &mut self, - block_number: L1BatchNumber, + batch_number: L1BatchNumber, + tee_type: TeeType, ) -> DalResult<()> { - let block_number = i64::from(block_number.0); - sqlx::query!( + let batch_number = i64::from(batch_number.0); + let query = sqlx::query!( r#" INSERT INTO - tee_proof_generation_details (l1_batch_number, status, created_at, updated_at) + tee_proofs (l1_batch_number, tee_type, created_at) VALUES - ($1, 'ready_to_be_proven', NOW(), NOW()) - ON CONFLICT (l1_batch_number) DO NOTHING + ($1, $2, NOW()) "#, - block_number, - ) - .instrument("create_tee_proof_generation_details") - .with_arg("l1_batch_number", &block_number) - .report_latency() - .execute(self.storage) - .await?; + batch_number, + tee_type.to_string(), + ); + let instrumentation = Instrumented::new("insert_tee_proof_generation_job") + .with_arg("l1_batch_number", &batch_number) + .with_arg("tee_type", &tee_type); + let result = instrumentation + .clone() + .with(query) + .execute(self.storage) + .await?; + if result.rows_affected() == 0 { + let err = instrumentation.constraint_error(anyhow::anyhow!( + "Unable to insert TEE proof for {}, {}", + batch_number, + tee_type + )); + return Err(err); + } Ok(()) } + #[cfg(test)] pub async fn get_oldest_unpicked_batch(&mut self) -> DalResult> { let query = sqlx::query!( r#" SELECT proofs.l1_batch_number FROM - tee_proof_generation_details AS proofs + tee_proofs AS proofs JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number WHERE inputs.status = 'Successful' - AND proofs.status = 'ready_to_be_proven' + AND proofs.prover_taken_at IS NULL ORDER BY proofs.l1_batch_number ASC LIMIT @@ -220,38 +205,49 @@ impl TeeProofGenerationDal<'_, '_> { Ok(()) } - pub async fn get_tee_proof( + pub async 
fn get_tee_proofs( &mut self, - block_number: L1BatchNumber, - ) -> DalResult> { - let query = sqlx::query!( + batch_number: L1BatchNumber, + tee_type: Option, + ) -> DalResult> { + let query = format!( r#" SELECT - signature, - pubkey, - proof, - tee_type + tp.id, + tp.pubkey, + tp.signature, + tp.proof, + tp.proved_at, + ta.attestation FROM - tee_proof_generation_details + tee_proofs tp + LEFT JOIN + tee_attestations ta ON tp.pubkey = ta.pubkey WHERE - l1_batch_number = $1 + tp.l1_batch_number = {} + {} + ORDER BY tp.id "#, - i64::from(block_number.0) + i64::from(batch_number.0), + tee_type.map_or_else(String::new, |tt| format!("AND tp.tee_type = '{}'", tt)) ); - let proof = Instrumented::new("get_tee_proof") - .with_arg("l1_batch_number", &block_number) - .with(query) - .fetch_optional(self.storage) - .await? - .map(|row| TeeProof { - signature: row.signature, + + let proofs: Vec = sqlx::query_as(&query) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row: TmpStorageTeeProof| StorageTeeProof { + l1_batch_number: batch_number, + tee_type, pubkey: row.pubkey, + signature: row.signature, proof: row.proof, - tee_type: row - .tee_type - .map(|tt| tt.parse::().expect("Invalid TEE type")), - }); + attestation: row.attestation, + proved_at: row.proved_at, + }) + .collect(); - Ok(proof) + Ok(proofs) } } diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index 00ac85a40739..6f937d6ab91b 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -4,6 +4,7 @@ use serde::{Deserialize, Serialize}; use zksync_types::{ protocol_version::{L1VerifierConfig, ProtocolSemanticVersion}, + tee_types::TeeType, L1BatchNumber, }; @@ -29,7 +30,10 @@ pub enum ProofGenerationDataResponse { } #[derive(Debug, Serialize, Deserialize)] -pub struct TeeProofGenerationDataResponse(pub Option>); +pub struct TeeProofGenerationDataResponse { + pub proof_id: Option, + pub input: Option>, +} #[derive(Debug, Serialize, Deserialize)] pub enum SubmitProofResponse { @@ -52,7 +56,10 @@ pub enum RegisterTeeAttestationResponse { #[derive(Debug, Serialize, Deserialize)] pub struct ProofGenerationDataRequest {} -pub type TeeProofGenerationDataRequest = ProofGenerationDataRequest; +#[derive(Debug, Serialize, Deserialize)] +pub struct TeeProofGenerationDataRequest { + pub tee_type: TeeType, +} #[derive(Debug, Serialize, Deserialize)] pub enum SubmitProofRequest { diff --git a/core/lib/prover_interface/src/outputs.rs b/core/lib/prover_interface/src/outputs.rs index 9672bfb2142b..8a4bd8e47405 100644 --- a/core/lib/prover_interface/src/outputs.rs +++ b/core/lib/prover_interface/src/outputs.rs @@ -16,6 +16,8 @@ pub struct L1BatchProofForL1 { /// A "final" TEE proof that can be sent to the L1 contract. 
#[derive(Clone, PartialEq, Serialize, Deserialize)] pub struct L1BatchTeeProofForL1 { + // ID of the proof in the database to distinguish between different proofs for the same batch + pub proof_id: i64, // signature generated within the TEE enclave, using the privkey corresponding to the pubkey pub signature: Vec, // pubkey used for signature verification; each key pair is attested by the TEE attestation diff --git a/core/lib/prover_interface/tests/job_serialization.rs b/core/lib/prover_interface/tests/job_serialization.rs index a2d55a140655..81b969d5d13e 100644 --- a/core/lib/prover_interface/tests/job_serialization.rs +++ b/core/lib/prover_interface/tests/job_serialization.rs @@ -167,6 +167,7 @@ fn test_proof_request_serialization() { #[test] fn test_tee_proof_request_serialization() { let tee_proof_str = r#"{ + "proof_id": 0, "signature": [ 0, 1, 2, 3, 4 ], "pubkey": [ 5, 6, 7, 8, 9 ], "proof": [ 10, 11, 12, 13, 14 ], @@ -174,6 +175,7 @@ fn test_tee_proof_request_serialization() { }"#; let tee_proof_result = serde_json::from_str::(tee_proof_str).unwrap(); let tee_proof_expected = SubmitTeeProofRequest(Box::new(L1BatchTeeProofForL1 { + proof_id: 0, signature: vec![0, 1, 2, 3, 4], pubkey: vec![5, 6, 7, 8, 9], proof: vec![10, 11, 12, 13, 14], diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index 50643e7add1f..618a786ea658 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -1,11 +1,7 @@ use std::{net::SocketAddr, sync::Arc}; use anyhow::Context as _; -use axum::{ - extract::Path, - routing::{get, post}, - Json, Router, -}; +use axum::{extract::Path, routing::post, Json, Router}; use request_processor::RequestProcessor; use tee_request_processor::TeeRequestProcessor; use tokio::sync::watch; @@ -95,7 +91,6 @@ fn create_proof_processing_router( TeeRequestProcessor::new(blob_store, connection_pool, config.clone()); let submit_tee_proof_processor = get_tee_proof_gen_processor.clone(); let register_tee_attestation_processor = get_tee_proof_gen_processor.clone(); - let get_tee_proof_processor = get_tee_proof_gen_processor.clone(); router = router.route( "/tee/proof_inputs", @@ -126,15 +121,6 @@ fn create_proof_processing_router( .await }, ), - ) - .route("/tee/get_proof/:l1_batch_number", - get( - move |l1_batch_number: Path| async move { - get_tee_proof_processor - .get_proof(l1_batch_number) - .await - }, - ) ); } diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index 686d500b9eb4..20613ba909e2 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use axum::{extract::Path, Json}; use zksync_config::configs::ProofDataHandlerConfig; -use zksync_dal::{tee_proof_generation_dal::TeeProof, ConnectionPool, Core, CoreDal}; +use zksync_dal::{ConnectionPool, Core, CoreDal}; use zksync_object_store::ObjectStore; use zksync_prover_interface::{ api::{ @@ -49,12 +49,19 @@ impl TeeRequestProcessor { let l1_batch_number_result = connection .tee_proof_generation_dal() - .get_next_block_to_be_proven(self.config.proof_generation_timeout()) + .get_next_batch_to_be_proven(request.tee_type, self.config.proof_generation_timeout()) .await .map_err(RequestProcessorError::Dal)?; - let l1_batch_number = match l1_batch_number_result { - Some(number) => number, - None => return Ok(Json(TeeProofGenerationDataResponse(None))), + + let 
(proof_id, l1_batch_number) = match l1_batch_number_result { + Some((proof_id, number)) => (proof_id, number), + None => { + // TODO introduce a proper type + return Ok(Json(TeeProofGenerationDataResponse { + proof_id: None, + input: None, + })); + } }; let tee_verifier_input: TeeVerifierInput = self @@ -63,9 +70,12 @@ impl TeeRequestProcessor { .await .map_err(RequestProcessorError::ObjectStore)?; - Ok(Json(TeeProofGenerationDataResponse(Some(Box::new( - tee_verifier_input, - ))))) + let response = TeeProofGenerationDataResponse { + proof_id: Some(proof_id), + input: Some(Box::new(tee_verifier_input)), + }; + + Ok(Json(response)) } pub(crate) async fn submit_proof( @@ -82,16 +92,15 @@ impl TeeRequestProcessor { let mut dal = connection.tee_proof_generation_dal(); tracing::info!( - "Received proof {:?} for block number: {:?}", + "Received proof {:?} for batch number: {:?}", proof, l1_batch_number ); dal.save_proof_artifacts_metadata( - l1_batch_number, - &proof.0.signature, + proof.0.proof_id, &proof.0.pubkey, + &proof.0.signature, &proof.0.proof, - proof.0.tee_type, ) .await .map_err(RequestProcessorError::Dal)?; @@ -118,23 +127,4 @@ impl TeeRequestProcessor { Ok(Json(RegisterTeeAttestationResponse::Success)) } - - pub(crate) async fn get_proof( - &self, - Path(l1_batch_number): Path, - ) -> Result, RequestProcessorError> { - let mut connection = self - .pool - .connection() - .await - .map_err(RequestProcessorError::Dal)?; - let mut dal = connection.tee_proof_generation_dal(); - let l1_batch_number = L1BatchNumber(l1_batch_number); - let tee_proof = dal - .get_tee_proof(l1_batch_number) - .await - .map_err(RequestProcessorError::Dal)?; - - Ok(Json(tee_proof.unwrap())) - } } diff --git a/core/node/tee_verifier_input_producer/src/lib.rs b/core/node/tee_verifier_input_producer/src/lib.rs index 0cd28ee5ce79..abd70542a42f 100644 --- a/core/node/tee_verifier_input_producer/src/lib.rs +++ b/core/node/tee_verifier_input_producer/src/lib.rs @@ -19,7 +19,7 @@ use zksync_prover_interface::inputs::{ }; use zksync_queued_job_processor::JobProcessor; use zksync_tee_verifier::Verify; -use zksync_types::{L1BatchNumber, L2ChainId}; +use zksync_types::{tee_types::TeeType, L1BatchNumber, L2ChainId}; use zksync_utils::u256_to_h256; use zksync_vm_utils::storage::L1BatchParamsProvider; @@ -241,7 +241,7 @@ impl JobProcessor for TeeVerifierInputProducer { .context("failed to mark job as successful for TeeVerifierInputProducer")?; transaction .tee_proof_generation_dal() - .insert_tee_proof_generation_job(job_id) + .insert_tee_proof_generation_job(job_id, TeeType::Sgx) .await?; transaction .commit() From 90f4a077abb63da2aa56566ab25ce0d76e7f5780 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Wed, 31 Jul 2024 12:21:11 +0200 Subject: [PATCH 06/20] One proof instance per batch number per TEE type --- core/bin/zksync_tee_prover/src/api_client.rs | 7 +- core/bin/zksync_tee_prover/src/tee_prover.rs | 3 +- ...b9416720e3fdf2d7078d5d1b1ae27e7c73949.json | 48 ++++++ ...bd3c9feadae298511f0d5622b54426c144376.json | 28 ++++ ...2c74a1f1eea15dd55ddf645e6989b2d249709.json | 31 ++++ ...7f5f0d2a33913312eac25b62cf3e3363e459d.json | 76 +++++++++ ...erifier_input_producer_jobs_table.down.sql | 1 - ..._verifier_input_producer_jobs_table.up.sql | 3 - ...ee_proof_generation_details_table.down.sql | 4 +- ..._tee_proof_generation_details_table.up.sql | 14 +- core/lib/dal/src/models/storage_tee_proof.rs | 2 - core/lib/dal/src/tee_proof_generation_dal.rs | 150 +++++++++++------- core/lib/prover_interface/src/api.rs | 5 +- 
core/lib/prover_interface/src/outputs.rs | 2 - .../tests/job_serialization.rs | 2 - core/lib/types/src/api/mod.rs | 12 ++ core/lib/web3_decl/src/namespaces/unstable.rs | 13 +- .../backend_jsonrpsee/namespaces/unstable.rs | 16 +- .../src/web3/namespaces/unstable.rs | 29 +++- .../src/tee_request_processor.rs | 20 +-- core/node/proof_data_handler/src/tests.rs | 28 ++-- 21 files changed, 382 insertions(+), 112 deletions(-) create mode 100644 core/lib/dal/.sqlx/query-163a136aaac4d01dba226012f27b9416720e3fdf2d7078d5d1b1ae27e7c73949.json create mode 100644 core/lib/dal/.sqlx/query-34dd3f0fef34fba5fe5746cc326bd3c9feadae298511f0d5622b54426c144376.json create mode 100644 core/lib/dal/.sqlx/query-90f1aba888fac01b74aa763166e2c74a1f1eea15dd55ddf645e6989b2d249709.json create mode 100644 core/lib/dal/.sqlx/query-efc9b281c548cfe75cb25cb6e0c7f5f0d2a33913312eac25b62cf3e3363e459d.json diff --git a/core/bin/zksync_tee_prover/src/api_client.rs b/core/bin/zksync_tee_prover/src/api_client.rs index 9e9d1afe5b66..13fbc1ba8868 100644 --- a/core/bin/zksync_tee_prover/src/api_client.rs +++ b/core/bin/zksync_tee_prover/src/api_client.rs @@ -8,6 +8,7 @@ use zksync_prover_interface::{ RegisterTeeAttestationRequest, RegisterTeeAttestationResponse, SubmitTeeProofRequest, SubmitTeeProofResponse, TeeProofGenerationDataRequest, TeeProofGenerationDataResponse, }, + inputs::TeeVerifierInput, outputs::L1BatchTeeProofForL1, }; use zksync_types::{tee_types::TeeType, L1BatchNumber}; @@ -76,18 +77,17 @@ impl TeeApiClient { pub async fn get_job( &self, tee_type: TeeType, - ) -> Result { + ) -> Result>, TeeProverError> { let request = TeeProofGenerationDataRequest { tee_type }; let response = self .post::<_, TeeProofGenerationDataResponse, _>("/tee/proof_inputs", request) .await?; - Ok(response) + Ok(response.0) } /// Submits the successfully verified proof to the TEE prover interface API. pub async fn submit_proof( &self, - proof_id: i64, batch_number: L1BatchNumber, signature: Signature, pubkey: &PublicKey, @@ -95,7 +95,6 @@ impl TeeApiClient { tee_type: TeeType, ) -> Result<(), TeeProverError> { let request = SubmitTeeProofRequest(Box::new(L1BatchTeeProofForL1 { - proof_id, signature: signature.serialize_compact().into(), pubkey: pubkey.serialize().into(), proof: root_hash.as_bytes().into(), diff --git a/core/bin/zksync_tee_prover/src/tee_prover.rs b/core/bin/zksync_tee_prover/src/tee_prover.rs index a3891b0fd7c0..f797b67a418e 100644 --- a/core/bin/zksync_tee_prover/src/tee_prover.rs +++ b/core/bin/zksync_tee_prover/src/tee_prover.rs @@ -114,10 +114,9 @@ impl TeeProver { async fn step(&self) -> Result, TeeProverError> { match self.api_client.get_job(self.tee_type).await? 
{ Some(job) => { - let (signature, batch_number, root_hash) = self.verify(job.input)?; + let (signature, batch_number, root_hash) = self.verify(*job)?; self.api_client .submit_proof( - job.proof_id.unwrap(), // TODO unwrap() is not safe here batch_number, signature, &self.public_key, diff --git a/core/lib/dal/.sqlx/query-163a136aaac4d01dba226012f27b9416720e3fdf2d7078d5d1b1ae27e7c73949.json b/core/lib/dal/.sqlx/query-163a136aaac4d01dba226012f27b9416720e3fdf2d7078d5d1b1ae27e7c73949.json new file mode 100644 index 000000000000..229520f87d30 --- /dev/null +++ b/core/lib/dal/.sqlx/query-163a136aaac4d01dba226012f27b9416720e3fdf2d7078d5d1b1ae27e7c73949.json @@ -0,0 +1,48 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proofs AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = $1\n AND proofs.status = $2\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + { + "Custom": { + "name": "tee_verifier_input_producer_job_status", + "kind": { + "Enum": [ + "Queued", + "ManuallySkipped", + "InProgress", + "Successful", + "Failed" + ] + } + } + }, + { + "Custom": { + "name": "tee_proofs_job_status", + "kind": { + "Enum": [ + "ReadyToBeProven", + "PickedByProver", + "Generated", + "Skipped" + ] + } + } + } + ] + }, + "nullable": [ + false + ] + }, + "hash": "163a136aaac4d01dba226012f27b9416720e3fdf2d7078d5d1b1ae27e7c73949" +} diff --git a/core/lib/dal/.sqlx/query-34dd3f0fef34fba5fe5746cc326bd3c9feadae298511f0d5622b54426c144376.json b/core/lib/dal/.sqlx/query-34dd3f0fef34fba5fe5746cc326bd3c9feadae298511f0d5622b54426c144376.json new file mode 100644 index 000000000000..ad0fcc071867 --- /dev/null +++ b/core/lib/dal/.sqlx/query-34dd3f0fef34fba5fe5746cc326bd3c9feadae298511f0d5622b54426c144376.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n tee_proofs (l1_batch_number, tee_type, status, created_at, updated_at)\n VALUES\n ($1, $2, $3, NOW(), NOW())\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Text", + { + "Custom": { + "name": "tee_proofs_job_status", + "kind": { + "Enum": [ + "ReadyToBeProven", + "PickedByProver", + "Generated", + "Skipped" + ] + } + } + } + ] + }, + "nullable": [] + }, + "hash": "34dd3f0fef34fba5fe5746cc326bd3c9feadae298511f0d5622b54426c144376" +} diff --git a/core/lib/dal/.sqlx/query-90f1aba888fac01b74aa763166e2c74a1f1eea15dd55ddf645e6989b2d249709.json b/core/lib/dal/.sqlx/query-90f1aba888fac01b74aa763166e2c74a1f1eea15dd55ddf645e6989b2d249709.json new file mode 100644 index 000000000000..b1bb73b2d091 --- /dev/null +++ b/core/lib/dal/.sqlx/query-90f1aba888fac01b74aa763166e2c74a1f1eea15dd55ddf645e6989b2d249709.json @@ -0,0 +1,31 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tee_proofs\n SET\n tee_type = $1,\n status = $2,\n pubkey = $3,\n signature = $4,\n proof = $5,\n updated_at = NOW()\n WHERE\n l1_batch_number = $6\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + { + "Custom": { + "name": "tee_proofs_job_status", + "kind": { + "Enum": [ + "ReadyToBeProven", + "PickedByProver", + "Generated", + "Skipped" + ] + } + } + }, + "Bytea", + "Bytea", + "Bytea", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "90f1aba888fac01b74aa763166e2c74a1f1eea15dd55ddf645e6989b2d249709" +} diff --git 
a/core/lib/dal/.sqlx/query-efc9b281c548cfe75cb25cb6e0c7f5f0d2a33913312eac25b62cf3e3363e459d.json b/core/lib/dal/.sqlx/query-efc9b281c548cfe75cb25cb6e0c7f5f0d2a33913312eac25b62cf3e3363e459d.json new file mode 100644 index 000000000000..120a9c4e0c39 --- /dev/null +++ b/core/lib/dal/.sqlx/query-efc9b281c548cfe75cb25cb6e0c7f5f0d2a33913312eac25b62cf3e3363e459d.json @@ -0,0 +1,76 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tee_proofs\n SET\n status = $1,\n updated_at = NOW(),\n prover_taken_at = NOW()\n WHERE\n tee_type = $2\n AND l1_batch_number = (\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proofs AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = $3\n AND (\n proofs.status = $4\n OR (\n proofs.status = $5\n AND proofs.prover_taken_at < NOW() - $6::INTERVAL\n )\n )\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proofs.l1_batch_number\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + { + "Custom": { + "name": "tee_proofs_job_status", + "kind": { + "Enum": [ + "ReadyToBeProven", + "PickedByProver", + "Generated", + "Skipped" + ] + } + } + }, + "Text", + { + "Custom": { + "name": "tee_verifier_input_producer_job_status", + "kind": { + "Enum": [ + "Queued", + "ManuallySkipped", + "InProgress", + "Successful", + "Failed" + ] + } + } + }, + { + "Custom": { + "name": "tee_proofs_job_status", + "kind": { + "Enum": [ + "ReadyToBeProven", + "PickedByProver", + "Generated", + "Skipped" + ] + } + } + }, + { + "Custom": { + "name": "tee_proofs_job_status", + "kind": { + "Enum": [ + "ReadyToBeProven", + "PickedByProver", + "Generated", + "Skipped" + ] + } + } + }, + "Interval" + ] + }, + "nullable": [ + false + ] + }, + "hash": "efc9b281c548cfe75cb25cb6e0c7f5f0d2a33913312eac25b62cf3e3363e459d" +} diff --git a/core/lib/dal/migrations/20240325133100_add_tee_verifier_input_producer_jobs_table.down.sql b/core/lib/dal/migrations/20240325133100_add_tee_verifier_input_producer_jobs_table.down.sql index bccd4f97dcbb..2d4d6a6ad7a7 100644 --- a/core/lib/dal/migrations/20240325133100_add_tee_verifier_input_producer_jobs_table.down.sql +++ b/core/lib/dal/migrations/20240325133100_add_tee_verifier_input_producer_jobs_table.down.sql @@ -1,5 +1,4 @@ DROP INDEX IF EXISTS idx_tee_verifier_input_producer_jobs_status_processing_attempts; -DROP INDEX IF EXISTS idx_tee_verifier_input_producer_jobs_l1_batch_number_status; DROP TABLE IF EXISTS tee_verifier_input_producer_jobs; diff --git a/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql b/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql index 26e7e5a11940..ead044266df4 100644 --- a/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql +++ b/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql @@ -15,6 +15,3 @@ CREATE TABLE IF NOT EXISTS tee_verifier_input_producer_jobs CREATE INDEX IF NOT EXISTS idx_tee_verifier_input_producer_jobs_status_processing_attempts ON tee_verifier_input_producer_jobs (status, processing_started_at, attempts); - -CREATE INDEX IF NOT EXISTS idx_tee_verifier_input_producer_jobs_l1_batch_number_status -ON tee_verifier_input_producer_jobs (l1_batch_number, status); diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql 
b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql index d46bc4aa154d..15de3d74c65c 100644 --- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql @@ -1,4 +1,6 @@ -DROP INDEX IF EXISTS idx_proofs_number_per_batch_number_and_tee_type; +DROP INDEX IF EXISTS idx_tee_proofs_status_prover_taken_at; DROP TABLE IF EXISTS tee_attestations; DROP TABLE IF EXISTS tee_proofs; + +DROP TYPE IF EXISTS tee_proofs_job_status; diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql index 013bdcd02b17..35074baecc94 100644 --- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql @@ -1,3 +1,5 @@ +CREATE TYPE tee_proofs_job_status AS ENUM ('ReadyToBeProven', 'PickedByProver', 'Generated', 'Skipped'); + CREATE TABLE IF NOT EXISTS tee_attestations ( pubkey BYTEA PRIMARY KEY, @@ -6,16 +8,18 @@ CREATE TABLE IF NOT EXISTS tee_attestations CREATE TABLE IF NOT EXISTS tee_proofs ( - id BIGSERIAL PRIMARY KEY, l1_batch_number BIGINT NOT NULL REFERENCES tee_verifier_input_producer_jobs (l1_batch_number) ON DELETE CASCADE, tee_type TEXT NOT NULL, + status tee_proofs_job_status NOT NULL, pubkey BYTEA REFERENCES tee_attestations (pubkey) ON DELETE CASCADE, signature BYTEA, proof BYTEA, created_at TIMESTAMP NOT NULL, - proved_at TIMESTAMP, - prover_taken_at TIMESTAMP + updated_at TIMESTAMP NOT NULL, + prover_taken_at TIMESTAMP, + PRIMARY KEY (l1_batch_number, tee_type) ); -CREATE INDEX IF NOT EXISTS idx_proofs_number_per_batch_number_and_tee_type - ON tee_proofs (l1_batch_number, tee_type); +CREATE INDEX IF NOT EXISTS idx_tee_proofs_status_prover_taken_at + ON tee_proofs (prover_taken_at) + WHERE status = 'PickedByProver'; diff --git a/core/lib/dal/src/models/storage_tee_proof.rs b/core/lib/dal/src/models/storage_tee_proof.rs index 9c55c86eb26d..f2a2647e86d0 100644 --- a/core/lib/dal/src/models/storage_tee_proof.rs +++ b/core/lib/dal/src/models/storage_tee_proof.rs @@ -25,8 +25,6 @@ pub struct StorageTeeProof { /// TODO rename it TeeProof once StorageTeeProof is moved to api/mod.rs? 
#[derive(Debug, Clone, sqlx::FromRow)] pub struct TmpStorageTeeProof { - #[allow(dead_code)] - pub id: i64, pub pubkey: Option>, pub signature: Option>, pub proof: Option>, diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index f0ebc7720523..a19ffc24d40a 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -8,6 +8,7 @@ use zksync_types::{tee_types::TeeType, L1BatchNumber}; use crate::{ models::storage_tee_proof::{StorageTeeProof, TmpStorageTeeProof}, + tee_verifier_input_producer_dal::TeeVerifierInputProducerJobStatus, Core, }; @@ -16,45 +17,61 @@ pub struct TeeProofGenerationDal<'a, 'c> { pub(crate) storage: &'a mut Connection<'c, Core>, } +#[derive(Debug, sqlx::Type)] +#[sqlx(type_name = "tee_proofs_job_status")] +enum TeeProofGenerationJobStatus { + ReadyToBeProven, + PickedByProver, + Generated, + Skipped, +} + impl TeeProofGenerationDal<'_, '_> { pub async fn get_next_batch_to_be_proven( &mut self, tee_type: TeeType, processing_timeout: Duration, - ) -> DalResult> { + ) -> DalResult> { let processing_timeout = pg_interval_from_duration(processing_timeout); let query = sqlx::query!( r#" UPDATE tee_proofs SET + status = $1, + updated_at = NOW(), prover_taken_at = NOW() WHERE - id = ( + tee_type = $2 + AND l1_batch_number = ( SELECT - proofs.id + proofs.l1_batch_number FROM tee_proofs AS proofs JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number WHERE - inputs.status = 'Successful' - AND proofs.tee_type = $1 + inputs.status = $3 AND ( - proofs.prover_taken_at IS NULL - OR proofs.prover_taken_at < NOW() - $2::INTERVAL + proofs.status = $4 + OR ( + proofs.status = $5 + AND proofs.prover_taken_at < NOW() - $6::INTERVAL + ) ) ORDER BY - tee_proofs.l1_batch_number ASC, - proofs.prover_taken_at ASC NULLS FIRST + l1_batch_number ASC LIMIT 1 FOR UPDATE SKIP LOCKED ) RETURNING - tee_proofs.id, tee_proofs.l1_batch_number "#, + TeeProofGenerationJobStatus::PickedByProver as TeeProofGenerationJobStatus, &tee_type.to_string(), + TeeVerifierInputProducerJobStatus::Successful as TeeVerifierInputProducerJobStatus, + TeeProofGenerationJobStatus::ReadyToBeProven as TeeProofGenerationJobStatus, + TeeProofGenerationJobStatus::PickedByProver as TeeProofGenerationJobStatus, &processing_timeout, ); let batch_number = Instrumented::new("get_next_batch_to_be_proven") @@ -63,14 +80,15 @@ impl TeeProofGenerationDal<'_, '_> { .with(query) .fetch_optional(self.storage) .await? 
- .map(|row| (row.id as i64, L1BatchNumber(row.l1_batch_number as u32))); + .map(|row| L1BatchNumber(row.l1_batch_number as u32)); Ok(batch_number) } pub async fn save_proof_artifacts_metadata( &mut self, - proof_id: i64, + batch_number: L1BatchNumber, + tee_type: TeeType, pubkey: &[u8], signature: &[u8], proof: &[u8], @@ -79,23 +97,27 @@ impl TeeProofGenerationDal<'_, '_> { r#" UPDATE tee_proofs SET - pubkey = $1, - signature = $2, - proof = $3, - proved_at = NOW() + tee_type = $1, + status = $2, + pubkey = $3, + signature = $4, + proof = $5, + updated_at = NOW() WHERE - id = $4 + l1_batch_number = $6 "#, + tee_type.to_string(), + TeeProofGenerationJobStatus::Generated as TeeProofGenerationJobStatus, pubkey, signature, proof, - proof_id + i64::from(batch_number.0) ); let instrumentation = Instrumented::new("save_proof_artifacts_metadata") + .with_arg("tee_type", &tee_type) .with_arg("pubkey", &pubkey) .with_arg("signature", &signature) - .with_arg("proof", &proof) - .with_arg("proof_id", &proof_id); + .with_arg("proof", &proof); let result = instrumentation .clone() .with(query) @@ -103,8 +125,8 @@ impl TeeProofGenerationDal<'_, '_> { .await?; if result.rows_affected() == 0 { let err = instrumentation.constraint_error(anyhow::anyhow!( - "Updating TEE proof for a non-existent ID {} is not allowed", - proof_id + "Updating TEE proof for a non-existent batch number {} is not allowed", + batch_number )); return Err(err); } @@ -121,12 +143,13 @@ impl TeeProofGenerationDal<'_, '_> { let query = sqlx::query!( r#" INSERT INTO - tee_proofs (l1_batch_number, tee_type, created_at) + tee_proofs (l1_batch_number, tee_type, status, created_at, updated_at) VALUES - ($1, $2, NOW()) + ($1, $2, $3, NOW(), NOW()) "#, batch_number, tee_type.to_string(), + TeeProofGenerationJobStatus::ReadyToBeProven as TeeProofGenerationJobStatus, ); let instrumentation = Instrumented::new("insert_tee_proof_generation_job") .with_arg("l1_batch_number", &batch_number) @@ -138,7 +161,7 @@ impl TeeProofGenerationDal<'_, '_> { .await?; if result.rows_affected() == 0 { let err = instrumentation.constraint_error(anyhow::anyhow!( - "Unable to insert TEE proof for {}, {}", + "Unable to insert TEE proof for batch number {}, TEE type {}", batch_number, tee_type )); @@ -148,33 +171,6 @@ impl TeeProofGenerationDal<'_, '_> { Ok(()) } - #[cfg(test)] - pub async fn get_oldest_unpicked_batch(&mut self) -> DalResult> { - let query = sqlx::query!( - r#" - SELECT - proofs.l1_batch_number - FROM - tee_proofs AS proofs - JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number - WHERE - inputs.status = 'Successful' - AND proofs.prover_taken_at IS NULL - ORDER BY - proofs.l1_batch_number ASC - LIMIT - 1 - "#, - ); - let batch_number = Instrumented::new("get_oldest_unpicked_batch") - .with(query) - .fetch_optional(self.storage) - .await? 
- .map(|row| L1BatchNumber(row.l1_batch_number as u32)); - - Ok(batch_number) - } - pub async fn save_attestation(&mut self, pubkey: &[u8], attestation: &[u8]) -> DalResult<()> { let query = sqlx::query!( r#" @@ -197,7 +193,8 @@ impl TeeProofGenerationDal<'_, '_> { .await?; if result.rows_affected() == 0 { let err = instrumentation.constraint_error(anyhow::anyhow!( - "Unable to insert TEE attestation: given pubkey already has an attestation assigned" + "Unable to insert TEE attestation: pubkey {:?} already has an attestation assigned", + pubkey )); return Err(err); } @@ -213,26 +210,33 @@ impl TeeProofGenerationDal<'_, '_> { let query = format!( r#" SELECT - tp.id, tp.pubkey, tp.signature, tp.proof, - tp.proved_at, + tp.updated_at, ta.attestation FROM tee_proofs tp LEFT JOIN tee_attestations ta ON tp.pubkey = ta.pubkey WHERE - tp.l1_batch_number = {} + tp.l1_batch_number = $1 + AND tp.status = $2 {} ORDER BY tp.id "#, - i64::from(batch_number.0), - tee_type.map_or_else(String::new, |tt| format!("AND tp.tee_type = '{}'", tt)) + tee_type.map_or_else(String::new, |_| "AND tp.tee_type = $3".to_string()) ); - let proofs: Vec = sqlx::query_as(&query) + let mut query = sqlx::query_as(&query) + .bind(i64::from(batch_number.0)) + .bind(TeeProofGenerationJobStatus::Generated as TeeProofGenerationJobStatus); + + if let Some(tee_type) = tee_type { + query = query.bind(tee_type.to_string()); + } + + let proofs: Vec = query .fetch_all(self.storage.conn()) .await .unwrap() @@ -250,4 +254,32 @@ impl TeeProofGenerationDal<'_, '_> { Ok(proofs) } + + pub async fn get_oldest_unpicked_batch(&mut self) -> DalResult> { + let query = sqlx::query!( + r#" + SELECT + proofs.l1_batch_number + FROM + tee_proofs AS proofs + JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number + WHERE + inputs.status = $1 + AND proofs.status = $2 + ORDER BY + proofs.l1_batch_number ASC + LIMIT + 1 + "#, + TeeVerifierInputProducerJobStatus::Successful as TeeVerifierInputProducerJobStatus, + TeeProofGenerationJobStatus::ReadyToBeProven as TeeProofGenerationJobStatus, + ); + let batch_number = Instrumented::new("get_oldest_unpicked_batch") + .with(query) + .fetch_optional(self.storage) + .await? + .map(|row| L1BatchNumber(row.l1_batch_number as u32)); + + Ok(batch_number) + } } diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index 6f937d6ab91b..a8a578a8de08 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -30,10 +30,7 @@ pub enum ProofGenerationDataResponse { } #[derive(Debug, Serialize, Deserialize)] -pub struct TeeProofGenerationDataResponse { - pub proof_id: Option, - pub input: Option>, -} +pub struct TeeProofGenerationDataResponse(pub Option>); #[derive(Debug, Serialize, Deserialize)] pub enum SubmitProofResponse { diff --git a/core/lib/prover_interface/src/outputs.rs b/core/lib/prover_interface/src/outputs.rs index 8a4bd8e47405..9672bfb2142b 100644 --- a/core/lib/prover_interface/src/outputs.rs +++ b/core/lib/prover_interface/src/outputs.rs @@ -16,8 +16,6 @@ pub struct L1BatchProofForL1 { /// A "final" TEE proof that can be sent to the L1 contract. 
#[derive(Clone, PartialEq, Serialize, Deserialize)] pub struct L1BatchTeeProofForL1 { - // ID of the proof in the database to distinguish between different proofs for the same batch - pub proof_id: i64, // signature generated within the TEE enclave, using the privkey corresponding to the pubkey pub signature: Vec, // pubkey used for signature verification; each key pair is attested by the TEE attestation diff --git a/core/lib/prover_interface/tests/job_serialization.rs b/core/lib/prover_interface/tests/job_serialization.rs index 81b969d5d13e..a2d55a140655 100644 --- a/core/lib/prover_interface/tests/job_serialization.rs +++ b/core/lib/prover_interface/tests/job_serialization.rs @@ -167,7 +167,6 @@ fn test_proof_request_serialization() { #[test] fn test_tee_proof_request_serialization() { let tee_proof_str = r#"{ - "proof_id": 0, "signature": [ 0, 1, 2, 3, 4 ], "pubkey": [ 5, 6, 7, 8, 9 ], "proof": [ 10, 11, 12, 13, 14 ], @@ -175,7 +174,6 @@ fn test_tee_proof_request_serialization() { }"#; let tee_proof_result = serde_json::from_str::(tee_proof_str).unwrap(); let tee_proof_expected = SubmitTeeProofRequest(Box::new(L1BatchTeeProofForL1 { - proof_id: 0, signature: vec![0, 1, 2, 3, 4], pubkey: vec![5, 6, 7, 8, 9], proof: vec![10, 11, 12, 13, 14], diff --git a/core/lib/types/src/api/mod.rs b/core/lib/types/src/api/mod.rs index 751de9bd7040..367873115e2a 100644 --- a/core/lib/types/src/api/mod.rs +++ b/core/lib/types/src/api/mod.rs @@ -3,6 +3,7 @@ use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use serde_json::Value; use strum::Display; use zksync_basic_types::{ + tee_types::TeeType, web3::{AccessList, Bytes, Index}, L1BatchNumber, H160, H2048, H256, H64, U256, U64, }; @@ -834,6 +835,17 @@ pub struct TransactionExecutionInfo { pub execution_info: Value, } +/// A "final" TEE proof that can be sent to the L1 contract. 
+#[derive(Clone, PartialEq, Serialize, Deserialize)] +pub struct TeeProof { + pub l1_batch_number: L1BatchNumber, + pub tee_type: TeeType, + pub proof: Vec, + pub attestation: Vec, + pub signature: Vec, + pub pubkey: Vec, +} + #[cfg(test)] mod tests { use super::*; diff --git a/core/lib/web3_decl/src/namespaces/unstable.rs b/core/lib/web3_decl/src/namespaces/unstable.rs index 4996813a9855..d2a6e29c7083 100644 --- a/core/lib/web3_decl/src/namespaces/unstable.rs +++ b/core/lib/web3_decl/src/namespaces/unstable.rs @@ -1,7 +1,11 @@ #[cfg_attr(not(feature = "server"), allow(unused_imports))] use jsonrpsee::core::RpcResult; use jsonrpsee::proc_macros::rpc; -use zksync_types::{api::TransactionExecutionInfo, H256}; +use zksync_types::{ + api::{TeeProof, TransactionExecutionInfo}, + tee_types::TeeType, + L1BatchNumber, H256, +}; use crate::client::{ForNetwork, L2}; @@ -20,4 +24,11 @@ pub trait UnstableNamespace { &self, hash: H256, ) -> RpcResult>; + + #[method(name = "getTeeProofs")] + async fn tee_proofs( + &self, + l1_batch_number: L1BatchNumber, + tee_type: Option, + ) -> RpcResult>; } diff --git a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs index 6abaa718a050..91330aa7d949 100644 --- a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs +++ b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs @@ -1,4 +1,8 @@ -use zksync_types::{api::TransactionExecutionInfo, H256}; +use zksync_types::{ + api::{TeeProof, TransactionExecutionInfo}, + tee_types::TeeType, + L1BatchNumber, H256, +}; use zksync_web3_decl::{ jsonrpsee::core::{async_trait, RpcResult}, namespaces::UnstableNamespaceServer, @@ -16,4 +20,14 @@ impl UnstableNamespaceServer for UnstableNamespace { .await .map_err(|err| self.current_method().map_err(err)) } + + async fn tee_proofs( + &self, + l1_batch_number: L1BatchNumber, + tee_type: Option, + ) -> RpcResult> { + self.get_tee_proofs_impl(l1_batch_number, tee_type) + .await + .map_err(|err| self.current_method().map_err(err)) + } } diff --git a/core/node/api_server/src/web3/namespaces/unstable.rs b/core/node/api_server/src/web3/namespaces/unstable.rs index b46ecd6dc530..104b0792bafa 100644 --- a/core/node/api_server/src/web3/namespaces/unstable.rs +++ b/core/node/api_server/src/web3/namespaces/unstable.rs @@ -1,5 +1,9 @@ use zksync_dal::{CoreDal, DalError}; -use zksync_types::api::TransactionExecutionInfo; +use zksync_types::{ + api::{TeeProof, TransactionExecutionInfo}, + tee_types::TeeType, + L1BatchNumber, +}; use zksync_web3_decl::{error::Web3Error, types::H256}; use crate::web3::{backend_jsonrpsee::MethodTracer, RpcState}; @@ -30,4 +34,27 @@ impl UnstableNamespace { .map_err(DalError::generalize)? .map(|execution_info| TransactionExecutionInfo { execution_info })) } + + pub async fn get_tee_proofs_impl( + &self, + l1_batch_number: L1BatchNumber, + tee_type: Option, + ) -> Result, Web3Error> { + let mut storage = self.state.acquire_connection().await?; + Ok(storage + .tee_proof_generation_dal() + .get_tee_proofs(l1_batch_number, tee_type) + .await + .map_err(DalError::generalize)? 
+ .into_iter() + .map(|proof| TeeProof { + l1_batch_number: proof.l1_batch_number, + tee_type: proof.tee_type.unwrap(), + proof: proof.proof.unwrap_or_default(), + attestation: proof.attestation.unwrap_or_default(), + signature: proof.signature.unwrap_or_default(), + pubkey: proof.pubkey.unwrap_or_default(), + }) + .collect::>()) + } } diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index 20613ba909e2..d85591dd2c90 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -53,15 +53,9 @@ impl TeeRequestProcessor { .await .map_err(RequestProcessorError::Dal)?; - let (proof_id, l1_batch_number) = match l1_batch_number_result { - Some((proof_id, number)) => (proof_id, number), - None => { - // TODO introduce a proper type - return Ok(Json(TeeProofGenerationDataResponse { - proof_id: None, - input: None, - })); - } + let l1_batch_number = match l1_batch_number_result { + Some(number) => number, + None => return Ok(Json(TeeProofGenerationDataResponse(None))), }; let tee_verifier_input: TeeVerifierInput = self @@ -70,10 +64,7 @@ impl TeeRequestProcessor { .await .map_err(RequestProcessorError::ObjectStore)?; - let response = TeeProofGenerationDataResponse { - proof_id: Some(proof_id), - input: Some(Box::new(tee_verifier_input)), - }; + let response = TeeProofGenerationDataResponse(Some(Box::new(tee_verifier_input))); Ok(Json(response)) } @@ -97,7 +88,8 @@ impl TeeRequestProcessor { l1_batch_number ); dal.save_proof_artifacts_metadata( - proof.0.proof_id, + l1_batch_number, + proof.0.tee_type, &proof.0.pubkey, &proof.0.signature, &proof.0.proof, diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 0dac2f960a87..5007b032b6f0 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -18,7 +18,7 @@ use zksync_prover_interface::{ api::SubmitTeeProofRequest, inputs::{TeeVerifierInput, V1TeeVerifierInput, WitnessInputMerklePaths}, }; -use zksync_types::{commitment::L1BatchCommitmentMode, L1BatchNumber, H256}; +use zksync_types::{commitment::L1BatchCommitmentMode, tee_types::TeeType, L1BatchNumber, H256}; use crate::create_proof_processing_router; @@ -94,7 +94,7 @@ async fn request_tee_proof_inputs() { }, L1BatchCommitmentMode::Rollup, ); - let req_body = Body::from(serde_json::to_vec(&json!({})).unwrap()); + let req_body = Body::from(serde_json::to_vec(&json!({ "tee_type": "Sgx" })).unwrap()); let response = app .oneshot( Request::builder() @@ -183,17 +183,25 @@ async fn submit_tee_proof() { // there should be one TEE proof in the db now - let proof = proof_db_conn + let proofs = proof_db_conn .tee_proof_generation_dal() - .get_tee_proof(batch_number) + .get_tee_proofs(batch_number, Some(TeeType::Sgx)) .await - .unwrap() .unwrap(); - assert_eq!(proof.signature.unwrap(), tee_proof_request.0.signature); - assert_eq!(proof.pubkey.unwrap(), tee_proof_request.0.pubkey); - assert_eq!(proof.proof.unwrap(), tee_proof_request.0.proof); - assert_eq!(proof.tee_type.unwrap(), tee_proof_request.0.tee_type); + assert_eq!(proofs.len(), 1); + let proof = &proofs[0]; + assert_eq!( + proof.tee_type.as_ref().unwrap(), + &tee_proof_request.0.tee_type + ); + assert_eq!(proof.proof.as_ref().unwrap(), &tee_proof_request.0.proof); + assert_eq!(proof.attestation.as_ref().unwrap(), &attestation); + assert_eq!( + proof.signature.as_ref().unwrap(), + 
&tee_proof_request.0.signature + ); + assert_eq!(proof.pubkey.as_ref().unwrap(), &tee_proof_request.0.pubkey); } // Mock SQL db with information about the status of the TEE proof generation @@ -229,7 +237,7 @@ async fn mock_tee_batch_status( // mock SQL table with relevant information about the status of TEE proof generation ('ready_to_be_proven') proof_dal - .insert_tee_proof_generation_job(batch_number) + .insert_tee_proof_generation_job(batch_number, TeeType::Sgx) .await .expect("Failed to insert tee_proof_generation_job"); From f9e9cdb01662a74783f3ef7ac9f40348ad0ebd3e Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 1 Aug 2024 12:08:44 +0200 Subject: [PATCH 07/20] Fix SQL query (identified by unit test) --- core/lib/dal/src/tee_proof_generation_dal.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index a19ffc24d40a..49ff83923efd 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -223,7 +223,7 @@ impl TeeProofGenerationDal<'_, '_> { tp.l1_batch_number = $1 AND tp.status = $2 {} - ORDER BY tp.id + ORDER BY tp.l1_batch_number ASC, tp.tee_type ASC "#, tee_type.map_or_else(String::new, |_| "AND tp.tee_type = $3".to_string()) ); From ab382d1d52dfdf3f0fdc835483942859c6dda087 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 1 Aug 2024 13:20:31 +0200 Subject: [PATCH 08/20] Simplify code --- core/lib/dal/src/models/storage_tee_proof.rs | 29 ++----------------- core/lib/dal/src/tee_proof_generation_dal.rs | 21 ++------------ core/lib/types/src/api/mod.rs | 23 ++++++++------- .../src/web3/namespaces/unstable.rs | 14 +++++---- core/node/proof_data_handler/src/tests.rs | 8 ++--- 5 files changed, 29 insertions(+), 66 deletions(-) diff --git a/core/lib/dal/src/models/storage_tee_proof.rs b/core/lib/dal/src/models/storage_tee_proof.rs index f2a2647e86d0..5c93361e7df1 100644 --- a/core/lib/dal/src/models/storage_tee_proof.rs +++ b/core/lib/dal/src/models/storage_tee_proof.rs @@ -1,33 +1,10 @@ -use serde::{Deserialize, Serialize}; // TODO needed? -use zksync_types::{tee_types::TeeType, L1BatchNumber}; +use chrono::NaiveDateTime; -/// Represents a proof generated within a TEE enclave -/// TODO move it to core/lib/types/src/api/mod.rs and call TeeProof? -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct StorageTeeProof { - // batch number for which the proof was generated - pub l1_batch_number: L1BatchNumber, - // type of TEE used for attestation - pub tee_type: Option, - // pubkey used for signature verification; each key pair is attested by the TEE attestation - // stored in the db - pub pubkey: Option>, - // signature generated within the TEE enclave, using the privkey corresponding to the pubkey - pub signature: Option>, - // data that was signed - pub proof: Option>, - // attestation quote generated within the TEE enclave - pub attestation: Option>, - // timestamp when the proof was generated - pub proved_at: chrono::DateTime, -} - -/// TODO rename it TeeProof once StorageTeeProof is moved to api/mod.rs? 
#[derive(Debug, Clone, sqlx::FromRow)] -pub struct TmpStorageTeeProof { +pub struct StorageTeeProof { pub pubkey: Option>, pub signature: Option>, pub proof: Option>, - pub proved_at: chrono::DateTime, + pub updated_at: NaiveDateTime, pub attestation: Option>, } diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 49ff83923efd..294c49650319 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -7,9 +7,8 @@ use zksync_db_connection::{ use zksync_types::{tee_types::TeeType, L1BatchNumber}; use crate::{ - models::storage_tee_proof::{StorageTeeProof, TmpStorageTeeProof}, - tee_verifier_input_producer_dal::TeeVerifierInputProducerJobStatus, - Core, + models::storage_tee_proof::StorageTeeProof, + tee_verifier_input_producer_dal::TeeVerifierInputProducerJobStatus, Core, }; #[derive(Debug)] @@ -236,21 +235,7 @@ impl TeeProofGenerationDal<'_, '_> { query = query.bind(tee_type.to_string()); } - let proofs: Vec = query - .fetch_all(self.storage.conn()) - .await - .unwrap() - .into_iter() - .map(|row: TmpStorageTeeProof| StorageTeeProof { - l1_batch_number: batch_number, - tee_type, - pubkey: row.pubkey, - signature: row.signature, - proof: row.proof, - attestation: row.attestation, - proved_at: row.proved_at, - }) - .collect(); + let proofs: Vec = query.fetch_all(self.storage.conn()).await.unwrap(); Ok(proofs) } diff --git a/core/lib/types/src/api/mod.rs b/core/lib/types/src/api/mod.rs index 367873115e2a..e6c84ecec537 100644 --- a/core/lib/types/src/api/mod.rs +++ b/core/lib/types/src/api/mod.rs @@ -811,6 +811,18 @@ pub struct Proof { pub storage_proof: Vec, } +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct TeeProof { + pub l1_batch_number: L1BatchNumber, + pub tee_type: Option, + pub pubkey: Option>, + pub signature: Option>, + pub proof: Option>, + pub proved_at: DateTime, + pub attestation: Option>, +} + #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct TransactionDetailedResult { @@ -835,17 +847,6 @@ pub struct TransactionExecutionInfo { pub execution_info: Value, } -/// A "final" TEE proof that can be sent to the L1 contract. -#[derive(Clone, PartialEq, Serialize, Deserialize)] -pub struct TeeProof { - pub l1_batch_number: L1BatchNumber, - pub tee_type: TeeType, - pub proof: Vec, - pub attestation: Vec, - pub signature: Vec, - pub pubkey: Vec, -} - #[cfg(test)] mod tests { use super::*; diff --git a/core/node/api_server/src/web3/namespaces/unstable.rs b/core/node/api_server/src/web3/namespaces/unstable.rs index 104b0792bafa..783088cdc36a 100644 --- a/core/node/api_server/src/web3/namespaces/unstable.rs +++ b/core/node/api_server/src/web3/namespaces/unstable.rs @@ -1,3 +1,4 @@ +use chrono::{DateTime, Utc}; use zksync_dal::{CoreDal, DalError}; use zksync_types::{ api::{TeeProof, TransactionExecutionInfo}, @@ -48,12 +49,13 @@ impl UnstableNamespace { .map_err(DalError::generalize)? 
.into_iter() .map(|proof| TeeProof { - l1_batch_number: proof.l1_batch_number, - tee_type: proof.tee_type.unwrap(), - proof: proof.proof.unwrap_or_default(), - attestation: proof.attestation.unwrap_or_default(), - signature: proof.signature.unwrap_or_default(), - pubkey: proof.pubkey.unwrap_or_default(), + l1_batch_number, + tee_type, + pubkey: proof.pubkey, + signature: proof.signature, + proof: proof.proof, + proved_at: DateTime::::from_naive_utc_and_offset(proof.updated_at, Utc), + attestation: proof.attestation, }) .collect::>()) } diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 5007b032b6f0..88d4930e6920 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -181,7 +181,7 @@ async fn submit_tee_proof() { assert!(oldest_batch_number.is_none()); - // there should be one TEE proof in the db now + // there should be one SGX proof in the db now let proofs = proof_db_conn .tee_proof_generation_dal() @@ -190,11 +190,9 @@ async fn submit_tee_proof() { .unwrap(); assert_eq!(proofs.len(), 1); + let proof = &proofs[0]; - assert_eq!( - proof.tee_type.as_ref().unwrap(), - &tee_proof_request.0.tee_type - ); + assert_eq!(proof.proof.as_ref().unwrap(), &tee_proof_request.0.proof); assert_eq!(proof.attestation.as_ref().unwrap(), &attestation); assert_eq!( From 4c95d0b0a730b52012c737dec6cdcc27ad825cf1 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 1 Aug 2024 15:01:46 +0200 Subject: [PATCH 09/20] Update docs (Mermaid diagram) --- core/lib/dal/doc/ProofGenerationDal.md | 19 +++++++++++++++++++ core/lib/dal/src/tee_proof_generation_dal.rs | 1 - 2 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 core/lib/dal/doc/ProofGenerationDal.md diff --git a/core/lib/dal/doc/ProofGenerationDal.md b/core/lib/dal/doc/ProofGenerationDal.md new file mode 100644 index 000000000000..388471da9385 --- /dev/null +++ b/core/lib/dal/doc/ProofGenerationDal.md @@ -0,0 +1,19 @@ +# ProofGenerationDal + +## Table Name + +proof_generation_details + +## `status` Diagram + +```mermaid +--- +title: Status Diagram +--- +stateDiagram-v2 +[*] --> ReadyToBeProven : insert_tee_proof_generation_job +ReadyToBeProven --> PickedByProver : get_next_batch_to_be_proven +PickedByProver --> Generated : save_proof_artifacts_metadata +Generated --> [*] + +``` diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 294c49650319..d558b67b42be 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -22,7 +22,6 @@ enum TeeProofGenerationJobStatus { ReadyToBeProven, PickedByProver, Generated, - Skipped, } impl TeeProofGenerationDal<'_, '_> { From aa9d3639050bf2a323c854a9c831750b26146eeb Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 5 Aug 2024 16:16:44 +0200 Subject: [PATCH 10/20] Fix docs --- core/lib/dal/doc/ProofGenerationDal.md | 15 ++++++--------- core/lib/dal/doc/TeeProofGenerationDal.md | 19 +++++++++++++++++++ core/lib/dal/src/proof_generation_dal.rs | 2 ++ core/lib/dal/src/tee_proof_generation_dal.rs | 2 ++ 4 files changed, 29 insertions(+), 9 deletions(-) create mode 100644 core/lib/dal/doc/TeeProofGenerationDal.md diff --git a/core/lib/dal/doc/ProofGenerationDal.md b/core/lib/dal/doc/ProofGenerationDal.md index 388471da9385..5f77d6e62992 100644 --- a/core/lib/dal/doc/ProofGenerationDal.md +++ b/core/lib/dal/doc/ProofGenerationDal.md @@ -1,19 +1,16 @@ # ProofGenerationDal - ## Table Name - 
proof_generation_details - ## `status` Diagram - ```mermaid --- title: Status Diagram --- stateDiagram-v2 -[*] --> ReadyToBeProven : insert_tee_proof_generation_job -ReadyToBeProven --> PickedByProver : get_next_batch_to_be_proven -PickedByProver --> Generated : save_proof_artifacts_metadata -Generated --> [*] - +[*] --> ready_to_be_proven : insert_proof_generation_details +ready_to_be_proven --> picked_by_prover : get_next_block_to_be_proven +picked_by_prover --> generated : save_proof_artifacts_metadata +generated --> [*] +[*] --> skipped : mark_proof_generation_job_as_skipped +skipped --> [*] ``` diff --git a/core/lib/dal/doc/TeeProofGenerationDal.md b/core/lib/dal/doc/TeeProofGenerationDal.md new file mode 100644 index 000000000000..f63c93444684 --- /dev/null +++ b/core/lib/dal/doc/TeeProofGenerationDal.md @@ -0,0 +1,19 @@ +# TeeProofGenerationDal + +## Table Name + +`tee_proofs` + +## `status` Diagram + +```mermaid +--- +title: Status Diagram +--- +stateDiagram-v2 +[*] --> ReadyToBeProven : insert_tee_proof_generation_job +ReadyToBeProven --> PickedByProver : get_next_batch_to_be_proven +PickedByProver --> Generated : save_proof_artifacts_metadata +Generated --> [*] + +``` diff --git a/core/lib/dal/src/proof_generation_dal.rs b/core/lib/dal/src/proof_generation_dal.rs index 0c857d929fe9..cf146698a118 100644 --- a/core/lib/dal/src/proof_generation_dal.rs +++ b/core/lib/dal/src/proof_generation_dal.rs @@ -1,3 +1,5 @@ +#![doc = include_str!("../doc/ProofGenerationDal.md")] + use std::time::Duration; use strum::{Display, EnumString}; diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index d558b67b42be..ec5c9e1f4a41 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -1,3 +1,5 @@ +#![doc = include_str!("../doc/TeeProofGenerationDal.md")] + use std::time::Duration; use zksync_db_connection::{ From 50a65c9cca209896b4bb42458731e65b196b5cdf Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 5 Aug 2024 16:29:49 +0200 Subject: [PATCH 11/20] fixup! 
Fix docs --- core/lib/dal/doc/ProofGenerationDal.md | 6 ++++++ core/lib/dal/src/proof_generation_dal.rs | 1 - core/lib/dal/src/tee_proof_generation_dal.rs | 1 - 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/core/lib/dal/doc/ProofGenerationDal.md b/core/lib/dal/doc/ProofGenerationDal.md index 5f77d6e62992..618fdfba13b0 100644 --- a/core/lib/dal/doc/ProofGenerationDal.md +++ b/core/lib/dal/doc/ProofGenerationDal.md @@ -1,7 +1,11 @@ # ProofGenerationDal + ## Table Name + proof_generation_details + ## `status` Diagram + ```mermaid --- title: Status Diagram @@ -11,6 +15,8 @@ stateDiagram-v2 ready_to_be_proven --> picked_by_prover : get_next_block_to_be_proven picked_by_prover --> generated : save_proof_artifacts_metadata generated --> [*] + [*] --> skipped : mark_proof_generation_job_as_skipped skipped --> [*] + ``` diff --git a/core/lib/dal/src/proof_generation_dal.rs b/core/lib/dal/src/proof_generation_dal.rs index cf146698a118..4e37cc644f8e 100644 --- a/core/lib/dal/src/proof_generation_dal.rs +++ b/core/lib/dal/src/proof_generation_dal.rs @@ -1,5 +1,4 @@ #![doc = include_str!("../doc/ProofGenerationDal.md")] - use std::time::Duration; use strum::{Display, EnumString}; diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index ec5c9e1f4a41..378d603b20d8 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -1,5 +1,4 @@ #![doc = include_str!("../doc/TeeProofGenerationDal.md")] - use std::time::Duration; use zksync_db_connection::{ From 6e5d39f67318784b8998996dbd7d7e6a9d5fe773 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 5 Aug 2024 21:17:01 +0200 Subject: [PATCH 12/20] Fix db migrations Special thanks to ChatGPT/GH Copilot, you are invaluable! 
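For reviewers: the trickiest part of the reorg below is converting the textual `status` column to a Postgres enum in place. A minimal, self-contained sketch of that pattern, using a hypothetical `jobs` table and `job_status` type (not part of this migration), would look roughly like this:

```sql
-- Hypothetical names, for illustration only; the real migration below applies the
-- same USING CASE conversion to tee_proof_generation_details / tee_proofs.
CREATE TABLE jobs (status TEXT NOT NULL);

CREATE TYPE job_status AS ENUM ('ReadyToBeProven', 'PickedByProver', 'Generated', 'Skipped');

-- Rewrite the column in place, assuming every row already holds one of the four
-- known textual statuses; values not matched by the CASE fall through to NULL,
-- since there is no ELSE branch.
ALTER TABLE jobs
    ALTER COLUMN status TYPE job_status
    USING CASE
        WHEN status = 'ready_to_be_proven' THEN 'ReadyToBeProven'::job_status
        WHEN status = 'picked_by_prover' THEN 'PickedByProver'::job_status
        WHEN status = 'generated' THEN 'Generated'::job_status
        WHEN status = 'skipped' THEN 'Skipped'::job_status
    END;
```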
--- ..._verifier_input_producer_jobs_table.up.sql | 3 +-- ...ee_proof_generation_details_table.down.sql | 6 ++--- ..._tee_proof_generation_details_table.up.sql | 21 +++++++-------- ...40805143000_drop_picked_by_column.down.sql | 1 + ...0240805143000_drop_picked_by_column.up.sql | 1 + .../20240805144000_tee_proofs_reorg.down.sql | 24 +++++++++++++++++ .../20240805144000_tee_proofs_reorg.up.sql | 26 +++++++++++++++++++ 7 files changed, 64 insertions(+), 18 deletions(-) create mode 100644 core/lib/dal/migrations/20240805143000_drop_picked_by_column.down.sql create mode 100644 core/lib/dal/migrations/20240805143000_drop_picked_by_column.up.sql create mode 100644 core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql create mode 100644 core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql diff --git a/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql b/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql index ead044266df4..7e7d6df3274b 100644 --- a/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql +++ b/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql @@ -1,10 +1,10 @@ CREATE TYPE tee_verifier_input_producer_job_status AS ENUM ('Queued', 'ManuallySkipped', 'InProgress', 'Successful', 'Failed'); - CREATE TABLE IF NOT EXISTS tee_verifier_input_producer_jobs ( l1_batch_number BIGINT NOT NULL PRIMARY KEY, attempts SMALLINT NOT NULL DEFAULT 0, status tee_verifier_input_producer_job_status, + picked_by TEXT, input_blob_url TEXT, error TEXT, created_at TIMESTAMP NOT NULL, @@ -12,6 +12,5 @@ CREATE TABLE IF NOT EXISTS tee_verifier_input_producer_jobs processing_started_at TIMESTAMP, time_taken TIME ); - CREATE INDEX IF NOT EXISTS idx_tee_verifier_input_producer_jobs_status_processing_attempts ON tee_verifier_input_producer_jobs (status, processing_started_at, attempts); diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql index 15de3d74c65c..5b4f9958a8ea 100644 --- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql @@ -1,6 +1,4 @@ -DROP INDEX IF EXISTS idx_tee_proofs_status_prover_taken_at; - DROP TABLE IF EXISTS tee_attestations; -DROP TABLE IF EXISTS tee_proofs; +DROP TABLE IF EXISTS tee_proof_generation_details; -DROP TYPE IF EXISTS tee_proofs_job_status; +DROP INDEX IF EXISTS idx_tee_proof_generation_details_status_prover_taken_at; diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql index 35074baecc94..3a249c44346c 100644 --- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql @@ -1,25 +1,22 @@ -CREATE TYPE tee_proofs_job_status AS ENUM ('ReadyToBeProven', 'PickedByProver', 'Generated', 'Skipped'); - CREATE TABLE IF NOT EXISTS tee_attestations ( pubkey BYTEA PRIMARY KEY, attestation BYTEA ); -CREATE TABLE IF NOT EXISTS tee_proofs +CREATE TABLE IF NOT EXISTS tee_proof_generation_details ( - l1_batch_number BIGINT NOT NULL REFERENCES tee_verifier_input_producer_jobs (l1_batch_number) ON DELETE CASCADE, - tee_type 
TEXT NOT NULL, - status tee_proofs_job_status NOT NULL, - pubkey BYTEA REFERENCES tee_attestations (pubkey) ON DELETE CASCADE, + l1_batch_number BIGINT PRIMARY KEY REFERENCES tee_verifier_input_producer_jobs (l1_batch_number) ON DELETE CASCADE, + status TEXT NOT NULL, signature BYTEA, + pubkey BYTEA REFERENCES tee_attestations (pubkey) ON DELETE SET NULL, proof BYTEA, + tee_type TEXT, created_at TIMESTAMP NOT NULL, updated_at TIMESTAMP NOT NULL, - prover_taken_at TIMESTAMP, - PRIMARY KEY (l1_batch_number, tee_type) + prover_taken_at TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_tee_proofs_status_prover_taken_at - ON tee_proofs (prover_taken_at) - WHERE status = 'PickedByProver'; +CREATE INDEX IF NOT EXISTS idx_tee_proof_generation_details_status_prover_taken_at + ON tee_proof_generation_details (prover_taken_at) + WHERE status = 'picked_by_prover'; diff --git a/core/lib/dal/migrations/20240805143000_drop_picked_by_column.down.sql b/core/lib/dal/migrations/20240805143000_drop_picked_by_column.down.sql new file mode 100644 index 000000000000..90bb3f0ff613 --- /dev/null +++ b/core/lib/dal/migrations/20240805143000_drop_picked_by_column.down.sql @@ -0,0 +1 @@ +ALTER TABLE tee_verifier_input_producer_job_status ADD COLUMN picked_by TEXT; diff --git a/core/lib/dal/migrations/20240805143000_drop_picked_by_column.up.sql b/core/lib/dal/migrations/20240805143000_drop_picked_by_column.up.sql new file mode 100644 index 000000000000..5a1a1598b65d --- /dev/null +++ b/core/lib/dal/migrations/20240805143000_drop_picked_by_column.up.sql @@ -0,0 +1 @@ +ALTER TABLE tee_verifier_input_producer_job_status DROP COLUMN picked_by; diff --git a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql new file mode 100644 index 000000000000..290b22dc12ad --- /dev/null +++ b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql @@ -0,0 +1,24 @@ +DROP INDEX IF EXISTS idx_tee_proofs_status_prover_taken_at; +ALTER TABLE tee_proofs RENAME TO tee_proof_generation_details; +ALTER TABLE tee_proof_generation_details DROP CONSTRAINT tee_proof_generation_details_pkey; +ALTER TABLE tee_proof_generation_details DROP CONSTRAINT tee_proof_generation_details_pubkey_fkey; +ALTER TABLE tee_proof_generation_details + ADD CONSTRAINT tee_proof_generation_details_pubkey_fkey + FOREIGN KEY (pubkey) REFERENCES tee_attestations (pubkey) ON DELETE SET NULL; +ALTER TABLE tee_proof_generation_details + ALTER COLUMN status TYPE TEXT + USING CASE + WHEN status = 'ReadyToBeProven' THEN 'ready_to_be_proven' + WHEN status = 'PickedByProver' THEN 'picked_by_prover' + WHEN status = 'Generated' THEN 'generated' + WHEN status = 'Skipped' THEN 'skipped' + ELSE + RAISE EXCEPTION 'Unexpected value for status: %', status + END, + ALTER COLUMN status DROP NOT NULL; +ALTER TABLE tee_proof_generation_details ALTER COLUMN tee_type DROP NOT NULL; +ALTER TABLE tee_proof_generation_details ALTER COLUMN l1_batch_number DROP NOT NULL; +ALTER TABLE tee_proof_generation_details ADD PRIMARY KEY (l1_batch_number); +DROP TYPE IF EXISTS tee_proofs_job_status; +CREATE INDEX IF NOT EXISTS idx_tee_proof_generation_details_status_prover_taken_at + ON tee_proof_generation_details (status, prover_taken_at); diff --git a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql new file mode 100644 index 000000000000..16cb731f8282 --- /dev/null +++ b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql @@ -0,0 
+1,26 @@ +DROP INDEX IF EXISTS idx_tee_proof_generation_details_status_prover_taken_at; +CREATE TYPE tee_proofs_job_status AS ENUM ('ReadyToBeProven', 'PickedByProver', 'Generated', 'Skipped'); +ALTER TABLE tee_proof_generation_details DROP CONSTRAINT tee_proof_generation_details_pkey; +UPDATE tee_proof_generation_details SET tee_type = 'sgx' WHERE tee_type IS NULL; +ALTER TABLE tee_proof_generation_details ALTER COLUMN l1_batch_number SET NOT NULL; +ALTER TABLE tee_proof_generation_details ALTER COLUMN tee_type SET NOT NULL; +ALTER TABLE tee_proof_generation_details + ALTER COLUMN status TYPE tee_proofs_job_status + USING CASE + WHEN status = 'ready_to_be_proven' THEN 'ReadyToBeProven'::tee_proofs_job_status + WHEN status = 'picked_by_prover' THEN 'PickedByProver'::tee_proofs_job_status + WHEN status = 'generated' THEN 'Generated'::tee_proofs_job_status + WHEN status = 'skipped' THEN 'Skipped'::tee_proofs_job_status + ELSE + RAISE EXCEPTION 'Unexpected value for status: %', status + END, + ALTER COLUMN status SET NOT NULL; +ALTER TABLE tee_proof_generation_details DROP CONSTRAINT IF EXISTS tee_proof_generation_details_pubkey_fkey; +ALTER TABLE tee_proof_generation_details + ADD CONSTRAINT tee_proof_generation_details_pubkey_fkey + FOREIGN KEY (pubkey) REFERENCES tee_attestations (pubkey) ON DELETE CASCADE; +ALTER TABLE tee_proof_generation_details ADD PRIMARY KEY (l1_batch_number, tee_type); +ALTER TABLE tee_proof_generation_details RENAME TO tee_proofs; +CREATE INDEX IF NOT EXISTS idx_tee_proofs_status_prover_taken_at + ON tee_proofs (prover_taken_at) + WHERE status = 'PickedByProver'; From ab300c93731a2ae94692086a1b5010157f4b47f0 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 5 Aug 2024 21:38:10 +0200 Subject: [PATCH 13/20] fixup! Fix db migrations --- ...0325143100_add_tee_verifier_input_producer_jobs_table.up.sql | 2 ++ .../migrations/20240805143000_drop_picked_by_column.down.sql | 2 +- .../dal/migrations/20240805143000_drop_picked_by_column.up.sql | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql b/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql index 7e7d6df3274b..eb34068f876d 100644 --- a/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql +++ b/core/lib/dal/migrations/20240325143100_add_tee_verifier_input_producer_jobs_table.up.sql @@ -1,4 +1,5 @@ CREATE TYPE tee_verifier_input_producer_job_status AS ENUM ('Queued', 'ManuallySkipped', 'InProgress', 'Successful', 'Failed'); + CREATE TABLE IF NOT EXISTS tee_verifier_input_producer_jobs ( l1_batch_number BIGINT NOT NULL PRIMARY KEY, @@ -12,5 +13,6 @@ CREATE TABLE IF NOT EXISTS tee_verifier_input_producer_jobs processing_started_at TIMESTAMP, time_taken TIME ); + CREATE INDEX IF NOT EXISTS idx_tee_verifier_input_producer_jobs_status_processing_attempts ON tee_verifier_input_producer_jobs (status, processing_started_at, attempts); diff --git a/core/lib/dal/migrations/20240805143000_drop_picked_by_column.down.sql b/core/lib/dal/migrations/20240805143000_drop_picked_by_column.down.sql index 90bb3f0ff613..956aeee1ef6e 100644 --- a/core/lib/dal/migrations/20240805143000_drop_picked_by_column.down.sql +++ b/core/lib/dal/migrations/20240805143000_drop_picked_by_column.down.sql @@ -1 +1 @@ -ALTER TABLE tee_verifier_input_producer_job_status ADD COLUMN picked_by TEXT; +ALTER TABLE tee_verifier_input_producer_jobs ADD COLUMN picked_by TEXT; diff --git 
a/core/lib/dal/migrations/20240805143000_drop_picked_by_column.up.sql b/core/lib/dal/migrations/20240805143000_drop_picked_by_column.up.sql index 5a1a1598b65d..ae676b844792 100644 --- a/core/lib/dal/migrations/20240805143000_drop_picked_by_column.up.sql +++ b/core/lib/dal/migrations/20240805143000_drop_picked_by_column.up.sql @@ -1 +1 @@ -ALTER TABLE tee_verifier_input_producer_job_status DROP COLUMN picked_by; +ALTER TABLE tee_verifier_input_producer_jobs DROP COLUMN picked_by; From 962007ff9b99b28f8463189d9cb26ddf35504377 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Tue, 6 Aug 2024 10:22:07 +0200 Subject: [PATCH 14/20] fixup! Fix db migrations --- .../lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql | 2 -- core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql | 2 -- 2 files changed, 4 deletions(-) diff --git a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql index 290b22dc12ad..e7f4c083f20d 100644 --- a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql +++ b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql @@ -12,8 +12,6 @@ ALTER TABLE tee_proof_generation_details WHEN status = 'PickedByProver' THEN 'picked_by_prover' WHEN status = 'Generated' THEN 'generated' WHEN status = 'Skipped' THEN 'skipped' - ELSE - RAISE EXCEPTION 'Unexpected value for status: %', status END, ALTER COLUMN status DROP NOT NULL; ALTER TABLE tee_proof_generation_details ALTER COLUMN tee_type DROP NOT NULL; diff --git a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql index 16cb731f8282..5678616d8403 100644 --- a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql +++ b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql @@ -11,8 +11,6 @@ ALTER TABLE tee_proof_generation_details WHEN status = 'picked_by_prover' THEN 'PickedByProver'::tee_proofs_job_status WHEN status = 'generated' THEN 'Generated'::tee_proofs_job_status WHEN status = 'skipped' THEN 'Skipped'::tee_proofs_job_status - ELSE - RAISE EXCEPTION 'Unexpected value for status: %', status END, ALTER COLUMN status SET NOT NULL; ALTER TABLE tee_proof_generation_details DROP CONSTRAINT IF EXISTS tee_proof_generation_details_pubkey_fkey; From 328e9a1c37b1aa7ebb97c759e0548ff841bd7fa6 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Wed, 7 Aug 2024 13:57:19 +0200 Subject: [PATCH 15/20] Address @perekopskiy's code review comments --- .../20240805144000_tee_proofs_reorg.down.sql | 19 ------ .../20240805144000_tee_proofs_reorg.up.sql | 21 +------ core/lib/dal/src/tee_proof_generation_dal.rs | 63 +++++++++---------- 3 files changed, 30 insertions(+), 73 deletions(-) diff --git a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql index e7f4c083f20d..088825e6d52f 100644 --- a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql +++ b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql @@ -1,22 +1,3 @@ -DROP INDEX IF EXISTS idx_tee_proofs_status_prover_taken_at; -ALTER TABLE tee_proofs RENAME TO tee_proof_generation_details; ALTER TABLE tee_proof_generation_details DROP CONSTRAINT tee_proof_generation_details_pkey; -ALTER TABLE tee_proof_generation_details DROP CONSTRAINT tee_proof_generation_details_pubkey_fkey; -ALTER TABLE tee_proof_generation_details - ADD CONSTRAINT tee_proof_generation_details_pubkey_fkey - FOREIGN KEY 
(pubkey) REFERENCES tee_attestations (pubkey) ON DELETE SET NULL; -ALTER TABLE tee_proof_generation_details - ALTER COLUMN status TYPE TEXT - USING CASE - WHEN status = 'ReadyToBeProven' THEN 'ready_to_be_proven' - WHEN status = 'PickedByProver' THEN 'picked_by_prover' - WHEN status = 'Generated' THEN 'generated' - WHEN status = 'Skipped' THEN 'skipped' - END, - ALTER COLUMN status DROP NOT NULL; ALTER TABLE tee_proof_generation_details ALTER COLUMN tee_type DROP NOT NULL; -ALTER TABLE tee_proof_generation_details ALTER COLUMN l1_batch_number DROP NOT NULL; ALTER TABLE tee_proof_generation_details ADD PRIMARY KEY (l1_batch_number); -DROP TYPE IF EXISTS tee_proofs_job_status; -CREATE INDEX IF NOT EXISTS idx_tee_proof_generation_details_status_prover_taken_at - ON tee_proof_generation_details (status, prover_taken_at); diff --git a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql index 5678616d8403..f4f04592181a 100644 --- a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql +++ b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql @@ -1,24 +1,5 @@ -DROP INDEX IF EXISTS idx_tee_proof_generation_details_status_prover_taken_at; -CREATE TYPE tee_proofs_job_status AS ENUM ('ReadyToBeProven', 'PickedByProver', 'Generated', 'Skipped'); ALTER TABLE tee_proof_generation_details DROP CONSTRAINT tee_proof_generation_details_pkey; UPDATE tee_proof_generation_details SET tee_type = 'sgx' WHERE tee_type IS NULL; -ALTER TABLE tee_proof_generation_details ALTER COLUMN l1_batch_number SET NOT NULL; ALTER TABLE tee_proof_generation_details ALTER COLUMN tee_type SET NOT NULL; -ALTER TABLE tee_proof_generation_details - ALTER COLUMN status TYPE tee_proofs_job_status - USING CASE - WHEN status = 'ready_to_be_proven' THEN 'ReadyToBeProven'::tee_proofs_job_status - WHEN status = 'picked_by_prover' THEN 'PickedByProver'::tee_proofs_job_status - WHEN status = 'generated' THEN 'Generated'::tee_proofs_job_status - WHEN status = 'skipped' THEN 'Skipped'::tee_proofs_job_status - END, - ALTER COLUMN status SET NOT NULL; -ALTER TABLE tee_proof_generation_details DROP CONSTRAINT IF EXISTS tee_proof_generation_details_pubkey_fkey; -ALTER TABLE tee_proof_generation_details - ADD CONSTRAINT tee_proof_generation_details_pubkey_fkey - FOREIGN KEY (pubkey) REFERENCES tee_attestations (pubkey) ON DELETE CASCADE; +ALTER TABLE tee_proof_generation_details ALTER COLUMN l1_batch_number SET NOT NULL; ALTER TABLE tee_proof_generation_details ADD PRIMARY KEY (l1_batch_number, tee_type); -ALTER TABLE tee_proof_generation_details RENAME TO tee_proofs; -CREATE INDEX IF NOT EXISTS idx_tee_proofs_status_prover_taken_at - ON tee_proofs (prover_taken_at) - WHERE status = 'PickedByProver'; diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 378d603b20d8..073c8373ad79 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -1,6 +1,7 @@ #![doc = include_str!("../doc/TeeProofGenerationDal.md")] use std::time::Duration; +use strum::{Display, EnumString}; use zksync_db_connection::{ connection::Connection, error::DalResult, instrument::Instrumented, utils::pg_interval_from_duration, @@ -17,11 +18,13 @@ pub struct TeeProofGenerationDal<'a, 'c> { pub(crate) storage: &'a mut Connection<'c, Core>, } -#[derive(Debug, sqlx::Type)] -#[sqlx(type_name = "tee_proofs_job_status")] +#[derive(Debug, EnumString, Display)] enum 
TeeProofGenerationJobStatus { + #[strum(serialize = "ready_to_be_proven")] ReadyToBeProven, + #[strum(serialize = "picked_by_prover")] PickedByProver, + #[strum(serialize = "generated")] Generated, } @@ -34,26 +37,26 @@ impl TeeProofGenerationDal<'_, '_> { let processing_timeout = pg_interval_from_duration(processing_timeout); let query = sqlx::query!( r#" - UPDATE tee_proofs + UPDATE tee_proof_generation_details SET - status = $1, + status = 'picked_by_prover', updated_at = NOW(), prover_taken_at = NOW() WHERE - tee_type = $2 + tee_type = $1 AND l1_batch_number = ( SELECT proofs.l1_batch_number FROM - tee_proofs AS proofs + tee_proof_generation_details AS proofs JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number WHERE - inputs.status = $3 + inputs.status = $2 AND ( - proofs.status = $4 + proofs.status = 'ready_to_be_proven' OR ( - proofs.status = $5 - AND proofs.prover_taken_at < NOW() - $6::INTERVAL + proofs.status = 'picked_by_prover' + AND proofs.prover_taken_at < NOW() - $3::INTERVAL ) ) ORDER BY @@ -64,13 +67,10 @@ impl TeeProofGenerationDal<'_, '_> { SKIP LOCKED ) RETURNING - tee_proofs.l1_batch_number + tee_proof_generation_details.l1_batch_number "#, - TeeProofGenerationJobStatus::PickedByProver as TeeProofGenerationJobStatus, &tee_type.to_string(), TeeVerifierInputProducerJobStatus::Successful as TeeVerifierInputProducerJobStatus, - TeeProofGenerationJobStatus::ReadyToBeProven as TeeProofGenerationJobStatus, - TeeProofGenerationJobStatus::PickedByProver as TeeProofGenerationJobStatus, &processing_timeout, ); let batch_number = Instrumented::new("get_next_batch_to_be_proven") @@ -94,19 +94,18 @@ impl TeeProofGenerationDal<'_, '_> { ) -> DalResult<()> { let query = sqlx::query!( r#" - UPDATE tee_proofs + UPDATE tee_proof_generation_details SET tee_type = $1, - status = $2, - pubkey = $3, - signature = $4, - proof = $5, + status = 'generated', + pubkey = $2, + signature = $3, + proof = $4, updated_at = NOW() WHERE - l1_batch_number = $6 + l1_batch_number = $5 "#, tee_type.to_string(), - TeeProofGenerationJobStatus::Generated as TeeProofGenerationJobStatus, pubkey, signature, proof, @@ -142,13 +141,12 @@ impl TeeProofGenerationDal<'_, '_> { let query = sqlx::query!( r#" INSERT INTO - tee_proofs (l1_batch_number, tee_type, status, created_at, updated_at) + tee_proof_generation_details (l1_batch_number, tee_type, status, created_at, updated_at) VALUES - ($1, $2, $3, NOW(), NOW()) + ($1, $2, 'ready_to_be_proven', NOW(), NOW()) "#, batch_number, tee_type.to_string(), - TeeProofGenerationJobStatus::ReadyToBeProven as TeeProofGenerationJobStatus, ); let instrumentation = Instrumented::new("insert_tee_proof_generation_job") .with_arg("l1_batch_number", &batch_number) @@ -215,21 +213,19 @@ impl TeeProofGenerationDal<'_, '_> { tp.updated_at, ta.attestation FROM - tee_proofs tp + tee_proof_generation_details tp LEFT JOIN tee_attestations ta ON tp.pubkey = ta.pubkey WHERE tp.l1_batch_number = $1 - AND tp.status = $2 + AND tp.status = 'generated' {} ORDER BY tp.l1_batch_number ASC, tp.tee_type ASC "#, - tee_type.map_or_else(String::new, |_| "AND tp.tee_type = $3".to_string()) + tee_type.map_or_else(String::new, |_| "AND tp.tee_type = $2".to_string()) ); - let mut query = sqlx::query_as(&query) - .bind(i64::from(batch_number.0)) - .bind(TeeProofGenerationJobStatus::Generated as TeeProofGenerationJobStatus); + let mut query = sqlx::query_as(&query).bind(i64::from(batch_number.0)); if let Some(tee_type) = tee_type { query = 
query.bind(tee_type.to_string()); @@ -246,18 +242,17 @@ impl TeeProofGenerationDal<'_, '_> { SELECT proofs.l1_batch_number FROM - tee_proofs AS proofs + tee_proof_generation_details AS proofs JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number WHERE inputs.status = $1 - AND proofs.status = $2 + AND proofs.status = 'ready_to_be_proven' ORDER BY proofs.l1_batch_number ASC LIMIT 1 "#, - TeeVerifierInputProducerJobStatus::Successful as TeeVerifierInputProducerJobStatus, - TeeProofGenerationJobStatus::ReadyToBeProven as TeeProofGenerationJobStatus, + TeeVerifierInputProducerJobStatus::Successful as TeeVerifierInputProducerJobStatus ); let batch_number = Instrumented::new("get_oldest_unpicked_batch") .with(query) From 51e59a4f8a296eaa79cd8ad4736d51acb48a53eb Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Wed, 7 Aug 2024 14:41:31 +0200 Subject: [PATCH 16/20] fixup! Address @perekopskiy's code review comments --- ...0f33404c0d1dc17645c246e3fd3cfa92cb9a6.json | 15 ++++ ...b9416720e3fdf2d7078d5d1b1ae27e7c73949.json | 48 ------------ ...4b4a0c11d0a45774fade80e88cb902104fe28.json | 20 ----- ...38dbd01af9068af4059b53fe79a6f2505b0a5.json | 17 ----- ...93c6f5a43386d4183745a4668507cf672b3f6.json | 37 +++++++++ ...bd3c9feadae298511f0d5622b54426c144376.json | 28 ------- ...a2fca14965083b0589c3b3efad02e37d55f0c.json | 20 ----- ...304afac6fc7b4c3e5bed2ff4f7da248523e33.json | 20 ----- ...2c74a1f1eea15dd55ddf645e6989b2d249709.json | 31 -------- ...7042743766d7fc2c231bb699bef005b81a7fc.json | 29 ------- ...b608d21dc70397b64ce500881a8b55953c59c.json | 14 ---- ...213a0b02b3ff96398920bc0250397bb2a95f.json} | 6 +- ...dc118360ab83bae3196ec941216901be629da.json | 35 +++++++++ ...148b0f1c7e512dd43434062341eb263fe434f.json | 22 ------ ...3521353d37863bad59cc4c364b83d02e63db1.json | 15 ---- ...a6f325a6ef0da152c51234b177a78227e2320.json | 40 ---------- ...7f5f0d2a33913312eac25b62cf3e3363e459d.json | 76 ------------------- 17 files changed, 90 insertions(+), 383 deletions(-) create mode 100644 core/lib/dal/.sqlx/query-16055f3e228774b9c00a9b9a5300f33404c0d1dc17645c246e3fd3cfa92cb9a6.json delete mode 100644 core/lib/dal/.sqlx/query-163a136aaac4d01dba226012f27b9416720e3fdf2d7078d5d1b1ae27e7c73949.json delete mode 100644 core/lib/dal/.sqlx/query-184573db68b82c15ad5a647b4784b4a0c11d0a45774fade80e88cb902104fe28.json delete mode 100644 core/lib/dal/.sqlx/query-1ef09a1df23f8dea9779f34350b38dbd01af9068af4059b53fe79a6f2505b0a5.json create mode 100644 core/lib/dal/.sqlx/query-286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6.json delete mode 100644 core/lib/dal/.sqlx/query-34dd3f0fef34fba5fe5746cc326bd3c9feadae298511f0d5622b54426c144376.json delete mode 100644 core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json delete mode 100644 core/lib/dal/.sqlx/query-7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33.json delete mode 100644 core/lib/dal/.sqlx/query-90f1aba888fac01b74aa763166e2c74a1f1eea15dd55ddf645e6989b2d249709.json delete mode 100644 core/lib/dal/.sqlx/query-94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc.json delete mode 100644 core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json rename core/lib/dal/.sqlx/{query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json => query-a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f.json} (58%) create mode 100644 
core/lib/dal/.sqlx/query-e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da.json delete mode 100644 core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json delete mode 100644 core/lib/dal/.sqlx/query-eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1.json delete mode 100644 core/lib/dal/.sqlx/query-ef6f778df3f5dbe5014c786c081a6f325a6ef0da152c51234b177a78227e2320.json delete mode 100644 core/lib/dal/.sqlx/query-efc9b281c548cfe75cb25cb6e0c7f5f0d2a33913312eac25b62cf3e3363e459d.json diff --git a/core/lib/dal/.sqlx/query-16055f3e228774b9c00a9b9a5300f33404c0d1dc17645c246e3fd3cfa92cb9a6.json b/core/lib/dal/.sqlx/query-16055f3e228774b9c00a9b9a5300f33404c0d1dc17645c246e3fd3cfa92cb9a6.json new file mode 100644 index 000000000000..e3549790e717 --- /dev/null +++ b/core/lib/dal/.sqlx/query-16055f3e228774b9c00a9b9a5300f33404c0d1dc17645c246e3fd3cfa92cb9a6.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n tee_proof_generation_details (l1_batch_number, tee_type, status, created_at, updated_at)\n VALUES\n ($1, $2, 'ready_to_be_proven', NOW(), NOW())\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Text" + ] + }, + "nullable": [] + }, + "hash": "16055f3e228774b9c00a9b9a5300f33404c0d1dc17645c246e3fd3cfa92cb9a6" +} diff --git a/core/lib/dal/.sqlx/query-163a136aaac4d01dba226012f27b9416720e3fdf2d7078d5d1b1ae27e7c73949.json b/core/lib/dal/.sqlx/query-163a136aaac4d01dba226012f27b9416720e3fdf2d7078d5d1b1ae27e7c73949.json deleted file mode 100644 index 229520f87d30..000000000000 --- a/core/lib/dal/.sqlx/query-163a136aaac4d01dba226012f27b9416720e3fdf2d7078d5d1b1ae27e7c73949.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proofs AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = $1\n AND proofs.status = $2\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - { - "Custom": { - "name": "tee_verifier_input_producer_job_status", - "kind": { - "Enum": [ - "Queued", - "ManuallySkipped", - "InProgress", - "Successful", - "Failed" - ] - } - } - }, - { - "Custom": { - "name": "tee_proofs_job_status", - "kind": { - "Enum": [ - "ReadyToBeProven", - "PickedByProver", - "Generated", - "Skipped" - ] - } - } - } - ] - }, - "nullable": [ - false - ] - }, - "hash": "163a136aaac4d01dba226012f27b9416720e3fdf2d7078d5d1b1ae27e7c73949" -} diff --git a/core/lib/dal/.sqlx/query-184573db68b82c15ad5a647b4784b4a0c11d0a45774fade80e88cb902104fe28.json b/core/lib/dal/.sqlx/query-184573db68b82c15ad5a647b4784b4a0c11d0a45774fade80e88cb902104fe28.json deleted file mode 100644 index 07e25f505a6f..000000000000 --- a/core/lib/dal/.sqlx/query-184573db68b82c15ad5a647b4784b4a0c11d0a45774fade80e88cb902104fe28.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proofs AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs\n ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.prover_taken_at IS NULL\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, 
- "nullable": [ - false - ] - }, - "hash": "184573db68b82c15ad5a647b4784b4a0c11d0a45774fade80e88cb902104fe28" -} diff --git a/core/lib/dal/.sqlx/query-1ef09a1df23f8dea9779f34350b38dbd01af9068af4059b53fe79a6f2505b0a5.json b/core/lib/dal/.sqlx/query-1ef09a1df23f8dea9779f34350b38dbd01af9068af4059b53fe79a6f2505b0a5.json deleted file mode 100644 index 3d5bd64e3a57..000000000000 --- a/core/lib/dal/.sqlx/query-1ef09a1df23f8dea9779f34350b38dbd01af9068af4059b53fe79a6f2505b0a5.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE tee_proofs\n SET\n pubkey = $1,\n signature = $2,\n proof = $3,\n proved_at = NOW()\n WHERE\n id = $4\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Bytea", - "Bytea", - "Bytea", - "Int8" - ] - }, - "nullable": [] - }, - "hash": "1ef09a1df23f8dea9779f34350b38dbd01af9068af4059b53fe79a6f2505b0a5" -} diff --git a/core/lib/dal/.sqlx/query-286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6.json b/core/lib/dal/.sqlx/query-286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6.json new file mode 100644 index 000000000000..540660bddf34 --- /dev/null +++ b/core/lib/dal/.sqlx/query-286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6.json @@ -0,0 +1,37 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'picked_by_prover',\n updated_at = NOW(),\n prover_taken_at = NOW()\n WHERE\n tee_type = $1\n AND l1_batch_number = (\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = $2\n AND (\n proofs.status = 'ready_to_be_proven'\n OR (\n proofs.status = 'picked_by_prover'\n AND proofs.prover_taken_at < NOW() - $3::INTERVAL\n )\n )\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proof_generation_details.l1_batch_number\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Text", + { + "Custom": { + "name": "tee_verifier_input_producer_job_status", + "kind": { + "Enum": [ + "Queued", + "ManuallySkipped", + "InProgress", + "Successful", + "Failed" + ] + } + } + }, + "Interval" + ] + }, + "nullable": [ + false + ] + }, + "hash": "286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6" +} diff --git a/core/lib/dal/.sqlx/query-34dd3f0fef34fba5fe5746cc326bd3c9feadae298511f0d5622b54426c144376.json b/core/lib/dal/.sqlx/query-34dd3f0fef34fba5fe5746cc326bd3c9feadae298511f0d5622b54426c144376.json deleted file mode 100644 index ad0fcc071867..000000000000 --- a/core/lib/dal/.sqlx/query-34dd3f0fef34fba5fe5746cc326bd3c9feadae298511f0d5622b54426c144376.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n tee_proofs (l1_batch_number, tee_type, status, created_at, updated_at)\n VALUES\n ($1, $2, $3, NOW(), NOW())\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Text", - { - "Custom": { - "name": "tee_proofs_job_status", - "kind": { - "Enum": [ - "ReadyToBeProven", - "PickedByProver", - "Generated", - "Skipped" - ] - } - } - } - ] - }, - "nullable": [] - }, - "hash": "34dd3f0fef34fba5fe5746cc326bd3c9feadae298511f0d5622b54426c144376" -} diff --git a/core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json 
b/core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json deleted file mode 100644 index f0603488f1e8..000000000000 --- a/core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.status = 'ready_to_be_proven'\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - }, - "hash": "640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c" -} diff --git a/core/lib/dal/.sqlx/query-7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33.json b/core/lib/dal/.sqlx/query-7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33.json deleted file mode 100644 index fdab20788c36..000000000000 --- a/core/lib/dal/.sqlx/query-7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proofs AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.prover_taken_at IS NULL\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - }, - "hash": "7b11e9d71ba1b19a4976e574286304afac6fc7b4c3e5bed2ff4f7da248523e33" -} diff --git a/core/lib/dal/.sqlx/query-90f1aba888fac01b74aa763166e2c74a1f1eea15dd55ddf645e6989b2d249709.json b/core/lib/dal/.sqlx/query-90f1aba888fac01b74aa763166e2c74a1f1eea15dd55ddf645e6989b2d249709.json deleted file mode 100644 index b1bb73b2d091..000000000000 --- a/core/lib/dal/.sqlx/query-90f1aba888fac01b74aa763166e2c74a1f1eea15dd55ddf645e6989b2d249709.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE tee_proofs\n SET\n tee_type = $1,\n status = $2,\n pubkey = $3,\n signature = $4,\n proof = $5,\n updated_at = NOW()\n WHERE\n l1_batch_number = $6\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - { - "Custom": { - "name": "tee_proofs_job_status", - "kind": { - "Enum": [ - "ReadyToBeProven", - "PickedByProver", - "Generated", - "Skipped" - ] - } - } - }, - "Bytea", - "Bytea", - "Bytea", - "Int8" - ] - }, - "nullable": [] - }, - "hash": "90f1aba888fac01b74aa763166e2c74a1f1eea15dd55ddf645e6989b2d249709" -} diff --git a/core/lib/dal/.sqlx/query-94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc.json b/core/lib/dal/.sqlx/query-94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc.json deleted file mode 100644 index fb5ec089514b..000000000000 --- a/core/lib/dal/.sqlx/query-94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE tee_proofs\n SET\n prover_taken_at = NOW()\n WHERE\n id = (\n SELECT\n proofs.id\n FROM\n tee_proofs AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = 
inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.tee_type = $1\n AND (\n proofs.prover_taken_at IS NULL\n OR proofs.prover_taken_at < NOW() - $2::INTERVAL\n )\n ORDER BY\n tee_proofs.l1_batch_number ASC,\n proofs.prover_taken_at ASC NULLS FIRST\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proofs.id,\n tee_proofs.l1_batch_number\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Text", - "Interval" - ] - }, - "nullable": [ - false, - false - ] - }, - "hash": "94eefcf763d8a4bc6a6c1860be17042743766d7fc2c231bb699bef005b81a7fc" -} diff --git a/core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json b/core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json deleted file mode 100644 index 994bfcfbb5a2..000000000000 --- a/core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n tee_proof_generation_details (l1_batch_number, status, created_at, updated_at)\n VALUES\n ($1, 'ready_to_be_proven', NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO NOTHING\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [] - }, - "hash": "9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c" -} diff --git a/core/lib/dal/.sqlx/query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json b/core/lib/dal/.sqlx/query-a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f.json similarity index 58% rename from core/lib/dal/.sqlx/query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json rename to core/lib/dal/.sqlx/query-a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f.json index 8e210aade885..8b67041427d3 100644 --- a/core/lib/dal/.sqlx/query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json +++ b/core/lib/dal/.sqlx/query-a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f.json @@ -1,18 +1,18 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'generated',\n signature = $1,\n pubkey = $2,\n proof = $3,\n tee_type = $4,\n updated_at = NOW()\n WHERE\n l1_batch_number = $5\n ", + "query": "\n UPDATE tee_proof_generation_details\n SET\n tee_type = $1,\n status = 'generated',\n pubkey = $2,\n signature = $3,\n proof = $4,\n updated_at = NOW()\n WHERE\n l1_batch_number = $5\n ", "describe": { "columns": [], "parameters": { "Left": [ + "Text", "Bytea", "Bytea", "Bytea", - "Text", "Int8" ] }, "nullable": [] }, - "hash": "727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711" + "hash": "a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f" } diff --git a/core/lib/dal/.sqlx/query-e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da.json b/core/lib/dal/.sqlx/query-e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da.json new file mode 100644 index 000000000000..70f7f9d12fa4 --- /dev/null +++ b/core/lib/dal/.sqlx/query-e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da.json @@ -0,0 +1,35 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON 
proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = $1\n AND proofs.status = 'ready_to_be_proven'\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + { + "Custom": { + "name": "tee_verifier_input_producer_job_status", + "kind": { + "Enum": [ + "Queued", + "ManuallySkipped", + "InProgress", + "Successful", + "Failed" + ] + } + } + } + ] + }, + "nullable": [ + false + ] + }, + "hash": "e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da" +} diff --git a/core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json b/core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json deleted file mode 100644 index 4236e72fccad..000000000000 --- a/core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'picked_by_prover',\n updated_at = NOW(),\n prover_taken_at = NOW()\n WHERE\n l1_batch_number = (\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND (\n proofs.status = 'ready_to_be_proven'\n OR (\n proofs.status = 'picked_by_prover'\n AND proofs.prover_taken_at < NOW() - $1::INTERVAL\n )\n )\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proof_generation_details.l1_batch_number\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Interval" - ] - }, - "nullable": [ - false - ] - }, - "hash": "e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f" -} diff --git a/core/lib/dal/.sqlx/query-eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1.json b/core/lib/dal/.sqlx/query-eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1.json deleted file mode 100644 index 0db3c5ca1b1d..000000000000 --- a/core/lib/dal/.sqlx/query-eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n tee_proofs (l1_batch_number, tee_type, created_at)\n VALUES\n ($1, $2, NOW())\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Text" - ] - }, - "nullable": [] - }, - "hash": "eddef3ecd6cdc205e91ae9ceaa23521353d37863bad59cc4c364b83d02e63db1" -} diff --git a/core/lib/dal/.sqlx/query-ef6f778df3f5dbe5014c786c081a6f325a6ef0da152c51234b177a78227e2320.json b/core/lib/dal/.sqlx/query-ef6f778df3f5dbe5014c786c081a6f325a6ef0da152c51234b177a78227e2320.json deleted file mode 100644 index c1a6d5989555..000000000000 --- a/core/lib/dal/.sqlx/query-ef6f778df3f5dbe5014c786c081a6f325a6ef0da152c51234b177a78227e2320.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n signature,\n pubkey,\n proof,\n tee_type\n FROM\n tee_proof_generation_details\n WHERE\n l1_batch_number = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "signature", - "type_info": "Bytea" - }, - { - "ordinal": 1, - "name": "pubkey", - "type_info": "Bytea" - }, - { - "ordinal": 2, - "name": "proof", - "type_info": "Bytea" - }, - { - "ordinal": 
3, - "name": "tee_type", - "type_info": "Text" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - true, - true, - true, - true - ] - }, - "hash": "ef6f778df3f5dbe5014c786c081a6f325a6ef0da152c51234b177a78227e2320" -} diff --git a/core/lib/dal/.sqlx/query-efc9b281c548cfe75cb25cb6e0c7f5f0d2a33913312eac25b62cf3e3363e459d.json b/core/lib/dal/.sqlx/query-efc9b281c548cfe75cb25cb6e0c7f5f0d2a33913312eac25b62cf3e3363e459d.json deleted file mode 100644 index 120a9c4e0c39..000000000000 --- a/core/lib/dal/.sqlx/query-efc9b281c548cfe75cb25cb6e0c7f5f0d2a33913312eac25b62cf3e3363e459d.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE tee_proofs\n SET\n status = $1,\n updated_at = NOW(),\n prover_taken_at = NOW()\n WHERE\n tee_type = $2\n AND l1_batch_number = (\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proofs AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = $3\n AND (\n proofs.status = $4\n OR (\n proofs.status = $5\n AND proofs.prover_taken_at < NOW() - $6::INTERVAL\n )\n )\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proofs.l1_batch_number\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - { - "Custom": { - "name": "tee_proofs_job_status", - "kind": { - "Enum": [ - "ReadyToBeProven", - "PickedByProver", - "Generated", - "Skipped" - ] - } - } - }, - "Text", - { - "Custom": { - "name": "tee_verifier_input_producer_job_status", - "kind": { - "Enum": [ - "Queued", - "ManuallySkipped", - "InProgress", - "Successful", - "Failed" - ] - } - } - }, - { - "Custom": { - "name": "tee_proofs_job_status", - "kind": { - "Enum": [ - "ReadyToBeProven", - "PickedByProver", - "Generated", - "Skipped" - ] - } - } - }, - { - "Custom": { - "name": "tee_proofs_job_status", - "kind": { - "Enum": [ - "ReadyToBeProven", - "PickedByProver", - "Generated", - "Skipped" - ] - } - } - }, - "Interval" - ] - }, - "nullable": [ - false - ] - }, - "hash": "efc9b281c548cfe75cb25cb6e0c7f5f0d2a33913312eac25b62cf3e3363e459d" -} From 3aa30b230794a2a02721abfce63ac5fc69579273 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 8 Aug 2024 13:28:25 +0200 Subject: [PATCH 17/20] Code review fix: idempotent SQL queries --- core/lib/dal/src/tee_proof_generation_dal.rs | 20 +++----------------- 1 file changed, 3 insertions(+), 17 deletions(-) diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index fdaaf976b2f0..2bd73323eb10 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -133,6 +133,7 @@ impl TeeProofGenerationDal<'_, '_> { tee_proof_generation_details (l1_batch_number, tee_type, status, created_at, updated_at) VALUES ($1, $2, 'ready_to_be_proven', NOW(), NOW()) + ON CONFLICT (l1_batch_number, tee_type) DO NOTHING "#, batch_number, tee_type.to_string(), @@ -140,19 +141,11 @@ impl TeeProofGenerationDal<'_, '_> { let instrumentation = Instrumented::new("insert_tee_proof_generation_job") .with_arg("l1_batch_number", &batch_number) .with_arg("tee_type", &tee_type); - let result = instrumentation + instrumentation .clone() .with(query) .execute(self.storage) .await?; - if result.rows_affected() == 0 { - let err = instrumentation.constraint_error(anyhow::anyhow!( - "Unable to insert TEE proof for batch number {}, TEE type {}", - batch_number, 
- tee_type - )); - return Err(err); - } Ok(()) } @@ -172,18 +165,11 @@ impl TeeProofGenerationDal<'_, '_> { let instrumentation = Instrumented::new("save_attestation") .with_arg("pubkey", &pubkey) .with_arg("attestation", &attestation); - let result = instrumentation + instrumentation .clone() .with(query) .execute(self.storage) .await?; - if result.rows_affected() == 0 { - let err = instrumentation.constraint_error(anyhow::anyhow!( - "Unable to insert TEE attestation: pubkey {:?} already has an attestation assigned", - pubkey - )); - return Err(err); - } Ok(()) } From 787bc8dddab1bb9c7ab0d53cf5ad0e851d2bb559 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 8 Aug 2024 13:41:08 +0200 Subject: [PATCH 18/20] fixup! Code review fix: idempotent SQL queries --- ...94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6.json} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename core/lib/dal/.sqlx/{query-16055f3e228774b9c00a9b9a5300f33404c0d1dc17645c246e3fd3cfa92cb9a6.json => query-d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6.json} (65%) diff --git a/core/lib/dal/.sqlx/query-16055f3e228774b9c00a9b9a5300f33404c0d1dc17645c246e3fd3cfa92cb9a6.json b/core/lib/dal/.sqlx/query-d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6.json similarity index 65% rename from core/lib/dal/.sqlx/query-16055f3e228774b9c00a9b9a5300f33404c0d1dc17645c246e3fd3cfa92cb9a6.json rename to core/lib/dal/.sqlx/query-d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6.json index e3549790e717..0ed8005289f7 100644 --- a/core/lib/dal/.sqlx/query-16055f3e228774b9c00a9b9a5300f33404c0d1dc17645c246e3fd3cfa92cb9a6.json +++ b/core/lib/dal/.sqlx/query-d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n tee_proof_generation_details (l1_batch_number, tee_type, status, created_at, updated_at)\n VALUES\n ($1, $2, 'ready_to_be_proven', NOW(), NOW())\n ", + "query": "\n INSERT INTO\n tee_proof_generation_details (l1_batch_number, tee_type, status, created_at, updated_at)\n VALUES\n ($1, $2, 'ready_to_be_proven', NOW(), NOW())\n ON CONFLICT (l1_batch_number, tee_type) DO NOTHING\n ", "describe": { "columns": [], "parameters": { @@ -11,5 +11,5 @@ }, "nullable": [] }, - "hash": "16055f3e228774b9c00a9b9a5300f33404c0d1dc17645c246e3fd3cfa92cb9a6" + "hash": "d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6" } From caea7a5ccd7a86adcffd4a044b8dd7a1e27ec3d0 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 8 Aug 2024 13:55:12 +0200 Subject: [PATCH 19/20] Code review fix: single migration file --- .../migrations/20240805143000_drop_picked_by_column.down.sql | 1 - .../dal/migrations/20240805143000_drop_picked_by_column.up.sql | 1 - .../lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql | 2 ++ core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql | 2 ++ 4 files changed, 4 insertions(+), 2 deletions(-) delete mode 100644 core/lib/dal/migrations/20240805143000_drop_picked_by_column.down.sql delete mode 100644 core/lib/dal/migrations/20240805143000_drop_picked_by_column.up.sql diff --git a/core/lib/dal/migrations/20240805143000_drop_picked_by_column.down.sql b/core/lib/dal/migrations/20240805143000_drop_picked_by_column.down.sql deleted file mode 100644 index 956aeee1ef6e..000000000000 --- a/core/lib/dal/migrations/20240805143000_drop_picked_by_column.down.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE tee_verifier_input_producer_jobs ADD COLUMN picked_by TEXT; diff --git 
a/core/lib/dal/migrations/20240805143000_drop_picked_by_column.up.sql b/core/lib/dal/migrations/20240805143000_drop_picked_by_column.up.sql deleted file mode 100644 index ae676b844792..000000000000 --- a/core/lib/dal/migrations/20240805143000_drop_picked_by_column.up.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE tee_verifier_input_producer_jobs DROP COLUMN picked_by; diff --git a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql index 088825e6d52f..09a162f31fbf 100644 --- a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql +++ b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql @@ -1,3 +1,5 @@ +ALTER TABLE tee_verifier_input_producer_jobs ADD COLUMN picked_by TEXT; + ALTER TABLE tee_proof_generation_details DROP CONSTRAINT tee_proof_generation_details_pkey; ALTER TABLE tee_proof_generation_details ALTER COLUMN tee_type DROP NOT NULL; ALTER TABLE tee_proof_generation_details ADD PRIMARY KEY (l1_batch_number); diff --git a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql index f4f04592181a..160af44c221c 100644 --- a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql +++ b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql @@ -1,3 +1,5 @@ +ALTER TABLE tee_verifier_input_producer_jobs DROP COLUMN picked_by; + ALTER TABLE tee_proof_generation_details DROP CONSTRAINT tee_proof_generation_details_pkey; UPDATE tee_proof_generation_details SET tee_type = 'sgx' WHERE tee_type IS NULL; ALTER TABLE tee_proof_generation_details ALTER COLUMN tee_type SET NOT NULL; From 22b4dd3f191b60cee2d3bbc793f350cca402a3fd Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 8 Aug 2024 14:15:07 +0200 Subject: [PATCH 20/20] Update docs --- core/lib/dal/doc/TeeProofGenerationDal.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/core/lib/dal/doc/TeeProofGenerationDal.md b/core/lib/dal/doc/TeeProofGenerationDal.md index f63c93444684..23474d5cb5c5 100644 --- a/core/lib/dal/doc/TeeProofGenerationDal.md +++ b/core/lib/dal/doc/TeeProofGenerationDal.md @@ -11,9 +11,9 @@ title: Status Diagram --- stateDiagram-v2 -[*] --> ReadyToBeProven : insert_tee_proof_generation_job -ReadyToBeProven --> PickedByProver : get_next_batch_to_be_proven -PickedByProver --> Generated : save_proof_artifacts_metadata -Generated --> [*] +[*] --> ready_to_be_proven : insert_tee_proof_generation_job +ready_to_be_proven --> picked_by_prover : get_next_batch_to_be_proven +picked_by_prover --> generated : save_proof_artifacts_metadata +generated --> [*] ```
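
The idempotent insert introduced in "Code review fix: idempotent SQL queries" can be exercised directly against a local Postgres database. A minimal sketch, assuming the `tee_proof_generation_details` table from this series already exists and using illustrative values (batch number 42, TEE type `sgx`):

```sql
-- Sketch only; 42 and 'sgx' are example values, not part of the patch.
-- Running this statement twice for the same (l1_batch_number, tee_type) pair is a
-- no-op the second time: ON CONFLICT ... DO NOTHING leaves the existing row untouched
-- instead of raising a constraint error.
INSERT INTO
    tee_proof_generation_details (l1_batch_number, tee_type, status, created_at, updated_at)
VALUES
    (42, 'sgx', 'ready_to_be_proven', NOW(), NOW())
ON CONFLICT (l1_batch_number, tee_type) DO NOTHING;
```

Because a repeated insert now silently affects zero rows by design, the `rows_affected() == 0` error paths removed from `insert_tee_proof_generation_job` in this commit are no longer meaningful as failure signals.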