diff --git a/core/bin/zksync_tee_prover/src/api_client.rs b/core/bin/zksync_tee_prover/src/api_client.rs
index 1530da971157..13fbc1ba8868 100644
--- a/core/bin/zksync_tee_prover/src/api_client.rs
+++ b/core/bin/zksync_tee_prover/src/api_client.rs
@@ -74,8 +74,11 @@ impl TeeApiClient {
/// Fetches the next job for the TEE prover to process, verifying and signing it if the
/// verification is successful.
- pub async fn get_job(&self) -> Result<Option<Box<TeeVerifierInput>>, TeeProverError> {
- let request = TeeProofGenerationDataRequest {};
+ pub async fn get_job(
+ &self,
+ tee_type: TeeType,
+ ) -> Result<Option<Box<TeeVerifierInput>>, TeeProverError> {
+ let request = TeeProofGenerationDataRequest { tee_type };
let response = self
.post::<_, TeeProofGenerationDataResponse, _>("/tee/proof_inputs", request)
.await?;
diff --git a/core/bin/zksync_tee_prover/src/tee_prover.rs b/core/bin/zksync_tee_prover/src/tee_prover.rs
index bcd1e4a1b6b4..64a3a9c5749d 100644
--- a/core/bin/zksync_tee_prover/src/tee_prover.rs
+++ b/core/bin/zksync_tee_prover/src/tee_prover.rs
@@ -112,7 +112,7 @@ impl TeeProver {
}
async fn step(&self) -> Result<Option<L1BatchNumber>, TeeProverError> {
- match self.api_client.get_job().await? {
+ match self.api_client.get_job(self.tee_type).await? {
Some(job) => {
let (signature, batch_number, root_hash) = self.verify(*job)?;
self.api_client
diff --git a/core/lib/dal/.sqlx/query-286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6.json b/core/lib/dal/.sqlx/query-286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6.json
new file mode 100644
index 000000000000..540660bddf34
--- /dev/null
+++ b/core/lib/dal/.sqlx/query-286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6.json
@@ -0,0 +1,37 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'picked_by_prover',\n updated_at = NOW(),\n prover_taken_at = NOW()\n WHERE\n tee_type = $1\n AND l1_batch_number = (\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = $2\n AND (\n proofs.status = 'ready_to_be_proven'\n OR (\n proofs.status = 'picked_by_prover'\n AND proofs.prover_taken_at < NOW() - $3::INTERVAL\n )\n )\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proof_generation_details.l1_batch_number\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "l1_batch_number",
+ "type_info": "Int8"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ "Text",
+ {
+ "Custom": {
+ "name": "tee_verifier_input_producer_job_status",
+ "kind": {
+ "Enum": [
+ "Queued",
+ "ManuallySkipped",
+ "InProgress",
+ "Successful",
+ "Failed"
+ ]
+ }
+ }
+ },
+ "Interval"
+ ]
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "286f27e32a152c293d07e7c22e893c6f5a43386d4183745a4668507cf672b3f6"
+}
diff --git a/core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json b/core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json
deleted file mode 100644
index f0603488f1e8..000000000000
--- a/core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
- "db_name": "PostgreSQL",
- "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.status = 'ready_to_be_proven'\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ",
- "describe": {
- "columns": [
- {
- "ordinal": 0,
- "name": "l1_batch_number",
- "type_info": "Int8"
- }
- ],
- "parameters": {
- "Left": []
- },
- "nullable": [
- false
- ]
- },
- "hash": "640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c"
-}
diff --git a/core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json b/core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json
deleted file mode 100644
index 994bfcfbb5a2..000000000000
--- a/core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
- "db_name": "PostgreSQL",
- "query": "\n INSERT INTO\n tee_proof_generation_details (l1_batch_number, status, created_at, updated_at)\n VALUES\n ($1, 'ready_to_be_proven', NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO NOTHING\n ",
- "describe": {
- "columns": [],
- "parameters": {
- "Left": [
- "Int8"
- ]
- },
- "nullable": []
- },
- "hash": "9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c"
-}
diff --git a/core/lib/dal/.sqlx/query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json b/core/lib/dal/.sqlx/query-a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f.json
similarity index 58%
rename from core/lib/dal/.sqlx/query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json
rename to core/lib/dal/.sqlx/query-a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f.json
index 8e210aade885..8b67041427d3 100644
--- a/core/lib/dal/.sqlx/query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json
+++ b/core/lib/dal/.sqlx/query-a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f.json
@@ -1,18 +1,18 @@
{
"db_name": "PostgreSQL",
- "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'generated',\n signature = $1,\n pubkey = $2,\n proof = $3,\n tee_type = $4,\n updated_at = NOW()\n WHERE\n l1_batch_number = $5\n ",
+ "query": "\n UPDATE tee_proof_generation_details\n SET\n tee_type = $1,\n status = 'generated',\n pubkey = $2,\n signature = $3,\n proof = $4,\n updated_at = NOW()\n WHERE\n l1_batch_number = $5\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
+ "Text",
"Bytea",
"Bytea",
"Bytea",
- "Text",
"Int8"
]
},
"nullable": []
},
- "hash": "727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711"
+ "hash": "a8fdcb5180fc5fd125a003e9675f213a0b02b3ff96398920bc0250397bb2a95f"
}
diff --git a/core/lib/dal/.sqlx/query-d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6.json b/core/lib/dal/.sqlx/query-d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6.json
new file mode 100644
index 000000000000..0ed8005289f7
--- /dev/null
+++ b/core/lib/dal/.sqlx/query-d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6.json
@@ -0,0 +1,15 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n INSERT INTO\n tee_proof_generation_details (l1_batch_number, tee_type, status, created_at, updated_at)\n VALUES\n ($1, $2, 'ready_to_be_proven', NOW(), NOW())\n ON CONFLICT (l1_batch_number, tee_type) DO NOTHING\n ",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Left": [
+ "Int8",
+ "Text"
+ ]
+ },
+ "nullable": []
+ },
+ "hash": "d8bc4af72e3d94df53967c83d577a1e1abf3d268b16498cc65758af66781cbb6"
+}
diff --git a/core/lib/dal/.sqlx/query-e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da.json b/core/lib/dal/.sqlx/query-e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da.json
new file mode 100644
index 000000000000..70f7f9d12fa4
--- /dev/null
+++ b/core/lib/dal/.sqlx/query-e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da.json
@@ -0,0 +1,35 @@
+{
+ "db_name": "PostgreSQL",
+ "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = $1\n AND proofs.status = 'ready_to_be_proven'\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "l1_batch_number",
+ "type_info": "Int8"
+ }
+ ],
+ "parameters": {
+ "Left": [
+ {
+ "Custom": {
+ "name": "tee_verifier_input_producer_job_status",
+ "kind": {
+ "Enum": [
+ "Queued",
+ "ManuallySkipped",
+ "InProgress",
+ "Successful",
+ "Failed"
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "e048951ded9e4a4a28238334bc4dc118360ab83bae3196ec941216901be629da"
+}
diff --git a/core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json b/core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json
deleted file mode 100644
index 4236e72fccad..000000000000
--- a/core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json
+++ /dev/null
@@ -1,22 +0,0 @@
-{
- "db_name": "PostgreSQL",
- "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'picked_by_prover',\n updated_at = NOW(),\n prover_taken_at = NOW()\n WHERE\n l1_batch_number = (\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND (\n proofs.status = 'ready_to_be_proven'\n OR (\n proofs.status = 'picked_by_prover'\n AND proofs.prover_taken_at < NOW() - $1::INTERVAL\n )\n )\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proof_generation_details.l1_batch_number\n ",
- "describe": {
- "columns": [
- {
- "ordinal": 0,
- "name": "l1_batch_number",
- "type_info": "Int8"
- }
- ],
- "parameters": {
- "Left": [
- "Interval"
- ]
- },
- "nullable": [
- false
- ]
- },
- "hash": "e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f"
-}
diff --git a/core/lib/dal/doc/TeeProofGenerationDal.md b/core/lib/dal/doc/TeeProofGenerationDal.md
new file mode 100644
index 000000000000..23474d5cb5c5
--- /dev/null
+++ b/core/lib/dal/doc/TeeProofGenerationDal.md
@@ -0,0 +1,19 @@
+# TeeProofGenerationDal
+
+## Table Name
+
+`tee_proofs`
+
+## `status` Diagram
+
+```mermaid
+---
+title: Status Diagram
+---
+stateDiagram-v2
+[*] --> ready_to_be_proven : insert_tee_proof_generation_job
+ready_to_be_proven --> picked_by_prover : get_next_batch_to_be_proven
+picked_by_prover --> generated : save_proof_artifacts_metadata
+generated --> [*]
+
+```
diff --git a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql
new file mode 100644
index 000000000000..09a162f31fbf
--- /dev/null
+++ b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.down.sql
@@ -0,0 +1,5 @@
+ALTER TABLE tee_verifier_input_producer_jobs ADD COLUMN picked_by TEXT;
+
+ALTER TABLE tee_proof_generation_details DROP CONSTRAINT tee_proof_generation_details_pkey;
+ALTER TABLE tee_proof_generation_details ALTER COLUMN tee_type DROP NOT NULL;
+ALTER TABLE tee_proof_generation_details ADD PRIMARY KEY (l1_batch_number);
diff --git a/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql
new file mode 100644
index 000000000000..160af44c221c
--- /dev/null
+++ b/core/lib/dal/migrations/20240805144000_tee_proofs_reorg.up.sql
@@ -0,0 +1,7 @@
+ALTER TABLE tee_verifier_input_producer_jobs DROP COLUMN picked_by;
+
+ALTER TABLE tee_proof_generation_details DROP CONSTRAINT tee_proof_generation_details_pkey;
+UPDATE tee_proof_generation_details SET tee_type = 'sgx' WHERE tee_type IS NULL;
+ALTER TABLE tee_proof_generation_details ALTER COLUMN tee_type SET NOT NULL;
+ALTER TABLE tee_proof_generation_details ALTER COLUMN l1_batch_number SET NOT NULL;
+ALTER TABLE tee_proof_generation_details ADD PRIMARY KEY (l1_batch_number, tee_type);
diff --git a/core/lib/dal/src/models/mod.rs b/core/lib/dal/src/models/mod.rs
index 1e852e3f6364..d22541620f2a 100644
--- a/core/lib/dal/src/models/mod.rs
+++ b/core/lib/dal/src/models/mod.rs
@@ -11,6 +11,7 @@ pub mod storage_log;
pub mod storage_oracle_info;
pub mod storage_protocol_version;
pub mod storage_sync;
+pub mod storage_tee_proof;
pub mod storage_transaction;
pub mod storage_verification_request;
pub mod storage_witness_job_info;
diff --git a/core/lib/dal/src/models/storage_tee_proof.rs b/core/lib/dal/src/models/storage_tee_proof.rs
new file mode 100644
index 000000000000..5c93361e7df1
--- /dev/null
+++ b/core/lib/dal/src/models/storage_tee_proof.rs
@@ -0,0 +1,10 @@
+use chrono::NaiveDateTime;
+
+#[derive(Debug, Clone, sqlx::FromRow)]
+pub struct StorageTeeProof {
+ pub pubkey: Option<Vec<u8>>,
+ pub signature: Option<Vec<u8>>,
+ pub proof: Option<Vec<u8>>,
+ pub updated_at: NaiveDateTime,
+ pub attestation: Option<Vec<u8>>,
+}
diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs
index 43e86116092b..2bd73323eb10 100644
--- a/core/lib/dal/src/tee_proof_generation_dal.rs
+++ b/core/lib/dal/src/tee_proof_generation_dal.rs
@@ -1,14 +1,16 @@
+#![doc = include_str!("../doc/TeeProofGenerationDal.md")]
use std::time::Duration;
use zksync_db_connection::{
- connection::Connection,
- error::DalResult,
- instrument::{InstrumentExt, Instrumented},
+ connection::Connection, error::DalResult, instrument::Instrumented,
utils::pg_interval_from_duration,
};
use zksync_types::{tee_types::TeeType, L1BatchNumber};
-use crate::Core;
+use crate::{
+ models::storage_tee_proof::StorageTeeProof,
+ tee_verifier_input_producer_dal::TeeVerifierInputProducerJobStatus, Core,
+};
#[derive(Debug)]
pub struct TeeProofGenerationDal<'a, 'c> {
@@ -16,12 +18,13 @@ pub struct TeeProofGenerationDal<'a, 'c> {
}
impl TeeProofGenerationDal<'_, '_> {
- pub async fn get_next_block_to_be_proven(
+ pub async fn get_next_batch_to_be_proven(
&mut self,
+ tee_type: TeeType,
processing_timeout: Duration,
) -> DalResult<Option<L1BatchNumber>> {
let processing_timeout = pg_interval_from_duration(processing_timeout);
- let result: Option<L1BatchNumber> = sqlx::query!(
+ let query = sqlx::query!(
r#"
UPDATE tee_proof_generation_details
SET
@@ -29,19 +32,20 @@ impl TeeProofGenerationDal<'_, '_> {
updated_at = NOW(),
prover_taken_at = NOW()
WHERE
- l1_batch_number = (
+ tee_type = $1
+ AND l1_batch_number = (
SELECT
proofs.l1_batch_number
FROM
tee_proof_generation_details AS proofs
JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number
WHERE
- inputs.status = 'Successful'
+ inputs.status = $2
AND (
proofs.status = 'ready_to_be_proven'
OR (
proofs.status = 'picked_by_prover'
- AND proofs.prover_taken_at < NOW() - $1::INTERVAL
+ AND proofs.prover_taken_at < NOW() - $3::INTERVAL
)
)
ORDER BY
@@ -54,48 +58,53 @@ impl TeeProofGenerationDal<'_, '_> {
RETURNING
tee_proof_generation_details.l1_batch_number
"#,
+ &tee_type.to_string(),
+ TeeVerifierInputProducerJobStatus::Successful as TeeVerifierInputProducerJobStatus,
&processing_timeout,
- )
- .fetch_optional(self.storage.conn())
- .await
- .unwrap()
- .map(|row| L1BatchNumber(row.l1_batch_number as u32));
+ );
+ let batch_number = Instrumented::new("get_next_batch_to_be_proven")
+ .with_arg("tee_type", &tee_type)
+ .with_arg("processing_timeout", &processing_timeout)
+ .with(query)
+ .fetch_optional(self.storage)
+ .await?
+ .map(|row| L1BatchNumber(row.l1_batch_number as u32));
- Ok(result)
+ Ok(batch_number)
}
pub async fn save_proof_artifacts_metadata(
&mut self,
- block_number: L1BatchNumber,
- signature: &[u8],
+ batch_number: L1BatchNumber,
+ tee_type: TeeType,
pubkey: &[u8],
+ signature: &[u8],
proof: &[u8],
- tee_type: TeeType,
) -> DalResult<()> {
let query = sqlx::query!(
r#"
UPDATE tee_proof_generation_details
SET
+ tee_type = $1,
status = 'generated',
- signature = $1,
pubkey = $2,
- proof = $3,
- tee_type = $4,
+ signature = $3,
+ proof = $4,
updated_at = NOW()
WHERE
l1_batch_number = $5
"#,
- signature,
+ tee_type.to_string(),
pubkey,
+ signature,
proof,
- tee_type.to_string(),
- i64::from(block_number.0)
+ i64::from(batch_number.0)
);
let instrumentation = Instrumented::new("save_proof_artifacts_metadata")
- .with_arg("signature", &signature)
+ .with_arg("tee_type", &tee_type)
.with_arg("pubkey", &pubkey)
- .with_arg("proof", &proof)
- .with_arg("tee_type", &tee_type);
+ .with_arg("signature", &signature)
+ .with_arg("proof", &proof);
let result = instrumentation
.clone()
.with(query)
@@ -103,7 +112,8 @@ impl TeeProofGenerationDal<'_, '_> {
.await?;
if result.rows_affected() == 0 {
let err = instrumentation.constraint_error(anyhow::anyhow!(
- "Updating TEE proof for a non-existent batch number is not allowed"
+ "Updating TEE proof for a non-existent batch number {} is not allowed",
+ batch_number
));
return Err(err);
}
@@ -113,53 +123,33 @@ impl TeeProofGenerationDal<'_, '_> {
pub async fn insert_tee_proof_generation_job(
&mut self,
- block_number: L1BatchNumber,
+ batch_number: L1BatchNumber,
+ tee_type: TeeType,
) -> DalResult<()> {
- let block_number = i64::from(block_number.0);
- sqlx::query!(
+ let batch_number = i64::from(batch_number.0);
+ let query = sqlx::query!(
r#"
INSERT INTO
- tee_proof_generation_details (l1_batch_number, status, created_at, updated_at)
+ tee_proof_generation_details (l1_batch_number, tee_type, status, created_at, updated_at)
VALUES
- ($1, 'ready_to_be_proven', NOW(), NOW())
- ON CONFLICT (l1_batch_number) DO NOTHING
+ ($1, $2, 'ready_to_be_proven', NOW(), NOW())
+ ON CONFLICT (l1_batch_number, tee_type) DO NOTHING
"#,
- block_number,
- )
- .instrument("create_tee_proof_generation_details")
- .with_arg("l1_batch_number", &block_number)
- .report_latency()
- .execute(self.storage)
- .await?;
+ batch_number,
+ tee_type.to_string(),
+ );
+ let instrumentation = Instrumented::new("insert_tee_proof_generation_job")
+ .with_arg("l1_batch_number", &batch_number)
+ .with_arg("tee_type", &tee_type);
+ instrumentation
+ .clone()
+ .with(query)
+ .execute(self.storage)
+ .await?;
Ok(())
}
- pub async fn get_oldest_unpicked_batch(&mut self) -> DalResult<Option<L1BatchNumber>> {
- let result: Option<L1BatchNumber> = sqlx::query!(
- r#"
- SELECT
- proofs.l1_batch_number
- FROM
- tee_proof_generation_details AS proofs
- JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number
- WHERE
- inputs.status = 'Successful'
- AND proofs.status = 'ready_to_be_proven'
- ORDER BY
- proofs.l1_batch_number ASC
- LIMIT
- 1
- "#,
- )
- .fetch_optional(self.storage.conn())
- .await
- .unwrap()
- .map(|row| L1BatchNumber(row.l1_batch_number as u32));
-
- Ok(result)
- }
-
pub async fn save_attestation(&mut self, pubkey: &[u8], attestation: &[u8]) -> DalResult<()> {
let query = sqlx::query!(
r#"
@@ -175,18 +165,76 @@ impl TeeProofGenerationDal<'_, '_> {
let instrumentation = Instrumented::new("save_attestation")
.with_arg("pubkey", &pubkey)
.with_arg("attestation", &attestation);
- let result = instrumentation
+ instrumentation
.clone()
.with(query)
.execute(self.storage)
.await?;
- if result.rows_affected() == 0 {
- let err = instrumentation.constraint_error(anyhow::anyhow!(
- "Unable to insert TEE attestation: given pubkey already has an attestation assigned"
- ));
- return Err(err);
- }
Ok(())
}
+
+ pub async fn get_tee_proofs(
+ &mut self,
+ batch_number: L1BatchNumber,
+ tee_type: Option<TeeType>,
+ ) -> DalResult<Vec<StorageTeeProof>> {
+ let query = format!(
+ r#"
+ SELECT
+ tp.pubkey,
+ tp.signature,
+ tp.proof,
+ tp.updated_at,
+ ta.attestation
+ FROM
+ tee_proof_generation_details tp
+ LEFT JOIN
+ tee_attestations ta ON tp.pubkey = ta.pubkey
+ WHERE
+ tp.l1_batch_number = $1
+ AND tp.status = 'generated'
+ {}
+ ORDER BY tp.l1_batch_number ASC, tp.tee_type ASC
+ "#,
+ tee_type.map_or_else(String::new, |_| "AND tp.tee_type = $2".to_string())
+ );
+
+ let mut query = sqlx::query_as(&query).bind(i64::from(batch_number.0));
+
+ if let Some(tee_type) = tee_type {
+ query = query.bind(tee_type.to_string());
+ }
+
+ let proofs: Vec<StorageTeeProof> = query.fetch_all(self.storage.conn()).await.unwrap();
+
+ Ok(proofs)
+ }
+
+ pub async fn get_oldest_unpicked_batch(&mut self) -> DalResult<Option<L1BatchNumber>> {
+ let query = sqlx::query!(
+ r#"
+ SELECT
+ proofs.l1_batch_number
+ FROM
+ tee_proof_generation_details AS proofs
+ JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number
+ WHERE
+ inputs.status = $1
+ AND proofs.status = 'ready_to_be_proven'
+ ORDER BY
+ proofs.l1_batch_number ASC
+ LIMIT
+ 1
+ "#,
+ TeeVerifierInputProducerJobStatus::Successful as TeeVerifierInputProducerJobStatus
+ );
+ let batch_number = Instrumented::new("get_oldest_unpicked_batch")
+ .with(query)
+ .fetch_optional(self.storage)
+ .await?
+ .map(|row| L1BatchNumber(row.l1_batch_number as u32));
+
+ Ok(batch_number)
+ }
}
diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs
index 00ac85a40739..a8a578a8de08 100644
--- a/core/lib/prover_interface/src/api.rs
+++ b/core/lib/prover_interface/src/api.rs
@@ -4,6 +4,7 @@
use serde::{Deserialize, Serialize};
use zksync_types::{
protocol_version::{L1VerifierConfig, ProtocolSemanticVersion},
+ tee_types::TeeType,
L1BatchNumber,
};
@@ -52,7 +53,10 @@ pub enum RegisterTeeAttestationResponse {
#[derive(Debug, Serialize, Deserialize)]
pub struct ProofGenerationDataRequest {}
-pub type TeeProofGenerationDataRequest = ProofGenerationDataRequest;
+#[derive(Debug, Serialize, Deserialize)]
+pub struct TeeProofGenerationDataRequest {
+ pub tee_type: TeeType,
+}
#[derive(Debug, Serialize, Deserialize)]
pub enum SubmitProofRequest {
diff --git a/core/lib/types/src/api/mod.rs b/core/lib/types/src/api/mod.rs
index 751de9bd7040..e6c84ecec537 100644
--- a/core/lib/types/src/api/mod.rs
+++ b/core/lib/types/src/api/mod.rs
@@ -3,6 +3,7 @@ use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use serde_json::Value;
use strum::Display;
use zksync_basic_types::{
+ tee_types::TeeType,
web3::{AccessList, Bytes, Index},
L1BatchNumber, H160, H2048, H256, H64, U256, U64,
};
@@ -810,6 +811,18 @@ pub struct Proof {
pub storage_proof: Vec<StorageProof>,
}
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TeeProof {
+ pub l1_batch_number: L1BatchNumber,
+ pub tee_type: Option<TeeType>,
+ pub pubkey: Option<Vec<u8>>,
+ pub signature: Option<Vec<u8>>,
+ pub proof: Option<Vec<u8>>,
+ pub proved_at: DateTime<Utc>,
+ pub attestation: Option<Vec<u8>>,
+}
+
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TransactionDetailedResult {
diff --git a/core/lib/web3_decl/src/namespaces/unstable.rs b/core/lib/web3_decl/src/namespaces/unstable.rs
index 4996813a9855..d2a6e29c7083 100644
--- a/core/lib/web3_decl/src/namespaces/unstable.rs
+++ b/core/lib/web3_decl/src/namespaces/unstable.rs
@@ -1,7 +1,11 @@
#[cfg_attr(not(feature = "server"), allow(unused_imports))]
use jsonrpsee::core::RpcResult;
use jsonrpsee::proc_macros::rpc;
-use zksync_types::{api::TransactionExecutionInfo, H256};
+use zksync_types::{
+ api::{TeeProof, TransactionExecutionInfo},
+ tee_types::TeeType,
+ L1BatchNumber, H256,
+};
use crate::client::{ForNetwork, L2};
@@ -20,4 +24,11 @@ pub trait UnstableNamespace {
&self,
hash: H256,
) -> RpcResult<Option<TransactionExecutionInfo>>;
+
+ #[method(name = "getTeeProofs")]
+ async fn tee_proofs(
+ &self,
+ l1_batch_number: L1BatchNumber,
+ tee_type: Option<TeeType>,
+ ) -> RpcResult<Vec<TeeProof>>;
}
diff --git a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs
index 6abaa718a050..91330aa7d949 100644
--- a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs
+++ b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs
@@ -1,4 +1,8 @@
-use zksync_types::{api::TransactionExecutionInfo, H256};
+use zksync_types::{
+ api::{TeeProof, TransactionExecutionInfo},
+ tee_types::TeeType,
+ L1BatchNumber, H256,
+};
use zksync_web3_decl::{
jsonrpsee::core::{async_trait, RpcResult},
namespaces::UnstableNamespaceServer,
@@ -16,4 +20,14 @@ impl UnstableNamespaceServer for UnstableNamespace {
.await
.map_err(|err| self.current_method().map_err(err))
}
+
+ async fn tee_proofs(
+ &self,
+ l1_batch_number: L1BatchNumber,
+ tee_type: Option<TeeType>,
+ ) -> RpcResult<Vec<TeeProof>> {
+ self.get_tee_proofs_impl(l1_batch_number, tee_type)
+ .await
+ .map_err(|err| self.current_method().map_err(err))
+ }
}
diff --git a/core/node/api_server/src/web3/namespaces/unstable.rs b/core/node/api_server/src/web3/namespaces/unstable.rs
index b46ecd6dc530..783088cdc36a 100644
--- a/core/node/api_server/src/web3/namespaces/unstable.rs
+++ b/core/node/api_server/src/web3/namespaces/unstable.rs
@@ -1,5 +1,10 @@
+use chrono::{DateTime, Utc};
use zksync_dal::{CoreDal, DalError};
-use zksync_types::api::TransactionExecutionInfo;
+use zksync_types::{
+ api::{TeeProof, TransactionExecutionInfo},
+ tee_types::TeeType,
+ L1BatchNumber,
+};
use zksync_web3_decl::{error::Web3Error, types::H256};
use crate::web3::{backend_jsonrpsee::MethodTracer, RpcState};
@@ -30,4 +35,28 @@ impl UnstableNamespace {
.map_err(DalError::generalize)?
.map(|execution_info| TransactionExecutionInfo { execution_info }))
}
+
+ pub async fn get_tee_proofs_impl(
+ &self,
+ l1_batch_number: L1BatchNumber,
+ tee_type: Option<TeeType>,
+ ) -> Result<Vec<TeeProof>, Web3Error> {
+ let mut storage = self.state.acquire_connection().await?;
+ Ok(storage
+ .tee_proof_generation_dal()
+ .get_tee_proofs(l1_batch_number, tee_type)
+ .await
+ .map_err(DalError::generalize)?
+ .into_iter()
+ .map(|proof| TeeProof {
+ l1_batch_number,
+ tee_type,
+ pubkey: proof.pubkey,
+ signature: proof.signature,
+ proof: proof.proof,
+ proved_at: DateTime::<Utc>::from_naive_utc_and_offset(proof.updated_at, Utc),
+ attestation: proof.attestation,
+ })
+ .collect::<Vec<_>>())
+ }
}
diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs
index 243c9e06cfcc..d85591dd2c90 100644
--- a/core/node/proof_data_handler/src/tee_request_processor.rs
+++ b/core/node/proof_data_handler/src/tee_request_processor.rs
@@ -49,9 +49,10 @@ impl TeeRequestProcessor {
let l1_batch_number_result = connection
.tee_proof_generation_dal()
- .get_next_block_to_be_proven(self.config.proof_generation_timeout())
+ .get_next_batch_to_be_proven(request.tee_type, self.config.proof_generation_timeout())
.await
.map_err(RequestProcessorError::Dal)?;
+
let l1_batch_number = match l1_batch_number_result {
Some(number) => number,
None => return Ok(Json(TeeProofGenerationDataResponse(None))),
@@ -63,9 +64,9 @@ impl TeeRequestProcessor {
.await
.map_err(RequestProcessorError::ObjectStore)?;
- Ok(Json(TeeProofGenerationDataResponse(Some(Box::new(
- tee_verifier_input,
- )))))
+ let response = TeeProofGenerationDataResponse(Some(Box::new(tee_verifier_input)));
+
+ Ok(Json(response))
}
pub(crate) async fn submit_proof(
@@ -82,16 +83,16 @@ impl TeeRequestProcessor {
let mut dal = connection.tee_proof_generation_dal();
tracing::info!(
- "Received proof {:?} for block number: {:?}",
+ "Received proof {:?} for batch number: {:?}",
proof,
l1_batch_number
);
dal.save_proof_artifacts_metadata(
l1_batch_number,
- &proof.0.signature,
+ proof.0.tee_type,
&proof.0.pubkey,
+ &proof.0.signature,
&proof.0.proof,
- proof.0.tee_type,
)
.await
.map_err(RequestProcessorError::Dal)?;
diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs
index 1fbe563d2d28..88d4930e6920 100644
--- a/core/node/proof_data_handler/src/tests.rs
+++ b/core/node/proof_data_handler/src/tests.rs
@@ -18,7 +18,7 @@ use zksync_prover_interface::{
api::SubmitTeeProofRequest,
inputs::{TeeVerifierInput, V1TeeVerifierInput, WitnessInputMerklePaths},
};
-use zksync_types::{commitment::L1BatchCommitmentMode, L1BatchNumber, H256};
+use zksync_types::{commitment::L1BatchCommitmentMode, tee_types::TeeType, L1BatchNumber, H256};
use crate::create_proof_processing_router;
@@ -94,7 +94,7 @@ async fn request_tee_proof_inputs() {
},
L1BatchCommitmentMode::Rollup,
);
- let req_body = Body::from(serde_json::to_vec(&json!({})).unwrap());
+ let req_body = Body::from(serde_json::to_vec(&json!({ "tee_type": "Sgx" })).unwrap());
let response = app
.oneshot(
Request::builder()
@@ -180,6 +180,26 @@ async fn submit_tee_proof() {
.unwrap();
assert!(oldest_batch_number.is_none());
+
+ // there should be one SGX proof in the db now
+
+ let proofs = proof_db_conn
+ .tee_proof_generation_dal()
+ .get_tee_proofs(batch_number, Some(TeeType::Sgx))
+ .await
+ .unwrap();
+
+ assert_eq!(proofs.len(), 1);
+
+ let proof = &proofs[0];
+
+ assert_eq!(proof.proof.as_ref().unwrap(), &tee_proof_request.0.proof);
+ assert_eq!(proof.attestation.as_ref().unwrap(), &attestation);
+ assert_eq!(
+ proof.signature.as_ref().unwrap(),
+ &tee_proof_request.0.signature
+ );
+ assert_eq!(proof.pubkey.as_ref().unwrap(), &tee_proof_request.0.pubkey);
}
// Mock SQL db with information about the status of the TEE proof generation
@@ -215,7 +235,7 @@ async fn mock_tee_batch_status(
// mock SQL table with relevant information about the status of TEE proof generation ('ready_to_be_proven')
proof_dal
- .insert_tee_proof_generation_job(batch_number)
+ .insert_tee_proof_generation_job(batch_number, TeeType::Sgx)
.await
.expect("Failed to insert tee_proof_generation_job");
diff --git a/core/node/tee_verifier_input_producer/src/lib.rs b/core/node/tee_verifier_input_producer/src/lib.rs
index 0cd28ee5ce79..abd70542a42f 100644
--- a/core/node/tee_verifier_input_producer/src/lib.rs
+++ b/core/node/tee_verifier_input_producer/src/lib.rs
@@ -19,7 +19,7 @@ use zksync_prover_interface::inputs::{
};
use zksync_queued_job_processor::JobProcessor;
use zksync_tee_verifier::Verify;
-use zksync_types::{L1BatchNumber, L2ChainId};
+use zksync_types::{tee_types::TeeType, L1BatchNumber, L2ChainId};
use zksync_utils::u256_to_h256;
use zksync_vm_utils::storage::L1BatchParamsProvider;
@@ -241,7 +241,7 @@ impl JobProcessor for TeeVerifierInputProducer {
.context("failed to mark job as successful for TeeVerifierInputProducer")?;
transaction
.tee_proof_generation_dal()
- .insert_tee_proof_generation_job(job_id)
+ .insert_tee_proof_generation_job(job_id, TeeType::Sgx)
.await?;
transaction
.commit()