From 844c35b49504129453822faaa6627ccad8b34544 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Mon, 18 Nov 2024 12:05:38 +0000 Subject: [PATCH 1/9] refactor: add a separate db for proving input/output --- .../src/interfaces/proving-job.ts | 103 +++--- .../src/orchestrator/orchestrator.ts | 2 +- .../proof_input_output_database.ts | 100 ++++++ .../src/proving_broker/proving_agent.test.ts | 79 +++-- .../src/proving_broker/proving_agent.ts | 70 +++- .../src/proving_broker/proving_broker.test.ts | 308 +++++++----------- .../src/proving_broker/proving_broker.ts | 13 +- .../proving_broker_interface.ts | 4 +- .../proving_job_controller.test.ts | 9 +- .../proving_broker/proving_job_controller.ts | 19 +- .../proving_broker/proving_job_database.ts | 4 +- .../proving_job_database/memory.ts | 4 +- .../proving_job_database/persisted.ts | 4 +- 13 files changed, 432 insertions(+), 287 deletions(-) create mode 100644 yarn-project/prover-client/src/proving_broker/proof_input_output_database.ts diff --git a/yarn-project/circuit-types/src/interfaces/proving-job.ts b/yarn-project/circuit-types/src/interfaces/proving-job.ts index 7c0643192c8..f751368b84c 100644 --- a/yarn-project/circuit-types/src/interfaces/proving-job.ts +++ b/yarn-project/circuit-types/src/interfaces/proving-job.ts @@ -241,80 +241,69 @@ export const ProvingRequestResultSchema = z.discriminatedUnion('type', [ export const V2ProvingJobId = z.string().brand('ProvingJobId'); export type V2ProvingJobId = z.infer; -export const V2ProvingJob = z.discriminatedUnion('type', [ +export const V2ProofInput = z.discriminatedUnion('type', [ z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), type: z.literal(ProvingRequestType.PUBLIC_VM), - inputs: AvmCircuitInputs.schema, + value: AvmCircuitInputs.schema, }), z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), type: z.literal(ProvingRequestType.BASE_PARITY), - inputs: BaseParityInputs.schema, + value: BaseParityInputs.schema, }), z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), type: z.literal(ProvingRequestType.ROOT_PARITY), - inputs: RootParityInputs.schema, + value: RootParityInputs.schema, }), z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), type: z.literal(ProvingRequestType.PRIVATE_BASE_ROLLUP), - inputs: PrivateBaseRollupInputs.schema, + value: PrivateBaseRollupInputs.schema, }), z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), type: z.literal(ProvingRequestType.PUBLIC_BASE_ROLLUP), - inputs: PublicBaseRollupInputs.schema, + value: PublicBaseRollupInputs.schema, }), z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), type: z.literal(ProvingRequestType.MERGE_ROLLUP), - inputs: MergeRollupInputs.schema, + value: MergeRollupInputs.schema, }), z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), type: z.literal(ProvingRequestType.BLOCK_ROOT_ROLLUP), - inputs: BlockRootRollupInputs.schema, + value: BlockRootRollupInputs.schema, }), z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), type: z.literal(ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP), - inputs: EmptyBlockRootRollupInputs.schema, + value: EmptyBlockRootRollupInputs.schema, }), z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), type: z.literal(ProvingRequestType.BLOCK_MERGE_ROLLUP), - inputs: BlockMergeRollupInputs.schema, + value: BlockMergeRollupInputs.schema, }), z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), type: z.literal(ProvingRequestType.ROOT_ROLLUP), - inputs: RootRollupInputs.schema, + value: RootRollupInputs.schema, }), z.object({ - id: 
V2ProvingJobId, - blockNumber: z.number(), type: z.literal(ProvingRequestType.PRIVATE_KERNEL_EMPTY), - inputs: PrivateKernelEmptyInputData.schema, + value: PrivateKernelEmptyInputData.schema, }), z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), type: z.literal(ProvingRequestType.TUBE_PROOF), - inputs: TubeInputs.schema, + value: TubeInputs.schema, }), ]); + +export type V2ProofInput = z.infer; + +export const V2ProofInputUri = z.string().brand('ProofInputUri'); +export type V2ProofInputUri = z.infer; + +export const V2ProvingJob = z.object({ + id: V2ProvingJobId, + blockNumber: z.number(), + type: z.nativeEnum(ProvingRequestType), + inputs: V2ProofInputUri, +}); + export type V2ProvingJob = z.infer; export const V2ProofOutput = z.discriminatedUnion('type', [ @@ -370,14 +359,50 @@ export const V2ProofOutput = z.discriminatedUnion('type', [ export type V2ProofOutput = z.infer; +export const V2ProofOutputUri = z.string().brand('ProofOutputUri'); +export type V2ProofOutputUri = z.infer; + +export type V2ProofInputsByType = { + [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: PrivateKernelEmptyInputData; + [ProvingRequestType.PUBLIC_VM]: AvmCircuitInputs; + [ProvingRequestType.PRIVATE_BASE_ROLLUP]: PrivateBaseRollupInputs; + [ProvingRequestType.PUBLIC_BASE_ROLLUP]: PublicBaseRollupInputs; + [ProvingRequestType.MERGE_ROLLUP]: MergeRollupInputs; + [ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP]: EmptyBlockRootRollupInputs; + [ProvingRequestType.BLOCK_ROOT_ROLLUP]: BlockRootRollupInputs; + [ProvingRequestType.BLOCK_MERGE_ROLLUP]: BlockMergeRollupInputs; + [ProvingRequestType.ROOT_ROLLUP]: RootRollupInputs; + [ProvingRequestType.BASE_PARITY]: BaseParityInputs; + [ProvingRequestType.ROOT_PARITY]: RootParityInputs; + [ProvingRequestType.TUBE_PROOF]: TubeInputs; +}; + +export type V2ProofOutputByType = { + [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: PublicInputsAndRecursiveProof; + [ProvingRequestType.PUBLIC_VM]: ProofAndVerificationKey; + [ProvingRequestType.PRIVATE_BASE_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.PUBLIC_BASE_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.MERGE_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.BLOCK_ROOT_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.BLOCK_MERGE_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.ROOT_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.BASE_PARITY]: PublicInputsAndRecursiveProof; + [ProvingRequestType.ROOT_PARITY]: PublicInputsAndRecursiveProof< + ParityPublicInputs, + typeof NESTED_RECURSIVE_PROOF_LENGTH + >; + [ProvingRequestType.TUBE_PROOF]: ProofAndVerificationKey; +}; + export const V2ProvingJobStatus = z.discriminatedUnion('status', [ z.object({ status: z.literal('in-queue') }), z.object({ status: z.literal('in-progress') }), z.object({ status: z.literal('not-found') }), - z.object({ status: z.literal('resolved'), value: V2ProofOutput }), + z.object({ status: z.literal('resolved'), value: V2ProofOutputUri }), z.object({ status: z.literal('rejected'), error: z.string() }), ]); export type V2ProvingJobStatus = z.infer; -export const V2ProvingJobResult = z.union([z.object({ value: V2ProofOutput }), z.object({ error: z.string() })]); +export const V2ProvingJobResult = z.union([z.object({ value: V2ProofOutputUri }), z.object({ error: z.string() })]); export type V2ProvingJobResult = z.infer; diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts 
b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 85c71b7f8ba..b13bc7e7367 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -75,7 +75,7 @@ import { import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js'; import { TxProvingState } from './tx-proving-state.js'; -const logger = createDebugLogger('aztec:prover:proving-orchestrator'); +const logger = createDebugLogger('aztec:prover-client:orchestrator'); /** * Implements an event driven proving scheduler to build the recursive proof tree. The idea being: diff --git a/yarn-project/prover-client/src/proving_broker/proof_input_output_database.ts b/yarn-project/prover-client/src/proving_broker/proof_input_output_database.ts new file mode 100644 index 00000000000..f992946fbd3 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/proof_input_output_database.ts @@ -0,0 +1,100 @@ +import { + type ProvingRequestType, + V2ProofInput, + type V2ProofInputUri, + V2ProofOutput, + type V2ProofOutputUri, + type V2ProvingJobId, +} from '@aztec/circuit-types'; + +/** + * A database for storing proof inputs and outputs. + */ +export interface ProofInputOutputDatabase { + /** + * Save a proof input to the database. + * @param jobId - The ID of the job the proof input is associated with. + * @param type - The type of the proving request. + * @param proofInput - The proof input to save. + * @returns The URI of the saved proof input. + */ + saveProofInput(jobId: V2ProvingJobId, type: ProvingRequestType, proofInput: V2ProofInput): Promise; + + /** + * Save a proof output to the database. + * @param jobId - The ID of the job the proof input is associated with. + * @param type - The type of the proving request. + * @param proofOutput - The proof output to save. + * @returns The URI of the saved proof output. + */ + saveProofOutput( + jobId: V2ProvingJobId, + type: ProvingRequestType, + proofOutput: V2ProofOutput, + ): Promise; + + /** + * Retrieve a proof input from the database. + * @param uri - The URI of the proof input to retrieve. + * @returns The proof input. + */ + getProofInput(uri: V2ProofInputUri): Promise; + + /** + * Retrieve a proof output from the database. + * @param uri - The URI of the proof output to retrieve. + * @returns The proof output. + */ + getProofOutput(uri: V2ProofOutputUri): Promise; +} + +/** + * An implementation of a proof input/output database that stores data inline in the URI. 
+ */ +export class InlineProofIODatabase implements ProofInputOutputDatabase { + private static readonly PREFIX = 'data:application/json;base64'; + private static readonly SEPARATOR = ','; + private static readonly BUFFER_ENCODING = 'base64url'; + + saveProofInput(_id: V2ProvingJobId, _type: ProvingRequestType, proofInput: V2ProofInput): Promise { + return Promise.resolve( + (InlineProofIODatabase.PREFIX + + InlineProofIODatabase.SEPARATOR + + Buffer.from(JSON.stringify(proofInput)).toString(InlineProofIODatabase.BUFFER_ENCODING)) as V2ProofInputUri, + ); + } + + saveProofOutput( + _id: V2ProvingJobId, + _type: ProvingRequestType, + proofOutput: V2ProofOutput, + ): Promise { + return Promise.resolve( + (InlineProofIODatabase.PREFIX + + InlineProofIODatabase.SEPARATOR + + Buffer.from(JSON.stringify(proofOutput)).toString(InlineProofIODatabase.BUFFER_ENCODING)) as V2ProofOutputUri, + ); + } + + getProofInput(uri: V2ProofInputUri): Promise { + const [prefix, data] = uri.split(','); + if (prefix !== InlineProofIODatabase.PREFIX) { + throw new Error('Invalid proof input URI: ' + prefix); + } + + return Promise.resolve( + V2ProofInput.parse(JSON.parse(Buffer.from(data, InlineProofIODatabase.BUFFER_ENCODING).toString())), + ); + } + + getProofOutput(uri: V2ProofOutputUri): Promise { + const [prefix, data] = uri.split(','); + if (prefix !== InlineProofIODatabase.PREFIX) { + throw new Error('Invalid proof output URI: ' + prefix); + } + + return Promise.resolve( + V2ProofOutput.parse(JSON.parse(Buffer.from(data, InlineProofIODatabase.BUFFER_ENCODING).toString())), + ); + } +} diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts b/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts index 9a2c7db1da9..07b2208add1 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts @@ -2,6 +2,9 @@ import { ProvingError, ProvingRequestType, type PublicInputsAndRecursiveProof, + type V2ProofInput, + type V2ProofInputUri, + type V2ProofOutputUri, type V2ProvingJob, type V2ProvingJobId, makePublicInputsAndRecursiveProof, @@ -20,6 +23,7 @@ import { promiseWithResolvers } from '@aztec/foundation/promise'; import { jest } from '@jest/globals'; import { MockProver } from '../test/mock_prover.js'; +import { type ProofInputOutputDatabase } from './proof_input_output_database.js'; import { ProvingAgent } from './proving_agent.js'; import { type ProvingJobConsumer } from './proving_broker_interface.js'; @@ -27,6 +31,7 @@ describe('ProvingAgent', () => { let prover: MockProver; let jobSource: jest.Mocked; let agent: ProvingAgent; + let proofDB: jest.Mocked; const agentPollIntervalMs = 1000; beforeEach(() => { @@ -39,7 +44,14 @@ describe('ProvingAgent', () => { reportProvingJobError: jest.fn(), reportProvingJobSuccess: jest.fn(), }; - agent = new ProvingAgent(jobSource, prover, [ProvingRequestType.BASE_PARITY]); + proofDB = { + getProofInput: jest.fn(), + getProofOutput: jest.fn(), + saveProofInput: jest.fn(), + saveProofOutput: jest.fn(), + }; + + agent = new ProvingAgent(jobSource, proofDB, prover, [ProvingRequestType.BASE_PARITY]); }); afterEach(async () => { @@ -59,8 +71,9 @@ describe('ProvingAgent', () => { promiseWithResolvers>(); jest.spyOn(prover, 'getBaseParityProof').mockReturnValueOnce(promise); - const jobResponse = makeBaseParityJob(); - jobSource.getProvingJob.mockResolvedValueOnce(jobResponse); + const { job, time, inputs } = makeBaseParityJob(); + 
jobSource.getProvingJob.mockResolvedValueOnce({ job, time }); + proofDB.getProofInput.mockResolvedValueOnce(inputs); agent.start(); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); @@ -85,56 +98,64 @@ describe('ProvingAgent', () => { }); it('reports success to the job source', async () => { - const jobResponse = makeBaseParityJob(); + const { job, time, inputs } = makeBaseParityJob(); const result = makeBaseParityResult(); + jest.spyOn(prover, 'getBaseParityProof').mockResolvedValueOnce(result.value); - jobSource.getProvingJob.mockResolvedValueOnce(jobResponse); + jobSource.getProvingJob.mockResolvedValueOnce({ job, time }); + proofDB.getProofInput.mockResolvedValueOnce(inputs); + proofDB.saveProofOutput.mockResolvedValueOnce('output-uri' as V2ProofOutputUri); + agent.start(); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); - expect(jobSource.reportProvingJobSuccess).toHaveBeenCalledWith(jobResponse.job.id, result); + expect(proofDB.saveProofOutput).toHaveBeenCalledWith(result); + expect(jobSource.reportProvingJobSuccess).toHaveBeenCalledWith(job.id, 'output-uri'); }); it('reports errors to the job source', async () => { - const jobResponse = makeBaseParityJob(); + const { job, time, inputs } = makeBaseParityJob(); jest.spyOn(prover, 'getBaseParityProof').mockRejectedValueOnce(new Error('test error')); - jobSource.getProvingJob.mockResolvedValueOnce(jobResponse); + jobSource.getProvingJob.mockResolvedValueOnce({ job, time }); + proofDB.getProofInput.mockResolvedValueOnce(inputs); agent.start(); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); - expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(jobResponse.job.id, new Error('test error'), false); + expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(job.id, new Error('test error'), false); }); it('sets the retry flag on when reporting an error', async () => { - const jobResponse = makeBaseParityJob(); + const { job, time, inputs } = makeBaseParityJob(); const err = new ProvingError('test error', undefined, true); jest.spyOn(prover, 'getBaseParityProof').mockRejectedValueOnce(err); - jobSource.getProvingJob.mockResolvedValueOnce(jobResponse); + jobSource.getProvingJob.mockResolvedValueOnce({ job, time }); + proofDB.getProofInput.mockResolvedValueOnce(inputs); agent.start(); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); - expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(jobResponse.job.id, err, true); + expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(job.id, err, true); }); it('reports jobs in progress to the job source', async () => { - const jobResponse = makeBaseParityJob(); + const { job, time, inputs } = makeBaseParityJob(); const { promise, resolve } = promiseWithResolvers>(); jest.spyOn(prover, 'getBaseParityProof').mockReturnValueOnce(promise); - jobSource.getProvingJob.mockResolvedValueOnce(jobResponse); + jobSource.getProvingJob.mockResolvedValueOnce({ job, time }); + proofDB.getProofInput.mockResolvedValueOnce(inputs); agent.start(); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); - expect(jobSource.reportProvingJobProgress).toHaveBeenCalledWith(jobResponse.job.id, jobResponse.time, { + expect(jobSource.reportProvingJobProgress).toHaveBeenCalledWith(job.id, time, { allowList: [ProvingRequestType.BASE_PARITY], }); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); - expect(jobSource.reportProvingJobProgress).toHaveBeenCalledWith(jobResponse.job.id, jobResponse.time, { + 
expect(jobSource.reportProvingJobProgress).toHaveBeenCalledWith(job.id, time, { allowList: [ProvingRequestType.BASE_PARITY], }); @@ -142,7 +163,7 @@ describe('ProvingAgent', () => { }); it('abandons jobs if told so by the source', async () => { - const firstJobResponse = makeBaseParityJob(); + const firstJob = makeBaseParityJob(); let firstProofAborted = false; const firstProof = promiseWithResolvers>(); @@ -156,13 +177,14 @@ describe('ProvingAgent', () => { return firstProof.promise; }); - jobSource.getProvingJob.mockResolvedValueOnce(firstJobResponse); + jobSource.getProvingJob.mockResolvedValueOnce({ job: firstJob.job, time: firstJob.time }); + proofDB.getProofInput.mockResolvedValueOnce(firstJob.inputs); agent.start(); // now the agent should be happily proving and reporting progress await jest.advanceTimersByTimeAsync(agentPollIntervalMs); expect(jobSource.reportProvingJobProgress).toHaveBeenCalledTimes(1); - expect(jobSource.reportProvingJobProgress).toHaveBeenCalledWith(firstJobResponse.job.id, firstJobResponse.time, { + expect(jobSource.reportProvingJobProgress).toHaveBeenCalledWith(firstJob.job.id, firstJob.time, { allowList: [ProvingRequestType.BASE_PARITY], }); @@ -172,7 +194,9 @@ describe('ProvingAgent', () => { // now let's simulate the job source cancelling the job and giving the agent something else to do // this should cause the agent to abort the current job and start the new one const secondJobResponse = makeBaseParityJob(); + jobSource.reportProvingJobProgress.mockResolvedValueOnce(secondJobResponse); + proofDB.getProofInput.mockResolvedValueOnce(secondJobResponse.inputs); const secondProof = promiseWithResolvers>(); @@ -180,13 +204,9 @@ describe('ProvingAgent', () => { await jest.advanceTimersByTimeAsync(agentPollIntervalMs); expect(jobSource.reportProvingJobProgress).toHaveBeenCalledTimes(3); - expect(jobSource.reportProvingJobProgress).toHaveBeenLastCalledWith( - firstJobResponse.job.id, - firstJobResponse.time, - { - allowList: [ProvingRequestType.BASE_PARITY], - }, - ); + expect(jobSource.reportProvingJobProgress).toHaveBeenLastCalledWith(firstJob.job.id, firstJob.time, { + allowList: [ProvingRequestType.BASE_PARITY], + }); expect(firstProofAborted).toBe(true); // agent should have switched now @@ -203,16 +223,17 @@ describe('ProvingAgent', () => { secondProof.resolve(makeBaseParityResult().value); }); - function makeBaseParityJob(): { job: V2ProvingJob; time: number } { + function makeBaseParityJob(): { job: V2ProvingJob; time: number; inputs: V2ProofInput } { const time = jest.now(); + const inputs: V2ProofInput = { type: ProvingRequestType.BASE_PARITY, value: makeBaseParityInputs() }; const job: V2ProvingJob = { id: randomBytes(8).toString('hex') as V2ProvingJobId, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeBaseParityInputs(), + inputs: randomBytes(8).toString('hex') as V2ProofInputUri, }; - return { job, time }; + return { job, time, inputs }; } function makeBaseParityResult() { diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent.ts b/yarn-project/prover-client/src/proving_broker/proving_agent.ts index 5ee86900e0d..5857b6bb887 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_agent.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_agent.ts @@ -1,12 +1,16 @@ import { ProvingError, - type ProvingRequestType, + ProvingRequestType, type ServerCircuitProver, + type V2ProofOutput, type V2ProvingJob, + type V2ProvingJobId, } from '@aztec/circuit-types'; +import { randomBytes } from 
'@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; +import { type ProofInputOutputDatabase } from './proof_input_output_database.js'; import { type ProvingJobConsumer } from './proving_broker_interface.js'; import { ProvingJobController, ProvingJobStatus } from './proving_job_controller.js'; @@ -20,13 +24,16 @@ export class ProvingAgent { constructor( /** The source of proving jobs */ private jobSource: ProvingJobConsumer, + /** Database holding proof inputs and outputs */ + private proofInputOutputDatabase: ProofInputOutputDatabase, /** The prover implementation to defer jobs to */ private circuitProver: ServerCircuitProver, /** Optional list of allowed proof types to build */ private proofAllowList?: Array, /** How long to wait between jobs */ private pollIntervalMs = 1000, - private log = createDebugLogger('aztec:proving-broker:proving-agent'), + name = randomBytes(4).toString('hex'), + private log = createDebugLogger('aztec:prover-client:proving-agent:' + name), ) { this.runningPromise = new RunningPromise(this.safeWork, this.pollIntervalMs); } @@ -69,22 +76,65 @@ export class ProvingAgent { return; } + let abortedProofJobId: string = ''; + let abortedProofName: string = ''; if (this.currentJobController?.getStatus() === ProvingJobStatus.PROVING) { + abortedProofJobId = this.currentJobController.getJobId(); + abortedProofName = this.currentJobController.getProofTypeName(); this.currentJobController?.abort(); } const { job, time } = maybeJob; - this.currentJobController = new ProvingJobController(job, time, this.circuitProver, (err, result) => { - if (err) { - const retry = err.name === ProvingError.NAME ? (err as ProvingError).retry : false; - return this.jobSource.reportProvingJobError(job.id, err, retry); - } else if (result) { - return this.jobSource.reportProvingJobSuccess(job.id, result); - } - }); + const inputs = await this.proofInputOutputDatabase.getProofInput(job.inputs); + + this.currentJobController = new ProvingJobController( + job.id, + inputs, + time, + this.circuitProver, + this.handleJobResult, + ); + + if (abortedProofJobId) { + this.log.info( + `Aborting job id=${abortedProofJobId} type=${abortedProofName} to start new job id=${this.currentJobController.getJobId()} type=${this.currentJobController.getProofTypeName()} inputsUri=${truncateString( + job.inputs, + )}`, + ); + } else { + this.log.info( + `Starting job id=${this.currentJobController.getJobId()} type=${this.currentJobController.getProofTypeName()} inputsUri=${truncateString( + job.inputs, + )}`, + ); + } + this.currentJobController.start(); } catch (err) { this.log.error(`Error in ProvingAgent: ${String(err)}`); } }; + + handleJobResult = async ( + jobId: V2ProvingJobId, + type: ProvingRequestType, + err: Error | undefined, + result: V2ProofOutput | undefined, + ) => { + if (err) { + const retry = err.name === ProvingError.NAME ? 
(err as ProvingError).retry : false; + this.log.info(`Job id=${jobId} type=${ProvingRequestType[type]} failed err=${err.message} retry=${retry}`); + return this.jobSource.reportProvingJobError(jobId, err, retry); + } else if (result) { + const outputUri = await this.proofInputOutputDatabase.saveProofOutput(jobId, type, result); + this.log.info( + `Job id=${jobId} type=${ProvingRequestType[type]} completed outputUri=${truncateString(outputUri)}`, + ); + return this.jobSource.reportProvingJobSuccess(jobId, outputUri); + } + }; +} + +function truncateString(str: string, length: number = 64): string { + return str.length > length ? str.slice(0, length) + '...' : str; } diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts b/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts index fef79bfb99f..4414a5eaa7a 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts @@ -1,18 +1,10 @@ import { ProvingRequestType, - type V2ProofOutput, + type V2ProofInputUri, + type V2ProofOutputUri, type V2ProvingJob, type V2ProvingJobId, - makePublicInputsAndRecursiveProof, } from '@aztec/circuit-types'; -import { RECURSIVE_PROOF_LENGTH, VerificationKeyData, makeRecursiveProof } from '@aztec/circuits.js'; -import { - makeBaseOrMergeRollupPublicInputs, - makeBaseParityInputs, - makeParityPublicInputs, - makePrivateBaseRollupInputs, - makeRootParityInputs, -} from '@aztec/circuits.js/testing'; import { randomBytes } from '@aztec/foundation/crypto'; import { openTmpStore } from '@aztec/kv-store/utils'; @@ -77,7 +69,7 @@ describe.each([ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); expect(await broker.getProvingJobStatus(id)).toEqual({ status: 'in-queue' }); @@ -86,7 +78,7 @@ describe.each([ id: id2, blockNumber: 1, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, - inputs: makePrivateBaseRollupInputs(), + inputs: makeInputsUri(), }); expect(await broker.getProvingJobStatus(id2)).toEqual({ status: 'in-queue' }); }); @@ -96,7 +88,7 @@ describe.each([ id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob); @@ -110,14 +102,14 @@ describe.each([ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeBaseParityInputs(1), + inputs: makeInputsUri(), }); await expect( broker.enqueueProvingJob({ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeBaseParityInputs(2), + inputs: makeInputsUri(), }), ).rejects.toThrow('Duplicate proving job ID'); }); @@ -133,7 +125,7 @@ describe.each([ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); await expect(broker.getProvingJobStatus(id)).resolves.toEqual({ status: 'in-queue' }); @@ -148,7 +140,7 @@ describe.each([ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); await expect(broker.getProvingJobStatus(id)).resolves.toEqual({ status: 'in-queue' }); await broker.getProvingJob(); @@ -162,19 +154,15 @@ describe.each([ id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob); - const value = makePublicInputsAndRecursiveProof( - 
makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ); - await broker.reportProvingJobSuccess(provingJob.id, { type: ProvingRequestType.BASE_PARITY, value }); + const value = makeOutputsUri(); + await broker.reportProvingJobSuccess(provingJob.id, value); const status = await broker.getProvingJobStatus(provingJob.id); - expect(status).toEqual({ status: 'resolved', value: { type: ProvingRequestType.BASE_PARITY, value } }); + expect(status).toEqual({ status: 'resolved', value }); }); it('returns job error if failed', async () => { @@ -182,7 +170,7 @@ describe.each([ id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob); @@ -213,21 +201,21 @@ describe.each([ id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; const provingJob2: V2ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; const provingJob3: V2ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 3, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob2); @@ -242,7 +230,7 @@ describe.each([ id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); await expect( @@ -256,7 +244,7 @@ describe.each([ id: baseParity1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); const baseRollup1 = makeProvingJobId(); @@ -264,7 +252,7 @@ describe.each([ id: baseRollup1, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 1, - inputs: makePrivateBaseRollupInputs(), + inputs: makeInputsUri(), }); const baseRollup2 = makeProvingJobId(); @@ -272,7 +260,7 @@ describe.each([ id: baseRollup2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makePrivateBaseRollupInputs(), + inputs: makeInputsUri(), }); const rootParity1 = makeProvingJobId(); @@ -280,7 +268,7 @@ describe.each([ id: rootParity1, type: ProvingRequestType.ROOT_PARITY, blockNumber: 1, - inputs: makeRootParityInputs(), + inputs: makeInputsUri(), }); await getAndAssertNextJobId(baseParity1, ProvingRequestType.BASE_PARITY); @@ -292,7 +280,7 @@ describe.each([ id: baseParity1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); const baseRollup1 = makeProvingJobId(); @@ -300,7 +288,7 @@ describe.each([ id: baseRollup1, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 1, - inputs: makePrivateBaseRollupInputs(), + inputs: makeInputsUri(), }); const baseRollup2 = makeProvingJobId(); @@ -308,7 +296,7 @@ describe.each([ id: baseRollup2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makePrivateBaseRollupInputs(), + inputs: makeInputsUri(), }); const rootParity1 = makeProvingJobId(); @@ -316,7 +304,7 @@ describe.each([ id: rootParity1, type: ProvingRequestType.ROOT_PARITY, blockNumber: 1, - inputs: makeRootParityInputs(), + inputs: makeInputsUri(), }); await getAndAssertNextJobId( @@ -327,13 +315,49 @@ describe.each([ ); }); + it('returns any job if filter is empty', async () => { + const baseParity1 = makeProvingJobId(); + await 
broker.enqueueProvingJob({ + id: baseParity1, + type: ProvingRequestType.BASE_PARITY, + blockNumber: 1, + inputs: makeInputsUri(), + }); + + const baseRollup1 = makeProvingJobId(); + await broker.enqueueProvingJob({ + id: baseRollup1, + type: ProvingRequestType.PRIVATE_BASE_ROLLUP, + blockNumber: 1, + inputs: makeInputsUri(), + }); + + const baseRollup2 = makeProvingJobId(); + await broker.enqueueProvingJob({ + id: baseRollup2, + type: ProvingRequestType.PRIVATE_BASE_ROLLUP, + blockNumber: 2, + inputs: makeInputsUri(), + }); + + const rootParity1 = makeProvingJobId(); + await broker.enqueueProvingJob({ + id: rootParity1, + type: ProvingRequestType.ROOT_PARITY, + blockNumber: 1, + inputs: makeInputsUri(), + }); + + await getAndAssertNextJobId(baseRollup1); + }); + it('returns a new job when reporting progress if current one is cancelled', async () => { const id = makeProvingJobId(); await broker.enqueueProvingJob({ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); await broker.getProvingJob(); await assertJobStatus(id, 'in-progress'); @@ -345,7 +369,7 @@ describe.each([ id: id2, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); await expect( broker.reportProvingJobProgress(id, now(), { allowList: [ProvingRequestType.BASE_PARITY] }), @@ -358,14 +382,14 @@ describe.each([ id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; const job2: V2ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; await broker.enqueueProvingJob(job1); @@ -426,14 +450,14 @@ describe.each([ id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; const job2: V2ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; await broker.enqueueProvingJob(job1); @@ -481,14 +505,14 @@ describe.each([ id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; const job2: V2ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; await broker.enqueueProvingJob(job1); @@ -510,16 +534,7 @@ describe.each([ // after the restart the new broker thinks job1 is available // inform the agent of the job completion - await expect( - broker.reportProvingJobSuccess(job1.id, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }), - ).resolves.toBeUndefined(); + await expect(broker.reportProvingJobSuccess(job1.id, makeOutputsUri())).resolves.toBeUndefined(); await assertJobStatus(job1.id, 'resolved'); // make sure the the broker sends the next job to the agent @@ -536,25 +551,18 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); await broker.enqueueProvingJob({ id: id2, type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); await 
getAndAssertNextJobId(id1); await assertJobStatus(id1, 'in-progress'); - await broker.reportProvingJobSuccess(id1, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }); + await broker.reportProvingJobSuccess(id1, makeOutputsUri()); await assertJobStatus(id1, 'resolved'); await getAndAssertNextJobId(id2); @@ -570,23 +578,16 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); await broker.enqueueProvingJob({ id: id2, type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); - await broker.reportProvingJobSuccess(id1, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }); + await broker.reportProvingJobSuccess(id1, makeOutputsUri()); await assertJobStatus(id1, 'resolved'); await broker.reportProvingJobError(id2, new Error('test error')); @@ -603,14 +604,7 @@ describe.each([ it('ignores job result if unknown job', async () => { const id = makeProvingJobId(); await assertJobStatus(id, 'not-found'); - await broker.reportProvingJobSuccess(id, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }); + await broker.reportProvingJobSuccess(id, makeOutputsUri()); await assertJobStatus(id, 'not-found'); }); }); @@ -630,7 +624,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); await assertJobStatus(id, 'in-queue'); @@ -644,7 +638,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); await assertJobStatus(id, 'in-queue'); @@ -664,7 +658,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); await assertJobStatus(id, 'in-queue'); @@ -698,7 +692,7 @@ describe.each([ id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob); @@ -726,7 +720,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); for (let i = 0; i < maxRetries; i++) { @@ -748,7 +742,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); await getAndAssertNextJobId(id); @@ -773,7 +767,7 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); const id2 = makeProvingJobId(); @@ -781,7 +775,7 @@ describe.each([ id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makePrivateBaseRollupInputs(), + inputs: makeInputsUri(), }); await broker.start(); @@ -794,7 +788,7 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: expect.any(Object), + inputs: expect.any(String), }, time: expect.any(Number), }); @@ -804,7 +798,7 @@ describe.each([ 
id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: expect.any(Object), + inputs: expect.any(String), }, time: expect.any(Number), }); @@ -824,7 +818,7 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); const id2 = makeProvingJobId(); @@ -832,37 +826,22 @@ describe.each([ id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makePrivateBaseRollupInputs(), - }); - - await database.setProvingJobResult(id1, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), + inputs: makeInputsUri(), }); - await database.setProvingJobResult(id2, { - type: ProvingRequestType.PRIVATE_BASE_ROLLUP, - value: makePublicInputsAndRecursiveProof( - makeBaseOrMergeRollupPublicInputs(), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }); + await database.setProvingJobResult(id1, makeOutputsUri()); + await database.setProvingJobResult(id2, makeOutputsUri()); await broker.start(); await expect(broker.getProvingJobStatus(id1)).resolves.toEqual({ status: 'resolved', - value: expect.any(Object), + value: expect.any(String), }); await expect(broker.getProvingJobStatus(id2)).resolves.toEqual({ status: 'resolved', - value: expect.any(Object), + value: expect.any(String), }); }); @@ -873,33 +852,22 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), - }); - await database.setProvingJobResult(id1, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), + inputs: makeInputsUri(), }); + await database.setProvingJobResult(id1, makeOutputsUri()); const id2 = makeProvingJobId(); await database.addProvingJob({ id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makePrivateBaseRollupInputs(), + inputs: makeInputsUri(), }); await broker.start(); - await expect(broker.getProvingJobStatus(id1)).resolves.toEqual({ - status: 'resolved', - value: expect.any(Object), - }); - - await expect(broker.getProvingJobStatus(id2)).resolves.toEqual({ status: 'in-queue' }); + await assertJobStatus(id1, 'resolved'); + await assertJobStatus(id2, 'in-queue'); await getAndAssertNextJobId(id2); }); @@ -910,33 +878,22 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), - }); - await database.setProvingJobResult(id1, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), + inputs: makeInputsUri(), }); + await database.setProvingJobResult(id1, makeOutputsUri()); const id2 = makeProvingJobId(); await database.addProvingJob({ id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makePrivateBaseRollupInputs(), + inputs: makeInputsUri(), }); await broker.start(); - await expect(broker.getProvingJobStatus(id1)).resolves.toEqual({ - status: 'resolved', - value: expect.any(Object), - }); - - await expect(broker.getProvingJobStatus(id2)).resolves.toEqual({ status: 'in-queue' }); + await assertJobStatus(id1, 
'resolved'); + await assertJobStatus(id2, 'in-queue'); jest.spyOn(database, 'deleteProvingJobAndResult'); @@ -948,6 +905,8 @@ describe.each([ await expect(broker.getProvingJobStatus(id1)).resolves.toEqual({ status: 'not-found' }); await expect(broker.getProvingJobStatus(id2)).resolves.toEqual({ status: 'not-found' }); + await assertJobStatus(id1, 'not-found'); + await assertJobStatus(id2, 'not-found'); }); it('saves job when enqueued', async () => { @@ -956,7 +915,7 @@ describe.each([ id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; jest.spyOn(database, 'addProvingJob'); @@ -975,7 +934,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }), ).rejects.toThrow(new Error('db error')); await assertJobStatus(id, 'not-found'); @@ -988,24 +947,15 @@ describe.each([ id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }; jest.spyOn(database, 'setProvingJobResult'); await broker.enqueueProvingJob(job); - const result: V2ProofOutput = { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }; - await broker.reportProvingJobSuccess(job.id, result); - + await broker.reportProvingJobSuccess(job.id, makeOutputsUri()); await assertJobStatus(job.id, 'resolved'); - expect(database.setProvingJobResult).toHaveBeenCalledWith(job.id, result); + expect(database.setProvingJobResult).toHaveBeenCalledWith(job.id, expect.any(String)); }); it('does not retain job result if database fails to save', async () => { @@ -1016,18 +966,9 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); - await expect( - broker.reportProvingJobSuccess(id, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }), - ).rejects.toThrow(new Error('db error')); + await expect(broker.reportProvingJobSuccess(id, makeOutputsUri())).rejects.toThrow(new Error('db error')); await assertJobStatus(id, 'in-queue'); }); @@ -1041,7 +982,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); const error = new Error('test error'); @@ -1058,7 +999,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeBaseParityInputs(), + inputs: makeInputsUri(), }); await expect(broker.reportProvingJobError(id, new Error())).rejects.toThrow(new Error('db error')); await assertJobStatus(id, 'in-queue'); @@ -1071,14 +1012,7 @@ describe.each([ jest.spyOn(database, 'setProvingJobResult'); jest.spyOn(database, 'addProvingJob'); - await broker.reportProvingJobSuccess(id, { - type: ProvingRequestType.BASE_PARITY, - value: makePublicInputsAndRecursiveProof( - makeParityPublicInputs(RECURSIVE_PROOF_LENGTH), - makeRecursiveProof(RECURSIVE_PROOF_LENGTH), - VerificationKeyData.makeFake(), - ), - }); + await broker.reportProvingJobSuccess(id, makeOutputsUri()); expect(database.setProvingJobResult).not.toHaveBeenCalled(); expect(database.addProvingJob).not.toHaveBeenCalled(); @@ 
-1103,7 +1037,7 @@ describe.each([ } async function getAndAssertNextJobId(id: V2ProvingJobId, ...allowList: ProvingRequestType[]) { - await expect(broker.getProvingJob(allowList.length > 0 ? { allowList } : undefined)).resolves.toEqual( + await expect(broker.getProvingJob({ allowList })).resolves.toEqual( expect.objectContaining({ job: expect.objectContaining({ id }) }), ); } @@ -1112,3 +1046,11 @@ describe.each([ function makeProvingJobId(): V2ProvingJobId { return randomBytes(8).toString('hex') as V2ProvingJobId; } + +function makeInputsUri(): V2ProofInputUri { + return randomBytes(8).toString('hex') as V2ProofInputUri; +} + +function makeOutputsUri(): V2ProofOutputUri { + return randomBytes(8).toString('hex') as V2ProofOutputUri; +} diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker.ts b/yarn-project/prover-client/src/proving_broker/proving_broker.ts index 2fe40eac234..017d4b6ae48 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker.ts @@ -1,6 +1,6 @@ import { ProvingRequestType, - type V2ProofOutput, + type V2ProofOutputUri, type V2ProvingJob, type V2ProvingJobId, type V2ProvingJobResult, @@ -73,7 +73,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { public constructor( private database: ProvingJobDatabase, { jobTimeoutSec = 30, timeoutIntervalSec = 10, maxRetries = 3 }: ProofRequestBrokerConfig = {}, - private logger = createDebugLogger('aztec:prover-client:proof-request-broker'), + private logger = createDebugLogger('aztec:prover-client:proving-broker'), ) { this.timeoutPromise = new RunningPromise(this.timeoutCheck, timeoutIntervalSec * 1000); this.jobTimeoutSec = jobTimeoutSec; @@ -147,9 +147,10 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { async getProvingJob( filter: ProvingJobFilter = {}, ): Promise<{ job: V2ProvingJob; time: number } | undefined> { - const allowedProofs: ProvingRequestType[] = filter.allowList - ? [...filter.allowList] - : Object.values(ProvingRequestType).filter((x): x is ProvingRequestType => typeof x === 'number'); + const allowedProofs: ProvingRequestType[] = + Array.isArray(filter.allowList) && filter.allowList.length > 0 + ? [...filter.allowList] + : Object.values(ProvingRequestType).filter((x): x is ProvingRequestType => typeof x === 'number'); allowedProofs.sort(proofTypeComparator); for (const proofType of allowedProofs) { @@ -255,7 +256,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { } } - async reportProvingJobSuccess(id: V2ProvingJobId, value: V2ProofOutput): Promise { + async reportProvingJobSuccess(id: V2ProvingJobId, value: V2ProofOutputUri): Promise { const info = this.inProgress.get(id); const item = this.jobsCache.get(id); const retries = this.retries.get(id) ?? 
0; diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker_interface.ts b/yarn-project/prover-client/src/proving_broker/proving_broker_interface.ts index 493cab538a5..e2b64170cbc 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker_interface.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker_interface.ts @@ -1,6 +1,6 @@ import { type ProvingRequestType, - type V2ProofOutput, + type V2ProofOutputUri, type V2ProvingJob, type V2ProvingJobId, type V2ProvingJobStatus, @@ -50,7 +50,7 @@ export interface ProvingJobConsumer { * @param id - The ID of the job to report success for * @param result - The result of the job */ - reportProvingJobSuccess(id: V2ProvingJobId, result: V2ProofOutput): Promise; + reportProvingJobSuccess(id: V2ProvingJobId, result: V2ProofOutputUri): Promise; /** * Marks a proving job as errored diff --git a/yarn-project/prover-client/src/proving_broker/proving_job_controller.test.ts b/yarn-project/prover-client/src/proving_broker/proving_job_controller.test.ts index 724d1d4606f..f6e1649e50c 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_job_controller.test.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_job_controller.test.ts @@ -17,11 +17,10 @@ describe('ProvingJobController', () => { prover = new MockProver(); onComplete = jest.fn(); controller = new ProvingJobController( + '1' as V2ProvingJobId, { type: ProvingRequestType.BASE_PARITY, - blockNumber: 1, - id: '1' as V2ProvingJobId, - inputs: makeBaseParityInputs(), + value: makeBaseParityInputs(), }, 0, prover, @@ -54,7 +53,7 @@ describe('ProvingJobController', () => { controller.start(); await sleep(1); // give promises a chance to complete - expect(onComplete).toHaveBeenCalledWith(undefined, { + expect(onComplete).toHaveBeenCalledWith('1', undefined, { type: ProvingRequestType.BASE_PARITY, value: resp, }); @@ -66,7 +65,7 @@ describe('ProvingJobController', () => { controller.start(); await sleep(1); - expect(onComplete).toHaveBeenCalledWith(err, undefined); + expect(onComplete).toHaveBeenCalledWith('1', err, undefined); }); it('does not crash if onComplete throws', async () => { diff --git a/yarn-project/prover-client/src/proving_broker/proving_job_controller.ts b/yarn-project/prover-client/src/proving_broker/proving_job_controller.ts index 53d18b476a0..6210766896d 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_job_controller.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_job_controller.ts @@ -1,8 +1,8 @@ import { ProvingRequestType, type ServerCircuitProver, + type V2ProofInput, type V2ProofOutput, - type V2ProvingJob, type V2ProvingJobId, } from '@aztec/circuit-types'; @@ -13,6 +13,8 @@ export enum ProvingJobStatus { } type ProvingJobCompletionCallback = ( + jobId: V2ProvingJobId, + type: ProvingRequestType, error: Error | undefined, result: V2ProofOutput | undefined, ) => void | Promise; @@ -23,7 +25,8 @@ export class ProvingJobController { private abortController = new AbortController(); constructor( - private job: V2ProvingJob, + private jobId: V2ProvingJobId, + private inputs: V2ProofInput, private startedAt: number, private circuitProver: ServerCircuitProver, private onComplete: ProvingJobCompletionCallback, @@ -39,7 +42,7 @@ export class ProvingJobController { .then( result => { this.status = ProvingJobStatus.DONE; - return this.onComplete(undefined, result); + return this.onComplete(this.jobId, this.inputs.type, undefined, result); }, error => { this.status = ProvingJobStatus.DONE; @@ 
-47,7 +50,7 @@ export class ProvingJobController { // Ignore abort errors return; } - return this.onComplete(error, undefined); + return this.onComplete(this.jobId, this.inputs.type, error, undefined); }, ) .catch(_ => { @@ -68,15 +71,19 @@ export class ProvingJobController { } public getJobId(): V2ProvingJobId { - return this.job.id; + return this.jobId; } public getStartedAt(): number { return this.startedAt; } + public getProofTypeName(): string { + return ProvingRequestType[this.inputs.type]; + } + private async generateProof(): Promise { - const { type, inputs } = this.job; + const { type, value: inputs } = this.inputs; const signal = this.abortController.signal; switch (type) { case ProvingRequestType.PUBLIC_VM: { diff --git a/yarn-project/prover-client/src/proving_broker/proving_job_database.ts b/yarn-project/prover-client/src/proving_broker/proving_job_database.ts index 99cae7147ac..222aed5741c 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_job_database.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_job_database.ts @@ -1,5 +1,5 @@ import { - type V2ProofOutput, + type V2ProofOutputUri, type V2ProvingJob, type V2ProvingJobId, type V2ProvingJobResult, @@ -32,7 +32,7 @@ export interface ProvingJobDatabase { * @param ProvingRequestType - The type of proof that was requested * @param value - The result of the proof request */ - setProvingJobResult(id: V2ProvingJobId, value: V2ProofOutput): Promise; + setProvingJobResult(id: V2ProvingJobId, value: V2ProofOutputUri): Promise; /** * Saves an error that occurred while processing a proof request diff --git a/yarn-project/prover-client/src/proving_broker/proving_job_database/memory.ts b/yarn-project/prover-client/src/proving_broker/proving_job_database/memory.ts index 19acfaf88e7..5eed7cdc6ca 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_job_database/memory.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_job_database/memory.ts @@ -1,4 +1,4 @@ -import type { V2ProofOutput, V2ProvingJob, V2ProvingJobId, V2ProvingJobResult } from '@aztec/circuit-types'; +import type { V2ProofOutputUri, V2ProvingJob, V2ProvingJobId, V2ProvingJobResult } from '@aztec/circuit-types'; import { type ProvingJobDatabase } from '../proving_job_database.js'; @@ -19,7 +19,7 @@ export class InMemoryDatabase implements ProvingJobDatabase { return Promise.resolve(); } - setProvingJobResult(id: V2ProvingJobId, value: V2ProofOutput): Promise { + setProvingJobResult(id: V2ProvingJobId, value: V2ProofOutputUri): Promise { this.results.set(id, { value }); return Promise.resolve(); } diff --git a/yarn-project/prover-client/src/proving_broker/proving_job_database/persisted.ts b/yarn-project/prover-client/src/proving_broker/proving_job_database/persisted.ts index c03684b1bf3..5fe5c092cc9 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_job_database/persisted.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_job_database/persisted.ts @@ -1,4 +1,4 @@ -import { type V2ProofOutput, V2ProvingJob, type V2ProvingJobId, V2ProvingJobResult } from '@aztec/circuit-types'; +import { type V2ProofOutputUri, V2ProvingJob, type V2ProvingJobId, V2ProvingJobResult } from '@aztec/circuit-types'; import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc'; import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; @@ -38,7 +38,7 @@ export class PersistedProvingJobDatabase implements ProvingJobDatabase { await this.jobResults.set(id, jsonStringify(res)); } - async 
setProvingJobResult(id: V2ProvingJobId, value: V2ProofOutput): Promise { + async setProvingJobResult(id: V2ProvingJobId, value: V2ProofOutputUri): Promise { const res: V2ProvingJobResult = { value }; await this.jobResults.set(id, jsonStringify(res)); } From 25821747c391d0e6f732971205db1c74fe1b9f25 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 19 Nov 2024 10:24:53 +0000 Subject: [PATCH 2/9] feat: integrate new proving broker --- .../files/config/setup-service-addresses.sh | 9 + .../aztec-network/templates/prover-agent.yaml | 14 +- .../templates/prover-broker.yaml | 81 +++++ .../aztec-network/templates/prover-node.yaml | 7 + spartan/aztec-network/values.yaml | 10 + .../aztec-node/src/aztec-node/server.ts | 1 + .../aztec/src/cli/aztec_start_options.ts | 46 ++- yarn-project/aztec/src/cli/cli.ts | 5 +- .../aztec/src/cli/cmds/start_prover_agent.ts | 64 ++-- .../aztec/src/cli/cmds/start_prover_broker.ts | 30 ++ .../aztec/src/cli/cmds/start_prover_node.ts | 15 +- yarn-project/bb-prover/src/config.ts | 2 + .../circuit-types/src/interfaces/index.ts | 1 + .../src/interfaces/prover-agent.ts | 56 ++++ .../src/interfaces/prover-broker.ts | 131 ++++++++ .../src/interfaces/prover-client.ts | 99 +++--- .../src/interfaces/proving-job-source.test.ts | 12 +- .../src/interfaces/proving-job-source.ts | 21 +- .../src/interfaces/proving-job.ts | 254 ++++----------- .../src/benchmarks/bench_prover.test.ts | 6 +- .../src/e2e_prover/e2e_prover_test.ts | 2 +- yarn-project/end-to-end/src/fixtures/utils.ts | 2 +- yarn-project/end-to-end/webpack.config.js | 2 + yarn-project/foundation/src/config/env_var.ts | 11 +- yarn-project/foundation/src/config/index.ts | 2 +- yarn-project/prover-client/package.json | 4 +- yarn-project/prover-client/src/config.ts | 36 ++- yarn-project/prover-client/src/index.ts | 1 + .../prover-client/src/mocks/test_context.ts | 4 +- .../src/orchestrator/block-proving-state.ts | 2 +- .../src/orchestrator/orchestrator.ts | 2 +- .../prover-agent/memory-proving-queue.test.ts | 36 ++- .../src/prover-agent/memory-proving-queue.ts | 87 +++--- .../src/prover-agent/prover-agent.ts | 36 +-- .../caching_broker_facade.test.ts | 111 +++++++ .../proving_broker/caching_broker_facade.ts | 295 ++++++++++++++++++ .../src/proving_broker/factory.ts | 21 ++ .../prover-client/src/proving_broker/index.ts | 8 + .../proof_input_output_database.ts | 100 ------ .../src/proving_broker/proof_store.ts | 103 ++++++ .../src/proving_broker/prover_cache/memory.ts | 16 + .../src/proving_broker/proving_agent.test.ts | 53 ++-- .../src/proving_broker/proving_agent.ts | 55 ++-- .../src/proving_broker/proving_broker.test.ts | 250 ++++++++------- .../src/proving_broker/proving_broker.ts | 152 +++++---- ...database.ts => proving_broker_database.ts} | 19 +- .../proving_broker_database/memory.ts | 43 +++ .../proving_broker_database/persisted.ts | 45 +++ .../proving_broker_interface.ts | 74 ----- .../proving_job_controller.test.ts | 55 +++- .../proving_broker/proving_job_controller.ts | 93 +++--- .../proving_job_database/memory.ts | 43 --- .../proving_job_database/persisted.ts | 45 --- .../prover-client/src/proving_broker/rpc.ts | 65 ++++ .../prover-client/src/test/mock_prover.ts | 51 +++ .../prover-client/src/tx-prover/factory.ts | 9 +- .../prover-client/src/tx-prover/tx-prover.ts | 123 +++++--- yarn-project/prover-node/src/config.ts | 27 +- yarn-project/prover-node/src/factory.ts | 15 +- .../src/prover-cache/cache_manager.ts | 64 ++++ .../prover-node/src/prover-cache/kv_cache.ts | 23 ++ .../prover-node/src/prover-node.test.ts | 
8 +- yarn-project/prover-node/src/prover-node.ts | 21 +- .../pxe/src/pxe_service/pxe_service.ts | 3 +- yarn-project/yarn.lock | 1 + 65 files changed, 2041 insertions(+), 1041 deletions(-) create mode 100644 spartan/aztec-network/templates/prover-broker.yaml create mode 100644 yarn-project/aztec/src/cli/cmds/start_prover_broker.ts create mode 100644 yarn-project/circuit-types/src/interfaces/prover-broker.ts create mode 100644 yarn-project/prover-client/src/proving_broker/caching_broker_facade.test.ts create mode 100644 yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts create mode 100644 yarn-project/prover-client/src/proving_broker/factory.ts create mode 100644 yarn-project/prover-client/src/proving_broker/index.ts delete mode 100644 yarn-project/prover-client/src/proving_broker/proof_input_output_database.ts create mode 100644 yarn-project/prover-client/src/proving_broker/proof_store.ts create mode 100644 yarn-project/prover-client/src/proving_broker/prover_cache/memory.ts rename yarn-project/prover-client/src/proving_broker/{proving_job_database.ts => proving_broker_database.ts} (63%) create mode 100644 yarn-project/prover-client/src/proving_broker/proving_broker_database/memory.ts create mode 100644 yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts delete mode 100644 yarn-project/prover-client/src/proving_broker/proving_broker_interface.ts delete mode 100644 yarn-project/prover-client/src/proving_broker/proving_job_database/memory.ts delete mode 100644 yarn-project/prover-client/src/proving_broker/proving_job_database/persisted.ts create mode 100644 yarn-project/prover-client/src/proving_broker/rpc.ts create mode 100644 yarn-project/prover-node/src/prover-cache/cache_manager.ts create mode 100644 yarn-project/prover-node/src/prover-cache/kv_cache.ts diff --git a/spartan/aztec-network/files/config/setup-service-addresses.sh b/spartan/aztec-network/files/config/setup-service-addresses.sh index 4594b7a7740..5ca3bb5a248 100644 --- a/spartan/aztec-network/files/config/setup-service-addresses.sh +++ b/spartan/aztec-network/files/config/setup-service-addresses.sh @@ -79,10 +79,19 @@ else PROVER_NODE_ADDR="http://${SERVICE_NAME}-prover-node.${NAMESPACE}:${PROVER_NODE_PORT}" fi +if [ "${PROVER_BROKER_EXTERNAL_HOST}" != "" ]; then + PROVER_BROKER_ADDR="${PROVER_BROKER_EXTERNAL_HOST}" +elif [ "${NETWORK_PUBLIC}" = "true" ]; then + PROVER_BROKER_ADDR=$(get_service_address "prover-broker" "${PROVER_BROKER_PORT}") +else + PROVER_BROKER_ADDR="http://${SERVICE_NAME}-prover-broker.${NAMESPACE}:${PROVER_BROKER_PORT}" +fi + # Write addresses to file for sourcing echo "export ETHEREUM_HOST=${ETHEREUM_ADDR}" >> /shared/config/service-addresses echo "export BOOT_NODE_HOST=${BOOT_NODE_ADDR}" >> /shared/config/service-addresses echo "export PROVER_NODE_HOST=${PROVER_NODE_ADDR}" >> /shared/config/service-addresses +echo "export PROVER_BROKER_HOST=${PROVER_BROKER_ADDR}" >> /shared/config/service-addresses echo "Addresses configured:" cat /shared/config/service-addresses diff --git a/spartan/aztec-network/templates/prover-agent.yaml b/spartan/aztec-network/templates/prover-agent.yaml index 34f9648f3ba..8d56eea21ad 100644 --- a/spartan/aztec-network/templates/prover-agent.yaml +++ b/spartan/aztec-network/templates/prover-agent.yaml @@ -50,11 +50,11 @@ spec: - -c - | source /shared/config/service-addresses - until curl -s -X POST ${PROVER_NODE_HOST}/status; do - echo "Waiting for Prover node ${PROVER_NODE_HOST} ..." 
+ until curl -s -X POST ${PROVER_BROKER_HOST}/status; do + echo "Waiting for broker ${PROVER_BROKER_HOST} ..." sleep 5 done - echo "Prover node is ready!" + echo "Broker is ready!" {{- if .Values.telemetry.enabled }} until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do echo "Waiting for OpenTelemetry collector..." @@ -77,8 +77,8 @@ spec: - "-c" - | source /shared/config/service-addresses && \ - PROVER_JOB_SOURCE_URL=${PROVER_NODE_HOST} \ - node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --prover + PROVER_BROKER_URL=${PROVER_NODE_HOST} \ + node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --prover-agent env: - name: AZTEC_PORT value: "{{ .Values.proverAgent.service.nodePort }}" @@ -90,9 +90,7 @@ spec: value: "{{ .Values.proverAgent.debug }}" - name: PROVER_REAL_PROOFS value: "{{ .Values.proverAgent.realProofs }}" - - name: PROVER_AGENT_ENABLED - value: "true" - - name: PROVER_AGENT_CONCURRENCY + - name: PROVER_AGENT_COUNT value: {{ .Values.proverAgent.concurrency | quote }} - name: HARDWARE_CONCURRENCY value: {{ .Values.proverAgent.bb.hardwareConcurrency | quote }} diff --git a/spartan/aztec-network/templates/prover-broker.yaml b/spartan/aztec-network/templates/prover-broker.yaml new file mode 100644 index 00000000000..e23c213d553 --- /dev/null +++ b/spartan/aztec-network/templates/prover-broker.yaml @@ -0,0 +1,81 @@ +{{- if .Values.proverBroker.enabled }} +apiVersion: apps/v1 +kind: ReplicaSet +metadata: + name: {{ include "aztec-network.fullname" . }}-prover-broker + labels: + {{- include "aztec-network.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.proverBroker.replicas }} + selector: + matchLabels: + {{- include "aztec-network.selectorLabels" . | nindent 6 }} + app: prover-broker + template: + metadata: + labels: + {{- include "aztec-network.selectorLabels" . | nindent 8 }} + app: prover-broker + spec: + serviceAccountName: {{ include "aztec-network.fullname" . }}-node + {{- if .Values.network.public }} + hostNetwork: true + {{- end }} + volumes: + - name: config + emptyDir: {} + - name: scripts + configMap: + name: {{ include "aztec-network.fullname" . }}-scripts + initContainers: + {{- include "aztec-network.serviceAddressSetupContainer" . | nindent 8 }} + - name: wait-for-prover-node + image: {{ .Values.images.aztec.image }} + command: + - /bin/bash + - -c + - | + source /shared/config/service-addresses + {{- if .Values.telemetry.enabled }} + until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do + echo "Waiting for OpenTelemetry collector..." + sleep 5 + done + echo "OpenTelemetry collector is ready!" 
+ {{- end }} + volumeMounts: + - name: config + mountPath: /shared/config + containers: + - name: prover-broker + image: "{{ .Values.images.aztec.image }}" + imagePullPolicy: {{ .Values.images.aztec.pullPolicy }} + volumeMounts: + - name: config + mountPath: /shared/config + command: + - "/bin/bash" + - "-c" + - | + source /shared/config/service-addresses && \ + node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --prover-broker + env: + - name: AZTEC_PORT + value: "{{ .Values.proverBroker.service.nodePort }}" + - name: LOG_LEVEL + value: "{{ .Values.proverBroker.logLevel }}" + - name: LOG_JSON + value: "1" + - name: DEBUG + value: "{{ .Values.proverBroker.debug }}" + - name: OTEL_RESOURCE_ATTRIBUTES + value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} + - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT + value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} + - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} + - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT + value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} + resources: + {{- toYaml .Values.proverBroker.resources | nindent 12 }} +{{- end }} diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index 6b7506149a2..923644d7b73 100644 --- a/spartan/aztec-network/templates/prover-node.yaml +++ b/spartan/aztec-network/templates/prover-node.yaml @@ -39,6 +39,13 @@ spec: sleep 5 done echo "Ethereum node is ready!" + + until curl -s -X POST ${PROVER_BROKER_HOST}/status; do + echo "Waiting for broker ${PROVER_BROKER_HOST} ..." + sleep 5 + done + echo "Broker is ready!" + {{- if .Values.telemetry.enabled }} until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do echo "Waiting for OpenTelemetry collector..." diff --git a/spartan/aztec-network/values.yaml b/spartan/aztec-network/values.yaml index 0be51cd0d26..3bd3812d466 100644 --- a/spartan/aztec-network/values.yaml +++ b/spartan/aztec-network/values.yaml @@ -217,6 +217,16 @@ proverAgent: nodeSelector: {} resources: {} +proverBroker: + service: + nodePort: 8084 + enabled: true + replicas: 1 + logLevel: "debug" + debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" + nodeSelector: {} + resources: {} + jobs: deployL1Verifier: enable: false diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 936e134eeb9..a2c686cdd6c 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -165,6 +165,7 @@ export class AztecNodeService implements AztecNode { // now create the merkle trees and the world state synchronizer const worldStateSynchronizer = await createWorldStateSynchronizer(config, archiver, telemetry); const proofVerifier = config.realProofs ? await BBCircuitVerifier.new(config) : new TestCircuitVerifier(); + log.info(`Aztec node accepting ${config.realProofs ? 
'real' : 'test'} proofs`); // create the tx pool and the p2p client, which will need the l2 block source const p2pClient = await createP2PClient(config, archiver, proofVerifier, worldStateSynchronizer, telemetry); diff --git a/yarn-project/aztec/src/cli/aztec_start_options.ts b/yarn-project/aztec/src/cli/aztec_start_options.ts index 53b0ab01949..90b0a970092 100644 --- a/yarn-project/aztec/src/cli/aztec_start_options.ts +++ b/yarn-project/aztec/src/cli/aztec_start_options.ts @@ -1,16 +1,21 @@ import { type ArchiverConfig, archiverConfigMappings } from '@aztec/archiver'; import { sequencerClientConfigMappings } from '@aztec/aztec-node'; import { botConfigMappings } from '@aztec/bot'; +import { + type ProverAgentConfig, + type ProverBrokerConfig, + proverAgentConfigMappings, + proverBrokerConfigMappings, +} from '@aztec/circuit-types'; import { type ConfigMapping, type EnvVar, booleanConfigHelper, - filterConfigMappings, isBooleanConfigValue, + omitConfigMappings, } from '@aztec/foundation/config'; import { bootnodeConfigMappings, p2pConfigMappings } from '@aztec/p2p'; import { proofVerifierConfigMappings } from '@aztec/proof-verifier'; -import { proverClientConfigMappings } from '@aztec/prover-client'; import { proverNodeConfigMappings } from '@aztec/prover-node'; import { allPxeConfigMappings } from '@aztec/pxe'; import { telemetryClientConfigMappings } from '@aztec/telemetry-client/start'; @@ -239,15 +244,6 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { }, ...getOptions('sequencer', sequencerClientConfigMappings), ], - 'PROVER AGENT': [ - { - flag: '--prover', - description: 'Starts Aztec Prover Agent with options', - defaultValue: undefined, - envVar: undefined, - }, - ...getOptions('prover', proverClientConfigMappings), - ], 'PROVER NODE': [ { flag: '--prover-node', @@ -263,10 +259,36 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { }, ...getOptions( 'proverNode', + omitConfigMappings(proverNodeConfigMappings, [ + // filter out options passed separately + ...(Object.keys(archiverConfigMappings) as (keyof ArchiverConfig)[]), + ...(Object.keys(proverBrokerConfigMappings) as (keyof ProverBrokerConfig)[]), + ...(Object.keys(proverAgentConfigMappings) as (keyof ProverAgentConfig)[]), + ]), + ), + ], + 'PROVER BROKER': [ + { + flag: '--prover-broker', + description: 'Starts Aztec proving job broker', + defaultValue: undefined, + envVar: undefined, + }, + ...getOptions( + 'proverBroker', // filter out archiver options from prover node options as they're passed separately in --archiver - filterConfigMappings(proverNodeConfigMappings, Object.keys(archiverConfigMappings) as (keyof ArchiverConfig)[]), + proverBrokerConfigMappings, ), ], + 'PROVER AGENT': [ + { + flag: '--prover-agent', + description: 'Starts Aztec Prover Agent with options', + defaultValue: undefined, + envVar: undefined, + }, + ...getOptions('proverAgent', proverAgentConfigMappings), + ], 'P2P BOOTSTRAP': [ { flag: '--p2p-bootstrap', diff --git a/yarn-project/aztec/src/cli/cli.ts b/yarn-project/aztec/src/cli/cli.ts index 57650fede28..91d803851e4 100644 --- a/yarn-project/aztec/src/cli/cli.ts +++ b/yarn-project/aztec/src/cli/cli.ts @@ -96,9 +96,12 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge } else if (options.p2pBootstrap) { const { startP2PBootstrap } = await import('./cmds/start_p2p_bootstrap.js'); await startP2PBootstrap(options, userLog, debugLogger); - } else if (options.prover) { + } else if (options.proverAgent) { const { 
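For context on the `omitConfigMappings` call used above when registering the prover-node options: it is the renamed `filterConfigMappings` helper, and its job is simply to drop keys whose options are registered separately (archiver, broker, agent) so the same flag is not added twice to the command. A minimal sketch of the idea, assuming the generic shape of `ConfigMappingsType`; this is illustrative only, not the actual `@aztec/foundation/config` implementation:

import { type ConfigMappingsType } from '@aztec/foundation/config';

// Illustrative only: drop the given keys from a set of config mappings so the
// corresponding CLI flags / env vars are not registered twice on one command.
function omitConfigMappingsSketch<T, K extends keyof T>(
  mappings: ConfigMappingsType<T>,
  keysToOmit: K[],
): ConfigMappingsType<Omit<T, K>> {
  const omitted = new Set<keyof T>(keysToOmit);
  return Object.fromEntries(
    Object.entries(mappings).filter(([key]) => !omitted.has(key as keyof T)),
  ) as ConfigMappingsType<Omit<T, K>>;
}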
startProverAgent } = await import('./cmds/start_prover_agent.js'); await startProverAgent(options, signalHandlers, services, userLog); + } else if (options.proverBroker) { + const { startProverBroker } = await import('./cmds/start_prover_broker.js'); + await startProverBroker(options, signalHandlers, services, userLog); } else if (options.txe) { const { startTXE } = await import('./cmds/start_txe.js'); await startTXE(options, debugLogger); diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts b/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts index 2cbad090b25..7e27676b1e0 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts @@ -1,14 +1,11 @@ -import { BBNativeRollupProver, TestCircuitProver } from '@aztec/bb-prover'; -import { ProverAgentApiSchema, type ServerCircuitProver } from '@aztec/circuit-types'; +import { type ProverAgentConfig, proverAgentConfigMappings } from '@aztec/circuit-types'; +import { times } from '@aztec/foundation/collection'; import { type NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; import { type LogFn } from '@aztec/foundation/log'; -import { type ProverClientConfig, proverClientConfigMappings } from '@aztec/prover-client'; -import { ProverAgent, createProvingJobSourceClient } from '@aztec/prover-client/prover-agent'; -import { - type TelemetryClientConfig, - createAndStartTelemetryClient, - telemetryClientConfigMappings, -} from '@aztec/telemetry-client/start'; +import { buildServerCircuitProver } from '@aztec/prover-client'; +import { InlineProofStore, ProvingAgent, createProvingJobBrokerClient } from '@aztec/prover-client/broker'; +import { getProverNodeAgentConfigFromEnv } from '@aztec/prover-node'; +import { createAndStartTelemetryClient, telemetryClientConfigMappings } from '@aztec/telemetry-client/start'; import { extractRelevantOptions } from '../util.js'; @@ -16,36 +13,39 @@ export async function startProverAgent( options: any, signalHandlers: (() => Promise)[], services: NamespacedApiHandlers, - logger: LogFn, + userLog: LogFn, ) { - const proverConfig = extractRelevantOptions(options, proverClientConfigMappings, 'prover'); - const proverJobSourceUrl = proverConfig.proverJobSourceUrl ?? 
proverConfig.nodeUrl; - if (!proverJobSourceUrl) { - throw new Error('Starting prover without PROVER_JOB_SOURCE_URL is not supported'); + if (options.node || options.sequencer || options.pxe || options.p2pBootstrap || options.txe) { + userLog(`Starting a prover agent with --node, --sequencer, --pxe, --p2p-bootstrap, or --txe is not supported.`); + process.exit(1); } - logger(`Connecting to prover at ${proverJobSourceUrl}`); - const source = createProvingJobSourceClient(proverJobSourceUrl); + const config = { + ...getProverNodeAgentConfigFromEnv(), // get default config from env + ...extractRelevantOptions(options, proverAgentConfigMappings, 'proverAgent'), // override with command line options + }; - const telemetryConfig = extractRelevantOptions(options, telemetryClientConfigMappings, 'tel'); - const telemetry = await createAndStartTelemetryClient(telemetryConfig); + if (config.realProofs && (!config.bbBinaryPath || config.acvmBinaryPath)) { + process.exit(1); + } - let circuitProver: ServerCircuitProver; - if (proverConfig.realProofs) { - if (!proverConfig.acvmBinaryPath || !proverConfig.bbBinaryPath) { - throw new Error('Cannot start prover without simulation or native prover options'); - } - circuitProver = await BBNativeRollupProver.new(proverConfig, telemetry); - } else { - circuitProver = new TestCircuitProver(telemetry, undefined, proverConfig); + if (!config.proverBrokerUrl) { + process.exit(1); } - const { proverAgentConcurrency, proverAgentPollInterval } = proverConfig; - const agent = new ProverAgent(circuitProver, proverAgentConcurrency, proverAgentPollInterval); - agent.start(source); + const broker = createProvingJobBrokerClient(config.proverBrokerUrl); + + const telemetry = await createAndStartTelemetryClient( + extractRelevantOptions(options, telemetryClientConfigMappings, 'tel'), + ); + const prover = await buildServerCircuitProver(config, telemetry); + const proofStore = new InlineProofStore(); + const agents = times(config.proverAgentCount, () => new ProvingAgent(broker, proofStore, prover)); - logger(`Started prover agent with concurrency limit of ${proverAgentConcurrency}`); + await Promise.all(agents.map(agent => agent.start())); - services.prover = [agent, ProverAgentApiSchema]; - signalHandlers.push(() => agent.stop()); + signalHandlers.push(async () => { + await Promise.all(agents.map(agent => agent.stop())); + await telemetry.stop(); + }); } diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts new file mode 100644 index 00000000000..26535a7b10a --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts @@ -0,0 +1,30 @@ +import { type ProverBrokerConfig, proverBrokerConfigMappings } from '@aztec/circuit-types'; +import { type NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; +import { type LogFn } from '@aztec/foundation/log'; +import { ProvingJobBrokerSchema, createAndStartProvingBroker } from '@aztec/prover-client/broker'; +import { getProverNodeBrokerConfigFromEnv } from '@aztec/prover-node'; + +import { extractRelevantOptions } from '../util.js'; + +export async function startProverBroker( + options: any, + signalHandlers: (() => Promise)[], + services: NamespacedApiHandlers, + userLog: LogFn, +) { + if (options.node || options.sequencer || options.pxe || options.p2pBootstrap || options.txe) { + userLog(`Starting a prover broker with --node, --sequencer, --pxe, --p2p-bootstrap, or --txe is not supported.`); + process.exit(1); + } + + const config: 
ProverBrokerConfig = { + ...getProverNodeBrokerConfigFromEnv(), // get default config from env + ...extractRelevantOptions(options, proverBrokerConfigMappings, 'proverBroker'), // override with command line options + }; + + const broker = await createAndStartProvingBroker(config); + services.proverBroker = [broker, ProvingJobBrokerSchema]; + signalHandlers.push(() => broker.stop()); + + await broker.start(); +} diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts index 031298e6890..44aa13eee93 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts @@ -1,7 +1,8 @@ -import { ProverNodeApiSchema, ProvingJobSourceSchema, createAztecNodeClient } from '@aztec/circuit-types'; +import { ProverNodeApiSchema, createAztecNodeClient } from '@aztec/circuit-types'; import { NULL_KEY } from '@aztec/ethereum'; import { type NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; import { type LogFn } from '@aztec/foundation/log'; +import { ProvingJobConsumerSchema, createProvingJobBrokerClient } from '@aztec/prover-client/broker'; import { type ProverNodeConfig, createProverNode, @@ -37,10 +38,10 @@ export async function startProverNode( if (options.prover || options.proverAgentEnabled) { userLog(`Running prover node with local prover agent.`); - proverConfig.proverAgentEnabled = true; + proverConfig.proverAgentCount = 1; } else { userLog(`Running prover node without local prover agent. Connect one or more prover agents to this node.`); - proverConfig.proverAgentEnabled = false; + proverConfig.proverAgentCount = 0; } if (!proverConfig.publisherPrivateKey || proverConfig.publisherPrivateKey === NULL_KEY) { @@ -67,12 +68,14 @@ export async function startProverNode( const telemetry = await createAndStartTelemetryClient( extractRelevantOptions(options, telemetryClientConfigMappings, 'tel'), ); - const proverNode = await createProverNode(proverConfig, { telemetry }); + + const broker = proverConfig.proverBrokerUrl ? 
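Taken together, the `--prover-broker`, `--prover-node` and `--prover-agent` commands above describe a three-process topology: one broker owns the job queue, the prover node enqueues work, and agents pull jobs and prove them. A rough programmatic sketch of the same wiring, using only the imports that appear in the CLI commands; the URL is a placeholder and telemetry is stubbed with the no-op client:

import { buildServerCircuitProver } from '@aztec/prover-client';
import {
  InlineProofStore,
  ProvingAgent,
  createAndStartProvingBroker,
  createProvingJobBrokerClient,
} from '@aztec/prover-client/broker';
import { getProverNodeAgentConfigFromEnv, getProverNodeBrokerConfigFromEnv } from '@aztec/prover-node';
import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';

async function wireUpProvingSketch() {
  // 1. One broker process owns the queue (normally `aztec start --prover-broker`).
  const broker = await createAndStartProvingBroker(getProverNodeBrokerConfigFromEnv());

  // 2. Agents connect over JSON-RPC (normally `aztec start --prover-agent`),
  //    pull jobs from the broker, prove them, and report results back.
  const agentConfig = getProverNodeAgentConfigFromEnv();
  const client = createProvingJobBrokerClient('http://prover-broker:8084'); // placeholder URL
  const prover = await buildServerCircuitProver(agentConfig, new NoopTelemetryClient());
  const agent = new ProvingAgent(client, new InlineProofStore(), prover);
  await agent.start();

  // 3. The prover node (`--prover-node`) talks to the same broker through an
  //    identical client, enqueueing jobs instead of consuming them.
  return { broker, agent };
}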
createProvingJobBrokerClient(proverConfig.proverBrokerUrl) : undefined; + const proverNode = await createProverNode(proverConfig, { telemetry, broker }); services.proverNode = [proverNode, ProverNodeApiSchema]; - if (!options.prover) { - services.provingJobSource = [proverNode.getProver().getProvingJobSource(), ProvingJobSourceSchema]; + if (!proverConfig.proverBrokerUrl) { + services.provingJobSource = [proverNode.getProver().getProvingJobSource(), ProvingJobConsumerSchema]; } signalHandlers.push(proverNode.stop.bind(proverNode)); diff --git a/yarn-project/bb-prover/src/config.ts b/yarn-project/bb-prover/src/config.ts index 7b58a67fd92..3e8002fb89a 100644 --- a/yarn-project/bb-prover/src/config.ts +++ b/yarn-project/bb-prover/src/config.ts @@ -6,6 +6,8 @@ export interface BBConfig { } export interface ACVMConfig { + /** The path to the ACVM binary */ acvmBinaryPath: string; + /** The working directory to use for simulation/proving */ acvmWorkingDirectory: string; } diff --git a/yarn-project/circuit-types/src/interfaces/index.ts b/yarn-project/circuit-types/src/interfaces/index.ts index c5980197894..c717ceae649 100644 --- a/yarn-project/circuit-types/src/interfaces/index.ts +++ b/yarn-project/circuit-types/src/interfaces/index.ts @@ -20,3 +20,4 @@ export * from './server_circuit_prover.js'; export * from './service.js'; export * from './sync-status.js'; export * from './world_state.js'; +export * from './prover-broker.js'; diff --git a/yarn-project/circuit-types/src/interfaces/prover-agent.ts b/yarn-project/circuit-types/src/interfaces/prover-agent.ts index 19142530d43..c0fe9a76160 100644 --- a/yarn-project/circuit-types/src/interfaces/prover-agent.ts +++ b/yarn-project/circuit-types/src/interfaces/prover-agent.ts @@ -1,7 +1,63 @@ +import { type ConfigMappingsType, booleanConfigHelper, numberConfigHelper } from '@aztec/foundation/config'; import { type ApiSchemaFor } from '@aztec/foundation/schemas'; import { z } from 'zod'; +import { ProvingRequestType } from './proving-job.js'; + +export const ProverAgentConfig = z.object({ + /** The number of prover agents to start */ + proverAgentCount: z.number(), + /** The types of proofs the prover agent can generate */ + proverAgentProofTypes: z.array(z.nativeEnum(ProvingRequestType)), + /** How often the prover agents poll for jobs */ + proverAgentPollIntervalMs: z.number(), + /** The URL where this agent takes jobs from */ + proverBrokerUrl: z.string().optional(), + /** Whether to construct real proofs */ + realProofs: z.boolean(), + /** Artificial delay to introduce to all operations to the test prover. 
*/ + proverTestDelayMs: z.number(), +}); + +export type ProverAgentConfig = z.infer; + +export const proverAgentConfigMappings: ConfigMappingsType = { + proverAgentCount: { + env: 'PROVER_AGENT_COUNT', + description: 'Whether this prover has a local prover agent', + ...numberConfigHelper(1), + }, + proverAgentPollIntervalMs: { + env: 'PROVER_AGENT_POLL_INTERVAL_MS', + description: 'The interval agents poll for jobs at', + ...numberConfigHelper(100), + }, + proverAgentProofTypes: { + env: 'PROVER_AGENT_PROOF_TYPES', + description: 'The types of proofs the prover agent can generate', + parseEnv: (val: string) => + val + .split(',') + .map(v => ProvingRequestType[v as any]) + .filter(v => typeof v === 'number'), + }, + proverBrokerUrl: { + env: 'PROVER_BROKER_URL', + description: 'The URL where this agent takes jobs from', + }, + realProofs: { + env: 'PROVER_REAL_PROOFS', + description: 'Whether to construct real proofs', + ...booleanConfigHelper(false), + }, + proverTestDelayMs: { + env: 'PROVER_TEST_DELAY_MS', + description: 'Artificial delay to introduce to all operations to the test prover.', + ...numberConfigHelper(0), + }, +}; + export interface ProverAgentApi { setMaxConcurrency(maxConcurrency: number): Promise; diff --git a/yarn-project/circuit-types/src/interfaces/prover-broker.ts b/yarn-project/circuit-types/src/interfaces/prover-broker.ts new file mode 100644 index 00000000000..a81ec6316ad --- /dev/null +++ b/yarn-project/circuit-types/src/interfaces/prover-broker.ts @@ -0,0 +1,131 @@ +import { + type ProofUri, + type ProvingJob, + type ProvingJobId, + type ProvingJobSettledResult, + type ProvingJobStatus, + type ProvingRequestType, +} from '@aztec/circuit-types'; +import { type ConfigMappingsType, numberConfigHelper } from '@aztec/foundation/config'; + +import { z } from 'zod'; + +export const ProverBrokerConfig = z.object({ + /** Whether to enable the prover broker */ + proverBrokerEnabled: z.boolean(), + /** If starting a prover broker locally, the max number of retries per proving job */ + proverBrokerJobMaxRetries: z.number(), + /** If starting a prover broker locally, the time after which a job times out and gets assigned to a different agent */ + proverBrokerJobTimeoutMs: z.number(), + /** If starting a prover broker locally, the interval the broker checks for timed out jobs */ + proverBrokerPollIntervalMs: z.number(), + /** If starting a prover broker locally, the directory to store broker data */ + proverBrokerDataDirectory: z.string().optional(), +}); + +export type ProverBrokerConfig = z.infer; + +export const proverBrokerConfigMappings: ConfigMappingsType = { + proverBrokerEnabled: { + env: 'PROVER_BROKER_ENABLED', + description: 'Whether to enable the prover broker', + ...numberConfigHelper(1), + }, + proverBrokerJobTimeoutMs: { + env: 'PROVER_BROKER_JOB_TIMEOUT_MS', + description: 'Jobs are retried if not kept alive for this long', + ...numberConfigHelper(60_000), + }, + proverBrokerPollIntervalMs: { + env: 'PROVER_BROKER_POLL_INTERVAL_MS', + description: 'The interval to check job health status', + ...numberConfigHelper(1_000), + }, + proverBrokerJobMaxRetries: { + env: 'PROVER_BROKER_JOB_MAX_RETRIES', + description: 'If starting a prover broker locally, the max number of retries per proving job', + ...numberConfigHelper(3), + }, + proverBrokerDataDirectory: { + env: 'PROVER_BROKER_DATA_DIRECTORY', + description: 'If starting a prover broker locally, the directory to store broker data', + }, +}; + +/** + * An interface for the proving orchestrator. 
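The `PROVER_AGENT_PROOF_TYPES` parser above relies on `ProvingRequestType` being a numeric enum: indexing it with a proof-type name returns the numeric value, while an unknown name yields `undefined` and is filtered out. A small worked example of that behaviour; the env value shown is purely illustrative:

import { ProvingRequestType } from '@aztec/circuit-types';

// e.g. PROVER_AGENT_PROOF_TYPES="BASE_PARITY,TUBE_PROOF,NOT_A_REAL_TYPE"
const raw = 'BASE_PARITY,TUBE_PROOF,NOT_A_REAL_TYPE';

const allowList = raw
  .split(',')
  // numeric enums map names to numbers, so ProvingRequestType['BASE_PARITY'] is a
  // number, while an unknown name resolves to undefined
  .map(v => ProvingRequestType[v as any])
  // keep only entries that resolved to a numeric enum value
  .filter((v): v is ProvingRequestType => typeof v === 'number');

// allowList now holds the BASE_PARITY and TUBE_PROOF values; the agent passes it
// to the broker as a ProvingJobFilter ({ allowList }) when polling for jobs.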
The producer uses this to enqueue jobs for agents + */ +export interface ProvingJobProducer { + /** + * Enqueues a proving job + * @param job - The job to enqueue + */ + enqueueProvingJob(job: ProvingJob): Promise; + + /** + * Cancels a proving job and clears all of its + * @param id - The ID of the job to cancel + */ + removeAndCancelProvingJob(id: ProvingJobId): Promise; + + /** + * Returns the current status fof the proving job + * @param id - The ID of the job to get the status of + */ + getProvingJobStatus(id: ProvingJobId): Promise; + + /** + * Waits for the job to settle and returns to the result + * @param id - The ID of the job to get the status of + */ + waitForJobToSettle(id: ProvingJobId): Promise; +} + +export type ProvingJobFilter = { + allowList: ProvingRequestType[]; +}; + +export type GetProvingJobResponse = { + job: ProvingJob; + time: number; +}; + +/** + * An interface for proving agents to request jobs and report results + */ +export interface ProvingJobConsumer { + /** + * Gets a proving job to work on + * @param filter - Optional filter for the type of job to get + */ + getProvingJob(filter?: ProvingJobFilter): Promise; + + /** + * Marks a proving job as successful + * @param id - The ID of the job to report success for + * @param result - The result of the job + */ + reportProvingJobSuccess(id: ProvingJobId, result: ProofUri): Promise; + + /** + * Marks a proving job as errored + * @param id - The ID of the job to report an error for + * @param err - The error that occurred while processing the job + * @param retry - Whether to retry the job + */ + reportProvingJobError(id: ProvingJobId, err: string, retry?: boolean): Promise; + + /** + * Sends a heartbeat to the broker to indicate that the agent is still working on the given proving job + * @param id - The ID of the job to report progress for + * @param startedAt - The unix epoch when the job was started + * @param filter - Optional filter for the type of job to get + */ + reportProvingJobProgress( + id: ProvingJobId, + startedAt: number, + filter?: ProvingJobFilter, + ): Promise; +} + +export interface ProvingJobBroker extends ProvingJobProducer, ProvingJobConsumer {} diff --git a/yarn-project/circuit-types/src/interfaces/prover-client.ts b/yarn-project/circuit-types/src/interfaces/prover-client.ts index 2f2953b5dd7..b09fc665a90 100644 --- a/yarn-project/circuit-types/src/interfaces/prover-client.ts +++ b/yarn-project/circuit-types/src/interfaces/prover-client.ts @@ -7,42 +7,37 @@ import { z } from 'zod'; import { type TxHash } from '../tx/tx_hash.js'; import { type EpochProver } from './epoch-prover.js'; import { type MerkleTreeReadOperations } from './merkle_tree_operations.js'; -import { type ProvingJobSource } from './proving-job-source.js'; +import { type ProvingJobConsumer } from './prover-broker.js'; +import { type ProvingJobStatus } from './proving-job.js'; + +export type ActualProverConfig = { + /** Whether to construct real proofs */ + realProofs: boolean; + /** Artificial delay to introduce to all operations to the test prover. */ + proverTestDelayMs: number; +}; /** * The prover configuration. 
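The `ProvingJobConsumer` interface above is what an agent loops over: poll for a job matching its allow list, keep the claim alive with progress reports, then settle it with a success or error report. A simplified sketch of that loop; the `prove` callback and the poll/heartbeat intervals are stand-ins, and the real `ProvingAgent` additionally handles aborts and proof-store I/O:

import { type ProofUri, type ProvingJobConsumer, type ProvingRequestType } from '@aztec/circuit-types';

async function agentLoopSketch(
  broker: ProvingJobConsumer,
  allowList: ProvingRequestType[],
  prove: (inputsUri: ProofUri) => Promise<ProofUri>, // stand-in for prover + proof store
) {
  while (true) {
    const resp = await broker.getProvingJob({ allowList });
    if (!resp) {
      await new Promise(res => setTimeout(res, 1_000)); // nothing queued, poll again later
      continue;
    }

    const { job, time } = resp;
    // Periodically tell the broker we're still working so the job isn't reassigned.
    const heartbeat = setInterval(() => void broker.reportProvingJobProgress(job.id, time, { allowList }), 10_000);
    try {
      const outputUri = await prove(job.inputsUri);
      await broker.reportProvingJobSuccess(job.id, outputUri);
    } catch (err) {
      await broker.reportProvingJobError(job.id, String(err), /*retry=*/ true);
    } finally {
      clearInterval(heartbeat);
    }
  }
}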
*/ -export type ProverConfig = { +export type ProverConfig = ActualProverConfig & { /** The URL to the Aztec node to take proving jobs from */ nodeUrl?: string; - /** Whether to construct real proofs */ - realProofs: boolean; - /** Whether this prover has a local prover agent */ - proverAgentEnabled: boolean; - /** The interval agents poll for jobs at */ - proverAgentPollInterval: number; - /** The maximum number of proving jobs to be run in parallel */ - proverAgentConcurrency: number; - /** Jobs are retried if not kept alive for this long */ - proverJobTimeoutMs: number; - /** The interval to check job health status */ - proverJobPollIntervalMs: number; - /** Artificial delay to introduce to all operations to the test prover. */ - proverTestDelayMs: number; /** Identifier of the prover */ - proverId?: Fr; + proverId: Fr; + /** Where to store temporary data */ + cacheDir?: string; + + proverAgentCount: number; }; export const ProverConfigSchema = z.object({ nodeUrl: z.string().optional(), realProofs: z.boolean(), - proverAgentEnabled: z.boolean(), - proverAgentPollInterval: z.number(), - proverAgentConcurrency: z.number(), - proverJobTimeoutMs: z.number(), - proverJobPollIntervalMs: z.number(), - proverId: schemas.Fr.optional(), + proverId: schemas.Fr, proverTestDelayMs: z.number(), + cacheDir: z.string().optional(), + proverAgentCount: z.number(), }) satisfies ZodFor; export const proverConfigMappings: ConfigMappingsType = { @@ -55,59 +50,63 @@ export const proverConfigMappings: ConfigMappingsType = { description: 'Whether to construct real proofs', ...booleanConfigHelper(), }, - proverAgentEnabled: { - env: 'PROVER_AGENT_ENABLED', - description: 'Whether this prover has a local prover agent', - ...booleanConfigHelper(true), - }, - proverAgentPollInterval: { - env: 'PROVER_AGENT_POLL_INTERVAL_MS', - description: 'The interval agents poll for jobs at', - ...numberConfigHelper(100), - }, - proverAgentConcurrency: { - env: 'PROVER_AGENT_CONCURRENCY', - description: 'The maximum number of proving jobs to be run in parallel', - ...numberConfigHelper(1), - }, - proverJobTimeoutMs: { - env: 'PROVER_JOB_TIMEOUT_MS', - description: 'Jobs are retried if not kept alive for this long', - ...numberConfigHelper(60_000), - }, - proverJobPollIntervalMs: { - env: 'PROVER_JOB_POLL_INTERVAL_MS', - description: 'The interval to check job health status', - ...numberConfigHelper(1_000), - }, proverId: { env: 'PROVER_ID', parseEnv: (val: string) => parseProverId(val), description: 'Identifier of the prover', + defaultValue: Fr.ZERO, }, proverTestDelayMs: { env: 'PROVER_TEST_DELAY_MS', description: 'Artificial delay to introduce to all operations to the test prover.', ...numberConfigHelper(0), }, + cacheDir: { + env: 'PROVER_CACHE_DIR', + description: 'Where to store cache data generated while proving', + defaultValue: '/tmp/aztec-prover', + }, + proverAgentCount: { + env: 'PROVER_AGENT_COUNT', + description: 'The number of prover agents to start', + ...numberConfigHelper(1), + }, }; function parseProverId(str: string) { return Fr.fromString(str.startsWith('0x') ? 
str : Buffer.from(str, 'utf8').toString('hex')); } +/** + * A database where the proving orchestrator can store intermediate results + */ +export interface ProverCache { + /** + * Saves the status of a proving job + * @param jobId - The job ID + * @param status - The status of the proof + */ + setProvingJobStatus(jobId: string, status: ProvingJobStatus): Promise; + + /** + * Retrieves the status of a proving job (if known) + * @param jobId - The job ID + */ + getProvingJobStatus(jobId: string): Promise; +} + /** * The interface to the prover client. * Provides the ability to generate proofs and build rollups. */ export interface EpochProverManager { - createEpochProver(db: MerkleTreeReadOperations): EpochProver; + createEpochProver(db: MerkleTreeReadOperations, cache?: ProverCache): EpochProver; start(): Promise; stop(): Promise; - getProvingJobSource(): ProvingJobSource; + getProvingJobSource(): ProvingJobConsumer; updateProverConfig(config: Partial): Promise; } diff --git a/yarn-project/circuit-types/src/interfaces/proving-job-source.test.ts b/yarn-project/circuit-types/src/interfaces/proving-job-source.test.ts index eb1388a54d7..57b7d2192be 100644 --- a/yarn-project/circuit-types/src/interfaces/proving-job-source.test.ts +++ b/yarn-project/circuit-types/src/interfaces/proving-job-source.test.ts @@ -1,7 +1,6 @@ import { BaseOrMergeRollupPublicInputs, NESTED_RECURSIVE_PROOF_LENGTH, - PrivateBaseRollupInputs, VerificationKeyData, makeRecursiveProof, } from '@aztec/circuits.js'; @@ -9,9 +8,9 @@ import { type JsonRpcTestContext, createJsonRpcTestSetup } from '@aztec/foundati import { type ProvingJobSource, ProvingJobSourceSchema } from './proving-job-source.js'; import { + type ProofUri, type ProvingJob, - type ProvingRequest, - type ProvingRequestResult, + type ProvingJobResult, type ProvingRequestResultFor, ProvingRequestType, makePublicInputsAndRecursiveProof, @@ -66,17 +65,18 @@ describe('ProvingJobSourceSchema', () => { }); class MockProvingJobSource implements ProvingJobSource { - getProvingJob(): Promise | undefined> { + getProvingJob(): Promise { return Promise.resolve({ id: 'a-job-id', - request: { type: ProvingRequestType.PRIVATE_BASE_ROLLUP, inputs: PrivateBaseRollupInputs.empty() }, + type: ProvingRequestType.PRIVATE_BASE_ROLLUP, + inputsUri: 'inputs-uri' as ProofUri, }); } heartbeat(jobId: string): Promise { expect(typeof jobId).toEqual('string'); return Promise.resolve(); } - resolveProvingJob(jobId: string, result: ProvingRequestResult): Promise { + resolveProvingJob(jobId: string, result: ProvingJobResult): Promise { expect(typeof jobId).toEqual('string'); const baseRollupResult = result as ProvingRequestResultFor; expect(baseRollupResult.result.inputs).toBeInstanceOf(BaseOrMergeRollupPublicInputs); diff --git a/yarn-project/circuit-types/src/interfaces/proving-job-source.ts b/yarn-project/circuit-types/src/interfaces/proving-job-source.ts index ebeaa05301a..c54f5964e51 100644 --- a/yarn-project/circuit-types/src/interfaces/proving-job-source.ts +++ b/yarn-project/circuit-types/src/interfaces/proving-job-source.ts @@ -2,21 +2,14 @@ import { type ApiSchemaFor } from '@aztec/foundation/schemas'; import { z } from 'zod'; -import { - JobIdSchema, - type ProvingJob, - ProvingJobSchema, - type ProvingRequest, - type ProvingRequestResult, - ProvingRequestResultSchema, -} from './proving-job.js'; +import { ProvingJob, ProvingJobId, ProvingJobResult } from './proving-job.js'; export interface ProvingJobSource { /** * Gets the next proving job. 
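The `ProverCache` interface above only needs two methods, so the simplest backing store is a map keyed by job ID. A minimal sketch of the contract, assuming the promise-returning signatures implied by the interface; the repository ships its own memory and KV-store implementations (see `prover_cache/memory.ts` and `prover-cache/kv_cache.ts` in this patch), so this is just the shape:

import { type ProverCache, type ProvingJobStatus } from '@aztec/circuit-types';

// Minimal illustration of the ProverCache contract: remember the last known
// status of each proving job so the orchestrator can avoid re-enqueueing work.
class MapProverCacheSketch implements ProverCache {
  private statuses = new Map<string, ProvingJobStatus>();

  setProvingJobStatus(jobId: string, status: ProvingJobStatus): Promise<void> {
    this.statuses.set(jobId, status);
    return Promise.resolve();
  }

  getProvingJobStatus(jobId: string): Promise<ProvingJobStatus> {
    return Promise.resolve(this.statuses.get(jobId) ?? { status: 'not-found' });
  }
}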
`heartbeat` must be called periodically to keep the job alive. * @returns The proving job, or undefined if there are no jobs available. */ - getProvingJob(): Promise | undefined>; + getProvingJob(): Promise; /** * Keeps the job alive. If this isn't called regularly then the job will be @@ -30,7 +23,7 @@ export interface ProvingJobSource { * @param jobId - The ID of the job to resolve. * @param result - The result of the proving job. */ - resolveProvingJob(jobId: string, result: ProvingRequestResult): Promise; + resolveProvingJob(jobId: string, result: ProvingJobResult): Promise; /** * Rejects a proving job. @@ -41,8 +34,8 @@ export interface ProvingJobSource { } export const ProvingJobSourceSchema: ApiSchemaFor = { - getProvingJob: z.function().args().returns(ProvingJobSchema.optional()), - heartbeat: z.function().args(JobIdSchema).returns(z.void()), - resolveProvingJob: z.function().args(JobIdSchema, ProvingRequestResultSchema).returns(z.void()), - rejectProvingJob: z.function().args(JobIdSchema, z.string()).returns(z.void()), + getProvingJob: z.function().args().returns(ProvingJob.optional()), + heartbeat: z.function().args(ProvingJobId).returns(z.void()), + resolveProvingJob: z.function().args(ProvingJobId, ProvingJobResult).returns(z.void()), + rejectProvingJob: z.function().args(ProvingJobId, z.string()).returns(z.void()), }; diff --git a/yarn-project/circuit-types/src/interfaces/proving-job.ts b/yarn-project/circuit-types/src/interfaces/proving-job.ts index f751368b84c..f2013799dac 100644 --- a/yarn-project/circuit-types/src/interfaces/proving-job.ts +++ b/yarn-project/circuit-types/src/interfaces/proving-job.ts @@ -126,14 +126,12 @@ export function mapProvingRequestTypeToCircuitName(type: ProvingRequestType): Ci export type AvmProvingRequest = z.infer; -export type ProvingRequest = z.infer; - export const AvmProvingRequestSchema = z.object({ type: z.literal(ProvingRequestType.PUBLIC_VM), inputs: AvmCircuitInputs.schema, }); -export const ProvingRequestSchema = z.discriminatedUnion('type', [ +export const ProvingJobInputs = z.discriminatedUnion('type', [ AvmProvingRequestSchema, z.object({ type: z.literal(ProvingRequestType.BASE_PARITY), inputs: BaseParityInputs.schema }), z.object({ type: z.literal(ProvingRequestType.ROOT_PARITY), inputs: RootParityInputs.schema }), @@ -147,47 +145,23 @@ export const ProvingRequestSchema = z.discriminatedUnion('type', [ z.object({ type: z.literal(ProvingRequestType.PRIVATE_KERNEL_EMPTY), inputs: PrivateKernelEmptyInputData.schema }), z.object({ type: z.literal(ProvingRequestType.TUBE_PROOF), inputs: TubeInputs.schema }), ]); - -export type JobId = z.infer; - -export const JobIdSchema = z.string(); - -export type ProvingJob = { id: JobId; request: T }; - -export const ProvingJobSchema = z.object({ id: JobIdSchema, request: ProvingRequestSchema }); - -type ProvingRequestResultsMap = { - [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: PublicInputsAndRecursiveProof; - [ProvingRequestType.PUBLIC_VM]: ProofAndVerificationKey; - [ProvingRequestType.PRIVATE_BASE_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.PUBLIC_BASE_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.MERGE_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.BLOCK_ROOT_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.BLOCK_MERGE_ROLLUP]: PublicInputsAndRecursiveProof; - [ProvingRequestType.ROOT_ROLLUP]: PublicInputsAndRecursiveProof; - 
[ProvingRequestType.BASE_PARITY]: PublicInputsAndRecursiveProof; - [ProvingRequestType.ROOT_PARITY]: PublicInputsAndRecursiveProof< - ParityPublicInputs, - typeof NESTED_RECURSIVE_PROOF_LENGTH - >; - [ProvingRequestType.TUBE_PROOF]: ProofAndVerificationKey; +export type ProvingJobInputs = z.infer; +export type ProvingJobInputsMap = { + [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: PrivateKernelEmptyInputData; + [ProvingRequestType.PUBLIC_VM]: AvmCircuitInputs; + [ProvingRequestType.PRIVATE_BASE_ROLLUP]: PrivateBaseRollupInputs; + [ProvingRequestType.PUBLIC_BASE_ROLLUP]: PublicBaseRollupInputs; + [ProvingRequestType.MERGE_ROLLUP]: MergeRollupInputs; + [ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP]: EmptyBlockRootRollupInputs; + [ProvingRequestType.BLOCK_ROOT_ROLLUP]: BlockRootRollupInputs; + [ProvingRequestType.BLOCK_MERGE_ROLLUP]: BlockMergeRollupInputs; + [ProvingRequestType.ROOT_ROLLUP]: RootRollupInputs; + [ProvingRequestType.BASE_PARITY]: BaseParityInputs; + [ProvingRequestType.ROOT_PARITY]: RootParityInputs; + [ProvingRequestType.TUBE_PROOF]: TubeInputs; }; -export type ProvingRequestResultFor = { type: T; result: ProvingRequestResultsMap[T] }; - -export type ProvingRequestResult = { - [K in keyof ProvingRequestResultsMap]: { type: K; result: ProvingRequestResultsMap[K] }; -}[keyof ProvingRequestResultsMap]; - -export function makeProvingRequestResult( - type: ProvingRequestType, - result: ProvingRequestResult['result'], -): ProvingRequestResult { - return { type, result } as ProvingRequestResult; -} - -export const ProvingRequestResultSchema = z.discriminatedUnion('type', [ +export const ProvingJobResult = z.discriminatedUnion('type', [ z.object({ type: z.literal(ProvingRequestType.PRIVATE_KERNEL_EMPTY), result: schemaForPublicInputsAndRecursiveProof(KernelCircuitPublicInputs.schema), @@ -236,148 +210,9 @@ export const ProvingRequestResultSchema = z.discriminatedUnion('type', [ type: z.literal(ProvingRequestType.TUBE_PROOF), result: schemaForRecursiveProofAndVerificationKey(TUBE_PROOF_LENGTH), }), -]) satisfies ZodFor; - -export const V2ProvingJobId = z.string().brand('ProvingJobId'); -export type V2ProvingJobId = z.infer; - -export const V2ProofInput = z.discriminatedUnion('type', [ - z.object({ - type: z.literal(ProvingRequestType.PUBLIC_VM), - value: AvmCircuitInputs.schema, - }), - z.object({ - type: z.literal(ProvingRequestType.BASE_PARITY), - value: BaseParityInputs.schema, - }), - z.object({ - type: z.literal(ProvingRequestType.ROOT_PARITY), - value: RootParityInputs.schema, - }), - z.object({ - type: z.literal(ProvingRequestType.PRIVATE_BASE_ROLLUP), - value: PrivateBaseRollupInputs.schema, - }), - z.object({ - type: z.literal(ProvingRequestType.PUBLIC_BASE_ROLLUP), - value: PublicBaseRollupInputs.schema, - }), - z.object({ - type: z.literal(ProvingRequestType.MERGE_ROLLUP), - value: MergeRollupInputs.schema, - }), - z.object({ - type: z.literal(ProvingRequestType.BLOCK_ROOT_ROLLUP), - value: BlockRootRollupInputs.schema, - }), - z.object({ - type: z.literal(ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP), - value: EmptyBlockRootRollupInputs.schema, - }), - z.object({ - type: z.literal(ProvingRequestType.BLOCK_MERGE_ROLLUP), - value: BlockMergeRollupInputs.schema, - }), - z.object({ - type: z.literal(ProvingRequestType.ROOT_ROLLUP), - value: RootRollupInputs.schema, - }), - z.object({ - type: z.literal(ProvingRequestType.PRIVATE_KERNEL_EMPTY), - value: PrivateKernelEmptyInputData.schema, - }), - z.object({ - type: z.literal(ProvingRequestType.TUBE_PROOF), - value: TubeInputs.schema, - 
}), -]); - -export type V2ProofInput = z.infer; - -export const V2ProofInputUri = z.string().brand('ProofInputUri'); -export type V2ProofInputUri = z.infer; - -export const V2ProvingJob = z.object({ - id: V2ProvingJobId, - blockNumber: z.number(), - type: z.nativeEnum(ProvingRequestType), - inputs: V2ProofInputUri, -}); - -export type V2ProvingJob = z.infer; - -export const V2ProofOutput = z.discriminatedUnion('type', [ - z.object({ - type: z.literal(ProvingRequestType.PRIVATE_KERNEL_EMPTY), - value: schemaForPublicInputsAndRecursiveProof(KernelCircuitPublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.PUBLIC_VM), - value: schemaForRecursiveProofAndVerificationKey(AVM_PROOF_LENGTH_IN_FIELDS), - }), - z.object({ - type: z.literal(ProvingRequestType.PRIVATE_BASE_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(BaseOrMergeRollupPublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.PUBLIC_BASE_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(BaseOrMergeRollupPublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.MERGE_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(BaseOrMergeRollupPublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(BlockRootOrBlockMergePublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.BLOCK_ROOT_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(BlockRootOrBlockMergePublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.BLOCK_MERGE_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(BlockRootOrBlockMergePublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.ROOT_ROLLUP), - value: schemaForPublicInputsAndRecursiveProof(RootRollupPublicInputs.schema), - }), - z.object({ - type: z.literal(ProvingRequestType.BASE_PARITY), - value: schemaForPublicInputsAndRecursiveProof(ParityPublicInputs.schema, RECURSIVE_PROOF_LENGTH), - }), - z.object({ - type: z.literal(ProvingRequestType.ROOT_PARITY), - value: schemaForPublicInputsAndRecursiveProof(ParityPublicInputs.schema, NESTED_RECURSIVE_PROOF_LENGTH), - }), - z.object({ - type: z.literal(ProvingRequestType.TUBE_PROOF), - value: schemaForRecursiveProofAndVerificationKey(TUBE_PROOF_LENGTH), - }), ]); - -export type V2ProofOutput = z.infer; - -export const V2ProofOutputUri = z.string().brand('ProofOutputUri'); -export type V2ProofOutputUri = z.infer; - -export type V2ProofInputsByType = { - [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: PrivateKernelEmptyInputData; - [ProvingRequestType.PUBLIC_VM]: AvmCircuitInputs; - [ProvingRequestType.PRIVATE_BASE_ROLLUP]: PrivateBaseRollupInputs; - [ProvingRequestType.PUBLIC_BASE_ROLLUP]: PublicBaseRollupInputs; - [ProvingRequestType.MERGE_ROLLUP]: MergeRollupInputs; - [ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP]: EmptyBlockRootRollupInputs; - [ProvingRequestType.BLOCK_ROOT_ROLLUP]: BlockRootRollupInputs; - [ProvingRequestType.BLOCK_MERGE_ROLLUP]: BlockMergeRollupInputs; - [ProvingRequestType.ROOT_ROLLUP]: RootRollupInputs; - [ProvingRequestType.BASE_PARITY]: BaseParityInputs; - [ProvingRequestType.ROOT_PARITY]: RootParityInputs; - [ProvingRequestType.TUBE_PROOF]: TubeInputs; -}; - -export type V2ProofOutputByType = { +export type ProvingJobResult = z.infer; +export type ProvingJobResultsMap = { [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: PublicInputsAndRecursiveProof; [ProvingRequestType.PUBLIC_VM]: ProofAndVerificationKey; 
[ProvingRequestType.PRIVATE_BASE_ROLLUP]: PublicInputsAndRecursiveProof; @@ -395,14 +230,53 @@ export type V2ProofOutputByType = { [ProvingRequestType.TUBE_PROOF]: ProofAndVerificationKey; }; -export const V2ProvingJobStatus = z.discriminatedUnion('status', [ +export type ProvingRequestResultFor = { type: T; result: ProvingJobResultsMap[T] }; + +export const ProvingJobId = z.string(); + +export const ProofUri = z.string().brand('ProvingJobUri'); +export type ProofUri = z.infer; + +export type ProvingJobId = z.infer; +export const ProvingJob = z.object({ + id: ProvingJobId, + type: z.nativeEnum(ProvingRequestType), + blockNumber: z.number().optional(), + inputsUri: ProofUri, +}); + +export type ProvingJob = z.infer; + +export function makeProvingRequestResult( + type: ProvingRequestType, + result: ProvingJobResult['result'], +): ProvingJobResult { + return { type, result } as ProvingJobResult; +} + +export const ProvingJobFulfilledResult = z.object({ + status: z.literal('fulfilled'), + value: ProofUri, +}); +export type ProvingJobFulfilledResult = z.infer; + +export const ProvingJobRejectedResult = z.object({ + status: z.literal('rejected'), + reason: z.string(), +}); +export type ProvingJobRejectedResult = z.infer; + +export const ProvingJobSettledResult = z.discriminatedUnion('status', [ + ProvingJobFulfilledResult, + ProvingJobRejectedResult, +]); +export type ProvingJobSettledResult = z.infer; + +export const ProvingJobStatus = z.discriminatedUnion('status', [ z.object({ status: z.literal('in-queue') }), z.object({ status: z.literal('in-progress') }), z.object({ status: z.literal('not-found') }), - z.object({ status: z.literal('resolved'), value: V2ProofOutputUri }), - z.object({ status: z.literal('rejected'), error: z.string() }), + ProvingJobFulfilledResult, + ProvingJobRejectedResult, ]); -export type V2ProvingJobStatus = z.infer; - -export const V2ProvingJobResult = z.union([z.object({ value: V2ProofOutputUri }), z.object({ error: z.string() })]); -export type V2ProvingJobResult = z.infer; +export type ProvingJobStatus = z.infer; diff --git a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts index c704be6b0aa..d77451f317f 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts @@ -56,8 +56,8 @@ describe('benchmarks/proving', () => { { // do setup with fake proofs realProofs: false, - proverAgentConcurrency: 4, - proverAgentPollInterval: 10, + proverAgentCount: 4, + proverAgentPollIntervalMs: 10, minTxsPerBlock: 1, }, {}, @@ -141,7 +141,7 @@ describe('benchmarks/proving', () => { ctx.logger.info('Stopping fake provers'); await ctx.aztecNode.setConfig({ - proverAgentConcurrency: 1, + proverAgentCount: 1, realProofs: true, minTxsPerBlock: 2, }); diff --git a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts index e2d230a40a2..1d200dc9c4c 100644 --- a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts @@ -266,7 +266,7 @@ export class FullProverTest { dataDirectory: undefined, proverId: new Fr(81), realProofs: this.realProofs, - proverAgentConcurrency: 2, + proverAgentCount: 2, publisherPrivateKey: `0x${proverNodePrivateKey!.toString('hex')}`, proverNodeMaxPendingJobs: 100, proverNodePollingIntervalMs: 100, diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts 
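On the producer side, the schemas above are used roughly as follows: build a `ProvingJob` whose `inputsUri` points at previously saved inputs, enqueue it, and narrow the settled result on its `status` discriminant. A hedged sketch; the job ID scheme and URIs here are placeholders:

import { ProvingRequestType, type ProofUri, type ProvingJob, type ProvingJobProducer } from '@aztec/circuit-types';

async function enqueueAndAwaitSketch(producer: ProvingJobProducer, inputsUri: ProofUri) {
  const job: ProvingJob = {
    id: 'epoch1:block42:base-parity:0', // placeholder ID scheme
    type: ProvingRequestType.BASE_PARITY,
    blockNumber: 42,
    inputsUri, // where the serialized BaseParityInputs were stored
  };

  await producer.enqueueProvingJob(job);

  const result = await producer.waitForJobToSettle(job.id);
  if (result.status === 'fulfilled') {
    return result.value; // ProofUri of the stored output
  }
  throw new Error(`proving failed: ${result.reason}`);
}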
b/yarn-project/end-to-end/src/fixtures/utils.ts index 5381148d3c1..33f2df5244e 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -707,7 +707,7 @@ export async function createAndSyncProverNode( dataDirectory: undefined, proverId: new Fr(42), realProofs: false, - proverAgentConcurrency: 2, + proverAgentCount: 2, publisherPrivateKey: proverNodePrivateKey, proverNodeMaxPendingJobs: 10, proverNodePollingIntervalMs: 200, diff --git a/yarn-project/end-to-end/webpack.config.js b/yarn-project/end-to-end/webpack.config.js index 88f6bb5178c..8fb6aefc963 100644 --- a/yarn-project/end-to-end/webpack.config.js +++ b/yarn-project/end-to-end/webpack.config.js @@ -43,6 +43,8 @@ export default { new webpack.DefinePlugin({ 'process.env': { NODE_ENV: JSON.stringify('production'), + LOG_LEVEL: JSON.stringify('debug'), + DEBUG: JSON.stringify('aztec:*'), }, }), new webpack.ProvidePlugin({ Buffer: ['buffer', 'Buffer'] }), diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 165d98d7b6c..271e99cb54a 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -97,9 +97,17 @@ export type EnvVar = | 'PEER_ID_PRIVATE_KEY' | 'PROOF_VERIFIER_L1_START_BLOCK' | 'PROOF_VERIFIER_POLL_INTERVAL_MS' - | 'PROVER_AGENT_CONCURRENCY' | 'PROVER_AGENT_ENABLED' + | 'PROVER_AGENT_CONCURRENCY' + | 'PROVER_AGENT_COUNT' + | 'PROVER_AGENT_PROOF_TYPES' | 'PROVER_AGENT_POLL_INTERVAL_MS' + | 'PROVER_BROKER_URL' + | 'PROVER_BROKER_ENABLED' + | 'PROVER_BROKER_JOB_TIMEOUT_MS' + | 'PROVER_BROKER_POLL_INTERVAL_MS' + | 'PROVER_BROKER_JOB_MAX_RETRIES' + | 'PROVER_BROKER_DATA_DIRECTORY' | 'PROVER_COORDINATION_NODE_URL' | 'PROVER_DISABLED' | 'PROVER_ID' @@ -113,6 +121,7 @@ export type EnvVar = | 'PROVER_REAL_PROOFS' | 'PROVER_REQUIRED_CONFIRMATIONS' | 'PROVER_TEST_DELAY_MS' + | 'PROVER_CACHE_DIR' | 'PXE_BLOCK_POLLING_INTERVAL_MS' | 'PXE_L2_STARTING_BLOCK' | 'PXE_PROVER_ENABLED' diff --git a/yarn-project/foundation/src/config/index.ts b/yarn-project/foundation/src/config/index.ts index 48cbe0301a7..4485aae5059 100644 --- a/yarn-project/foundation/src/config/index.ts +++ b/yarn-project/foundation/src/config/index.ts @@ -46,7 +46,7 @@ export function getConfigFromMappings(configMappings: ConfigMappingsType): * @param keysToFilter - The keys to filter out * @returns The filtered config mappings */ -export function filterConfigMappings( +export function omitConfigMappings( configMappings: ConfigMappingsType, keysToFilter: K[], ): ConfigMappingsType> { diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index 97480297dbf..4ee446b70af 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -4,6 +4,7 @@ "type": "module", "exports": { ".": "./dest/index.js", + "./broker": "./dest/proving_broker/index.js", "./prover-agent": "./dest/prover-agent/index.js", "./orchestrator": "./dest/orchestrator/index.js", "./helpers": "./dest/orchestrator/block-building-helpers.js" @@ -78,7 +79,8 @@ "commander": "^12.1.0", "lodash.chunk": "^4.2.0", "source-map-support": "^0.5.21", - "tslib": "^2.4.0" + "tslib": "^2.4.0", + "zod": "^3.23.8" }, "devDependencies": { "@jest/globals": "^29.5.0", diff --git a/yarn-project/prover-client/src/config.ts b/yarn-project/prover-client/src/config.ts index 8572b176dbc..8d64bde6cb2 100644 --- a/yarn-project/prover-client/src/config.ts +++ b/yarn-project/prover-client/src/config.ts 
@@ -1,25 +1,27 @@ -import { type BBConfig } from '@aztec/bb-prover'; -import { type ProverConfig, proverConfigMappings } from '@aztec/circuit-types'; +import { type ACVMConfig, type BBConfig } from '@aztec/bb-prover'; +import { + type ProverAgentConfig, + type ProverBrokerConfig, + type ProverConfig, + proverAgentConfigMappings, + proverBrokerConfigMappings, + proverConfigMappings, +} from '@aztec/circuit-types'; import { type ConfigMappingsType, booleanConfigHelper, getConfigFromMappings } from '@aztec/foundation/config'; /** * The prover configuration. */ export type ProverClientConfig = ProverConfig & - BBConfig & { + ProverAgentConfig & + ProverBrokerConfig & + BBConfig & + ACVMConfig & { /** The URL to the Aztec prover node to take proving jobs from */ proverJobSourceUrl?: string; - /** The working directory to use for simulation/proving */ - acvmWorkingDirectory: string; - /** The path to the ACVM binary */ - acvmBinaryPath: string; }; -export const proverClientConfigMappings: ConfigMappingsType = { - proverJobSourceUrl: { - env: 'PROVER_JOB_SOURCE_URL', - description: 'The URL to the Aztec prover node to take proving jobs from', - }, +export const bbConfigMappings: ConfigMappingsType = { acvmWorkingDirectory: { env: 'ACVM_WORKING_DIRECTORY', description: 'The working directory to use for simulation/proving', @@ -41,7 +43,17 @@ export const proverClientConfigMappings: ConfigMappingsType description: 'Whether to skip cleanup of bb temporary files', ...booleanConfigHelper(false), }, +}; + +export const proverClientConfigMappings: ConfigMappingsType = { + proverJobSourceUrl: { + env: 'PROVER_JOB_SOURCE_URL', + description: 'The URL to the Aztec prover node to take proving jobs from', + }, + ...bbConfigMappings, ...proverConfigMappings, + ...proverAgentConfigMappings, + ...proverBrokerConfigMappings, }; /** diff --git a/yarn-project/prover-client/src/index.ts b/yarn-project/prover-client/src/index.ts index 36affdfba2a..56f3430e2c6 100644 --- a/yarn-project/prover-client/src/index.ts +++ b/yarn-project/prover-client/src/index.ts @@ -3,3 +3,4 @@ export { EpochProverManager } from '@aztec/circuit-types'; export * from './tx-prover/tx-prover.js'; export * from './config.js'; export * from './tx-prover/factory.js'; +export * from './proving_broker/prover_cache/memory.js'; diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index ebecd07801a..764a092e813 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -9,7 +9,7 @@ import { type TxValidator, } from '@aztec/circuit-types'; import { type Gas, type GlobalVariables, Header } from '@aztec/circuits.js'; -import { type Fr } from '@aztec/foundation/fields'; +import { Fr } from '@aztec/foundation/fields'; import { type DebugLogger } from '@aztec/foundation/log'; import { openTmpStore } from '@aztec/kv-store/utils'; import { @@ -118,7 +118,7 @@ export class TestContext { } const queue = new MemoryProvingQueue(telemetry); - const orchestrator = new ProvingOrchestrator(proverDb, queue, telemetry); + const orchestrator = new ProvingOrchestrator(proverDb, queue, telemetry, Fr.ZERO); const agent = new ProverAgent(localProver, proverCount); queue.start(); diff --git a/yarn-project/prover-client/src/orchestrator/block-proving-state.ts b/yarn-project/prover-client/src/orchestrator/block-proving-state.ts index fd6b3626ca1..450715d8c06 100644 --- a/yarn-project/prover-client/src/orchestrator/block-proving-state.ts +++ 
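Since `proverClientConfigMappings` now merges the bb, prover, agent, and broker mappings, a full `ProverClientConfig` can still be pulled from the environment in a single call. A small sketch, assuming the usual generic signature of `getConfigFromMappings`; the env values named in the comment are illustrative only:

import { getConfigFromMappings } from '@aztec/foundation/config';
import { type ProverClientConfig, proverClientConfigMappings } from '@aztec/prover-client';

// e.g. PROVER_BROKER_URL=http://prover-broker:8084 PROVER_AGENT_COUNT=4 PROVER_REAL_PROOFS=true
const config: ProverClientConfig = getConfigFromMappings<ProverClientConfig>(proverClientConfigMappings);

// The one object now carries broker settings (proverBrokerJobTimeoutMs, ...),
// agent settings (proverAgentCount, proverAgentProofTypes, ...) and bb/acvm paths.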
b/yarn-project/prover-client/src/orchestrator/block-proving-state.ts @@ -131,7 +131,7 @@ export class BlockProvingState { /** Returns the block number as an epoch number. Used for prioritizing proof requests. */ public get epochNumber(): number { - return this.globalVariables.blockNumber.toNumber(); + return this.parentEpoch.epochNumber; } /** diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index b13bc7e7367..85c71b7f8ba 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -75,7 +75,7 @@ import { import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js'; import { TxProvingState } from './tx-proving-state.js'; -const logger = createDebugLogger('aztec:prover-client:orchestrator'); +const logger = createDebugLogger('aztec:prover:proving-orchestrator'); /** * Implements an event driven proving scheduler to build the recursive proof tree. The idea being: diff --git a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.test.ts b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.test.ts index dcdb839b595..aa971c116ce 100644 --- a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.test.ts +++ b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.test.ts @@ -11,17 +11,27 @@ import { AbortError } from '@aztec/foundation/error'; import { sleep } from '@aztec/foundation/sleep'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import { InlineProofStore, type ProofStore } from '../proving_broker/proof_store.js'; import { MemoryProvingQueue } from './memory-proving-queue.js'; describe('MemoryProvingQueue', () => { let queue: MemoryProvingQueue; let jobTimeoutMs: number; let pollingIntervalMs: number; + let proofStore: ProofStore; beforeEach(() => { jobTimeoutMs = 100; pollingIntervalMs = 10; - queue = new MemoryProvingQueue(new NoopTelemetryClient(), jobTimeoutMs, pollingIntervalMs); + proofStore = new InlineProofStore(); + queue = new MemoryProvingQueue( + new NoopTelemetryClient(), + jobTimeoutMs, + pollingIntervalMs, + undefined, + undefined, + proofStore, + ); queue.start(); }); @@ -34,10 +44,10 @@ describe('MemoryProvingQueue', () => { void queue.getPrivateBaseRollupProof(makePrivateBaseRollupInputs()); const job1 = await queue.getProvingJob(); - expect(job1?.request.type).toEqual(ProvingRequestType.BASE_PARITY); + expect(job1?.type).toEqual(ProvingRequestType.BASE_PARITY); const job2 = await queue.getProvingJob(); - expect(job2?.request.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); + expect(job2?.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); }); it('returns jobs ordered by priority', async () => { @@ -46,7 +56,7 @@ describe('MemoryProvingQueue', () => { void queue.getPublicBaseRollupProof(makePublicBaseRollupInputs(), undefined, 1); // The agent consumes one of them - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); // A new block comes along with its base rollups, and the orchestrator then pushes a root request for the first one void queue.getPublicBaseRollupProof(makePublicBaseRollupInputs(), undefined, 2); @@ -56,14 +66,14 @@ describe('MemoryProvingQueue', () => { void queue.getRootRollupProof(makeRootRollupInputs(), undefined, 1); // The next jobs for the agent should be 
the ones from block 1, skipping the ones for block 2 - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.PUBLIC_BASE_ROLLUP); - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.ROOT_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.PUBLIC_BASE_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.ROOT_ROLLUP); // And the base rollups for block 2 should go next - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.PUBLIC_BASE_ROLLUP); - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); - expect((await queue.getProvingJob())!.request.type).toEqual(ProvingRequestType.PUBLIC_BASE_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.PUBLIC_BASE_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.PRIVATE_BASE_ROLLUP); + expect((await queue.getProvingJob())!.type).toEqual(ProvingRequestType.PUBLIC_BASE_ROLLUP); }); it('returns undefined when no jobs are available', async () => { @@ -75,7 +85,8 @@ describe('MemoryProvingQueue', () => { const promise = queue.getBaseParityProof(inputs); const job = await queue.getProvingJob(); - expect(job?.request.inputs).toEqual(inputs); + const jobInputs = await proofStore.getProofInput(job!.inputsUri); + expect(jobInputs.inputs).toEqual(inputs); const publicInputs = makeParityPublicInputs(); const proof = makeRecursiveProof(RECURSIVE_PROOF_LENGTH); @@ -93,7 +104,8 @@ describe('MemoryProvingQueue', () => { void queue.getBaseParityProof(inputs); const job = await queue.getProvingJob(); - expect(job?.request.inputs).toEqual(inputs); + const proofInput = await proofStore.getProofInput(job!.inputsUri); + expect(proofInput.inputs).toEqual(inputs); const error = new Error('test error'); diff --git a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts index f70b66efbc9..a6175f37e95 100644 --- a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts +++ b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts @@ -1,8 +1,8 @@ import { type ProofAndVerificationKey, type ProvingJob, + type ProvingJobInputsMap, type ProvingJobSource, - type ProvingRequest, type ProvingRequestResultFor, ProvingRequestType, type PublicInputsAndRecursiveProof, @@ -35,13 +35,13 @@ import { AbortError, TimeoutError } from '@aztec/foundation/error'; import { createDebugLogger } from '@aztec/foundation/log'; import { type PromiseWithResolvers, RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise'; import { PriorityMemoryQueue } from '@aztec/foundation/queue'; -import { serializeToBuffer } from '@aztec/foundation/serialize'; import { type TelemetryClient } from '@aztec/telemetry-client'; +import { InlineProofStore, type ProofStore } from '../proving_broker/proof_store.js'; import { ProvingQueueMetrics } from './queue_metrics.js'; -type ProvingJobWithResolvers = ProvingJob & - PromiseWithResolvers> & { +type ProvingJobWithResolvers = ProvingJob & + PromiseWithResolvers> & { signal?: AbortSignal; epochNumber?: number; attempts: number; @@ -62,9 +62,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, 
ProvingJobSource (a, b) => (a.epochNumber ?? 0) - (b.epochNumber ?? 0), ); private jobsInProgress = new Map(); - private runningPromise: RunningPromise; - private metrics: ProvingQueueMetrics; constructor( @@ -75,6 +73,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource pollingIntervalMs = 1000, private generateId = defaultIdGenerator, private timeSource = defaultTimeSource, + private proofStore: ProofStore = new InlineProofStore(), ) { this.metrics = new ProvingQueueMetrics(client, 'MemoryProvingQueue'); this.runningPromise = new RunningPromise(this.poll, pollingIntervalMs); @@ -100,7 +99,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource this.log.info('Proving queue stopped'); } - public async getProvingJob({ timeoutSec = 1 } = {}): Promise | undefined> { + public async getProvingJob({ timeoutSec = 1 } = {}): Promise { if (!this.runningPromise.isRunning()) { throw new Error('Proving queue is not running. Start the queue before getting jobs.'); } @@ -119,7 +118,8 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource this.jobsInProgress.set(job.id, job); return { id: job.id, - request: job.request, + type: job.type, + inputsUri: job.inputsUri, }; } catch (err) { if (err instanceof TimeoutError) { @@ -167,20 +167,18 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource } // every job should be retried with the exception of the public VM since its in development and can fail - if (job.attempts < MAX_RETRIES && job.request.type !== ProvingRequestType.PUBLIC_VM) { + if (job.attempts < MAX_RETRIES && job.type !== ProvingRequestType.PUBLIC_VM) { job.attempts++; this.log.warn( - `Job id=${job.id} type=${ProvingRequestType[job.request.type]} failed with error: ${reason}. Retry ${ + `Job id=${job.id} type=${ProvingRequestType[job.type]} failed with error: ${reason}. Retry ${ job.attempts }/${MAX_RETRIES}`, ); this.queue.put(job); } else { const logFn = - job.request.type === ProvingRequestType.PUBLIC_VM && !process.env.AVM_PROVING_STRICT - ? this.log.warn - : this.log.error; - logFn(`Job id=${job.id} type=${ProvingRequestType[job.request.type]} failed with error: ${reason}`); + job.type === ProvingRequestType.PUBLIC_VM && !process.env.AVM_PROVING_STRICT ? 
this.log.warn : this.log.error; + logFn(`Job id=${job.id} type=${ProvingRequestType[job.type]} failed with error: ${reason}`); job.reject(new Error(reason)); } return Promise.resolve(); @@ -214,7 +212,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource } if (job.heartbeat + this.jobTimeoutMs < now) { - this.log.warn(`Job ${job.id} type=${ProvingRequestType[job.request.type]} has timed out`); + this.log.warn(`Job ${job.id} type=${ProvingRequestType[job.type]} has timed out`); this.jobsInProgress.delete(job.id); job.heartbeat = 0; @@ -223,19 +221,23 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource } }; - private enqueue( - request: T, + private async enqueue( + type: T, + inputs: ProvingJobInputsMap[T], signal?: AbortSignal, epochNumber?: number, - ): Promise['result']> { + ): Promise['result']> { if (!this.runningPromise.isRunning()) { return Promise.reject(new Error('Proving queue is not running.')); } - const { promise, resolve, reject } = promiseWithResolvers>(); + const { promise, resolve, reject } = promiseWithResolvers>(); + const id = this.generateId(); + const inputsUri = await this.proofStore.saveProofInput(id, type, inputs); const item: ProvingJobWithResolvers = { - id: this.generateId(), - request, + id, + type, + inputsUri, signal, promise, resolve, @@ -250,16 +252,13 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource } this.log.debug( - `Adding id=${item.id} type=${ProvingRequestType[request.type]} proving job to queue depth=${this.queue.length()}`, + `Adding id=${item.id} type=${ProvingRequestType[type]} proving job to queue depth=${this.queue.length()}`, ); - // TODO (alexg) remove the `any` - if (!this.queue.put(item as any)) { + + if (!this.queue.put(item as ProvingJobWithResolvers)) { throw new Error(); } - const byteSize = serializeToBuffer(item.request.inputs).length; - this.metrics.recordNewJob(item.request.type, byteSize); - return promise.then(({ result }) => result); } @@ -268,7 +267,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.PRIVATE_KERNEL_EMPTY, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.PRIVATE_KERNEL_EMPTY, inputs, signal, epochNumber); } getTubeProof( @@ -276,7 +275,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.TUBE_PROOF, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.TUBE_PROOF, inputs, signal, epochNumber); } /** @@ -288,7 +287,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.BASE_PARITY, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.BASE_PARITY, inputs, signal, epochNumber); } /** @@ -300,7 +299,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.ROOT_PARITY, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.ROOT_PARITY, inputs, signal, epochNumber); } getPrivateBaseRollupProof( @@ -308,7 +307,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource 
signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.PRIVATE_BASE_ROLLUP, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.PRIVATE_BASE_ROLLUP, inputs, signal, epochNumber); } getPublicBaseRollupProof( @@ -316,7 +315,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.PUBLIC_BASE_ROLLUP, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.PUBLIC_BASE_ROLLUP, inputs, signal, epochNumber); } /** @@ -324,11 +323,11 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource * @param input - Input to the circuit. */ getMergeRollupProof( - input: MergeRollupInputs, + inputs: MergeRollupInputs, signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.MERGE_ROLLUP, inputs: input }, signal, epochNumber); + return this.enqueue(ProvingRequestType.MERGE_ROLLUP, inputs, signal, epochNumber); } /** @@ -336,19 +335,19 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource * @param input - Input to the circuit. */ getBlockRootRollupProof( - input: BlockRootRollupInputs, + inputs: BlockRootRollupInputs, signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.BLOCK_ROOT_ROLLUP, inputs: input }, signal, epochNumber); + return this.enqueue(ProvingRequestType.BLOCK_ROOT_ROLLUP, inputs, signal, epochNumber); } getEmptyBlockRootRollupProof( - input: EmptyBlockRootRollupInputs, + inputs: EmptyBlockRootRollupInputs, signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP, inputs: input }, signal, epochNumber); + return this.enqueue(ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP, inputs, signal, epochNumber); } /** @@ -356,11 +355,11 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource * @param input - Input to the circuit. */ getBlockMergeRollupProof( - input: BlockMergeRollupInputs, + inputs: BlockMergeRollupInputs, signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.BLOCK_MERGE_ROLLUP, inputs: input }, signal, epochNumber); + return this.enqueue(ProvingRequestType.BLOCK_MERGE_ROLLUP, inputs, signal, epochNumber); } /** @@ -368,11 +367,11 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource * @param input - Input to the circuit. 
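Editorial sketch, not part of the patch, showing the effect of this refactor: jobs handed out by the queue now carry an inputsUri instead of the inputs themselves, and consumers resolve the inputs through the shared ProofStore. The constructor arguments mirror the test setup earlier in this patch; the relative import paths assume the snippet sits next to these sources.

import { ProvingRequestType } from '@aztec/circuit-types';
import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';

import { InlineProofStore } from '../proving_broker/proof_store.js';
import { MemoryProvingQueue } from './memory-proving-queue.js';

async function peekNextJob() {
  const proofStore = new InlineProofStore();
  const queue = new MemoryProvingQueue(new NoopTelemetryClient(), 100, 10, undefined, undefined, proofStore);
  queue.start();

  const job = await queue.getProvingJob(); // { id, type, inputsUri } | undefined
  if (job) {
    // Inputs are no longer embedded in the job; fetch them via the proof store.
    const { type, inputs } = await proofStore.getProofInput(job.inputsUri);
    console.log(`job ${job.id} type=${ProvingRequestType[type]}`, inputs);
  }

  await queue.stop();
}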
*/ getRootRollupProof( - input: RootRollupInputs, + inputs: RootRollupInputs, signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.ROOT_ROLLUP, inputs: input }, signal, epochNumber); + return this.enqueue(ProvingRequestType.ROOT_ROLLUP, inputs, signal, epochNumber); } /** @@ -383,7 +382,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource signal?: AbortSignal, epochNumber?: number, ): Promise> { - return this.enqueue({ type: ProvingRequestType.PUBLIC_VM, inputs }, signal, epochNumber); + return this.enqueue(ProvingRequestType.PUBLIC_VM, inputs, signal, epochNumber); } /** diff --git a/yarn-project/prover-client/src/prover-agent/prover-agent.ts b/yarn-project/prover-client/src/prover-agent/prover-agent.ts index 50c1733652c..2b86450afbf 100644 --- a/yarn-project/prover-client/src/prover-agent/prover-agent.ts +++ b/yarn-project/prover-client/src/prover-agent/prover-agent.ts @@ -1,9 +1,9 @@ import { type ProverAgentApi, type ProvingJob, + type ProvingJobInputs, + type ProvingJobResultsMap, type ProvingJobSource, - type ProvingRequest, - type ProvingRequestResultFor, ProvingRequestType, type ServerCircuitProver, makeProvingRequestResult, @@ -12,6 +12,8 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; import { elapsed } from '@aztec/foundation/timer'; +import { InlineProofStore } from '../proving_broker/proof_store.js'; + const PRINT_THRESHOLD_NS = 6e10; // 60 seconds /** @@ -27,6 +29,7 @@ export class ProverAgent implements ProverAgentApi { } >(); private runningPromise?: RunningPromise; + private proofInputsDatabase = new InlineProofStore(); constructor( /** The prover implementation to defer jobs to */ @@ -101,12 +104,12 @@ export class ProverAgent implements ProverAgentApi { const promise = this.work(jobSource, job).finally(() => this.inFlightPromises.delete(job.id)); this.inFlightPromises.set(job.id, { id: job.id, - type: job.request.type, + type: job.type, promise, }); } catch (err) { this.log.warn( - `Error processing job! type=${ProvingRequestType[job.request.type]}: ${err}. ${(err as Error).stack}`, + `Error processing job! type=${ProvingRequestType[job.type]}: ${err}. 
${(err as Error).stack}`, ); } } catch (err) { @@ -130,28 +133,24 @@ export class ProverAgent implements ProverAgentApi { this.log.info('Agent stopped'); } - private async work( - jobSource: ProvingJobSource, - job: ProvingJob, - ): Promise { + private async work(jobSource: ProvingJobSource, job: ProvingJob): Promise { try { - this.log.debug(`Picked up proving job id=${job.id} type=${ProvingRequestType[job.request.type]}`); - const type: TRequest['type'] = job.request.type; - const [time, result] = await elapsed(this.getProof(job.request)); + this.log.debug(`Picked up proving job id=${job.id} type=${ProvingRequestType[job.type]}`); + const type = job.type; + const inputs = await this.proofInputsDatabase.getProofInput(job.inputsUri); + const [time, result] = await elapsed(this.getProof(inputs)); if (this.#isRunning()) { this.log.verbose(`Processed proving job id=${job.id} type=${ProvingRequestType[type]} duration=${time}ms`); await jobSource.resolveProvingJob(job.id, makeProvingRequestResult(type, result)); } else { this.log.verbose( - `Dropping proving job id=${job.id} type=${ - ProvingRequestType[job.request.type] - } duration=${time}ms: agent stopped`, + `Dropping proving job id=${job.id} type=${ProvingRequestType[job.type]} duration=${time}ms: agent stopped`, ); } } catch (err) { - const type = ProvingRequestType[job.request.type]; + const type = ProvingRequestType[job.type]; if (this.#isRunning()) { - if (job.request.type === ProvingRequestType.PUBLIC_VM && !process.env.AVM_PROVING_STRICT) { + if (job.type === ProvingRequestType.PUBLIC_VM && !process.env.AVM_PROVING_STRICT) { this.log.warn(`Expected error processing VM proving job id=${job.id} type=${type}: ${err}`); } else { this.log.error(`Error processing proving job id=${job.id} type=${type}: ${err}`, err); @@ -164,10 +163,7 @@ export class ProverAgent implements ProverAgentApi { } } - private getProof( - request: TRequest, - ): Promise['result']>; - private getProof(request: ProvingRequest): Promise['result']> { + private getProof(request: ProvingJobInputs): Promise { const { type, inputs } = request; switch (type) { case ProvingRequestType.PUBLIC_VM: { diff --git a/yarn-project/prover-client/src/proving_broker/caching_broker_facade.test.ts b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.test.ts new file mode 100644 index 00000000000..fa357379d90 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.test.ts @@ -0,0 +1,111 @@ +import { type ProvingJobProducer, ProvingRequestType, makePublicInputsAndRecursiveProof } from '@aztec/circuit-types'; +import { RECURSIVE_PROOF_LENGTH, VerificationKeyData, makeRecursiveProof } from '@aztec/circuits.js'; +import { makeBaseParityInputs, makeParityPublicInputs } from '@aztec/circuits.js/testing'; +import { AbortError } from '@aztec/foundation/error'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; + +import { jest } from '@jest/globals'; +import { type MockProxy, mock } from 'jest-mock-extended'; + +import { CachingBrokerFacade } from './caching_broker_facade.js'; +import { InlineProofStore } from './proof_store.js'; +import { InMemoryProverCache } from './prover_cache/memory.js'; + +describe('CachingBrokerFacade', () => { + let facade: CachingBrokerFacade; + let cache: InMemoryProverCache; + let proofStore: InlineProofStore; + let broker: MockProxy; + + beforeAll(() => { + jest.useFakeTimers(); + }); + + beforeEach(() => { + broker = mock({ + enqueueProvingJob: jest.fn(), + getProvingJobStatus: jest.fn(), + 
removeAndCancelProvingJob: jest.fn(), + waitForJobToSettle: jest.fn(), + }); + cache = new InMemoryProverCache(); + proofStore = new InlineProofStore(); + facade = new CachingBrokerFacade(broker, cache, proofStore); + }); + + it('marks job as in progress', async () => { + const controller = new AbortController(); + void facade.getBaseParityProof(makeBaseParityInputs(), controller.signal); + + await jest.advanceTimersToNextTimerAsync(); + + expect(broker.enqueueProvingJob).toHaveBeenCalled(); + const job = broker.enqueueProvingJob.mock.calls[0][0]; + + await expect(cache.getProvingJobStatus(job.id)).resolves.toEqual({ status: 'in-queue' }); + controller.abort(); + }); + + it('removes the cached value if a job fails to enqueue', async () => { + const { promise, reject } = promiseWithResolvers(); + broker.enqueueProvingJob.mockResolvedValue(promise); + + void facade.getBaseParityProof(makeBaseParityInputs()).catch(() => {}); + await jest.advanceTimersToNextTimerAsync(); + + const job = broker.enqueueProvingJob.mock.calls[0][0]; + await expect(cache.getProvingJobStatus(job.id)).resolves.toEqual({ status: 'in-queue' }); + + reject(new Error('Failed to enqueue job')); + + await jest.advanceTimersToNextTimerAsync(); + await expect(cache.getProvingJobStatus(job.id)).resolves.toEqual({ status: 'not-found' }); + }); + + it('awaits existing job if in progress', async () => { + const { promise, reject } = promiseWithResolvers(); + broker.enqueueProvingJob.mockResolvedValue(promise); + + const inputs = makeBaseParityInputs(); + void facade.getBaseParityProof(inputs).catch(() => {}); + await jest.advanceTimersToNextTimerAsync(); + expect(broker.enqueueProvingJob).toHaveBeenCalledTimes(1); + + void facade.getBaseParityProof(inputs).catch(() => {}); + await jest.advanceTimersToNextTimerAsync(); + expect(broker.enqueueProvingJob).toHaveBeenCalledTimes(1); + + reject(new AbortError('Job was cancelled')); + }); + + it('reuses already cached results', async () => { + const { promise, resolve } = promiseWithResolvers(); + broker.enqueueProvingJob.mockResolvedValue(Promise.resolve()); + broker.waitForJobToSettle.mockResolvedValue(promise); + + const inputs = makeBaseParityInputs(); + void facade.getBaseParityProof(inputs); + await jest.advanceTimersToNextTimerAsync(); + + expect(broker.enqueueProvingJob).toHaveBeenCalledTimes(1); + const job = broker.enqueueProvingJob.mock.calls[0][0]; + + const result = makePublicInputsAndRecursiveProof( + makeParityPublicInputs(), + makeRecursiveProof(RECURSIVE_PROOF_LENGTH), + VerificationKeyData.makeFakeHonk(), + ); + + const outputUri = await proofStore.saveProofOutput(job.id, ProvingRequestType.BASE_PARITY, result); + resolve({ + status: 'fulfilled', + value: outputUri, + }); + + await jest.advanceTimersToNextTimerAsync(); + await expect(cache.getProvingJobStatus(job.id)).resolves.toEqual({ status: 'fulfilled', value: outputUri }); + + await expect(facade.getBaseParityProof(inputs)).resolves.toEqual(result); + expect(broker.enqueueProvingJob).toHaveBeenCalledTimes(1); // job was only ever enqueued once + }); +}); diff --git a/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts new file mode 100644 index 00000000000..c6c0356709c --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts @@ -0,0 +1,295 @@ +import { + type ProofAndVerificationKey, + type ProverCache, + type ProvingJobId, + type ProvingJobInputsMap, + type ProvingJobProducer, + type 
ProvingJobResultsMap, + ProvingRequestType, + type PublicInputsAndRecursiveProof, + type ServerCircuitProver, +} from '@aztec/circuit-types'; +import type { + AVM_PROOF_LENGTH_IN_FIELDS, + AvmCircuitInputs, + BaseOrMergeRollupPublicInputs, + BaseParityInputs, + BlockMergeRollupInputs, + BlockRootOrBlockMergePublicInputs, + BlockRootRollupInputs, + EmptyBlockRootRollupInputs, + KernelCircuitPublicInputs, + MergeRollupInputs, + NESTED_RECURSIVE_PROOF_LENGTH, + ParityPublicInputs, + PrivateBaseRollupInputs, + PrivateKernelEmptyInputData, + PublicBaseRollupInputs, + RECURSIVE_PROOF_LENGTH, + RootParityInputs, + RootRollupInputs, + RootRollupPublicInputs, + TUBE_PROOF_LENGTH, + TubeInputs, +} from '@aztec/circuits.js'; +import { sha256 } from '@aztec/foundation/crypto'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { retryUntil } from '@aztec/foundation/retry'; + +import { InlineProofStore, type ProofStore } from './proof_store.js'; +import { InMemoryProverCache } from './prover_cache/memory.js'; + +/** + * A facade around a job broker that generates stable job ids and caches results + */ +export class CachingBrokerFacade implements ServerCircuitProver { + constructor( + private broker: ProvingJobProducer, + private cache: ProverCache = new InMemoryProverCache(), + private proofStore: ProofStore = new InlineProofStore(), + private log = createDebugLogger('aztec:prover-client:caching-prover-broker'), + ) {} + + private async enqueueAndWaitForJob( + id: ProvingJobId, + type: T, + inputs: ProvingJobInputsMap[T], + signal?: AbortSignal, + ): Promise { + // first try the cache + let jobEnqueued = false; + try { + const cachedResult = await this.cache.getProvingJobStatus(id); + if (cachedResult.status === 'fulfilled') { + const output = await this.proofStore.getProofOutput(cachedResult.value); + if (output.type === type) { + return output.result as ProvingJobResultsMap[T]; + } else { + this.log.warn(`Cached result type mismatch for job=${id}. Expected=${type} but got=${output.type}`); + } + } else if (cachedResult.status === 'rejected') { + // prefer returning a rejected promises so that we don't trigger the catch block below + return Promise.reject(new Error(cachedResult.reason)); + } else if (cachedResult.status === 'in-progress' || cachedResult.status === 'in-queue') { + jobEnqueued = true; + } else { + jobEnqueued = false; + } + } catch (err) { + this.log.warn(`Failed to get cached proving job id=${id}: ${err}. 
Re-running job`); + } + + if (!jobEnqueued) { + try { + await this.cache.setProvingJobStatus(id, { status: 'in-queue' }); + const inputsUri = await this.proofStore.saveProofInput(id, type, inputs); + await this.broker.enqueueProvingJob({ + id, + type, + inputsUri, + }); + } catch (err) { + await this.cache.setProvingJobStatus(id, { status: 'not-found' }); + throw err; + } + } + + // notify broker of cancelled job + const abortFn = async () => { + signal?.removeEventListener('abort', abortFn); + await this.broker.removeAndCancelProvingJob(id); + }; + + signal?.addEventListener('abort', abortFn); + + try { + // loop here until the job settles + // NOTE: this could also terminate because the job was cancelled through event listener above + const result = await retryUntil( + () => this.broker.waitForJobToSettle(id), + `Proving job=${id} type=${ProvingRequestType[type]}`, + 0, + 1, + ); + + try { + await this.cache.setProvingJobStatus(id, result); + } catch (err) { + this.log.warn(`Failed to cache proving job id=${id} resultStatus=${result.status}: ${err}`); + } + + if (result.status === 'fulfilled') { + const output = await this.proofStore.getProofOutput(result.value); + if (output.type === type) { + return output.result as ProvingJobResultsMap[T]; + } else { + return Promise.reject(new Error(`Unexpected proof type: ${output.type}. Expected: ${type}`)); + } + } else { + return Promise.reject(new Error(result.reason)); + } + } finally { + signal?.removeEventListener('abort', abortFn); + } + } + + getAvmProof( + inputs: AvmCircuitInputs, + signal?: AbortSignal, + _blockNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.PUBLIC_VM, inputs), + ProvingRequestType.PUBLIC_VM, + inputs, + signal, + ); + } + + getBaseParityProof( + inputs: BaseParityInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.BASE_PARITY, inputs), + ProvingRequestType.BASE_PARITY, + inputs, + signal, + ); + } + + getBlockMergeRollupProof( + input: BlockMergeRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.BLOCK_MERGE_ROLLUP, input), + ProvingRequestType.BLOCK_MERGE_ROLLUP, + input, + signal, + ); + } + + getBlockRootRollupProof( + input: BlockRootRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.BLOCK_ROOT_ROLLUP, input), + ProvingRequestType.BLOCK_ROOT_ROLLUP, + input, + signal, + ); + } + + getEmptyBlockRootRollupProof( + input: EmptyBlockRootRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP, input), + ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP, + input, + signal, + ); + } + + getEmptyPrivateKernelProof( + inputs: PrivateKernelEmptyInputData, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.PRIVATE_KERNEL_EMPTY, inputs), + ProvingRequestType.PRIVATE_KERNEL_EMPTY, + inputs, + signal, + ); + } + + getMergeRollupProof( + input: MergeRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.MERGE_ROLLUP, input), + ProvingRequestType.MERGE_ROLLUP, + input, + signal, + ); + } + 
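Editorial note, not part of the patch: the cache hits above only work because job ids are deterministic. The sketch below mirrors the generateId helper defined at the end of this class to show why two calls with identical inputs resolve to the same cache entry; it reuses the same testing helpers as the new spec file.

import { ProvingRequestType } from '@aztec/circuit-types';
import { makeBaseParityInputs } from '@aztec/circuits.js/testing';
import { sha256 } from '@aztec/foundation/crypto';

// Mirrors CachingBrokerFacade.generateId: `${type name}:${sha256(serialized inputs)}`.
function stableJobId(type: ProvingRequestType, inputs: { toBuffer(): Buffer }): string {
  return `${ProvingRequestType[type]}:${sha256(inputs.toBuffer()).toString('hex')}`;
}

const inputs = makeBaseParityInputs();
const first = stableJobId(ProvingRequestType.BASE_PARITY, inputs);
const second = stableJobId(ProvingRequestType.BASE_PARITY, inputs);
// first === second, so a re-submitted request maps onto the job already tracked in the cache.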
getPrivateBaseRollupProof( + baseRollupInput: PrivateBaseRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.PRIVATE_BASE_ROLLUP, baseRollupInput), + ProvingRequestType.PRIVATE_BASE_ROLLUP, + baseRollupInput, + signal, + ); + } + + getPublicBaseRollupProof( + inputs: PublicBaseRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.PUBLIC_BASE_ROLLUP, inputs), + ProvingRequestType.PUBLIC_BASE_ROLLUP, + inputs, + signal, + ); + } + + getRootParityProof( + inputs: RootParityInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.ROOT_PARITY, inputs), + ProvingRequestType.ROOT_PARITY, + inputs, + signal, + ); + } + + getRootRollupProof( + input: RootRollupInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.ROOT_ROLLUP, input), + ProvingRequestType.ROOT_ROLLUP, + input, + signal, + ); + } + + getTubeProof( + tubeInput: TubeInputs, + signal?: AbortSignal, + _epochNumber?: number, + ): Promise> { + return this.enqueueAndWaitForJob( + this.generateId(ProvingRequestType.TUBE_PROOF, tubeInput), + ProvingRequestType.TUBE_PROOF, + tubeInput, + signal, + ); + } + + private generateId(type: ProvingRequestType, inputs: { toBuffer(): Buffer }) { + const inputsHash = sha256(inputs.toBuffer()); + return `${ProvingRequestType[type]}:${inputsHash.toString('hex')}`; + } +} diff --git a/yarn-project/prover-client/src/proving_broker/factory.ts b/yarn-project/prover-client/src/proving_broker/factory.ts new file mode 100644 index 00000000000..02a5fcb314b --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/factory.ts @@ -0,0 +1,21 @@ +import { type ProverBrokerConfig } from '@aztec/circuit-types'; +import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; + +import { ProvingBroker } from './proving_broker.js'; +import { InMemoryBrokerDatabase } from './proving_broker_database/memory.js'; +import { KVBrokerDatabase } from './proving_broker_database/persisted.js'; + +export async function createAndStartProvingBroker(config: ProverBrokerConfig): Promise { + const database = config.proverBrokerDataDirectory + ? 
new KVBrokerDatabase(AztecLmdbStore.open(config.proverBrokerDataDirectory)) + : new InMemoryBrokerDatabase(); + + const broker = new ProvingBroker(database, { + jobTimeoutMs: config.proverBrokerJobTimeoutMs, + maxRetries: config.proverBrokerJobMaxRetries, + timeoutIntervalMs: config.proverBrokerPollIntervalMs, + }); + + await broker.start(); + return broker; +} diff --git a/yarn-project/prover-client/src/proving_broker/index.ts b/yarn-project/prover-client/src/proving_broker/index.ts new file mode 100644 index 00000000000..6770b1ea14e --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/index.ts @@ -0,0 +1,8 @@ +export * from './proving_agent.js'; +export * from './proving_broker.js'; +export * from './rpc.js'; +export * from './proving_broker_database.js'; +export * from './proving_broker_database/memory.js'; +export * from './proving_broker_database/persisted.js'; +export * from './proof_store.js'; +export * from './factory.js'; diff --git a/yarn-project/prover-client/src/proving_broker/proof_input_output_database.ts b/yarn-project/prover-client/src/proving_broker/proof_input_output_database.ts deleted file mode 100644 index f992946fbd3..00000000000 --- a/yarn-project/prover-client/src/proving_broker/proof_input_output_database.ts +++ /dev/null @@ -1,100 +0,0 @@ -import { - type ProvingRequestType, - V2ProofInput, - type V2ProofInputUri, - V2ProofOutput, - type V2ProofOutputUri, - type V2ProvingJobId, -} from '@aztec/circuit-types'; - -/** - * A database for storing proof inputs and outputs. - */ -export interface ProofInputOutputDatabase { - /** - * Save a proof input to the database. - * @param jobId - The ID of the job the proof input is associated with. - * @param type - The type of the proving request. - * @param proofInput - The proof input to save. - * @returns The URI of the saved proof input. - */ - saveProofInput(jobId: V2ProvingJobId, type: ProvingRequestType, proofInput: V2ProofInput): Promise; - - /** - * Save a proof output to the database. - * @param jobId - The ID of the job the proof input is associated with. - * @param type - The type of the proving request. - * @param proofOutput - The proof output to save. - * @returns The URI of the saved proof output. - */ - saveProofOutput( - jobId: V2ProvingJobId, - type: ProvingRequestType, - proofOutput: V2ProofOutput, - ): Promise; - - /** - * Retrieve a proof input from the database. - * @param uri - The URI of the proof input to retrieve. - * @returns The proof input. - */ - getProofInput(uri: V2ProofInputUri): Promise; - - /** - * Retrieve a proof output from the database. - * @param uri - The URI of the proof output to retrieve. - * @returns The proof output. - */ - getProofOutput(uri: V2ProofOutputUri): Promise; -} - -/** - * An implementation of a proof input/output database that stores data inline in the URI. 
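Usage sketch for the factory above, not part of the patch. The option values are illustrative, ProverBrokerConfig may declare more fields than the four the factory reads, and omitting proverBrokerDataDirectory selects the in-memory database.

import { type ProverBrokerConfig } from '@aztec/circuit-types';

import { createAndStartProvingBroker } from './factory.js';

async function startEphemeralBroker() {
  // proverBrokerDataDirectory is left out on purpose: no LMDB path -> InMemoryBrokerDatabase.
  const broker = await createAndStartProvingBroker({
    proverBrokerJobTimeoutMs: 30_000,
    proverBrokerJobMaxRetries: 3,
    proverBrokerPollIntervalMs: 1_000,
  } as ProverBrokerConfig);

  // createAndStartProvingBroker already called broker.start().
  return broker;
}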
- */ -export class InlineProofIODatabase implements ProofInputOutputDatabase { - private static readonly PREFIX = 'data:application/json;base64'; - private static readonly SEPARATOR = ','; - private static readonly BUFFER_ENCODING = 'base64url'; - - saveProofInput(_id: V2ProvingJobId, _type: ProvingRequestType, proofInput: V2ProofInput): Promise { - return Promise.resolve( - (InlineProofIODatabase.PREFIX + - InlineProofIODatabase.SEPARATOR + - Buffer.from(JSON.stringify(proofInput)).toString(InlineProofIODatabase.BUFFER_ENCODING)) as V2ProofInputUri, - ); - } - - saveProofOutput( - _id: V2ProvingJobId, - _type: ProvingRequestType, - proofOutput: V2ProofOutput, - ): Promise { - return Promise.resolve( - (InlineProofIODatabase.PREFIX + - InlineProofIODatabase.SEPARATOR + - Buffer.from(JSON.stringify(proofOutput)).toString(InlineProofIODatabase.BUFFER_ENCODING)) as V2ProofOutputUri, - ); - } - - getProofInput(uri: V2ProofInputUri): Promise { - const [prefix, data] = uri.split(','); - if (prefix !== InlineProofIODatabase.PREFIX) { - throw new Error('Invalid proof input URI: ' + prefix); - } - - return Promise.resolve( - V2ProofInput.parse(JSON.parse(Buffer.from(data, InlineProofIODatabase.BUFFER_ENCODING).toString())), - ); - } - - getProofOutput(uri: V2ProofOutputUri): Promise { - const [prefix, data] = uri.split(','); - if (prefix !== InlineProofIODatabase.PREFIX) { - throw new Error('Invalid proof output URI: ' + prefix); - } - - return Promise.resolve( - V2ProofOutput.parse(JSON.parse(Buffer.from(data, InlineProofIODatabase.BUFFER_ENCODING).toString())), - ); - } -} diff --git a/yarn-project/prover-client/src/proving_broker/proof_store.ts b/yarn-project/prover-client/src/proving_broker/proof_store.ts new file mode 100644 index 00000000000..b5cd17e3b83 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/proof_store.ts @@ -0,0 +1,103 @@ +import { + type ProofUri, + type ProvingJobId, + ProvingJobInputs, + type ProvingJobInputsMap, + ProvingJobResult, + type ProvingJobResultsMap, + type ProvingRequestType, +} from '@aztec/circuit-types'; + +/** + * A database for storing proof inputs and outputs. + */ +export interface ProofStore { + /** + * Save a proof input to the database. + * @param jobId - The ID of the job the proof input is associated with. + * @param type - The type of the proving request. + * @param inputs - The proof input to save. + * @returns The URI of the saved proof input. + */ + saveProofInput( + jobId: ProvingJobId, + type: T, + inputs: ProvingJobInputsMap[T], + ): Promise; + + /** + * Save a proof output to the database. + * @param jobId - The ID of the job the proof input is associated with. + * @param type - The type of the proving request. + * @param result - The proof output to save. + * @returns The URI of the saved proof output. + */ + saveProofOutput( + id: ProvingJobId, + type: T, + result: ProvingJobResultsMap[T], + ): Promise; + + /** + * Retrieve a proof input from the database. + * @param uri - The URI of the proof input to retrieve. + * @returns The proof input. + */ + getProofInput(uri: ProofUri): Promise; + + /** + * Retrieve a proof output from the database. + * @param uri - The URI of the proof output to retrieve. + * @returns The proof output. + */ + getProofOutput(uri: ProofUri): Promise; +} + +const PREFIX = 'data:application/json;base64'; +const SEPARATOR = ','; +const BUFFER_ENCODING = 'base64url'; + +/** + * An implementation of a proof input/output database that stores data inline in the URI. 
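Editorial sketch, not part of the patch, of the round-trip this interface and the InlineProofStore below provide: outputs are serialized into a base64url data: URI, so the returned ProofUri needs no external storage. The helpers are the same testing utilities the new specs in this patch use; the relative import path is assumed.

import { type ProvingJobId, ProvingRequestType, makePublicInputsAndRecursiveProof } from '@aztec/circuit-types';
import { RECURSIVE_PROOF_LENGTH, VerificationKeyData, makeRecursiveProof } from '@aztec/circuits.js';
import { makeParityPublicInputs } from '@aztec/circuits.js/testing';

import { InlineProofStore } from './proof_store.js';

async function roundTripBaseParityResult() {
  const store = new InlineProofStore();
  const result = makePublicInputsAndRecursiveProof(
    makeParityPublicInputs(),
    makeRecursiveProof(RECURSIVE_PROOF_LENGTH),
    VerificationKeyData.makeFakeHonk(),
  );

  // saveProofOutput encodes { type, result } as a data: URI...
  const uri = await store.saveProofOutput('example-job-id' as ProvingJobId, ProvingRequestType.BASE_PARITY, result);
  // ...and getProofOutput parses it back through the ProvingJobResult schema.
  const restored = await store.getProofOutput(uri);
  return restored.type === ProvingRequestType.BASE_PARITY ? restored.result : undefined;
}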
+ */ +export class InlineProofStore implements ProofStore { + saveProofInput( + _id: ProvingJobId, + type: T, + inputs: ProvingJobInputsMap[T], + ): Promise { + const jobInputs = { type, inputs } as ProvingJobInputs; + return Promise.resolve( + (PREFIX + SEPARATOR + Buffer.from(JSON.stringify(jobInputs)).toString(BUFFER_ENCODING)) as ProofUri, + ); + } + + saveProofOutput( + _id: ProvingJobId, + type: T, + result: ProvingJobResultsMap[T], + ): Promise { + const jobResult = { type, result } as ProvingJobResult; + return Promise.resolve( + (PREFIX + SEPARATOR + Buffer.from(JSON.stringify(jobResult)).toString(BUFFER_ENCODING)) as ProofUri, + ); + } + + getProofInput(uri: ProofUri): Promise { + const [prefix, data] = uri.split(SEPARATOR); + if (prefix !== PREFIX) { + throw new Error('Invalid proof input URI: ' + prefix); + } + + return Promise.resolve(ProvingJobInputs.parse(JSON.parse(Buffer.from(data, BUFFER_ENCODING).toString()))); + } + + getProofOutput(uri: ProofUri): Promise { + const [prefix, data] = uri.split(SEPARATOR); + if (prefix !== PREFIX) { + throw new Error('Invalid proof output URI: ' + prefix); + } + + return Promise.resolve(ProvingJobResult.parse(JSON.parse(Buffer.from(data, BUFFER_ENCODING).toString()))); + } +} diff --git a/yarn-project/prover-client/src/proving_broker/prover_cache/memory.ts b/yarn-project/prover-client/src/proving_broker/prover_cache/memory.ts new file mode 100644 index 00000000000..5e111f68a92 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/prover_cache/memory.ts @@ -0,0 +1,16 @@ +import type { ProverCache, ProvingJobStatus } from '@aztec/circuit-types'; + +export class InMemoryProverCache implements ProverCache { + private proofs: Record = {}; + + constructor() {} + + setProvingJobStatus(jobId: string, status: ProvingJobStatus): Promise { + this.proofs[jobId] = status; + return Promise.resolve(); + } + + getProvingJobStatus(jobId: string): Promise { + return Promise.resolve(this.proofs[jobId] ?? 
{ status: 'not-found' }); + } +} diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts b/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts index 07b2208add1..cc49057ab6d 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts @@ -1,12 +1,12 @@ import { + type ProofUri, ProvingError, + type ProvingJob, + type ProvingJobConsumer, + type ProvingJobId, + type ProvingJobInputs, ProvingRequestType, type PublicInputsAndRecursiveProof, - type V2ProofInput, - type V2ProofInputUri, - type V2ProofOutputUri, - type V2ProvingJob, - type V2ProvingJobId, makePublicInputsAndRecursiveProof, } from '@aztec/circuit-types'; import { @@ -23,15 +23,14 @@ import { promiseWithResolvers } from '@aztec/foundation/promise'; import { jest } from '@jest/globals'; import { MockProver } from '../test/mock_prover.js'; -import { type ProofInputOutputDatabase } from './proof_input_output_database.js'; +import { type ProofStore } from './proof_store.js'; import { ProvingAgent } from './proving_agent.js'; -import { type ProvingJobConsumer } from './proving_broker_interface.js'; describe('ProvingAgent', () => { let prover: MockProver; let jobSource: jest.Mocked; let agent: ProvingAgent; - let proofDB: jest.Mocked; + let proofDB: jest.Mocked; const agentPollIntervalMs = 1000; beforeEach(() => { @@ -101,16 +100,16 @@ describe('ProvingAgent', () => { const { job, time, inputs } = makeBaseParityJob(); const result = makeBaseParityResult(); - jest.spyOn(prover, 'getBaseParityProof').mockResolvedValueOnce(result.value); + jest.spyOn(prover, 'getBaseParityProof').mockResolvedValueOnce(result); jobSource.getProvingJob.mockResolvedValueOnce({ job, time }); proofDB.getProofInput.mockResolvedValueOnce(inputs); - proofDB.saveProofOutput.mockResolvedValueOnce('output-uri' as V2ProofOutputUri); + proofDB.saveProofOutput.mockResolvedValueOnce('output-uri' as ProofUri); agent.start(); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); - expect(proofDB.saveProofOutput).toHaveBeenCalledWith(result); + expect(proofDB.saveProofOutput).toHaveBeenCalledWith(job.id, job.type, result); expect(jobSource.reportProvingJobSuccess).toHaveBeenCalledWith(job.id, 'output-uri'); }); @@ -123,7 +122,7 @@ describe('ProvingAgent', () => { agent.start(); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); - expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(job.id, new Error('test error'), false); + expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(job.id, 'test error', false); }); it('sets the retry flag on when reporting an error', async () => { @@ -136,7 +135,7 @@ describe('ProvingAgent', () => { agent.start(); await jest.advanceTimersByTimeAsync(agentPollIntervalMs); - expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(job.id, err, true); + expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(job.id, err.message, true); }); it('reports jobs in progress to the job source', async () => { @@ -159,7 +158,7 @@ describe('ProvingAgent', () => { allowList: [ProvingRequestType.BASE_PARITY], }); - resolve(makeBaseParityResult().value); + resolve(makeBaseParityResult()); }); it('abandons jobs if told so by the source', async () => { @@ -220,28 +219,38 @@ describe('ProvingAgent', () => { }, ); - secondProof.resolve(makeBaseParityResult().value); + secondProof.resolve(makeBaseParityResult()); }); - function makeBaseParityJob(): { job: V2ProvingJob; time: number; inputs: 
V2ProofInput } { + it('reports an error if inputs cannot be loaded', async () => { + const { job, time } = makeBaseParityJob(); + jobSource.getProvingJob.mockResolvedValueOnce({ job, time }); + proofDB.getProofInput.mockRejectedValueOnce(new Error('Failed to load proof inputs')); + + agent.start(); + + await jest.advanceTimersByTimeAsync(agentPollIntervalMs); + expect(jobSource.reportProvingJobError).toHaveBeenCalledWith(job.id, 'Failed to load proof inputs', true); + }); + + function makeBaseParityJob(): { job: ProvingJob; time: number; inputs: ProvingJobInputs } { const time = jest.now(); - const inputs: V2ProofInput = { type: ProvingRequestType.BASE_PARITY, value: makeBaseParityInputs() }; - const job: V2ProvingJob = { - id: randomBytes(8).toString('hex') as V2ProvingJobId, + const inputs: ProvingJobInputs = { type: ProvingRequestType.BASE_PARITY, inputs: makeBaseParityInputs() }; + const job: ProvingJob = { + id: randomBytes(8).toString('hex') as ProvingJobId, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: randomBytes(8).toString('hex') as V2ProofInputUri, + inputsUri: randomBytes(8).toString('hex') as ProofUri, }; return { job, time, inputs }; } function makeBaseParityResult() { - const value = makePublicInputsAndRecursiveProof( + return makePublicInputsAndRecursiveProof( makeParityPublicInputs(), makeRecursiveProof(RECURSIVE_PROOF_LENGTH), VerificationKeyData.makeFakeHonk(), ); - return { type: ProvingRequestType.BASE_PARITY, value }; } }); diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent.ts b/yarn-project/prover-client/src/proving_broker/proving_agent.ts index 5857b6bb887..01db71f9e2e 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_agent.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_agent.ts @@ -1,18 +1,19 @@ import { ProvingError, + type ProvingJob, + type ProvingJobConsumer, + type ProvingJobId, + type ProvingJobInputs, + type ProvingJobResultsMap, ProvingRequestType, type ServerCircuitProver, - type V2ProofOutput, - type V2ProvingJob, - type V2ProvingJobId, } from '@aztec/circuit-types'; import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; -import { type ProofInputOutputDatabase } from './proof_input_output_database.js'; -import { type ProvingJobConsumer } from './proving_broker_interface.js'; -import { ProvingJobController, ProvingJobStatus } from './proving_job_controller.js'; +import { type ProofStore } from './proof_store.js'; +import { ProvingJobController, ProvingJobControllerStatus } from './proving_job_controller.js'; /** * A helper class that encapsulates a circuit prover and connects it to a job source. 
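Wiring sketch, not part of the patch: connecting an agent to an in-process broker and a shared proof store. MockProver is the test double used by the specs in this patch; a real deployment would supply an actual ServerCircuitProver and would typically reach the broker over the RPC layer exported from this directory. Passing the ProvingBroker directly as the agent's ProvingJobConsumer is an assumption that mirrors how these tests drive it.

import { ProvingRequestType } from '@aztec/circuit-types';

import { MockProver } from '../test/mock_prover.js';
import { InlineProofStore } from './proof_store.js';
import { ProvingAgent } from './proving_agent.js';
import { ProvingBroker } from './proving_broker.js';
import { InMemoryBrokerDatabase } from './proving_broker_database/memory.js';

async function startLocalBaseParityAgent() {
  const broker = new ProvingBroker(new InMemoryBrokerDatabase());
  await broker.start();

  // Constructor order follows this patch: consumer, proof store, prover, allow list, poll interval.
  const agent = new ProvingAgent(broker, new InlineProofStore(), new MockProver(), [ProvingRequestType.BASE_PARITY], 1000);
  agent.start();

  return { broker, agent };
}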
@@ -23,13 +24,13 @@ export class ProvingAgent { constructor( /** The source of proving jobs */ - private jobSource: ProvingJobConsumer, + private broker: ProvingJobConsumer, /** Database holding proof inputs and outputs */ - private proofInputOutputDatabase: ProofInputOutputDatabase, + private proofStore: ProofStore, /** The prover implementation to defer jobs to */ private circuitProver: ServerCircuitProver, /** Optional list of allowed proof types to build */ - private proofAllowList?: Array, + private proofAllowList: Array = [], /** How long to wait between jobs */ private pollIntervalMs = 1000, name = randomBytes(4).toString('hex'), @@ -61,15 +62,15 @@ export class ProvingAgent { // (1) either do a heartbeat, telling the broker that we're working // (2) get a new job // If during (1) the broker returns a new job that means we can cancel the current job and start the new one - let maybeJob: { job: V2ProvingJob; time: number } | undefined; - if (this.currentJobController?.getStatus() === ProvingJobStatus.PROVING) { - maybeJob = await this.jobSource.reportProvingJobProgress( + let maybeJob: { job: ProvingJob; time: number } | undefined; + if (this.currentJobController?.getStatus() === ProvingJobControllerStatus.PROVING) { + maybeJob = await this.broker.reportProvingJobProgress( this.currentJobController.getJobId(), this.currentJobController.getStartedAt(), { allowList: this.proofAllowList }, ); } else { - maybeJob = await this.jobSource.getProvingJob({ allowList: this.proofAllowList }); + maybeJob = await this.broker.getProvingJob({ allowList: this.proofAllowList }); } if (!maybeJob) { @@ -78,14 +79,20 @@ export class ProvingAgent { let abortedProofJobId: string = ''; let abortedProofName: string = ''; - if (this.currentJobController?.getStatus() === ProvingJobStatus.PROVING) { + if (this.currentJobController?.getStatus() === ProvingJobControllerStatus.PROVING) { abortedProofJobId = this.currentJobController.getJobId(); abortedProofName = this.currentJobController.getProofTypeName(); this.currentJobController?.abort(); } const { job, time } = maybeJob; - const inputs = await this.proofInputOutputDatabase.getProofInput(job.inputs); + let inputs: ProvingJobInputs; + try { + inputs = await this.proofStore.getProofInput(job.inputsUri); + } catch (err) { + await this.broker.reportProvingJobError(job.id, 'Failed to load proof inputs', true); + return; + } this.currentJobController = new ProvingJobController( job.id, @@ -98,13 +105,13 @@ export class ProvingAgent { if (abortedProofJobId) { this.log.info( `Aborting job id=${abortedProofJobId} type=${abortedProofName} to start new job id=${this.currentJobController.getJobId()} type=${this.currentJobController.getProofTypeName()} inputsUri=${truncateString( - job.inputs, + job.inputsUri, )}`, ); } else { this.log.info( `Starting job id=${this.currentJobController.getJobId()} type=${this.currentJobController.getProofTypeName()} inputsUri=${truncateString( - job.inputs, + job.inputsUri, )}`, ); } @@ -115,22 +122,22 @@ export class ProvingAgent { } }; - handleJobResult = async ( - jobId: V2ProvingJobId, - type: ProvingRequestType, + handleJobResult = async ( + jobId: ProvingJobId, + type: T, err: Error | undefined, - result: V2ProofOutput | undefined, + result: ProvingJobResultsMap[T] | undefined, ) => { if (err) { const retry = err.name === ProvingError.NAME ? 
(err as ProvingError).retry : false; this.log.info(`Job id=${jobId} type=${ProvingRequestType[type]} failed err=${err.message} retry=${retry}`); - return this.jobSource.reportProvingJobError(jobId, err, retry); + return this.broker.reportProvingJobError(jobId, err.message, retry); } else if (result) { - const outputUri = await this.proofInputOutputDatabase.saveProofOutput(jobId, type, result); + const outputUri = await this.proofStore.saveProofOutput(jobId, type, result); this.log.info( `Job id=${jobId} type=${ProvingRequestType[type]} completed outputUri=${truncateString(outputUri)}`, ); - return this.jobSource.reportProvingJobSuccess(jobId, outputUri); + return this.broker.reportProvingJobSuccess(jobId, outputUri); } }; } diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts b/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts index 4414a5eaa7a..543843a6e15 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts @@ -1,49 +1,43 @@ -import { - ProvingRequestType, - type V2ProofInputUri, - type V2ProofOutputUri, - type V2ProvingJob, - type V2ProvingJobId, -} from '@aztec/circuit-types'; +import { type ProofUri, type ProvingJob, type ProvingJobId, ProvingRequestType } from '@aztec/circuit-types'; import { randomBytes } from '@aztec/foundation/crypto'; import { openTmpStore } from '@aztec/kv-store/utils'; import { jest } from '@jest/globals'; import { ProvingBroker } from './proving_broker.js'; -import { type ProvingJobDatabase } from './proving_job_database.js'; -import { InMemoryDatabase } from './proving_job_database/memory.js'; -import { PersistedProvingJobDatabase } from './proving_job_database/persisted.js'; +import { type ProvingBrokerDatabase } from './proving_broker_database.js'; +import { InMemoryBrokerDatabase } from './proving_broker_database/memory.js'; +import { KVBrokerDatabase } from './proving_broker_database/persisted.js'; beforeAll(() => { jest.useFakeTimers(); }); describe.each([ - () => ({ database: new InMemoryDatabase(), cleanup: undefined }), + () => ({ database: new InMemoryBrokerDatabase(), cleanup: undefined }), () => { const store = openTmpStore(true); - const database = new PersistedProvingJobDatabase(store); + const database = new KVBrokerDatabase(store); const cleanup = () => store.close(); return { database, cleanup }; }, ])('ProvingBroker', createDb => { let broker: ProvingBroker; - let jobTimeoutSec: number; + let jobTimeoutMs: number; let maxRetries: number; - let database: ProvingJobDatabase; + let database: ProvingBrokerDatabase; let cleanup: undefined | (() => Promise | void); const now = () => Math.floor(Date.now() / 1000); beforeEach(() => { - jobTimeoutSec = 10; + jobTimeoutMs = 10_000; maxRetries = 2; ({ database, cleanup } = createDb()); broker = new ProvingBroker(database, { - jobTimeoutSec: jobTimeoutSec, - timeoutIntervalSec: jobTimeoutSec / 4, + jobTimeoutMs, + timeoutIntervalMs: jobTimeoutMs / 4, maxRetries, }); }); @@ -69,7 +63,7 @@ describe.each([ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); expect(await broker.getProvingJobStatus(id)).toEqual({ status: 'in-queue' }); @@ -78,17 +72,17 @@ describe.each([ id: id2, blockNumber: 1, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); expect(await broker.getProvingJobStatus(id2)).toEqual({ status: 'in-queue' }); }); it('ignores 
duplicate jobs', async () => { - const provingJob: V2ProvingJob = { + const provingJob: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob); @@ -102,14 +96,14 @@ describe.each([ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await expect( broker.enqueueProvingJob({ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }), ).rejects.toThrow('Duplicate proving job ID'); }); @@ -125,7 +119,7 @@ describe.each([ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await expect(broker.getProvingJobStatus(id)).resolves.toEqual({ status: 'in-queue' }); @@ -140,7 +134,7 @@ describe.each([ id, blockNumber: 1, type: ProvingRequestType.BASE_PARITY, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await expect(broker.getProvingJobStatus(id)).resolves.toEqual({ status: 'in-queue' }); await broker.getProvingJob(); @@ -150,11 +144,11 @@ describe.each([ }); it('returns job result if successful', async () => { - const provingJob: V2ProvingJob = { + const provingJob: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob); @@ -162,23 +156,23 @@ describe.each([ await broker.reportProvingJobSuccess(provingJob.id, value); const status = await broker.getProvingJobStatus(provingJob.id); - expect(status).toEqual({ status: 'resolved', value }); + expect(status).toEqual({ status: 'fulfilled', value }); }); it('returns job error if failed', async () => { - const provingJob: V2ProvingJob = { + const provingJob: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob); - const error = new Error('test error'); + const error = 'test error'; await broker.reportProvingJobError(provingJob.id, error); const status = await broker.getProvingJobStatus(provingJob.id); - expect(status).toEqual({ status: 'rejected', error: String(error) }); + expect(status).toEqual({ status: 'rejected', reason: String(error) }); }); }); @@ -197,25 +191,25 @@ describe.each([ }); it('returns jobs in priority order', async () => { - const provingJob1: V2ProvingJob = { + const provingJob1: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; - const provingJob2: V2ProvingJob = { + const provingJob2: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; - const provingJob3: V2ProvingJob = { + const provingJob3: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 3, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob2); @@ -230,7 +224,7 @@ describe.each([ id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await expect( @@ -244,7 +238,7 @@ describe.each([ id: baseParity1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: 
makeInputsUri(), }); const baseRollup1 = makeProvingJobId(); @@ -252,7 +246,7 @@ describe.each([ id: baseRollup1, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); const baseRollup2 = makeProvingJobId(); @@ -260,7 +254,7 @@ describe.each([ id: baseRollup2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); const rootParity1 = makeProvingJobId(); @@ -268,7 +262,7 @@ describe.each([ id: rootParity1, type: ProvingRequestType.ROOT_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await getAndAssertNextJobId(baseParity1, ProvingRequestType.BASE_PARITY); @@ -280,7 +274,7 @@ describe.each([ id: baseParity1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); const baseRollup1 = makeProvingJobId(); @@ -288,7 +282,7 @@ describe.each([ id: baseRollup1, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); const baseRollup2 = makeProvingJobId(); @@ -296,7 +290,7 @@ describe.each([ id: baseRollup2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); const rootParity1 = makeProvingJobId(); @@ -304,7 +298,7 @@ describe.each([ id: rootParity1, type: ProvingRequestType.ROOT_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await getAndAssertNextJobId( @@ -321,7 +315,7 @@ describe.each([ id: baseParity1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); const baseRollup1 = makeProvingJobId(); @@ -329,7 +323,7 @@ describe.each([ id: baseRollup1, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); const baseRollup2 = makeProvingJobId(); @@ -337,7 +331,7 @@ describe.each([ id: baseRollup2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); const rootParity1 = makeProvingJobId(); @@ -345,7 +339,7 @@ describe.each([ id: rootParity1, type: ProvingRequestType.ROOT_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await getAndAssertNextJobId(baseRollup1); @@ -357,7 +351,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await broker.getProvingJob(); await assertJobStatus(id, 'in-progress'); @@ -369,7 +363,7 @@ describe.each([ id: id2, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await expect( broker.reportProvingJobProgress(id, now(), { allowList: [ProvingRequestType.BASE_PARITY] }), @@ -378,18 +372,18 @@ describe.each([ it('returns a new job if job is already in progress elsewhere', async () => { // this test simulates the broker crashing and when it comes back online it has two agents working the same job - const job1: V2ProvingJob = { + const job1: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; - const job2: V2ProvingJob = { + const job2: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; await 
broker.enqueueProvingJob(job1); @@ -402,7 +396,7 @@ describe.each([ expect(firstAgentJob).toEqual(job1); await assertJobStatus(job1.id, 'in-progress'); - await jest.advanceTimersByTimeAsync(jobTimeoutSec / 2); + await jest.advanceTimersByTimeAsync(jobTimeoutMs / 2); await expect( broker.reportProvingJobProgress(job1.id, firstAgentStartedAt, { allowList: [ProvingRequestType.BASE_PARITY], @@ -446,18 +440,18 @@ describe.each([ it('avoids sending the same job to a new agent after a restart', async () => { // this test simulates the broker crashing and when it comes back online it has two agents working the same job - const job1: V2ProvingJob = { + const job1: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; - const job2: V2ProvingJob = { + const job2: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(job1); @@ -501,18 +495,18 @@ describe.each([ it('avoids sending a completed job to a new agent after a restart', async () => { // this test simulates the broker crashing and when it comes back online it has two agents working the same job - const job1: V2ProvingJob = { + const job1: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; - const job2: V2ProvingJob = { + const job2: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(job1); @@ -525,7 +519,7 @@ describe.each([ await broker.stop(); // fake some time passing while the broker restarts - await jest.advanceTimersByTimeAsync(100 * jobTimeoutSec * 1000); + await jest.advanceTimersByTimeAsync(100 * jobTimeoutMs); broker = new ProvingBroker(database); await broker.start(); @@ -535,12 +529,12 @@ describe.each([ // inform the agent of the job completion await expect(broker.reportProvingJobSuccess(job1.id, makeOutputsUri())).resolves.toBeUndefined(); - await assertJobStatus(job1.id, 'resolved'); + await assertJobStatus(job1.id, 'fulfilled'); // make sure the the broker sends the next job to the agent await getAndAssertNextJobId(job2.id); - await assertJobStatus(job1.id, 'resolved'); + await assertJobStatus(job1.id, 'fulfilled'); await assertJobStatus(job2.id, 'in-progress'); }); @@ -551,23 +545,23 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await broker.enqueueProvingJob({ id: id2, type: ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await getAndAssertNextJobId(id1); await assertJobStatus(id1, 'in-progress'); await broker.reportProvingJobSuccess(id1, makeOutputsUri()); - await assertJobStatus(id1, 'resolved'); + await assertJobStatus(id1, 'fulfilled'); await getAndAssertNextJobId(id2); await assertJobStatus(id2, 'in-progress'); - await broker.reportProvingJobError(id2, new Error('test error')); + await broker.reportProvingJobError(id2, 'test error'); await assertJobStatus(id2, 'rejected'); }); @@ -578,26 +572,26 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await broker.enqueueProvingJob({ id: id2, type: 
ProvingRequestType.BASE_PARITY, blockNumber: 2, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await broker.reportProvingJobSuccess(id1, makeOutputsUri()); - await assertJobStatus(id1, 'resolved'); + await assertJobStatus(id1, 'fulfilled'); - await broker.reportProvingJobError(id2, new Error('test error')); + await broker.reportProvingJobError(id2, 'test error'); await assertJobStatus(id2, 'rejected'); }); it('ignores reported job error if unknown job', async () => { const id = makeProvingJobId(); await assertJobStatus(id, 'not-found'); - await broker.reportProvingJobError(id, new Error('test error')); + await broker.reportProvingJobError(id, 'test error'); await assertJobStatus(id, 'not-found'); }); @@ -624,7 +618,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await assertJobStatus(id, 'in-queue'); @@ -638,7 +632,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await assertJobStatus(id, 'in-queue'); @@ -646,7 +640,7 @@ describe.each([ await assertJobStatus(id, 'in-progress'); // advance time so job times out because of no heartbeats - await jest.advanceTimersByTimeAsync(jobTimeoutSec * 1000); + await jest.advanceTimersByTimeAsync(jobTimeoutMs); // should be back in the queue now await assertJobStatus(id, 'in-queue'); @@ -658,7 +652,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await assertJobStatus(id, 'in-queue'); @@ -667,7 +661,7 @@ describe.each([ await assertJobStatus(id, 'in-progress'); // advance the time slightly, not enough for the request to timeout - await jest.advanceTimersByTimeAsync((jobTimeoutSec * 1000) / 2); + await jest.advanceTimersByTimeAsync(jobTimeoutMs / 2); await assertJobStatus(id, 'in-progress'); @@ -675,24 +669,24 @@ describe.each([ await broker.reportProvingJobProgress(id, time); // advance the time again - await jest.advanceTimersByTimeAsync((jobTimeoutSec * 1000) / 2); + await jest.advanceTimersByTimeAsync(jobTimeoutMs / 2); // should still be our request to process await assertJobStatus(id, 'in-progress'); // advance the time again and lose the request - await jest.advanceTimersByTimeAsync(jobTimeoutSec * 1000); + await jest.advanceTimersByTimeAsync(jobTimeoutMs); await assertJobStatus(id, 'in-queue'); }); }); describe('Retries', () => { it('retries jobs', async () => { - const provingJob: V2ProvingJob = { + const provingJob: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; await broker.enqueueProvingJob(provingJob); @@ -707,7 +701,7 @@ describe.each([ status: 'in-progress', }); - await broker.reportProvingJobError(provingJob.id, new Error('test error'), true); + await broker.reportProvingJobError(provingJob.id, 'test error', true); await expect(broker.getProvingJobStatus(provingJob.id)).resolves.toEqual({ status: 'in-queue', @@ -720,19 +714,19 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); for (let i = 0; i < maxRetries; i++) { await assertJobStatus(id, 'in-queue'); await getAndAssertNextJobId(id); await assertJobStatus(id, 'in-progress'); - await broker.reportProvingJobError(id, new Error('test error'), true); + await broker.reportProvingJobError(id, 'test error', true); } await 
expect(broker.getProvingJobStatus(id)).resolves.toEqual({ status: 'rejected', - error: String(new Error('test error')), + reason: 'test error', }); }); @@ -742,15 +736,15 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await getAndAssertNextJobId(id); await assertJobStatus(id, 'in-progress'); - await broker.reportProvingJobError(id, new Error('test error'), false); + await broker.reportProvingJobError(id, 'test error', false); await expect(broker.getProvingJobStatus(id)).resolves.toEqual({ status: 'rejected', - error: String(new Error('test error')), + reason: 'test error', }); }); }); @@ -767,7 +761,7 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); const id2 = makeProvingJobId(); @@ -775,7 +769,7 @@ describe.each([ id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await broker.start(); @@ -788,7 +782,7 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: expect.any(String), + inputsUri: expect.any(String), }, time: expect.any(Number), }); @@ -798,7 +792,7 @@ describe.each([ id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: expect.any(String), + inputsUri: expect.any(String), }, time: expect.any(Number), }); @@ -818,7 +812,7 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); const id2 = makeProvingJobId(); @@ -826,7 +820,7 @@ describe.each([ id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await database.setProvingJobResult(id1, makeOutputsUri()); @@ -835,12 +829,12 @@ describe.each([ await broker.start(); await expect(broker.getProvingJobStatus(id1)).resolves.toEqual({ - status: 'resolved', + status: 'fulfilled', value: expect.any(String), }); await expect(broker.getProvingJobStatus(id2)).resolves.toEqual({ - status: 'resolved', + status: 'fulfilled', value: expect.any(String), }); }); @@ -852,7 +846,7 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await database.setProvingJobResult(id1, makeOutputsUri()); @@ -861,12 +855,12 @@ describe.each([ id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await broker.start(); - await assertJobStatus(id1, 'resolved'); + await assertJobStatus(id1, 'fulfilled'); await assertJobStatus(id2, 'in-queue'); await getAndAssertNextJobId(id2); }); @@ -878,7 +872,7 @@ describe.each([ id: id1, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await database.setProvingJobResult(id1, makeOutputsUri()); @@ -887,12 +881,12 @@ describe.each([ id: id2, type: ProvingRequestType.PRIVATE_BASE_ROLLUP, blockNumber: 2, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await broker.start(); - await assertJobStatus(id1, 'resolved'); + await assertJobStatus(id1, 'fulfilled'); await assertJobStatus(id2, 'in-queue'); jest.spyOn(database, 'deleteProvingJobAndResult'); @@ -911,11 +905,11 @@ describe.each([ it('saves job when enqueued', async () => { await broker.start(); - const job: V2ProvingJob = { + const job: ProvingJob = { id: 
makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; jest.spyOn(database, 'addProvingJob'); @@ -934,7 +928,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }), ).rejects.toThrow(new Error('db error')); await assertJobStatus(id, 'not-found'); @@ -943,18 +937,18 @@ describe.each([ it('saves job result', async () => { await broker.start(); - const job: V2ProvingJob = { + const job: ProvingJob = { id: makeProvingJobId(), type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }; jest.spyOn(database, 'setProvingJobResult'); await broker.enqueueProvingJob(job); await broker.reportProvingJobSuccess(job.id, makeOutputsUri()); - await assertJobStatus(job.id, 'resolved'); + await assertJobStatus(job.id, 'fulfilled'); expect(database.setProvingJobResult).toHaveBeenCalledWith(job.id, expect.any(String)); }); @@ -966,7 +960,7 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); await expect(broker.reportProvingJobSuccess(id, makeOutputsUri())).rejects.toThrow(new Error('db error')); await assertJobStatus(id, 'in-queue'); @@ -982,10 +976,10 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); - const error = new Error('test error'); + const error = 'test error'; await broker.reportProvingJobError(id, error); await assertJobStatus(id, 'rejected'); expect(database.setProvingJobError).toHaveBeenCalledWith(id, error); @@ -999,9 +993,9 @@ describe.each([ id, type: ProvingRequestType.BASE_PARITY, blockNumber: 1, - inputs: makeInputsUri(), + inputsUri: makeInputsUri(), }); - await expect(broker.reportProvingJobError(id, new Error())).rejects.toThrow(new Error('db error')); + await expect(broker.reportProvingJobError(id, 'test error')).rejects.toThrow(new Error('db error')); await assertJobStatus(id, 'in-queue'); }); @@ -1025,32 +1019,32 @@ describe.each([ jest.spyOn(database, 'setProvingJobError'); jest.spyOn(database, 'addProvingJob'); - await broker.reportProvingJobError(id, new Error('test error')); + await broker.reportProvingJobError(id, 'test error'); expect(database.setProvingJobError).not.toHaveBeenCalled(); expect(database.addProvingJob).not.toHaveBeenCalled(); }); }); - async function assertJobStatus(id: V2ProvingJobId, status: string) { + async function assertJobStatus(id: ProvingJobId, status: string) { await expect(broker.getProvingJobStatus(id)).resolves.toEqual(expect.objectContaining({ status })); } - async function getAndAssertNextJobId(id: V2ProvingJobId, ...allowList: ProvingRequestType[]) { + async function getAndAssertNextJobId(id: ProvingJobId, ...allowList: ProvingRequestType[]) { await expect(broker.getProvingJob({ allowList })).resolves.toEqual( expect.objectContaining({ job: expect.objectContaining({ id }) }), ); } }); -function makeProvingJobId(): V2ProvingJobId { - return randomBytes(8).toString('hex') as V2ProvingJobId; +function makeProvingJobId(): ProvingJobId { + return randomBytes(8).toString('hex') as ProvingJobId; } -function makeInputsUri(): V2ProofInputUri { - return randomBytes(8).toString('hex') as V2ProofInputUri; +function makeInputsUri(): ProofUri { + return randomBytes(8).toString('hex') as ProofUri; } -function makeOutputsUri(): V2ProofOutputUri { - return 
randomBytes(8).toString('hex') as V2ProofOutputUri; +function makeOutputsUri(): ProofUri { + return randomBytes(8).toString('hex') as ProofUri; } diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker.ts b/yarn-project/prover-client/src/proving_broker/proving_broker.ts index 017d4b6ae48..d66346a3f81 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker.ts @@ -1,29 +1,31 @@ import { + type ProofUri, + type ProvingJob, + type ProvingJobConsumer, + type ProvingJobFilter, + type ProvingJobId, + type ProvingJobProducer, + type ProvingJobSettledResult, + type ProvingJobStatus, ProvingRequestType, - type V2ProofOutputUri, - type V2ProvingJob, - type V2ProvingJobId, - type V2ProvingJobResult, - type V2ProvingJobStatus, } from '@aztec/circuit-types'; import { createDebugLogger } from '@aztec/foundation/log'; -import { RunningPromise } from '@aztec/foundation/promise'; +import { type PromiseWithResolvers, RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise'; import { PriorityMemoryQueue } from '@aztec/foundation/queue'; import assert from 'assert'; -import type { ProvingJobConsumer, ProvingJobFilter, ProvingJobProducer } from './proving_broker_interface.js'; -import { type ProvingJobDatabase } from './proving_job_database.js'; +import { type ProvingBrokerDatabase } from './proving_broker_database.js'; type InProgressMetadata = { - id: V2ProvingJobId; + id: ProvingJobId; startedAt: number; lastUpdatedAt: number; }; type ProofRequestBrokerConfig = { - timeoutIntervalSec?: number; - jobTimeoutSec?: number; + timeoutIntervalMs?: number; + jobTimeoutMs?: number; maxRetries?: number; }; @@ -33,50 +35,53 @@ type ProofRequestBrokerConfig = { */ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { private queues: ProvingQueues = { - [ProvingRequestType.PUBLIC_VM]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.TUBE_PROOF]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.PUBLIC_VM]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.TUBE_PROOF]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.PRIVATE_BASE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.PUBLIC_BASE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.MERGE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.ROOT_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.PRIVATE_BASE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.PUBLIC_BASE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.MERGE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.ROOT_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.BLOCK_MERGE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.BLOCK_ROOT_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.BLOCK_MERGE_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.BLOCK_ROOT_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), 
+ [ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.BASE_PARITY]: new PriorityMemoryQueue(provingJobComparator), - [ProvingRequestType.ROOT_PARITY]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.BASE_PARITY]: new PriorityMemoryQueue(provingJobComparator), + [ProvingRequestType.ROOT_PARITY]: new PriorityMemoryQueue(provingJobComparator), }; // holds a copy of the database in memory in order to quickly fulfill requests // this is fine because this broker is the only one that can modify the database - private jobsCache = new Map(); + private jobsCache = new Map(); // as above, but for results - private resultsCache = new Map(); + private resultsCache = new Map(); // keeps track of which jobs are currently being processed // in the event of a crash this information is lost, but that's ok // the next time the broker starts it will recreate jobsCache and still // accept results from the workers - private inProgress = new Map(); + private inProgress = new Map(); // keep track of which proving job has been retried - private retries = new Map(); + private retries = new Map(); + + // a map of promises that will be resolved when a job is settled + private promises = new Map>(); private timeoutPromise: RunningPromise; private timeSource = () => Math.floor(Date.now() / 1000); - private jobTimeoutSec: number; + private jobTimeoutMs: number; private maxRetries: number; public constructor( - private database: ProvingJobDatabase, - { jobTimeoutSec = 30, timeoutIntervalSec = 10, maxRetries = 3 }: ProofRequestBrokerConfig = {}, + private database: ProvingBrokerDatabase, + { jobTimeoutMs = 30, timeoutIntervalMs = 10, maxRetries = 3 }: ProofRequestBrokerConfig = {}, private logger = createDebugLogger('aztec:prover-client:proving-broker'), ) { - this.timeoutPromise = new RunningPromise(this.timeoutCheck, timeoutIntervalSec * 1000); - this.jobTimeoutSec = jobTimeoutSec; + this.timeoutPromise = new RunningPromise(this.timeoutCheck, timeoutIntervalMs); + this.jobTimeoutMs = jobTimeoutMs; this.maxRetries = maxRetries; } @@ -86,7 +91,10 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { this.logger.info(`Restoring proving job id=${item.id} settled=${!!result}`); this.jobsCache.set(item.id, item); + this.promises.set(item.id, promiseWithResolvers()); + if (result) { + this.promises.get(item.id)!.resolve(result); this.resultsCache.set(item.id, result); } else { this.logger.debug(`Re-enqueuing proving job id=${item.id}`); @@ -101,7 +109,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { return this.timeoutPromise.stop(); } - public async enqueueProvingJob(job: V2ProvingJob): Promise { + public async enqueueProvingJob(job: ProvingJob): Promise { if (this.jobsCache.has(job.id)) { const existing = this.jobsCache.get(job.id); assert.deepStrictEqual(job, existing, 'Duplicate proving job ID'); @@ -113,20 +121,35 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { this.enqueueJobInternal(job); } - public async removeAndCancelProvingJob(id: V2ProvingJobId): Promise { + public waitForJobToSettle(id: ProvingJobId): Promise { + const promiseWithResolvers = this.promises.get(id); + if (!promiseWithResolvers) { + return Promise.resolve({ status: 'rejected', reason: `Job ${id} not found` }); + } + return promiseWithResolvers.promise; + } + + public async removeAndCancelProvingJob(id: ProvingJobId): Promise { this.logger.info(`Cancelling job id=${id}`); 
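    // Deleting the job and any stored result here also ensures a restarted broker
    // will not restore or re-enqueue a job that has been explicitly cancelled.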
await this.database.deleteProvingJobAndResult(id); + // notify listeners of the cancellation + if (!this.resultsCache.has(id)) { + this.promises.get(id)?.resolve({ status: 'rejected', reason: 'Aborted' }); + } + this.jobsCache.delete(id); + this.promises.delete(id); this.resultsCache.delete(id); this.inProgress.delete(id); this.retries.delete(id); } - // eslint-disable-next-line require-await - public async getProvingJobStatus(id: V2ProvingJobId): Promise { + public getProvingJobStatus(id: ProvingJobId): Promise { const result = this.resultsCache.get(id); - if (!result) { + if (result) { + return Promise.resolve(result); + } else { // no result yet, check if we know the item const item = this.jobsCache.get(id); @@ -136,17 +159,13 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { } return Promise.resolve({ status: this.inProgress.has(id) ? 'in-progress' : 'in-queue' }); - } else if ('value' in result) { - return Promise.resolve({ status: 'resolved', value: result.value }); - } else { - return Promise.resolve({ status: 'rejected', error: result.error }); } } // eslint-disable-next-line require-await - async getProvingJob( - filter: ProvingJobFilter = {}, - ): Promise<{ job: V2ProvingJob; time: number } | undefined> { + async getProvingJob( + filter: ProvingJobFilter = { allowList: [] }, + ): Promise<{ job: ProvingJob; time: number } | undefined> { const allowedProofs: ProvingRequestType[] = Array.isArray(filter.allowList) && filter.allowList.length > 0 ? [...filter.allowList] @@ -155,7 +174,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { for (const proofType of allowedProofs) { const queue = this.queues[proofType]; - let job: V2ProvingJob | undefined; + let job: ProvingJob | undefined; // exhaust the queue and make sure we're not sending a job that's already in progress // or has already been completed // this can happen if the broker crashes and restarts @@ -177,7 +196,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { return undefined; } - async reportProvingJobError(id: V2ProvingJobId, err: Error, retry = false): Promise { + async reportProvingJobError(id: ProvingJobId, err: string, retry = false): Promise { const info = this.inProgress.get(id); const item = this.jobsCache.get(id); const retries = this.retries.get(id) ?? 
0; @@ -203,15 +222,19 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { this.logger.debug( `Marking proving job id=${id} type=${ProvingRequestType[item.type]} totalAttempts=${retries + 1} as failed`, ); + await this.database.setProvingJobError(id, err); - this.resultsCache.set(id, { error: String(err) }); + + const result: ProvingJobSettledResult = { status: 'rejected', reason: String(err) }; + this.resultsCache.set(id, result); + this.promises.get(id)!.resolve(result); } - reportProvingJobProgress( - id: V2ProvingJobId, + reportProvingJobProgress( + id: ProvingJobId, startedAt: number, - filter?: ProvingJobFilter, - ): Promise<{ job: V2ProvingJob; time: number } | undefined> { + filter?: ProvingJobFilter, + ): Promise<{ job: ProvingJob; time: number } | undefined> { const job = this.jobsCache.get(id); if (!job) { this.logger.warn(`Proving job id=${id} does not exist`); @@ -256,7 +279,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { } } - async reportProvingJobSuccess(id: V2ProvingJobId, value: V2ProofOutputUri): Promise { + async reportProvingJobSuccess(id: ProvingJobId, value: ProofUri): Promise { const info = this.inProgress.get(id); const item = this.jobsCache.get(id); const retries = this.retries.get(id) ?? 0; @@ -274,8 +297,12 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { this.logger.debug( `Proving job complete id=${id} type=${ProvingRequestType[item.type]} totalAttempts=${retries + 1}`, ); + await this.database.setProvingJobResult(id, value); - this.resultsCache.set(id, { value }); + + const result: ProvingJobSettledResult = { status: 'fulfilled', value }; + this.resultsCache.set(id, result); + this.promises.get(id)!.resolve(result); } private timeoutCheck = () => { @@ -288,8 +315,8 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { continue; } - const secondsSinceLastUpdate = this.timeSource() - metadata.lastUpdatedAt; - if (secondsSinceLastUpdate >= this.jobTimeoutSec) { + const msSinceLastUpdate = (this.timeSource() - metadata.lastUpdatedAt) * 1000; + if (msSinceLastUpdate >= this.jobTimeoutMs) { this.logger.warn(`Proving job id=${id} timed out. Adding it back to the queue.`); this.inProgress.delete(id); this.enqueueJobInternal(item); @@ -297,14 +324,17 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { } }; - private enqueueJobInternal(job: V2ProvingJob): void { + private enqueueJobInternal(job: ProvingJob): void { + if (!this.promises.has(job.id)) { + this.promises.set(job.id, promiseWithResolvers()); + } this.queues[job.type].put(job); this.logger.debug(`Enqueued new proving job id=${job.id}`); } } type ProvingQueues = { - [K in ProvingRequestType]: PriorityMemoryQueue; + [K in ProvingRequestType]: PriorityMemoryQueue; }; /** @@ -313,10 +343,12 @@ type ProvingQueues = { * @param b - Another proving job * @returns A number indicating the relative priority of the two proving jobs */ -function provingJobComparator(a: V2ProvingJob, b: V2ProvingJob): -1 | 0 | 1 { - if (a.blockNumber < b.blockNumber) { +function provingJobComparator(a: ProvingJob, b: ProvingJob): -1 | 0 | 1 { + const aBlockNumber = a.blockNumber ?? 0; + const bBlockNumber = b.blockNumber ?? 
0; + if (aBlockNumber < bBlockNumber) { return -1; - } else if (a.blockNumber > b.blockNumber) { + } else if (aBlockNumber > bBlockNumber) { return 1; } else { return 0; diff --git a/yarn-project/prover-client/src/proving_broker/proving_job_database.ts b/yarn-project/prover-client/src/proving_broker/proving_broker_database.ts similarity index 63% rename from yarn-project/prover-client/src/proving_broker/proving_job_database.ts rename to yarn-project/prover-client/src/proving_broker/proving_broker_database.ts index 222aed5741c..b5adf91cb89 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_job_database.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker_database.ts @@ -1,30 +1,25 @@ -import { - type V2ProofOutputUri, - type V2ProvingJob, - type V2ProvingJobId, - type V2ProvingJobResult, -} from '@aztec/circuit-types'; +import { type ProofUri, type ProvingJob, type ProvingJobId, type ProvingJobSettledResult } from '@aztec/circuit-types'; /** * A database for storing proof requests and their results */ -export interface ProvingJobDatabase { +export interface ProvingBrokerDatabase { /** * Saves a proof request so it can be retrieved later * @param request - The proof request to save */ - addProvingJob(request: V2ProvingJob): Promise; + addProvingJob(request: ProvingJob): Promise; /** * Removes a proof request from the backend * @param id - The ID of the proof request to remove */ - deleteProvingJobAndResult(id: V2ProvingJobId): Promise; + deleteProvingJobAndResult(id: ProvingJobId): Promise; /** * Returns an iterator over all saved proving jobs */ - allProvingJobs(): Iterable<[V2ProvingJob, V2ProvingJobResult | undefined]>; + allProvingJobs(): Iterable<[ProvingJob, ProvingJobSettledResult | undefined]>; /** * Saves the result of a proof request @@ -32,7 +27,7 @@ export interface ProvingJobDatabase { * @param ProvingRequestType - The type of proof that was requested * @param value - The result of the proof request */ - setProvingJobResult(id: V2ProvingJobId, value: V2ProofOutputUri): Promise; + setProvingJobResult(id: ProvingJobId, value: ProofUri): Promise; /** * Saves an error that occurred while processing a proof request @@ -40,5 +35,5 @@ export interface ProvingJobDatabase { * @param ProvingRequestType - The type of proof that was requested * @param err - The error that occurred while processing the proof request */ - setProvingJobError(id: V2ProvingJobId, err: Error): Promise; + setProvingJobError(id: ProvingJobId, err: string): Promise; } diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker_database/memory.ts b/yarn-project/prover-client/src/proving_broker/proving_broker_database/memory.ts new file mode 100644 index 00000000000..0a737aadd43 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/proving_broker_database/memory.ts @@ -0,0 +1,43 @@ +import type { ProofUri, ProvingJob, ProvingJobId, ProvingJobSettledResult } from '@aztec/circuit-types'; + +import { type ProvingBrokerDatabase } from '../proving_broker_database.js'; + +export class InMemoryBrokerDatabase implements ProvingBrokerDatabase { + private jobs = new Map(); + private results = new Map(); + + getProvingJob(id: ProvingJobId): ProvingJob | undefined { + return this.jobs.get(id); + } + + getProvingJobResult(id: ProvingJobId): ProvingJobSettledResult | undefined { + return this.results.get(id); + } + + addProvingJob(request: ProvingJob): Promise { + this.jobs.set(request.id, request); + return Promise.resolve(); + } + + setProvingJobResult(id: ProvingJobId, 
value: ProofUri): Promise { + this.results.set(id, { status: 'fulfilled', value }); + return Promise.resolve(); + } + + setProvingJobError(id: ProvingJobId, reason: string): Promise { + this.results.set(id, { status: 'rejected', reason }); + return Promise.resolve(); + } + + deleteProvingJobAndResult(id: ProvingJobId): Promise { + this.jobs.delete(id); + this.results.delete(id); + return Promise.resolve(); + } + + *allProvingJobs(): Iterable<[ProvingJob, ProvingJobSettledResult | undefined]> { + for (const item of this.jobs.values()) { + yield [item, this.results.get(item.id)] as const; + } + } +} diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts b/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts new file mode 100644 index 00000000000..909b2d6e4e1 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts @@ -0,0 +1,45 @@ +import { type ProofUri, ProvingJob, type ProvingJobId, ProvingJobSettledResult } from '@aztec/circuit-types'; +import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc'; +import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; + +import { type ProvingBrokerDatabase } from '../proving_broker_database.js'; + +export class KVBrokerDatabase implements ProvingBrokerDatabase { + private jobs: AztecMap; + private jobResults: AztecMap; + + constructor(private store: AztecKVStore) { + this.jobs = store.openMap('proving_jobs'); + this.jobResults = store.openMap('proving_job_results'); + } + + async addProvingJob(job: ProvingJob): Promise { + await this.jobs.set(job.id, jsonStringify(job)); + } + + *allProvingJobs(): Iterable<[ProvingJob, ProvingJobSettledResult | undefined]> { + for (const jobStr of this.jobs.values()) { + const job = jsonParseWithSchema(jobStr, ProvingJob); + const resultStr = this.jobResults.get(job.id); + const result = resultStr ? jsonParseWithSchema(resultStr, ProvingJobSettledResult) : undefined; + yield [job, result]; + } + } + + deleteProvingJobAndResult(id: ProvingJobId): Promise { + return this.store.transaction(() => { + void this.jobs.delete(id); + void this.jobResults.delete(id); + }); + } + + async setProvingJobError(id: ProvingJobId, reason: string): Promise { + const result: ProvingJobSettledResult = { status: 'rejected', reason }; + await this.jobResults.set(id, jsonStringify(result)); + } + + async setProvingJobResult(id: ProvingJobId, value: ProofUri): Promise { + const result: ProvingJobSettledResult = { status: 'fulfilled', value }; + await this.jobResults.set(id, jsonStringify(result)); + } +} diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker_interface.ts b/yarn-project/prover-client/src/proving_broker/proving_broker_interface.ts deleted file mode 100644 index e2b64170cbc..00000000000 --- a/yarn-project/prover-client/src/proving_broker/proving_broker_interface.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { - type ProvingRequestType, - type V2ProofOutputUri, - type V2ProvingJob, - type V2ProvingJobId, - type V2ProvingJobStatus, -} from '@aztec/circuit-types'; - -/** - * An interface for the proving orchestrator. 
The producer uses this to enqueue jobs for agents - */ -export interface ProvingJobProducer { - /** - * Enqueues a proving job - * @param job - The job to enqueue - */ - enqueueProvingJob(job: V2ProvingJob): Promise; - - /** - * Cancels a proving job and clears all of its - * @param id - The ID of the job to cancel - */ - removeAndCancelProvingJob(id: V2ProvingJobId): Promise; - - /** - * Returns the current status fof the proving job - * @param id - The ID of the job to get the status of - */ - getProvingJobStatus(id: V2ProvingJobId): Promise; -} - -export interface ProvingJobFilter { - allowList?: T; -} - -/** - * An interface for proving agents to request jobs and report results - */ -export interface ProvingJobConsumer { - /** - * Gets a proving job to work on - * @param filter - Optional filter for the type of job to get - */ - getProvingJob( - filter?: ProvingJobFilter, - ): Promise<{ job: V2ProvingJob; time: number } | undefined>; - - /** - * Marks a proving job as successful - * @param id - The ID of the job to report success for - * @param result - The result of the job - */ - reportProvingJobSuccess(id: V2ProvingJobId, result: V2ProofOutputUri): Promise; - - /** - * Marks a proving job as errored - * @param id - The ID of the job to report an error for - * @param err - The error that occurred while processing the job - * @param retry - Whether to retry the job - */ - reportProvingJobError(id: V2ProvingJobId, err: Error, retry?: boolean): Promise; - - /** - * Sends a heartbeat to the broker to indicate that the agent is still working on the given proving job - * @param id - The ID of the job to report progress for - * @param startedAt - The unix epoch when the job was started - * @param filter - Optional filter for the type of job to get - */ - reportProvingJobProgress( - id: V2ProvingJobId, - startedAt: number, - filter?: ProvingJobFilter, - ): Promise<{ job: V2ProvingJob; time: number } | undefined>; -} diff --git a/yarn-project/prover-client/src/proving_broker/proving_job_controller.test.ts b/yarn-project/prover-client/src/proving_broker/proving_job_controller.test.ts index f6e1649e50c..364703b23cf 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_job_controller.test.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_job_controller.test.ts @@ -1,12 +1,13 @@ -import { ProvingRequestType, type V2ProvingJobId, makePublicInputsAndRecursiveProof } from '@aztec/circuit-types'; +import { type ProvingJobId, ProvingRequestType, makePublicInputsAndRecursiveProof } from '@aztec/circuit-types'; import { RECURSIVE_PROOF_LENGTH, VerificationKeyData, makeRecursiveProof } from '@aztec/circuits.js'; import { makeBaseParityInputs, makeParityPublicInputs } from '@aztec/circuits.js/testing'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; import { sleep } from '@aztec/foundation/sleep'; import { jest } from '@jest/globals'; import { MockProver } from '../test/mock_prover.js'; -import { ProvingJobController, ProvingJobStatus } from './proving_job_controller.js'; +import { ProvingJobController, ProvingJobControllerStatus } from './proving_job_controller.js'; describe('ProvingJobController', () => { let prover: MockProver; @@ -17,10 +18,10 @@ describe('ProvingJobController', () => { prover = new MockProver(); onComplete = jest.fn(); controller = new ProvingJobController( - '1' as V2ProvingJobId, + '1' as ProvingJobId, { type: ProvingRequestType.BASE_PARITY, - value: makeBaseParityInputs(), + inputs: makeBaseParityInputs(), }, 0, prover, @@ -29,18 +30,25 
@@ describe('ProvingJobController', () => { }); it('reports IDLE status initially', () => { - expect(controller.getStatus()).toBe(ProvingJobStatus.IDLE); + expect(controller.getStatus()).toBe(ProvingJobControllerStatus.IDLE); }); it('reports PROVING status while busy', () => { controller.start(); - expect(controller.getStatus()).toBe(ProvingJobStatus.PROVING); + expect(controller.getStatus()).toBe(ProvingJobControllerStatus.PROVING); }); it('reports DONE status after job is done', async () => { controller.start(); await sleep(1); // give promises a chance to complete - expect(controller.getStatus()).toBe(ProvingJobStatus.DONE); + expect(controller.getStatus()).toBe(ProvingJobControllerStatus.DONE); + }); + + it('reports ABORTED status after job is aborted', async () => { + controller.start(); + controller.abort(); + await sleep(1); // give promises a chance to complete + expect(controller.getStatus()).toBe(ProvingJobControllerStatus.ABORTED); }); it('calls onComplete with the proof', async () => { @@ -53,10 +61,7 @@ describe('ProvingJobController', () => { controller.start(); await sleep(1); // give promises a chance to complete - expect(onComplete).toHaveBeenCalledWith('1', undefined, { - type: ProvingRequestType.BASE_PARITY, - value: resp, - }); + expect(onComplete).toHaveBeenCalledWith('1', ProvingRequestType.BASE_PARITY, undefined, resp); }); it('calls onComplete with the error', async () => { @@ -65,7 +70,7 @@ describe('ProvingJobController', () => { controller.start(); await sleep(1); - expect(onComplete).toHaveBeenCalledWith('1', err, undefined); + expect(onComplete).toHaveBeenCalledWith('1', ProvingRequestType.BASE_PARITY, err, undefined); }); it('does not crash if onComplete throws', async () => { @@ -87,4 +92,30 @@ describe('ProvingJobController', () => { await sleep(1); expect(onComplete).toHaveBeenCalled(); }); + + it('does not call onComplete if abort is called', async () => { + const { promise, resolve } = promiseWithResolvers(); + jest.spyOn(prover, 'getBaseParityProof').mockReturnValueOnce(promise); + + controller.start(); + + await sleep(1); + expect(onComplete).not.toHaveBeenCalled(); + + controller.abort(); + await sleep(1); + expect(onComplete).not.toHaveBeenCalled(); + + // simulate a prover that does not respect signals, still completes the proof after aborting + resolve( + makePublicInputsAndRecursiveProof( + makeParityPublicInputs(), + makeRecursiveProof(RECURSIVE_PROOF_LENGTH), + VerificationKeyData.makeFakeHonk(), + ), + ); + + await sleep(1); + expect(onComplete).not.toHaveBeenCalled(); + }); }); diff --git a/yarn-project/prover-client/src/proving_broker/proving_job_controller.ts b/yarn-project/prover-client/src/proving_broker/proving_job_controller.ts index 6210766896d..2ce47cbe6f7 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_job_controller.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_job_controller.ts @@ -1,55 +1,67 @@ import { + type ProvingJobId, + type ProvingJobInputs, + type ProvingJobResultsMap, ProvingRequestType, type ServerCircuitProver, - type V2ProofInput, - type V2ProofOutput, - type V2ProvingJobId, } from '@aztec/circuit-types'; -export enum ProvingJobStatus { +export enum ProvingJobControllerStatus { IDLE = 'idle', PROVING = 'proving', DONE = 'done', + ABORTED = 'aborted', } -type ProvingJobCompletionCallback = ( - jobId: V2ProvingJobId, - type: ProvingRequestType, - error: Error | undefined, - result: V2ProofOutput | undefined, -) => void | Promise; +interface ProvingJobCompletionCallback { + ( + jobId: 
ProvingJobId, + type: T, + error: Error | undefined, + result: ProvingJobResultsMap[T] | undefined, + ): void | Promise; +} export class ProvingJobController { - private status: ProvingJobStatus = ProvingJobStatus.IDLE; + private status: ProvingJobControllerStatus = ProvingJobControllerStatus.IDLE; private promise?: Promise; private abortController = new AbortController(); constructor( - private jobId: V2ProvingJobId, - private inputs: V2ProofInput, + private jobId: ProvingJobId, + private inputs: ProvingJobInputs, private startedAt: number, private circuitProver: ServerCircuitProver, private onComplete: ProvingJobCompletionCallback, ) {} public start(): void { - if (this.status !== ProvingJobStatus.IDLE) { + if (this.status !== ProvingJobControllerStatus.IDLE) { return; } - this.status = ProvingJobStatus.PROVING; + this.status = ProvingJobControllerStatus.PROVING; this.promise = this.generateProof() .then( result => { - this.status = ProvingJobStatus.DONE; + if (this.status === ProvingJobControllerStatus.ABORTED) { + return; + } + + this.status = ProvingJobControllerStatus.DONE; return this.onComplete(this.jobId, this.inputs.type, undefined, result); }, error => { - this.status = ProvingJobStatus.DONE; + if (this.status === ProvingJobControllerStatus.ABORTED) { + return; + } + if (error.name === 'AbortError') { // Ignore abort errors return; } + + this.status = ProvingJobControllerStatus.DONE; return this.onComplete(this.jobId, this.inputs.type, error, undefined); }, ) @@ -58,19 +70,20 @@ export class ProvingJobController { }); } - public getStatus(): ProvingJobStatus { + public getStatus(): ProvingJobControllerStatus { return this.status; } public abort(): void { - if (this.status !== ProvingJobStatus.PROVING) { + if (this.status !== ProvingJobControllerStatus.PROVING) { return; } + this.status = ProvingJobControllerStatus.ABORTED; this.abortController.abort(); } - public getJobId(): V2ProvingJobId { + public getJobId(): ProvingJobId { return this.jobId; } @@ -82,68 +95,56 @@ export class ProvingJobController { return ProvingRequestType[this.inputs.type]; } - private async generateProof(): Promise { - const { type, value: inputs } = this.inputs; + private async generateProof(): Promise { + const { type, inputs } = this.inputs; const signal = this.abortController.signal; switch (type) { case ProvingRequestType.PUBLIC_VM: { - const value = await this.circuitProver.getAvmProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getAvmProof(inputs, signal); } case ProvingRequestType.PRIVATE_BASE_ROLLUP: { - const value = await this.circuitProver.getPrivateBaseRollupProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getPrivateBaseRollupProof(inputs, signal); } case ProvingRequestType.PUBLIC_BASE_ROLLUP: { - const value = await this.circuitProver.getPublicBaseRollupProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getPublicBaseRollupProof(inputs, signal); } case ProvingRequestType.MERGE_ROLLUP: { - const value = await this.circuitProver.getMergeRollupProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getMergeRollupProof(inputs, signal); } case ProvingRequestType.EMPTY_BLOCK_ROOT_ROLLUP: { - const value = await this.circuitProver.getEmptyBlockRootRollupProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getEmptyBlockRootRollupProof(inputs, signal); } case ProvingRequestType.BLOCK_ROOT_ROLLUP: { - const value = await 
this.circuitProver.getBlockRootRollupProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getBlockRootRollupProof(inputs, signal); } case ProvingRequestType.BLOCK_MERGE_ROLLUP: { - const value = await this.circuitProver.getBlockMergeRollupProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getBlockMergeRollupProof(inputs, signal); } case ProvingRequestType.ROOT_ROLLUP: { - const value = await this.circuitProver.getRootRollupProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getRootRollupProof(inputs, signal); } case ProvingRequestType.BASE_PARITY: { - const value = await this.circuitProver.getBaseParityProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getBaseParityProof(inputs, signal); } case ProvingRequestType.ROOT_PARITY: { - const value = await this.circuitProver.getRootParityProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getRootParityProof(inputs, signal); } case ProvingRequestType.PRIVATE_KERNEL_EMPTY: { - const value = await this.circuitProver.getEmptyPrivateKernelProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getEmptyPrivateKernelProof(inputs, signal); } case ProvingRequestType.TUBE_PROOF: { - const value = await this.circuitProver.getTubeProof(inputs, signal); - return { type, value }; + return await this.circuitProver.getTubeProof(inputs, signal); } default: { diff --git a/yarn-project/prover-client/src/proving_broker/proving_job_database/memory.ts b/yarn-project/prover-client/src/proving_broker/proving_job_database/memory.ts deleted file mode 100644 index 5eed7cdc6ca..00000000000 --- a/yarn-project/prover-client/src/proving_broker/proving_job_database/memory.ts +++ /dev/null @@ -1,43 +0,0 @@ -import type { V2ProofOutputUri, V2ProvingJob, V2ProvingJobId, V2ProvingJobResult } from '@aztec/circuit-types'; - -import { type ProvingJobDatabase } from '../proving_job_database.js'; - -export class InMemoryDatabase implements ProvingJobDatabase { - private jobs = new Map(); - private results = new Map(); - - getProvingJob(id: V2ProvingJobId): V2ProvingJob | undefined { - return this.jobs.get(id); - } - - getProvingJobResult(id: V2ProvingJobId): V2ProvingJobResult | undefined { - return this.results.get(id); - } - - addProvingJob(request: V2ProvingJob): Promise { - this.jobs.set(request.id, request); - return Promise.resolve(); - } - - setProvingJobResult(id: V2ProvingJobId, value: V2ProofOutputUri): Promise { - this.results.set(id, { value }); - return Promise.resolve(); - } - - setProvingJobError(id: V2ProvingJobId, error: Error): Promise { - this.results.set(id, { error: String(error) }); - return Promise.resolve(); - } - - deleteProvingJobAndResult(id: V2ProvingJobId): Promise { - this.jobs.delete(id); - this.results.delete(id); - return Promise.resolve(); - } - - *allProvingJobs(): Iterable<[V2ProvingJob, V2ProvingJobResult | undefined]> { - for (const item of this.jobs.values()) { - yield [item, this.results.get(item.id)] as const; - } - } -} diff --git a/yarn-project/prover-client/src/proving_broker/proving_job_database/persisted.ts b/yarn-project/prover-client/src/proving_broker/proving_job_database/persisted.ts deleted file mode 100644 index 5fe5c092cc9..00000000000 --- a/yarn-project/prover-client/src/proving_broker/proving_job_database/persisted.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { type V2ProofOutputUri, V2ProvingJob, type V2ProvingJobId, V2ProvingJobResult } from 
'@aztec/circuit-types'; -import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc'; -import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; - -import { type ProvingJobDatabase } from '../proving_job_database.js'; - -export class PersistedProvingJobDatabase implements ProvingJobDatabase { - private jobs: AztecMap; - private jobResults: AztecMap; - - constructor(private store: AztecKVStore) { - this.jobs = store.openMap('proving_jobs'); - this.jobResults = store.openMap('proving_job_results'); - } - - async addProvingJob(job: V2ProvingJob): Promise { - await this.jobs.set(job.id, jsonStringify(job)); - } - - *allProvingJobs(): Iterable<[V2ProvingJob, V2ProvingJobResult | undefined]> { - for (const jobStr of this.jobs.values()) { - const job = jsonParseWithSchema(jobStr, V2ProvingJob); - const resultStr = this.jobResults.get(job.id); - const result = resultStr ? jsonParseWithSchema(resultStr, V2ProvingJobResult) : undefined; - yield [job, result]; - } - } - - deleteProvingJobAndResult(id: V2ProvingJobId): Promise { - return this.store.transaction(() => { - void this.jobs.delete(id); - void this.jobResults.delete(id); - }); - } - - async setProvingJobError(id: V2ProvingJobId, err: Error): Promise { - const res: V2ProvingJobResult = { error: err.message }; - await this.jobResults.set(id, jsonStringify(res)); - } - - async setProvingJobResult(id: V2ProvingJobId, value: V2ProofOutputUri): Promise { - const res: V2ProvingJobResult = { value }; - await this.jobResults.set(id, jsonStringify(res)); - } -} diff --git a/yarn-project/prover-client/src/proving_broker/rpc.ts b/yarn-project/prover-client/src/proving_broker/rpc.ts new file mode 100644 index 00000000000..0db7e38f7c0 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/rpc.ts @@ -0,0 +1,65 @@ +import { + type GetProvingJobResponse, + ProofUri, + ProvingJob, + type ProvingJobBroker, + type ProvingJobConsumer, + ProvingJobId, + type ProvingJobProducer, + ProvingJobSettledResult, + ProvingJobStatus, + ProvingRequestType, +} from '@aztec/circuit-types'; +import { createSafeJsonRpcClient, makeFetch } from '@aztec/foundation/json-rpc/client'; +import { type SafeJsonRpcServer, createSafeJsonRpcServer } from '@aztec/foundation/json-rpc/server'; +import { type ApiSchemaFor } from '@aztec/foundation/schemas'; + +import { z } from 'zod'; + +const ProvingJobFilterSchema = z.object({ + allowList: z.array(z.nativeEnum(ProvingRequestType)), +}); + +const GetProvingJobResponse = z.object({ + job: ProvingJob, + time: z.number(), +}); + +export const ProvingJobProducerSchema: ApiSchemaFor = { + enqueueProvingJob: z.function().args(ProvingJob).returns(z.void()), + getProvingJobStatus: z.function().args(ProvingJobId).returns(ProvingJobStatus), + removeAndCancelProvingJob: z.function().args(ProvingJobId).returns(z.void()), + waitForJobToSettle: z.function().args(ProvingJobId).returns(ProvingJobSettledResult), +}; + +// can't use ApiSchemaFor because of the optional parameters +export const ProvingJobConsumerSchema = { + getProvingJob: z.function().args(ProvingJobFilterSchema.optional()).returns(GetProvingJobResponse.optional()), + reportProvingJobError: z.function().args(ProvingJobId, z.string(), z.boolean().optional()).returns(z.void()), + reportProvingJobProgress: z + .function() + .args(ProvingJobId, z.number(), ProvingJobFilterSchema.optional()) + .returns(GetProvingJobResponse.optional()), + reportProvingJobSuccess: z.function().args(ProvingJobId, ProofUri).returns(z.void()), +} as unknown as ApiSchemaFor; + 
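// A minimal usage sketch of the schemas declared above: `exampleValidatedGetProvingJob`
// and its `consumer` parameter are illustrative names only, where `consumer` is assumed
// to be any object implementing ProvingJobConsumer (such as a client created below).
async function exampleValidatedGetProvingJob(consumer: ProvingJobConsumer) {
  // validate the outgoing filter against the same schema used by the RPC layer
  const filter = ProvingJobFilterSchema.parse({ allowList: [ProvingRequestType.BASE_PARITY] });
  const response = await consumer.getProvingJob(filter);
  if (!response) {
    return undefined; // no job of the requested type is currently queued
  }
  // validate the response shape; `job.inputsUri` points at the externally stored inputs
  // and `time` is the start timestamp used for subsequent progress reports
  const { job, time } = GetProvingJobResponse.parse(response);
  return { id: job.id, startedAt: time };
}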
+export const ProvingJobBrokerSchema: ApiSchemaFor = { + ...ProvingJobConsumerSchema, + ...ProvingJobProducerSchema, +}; + +export function createProvingBrokerServer(broker: ProvingJobBroker): SafeJsonRpcServer { + return createSafeJsonRpcServer(broker, ProvingJobBrokerSchema); +} + +export function createProvingJobBrokerClient(url: string, fetch = makeFetch([1, 2, 3], false)): ProvingJobBroker { + return createSafeJsonRpcClient(url, ProvingJobBrokerSchema, false, 'proverBroker', fetch); +} + +export function createProvingJobProducerClient(url: string, fetch = makeFetch([1, 2, 3], false)): ProvingJobProducer { + return createSafeJsonRpcClient(url, ProvingJobProducerSchema, false, 'provingJobProducer', fetch); +} + +export function createProvingJobConsumerClient(url: string, fetch = makeFetch([1, 2, 3], false)): ProvingJobConsumer { + return createSafeJsonRpcClient(url, ProvingJobConsumerSchema, false, 'provingJobConsumer', fetch); +} diff --git a/yarn-project/prover-client/src/test/mock_prover.ts b/yarn-project/prover-client/src/test/mock_prover.ts index 118ff214e14..c0ea23c2643 100644 --- a/yarn-project/prover-client/src/test/mock_prover.ts +++ b/yarn-project/prover-client/src/test/mock_prover.ts @@ -1,5 +1,10 @@ import { type ProofAndVerificationKey, + type ProvingJob, + type ProvingJobId, + type ProvingJobProducer, + type ProvingJobSettledResult, + type ProvingJobStatus, type PublicInputsAndRecursiveProof, type ServerCircuitProver, makeProofAndVerificationKey, @@ -37,6 +42,52 @@ import { makeParityPublicInputs, makeRootRollupPublicInputs, } from '@aztec/circuits.js/testing'; +import { times } from '@aztec/foundation/collection'; + +import { InlineProofStore, type ProofStore } from '../proving_broker/proof_store.js'; +import { ProvingAgent } from '../proving_broker/proving_agent.js'; +import { ProvingBroker } from '../proving_broker/proving_broker.js'; +import { InMemoryBrokerDatabase } from '../proving_broker/proving_broker_database/memory.js'; + +export class TestBroker implements ProvingJobProducer { + private broker = new ProvingBroker(new InMemoryBrokerDatabase()); + private agents: ProvingAgent[]; + + constructor( + agentCount: number, + prover: ServerCircuitProver, + private proofStore: ProofStore = new InlineProofStore(), + ) { + this.agents = times(agentCount, () => new ProvingAgent(this.broker, proofStore, prover)); + } + + public async start() { + await this.broker.start(); + this.agents.forEach(agent => agent.start()); + } + + public async stop() { + await Promise.all(this.agents.map(agent => agent.stop())); + await this.broker.stop(); + } + + public getProofStore(): ProofStore { + return this.proofStore; + } + + enqueueProvingJob(job: ProvingJob): Promise { + return this.broker.enqueueProvingJob(job); + } + getProvingJobStatus(id: ProvingJobId): Promise { + return this.broker.getProvingJobStatus(id); + } + removeAndCancelProvingJob(id: ProvingJobId): Promise { + return this.broker.removeAndCancelProvingJob(id); + } + waitForJobToSettle(id: ProvingJobId): Promise { + return this.broker.waitForJobToSettle(id); + } +} export class MockProver implements ServerCircuitProver { constructor() {} diff --git a/yarn-project/prover-client/src/tx-prover/factory.ts b/yarn-project/prover-client/src/tx-prover/factory.ts index d81ff2e15e7..07a65a8c57c 100644 --- a/yarn-project/prover-client/src/tx-prover/factory.ts +++ b/yarn-project/prover-client/src/tx-prover/factory.ts @@ -1,9 +1,14 @@ +import { type ProvingJobBroker } from '@aztec/circuit-types'; import { type TelemetryClient } from 
'@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type ProverClientConfig } from '../config.js'; import { TxProver } from './tx-prover.js'; -export function createProverClient(config: ProverClientConfig, telemetry: TelemetryClient = new NoopTelemetryClient()) { - return TxProver.new(config, telemetry); +export function createProverClient( + config: ProverClientConfig, + broker: ProvingJobBroker, + telemetry: TelemetryClient = new NoopTelemetryClient(), +) { + return TxProver.new(config, broker, telemetry); } diff --git a/yarn-project/prover-client/src/tx-prover/tx-prover.ts b/yarn-project/prover-client/src/tx-prover/tx-prover.ts index 7fc059d902a..121af71c801 100644 --- a/yarn-project/prover-client/src/tx-prover/tx-prover.ts +++ b/yarn-project/prover-client/src/tx-prover/tx-prover.ts @@ -1,40 +1,59 @@ -import { BBNativeRollupProver, TestCircuitProver } from '@aztec/bb-prover'; +import { type ACVMConfig, type BBConfig, BBNativeRollupProver, TestCircuitProver } from '@aztec/bb-prover'; import { + type ActualProverConfig, type EpochProver, type EpochProverManager, type MerkleTreeWriteOperations, - type ProvingJobSource, + type ProverCache, + type ProvingJobBroker, + type ProvingJobConsumer, + type ProvingJobProducer, type ServerCircuitProver, } from '@aztec/circuit-types/interfaces'; import { Fr } from '@aztec/circuits.js'; +import { times } from '@aztec/foundation/collection'; +import { createDebugLogger } from '@aztec/foundation/log'; import { NativeACVMSimulator } from '@aztec/simulator'; import { type TelemetryClient } from '@aztec/telemetry-client'; +import { join } from 'path'; + import { type ProverClientConfig } from '../config.js'; import { ProvingOrchestrator } from '../orchestrator/orchestrator.js'; -import { MemoryProvingQueue } from '../prover-agent/memory-proving-queue.js'; -import { ProverAgent } from '../prover-agent/prover-agent.js'; +import { CachingBrokerFacade } from '../proving_broker/caching_broker_facade.js'; +import { InlineProofStore } from '../proving_broker/proof_store.js'; +import { InMemoryProverCache } from '../proving_broker/prover_cache/memory.js'; +import { ProvingAgent } from '../proving_broker/proving_agent.js'; /** * A prover factory. * TODO(palla/prover-node): Rename this class */ export class TxProver implements EpochProverManager { - private queue: MemoryProvingQueue; private running = false; + private agents: ProvingAgent[] = []; + + private cacheDir?: string; private constructor( private config: ProverClientConfig, private telemetry: TelemetryClient, - private agent?: ProverAgent, + private orchestratorClient: ProvingJobProducer, + private agentClient?: ProvingJobConsumer, + private log = createDebugLogger('aztec:prover-client:tx-prover'), ) { // TODO(palla/prover-node): Cache the paddingTx here, and not in each proving orchestrator, // so it can be reused across multiple ones and not recomputed every time. - this.queue = new MemoryProvingQueue(telemetry, config.proverJobTimeoutMs, config.proverJobPollIntervalMs); + this.cacheDir = this.config.cacheDir ? 
join(this.config.cacheDir, `tx_prover_${this.config.proverId}`) : undefined; } - public createEpochProver(db: MerkleTreeWriteOperations): EpochProver { - return new ProvingOrchestrator(db, this.queue, this.telemetry, this.config.proverId); + public createEpochProver(db: MerkleTreeWriteOperations, cache: ProverCache = new InMemoryProverCache()): EpochProver { + return new ProvingOrchestrator( + db, + new CachingBrokerFacade(this.orchestratorClient, cache), + this.telemetry, + this.config.proverId, + ); } public getProverId(): Fr { @@ -44,13 +63,12 @@ export class TxProver implements EpochProverManager { async updateProverConfig(config: Partial): Promise { const newConfig = { ...this.config, ...config }; - if (newConfig.realProofs !== this.config.realProofs && this.agent) { - const circuitProver = await TxProver.buildCircuitProver(newConfig, this.telemetry); - this.agent.setCircuitProver(circuitProver); - } - - if (this.config.proverAgentConcurrency !== newConfig.proverAgentConcurrency) { - await this.agent?.setMaxConcurrency(newConfig.proverAgentConcurrency); + if ( + newConfig.realProofs !== this.config.realProofs || + newConfig.proverAgentCount !== this.config.proverAgentCount + ) { + await this.stopAgents(); + await this.createAndStartAgents(); } if (!this.config.realProofs && newConfig.realProofs) { @@ -63,15 +81,13 @@ export class TxProver implements EpochProverManager { /** * Starts the prover instance */ - public start() { + public async start(): Promise { if (this.running) { return Promise.resolve(); } this.running = true; - this.queue.start(); - this.agent?.start(this.queue); - return Promise.resolve(); + await this.createAndStartAgents(); } /** @@ -82,10 +98,8 @@ export class TxProver implements EpochProverManager { return; } this.running = false; - + await this.stopAgents(); // TODO(palla/prover-node): Keep a reference to all proving orchestrators that are alive and stop them? - await this.agent?.stop(); - await this.queue.stop(); } /** @@ -95,36 +109,55 @@ export class TxProver implements EpochProverManager { * @param worldStateSynchronizer - An instance of the world state * @returns An instance of the prover, constructed and started. */ - public static async new(config: ProverClientConfig, telemetry: TelemetryClient) { - const agent = config.proverAgentEnabled - ? new ProverAgent( - await TxProver.buildCircuitProver(config, telemetry), - config.proverAgentConcurrency, - config.proverAgentPollInterval, - ) - : undefined; - - const prover = new TxProver(config, telemetry, agent); + public static async new(config: ProverClientConfig, broker: ProvingJobBroker, telemetry: TelemetryClient) { + const prover = new TxProver(config, telemetry, broker, broker); await prover.start(); return prover; } - private static async buildCircuitProver( - config: ProverClientConfig, - telemetry: TelemetryClient, - ): Promise { - if (config.realProofs) { - return await BBNativeRollupProver.new(config, telemetry); + public getProvingJobSource(): ProvingJobConsumer { + if (!this.agentClient) { + throw new Error('Agent client not provided'); + } + + return this.agentClient; + } + + private async createAndStartAgents(): Promise { + if (this.agents.length > 0) { + throw new Error('Agents already started'); + } + + if (!this.agentClient) { + throw new Error('Agent client not provided'); } - const simulationProvider = config.acvmBinaryPath - ? 
new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath) - : undefined; + const proofStore = new InlineProofStore(); + const prover = await buildServerCircuitProver(this.config, this.telemetry); + this.agents = times( + this.config.proverAgentCount, + () => new ProvingAgent(this.agentClient!, proofStore, prover, [], this.config.proverAgentPollIntervalMs), + ); + + await Promise.all(this.agents.map(agent => agent.start())); + } - return new TestCircuitProver(telemetry, simulationProvider, config); + private async stopAgents() { + await Promise.all(this.agents.map(agent => agent.stop())); } +} - public getProvingJobSource(): ProvingJobSource { - return this.queue; +export function buildServerCircuitProver( + config: ActualProverConfig & ACVMConfig & BBConfig, + telemetry: TelemetryClient, +): Promise { + if (config.realProofs) { + return BBNativeRollupProver.new(config, telemetry); } + + const simulationProvider = config.acvmBinaryPath + ? new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath) + : undefined; + + return Promise.resolve(new TestCircuitProver(telemetry, simulationProvider, config)); } diff --git a/yarn-project/prover-node/src/config.ts b/yarn-project/prover-node/src/config.ts index 12894b5cd0d..34a59b0a338 100644 --- a/yarn-project/prover-node/src/config.ts +++ b/yarn-project/prover-node/src/config.ts @@ -1,4 +1,11 @@ import { type ArchiverConfig, archiverConfigMappings, getArchiverConfigFromEnv } from '@aztec/archiver'; +import { type ACVMConfig, type BBConfig } from '@aztec/bb-prover'; +import { + type ProverAgentConfig, + type ProverBrokerConfig, + proverAgentConfigMappings, + proverBrokerConfigMappings, +} from '@aztec/circuit-types'; import { type ConfigMappingsType, bigintConfigHelper, @@ -7,7 +14,12 @@ import { } from '@aztec/foundation/config'; import { type DataStoreConfig, dataConfigMappings, getDataConfigFromEnv } from '@aztec/kv-store/config'; import { type P2PConfig, getP2PConfigFromEnv, p2pConfigMappings } from '@aztec/p2p'; -import { type ProverClientConfig, getProverEnvVars, proverClientConfigMappings } from '@aztec/prover-client'; +import { + type ProverClientConfig, + bbConfigMappings, + getProverEnvVars, + proverClientConfigMappings, +} from '@aztec/prover-client'; import { type PublisherConfig, type TxSenderConfig, @@ -107,3 +119,16 @@ export function getProverNodeConfigFromEnv(): ProverNodeConfig { ...getConfigFromMappings(proverBondManagerConfigMappings), }; } + +export function getProverNodeBrokerConfigFromEnv(): ProverBrokerConfig { + return { + ...getConfigFromMappings(proverBrokerConfigMappings), + }; +} + +export function getProverNodeAgentConfigFromEnv(): ProverAgentConfig & BBConfig & ACVMConfig { + return { + ...getConfigFromMappings(proverAgentConfigMappings), + ...getConfigFromMappings(bbConfigMappings), + }; +} diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index 12ac2e0de92..2f54b4b7f7d 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -1,22 +1,25 @@ import { type Archiver, createArchiver } from '@aztec/archiver'; -import { type ProverCoordination } from '@aztec/circuit-types'; +import { type ProverCoordination, type ProvingJobBroker } from '@aztec/circuit-types'; import { createEthereumChain } from '@aztec/ethereum'; import { Buffer32 } from '@aztec/foundation/buffer'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { type DataStoreConfig } from '@aztec/kv-store/config'; 
import { RollupAbi } from '@aztec/l1-artifacts'; import { createProverClient } from '@aztec/prover-client'; +import { createAndStartProvingBroker } from '@aztec/prover-client/broker'; import { L1Publisher } from '@aztec/sequencer-client'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { createWorldStateSynchronizer } from '@aztec/world-state'; +import { join } from 'path'; import { createPublicClient, getAddress, getContract, http } from 'viem'; import { createBondManager } from './bond/factory.js'; import { type ProverNodeConfig, type QuoteProviderConfig } from './config.js'; import { ClaimsMonitor } from './monitors/claims-monitor.js'; import { EpochMonitor } from './monitors/epoch-monitor.js'; +import { ProverCacheManager } from './prover-cache/cache_manager.js'; import { createProverCoordination } from './prover-coordination/factory.js'; import { ProverNode } from './prover-node.js'; import { HttpQuoteProvider } from './quote-provider/http.js'; @@ -32,6 +35,7 @@ export async function createProverNode( aztecNodeTxProvider?: ProverCoordination; archiver?: Archiver; publisher?: L1Publisher; + broker?: ProvingJobBroker; } = {}, ) { const telemetry = deps.telemetry ?? new NoopTelemetryClient(); @@ -43,7 +47,8 @@ export async function createProverNode( const worldStateSynchronizer = await createWorldStateSynchronizer(worldStateConfig, archiver, telemetry); await worldStateSynchronizer.start(); - const prover = await createProverClient(config, telemetry); + const broker = deps.broker ?? (await createAndStartProvingBroker(config)); + const prover = await createProverClient(config, broker, telemetry); // REFACTOR: Move publisher out of sequencer package and into an L1-related package const publisher = deps.publisher ?? new L1Publisher(config, telemetry); @@ -72,8 +77,11 @@ export async function createProverNode( const walletClient = publisher.getClient(); const bondManager = await createBondManager(rollupContract, walletClient, config); + const cacheDir = config.cacheDir ? 
join(config.cacheDir, `prover_${config.proverId}`) : undefined; + const cacheManager = new ProverCacheManager(cacheDir); + return new ProverNode( - prover!, + prover, publisher, archiver, archiver, @@ -86,6 +94,7 @@ export async function createProverNode( epochMonitor, bondManager, telemetry, + cacheManager, proverNodeConfig, ); } diff --git a/yarn-project/prover-node/src/prover-cache/cache_manager.ts b/yarn-project/prover-node/src/prover-cache/cache_manager.ts new file mode 100644 index 00000000000..c5583a48d8c --- /dev/null +++ b/yarn-project/prover-node/src/prover-cache/cache_manager.ts @@ -0,0 +1,64 @@ +import { type ProverCache } from '@aztec/circuit-types'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; +import { InMemoryProverCache } from '@aztec/prover-client'; + +import { type Dirent } from 'fs'; +import { mkdir, readFile, readdir, rm, writeFile } from 'fs/promises'; +import { join } from 'path'; + +import { KVProverCache } from './kv_cache.js'; + +const EPOCH_DIR_PREFIX = 'epoch'; +const EPOCH_DIR_SEPARATOR = '_'; +const EPOCH_HASH_FILENAME = 'epoch_hash.txt'; + +export class ProverCacheManager { + constructor(private cacheDir?: string, private log = createDebugLogger('aztec:prover-node:cache-manager')) {} + + public async openCache(epochNumber: bigint, epochHash: Buffer): Promise { + if (!this.cacheDir) { + return new InMemoryProverCache(); + } + + const epochDir = EPOCH_DIR_PREFIX + EPOCH_DIR_SEPARATOR + epochNumber; + const dataDir = join(this.cacheDir, epochDir); + + const storedEpochHash = await readFile(join(dataDir, EPOCH_HASH_FILENAME), 'hex').catch(() => Buffer.alloc(0)); + if (storedEpochHash.toString() !== epochHash.toString()) { + await rm(dataDir, { recursive: true, force: true }); + } + + await mkdir(dataDir, { recursive: true }); + await writeFile(join(dataDir, EPOCH_HASH_FILENAME), epochHash.toString('hex')); + + const store = AztecLmdbStore.open(dataDir); + this.log.debug(`Created new database for epoch ${epochNumber} at ${dataDir}`); + return new KVProverCache(store); + } + + public async removedStaleCaches(currentEpochNumber: bigint): Promise { + if (!this.cacheDir) { + return; + } + + const entries: Dirent[] = await readdir(this.cacheDir, { withFileTypes: true }).catch(() => []); + + for (const item of entries) { + if (!item.isDirectory()) { + continue; + } + + const [prefix, epochNumber] = item.name.split(EPOCH_DIR_SEPARATOR); + if (prefix !== EPOCH_DIR_PREFIX) { + continue; + } + + const epochNumberInt = BigInt(epochNumber); + if (epochNumberInt < currentEpochNumber) { + this.log.info(`Removing old epoch database for epoch ${epochNumberInt} at ${join(this.cacheDir, item.name)}`); + await rm(join(this.cacheDir, item.name), { recursive: true }); + } + } + } +} diff --git a/yarn-project/prover-node/src/prover-cache/kv_cache.ts b/yarn-project/prover-node/src/prover-cache/kv_cache.ts new file mode 100644 index 00000000000..28b6f9af40e --- /dev/null +++ b/yarn-project/prover-node/src/prover-cache/kv_cache.ts @@ -0,0 +1,23 @@ +import type { ProverCache, ProvingJobStatus } from '@aztec/circuit-types'; +import type { AztecKVStore, AztecMap } from '@aztec/kv-store'; + +export class KVProverCache implements ProverCache { + private proofs: AztecMap; + + constructor(store: AztecKVStore) { + this.proofs = store.openMap('prover_node_proof_status'); + } + + getProvingJobStatus(jobId: string): Promise { + const item = this.proofs.get(jobId); + if (!item) { + return Promise.resolve({ status: 'not-found' }); 
+ } + + return Promise.resolve(JSON.parse(item)); + } + + setProvingJobStatus(jobId: string, status: ProvingJobStatus): Promise { + return this.proofs.set(jobId, JSON.stringify(status)); + } +} diff --git a/yarn-project/prover-node/src/prover-node.test.ts b/yarn-project/prover-node/src/prover-node.test.ts index ce7251dc344..008b2443cc4 100644 --- a/yarn-project/prover-node/src/prover-node.test.ts +++ b/yarn-project/prover-node/src/prover-node.test.ts @@ -7,11 +7,12 @@ import { type L2Block, type L2BlockSource, type MerkleTreeWriteOperations, + type ProverCache, type ProverCoordination, WorldStateRunningState, type WorldStateSynchronizer, } from '@aztec/circuit-types'; -import { type ContractDataSource, EthAddress } from '@aztec/circuits.js'; +import { type ContractDataSource, EthAddress, Fr } from '@aztec/circuits.js'; import { times } from '@aztec/foundation/collection'; import { Signature } from '@aztec/foundation/eth-signature'; import { sleep } from '@aztec/foundation/sleep'; @@ -35,6 +36,7 @@ import { type BondManager } from './bond/bond-manager.js'; import { type EpochProvingJob } from './job/epoch-proving-job.js'; import { ClaimsMonitor } from './monitors/claims-monitor.js'; import { EpochMonitor } from './monitors/epoch-monitor.js'; +import { ProverCacheManager } from './prover-cache/cache_manager.js'; import { ProverNode, type ProverNodeOptions } from './prover-node.js'; import { type QuoteProvider } from './quote-provider/index.js'; import { type QuoteSigner } from './quote-signer.js'; @@ -102,6 +104,7 @@ describe('prover-node', () => { epochMonitor, bondManager, telemetryClient, + new ProverCacheManager(), config, ); @@ -139,7 +142,7 @@ describe('prover-node', () => { quoteSigner.sign.mockImplementation(payload => Promise.resolve(new EpochProofQuote(payload, Signature.empty()))); // Archiver returns a bunch of fake blocks - blocks = times(3, i => mock({ number: i + 20 })); + blocks = times(3, i => mock({ number: i + 20, hash: () => new Fr(i) })); l2BlockSource.getBlocksForEpoch.mockResolvedValue(blocks); // A sample claim @@ -377,6 +380,7 @@ describe('prover-node', () => { _blocks: L2Block[], publicDb: MerkleTreeWriteOperations, _proverDb: MerkleTreeWriteOperations, + _cache: ProverCache, _publicProcessorFactory: PublicProcessorFactory, cleanUp: (job: EpochProvingJob) => Promise, ): EpochProvingJob { diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts index cff56201098..4f1fb3a6c07 100644 --- a/yarn-project/prover-node/src/prover-node.ts +++ b/yarn-project/prover-node/src/prover-node.ts @@ -7,6 +7,7 @@ import { type L2Block, type L2BlockSource, type MerkleTreeWriteOperations, + type ProverCache, type ProverCoordination, type ProverNodeApi, type Service, @@ -15,6 +16,7 @@ import { } from '@aztec/circuit-types'; import { type ContractDataSource } from '@aztec/circuits.js'; import { compact } from '@aztec/foundation/collection'; +import { sha256 } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { type Maybe } from '@aztec/foundation/types'; import { type L1Publisher } from '@aztec/sequencer-client'; @@ -26,6 +28,7 @@ import { EpochProvingJob, type EpochProvingJobState } from './job/epoch-proving- import { ProverNodeMetrics } from './metrics.js'; import { type ClaimsMonitor, type ClaimsMonitorHandler } from './monitors/claims-monitor.js'; import { type EpochMonitor, type EpochMonitorHandler } from './monitors/epoch-monitor.js'; +import { type ProverCacheManager } from 
'./prover-cache/cache_manager.js'; import { type QuoteProvider } from './quote-provider/index.js'; import { type QuoteSigner } from './quote-signer.js'; @@ -62,6 +65,7 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr private readonly epochsMonitor: EpochMonitor, private readonly bondManager: BondManager, private readonly telemetryClient: TelemetryClient, + private readonly proverCacheManager: ProverCacheManager, options: Partial = {}, ) { this.options = { @@ -250,13 +254,25 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr // Create a processor using the forked world state const publicProcessorFactory = new PublicProcessorFactory(this.contractDataSource, this.telemetryClient); + const epochHash = sha256(Buffer.concat(blocks.map(block => block.hash().toBuffer()))); + const proverCache = await this.proverCacheManager.openCache(epochNumber, epochHash); + const cleanUp = async () => { await publicDb.close(); await proverDb.close(); + await this.proverCacheManager.removedStaleCaches(epochNumber); this.jobs.delete(job.getId()); }; - const job = this.doCreateEpochProvingJob(epochNumber, blocks, publicDb, proverDb, publicProcessorFactory, cleanUp); + const job = this.doCreateEpochProvingJob( + epochNumber, + blocks, + publicDb, + proverDb, + proverCache, + publicProcessorFactory, + cleanUp, + ); this.jobs.set(job.getId(), job); return job; } @@ -267,6 +283,7 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr blocks: L2Block[], publicDb: MerkleTreeWriteOperations, proverDb: MerkleTreeWriteOperations, + proverCache: ProverCache, publicProcessorFactory: PublicProcessorFactory, cleanUp: () => Promise, ) { @@ -274,7 +291,7 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr publicDb, epochNumber, blocks, - this.prover.createEpochProver(proverDb), + this.prover.createEpochProver(proverDb, proverCache), publicProcessorFactory, this.publisher, this.l2BlockSource, diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index fa81c902d7e..9a41966a582 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -23,6 +23,7 @@ import { type SiblingPath, SimulationError, type Tx, + type TxEffect, type TxExecutionRequest, type TxHash, TxProvingResult, @@ -621,7 +622,7 @@ export class PXEService implements PXE { return this.node.getTxReceipt(txHash); } - public getTxEffect(txHash: TxHash) { + public getTxEffect(txHash: TxHash): Promise | undefined> { return this.node.getTxEffect(txHash); } diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 2d0af3c2c7c..02941856405 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -999,6 +999,7 @@ __metadata: ts-node: ^10.9.1 tslib: ^2.4.0 typescript: ^5.0.4 + zod: ^3.23.8 languageName: unknown linkType: soft From 7b31cdc168c77fd2464894d816b4f1e6867bf273 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 26 Nov 2024 11:45:22 +0000 Subject: [PATCH 3/9] fix: k8s --- spartan/aztec-network/templates/_helpers.tpl | 2 ++ .../aztec-network/templates/prover-agent.yaml | 8 +++-- .../templates/prover-broker.yaml | 25 ++++++++++++- .../aztec-network/templates/prover-node.yaml | 36 +++++++++++++++---- spartan/aztec-network/values.yaml | 18 ++++++++-- .../src/interfaces/prover-broker.ts | 2 +- 6 files changed, 77 insertions(+), 14 deletions(-) diff --git a/spartan/aztec-network/templates/_helpers.tpl 
b/spartan/aztec-network/templates/_helpers.tpl index 8afb0c4636d..3db484690a0 100644 --- a/spartan/aztec-network/templates/_helpers.tpl +++ b/spartan/aztec-network/templates/_helpers.tpl @@ -165,6 +165,8 @@ Service Address Setup Container value: "{{ .Values.proverNode.externalHost }}" - name: PROVER_NODE_PORT value: "{{ .Values.proverNode.service.nodePort }}" + - name: PROVER_BROKER_PORT + value: "{{ .Values.proverBroker.service.nodePort }}" - name: SERVICE_NAME value: {{ include "aztec-network.fullname" . }} volumeMounts: diff --git a/spartan/aztec-network/templates/prover-agent.yaml b/spartan/aztec-network/templates/prover-agent.yaml index 8d56eea21ad..1367340c705 100644 --- a/spartan/aztec-network/templates/prover-agent.yaml +++ b/spartan/aztec-network/templates/prover-agent.yaml @@ -91,9 +91,11 @@ spec: - name: PROVER_REAL_PROOFS value: "{{ .Values.proverAgent.realProofs }}" - name: PROVER_AGENT_COUNT - value: {{ .Values.proverAgent.concurrency | quote }} - - name: HARDWARE_CONCURRENCY - value: {{ .Values.proverAgent.bb.hardwareConcurrency | quote }} + value: "1" + - name: PROVER_AGENT_POLL_INTERVAL_MS + value: "{{ .Values.proverAgent.pollIntervalMs }}" + - name: PROVER_AGENT_PROOF_TYPES + value: {{ join "," .Values.proverAgent.proofTypes | quote }} - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT diff --git a/spartan/aztec-network/templates/prover-broker.yaml b/spartan/aztec-network/templates/prover-broker.yaml index e23c213d553..214b6720fce 100644 --- a/spartan/aztec-network/templates/prover-broker.yaml +++ b/spartan/aztec-network/templates/prover-broker.yaml @@ -68,6 +68,14 @@ spec: value: "1" - name: DEBUG value: "{{ .Values.proverBroker.debug }}" + - name: PROVER_BROKER_POLL_INTERVAL_MS + value: "{{ .Values.proverBroker.pollIntervalMs }}" + - name: PROVER_BROKER_JOB_TIMEOUT_MS + value: "{{ .Values.proverBroker.jobTimeoutMs }}" + - name: PROVER_BROKER_JOB_MAX_RETRIES + value: "{{ .Values.proverBroker.jobMaxRetries }}" + - name: PROVER_BROKER_DATA_DIRECTORY + value: "{{ .Values.proverBroker.dataDirectory }}" - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT @@ -78,4 +86,19 @@ spec: value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} resources: {{- toYaml .Values.proverBroker.resources | nindent 12 }} -{{- end }} +--- +apiVersion: v1 +kind: Service +metadata: + name: {{ include "aztec-network.fullname" . }}-prover-broker + labels: + {{- include "aztec-network.labels" . | nindent 4 }} +spec: + type: ClusterIP + selector: + {{- include "aztec-network.selectorLabels" . | nindent 4 }} + app: prover-broker + ports: + - port: {{ .Values.proverBroker.service.nodePort }} + name: node +{{ end }} diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index 923644d7b73..4a3ab02bf19 100644 --- a/spartan/aztec-network/templates/prover-node.yaml +++ b/spartan/aztec-network/templates/prover-node.yaml @@ -40,11 +40,15 @@ spec: done echo "Ethereum node is ready!" - until curl -s -X POST ${PROVER_BROKER_HOST}/status; do - echo "Waiting for broker ${PROVER_BROKER_HOST} ..." 
- sleep 5 - done - echo "Broker is ready!" + if [ "${PROVER_BROKER_ENABLED}" == "false" ]; then + until curl -s -X POST ${PROVER_BROKER_HOST}/status; do + echo "Waiting for broker ${PROVER_BROKER_HOST} ..." + sleep 5 + done + echo "Broker is ready!" + else + echo "Using built-in job broker" + fi {{- if .Values.telemetry.enabled }} until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do @@ -61,6 +65,10 @@ spec: volumeMounts: - name: config mountPath: /shared/config + env: + - name: PROVER_BROKER_ENABLED + value: "{{ .Values.proverNode.proverBroker.enabled }}" + - name: configure-prover-env image: "{{ .Values.images.aztec.image }}" imagePullPolicy: {{ .Values.images.aztec.pullPolicy }} @@ -114,8 +122,22 @@ spec: value: "{{ .Values.proverNode.debug }}" - name: PROVER_REAL_PROOFS value: "{{ .Values.proverNode.realProofs }}" - - name: PROVER_AGENT_ENABLED - value: "{{ .Values.proverNode.proverAgentEnabled }}" + - name: PROVER_AGENT_COUNT + value: "{{ .Values.proverNode.proverAgent.count }}" + - name: PROVER_AGENT_POLL_INTERVAL_MS + value: "{{ .Values.proverNode.proverAgent.pollIntervalMs }}" + - name: PROVER_AGENT_PROOF_TYPES + value: {{ join "," .Values.proverNode.proverAgent.proofTypes | quote }} + - name: PROVER_BROKER_ENABLED + value: "{{ .Values.proverNode.proverBroker.enabled }}" + - name: PROVER_BROKER_POLL_INTERVAL_MS + value: "{{ .Values.proverNode.proverBroker.pollIntervalMs }}" + - name: PROVER_BROKER_JOB_TIMEOUT_MS + value: "{{ .Values.proverNode.proverBroker.jobTimeoutMs }}" + - name: PROVER_BROKER_JOB_MAX_RETRIES + value: "{{ .Values.proverNode.proverBroker.jobMaxRetries }}" + - name: PROVER_BROKER_DATA_DIRECTORY + value: "{{ .Values.proverNode.proverBroker.dataDirectory }}" - name: PROVER_PUBLISHER_PRIVATE_KEY value: "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" - name: OTEL_RESOURCE_ATTRIBUTES diff --git a/spartan/aztec-network/values.yaml b/spartan/aztec-network/values.yaml index 3bd3812d466..245b51f9435 100644 --- a/spartan/aztec-network/values.yaml +++ b/spartan/aztec-network/values.yaml @@ -118,7 +118,16 @@ proverNode: logLevel: "debug" debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" realProofs: false - proverAgentEnabled: false + proverAgent: + count: 0 + pollIntervalMs: 1000 + proofTypes: [] + proverBroker: + enabled: false + jobTimeoutMs: 30000 + pollIntervalMs: 1000 + jobMaxRetries: 3 + dataDirectory: "" resources: requests: memory: "2Gi" @@ -206,12 +215,13 @@ proverAgent: nodePort: 8083 enabled: true replicas: 1 + pollIntervalMs: 1000 + proofTypes: ["foo", "bar", "baz"] gke: spotEnabled: false logLevel: "debug" debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" realProofs: false - concurrency: 1 bb: hardwareConcurrency: "" nodeSelector: {} @@ -222,6 +232,10 @@ proverBroker: nodePort: 8084 enabled: true replicas: 1 + jobTimeoutMs: 30000 + pollIntervalMs: 1000 + jobMaxRetries: 3 + dataDirectory: "" logLevel: "debug" debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" nodeSelector: {} diff --git a/yarn-project/circuit-types/src/interfaces/prover-broker.ts b/yarn-project/circuit-types/src/interfaces/prover-broker.ts index a81ec6316ad..06457fdb1e1 100644 --- 
a/yarn-project/circuit-types/src/interfaces/prover-broker.ts +++ b/yarn-project/circuit-types/src/interfaces/prover-broker.ts @@ -34,7 +34,7 @@ export const proverBrokerConfigMappings: ConfigMappingsType proverBrokerJobTimeoutMs: { env: 'PROVER_BROKER_JOB_TIMEOUT_MS', description: 'Jobs are retried if not kept alive for this long', - ...numberConfigHelper(60_000), + ...numberConfigHelper(30_000), }, proverBrokerPollIntervalMs: { env: 'PROVER_BROKER_POLL_INTERVAL_MS', From 98ac426891c4480bf79629531fdc7fb9078286f6 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 26 Nov 2024 13:13:13 +0000 Subject: [PATCH 4/9] fix: native network --- .../end-to-end/scripts/native-network/prover-node.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/yarn-project/end-to-end/scripts/native-network/prover-node.sh b/yarn-project/end-to-end/scripts/native-network/prover-node.sh index c6388c91e39..774ffcbe617 100755 --- a/yarn-project/end-to-end/scripts/native-network/prover-node.sh +++ b/yarn-project/end-to-end/scripts/native-network/prover-node.sh @@ -35,15 +35,15 @@ export BOOTSTRAP_NODES=$(echo "$output" | grep -oP 'Node ENR: \K.*') export LOG_LEVEL=${LOG_LEVEL:-"debug"} export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} export ETHEREUM_HOST="http://127.0.0.1:8545" -export PROVER_AGENT_ENABLED="true" +export PROVER_AGENT_COUNT="1" +export PROVER_BROKER_ENABLED="true" export PROVER_PUBLISHER_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" export PROVER_COORDINATION_NODE_URL="http://127.0.0.1:8080" export AZTEC_NODE_URL="http://127.0.0.1:8080" -export PROVER_JOB_SOURCE_URL="http://127.0.0.1:$PORT" export OTEL_RESOURCE_ATTRIBUTES="service.name=prover-node-${PORT}" export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT="${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-}" # Start the Prover Node with the prover and archiver -node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --port="$PORT" --prover-node --prover --archiver +node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --port="$PORT" --prover-node --archiver From 76bcbd78c15b8df33d40d0d3da29d101abaaca0c Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 26 Nov 2024 13:13:25 +0000 Subject: [PATCH 5/9] fix: provernet docker compose --- docker-compose.provernet.yml | 31 +++++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/docker-compose.provernet.yml b/docker-compose.provernet.yml index 83e6cd1913f..7b398b0b0a3 100644 --- a/docker-compose.provernet.yml +++ b/docker-compose.provernet.yml @@ -65,7 +65,8 @@ services: ARCHIVER_POLLING_INTERVAL_MS: 1000 ARCHIVER_VIEM_POLLING_INTERVAL_MS: 1000 PROVER_VIEM_POLLING_INTERVAL_MS: 1000 - PROVER_AGENT_ENABLED: false + PROVER_AGENT_COUNT: 0 + PROVER_BROKER_URL: http://aztec-prover-broker PROVER_PUBLISHER_PRIVATE_KEY: "0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97" PROVER_REAL_PROOFS: "${PROVER_REAL_PROOFS:-false}" PROVER_MINIMUM_ESCROW_AMOUNT: 1000000000 @@ -76,6 +77,8 @@ services: depends_on: aztec-node: condition: service_healthy + aztec-prover-broker: + condition: service_healthy healthcheck: test: [ "CMD", "curl", "-fSs", "http://127.0.0.1:80/status" ] interval: 3s @@ -84,6 +87,21 @@ services: command: [ 
"start", "--prover-node", "--archiver" ] restart: on-failure:5 + aztec-prover-broker: + image: "aztecprotocol/${IMAGE:-aztec:master}" + ports: + - "8084:80" + environment: + LOG_LEVEL: verbose + AZTEC_PORT: 80 + healthcheck: + test: [ "CMD", "curl", "-fSs", "http://127.0.0.1:80/status" ] + interval: 3s + timeout: 30s + start_period: 120s + command: [ "start", "--prover-broker" ] + restart: on-failure:5 + # Prover agent that connects to the prover-node for fetching proving jobs and executing them # Multiple instances can be run, or PROVER_AGENT_CONCURRENCY can be increased to run multiple workers in a single instance aztec-prover-agent: @@ -93,13 +111,11 @@ services: environment: LOG_LEVEL: verbose ETHEREUM_HOST: http://ethereum:8545 - AZTEC_NODE_URL: http://aztec-prover # Deprecated, use PROVER_JOB_SOURCE_URL - PROVER_JOB_SOURCE_URL: http://aztec-prover + PROVER_BROKER_URL: http://aztec-prover-broker L1_CHAIN_ID: 31337 AZTEC_PORT: 80 PROVER_REAL_PROOFS: "${PROVER_REAL_PROOFS:-false}" PROVER_TEST_DELAY_MS: "${PROVER_TEST_DELAY_MS:-0}" - PROVER_AGENT_CONCURRENCY: 2 BB_SKIP_CLEANUP: "${BB_SKIP_CLEANUP:-0}" # Persist tmp dirs for debugging PROVER_ID: "${PROVER_ID:-0x01}" volumes: @@ -107,9 +123,12 @@ services: - ./cache/bb-crs/:/root/.bb-crs:rw - ./workdir/bb-prover/:/usr/src/yarn-project/bb:rw depends_on: - aztec-prover: + aztec-prover-broker: condition: service_healthy - command: [ "start", "--prover" ] + command: [ "start", "--prover-agent" ] + deploy: + mode: replicated + replicas: 2 restart: on-failure:5 healthcheck: test: [ "CMD", "curl", "-fSs", "http://127.0.0.1:80/status" ] From ef8d742cd325a11438b6be4b069a7a300f70e5b2 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 26 Nov 2024 15:28:40 +0000 Subject: [PATCH 6/9] refactor: prover broker host --- docker-compose.provernet.yml | 4 +-- .../aztec-network/templates/prover-agent.yaml | 1 - .../aztec-network/templates/prover-node.yaml | 3 -- .../aztec/src/cli/cmds/start_prover_broker.ts | 6 ++-- .../aztec/src/cli/cmds/start_prover_node.ts | 29 ++++++++++++------- .../src/interfaces/prover-agent.ts | 2 +- .../src/interfaces/prover-broker.ts | 7 ----- .../scripts/native-network/prover-node.sh | 3 +- yarn-project/foundation/src/config/env_var.ts | 3 +- yarn-project/prover-client/src/config.ts | 13 +-------- 10 files changed, 28 insertions(+), 43 deletions(-) diff --git a/docker-compose.provernet.yml b/docker-compose.provernet.yml index 7b398b0b0a3..372d2602e9e 100644 --- a/docker-compose.provernet.yml +++ b/docker-compose.provernet.yml @@ -66,7 +66,7 @@ services: ARCHIVER_VIEM_POLLING_INTERVAL_MS: 1000 PROVER_VIEM_POLLING_INTERVAL_MS: 1000 PROVER_AGENT_COUNT: 0 - PROVER_BROKER_URL: http://aztec-prover-broker + PROVER_BROKER_HOST: http://aztec-prover-broker PROVER_PUBLISHER_PRIVATE_KEY: "0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97" PROVER_REAL_PROOFS: "${PROVER_REAL_PROOFS:-false}" PROVER_MINIMUM_ESCROW_AMOUNT: 1000000000 @@ -111,7 +111,7 @@ services: environment: LOG_LEVEL: verbose ETHEREUM_HOST: http://ethereum:8545 - PROVER_BROKER_URL: http://aztec-prover-broker + PROVER_BROKER_HOST: http://aztec-prover-broker L1_CHAIN_ID: 31337 AZTEC_PORT: 80 PROVER_REAL_PROOFS: "${PROVER_REAL_PROOFS:-false}" diff --git a/spartan/aztec-network/templates/prover-agent.yaml b/spartan/aztec-network/templates/prover-agent.yaml index 1367340c705..ef080501868 100644 --- a/spartan/aztec-network/templates/prover-agent.yaml +++ b/spartan/aztec-network/templates/prover-agent.yaml @@ -77,7 +77,6 @@ spec: - "-c" - | source 
/shared/config/service-addresses && \ - PROVER_BROKER_URL=${PROVER_NODE_HOST} \ node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --prover-agent env: - name: AZTEC_PORT diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index 4a3ab02bf19..bf13dad1821 100644 --- a/spartan/aztec-network/templates/prover-node.yaml +++ b/spartan/aztec-network/templates/prover-node.yaml @@ -142,9 +142,6 @@ spec: value: "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - # get private proofs from the boot node - - name: PROVER_JOB_SOURCE_URL - value: "http://$(POD_IP):{{ .Values.proverNode.service.nodePort }}" - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts index 26535a7b10a..197d48971c9 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts @@ -1,4 +1,4 @@ -import { type ProverBrokerConfig, proverBrokerConfigMappings } from '@aztec/circuit-types'; +import { type ProverBrokerConfig, type ProvingJobBroker, proverBrokerConfigMappings } from '@aztec/circuit-types'; import { type NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; import { type LogFn } from '@aztec/foundation/log'; import { ProvingJobBrokerSchema, createAndStartProvingBroker } from '@aztec/prover-client/broker'; @@ -11,7 +11,7 @@ export async function startProverBroker( signalHandlers: (() => Promise)[], services: NamespacedApiHandlers, userLog: LogFn, -) { +): Promise { if (options.node || options.sequencer || options.pxe || options.p2pBootstrap || options.txe) { userLog(`Starting a prover broker with --node, --sequencer, --pxe, --p2p-bootstrap, or --txe is not supported.`); process.exit(1); @@ -27,4 +27,6 @@ export async function startProverBroker( signalHandlers.push(() => broker.stop()); await broker.start(); + + return broker; } diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts index 44aa13eee93..0d6fa266edc 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts @@ -1,4 +1,4 @@ -import { ProverNodeApiSchema, createAztecNodeClient } from '@aztec/circuit-types'; +import { ProverNodeApiSchema, type ProvingJobBroker, createAztecNodeClient } from '@aztec/circuit-types'; import { NULL_KEY } from '@aztec/ethereum'; import { type NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; import { type LogFn } from '@aztec/foundation/log'; @@ -14,6 +14,7 @@ import { createAndStartTelemetryClient, telemetryClientConfigMappings } from '@a import { mnemonicToAccount } from 'viem/accounts'; import { extractRelevantOptions } from '../util.js'; +import { startProverBroker } from './start_prover_broker.js'; export async function startProverNode( options: any, @@ -36,14 +37,6 @@ export async function startProverNode( process.exit(1); } - if (options.prover || options.proverAgentEnabled) { - userLog(`Running prover node with local prover agent.`); - proverConfig.proverAgentCount = 
1; - } else { - userLog(`Running prover node without local prover agent. Connect one or more prover agents to this node.`); - proverConfig.proverAgentCount = 0; - } - if (!proverConfig.publisherPrivateKey || proverConfig.publisherPrivateKey === NULL_KEY) { if (!options.l1Mnemonic) { userLog(`--l1-mnemonic is required to start a Prover Node without --node.publisherPrivateKey`); @@ -69,9 +62,23 @@ export async function startProverNode( extractRelevantOptions(options, telemetryClientConfigMappings, 'tel'), ); - const broker = proverConfig.proverBrokerUrl ? createProvingJobBrokerClient(proverConfig.proverBrokerUrl) : undefined; - const proverNode = await createProverNode(proverConfig, { telemetry, broker }); + let broker: ProvingJobBroker; + if (proverConfig.proverBrokerUrl) { + broker = createProvingJobBrokerClient(proverConfig.proverBrokerUrl); + } else if (options.proverBroker) { + broker = await startProverBroker(options, signalHandlers, services, userLog); + } else { + userLog(`--prover-broker-url or --prover-broker is required to start a Prover Node`); + process.exit(1); + } + if (proverConfig.proverAgentCount === 0) { + userLog( + `Running prover node without local prover agent. Connect one or more prover agents to this node or pass --proverAgent.proverAgentCount`, + ); + } + + const proverNode = await createProverNode(proverConfig, { telemetry, broker }); services.proverNode = [proverNode, ProverNodeApiSchema]; if (!proverConfig.proverBrokerUrl) { diff --git a/yarn-project/circuit-types/src/interfaces/prover-agent.ts b/yarn-project/circuit-types/src/interfaces/prover-agent.ts index c0fe9a76160..7a2cb519a9e 100644 --- a/yarn-project/circuit-types/src/interfaces/prover-agent.ts +++ b/yarn-project/circuit-types/src/interfaces/prover-agent.ts @@ -43,7 +43,7 @@ export const proverAgentConfigMappings: ConfigMappingsType = .filter(v => typeof v === 'number'), }, proverBrokerUrl: { - env: 'PROVER_BROKER_URL', + env: 'PROVER_BROKER_HOST', description: 'The URL where this agent takes jobs from', }, realProofs: { diff --git a/yarn-project/circuit-types/src/interfaces/prover-broker.ts b/yarn-project/circuit-types/src/interfaces/prover-broker.ts index 06457fdb1e1..5f11be3347e 100644 --- a/yarn-project/circuit-types/src/interfaces/prover-broker.ts +++ b/yarn-project/circuit-types/src/interfaces/prover-broker.ts @@ -11,8 +11,6 @@ import { type ConfigMappingsType, numberConfigHelper } from '@aztec/foundation/c import { z } from 'zod'; export const ProverBrokerConfig = z.object({ - /** Whether to enable the prover broker */ - proverBrokerEnabled: z.boolean(), /** If starting a prover broker locally, the max number of retries per proving job */ proverBrokerJobMaxRetries: z.number(), /** If starting a prover broker locally, the time after which a job times out and gets assigned to a different agent */ @@ -26,11 +24,6 @@ export const ProverBrokerConfig = z.object({ export type ProverBrokerConfig = z.infer; export const proverBrokerConfigMappings: ConfigMappingsType = { - proverBrokerEnabled: { - env: 'PROVER_BROKER_ENABLED', - description: 'Whether to enable the prover broker', - ...numberConfigHelper(1), - }, proverBrokerJobTimeoutMs: { env: 'PROVER_BROKER_JOB_TIMEOUT_MS', description: 'Jobs are retried if not kept alive for this long', diff --git a/yarn-project/end-to-end/scripts/native-network/prover-node.sh b/yarn-project/end-to-end/scripts/native-network/prover-node.sh index 774ffcbe617..1fa0ac6865c 100755 --- a/yarn-project/end-to-end/scripts/native-network/prover-node.sh +++ 
b/yarn-project/end-to-end/scripts/native-network/prover-node.sh @@ -36,7 +36,6 @@ export LOG_LEVEL=${LOG_LEVEL:-"debug"} export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} export ETHEREUM_HOST="http://127.0.0.1:8545" export PROVER_AGENT_COUNT="1" -export PROVER_BROKER_ENABLED="true" export PROVER_PUBLISHER_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" export PROVER_COORDINATION_NODE_URL="http://127.0.0.1:8080" export AZTEC_NODE_URL="http://127.0.0.1:8080" @@ -46,4 +45,4 @@ export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT: export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT="${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-}" # Start the Prover Node with the prover and archiver -node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --port="$PORT" --prover-node --archiver +node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --port="$PORT" --prover-node --prover-broker --archiver diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 271e99cb54a..ce7c17fb3ef 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -102,7 +102,7 @@ export type EnvVar = | 'PROVER_AGENT_COUNT' | 'PROVER_AGENT_PROOF_TYPES' | 'PROVER_AGENT_POLL_INTERVAL_MS' - | 'PROVER_BROKER_URL' + | 'PROVER_BROKER_HOST' | 'PROVER_BROKER_ENABLED' | 'PROVER_BROKER_JOB_TIMEOUT_MS' | 'PROVER_BROKER_POLL_INTERVAL_MS' @@ -113,7 +113,6 @@ export type EnvVar = | 'PROVER_ID' | 'PROVER_JOB_POLL_INTERVAL_MS' | 'PROVER_JOB_TIMEOUT_MS' - | 'PROVER_JOB_SOURCE_URL' | 'PROVER_NODE_POLLING_INTERVAL_MS' | 'PROVER_NODE_MAX_PENDING_JOBS' | 'PROVER_PUBLISH_RETRY_INTERVAL_MS' diff --git a/yarn-project/prover-client/src/config.ts b/yarn-project/prover-client/src/config.ts index 8d64bde6cb2..347301ebb26 100644 --- a/yarn-project/prover-client/src/config.ts +++ b/yarn-project/prover-client/src/config.ts @@ -12,14 +12,7 @@ import { type ConfigMappingsType, booleanConfigHelper, getConfigFromMappings } f /** * The prover configuration. 
*/ -export type ProverClientConfig = ProverConfig & - ProverAgentConfig & - ProverBrokerConfig & - BBConfig & - ACVMConfig & { - /** The URL to the Aztec prover node to take proving jobs from */ - proverJobSourceUrl?: string; - }; +export type ProverClientConfig = ProverConfig & ProverAgentConfig & ProverBrokerConfig & BBConfig & ACVMConfig; export const bbConfigMappings: ConfigMappingsType = { acvmWorkingDirectory: { @@ -46,10 +39,6 @@ export const bbConfigMappings: ConfigMappingsType = { }; export const proverClientConfigMappings: ConfigMappingsType = { - proverJobSourceUrl: { - env: 'PROVER_JOB_SOURCE_URL', - description: 'The URL to the Aztec prover node to take proving jobs from', - }, ...bbConfigMappings, ...proverConfigMappings, ...proverAgentConfigMappings, From 072316f1305527d7337675cedf60bbf79f3beb35 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Wed, 27 Nov 2024 13:48:57 +0000 Subject: [PATCH 7/9] Apply suggestions from code review Co-authored-by: Santiago Palladino --- yarn-project/aztec/src/cli/cmds/start_prover_agent.ts | 2 +- .../prover-client/src/proving_broker/caching_broker_facade.ts | 2 +- yarn-project/prover-client/src/tx-prover/tx-prover.ts | 1 - yarn-project/prover-node/src/prover-cache/cache_manager.ts | 2 +- 4 files changed, 3 insertions(+), 4 deletions(-) diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts b/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts index 7e27676b1e0..3ae24df0ad9 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts @@ -25,7 +25,7 @@ export async function startProverAgent( ...extractRelevantOptions(options, proverAgentConfigMappings, 'proverAgent'), // override with command line options }; - if (config.realProofs && (!config.bbBinaryPath || config.acvmBinaryPath)) { + if (config.realProofs && (!config.bbBinaryPath || !config.acvmBinaryPath)) { process.exit(1); } diff --git a/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts index c6c0356709c..7ead3ff334f 100644 --- a/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts +++ b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts @@ -81,13 +81,13 @@ export class CachingBrokerFacade implements ServerCircuitProver { if (!jobEnqueued) { try { - await this.cache.setProvingJobStatus(id, { status: 'in-queue' }); const inputsUri = await this.proofStore.saveProofInput(id, type, inputs); await this.broker.enqueueProvingJob({ id, type, inputsUri, }); + await this.cache.setProvingJobStatus(id, { status: 'in-queue' }); } catch (err) { await this.cache.setProvingJobStatus(id, { status: 'not-found' }); throw err; diff --git a/yarn-project/prover-client/src/tx-prover/tx-prover.ts b/yarn-project/prover-client/src/tx-prover/tx-prover.ts index 121af71c801..9bd34df56ca 100644 --- a/yarn-project/prover-client/src/tx-prover/tx-prover.ts +++ b/yarn-project/prover-client/src/tx-prover/tx-prover.ts @@ -99,7 +99,6 @@ export class TxProver implements EpochProverManager { } this.running = false; await this.stopAgents(); - // TODO(palla/prover-node): Keep a reference to all proving orchestrators that are alive and stop them? 
} /** diff --git a/yarn-project/prover-node/src/prover-cache/cache_manager.ts b/yarn-project/prover-node/src/prover-cache/cache_manager.ts index c5583a48d8c..d2b08e3bab4 100644 --- a/yarn-project/prover-node/src/prover-cache/cache_manager.ts +++ b/yarn-project/prover-node/src/prover-cache/cache_manager.ts @@ -37,7 +37,7 @@ export class ProverCacheManager { return new KVProverCache(store); } - public async removedStaleCaches(currentEpochNumber: bigint): Promise { + public async removeStaleCaches(currentEpochNumber: bigint): Promise { if (!this.cacheDir) { return; } From 38cd368ac10cee09f9f4f4cb3e29272812eadfbc Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Wed, 27 Nov 2024 14:42:19 +0000 Subject: [PATCH 8/9] fix: code review --- .../src/interfaces/prover-client.ts | 5 ++ yarn-project/end-to-end/webpack.config.js | 2 - .../proving_broker/caching_broker_facade.ts | 62 +++++++++++-------- .../src/proving_broker/proof_store.ts | 33 +++++----- .../src/proving_broker/prover_cache/memory.ts | 4 ++ .../src/proving_broker/proving_agent.ts | 8 +-- .../src/proving_broker/proving_broker.ts | 2 +- .../prover-client/src/proving_broker/rpc.ts | 13 ++-- .../src/prover-cache/cache_manager.ts | 11 +++- .../prover-node/src/prover-cache/kv_cache.ts | 6 +- yarn-project/prover-node/src/prover-node.ts | 3 +- 11 files changed, 88 insertions(+), 61 deletions(-) diff --git a/yarn-project/circuit-types/src/interfaces/prover-client.ts b/yarn-project/circuit-types/src/interfaces/prover-client.ts index b09fc665a90..bf1ef3d6485 100644 --- a/yarn-project/circuit-types/src/interfaces/prover-client.ts +++ b/yarn-project/circuit-types/src/interfaces/prover-client.ts @@ -93,6 +93,11 @@ export interface ProverCache { * @param jobId - The job ID */ getProvingJobStatus(jobId: string): Promise; + + /** + * Closes the cache + */ + close(): Promise; } /** diff --git a/yarn-project/end-to-end/webpack.config.js b/yarn-project/end-to-end/webpack.config.js index 8fb6aefc963..88f6bb5178c 100644 --- a/yarn-project/end-to-end/webpack.config.js +++ b/yarn-project/end-to-end/webpack.config.js @@ -43,8 +43,6 @@ export default { new webpack.DefinePlugin({ 'process.env': { NODE_ENV: JSON.stringify('production'), - LOG_LEVEL: JSON.stringify('debug'), - DEBUG: JSON.stringify('aztec:*'), }, }), new webpack.ProvidePlugin({ Buffer: ['buffer', 'Buffer'] }), diff --git a/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts index 7ead3ff334f..57f979e9e79 100644 --- a/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts +++ b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts @@ -9,28 +9,28 @@ import { type PublicInputsAndRecursiveProof, type ServerCircuitProver, } from '@aztec/circuit-types'; -import type { - AVM_PROOF_LENGTH_IN_FIELDS, - AvmCircuitInputs, - BaseOrMergeRollupPublicInputs, - BaseParityInputs, - BlockMergeRollupInputs, - BlockRootOrBlockMergePublicInputs, - BlockRootRollupInputs, - EmptyBlockRootRollupInputs, - KernelCircuitPublicInputs, - MergeRollupInputs, - NESTED_RECURSIVE_PROOF_LENGTH, - ParityPublicInputs, - PrivateBaseRollupInputs, - PrivateKernelEmptyInputData, - PublicBaseRollupInputs, - RECURSIVE_PROOF_LENGTH, - RootParityInputs, - RootRollupInputs, - RootRollupPublicInputs, - TUBE_PROOF_LENGTH, - TubeInputs, +import { + type AVM_PROOF_LENGTH_IN_FIELDS, + type AvmCircuitInputs, + type BaseOrMergeRollupPublicInputs, + type BaseParityInputs, + type BlockMergeRollupInputs, + type 
BlockRootOrBlockMergePublicInputs, + type BlockRootRollupInputs, + type EmptyBlockRootRollupInputs, + type KernelCircuitPublicInputs, + type MergeRollupInputs, + type NESTED_RECURSIVE_PROOF_LENGTH, + type ParityPublicInputs, + type PrivateBaseRollupInputs, + type PrivateKernelEmptyInputData, + type PublicBaseRollupInputs, + type RECURSIVE_PROOF_LENGTH, + type RootParityInputs, + type RootRollupInputs, + type RootRollupPublicInputs, + type TUBE_PROOF_LENGTH, + type TubeInputs, } from '@aztec/circuits.js'; import { sha256 } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -39,6 +39,9 @@ import { retryUntil } from '@aztec/foundation/retry'; import { InlineProofStore, type ProofStore } from './proof_store.js'; import { InMemoryProverCache } from './prover_cache/memory.js'; +// 20 minutes, roughly the length of an Aztec epoch. If a proof isn't ready in this amount of time then we've failed to prove the whole epoch +const MAX_WAIT_MS = 1_200_000; + /** * A facade around a job broker that generates stable job ids and caches results */ @@ -47,6 +50,8 @@ export class CachingBrokerFacade implements ServerCircuitProver { private broker: ProvingJobProducer, private cache: ProverCache = new InMemoryProverCache(), private proofStore: ProofStore = new InlineProofStore(), + private waitTimeoutMs = MAX_WAIT_MS, + private pollIntervalMs = 1000, private log = createDebugLogger('aztec:prover-client:caching-prover-broker'), ) {} @@ -106,10 +111,17 @@ export class CachingBrokerFacade implements ServerCircuitProver { // loop here until the job settles // NOTE: this could also terminate because the job was cancelled through event listener above const result = await retryUntil( - () => this.broker.waitForJobToSettle(id), + async () => { + try { + return await this.broker.waitForJobToSettle(id); + } catch (err) { + // waitForJobToSettle can only fail for network errors + // keep retrying until we time out + } + }, `Proving job=${id} type=${ProvingRequestType[type]}`, - 0, - 1, + this.waitTimeoutMs / 1000, + this.pollIntervalMs / 1000, ); try { diff --git a/yarn-project/prover-client/src/proving_broker/proof_store.ts b/yarn-project/prover-client/src/proving_broker/proof_store.ts index b5cd17e3b83..cfeee1f866e 100644 --- a/yarn-project/prover-client/src/proving_broker/proof_store.ts +++ b/yarn-project/prover-client/src/proving_broker/proof_store.ts @@ -53,9 +53,10 @@ export interface ProofStore { getProofOutput(uri: ProofUri): Promise; } -const PREFIX = 'data:application/json;base64'; +// use an ASCII encoded data uri https://datatracker.ietf.org/doc/html/rfc2397#section-2 +// we do this to avoid double encoding to base64 (since the inputs already serialize to a base64 string) +const PREFIX = 'data:application/json;charset=utf-8'; const SEPARATOR = ','; -const BUFFER_ENCODING = 'base64url'; /** * An implementation of a proof input/output database that stores data inline in the URI. 
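
The switch above from a base64 data URI to a charset=utf-8 one avoids base64-encoding payloads that already serialize to base64 strings. A minimal, self-contained sketch of the round trip, using only standard JavaScript and mirroring the encode/decode helpers added further down in this file:

    const PREFIX = 'data:application/json;charset=utf-8';
    const SEPARATOR = ',';

    // Inline the JSON payload in the URI itself. encodeURIComponent escapes commas (%2C),
    // so the encoded payload contains no literal separators and the split below yields
    // exactly [prefix, data].
    function encode(obj: object): string {
      return PREFIX + SEPARATOR + encodeURIComponent(JSON.stringify(obj));
    }

    function decode(uri: string): object {
      const [prefix, data] = uri.split(SEPARATOR);
      if (prefix !== PREFIX) {
        throw new Error('Invalid proof URI: ' + prefix);
      }
      return JSON.parse(decodeURIComponent(data));
    }

    // Round trip: the inputs travel inside the job record itself, no separate blob store required.
    const uri = encode({ type: 1, inputs: 'AAECAw==' });
    console.log(decode(uri)); // { type: 1, inputs: 'AAECAw==' }
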
@@ -67,9 +68,7 @@ export class InlineProofStore implements ProofStore { inputs: ProvingJobInputsMap[T], ): Promise { const jobInputs = { type, inputs } as ProvingJobInputs; - return Promise.resolve( - (PREFIX + SEPARATOR + Buffer.from(JSON.stringify(jobInputs)).toString(BUFFER_ENCODING)) as ProofUri, - ); + return Promise.resolve(this.encode(jobInputs)); } saveProofOutput( @@ -78,26 +77,28 @@ export class InlineProofStore implements ProofStore { result: ProvingJobResultsMap[T], ): Promise { const jobResult = { type, result } as ProvingJobResult; - return Promise.resolve( - (PREFIX + SEPARATOR + Buffer.from(JSON.stringify(jobResult)).toString(BUFFER_ENCODING)) as ProofUri, - ); + return Promise.resolve(this.encode(jobResult)); } getProofInput(uri: ProofUri): Promise { - const [prefix, data] = uri.split(SEPARATOR); - if (prefix !== PREFIX) { - throw new Error('Invalid proof input URI: ' + prefix); - } - - return Promise.resolve(ProvingJobInputs.parse(JSON.parse(Buffer.from(data, BUFFER_ENCODING).toString()))); + return Promise.resolve(ProvingJobInputs.parse(this.decode(uri))); } getProofOutput(uri: ProofUri): Promise { + return Promise.resolve(ProvingJobResult.parse(this.decode(uri))); + } + + private encode(obj: object): ProofUri { + const encoded = encodeURIComponent(JSON.stringify(obj)); + return (PREFIX + SEPARATOR + encoded) as ProofUri; + } + + private decode(uri: ProofUri): object { const [prefix, data] = uri.split(SEPARATOR); if (prefix !== PREFIX) { - throw new Error('Invalid proof output URI: ' + prefix); + throw new Error('Invalid proof input URI: ' + prefix); } - return Promise.resolve(ProvingJobResult.parse(JSON.parse(Buffer.from(data, BUFFER_ENCODING).toString()))); + return JSON.parse(decodeURIComponent(data)); } } diff --git a/yarn-project/prover-client/src/proving_broker/prover_cache/memory.ts b/yarn-project/prover-client/src/proving_broker/prover_cache/memory.ts index 5e111f68a92..b4da076cbcb 100644 --- a/yarn-project/prover-client/src/proving_broker/prover_cache/memory.ts +++ b/yarn-project/prover-client/src/proving_broker/prover_cache/memory.ts @@ -13,4 +13,8 @@ export class InMemoryProverCache implements ProverCache { getProvingJobStatus(jobId: string): Promise { return Promise.resolve(this.proofs[jobId] ?? 
{ status: 'not-found' }); } + + close(): Promise { + return Promise.resolve(); + } } diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent.ts b/yarn-project/prover-client/src/proving_broker/proving_agent.ts index 01db71f9e2e..b7ee2eb69f8 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_agent.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_agent.ts @@ -8,7 +8,6 @@ import { ProvingRequestType, type ServerCircuitProver, } from '@aztec/circuit-types'; -import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; @@ -33,8 +32,7 @@ export class ProvingAgent { private proofAllowList: Array = [], /** How long to wait between jobs */ private pollIntervalMs = 1000, - name = randomBytes(4).toString('hex'), - private log = createDebugLogger('aztec:prover-client:proving-agent:' + name), + private log = createDebugLogger('aztec:prover-client:proving-agent'), ) { this.runningPromise = new RunningPromise(this.safeWork, this.pollIntervalMs); } @@ -77,8 +75,8 @@ export class ProvingAgent { return; } - let abortedProofJobId: string = ''; - let abortedProofName: string = ''; + let abortedProofJobId: string | undefined; + let abortedProofName: string | undefined; if (this.currentJobController?.getStatus() === ProvingJobControllerStatus.PROVING) { abortedProofJobId = this.currentJobController.getJobId(); abortedProofName = this.currentJobController.getProofTypeName(); diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker.ts b/yarn-project/prover-client/src/proving_broker/proving_broker.ts index d66346a3f81..62667821ec7 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker.ts @@ -178,7 +178,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { // exhaust the queue and make sure we're not sending a job that's already in progress // or has already been completed // this can happen if the broker crashes and restarts - // it's possible agents will report progress or results for jobs that are no longer in the queue + // it's possible agents will report progress or results for jobs that are in the queue (after the restart) while ((job = queue.getImmediate())) { if (!this.inProgress.has(job.id) && !this.resultsCache.has(job.id)) { const time = this.timeSource(); diff --git a/yarn-project/prover-client/src/proving_broker/rpc.ts b/yarn-project/prover-client/src/proving_broker/rpc.ts index 0db7e38f7c0..9895e7937dc 100644 --- a/yarn-project/prover-client/src/proving_broker/rpc.ts +++ b/yarn-project/prover-client/src/proving_broker/rpc.ts @@ -12,7 +12,7 @@ import { } from '@aztec/circuit-types'; import { createSafeJsonRpcClient, makeFetch } from '@aztec/foundation/json-rpc/client'; import { type SafeJsonRpcServer, createSafeJsonRpcServer } from '@aztec/foundation/json-rpc/server'; -import { type ApiSchemaFor } from '@aztec/foundation/schemas'; +import { type ApiSchemaFor, optional } from '@aztec/foundation/schemas'; import { z } from 'zod'; @@ -32,16 +32,15 @@ export const ProvingJobProducerSchema: ApiSchemaFor = { waitForJobToSettle: z.function().args(ProvingJobId).returns(ProvingJobSettledResult), }; -// can't use ApiSchemaFor because of the optional parameters -export const ProvingJobConsumerSchema = { - getProvingJob: z.function().args(ProvingJobFilterSchema.optional()).returns(GetProvingJobResponse.optional()), 
-  reportProvingJobError: z.function().args(ProvingJobId, z.string(), z.boolean().optional()).returns(z.void()),
+export const ProvingJobConsumerSchema: ApiSchemaFor = {
+  getProvingJob: z.function().args(optional(ProvingJobFilterSchema)).returns(GetProvingJobResponse.optional()),
+  reportProvingJobError: z.function().args(ProvingJobId, z.string(), optional(z.boolean())).returns(z.void()),
   reportProvingJobProgress: z
     .function()
-    .args(ProvingJobId, z.number(), ProvingJobFilterSchema.optional())
+    .args(ProvingJobId, z.number(), optional(ProvingJobFilterSchema))
     .returns(GetProvingJobResponse.optional()),
   reportProvingJobSuccess: z.function().args(ProvingJobId, ProofUri).returns(z.void()),
-} as unknown as ApiSchemaFor;
+};
 
 export const ProvingJobBrokerSchema: ApiSchemaFor = {
   ...ProvingJobConsumerSchema,
diff --git a/yarn-project/prover-node/src/prover-cache/cache_manager.ts b/yarn-project/prover-node/src/prover-cache/cache_manager.ts
index d2b08e3bab4..b15693ecffe 100644
--- a/yarn-project/prover-node/src/prover-cache/cache_manager.ts
+++ b/yarn-project/prover-node/src/prover-cache/cache_manager.ts
@@ -34,10 +34,15 @@ export class ProverCacheManager {
     const store = AztecLmdbStore.open(dataDir);
     this.log.debug(`Created new database for epoch ${epochNumber} at ${dataDir}`);
-    return new KVProverCache(store);
+    const cleanup = () => store.close();
+    return new KVProverCache(store, cleanup);
   }
 
-  public async removeStaleCaches(currentEpochNumber: bigint): Promise {
+  /**
+   * Removes all caches for epochs up to and including the given epoch
+   * @param upToAndIncludingEpoch - The epoch number up to which to remove caches
+   */
+  public async removeStaleCaches(upToAndIncludingEpoch: bigint): Promise {
     if (!this.cacheDir) {
       return;
     }
@@ -55,7 +60,7 @@ export class ProverCacheManager {
       }
 
       const epochNumberInt = BigInt(epochNumber);
-      if (epochNumberInt < currentEpochNumber) {
+      if (epochNumberInt <= upToAndIncludingEpoch) {
         this.log.info(`Removing old epoch database for epoch ${epochNumberInt} at ${join(this.cacheDir, item.name)}`);
         await rm(join(this.cacheDir, item.name), { recursive: true });
       }
diff --git a/yarn-project/prover-node/src/prover-cache/kv_cache.ts b/yarn-project/prover-node/src/prover-cache/kv_cache.ts
index 28b6f9af40e..82b216e384a 100644
--- a/yarn-project/prover-node/src/prover-cache/kv_cache.ts
+++ b/yarn-project/prover-node/src/prover-cache/kv_cache.ts
@@ -4,7 +4,7 @@ import type { AztecKVStore, AztecMap } from '@aztec/kv-store';
 
 export class KVProverCache implements ProverCache {
   private proofs: AztecMap;
 
-  constructor(store: AztecKVStore) {
+  constructor(store: AztecKVStore, private cleanup?: () => Promise) {
     this.proofs = store.openMap('prover_node_proof_status');
   }
 
@@ -20,4 +20,8 @@ export class KVProverCache implements ProverCache {
   setProvingJobStatus(jobId: string, status: ProvingJobStatus): Promise {
     return this.proofs.set(jobId, JSON.stringify(status));
   }
+
+  async close(): Promise {
+    await this.cleanup?.();
+  }
 }
diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts
index 4f1fb3a6c07..0c63bc79b40 100644
--- a/yarn-project/prover-node/src/prover-node.ts
+++ b/yarn-project/prover-node/src/prover-node.ts
@@ -260,7 +260,8 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, Pr
     const cleanUp = async () => {
       await publicDb.close();
       await proverDb.close();
-      await this.proverCacheManager.removedStaleCaches(epochNumber);
+      await proverCache.close();
+      await 
this.proverCacheManager.removeStaleCaches(epochNumber); this.jobs.delete(job.getId()); }; From bb5ff3c0ce6de62da71a435f2606171392eec6f3 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Wed, 27 Nov 2024 16:28:36 +0000 Subject: [PATCH 9/9] fix: serialisation --- .../src/proving_broker/caching_broker_facade.test.ts | 7 ------- .../src/proving_broker/caching_broker_facade.ts | 5 +++++ .../prover-client/src/proving_broker/proof_store.ts | 12 +++++++----- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/yarn-project/prover-client/src/proving_broker/caching_broker_facade.test.ts b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.test.ts index fa357379d90..f4782e092ac 100644 --- a/yarn-project/prover-client/src/proving_broker/caching_broker_facade.test.ts +++ b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.test.ts @@ -1,7 +1,6 @@ import { type ProvingJobProducer, ProvingRequestType, makePublicInputsAndRecursiveProof } from '@aztec/circuit-types'; import { RECURSIVE_PROOF_LENGTH, VerificationKeyData, makeRecursiveProof } from '@aztec/circuits.js'; import { makeBaseParityInputs, makeParityPublicInputs } from '@aztec/circuits.js/testing'; -import { AbortError } from '@aztec/foundation/error'; import { promiseWithResolvers } from '@aztec/foundation/promise'; import { jest } from '@jest/globals'; @@ -54,7 +53,6 @@ describe('CachingBrokerFacade', () => { await jest.advanceTimersToNextTimerAsync(); const job = broker.enqueueProvingJob.mock.calls[0][0]; - await expect(cache.getProvingJobStatus(job.id)).resolves.toEqual({ status: 'in-queue' }); reject(new Error('Failed to enqueue job')); @@ -63,9 +61,6 @@ describe('CachingBrokerFacade', () => { }); it('awaits existing job if in progress', async () => { - const { promise, reject } = promiseWithResolvers(); - broker.enqueueProvingJob.mockResolvedValue(promise); - const inputs = makeBaseParityInputs(); void facade.getBaseParityProof(inputs).catch(() => {}); await jest.advanceTimersToNextTimerAsync(); @@ -74,8 +69,6 @@ describe('CachingBrokerFacade', () => { void facade.getBaseParityProof(inputs).catch(() => {}); await jest.advanceTimersToNextTimerAsync(); expect(broker.enqueueProvingJob).toHaveBeenCalledTimes(1); - - reject(new AbortError('Job was cancelled')); }); it('reuses already cached results', async () => { diff --git a/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts index 57f979e9e79..2885350d958 100644 --- a/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts +++ b/yarn-project/prover-client/src/proving_broker/caching_broker_facade.ts @@ -65,6 +65,10 @@ export class CachingBrokerFacade implements ServerCircuitProver { let jobEnqueued = false; try { const cachedResult = await this.cache.getProvingJobStatus(id); + if (cachedResult.status !== 'not-found') { + this.log.debug(`Found cached result for job=${id}: status=${cachedResult.status}`); + } + if (cachedResult.status === 'fulfilled') { const output = await this.proofStore.getProofOutput(cachedResult.value); if (output.type === type) { @@ -94,6 +98,7 @@ export class CachingBrokerFacade implements ServerCircuitProver { }); await this.cache.setProvingJobStatus(id, { status: 'in-queue' }); } catch (err) { + this.log.error(`Failed to enqueue proving job id=${id}: ${err}`); await this.cache.setProvingJobStatus(id, { status: 'not-found' }); throw err; } diff --git a/yarn-project/prover-client/src/proving_broker/proof_store.ts 
b/yarn-project/prover-client/src/proving_broker/proof_store.ts index cfeee1f866e..9f605170ed3 100644 --- a/yarn-project/prover-client/src/proving_broker/proof_store.ts +++ b/yarn-project/prover-client/src/proving_broker/proof_store.ts @@ -7,6 +7,8 @@ import { type ProvingJobResultsMap, type ProvingRequestType, } from '@aztec/circuit-types'; +import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc'; +import { type ZodFor } from '@aztec/foundation/schemas'; /** * A database for storing proof inputs and outputs. @@ -81,24 +83,24 @@ export class InlineProofStore implements ProofStore { } getProofInput(uri: ProofUri): Promise { - return Promise.resolve(ProvingJobInputs.parse(this.decode(uri))); + return Promise.resolve(this.decode(uri, ProvingJobInputs)); } getProofOutput(uri: ProofUri): Promise { - return Promise.resolve(ProvingJobResult.parse(this.decode(uri))); + return Promise.resolve(this.decode(uri, ProvingJobResult)); } private encode(obj: object): ProofUri { - const encoded = encodeURIComponent(JSON.stringify(obj)); + const encoded = encodeURIComponent(jsonStringify(obj)); return (PREFIX + SEPARATOR + encoded) as ProofUri; } - private decode(uri: ProofUri): object { + private decode(uri: ProofUri, schema: ZodFor): T { const [prefix, data] = uri.split(SEPARATOR); if (prefix !== PREFIX) { throw new Error('Invalid proof input URI: ' + prefix); } - return JSON.parse(decodeURIComponent(data)); + return jsonParseWithSchema(decodeURIComponent(data), schema); } }
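The final proof_store.ts hunk above swaps plain JSON.parse for jsonStringify/jsonParseWithSchema so that decoded URIs are validated against a Zod schema rather than returned as untyped objects. The snippet below is a standalone approximation of that contract using vanilla zod; the schema and payload are made up for illustration and are not the real ProvingJobInputs/ProvingJobResult schemas.

import { z } from 'zod';

// Hypothetical schema standing in for ProvingJobResult; illustration only.
const SketchJobResult = z.object({
  type: z.number(),
  result: z.string(),
});
type SketchJobResult = z.infer<typeof SketchJobResult>;

const DATA_URI_PREFIX = 'data:application/json;charset=utf-8';

function decodeWithSchema<T>(uri: string, schema: z.ZodType<T>): T {
  const [prefix, data] = uri.split(',');
  if (prefix !== DATA_URI_PREFIX) {
    throw new Error('Invalid URI prefix: ' + prefix);
  }
  // Parsing through the schema rejects malformed or mistyped payloads
  // instead of silently returning an untyped object.
  return schema.parse(JSON.parse(decodeURIComponent(data)));
}

const encoded = DATA_URI_PREFIX + ',' + encodeURIComponent(JSON.stringify({ type: 3, result: 'proof-bytes' }));
const result: SketchJobResult = decodeWithSchema(encoded, SketchJobResult);
console.log(result.result);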
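Relatedly, the CachingBrokerFacade change earlier in the series polls waitForJobToSettle with a configurable interval and an overall timeout of roughly one epoch, swallowing transient network errors instead of failing fast. retryUntil in @aztec/foundation has its own signature (timeout and interval in seconds), so the helper below is only a generic sketch of that polling pattern, not the real API.

// Poll until the callback returns a value, retrying on transient errors, up to a deadline.
async function pollUntilSettled<T>(
  poll: () => Promise<T | undefined>,
  timeoutMs: number,
  intervalMs: number,
): Promise<T> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      const settled = await poll();
      if (settled !== undefined) {
        return settled;
      }
    } catch {
      // Transient network error: keep polling until the deadline.
    }
    await new Promise(resolve => setTimeout(resolve, intervalMs));
  }
  throw new Error('Timed out waiting for job to settle');
}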