diff --git a/barretenberg/acir_tests/Dockerfile.bb.js b/barretenberg/acir_tests/Dockerfile.bb.js index 248d14aeedf..760b231fc90 100644 --- a/barretenberg/acir_tests/Dockerfile.bb.js +++ b/barretenberg/acir_tests/Dockerfile.bb.js @@ -2,7 +2,7 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/bb.js FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir-acir-tests as noir-acir-tests FROM node:18.19.0 -COPY --from=0 /usr/src/barretenberg/ts /usr/src/barretenberg/ts +COPY --from=0 /usr/src/barretenberg/ts-build /usr/src/barretenberg/ts COPY --from=noir-acir-tests /usr/src/noir/test_programs /usr/src/noir/test_programs RUN apt update && apt install -y lsof jq WORKDIR /usr/src/barretenberg/acir_tests diff --git a/barretenberg/ts/Dockerfile b/barretenberg/ts/Dockerfile index 2d01fb8f76f..d28b5661b2d 100644 --- a/barretenberg/ts/Dockerfile +++ b/barretenberg/ts/Dockerfile @@ -5,7 +5,7 @@ COPY --from=0 /usr/src/barretenberg /usr/src/barretenberg # Create a standalone container that can run bb.js (and tests). # We'll perform the build in a new, different directory, so the original directory can become the "published" package. -WORKDIR /usr/src/barretenberg/ts +WORKDIR /usr/src/barretenberg/ts-build # Leverage layer caching. Only re-install packages if these files change. COPY .yarn .yarn COPY package.json package.json @@ -17,4 +17,4 @@ RUN yarn formatting && SKIP_CPP_BUILD=1 yarn build CMD ["yarn", "test"] # We want to create a pure package, as would be published to npm, for consuming projects. 
-RUN yarn pack && tar zxf package.tgz && rm package.tgz \ No newline at end of file +RUN yarn pack && tar zxf package.tgz && rm package.tgz && mv package ../ts \ No newline at end of file diff --git a/barretenberg/ts/package.json b/barretenberg/ts/package.json index 64b06f27e09..8deb94192b6 100644 --- a/barretenberg/ts/package.json +++ b/barretenberg/ts/package.json @@ -20,14 +20,13 @@ "README.md" ], "scripts": { - "clean": "rm -rf ./dest .tsbuildinfo .tsbuildinfo.cjs package.tgz package", - "build": "yarn clean && yarn build:wasm && yarn build:esm && yarn build:cjs && yarn build:browser && yarn build:package", + "clean": "rm -rf ./dest .tsbuildinfo .tsbuildinfo.cjs", + "build": "yarn clean && yarn build:wasm && yarn build:esm && yarn build:cjs && yarn build:browser", "build:wasm": "./scripts/build_wasm.sh", "build:esm": "tsc -b && chmod +x ./dest/node/main.js", "build:cjs": "tsc -b tsconfig.cjs.json && ./scripts/cjs_postprocess.sh", "build:browser": "webpack", "build:bindings": "cd .. && ./scripts/bindgen.sh", - "build:package": "yarn pack && tar zxf package.tgz && rm -f package.tgz", "formatting": "prettier --check ./src && eslint --max-warnings 0 ./src", "formatting:fix": "prettier -w ./src", "test": "NODE_OPTIONS='--loader ts-node/esm' NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --no-cache --passWithNoTests", diff --git a/barretenberg/ts/src/barretenberg/index.ts b/barretenberg/ts/src/barretenberg/index.ts index 40b2ef0261e..6019b24e88f 100644 --- a/barretenberg/ts/src/barretenberg/index.ts +++ b/barretenberg/ts/src/barretenberg/index.ts @@ -41,7 +41,8 @@ export class Barretenberg extends BarretenbergApi { } } -let barretenbergSyncSingleton: Promise; +let barretenbergSyncSingleton: BarretenbergSync; +let barretenbergSyncSingletonPromise: Promise; export class BarretenbergSync extends BarretenbergApiSync { private constructor(wasm: BarretenbergWasmMain) { @@ -55,9 +56,16 @@ export class BarretenbergSync extends BarretenbergApiSync { return 
new BarretenbergSync(wasm); } + static initSingleton() { + if (!barretenbergSyncSingletonPromise) { + barretenbergSyncSingletonPromise = BarretenbergSync.new().then(s => (barretenbergSyncSingleton = s)); + } + return barretenbergSyncSingletonPromise; + } + static getSingleton() { if (!barretenbergSyncSingleton) { - barretenbergSyncSingleton = BarretenbergSync.new(); + throw new Error('Initialise first via initSingleton().'); } return barretenbergSyncSingleton; } @@ -66,3 +74,11 @@ export class BarretenbergSync extends BarretenbergApiSync { return this.wasm; } } + +// If we're loading this module in a test environment, just init the singleton immediately for convenience. +if (process.env.NODE_ENV === 'test') { + // Need to ignore for cjs build. + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + await BarretenbergSync.initSingleton(); +} diff --git a/build-system/scripts/augment_prompt b/build-system/scripts/augment_prompt new file mode 100755 index 00000000000..e3dc524043b --- /dev/null +++ b/build-system/scripts/augment_prompt @@ -0,0 +1,2 @@ +# Used to augment the prompt when using start_interactive and zsh. +echo "b " \ No newline at end of file diff --git a/build-system/scripts/build_local b/build-system/scripts/build_local index 7ef1d3c5d8e..cdb9f3a60f3 100755 --- a/build-system/scripts/build_local +++ b/build-system/scripts/build_local @@ -114,6 +114,9 @@ for E in "${PROJECTS[@]}"; do # Retag for aztecprotocol dockerhub.
docker tag $DEPLOY_IMAGE_URI aztecprotocol/$REPO:latest + echo -e "${BOLD}Tagged${RESET}: aztecprotocol/$REPO:latest" + echo -e "${BOLD}SHA256${RESET}: $(docker inspect --format='{{.Id}}' $DEPLOY_IMAGE_URI)" + if [ "$PROJECT_DIR_NAME" = "$TARGET_PROJECT" ]; then if [ -n "$LAUNCH" ]; then docker run -ti --rm aztecprotocol/$REPO:latest diff --git a/build-system/scripts/query_manifest b/build-system/scripts/query_manifest index cd0b5c0888b..1fee0ee5759 100755 --- a/build-system/scripts/query_manifest +++ b/build-system/scripts/query_manifest @@ -4,7 +4,7 @@ set -eu CMD=$1 REPO=$2 -ROOT_PATH=${ROOT_PATH:-$PWD} +ROOT_PATH=${ROOT_PATH:-$(git rev-parse --show-toplevel)} MANIFEST=$ROOT_PATH/build_manifest.yml if [ $(yq "has(\"$REPO\")" $MANIFEST) == "false" ]; then diff --git a/build-system/scripts/setup_env b/build-system/scripts/setup_env index 6a166d97236..8e4058a1c53 100755 --- a/build-system/scripts/setup_env +++ b/build-system/scripts/setup_env @@ -16,8 +16,9 @@ BRANCH=${5:-} PULL_REQUEST=${6:-} BASH_ENV=${BASH_ENV:-} +ROOT_PATH=$(git rev-parse --show-toplevel) BUILD_SYSTEM_PATH=$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd) -PROJECT=$(cat PROJECT) +PROJECT=$(cat $ROOT_PATH/PROJECT) COMMIT_MESSAGE=$(git log -n 1 --pretty=format:"%s" $COMMIT_HASH) PATH=$PATH:$BUILD_SYSTEM_PATH/scripts @@ -64,7 +65,7 @@ if [ -z "$BASH_ENV" ]; then BASH_ENV=$(mktemp) fi -echo export ROOT_PATH=$PWD >> $BASH_ENV +echo export ROOT_PATH=$ROOT_PATH >> $BASH_ENV echo export BUILD_SYSTEM_PATH=$BUILD_SYSTEM_PATH >> $BASH_ENV echo export DOCKER_BUILDKIT=${DOCKER_BUILDKIT:-1} >> $BASH_ENV echo export BUILDKIT_PROGRESS=plain >> $BASH_ENV diff --git a/build-system/start_interactive b/build-system/start_interactive new file mode 100755 index 00000000000..2ed1f2df13a --- /dev/null +++ b/build-system/start_interactive @@ -0,0 +1,6 @@ +#!/bin/bash +# Starts an interactive shell with the build system initialised. +# Good for playing around with build system on development machines. 
+ +source $(dirname "$0")/scripts/setup_env '' '' mainframe_$USER > /dev/null +PROMPT_LEAN_LEFT=augment_prompt $SHELL \ No newline at end of file diff --git a/cspell.json b/cspell.json index 874cc050f0c..22bb21ec9f1 100644 --- a/cspell.json +++ b/cspell.json @@ -42,6 +42,8 @@ "cimg", "clonedeep", "clonedeepwith", + "cmd", + "cmds", "codegen", "comlink", "composability", diff --git a/yarn-project/.dockerignore b/yarn-project/.dockerignore index c227696aa75..f913d69056b 100644 --- a/yarn-project/.dockerignore +++ b/yarn-project/.dockerignore @@ -7,11 +7,10 @@ */data* **/dest -**/*.tsbuildinfo -**/Dockerfile* **/node_modules -Dockerfile* +**/Dockerfile* +**/*.tsbuildinfo noir-contracts/src/types noir-contracts/src/artifacts -noir-contracts/target \ No newline at end of file +noir-contracts/target diff --git a/yarn-project/.yarnrc.yml b/yarn-project/.yarnrc.yml index 843129bf681..3491bc0f4b5 100644 --- a/yarn-project/.yarnrc.yml +++ b/yarn-project/.yarnrc.yml @@ -5,3 +5,7 @@ plugins: spec: '@yarnpkg/plugin-workspace-tools' yarnPath: .yarn/releases/yarn-3.6.3.cjs + +logFilters: + - code: YN0013 + level: discard diff --git a/yarn-project/Dockerfile b/yarn-project/Dockerfile index 99f3e771900..257d93af911 100644 --- a/yarn-project/Dockerfile +++ b/yarn-project/Dockerfile @@ -11,10 +11,10 @@ RUN apk add bash perl # Copy in the entire workspace. COPY . . 
-RUN yarn workspace @aztec/foundation build && \ - yarn workspace @aztec/noir-compiler build && \ - yarn workspace @aztec/noir-contracts noir:build:all && \ - yarn workspace @aztec/noir-protocol-circuits noir:build && \ - yarn tsc -b +RUN yarn workspace @aztec/foundation build +RUN yarn workspace @aztec/noir-compiler build +RUN yarn workspace @aztec/noir-contracts noir:build:all +RUN yarn workspace @aztec/noir-protocol-circuits noir:build +RUN yarn tsc -b ENTRYPOINT ["yarn"] diff --git a/yarn-project/aztec-sandbox/Dockerfile b/yarn-project/aztec-sandbox/Dockerfile index 40731ebdcab..c61714b06a3 100644 --- a/yarn-project/aztec-sandbox/Dockerfile +++ b/yarn-project/aztec-sandbox/Dockerfile @@ -1,5 +1,4 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project-prod AS yarn-project-prod -WORKDIR /usr/src/yarn-project/aztec-sandbox -ENTRYPOINT ["yarn", "start"] +ENTRYPOINT ["node", "--no-warnings", "/usr/src/yarn-project/aztec-sandbox/dest/bin/index.js"] EXPOSE 8079 8080 diff --git a/yarn-project/aztec-sandbox/src/bin/index.ts b/yarn-project/aztec-sandbox/src/bin/index.ts index 31dde1ce825..85e8ac2e6ae 100644 --- a/yarn-project/aztec-sandbox/src/bin/index.ts +++ b/yarn-project/aztec-sandbox/src/bin/index.ts @@ -2,9 +2,10 @@ import { createAztecNodeRpcServer, getConfigEnvVars as getNodeConfigEnvVars } from '@aztec/aztec-node'; import { AccountManager, createAztecNodeClient, deployInitialSandboxAccounts } from '@aztec/aztec.js'; import { NULL_KEY } from '@aztec/ethereum'; +import { init } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { fileURLToPath } from '@aztec/foundation/url'; -import { NoirWasmVersion } from '@aztec/noir-compiler/versions'; +import { NoirCommit } from '@aztec/noir-compiler/versions'; import { BootstrapNode, getP2PConfigEnvVars } from '@aztec/p2p'; import { GrumpkinScalar, PXEService, createPXERpcServer } from '@aztec/pxe'; @@ -76,15 +77,24 @@ async function main() { const mode = MODE as 
SandboxMode; - const createShutdown = (cb?: () => Promise) => async () => { - logger.info('Shutting down...'); - if (cb) { - await cb(); - } - process.exit(0); + const installSignalHandlers = (cb?: () => Promise) => { + const shutdown = async () => { + logger.info('Shutting down...'); + if (cb) { + await cb(); + } + process.exit(0); + }; + process.removeAllListeners('SIGINT'); + process.removeAllListeners('SIGTERM'); + process.once('SIGINT', shutdown); + process.once('SIGTERM', shutdown); }; - let shutdown: () => Promise; + installSignalHandlers(); + + // Init crypto (bb.js). + await init(); const logStrings = []; @@ -97,12 +107,12 @@ async function main() { // Code path for starting Sandbox if (mode === SandboxMode.Sandbox) { - logger.info(`Setting up Aztec Sandbox v${version} (noir v${NoirWasmVersion}), please stand by...`); + logger.info(`Setting up Aztec Sandbox v${version} (noir ${NoirCommit}), please stand by...`); const { pxe, node, stop, accounts } = await createAndInitialiseSandbox(deployTestAccounts); // Create shutdown cleanup function - shutdown = createShutdown(stop); + installSignalHandlers(stop); // Start Node and PXE JSON-RPC servers startHttpRpcServer(node, createAztecNodeRpcServer, AZTEC_NODE_PORT); @@ -115,7 +125,7 @@ async function main() { const accountLogStrings = await createAccountLogs(accounts, pxe); logStrings.push(...accountLogStrings); } - logStrings.push(`Aztec Sandbox v${version} (noir v${NoirWasmVersion}) is now ready for use!`); + logStrings.push(`Aztec Sandbox v${version} (noir ${NoirCommit}) is now ready for use!`); } else if (mode === SandboxMode.Node) { // Code path for starting Node only const nodeConfig = getNodeConfigEnvVars(); @@ -131,13 +141,11 @@ async function main() { } const node = await createAztecNode(nodeConfig); - shutdown = createShutdown(node.stop); + installSignalHandlers(node.stop); // Start Node JSON-RPC server startHttpRpcServer(node, createAztecNodeRpcServer, 8080); // Use standard 8080 when no PXE is running 
- logStrings.push( - `Aztec Node v${version} (noir v${NoirWasmVersion}) is now ready for use in port ${AZTEC_NODE_PORT}!`, - ); + logStrings.push(`Aztec Node v${version} (noir ${NoirCommit}) is now ready for use in port ${AZTEC_NODE_PORT}!`); } else if (mode === SandboxMode.PXE) { // Code path for starting PXE only @@ -145,7 +153,7 @@ async function main() { const node = createAztecNodeClient(AZTEC_NODE_URL); const pxe = await createAztecPXE(node); - shutdown = createShutdown(pxe.stop); + installSignalHandlers(pxe.stop); // Start PXE JSON-RPC server startHttpRpcServer(pxe, createPXERpcServer, PXE_PORT); @@ -157,24 +165,20 @@ async function main() { logStrings.push(...accountLogStrings); } - logStrings.push(`PXE v${version} (noir v${NoirWasmVersion}) is now ready for use in port ${PXE_PORT}!`); + logStrings.push(`PXE v${version} (noir ${NoirCommit}) is now ready for use in port ${PXE_PORT}!`); } else if (mode === SandboxMode.P2PBootstrap) { // Code path for starting a P2P bootstrap node const config = getP2PConfigEnvVars(); const bootstrapNode = new BootstrapNode(logger); await bootstrapNode.start(config); - shutdown = createShutdown(bootstrapNode.stop); + installSignalHandlers(bootstrapNode.stop); logStrings.push( `Bootstrap P2P node is now ready for use. 
Listening on: ${config.tcpListenIp}:${config.tcpListenPort}.`, ); - } else { - shutdown = createShutdown(); } // Log startup details logger.info(`${splash}\n${github}\n\n`.concat(...logStrings)); - process.once('SIGINT', shutdown); - process.once('SIGTERM', shutdown); } /** diff --git a/yarn-project/aztec.js/package.json b/yarn-project/aztec.js/package.json index 637c4dfed82..007f54fbb1b 100644 --- a/yarn-project/aztec.js/package.json +++ b/yarn-project/aztec.js/package.json @@ -4,9 +4,20 @@ "version": "0.1.0", "type": "module", "exports": { - "node": "./dest/index.js", - "import": "./dest/index.js", - "default": "./dest/main.js" + ".": { + "node": "./dest/index.js", + "import": "./dest/index.js", + "default": "./dest/main.js" + }, + "./interfaces/pxe": "./dest/api/interfaces/pxe.js", + "./abi": "./dest/api/abi.js", + "./aztec_address": "./dest/api/aztec_address.js", + "./eth_address": "./dest/api/eth_address.js", + "./ethereum": "./dest/api/ethereum.js", + "./fields": "./dest/api/fields.js", + "./init": "./dest/api/init.js", + "./log_id": "./dest/api/log_id.js", + "./tx_hash": "./dest/api/tx_hash.js" }, "typedocOptions": { "entryPoints": [ diff --git a/yarn-project/aztec.js/src/api/README.md b/yarn-project/aztec.js/src/api/README.md new file mode 100644 index 00000000000..473a5bcd826 --- /dev/null +++ b/yarn-project/aztec.js/src/api/README.md @@ -0,0 +1,7 @@ +# API + +This provides a more modular api for importing parts of the library as needed. +The root `index.js` just exposes everything, which can have consequences for startup times and optimizations. +Here we can gradually build up a much more granular api to allow importing precisely what's needed. +This should adopt the opposite philosophy to "export all my child exports". +Every file should (usually) export one thing, and the file/directory structure should be reflected in package.json exports. 
diff --git a/yarn-project/aztec.js/src/api/abi.ts b/yarn-project/aztec.js/src/api/abi.ts new file mode 100644 index 00000000000..d76502b881d --- /dev/null +++ b/yarn-project/aztec.js/src/api/abi.ts @@ -0,0 +1 @@ +export { ContractArtifact, FunctionArtifact, FunctionSelector } from '@aztec/foundation/abi'; diff --git a/yarn-project/aztec.js/src/api/aztec_address.ts b/yarn-project/aztec.js/src/api/aztec_address.ts new file mode 100644 index 00000000000..c6cece77dae --- /dev/null +++ b/yarn-project/aztec.js/src/api/aztec_address.ts @@ -0,0 +1 @@ +export { AztecAddress } from '@aztec/foundation/aztec-address'; diff --git a/yarn-project/aztec.js/src/api/eth_address.ts b/yarn-project/aztec.js/src/api/eth_address.ts new file mode 100644 index 00000000000..f07492245b8 --- /dev/null +++ b/yarn-project/aztec.js/src/api/eth_address.ts @@ -0,0 +1 @@ +export { EthAddress } from '@aztec/foundation/eth-address'; diff --git a/yarn-project/aztec.js/src/api/ethereum.ts b/yarn-project/aztec.js/src/api/ethereum.ts new file mode 100644 index 00000000000..5be2a7ac37d --- /dev/null +++ b/yarn-project/aztec.js/src/api/ethereum.ts @@ -0,0 +1,6 @@ +export { + deployL1Contract, + deployL1Contracts, + DeployL1Contracts, + L1ContractArtifactsForDeployment, +} from '@aztec/ethereum'; diff --git a/yarn-project/aztec.js/src/api/fields.ts b/yarn-project/aztec.js/src/api/fields.ts new file mode 100644 index 00000000000..6f3f255f748 --- /dev/null +++ b/yarn-project/aztec.js/src/api/fields.ts @@ -0,0 +1 @@ +export { Point, Fr, Fq, GrumpkinScalar } from '@aztec/foundation/fields'; diff --git a/yarn-project/aztec.js/src/api/init.ts b/yarn-project/aztec.js/src/api/init.ts new file mode 100644 index 00000000000..2b5203c9d0b --- /dev/null +++ b/yarn-project/aztec.js/src/api/init.ts @@ -0,0 +1 @@ +export { init as initAztecJs } from '@aztec/foundation/crypto'; diff --git a/yarn-project/aztec.js/src/api/interfaces/pxe.ts b/yarn-project/aztec.js/src/api/interfaces/pxe.ts new file mode 100644 index 
00000000000..3dc49f26d71 --- /dev/null +++ b/yarn-project/aztec.js/src/api/interfaces/pxe.ts @@ -0,0 +1 @@ +export { PXE } from '@aztec/types/interfaces'; diff --git a/yarn-project/aztec.js/src/api/log_id.ts b/yarn-project/aztec.js/src/api/log_id.ts new file mode 100644 index 00000000000..6439b39f7d9 --- /dev/null +++ b/yarn-project/aztec.js/src/api/log_id.ts @@ -0,0 +1 @@ +export { LogId } from '@aztec/types/log_id'; diff --git a/yarn-project/aztec.js/src/api/tx_hash.ts b/yarn-project/aztec.js/src/api/tx_hash.ts new file mode 100644 index 00000000000..35ef7f0b387 --- /dev/null +++ b/yarn-project/aztec.js/src/api/tx_hash.ts @@ -0,0 +1 @@ +export { TxHash } from '@aztec/types/tx_hash'; diff --git a/yarn-project/aztec.js/src/contract_deployer/index.ts b/yarn-project/aztec.js/src/contract_deployer/index.ts index ef3f20492c4..20add4a47c0 100644 --- a/yarn-project/aztec.js/src/contract_deployer/index.ts +++ b/yarn-project/aztec.js/src/contract_deployer/index.ts @@ -1,2 +1,3 @@ export * from './contract_deployer.js'; export * from './deploy_sent_tx.js'; +export * from './deploy_method.js'; diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index e9e42ae309b..c7c1bcc8f4e 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -1,11 +1,66 @@ -export * from './contract/index.js'; -export * from './contract_deployer/index.js'; -export * from './utils/index.js'; -export * from './pxe_client.js'; -export * from './account/index.js'; -export * from './contract_deployer/deploy_method.js'; -export * from './sandbox/index.js'; -export * from './wallet/index.js'; +/** + * This is our public api. + * Do NOT "export * from ..." here. + * Everything here should be explicit, to ensure we can clearly see everything we're exposing to the world. + * If it's exposed, people will use it, and then we can't remove/change the api without breaking client code. 
+ * At the time of writing we overexpose things that should only be internal. + * + * TODO: Review and narrow scope of public api. + * We should also consider exposing subsections of the api via package.json exports, like we do with foundation. + * This can allow consumers to import much smaller parts of the library to incur less overhead. + * It will also allow web bundlers to perform intelligent chunking of bundles etc. + * Some work has been done on this within the api folder, providing the alternative import style of e.g.: + * ```typescript + * import { TxHash } from '@aztec/aztec.js/tx_hash' + * import { type ContractArtifact, type FunctionArtifact, FunctionSelector } from '@aztec/aztec.js/abi'; + * import { AztecAddress } from '@aztec/aztec.js/aztec_address'; + * import { EthAddress } from '@aztec/aztec.js/eth_address'; + * ``` + */ +export { + WaitOpts, + ContractFunctionInteraction, + Contract, + ContractBase, + ContractMethod, + SentTx, + BatchCall, +} from './contract/index.js'; + +export { ContractDeployer, DeployMethod, DeploySentTx } from './contract_deployer/index.js'; + +export { + generatePublicKey, + FieldLike, + EthAddressLike, + computeMessageSecretHash, + CheatCodes, + AztecAddressLike, + isContractDeployed, + EthCheatCodes, + computeAuthWitMessageHash, +} from './utils/index.js'; + +export { createPXEClient } from './pxe_client.js'; + +export { + CompleteAddress, + getSchnorrAccount, + AccountContract, + AccountManager, + getUnsafeSchnorrAccount, + EcdsaAccountContract, + createAccounts, + SchnorrAccountContract, + SingleKeyAccountContract, + createAccount, + AuthWitnessProvider, + BaseAccountContract, +} from './account/index.js'; + +export { waitForSandbox, getSandboxAccountsWallets, deployInitialSandboxAccounts } from './sandbox/index.js'; + +export { AccountWalletWithPrivateKey, AccountWallet, Wallet, SignerlessWallet } from './wallet/index.js'; // TODO https://github.com/AztecProtocol/aztec-packages/issues/2632 --> FunctionSelector might not need
to be exposed // here once the issue is resolved. @@ -14,11 +69,13 @@ export { EthAddress, Point, Fr, + Fq, FunctionSelector, GlobalVariables, GrumpkinScalar, getContractDeploymentInfo, } from '@aztec/circuits.js'; + export { Grumpkin, Schnorr } from '@aztec/circuits.js/barretenberg'; export { @@ -35,6 +92,7 @@ export { L2Block, L2BlockL2Logs, LogFilter, + LogId, LogType, MerkleTreeId, NodeInfo, @@ -56,15 +114,20 @@ export { mockTx, } from '@aztec/types'; -export { ContractArtifact } from '@aztec/foundation/abi'; +export { ContractArtifact, FunctionArtifact } from '@aztec/foundation/abi'; + +// TODO: These kinds of things have no place on our public api. +// External devs will almost certainly have their own methods of doing these things. +// If we want to use them in our own "aztec.js consuming code", import them from foundation as needed. export { DebugLogger, createDebugLogger, onLog } from '@aztec/foundation/log'; export { fileURLToPath } from '@aztec/foundation/url'; export { sleep } from '@aztec/foundation/sleep'; export { elapsed } from '@aztec/foundation/timer'; export { retry, retryUntil } from '@aztec/foundation/retry'; -export * from '@aztec/foundation/crypto'; +export { sha256, init } from '@aztec/foundation/crypto'; export { to2Fields, toBigInt } from '@aztec/foundation/serialize'; export { toBigIntBE } from '@aztec/foundation/bigint-buffer'; +export { makeFetch } from '@aztec/foundation/json-rpc/client'; export { deployL1Contract, diff --git a/yarn-project/circuits.js/src/abis/abis.ts b/yarn-project/circuits.js/src/abis/abis.ts index ecee67a7528..07ab7135344 100644 --- a/yarn-project/circuits.js/src/abis/abis.ts +++ b/yarn-project/circuits.js/src/abis/abis.ts @@ -116,10 +116,18 @@ export function computeFunctionLeaf(fnLeaf: FunctionLeafPreimage): Fr { ); } -// The "zero leaf" of the function tree is the hash of 5 zero fields. -// TODO: Why can we not just use a zero field as the zero leaf? Complicates things perhaps unnecessarily? 
-const functionTreeZeroLeaf = pedersenHash(new Array(5).fill(Buffer.alloc(32))); -const functionTreeRootCalculator = new MerkleTreeCalculator(FUNCTION_TREE_HEIGHT, functionTreeZeroLeaf); +let functionTreeRootCalculator: MerkleTreeCalculator | undefined; +/** + * The "zero leaf" of the function tree is the hash of 5 zero fields. + * TODO: Why can we not just use a zero field as the zero leaf? Complicates things perhaps unnecessarily? + */ +function getFunctionTreeRootCalculator() { + if (!functionTreeRootCalculator) { + const functionTreeZeroLeaf = pedersenHash(new Array(5).fill(Buffer.alloc(32))); + functionTreeRootCalculator = new MerkleTreeCalculator(FUNCTION_TREE_HEIGHT, functionTreeZeroLeaf); + } + return functionTreeRootCalculator; +} /** * Computes a function tree from function leaves. @@ -128,7 +136,9 @@ const functionTreeRootCalculator = new MerkleTreeCalculator(FUNCTION_TREE_HEIGHT */ export function computeFunctionTree(fnLeaves: Fr[]) { const leaves = fnLeaves.map(fr => fr.toBuffer()); - return functionTreeRootCalculator.computeTree(leaves).map(b => Fr.fromBuffer(b)); + return getFunctionTreeRootCalculator() + .computeTree(leaves) + .map(b => Fr.fromBuffer(b)); } /** @@ -138,7 +148,7 @@ export function computeFunctionTree(fnLeaves: Fr[]) { */ export function computeFunctionTreeRoot(fnLeaves: Fr[]) { const leaves = fnLeaves.map(fr => fr.toBuffer()); - return Fr.fromBuffer(functionTreeRootCalculator.computeTreeRoot(leaves)); + return Fr.fromBuffer(getFunctionTreeRootCalculator().computeTreeRoot(leaves)); } /** diff --git a/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.test.ts b/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.test.ts deleted file mode 100644 index 58966728949..00000000000 --- a/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.test.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { Fr } from '@aztec/foundation/fields'; - -import { MerkleTreeRootCalculator } from './merkle_tree_root_calculator.js'; - 
-describe('merkle tree root calculator', () => { - it('should correctly handle no leaves', () => { - // Height of 3 is 8 leaves. - const calculator = new MerkleTreeRootCalculator(4); - const expected = calculator.computeTreeRoot(new Array(8).fill(new Fr(0)).map(fr => fr.toBuffer())); - expect(calculator.computeTreeRoot()).toEqual(expected); - }); - - it('should correctly leverage zero hashes', () => { - const calculator = new MerkleTreeRootCalculator(4); - const leaves = Array.from({ length: 5 }).map((_, i) => new Fr(i).toBuffer()); - const padded = [...leaves, ...new Array(3).fill(Buffer.alloc(32))]; - const expected = calculator.computeTreeRoot(padded); - const result = calculator.computeTreeRoot(leaves); - expect(result).not.toBeUndefined(); - expect(result).toEqual(expected); - }); - - it('should correctly handle non default zero leaf', () => { - const zeroLeaf = new Fr(666).toBuffer(); - const calculator = new MerkleTreeRootCalculator(4, zeroLeaf); - const leaves = Array.from({ length: 5 }).map((_, i) => new Fr(i).toBuffer()); - const padded = [...leaves, ...new Array(3).fill(zeroLeaf)]; - const expected = calculator.computeTreeRoot(padded); - expect(calculator.computeTreeRoot(leaves)).toEqual(expected); - }); -}); diff --git a/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.ts b/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.ts deleted file mode 100644 index 904eec35776..00000000000 --- a/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { pedersenHash } from '@aztec/foundation/crypto'; - -/** - * Calculates the root of a merkle tree. 
- */ -export class MerkleTreeRootCalculator { - private zeroHashes: Buffer[]; - - constructor(private height: number, zeroLeaf = Buffer.alloc(32)) { - this.zeroHashes = Array.from({ length: height }).reduce( - (acc: Buffer[], _, i) => [...acc, pedersenHash([acc[i], acc[i]])], - [zeroLeaf], - ); - } - - computeTreeRoot(leaves: Buffer[] = []) { - if (leaves.length === 0) { - return this.zeroHashes[this.zeroHashes.length - 1]; - } - - for (let i = 0; i < this.height; ++i) { - let j = 0; - for (; j < leaves.length / 2; ++j) { - const l = leaves[j * 2]; - const r = leaves[j * 2 + 1] || this.zeroHashes[i]; - leaves[j] = pedersenHash([l, r]); - } - leaves = leaves.slice(0, j); - } - - return leaves[0]; - } -} diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts index 9b8afc328e2..df34d06d305 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts @@ -1,3 +1,5 @@ +import { init } from '@aztec/foundation/crypto'; + import { createCipheriv, createDecipheriv, randomBytes } from 'crypto'; import { Aes128 } from './index.js'; @@ -5,7 +7,8 @@ import { Aes128 } from './index.js'; describe('aes128', () => { let aes128!: Aes128; - beforeAll(() => { + beforeAll(async () => { + await init(); aes128 = new Aes128(); }); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts index 20e0e133b9c..cf3a8a5ddec 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts @@ -2,10 +2,6 @@ import { BarretenbergSync, RawBuffer } from '@aztec/bb.js'; import { Buffer } from 'buffer'; -// Get the singleton. This constructs (if not already) the barretenberg sync api within bb.js itself. 
-// This can be called from multiple other modules as needed, and it ensures it's only constructed once. -const api = await BarretenbergSync.getSingleton(); - /** * AES-128-CBC encryption/decryption. */ @@ -28,6 +24,7 @@ export class Aes128 { } const input = Buffer.concat([data, paddingBuffer]); + const api = BarretenbergSync.getSingleton(); return Buffer.from( api.aesEncryptBufferCbc(new RawBuffer(input), new RawBuffer(iv), new RawBuffer(key), input.length), ); @@ -41,6 +38,7 @@ export class Aes128 { * @returns Decrypted data. */ public decryptBufferCBC(data: Uint8Array, iv: Uint8Array, key: Uint8Array) { + const api = BarretenbergSync.getSingleton(); return Buffer.from( api.aesDecryptBufferCbc(new RawBuffer(data), new RawBuffer(iv), new RawBuffer(key), data.length), ); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts index e52933eccf4..3b7dd7d3d73 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts @@ -4,22 +4,22 @@ import { EcdsaSignature } from './signature.js'; export * from './signature.js'; -const api = await BarretenbergSync.getSingleton(); -const wasm = api.getWasm(); - /** * ECDSA signature construction and helper operations. + * TODO: Replace with codegen api on bb.js. */ export class Ecdsa { + private wasm = BarretenbergSync.getSingleton().getWasm(); + /** * Computes a secp256k1 public key from a private key. * @param privateKey - Secp256k1 private key. * @returns A secp256k1 public key. 
*/ public computePublicKey(privateKey: Buffer): Buffer { - wasm.writeMemory(0, privateKey); - wasm.call('ecdsa__compute_public_key', 0, 32); - return Buffer.from(wasm.getMemorySlice(32, 96)); + this.wasm.writeMemory(0, privateKey); + this.wasm.call('ecdsa__compute_public_key', 0, 32); + return Buffer.from(this.wasm.getMemorySlice(32, 96)); } /** @@ -29,15 +29,15 @@ export class Ecdsa { * @returns An ECDSA signature of the form (r, s, v). */ public constructSignature(msg: Uint8Array, privateKey: Buffer) { - const mem = wasm.call('bbmalloc', msg.length); - wasm.writeMemory(0, privateKey); - wasm.writeMemory(mem, msg); - wasm.call('ecdsa__construct_signature', mem, msg.length, 0, 32, 64, 96); + const mem = this.wasm.call('bbmalloc', msg.length); + this.wasm.writeMemory(0, privateKey); + this.wasm.writeMemory(mem, msg); + this.wasm.call('ecdsa__construct_signature', mem, msg.length, 0, 32, 64, 96); return new EcdsaSignature( - Buffer.from(wasm.getMemorySlice(32, 64)), - Buffer.from(wasm.getMemorySlice(64, 96)), - Buffer.from(wasm.getMemorySlice(96, 97)), + Buffer.from(this.wasm.getMemorySlice(32, 64)), + Buffer.from(this.wasm.getMemorySlice(64, 96)), + Buffer.from(this.wasm.getMemorySlice(96, 97)), ); } @@ -48,14 +48,14 @@ export class Ecdsa { * @returns The secp256k1 public key of the signer. 
*/ public recoverPublicKey(msg: Uint8Array, sig: EcdsaSignature): Buffer { - const mem = wasm.call('bbmalloc', msg.length); - wasm.writeMemory(0, sig.r); - wasm.writeMemory(32, sig.s); - wasm.writeMemory(64, sig.v); - wasm.writeMemory(mem, msg); - wasm.call('ecdsa__recover_public_key_from_signature', mem, msg.length, 0, 32, 64, 65); + const mem = this.wasm.call('bbmalloc', msg.length); + this.wasm.writeMemory(0, sig.r); + this.wasm.writeMemory(32, sig.s); + this.wasm.writeMemory(64, sig.v); + this.wasm.writeMemory(mem, msg); + this.wasm.call('ecdsa__recover_public_key_from_signature', mem, msg.length, 0, 32, 64, 65); - return Buffer.from(wasm.getMemorySlice(65, 129)); + return Buffer.from(this.wasm.getMemorySlice(65, 129)); } /** @@ -66,12 +66,12 @@ export class Ecdsa { * @returns True or false. */ public verifySignature(msg: Uint8Array, pubKey: Buffer, sig: EcdsaSignature) { - const mem = wasm.call('bbmalloc', msg.length); - wasm.writeMemory(0, pubKey); - wasm.writeMemory(64, sig.r); - wasm.writeMemory(96, sig.s); - wasm.writeMemory(128, sig.v); - wasm.writeMemory(mem, msg); - return wasm.call('ecdsa__verify_signature', mem, msg.length, 0, 64, 96, 128) ? true : false; + const mem = this.wasm.call('bbmalloc', msg.length); + this.wasm.writeMemory(0, pubKey); + this.wasm.writeMemory(64, sig.r); + this.wasm.writeMemory(96, sig.s); + this.wasm.writeMemory(128, sig.v); + this.wasm.writeMemory(mem, msg); + return this.wasm.call('ecdsa__verify_signature', mem, msg.length, 0, 64, 96, 128) ? 
true : false; } } diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/signature.ts b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/signature.ts index 5a18f988c40..cea06e33b1e 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/signature.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/signature.ts @@ -4,7 +4,7 @@ import { mapTuple } from '@aztec/foundation/serialize'; import { randomBytes } from 'crypto'; -import { Signature } from '../index.js'; +import { Signature } from '../signature/index.js'; /** * ECDSA signature used for transactions. diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts index 154ab39075f..671c019291b 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts @@ -1,3 +1,4 @@ +import { init } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { GrumpkinScalar, Point } from '../../../index.js'; @@ -8,7 +9,8 @@ const debug = createDebugLogger('bb:grumpkin_test'); describe('grumpkin', () => { let grumpkin!: Grumpkin; - beforeAll(() => { + beforeAll(async () => { + await init(); grumpkin = new Grumpkin(); }); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts index a41c0af1fa0..3abf74fd20a 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts @@ -1,15 +1,12 @@ import { BarretenbergSync } from '@aztec/bb.js'; -import { Fr, Point } from '@aztec/foundation/fields'; - -import { GrumpkinScalar } from '../../../index.js'; - -const api = await BarretenbergSync.getSingleton(); -const wasm = api.getWasm(); +import { Fr, GrumpkinScalar, Point } from 
'@aztec/foundation/fields'; /** * Grumpkin elliptic curve operations. */ export class Grumpkin { + private wasm = BarretenbergSync.getSingleton().getWasm(); + // prettier-ignore static generator = Point.fromBuffer(Buffer.from([ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, @@ -33,10 +30,10 @@ export class Grumpkin { * @returns Result of the multiplication. */ public mul(point: Point, scalar: GrumpkinScalar): Point { - wasm.writeMemory(0, point.toBuffer()); - wasm.writeMemory(64, scalar.toBuffer()); - wasm.call('ecc_grumpkin__mul', 0, 64, 96); - return Point.fromBuffer(Buffer.from(wasm.getMemorySlice(96, 160))); + this.wasm.writeMemory(0, point.toBuffer()); + this.wasm.writeMemory(64, scalar.toBuffer()); + this.wasm.call('ecc_grumpkin__mul', 0, 64, 96); + return Point.fromBuffer(Buffer.from(this.wasm.getMemorySlice(96, 160))); } /** @@ -49,16 +46,16 @@ export class Grumpkin { const concatenatedPoints: Buffer = Buffer.concat(points.map(point => point.toBuffer())); const pointsByteLength = points.length * Point.SIZE_IN_BYTES; - const mem = wasm.call('bbmalloc', pointsByteLength * 2); + const mem = this.wasm.call('bbmalloc', pointsByteLength * 2); - wasm.writeMemory(mem, concatenatedPoints); - wasm.writeMemory(0, scalar.toBuffer()); - wasm.call('ecc_grumpkin__batch_mul', mem, 0, points.length, mem + pointsByteLength); + this.wasm.writeMemory(mem, concatenatedPoints); + this.wasm.writeMemory(0, scalar.toBuffer()); + this.wasm.call('ecc_grumpkin__batch_mul', mem, 0, points.length, mem + pointsByteLength); const result: Buffer = Buffer.from( - wasm.getMemorySlice(mem + pointsByteLength, mem + pointsByteLength + pointsByteLength), + this.wasm.getMemorySlice(mem + pointsByteLength, mem + pointsByteLength + pointsByteLength), ); - wasm.call('bbfree', mem); + this.wasm.call('bbfree', mem); const parsedResult: Point[] = []; for (let i = 0; i < pointsByteLength; i += 64) { @@ -72,8 +69,8 @@ export class Grumpkin { * @returns 
Random field element. */ public getRandomFr(): Fr { - wasm.call('ecc_grumpkin__get_random_scalar_mod_circuit_modulus', 0); - return Fr.fromBuffer(Buffer.from(wasm.getMemorySlice(0, 32))); + this.wasm.call('ecc_grumpkin__get_random_scalar_mod_circuit_modulus', 0); + return Fr.fromBuffer(Buffer.from(this.wasm.getMemorySlice(0, 32))); } /** @@ -82,8 +79,8 @@ export class Grumpkin { * @returns Buffer representation of the field element. */ public reduce512BufferToFr(uint512Buf: Buffer): Fr { - wasm.writeMemory(0, uint512Buf); - wasm.call('ecc_grumpkin__reduce512_buffer_mod_circuit_modulus', 0, 64); - return Fr.fromBuffer(Buffer.from(wasm.getMemorySlice(64, 96))); + this.wasm.writeMemory(0, uint512Buf); + this.wasm.call('ecc_grumpkin__reduce512_buffer_mod_circuit_modulus', 0, 64); + return Fr.fromBuffer(Buffer.from(this.wasm.getMemorySlice(64, 96))); } } diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts index 4ab41700e7d..662d561f3f2 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts @@ -1,27 +1,27 @@ import { BarretenbergSync } from '@aztec/bb.js'; +import { Point } from '@aztec/foundation/fields'; import { numToUInt32BE } from '@aztec/foundation/serialize'; -import { GrumpkinPrivateKey, Point, PublicKey } from '../../../index.js'; +import { GrumpkinPrivateKey, PublicKey } from '../../../types/index.js'; import { SchnorrSignature } from './signature.js'; export * from './signature.js'; -const api = await BarretenbergSync.getSingleton(); -const wasm = api.getWasm(); - /** * Schnorr signature construction and helper operations. */ export class Schnorr { + private wasm = BarretenbergSync.getSingleton().getWasm(); + /** * Computes a grumpkin public key from a private key. * @param privateKey - The private key. * @returns A grumpkin public key. 
*/ public computePublicKey(privateKey: GrumpkinPrivateKey): PublicKey { - wasm.writeMemory(0, privateKey.toBuffer()); - wasm.call('schnorr_compute_public_key', 0, 32); - return Point.fromBuffer(Buffer.from(wasm.getMemorySlice(32, 96))); + this.wasm.writeMemory(0, privateKey.toBuffer()); + this.wasm.call('schnorr_compute_public_key', 0, 32); + return Point.fromBuffer(Buffer.from(this.wasm.getMemorySlice(32, 96))); } /** @@ -31,12 +31,12 @@ export class Schnorr { * @returns A Schnorr signature of the form (s, e). */ public constructSignature(msg: Uint8Array, privateKey: GrumpkinPrivateKey) { - const mem = wasm.call('bbmalloc', msg.length + 4); - wasm.writeMemory(0, privateKey.toBuffer()); - wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); - wasm.call('schnorr_construct_signature', mem, 0, 32, 64); + const mem = this.wasm.call('bbmalloc', msg.length + 4); + this.wasm.writeMemory(0, privateKey.toBuffer()); + this.wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); + this.wasm.call('schnorr_construct_signature', mem, 0, 32, 64); - return new SchnorrSignature(Buffer.from(wasm.getMemorySlice(32, 96))); + return new SchnorrSignature(Buffer.from(this.wasm.getMemorySlice(32, 96))); } /** @@ -47,13 +47,13 @@ export class Schnorr { * @returns True or false. 
*/ public verifySignature(msg: Uint8Array, pubKey: PublicKey, sig: SchnorrSignature) { - const mem = wasm.call('bbmalloc', msg.length + 4); - wasm.writeMemory(0, pubKey.toBuffer()); - wasm.writeMemory(64, sig.s); - wasm.writeMemory(96, sig.e); - wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); - wasm.call('schnorr_verify_signature', mem, 0, 64, 96, 128); - const result = wasm.getMemorySlice(128, 129); + const mem = this.wasm.call('bbmalloc', msg.length + 4); + this.wasm.writeMemory(0, pubKey.toBuffer()); + this.wasm.writeMemory(64, sig.s); + this.wasm.writeMemory(96, sig.e); + this.wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); + this.wasm.call('schnorr_verify_signature', mem, 0, 64, 96, 128); + const result = this.wasm.getMemorySlice(128, 129); return !Buffer.alloc(1, 0).equals(result); } } diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts index a388ce602fb..f4afdd82346 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts @@ -1,12 +1,11 @@ import { BarretenbergSync } from '@aztec/bb.js'; -const api = await BarretenbergSync.getSingleton(); -const wasm = api.getWasm(); - /** * Secp256k1 elliptic curve operations. */ export class Secp256k1 { + private wasm = BarretenbergSync.getSingleton().getWasm(); + // prettier-ignore static generator = Buffer.from([ 0x79, 0xbe, 0x66, 0x7e, 0xf9, 0xdc, 0xbb, 0xac, 0x55, 0xa0, 0x62, 0x95, 0xce, 0x87, 0x0b, 0x07, @@ -30,10 +29,10 @@ export class Secp256k1 { * @returns Result of the multiplication. 
*/ public mul(point: Uint8Array, scalar: Uint8Array) { - wasm.writeMemory(0, point); - wasm.writeMemory(64, scalar); - wasm.call('ecc_secp256k1__mul', 0, 64, 96); - return Buffer.from(wasm.getMemorySlice(96, 160)); + this.wasm.writeMemory(0, point); + this.wasm.writeMemory(64, scalar); + this.wasm.call('ecc_secp256k1__mul', 0, 64, 96); + return Buffer.from(this.wasm.getMemorySlice(96, 160)); } /** @@ -41,8 +40,8 @@ export class Secp256k1 { * @returns Random field element. */ public getRandomFr() { - wasm.call('ecc_secp256k1__get_random_scalar_mod_circuit_modulus', 0); - return Buffer.from(wasm.getMemorySlice(0, 32)); + this.wasm.call('ecc_secp256k1__get_random_scalar_mod_circuit_modulus', 0); + return Buffer.from(this.wasm.getMemorySlice(0, 32)); } /** @@ -51,8 +50,8 @@ export class Secp256k1 { * @returns Buffer representation of the field element. */ public reduce512BufferToFr(uint512Buf: Buffer) { - wasm.writeMemory(0, uint512Buf); - wasm.call('ecc_secp256k1__reduce512_buffer_mod_circuit_modulus', 0, 64); - return Buffer.from(wasm.getMemorySlice(64, 96)); + this.wasm.writeMemory(0, uint512Buf); + this.wasm.call('ecc_secp256k1__reduce512_buffer_mod_circuit_modulus', 0, 64); + return Buffer.from(this.wasm.getMemorySlice(64, 96)); } } diff --git a/yarn-project/cli/Dockerfile b/yarn-project/cli/Dockerfile index 0b8570793f7..5663b1b079b 100644 --- a/yarn-project/cli/Dockerfile +++ b/yarn-project/cli/Dockerfile @@ -1,10 +1,6 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project-prod AS yarn-project-prod -# Set Tini as the default entrypoint, to handle ctrl-c etc. -# Why not just yarn start? About 1 second difference in startup time. -# At time of writing it still takes 1.7s to just get the help to print. Needs investigating. 
-RUN apk add --no-cache tini -ENTRYPOINT ["/sbin/tini", "--", "node", "--no-warnings", "/usr/src/yarn-project/cli/dest/bin/index.js"] +ENTRYPOINT ["node", "--no-warnings", "/usr/src/yarn-project/cli/dest/bin/index.js"] # Setup cache volume. ENV XDG_CACHE_HOME /cache diff --git a/yarn-project/cli/src/bin/index.ts b/yarn-project/cli/src/bin/index.ts index 014d5e05a24..948d81f2940 100644 --- a/yarn-project/cli/src/bin/index.ts +++ b/yarn-project/cli/src/bin/index.ts @@ -1,6 +1,5 @@ #!/usr/bin/env -S node --no-warnings -import { createDebugLogger } from '@aztec/aztec.js'; -import { createConsoleLogger } from '@aztec/foundation/log'; +import { createConsoleLogger, createDebugLogger } from '@aztec/foundation/log'; import { getProgram } from '../index.js'; @@ -9,6 +8,9 @@ const log = createConsoleLogger(); /** CLI main entrypoint */ async function main() { + process.once('SIGINT', () => process.exit(0)); + process.once('SIGTERM', () => process.exit(0)); + const program = getProgram(log, debugLogger); await program.parseAsync(process.argv); } diff --git a/yarn-project/cli/src/cmds/add_contract.ts b/yarn-project/cli/src/cmds/add_contract.ts new file mode 100644 index 00000000000..6ac361f1fbc --- /dev/null +++ b/yarn-project/cli/src/cmds/add_contract.ts @@ -0,0 +1,27 @@ +import { AztecAddress, CompleteAddress, EthAddress, Fr, Point } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; +import { getContractArtifact } from '../utils.js'; + +/** + * + */ +export async function addContract( + rpcUrl: string, + contractArtifactPath: string, + contractAddress: AztecAddress, + partialAddress: Fr, + publicKey: Point, + portalContract: EthAddress | undefined, + debugLogger: DebugLogger, + log: LogFn, +) { + const artifact = await getContractArtifact(contractArtifactPath, log); + const completeAddress = new CompleteAddress(contractAddress, publicKey ?? 
Fr.ZERO, partialAddress); + const portalContractAddress: EthAddress = portalContract ?? EthAddress.ZERO; + const client = await createCompatibleClient(rpcUrl, debugLogger); + + await client.addContracts([{ artifact, completeAddress, portalContract: portalContractAddress }]); + log(`\nContract added to PXE at ${contractAddress.toString()}\n`); +} diff --git a/yarn-project/cli/src/cmds/add_note.ts b/yarn-project/cli/src/cmds/add_note.ts new file mode 100644 index 00000000000..64340034370 --- /dev/null +++ b/yarn-project/cli/src/cmds/add_note.ts @@ -0,0 +1,24 @@ +import { AztecAddress, Fr } from '@aztec/aztec.js'; +import { DebugLogger } from '@aztec/foundation/log'; +import { ExtendedNote, Note, TxHash } from '@aztec/types'; + +import { createCompatibleClient } from '../client.js'; +import { parseFields } from '../utils.js'; + +/** + * Parses the given note fields and registers the resulting note with the PXE for the given account, contract, storage slot and tx hash. + */ +export async function addNote( + address: AztecAddress, + contractAddress: AztecAddress, + storageSlot: Fr, + txHash: TxHash, + noteFields: string[], + rpcUrl: string, + debugLogger: DebugLogger, +) { + const note = new Note(parseFields(noteFields)); + const extendedNote = new ExtendedNote(note, address, contractAddress, storageSlot, txHash); + const client = await createCompatibleClient(rpcUrl, debugLogger); + await client.addNote(extendedNote); +} diff --git a/yarn-project/cli/src/cmds/block_number.ts b/yarn-project/cli/src/cmds/block_number.ts new file mode 100644 index 00000000000..37795a12966 --- /dev/null +++ b/yarn-project/cli/src/cmds/block_number.ts @@ -0,0 +1,12 @@ +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * Fetches the current block number from the PXE and logs it. + */ +export async function blockNumber(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const num = await client.getBlockNumber(); + log(`${num}\n`); +} diff --git a/yarn-project/cli/src/cmds/call.ts b/yarn-project/cli/src/cmds/call.ts new file mode 
100644 index 00000000000..7e395276177 --- /dev/null +++ b/yarn-project/cli/src/cmds/call.ts @@ -0,0 +1,35 @@ +import { AztecAddress } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { format } from 'util'; + +import { createCompatibleClient } from '../client.js'; +import { getFunctionArtifact, getTxSender, prepTx } from '../utils.js'; + +/** + * + */ +export async function call( + functionName: string, + functionArgsIn: any[], + contractArtifactPath: string, + contractAddress: AztecAddress, + fromAddress: string | undefined, + rpcUrl: string, + debugLogger: DebugLogger, + log: LogFn, +) { + const { functionArgs, contractArtifact } = await prepTx(contractArtifactPath, functionName, functionArgsIn, log); + + const fnArtifact = getFunctionArtifact(contractArtifact, functionName); + if (fnArtifact.parameters.length !== functionArgs.length) { + throw Error( + `Invalid number of args passed. Expected ${fnArtifact.parameters.length}; Received: ${functionArgs.length}`, + ); + } + + const client = await createCompatibleClient(rpcUrl, debugLogger); + const from = await getTxSender(client, fromAddress); + const result = await client.viewTx(functionName, functionArgs, contractAddress, from); + log(format('\nView result: ', result, '\n')); +} diff --git a/yarn-project/cli/src/cmds/check_deploy.ts b/yarn-project/cli/src/cmds/check_deploy.ts new file mode 100644 index 00000000000..25641418c71 --- /dev/null +++ b/yarn-project/cli/src/cmds/check_deploy.ts @@ -0,0 +1,17 @@ +import { AztecAddress, isContractDeployed } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function checkDeploy(rpcUrl: string, contractAddress: AztecAddress, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const isDeployed = await isContractDeployed(client, contractAddress); + if 
(isDeployed) { + log(`\nContract found at ${contractAddress.toString()}\n`); + } else { + log(`\nNo contract found at ${contractAddress.toString()}\n`); + } +} diff --git a/yarn-project/cli/src/cmds/compute_selector.ts b/yarn-project/cli/src/cmds/compute_selector.ts new file mode 100644 index 00000000000..d0ef8e14abe --- /dev/null +++ b/yarn-project/cli/src/cmds/compute_selector.ts @@ -0,0 +1,10 @@ +import { FunctionSelector } from '@aztec/foundation/abi'; +import { LogFn } from '@aztec/foundation/log'; + +/** + * + */ +export function computeSelector(functionSignature: string, log: LogFn) { + const selector = FunctionSelector.fromSignature(functionSignature); + log(`${selector}`); +} diff --git a/yarn-project/cli/src/cmds/create_account.ts b/yarn-project/cli/src/cmds/create_account.ts new file mode 100644 index 00000000000..f178409c82d --- /dev/null +++ b/yarn-project/cli/src/cmds/create_account.ts @@ -0,0 +1,39 @@ +import { GrumpkinScalar, getSchnorrAccount } from '@aztec/aztec.js'; +import { Fq, Fr } from '@aztec/foundation/fields'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function createAccount( + rpcUrl: string, + privateKey: Fq, + wait: boolean, + debugLogger: DebugLogger, + log: LogFn, +) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const actualPrivateKey = privateKey ?? 
GrumpkinScalar.random(); + + const account = getSchnorrAccount(client, actualPrivateKey, actualPrivateKey, Fr.ZERO); + const { address, publicKey, partialAddress } = account.getCompleteAddress(); + const tx = await account.deploy(); + const txHash = await tx.getTxHash(); + debugLogger(`Account contract tx sent with hash ${txHash}`); + if (wait) { + log(`\nWaiting for account contract deployment...`); + await tx.wait(); + } else { + log(`\nAccount deployment transaction hash: ${txHash}\n`); + } + + log(`\nNew account:\n`); + log(`Address: ${address.toString()}`); + log(`Public key: ${publicKey.toString()}`); + if (!privateKey) { + log(`Private key: ${actualPrivateKey.toString()}`); + } + log(`Partial address: ${partialAddress.toString()}`); +} diff --git a/yarn-project/cli/src/cmds/deploy.ts b/yarn-project/cli/src/cmds/deploy.ts new file mode 100644 index 00000000000..459f5498c12 --- /dev/null +++ b/yarn-project/cli/src/cmds/deploy.ts @@ -0,0 +1,77 @@ +import { ContractDeployer, EthAddress, Fr, Point } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; +import { encodeArgs } from '../encoding.js'; +import { GITHUB_TAG_PREFIX } from '../github.js'; +import { getContractArtifact, getFunctionArtifact } from '../utils.js'; + +/** + * + */ +export async function deploy( + artifactPath: string, + json: boolean, + rpcUrl: string, + publicKey: Point | undefined, + rawArgs: any[], + portalAddress: EthAddress, + salt: Fr, + wait: boolean, + debugLogger: DebugLogger, + log: LogFn, + logJson: (output: any) => void, +) { + const contractArtifact = await getContractArtifact(artifactPath, log); + const constructorArtifact = contractArtifact.functions.find(({ name }) => name === 'constructor'); + + const client = await createCompatibleClient(rpcUrl, debugLogger); + const nodeInfo = await client.getNodeInfo(); + const expectedAztecNrVersion = 
`${GITHUB_TAG_PREFIX}-v${nodeInfo.sandboxVersion}`; + if (contractArtifact.aztecNrVersion && contractArtifact.aztecNrVersion !== expectedAztecNrVersion) { + log( + `\nWarning: Contract was compiled with a different version of Aztec.nr: ${contractArtifact.aztecNrVersion}. Consider updating Aztec.nr to ${expectedAztecNrVersion}\n`, + ); + } + + const deployer = new ContractDeployer(contractArtifact, client, publicKey); + + const constructor = getFunctionArtifact(contractArtifact, 'constructor'); + if (!constructor) { + throw new Error(`Constructor not found in contract ABI`); + } + + debugLogger(`Input arguments: ${rawArgs.map((x: any) => `"${x}"`).join(', ')}`); + const args = encodeArgs(rawArgs, constructorArtifact!.parameters); + debugLogger(`Encoded arguments: ${args.join(', ')}`); + + const deploy = deployer.deploy(...args); + + await deploy.create({ contractAddressSalt: salt, portalContract: portalAddress }); + const tx = deploy.send({ contractAddressSalt: salt, portalContract: portalAddress }); + const txHash = await tx.getTxHash(); + debugLogger(`Deploy tx sent with hash ${txHash}`); + if (wait) { + const deployed = await tx.wait(); + const { address, partialAddress } = deployed.contract.completeAddress; + if (json) { + logJson({ address: address.toString(), partialAddress: partialAddress.toString() }); + } else { + log(`\nContract deployed at ${address.toString()}\n`); + log(`Contract partial address ${partialAddress.toString()}\n`); + } + } else { + const { address, partialAddress } = deploy.completeAddress ?? {}; + if (json) { + logJson({ + address: address?.toString() ?? 'N/A', + partialAddress: partialAddress?.toString() ?? 'N/A', + txHash: txHash.toString(), + }); + } else { + log(`\nContract Address: ${deploy.completeAddress?.address.toString() ?? 'N/A'}`); + log(`Contract Partial Address: ${deploy.completeAddress?.partialAddress.toString() ?? 
'N/A'}`); + log(`Deployment transaction hash: ${txHash}\n`); + } + } +} diff --git a/yarn-project/cli/src/cmds/deploy_l1_contracts.ts b/yarn-project/cli/src/cmds/deploy_l1_contracts.ts new file mode 100644 index 00000000000..3b45537d88a --- /dev/null +++ b/yarn-project/cli/src/cmds/deploy_l1_contracts.ts @@ -0,0 +1,25 @@ +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { deployAztecContracts } from '../utils.js'; + +/** + * + */ +export async function deployL1Contracts( + rpcUrl: string, + apiKey: string, + privateKey: string, + mnemonic: string, + log: LogFn, + debugLogger: DebugLogger, +) { + const { l1ContractAddresses } = await deployAztecContracts(rpcUrl, apiKey, privateKey, mnemonic, debugLogger); + + log('\n'); + log(`Rollup Address: ${l1ContractAddresses.rollupAddress.toString()}`); + log(`Registry Address: ${l1ContractAddresses.registryAddress.toString()}`); + log(`L1 -> L2 Inbox Address: ${l1ContractAddresses.inboxAddress.toString()}`); + log(`L2 -> L1 Outbox address: ${l1ContractAddresses.outboxAddress.toString()}`); + log(`Contract Deployment Emitter Address: ${l1ContractAddresses.contractDeploymentEmitterAddress.toString()}`); + log('\n'); +} diff --git a/yarn-project/cli/src/cmds/example_contracts.ts b/yarn-project/cli/src/cmds/example_contracts.ts new file mode 100644 index 00000000000..a5b71e2ec0d --- /dev/null +++ b/yarn-project/cli/src/cmds/example_contracts.ts @@ -0,0 +1,12 @@ +import { LogFn } from '@aztec/foundation/log'; + +import { getExampleContractArtifacts } from '../utils.js'; + +/** + * + */ +export async function exampleContracts(log: LogFn) { + const abisList = await getExampleContractArtifacts(); + const names = Object.keys(abisList); + names.forEach(name => log(name)); +} diff --git a/yarn-project/cli/src/cmds/generate_p2p_private_key.ts b/yarn-project/cli/src/cmds/generate_p2p_private_key.ts new file mode 100644 index 00000000000..4bf3ad7a5c4 --- /dev/null +++ 
b/yarn-project/cli/src/cmds/generate_p2p_private_key.ts @@ -0,0 +1,13 @@ +import { LogFn } from '@aztec/foundation/log'; + +import { createSecp256k1PeerId } from '@libp2p/peer-id-factory'; + +/** + * Generates a fresh secp256k1 libp2p peer id and logs its private key (hex-encoded) and peer id. + */ +export async function generateP2PPrivateKey(log: LogFn) { + const peerId = await createSecp256k1PeerId(); + const exportedPeerId = Buffer.from(peerId.privateKey!).toString('hex'); + log(`Private key: ${exportedPeerId}`); + log(`Peer Id: ${peerId}`); +} diff --git a/yarn-project/cli/src/cmds/generate_private_key.ts b/yarn-project/cli/src/cmds/generate_private_key.ts new file mode 100644 index 00000000000..8586f03f37a --- /dev/null +++ b/yarn-project/cli/src/cmds/generate_private_key.ts @@ -0,0 +1,23 @@ +import { GrumpkinScalar, generatePublicKey } from '@aztec/aztec.js'; +import { LogFn } from '@aztec/foundation/log'; + +import { mnemonicToAccount } from 'viem/accounts'; + +/** + * Generates a Grumpkin private/public key pair (random, or derived from a mnemonic) and logs it. NOTE(review): in the mnemonic branch `privKey` is never assigned, so the final log prints `undefined` for the private key — confirm whether it should be set from `key` there as in the random branch. + */ +export function generatePrivateKey(mnemonic: string | undefined, log: LogFn) { + let privKey; + let publicKey; + if (mnemonic) { + const acc = mnemonicToAccount(mnemonic); + // TODO(#2052): This reduction is not secure enough. TACKLE THIS ISSUE BEFORE MAINNET. 
+ const key = GrumpkinScalar.fromBufferReduce(Buffer.from(acc.getHdKey().privateKey!)); + publicKey = generatePublicKey(key); + } else { + const key = GrumpkinScalar.random(); + privKey = key.toString(); + publicKey = generatePublicKey(key); + } + log(`\nPrivate Key: ${privKey}\nPublic Key: ${publicKey.toString()}\n`); +} diff --git a/yarn-project/cli/src/cmds/get_account.ts b/yarn-project/cli/src/cmds/get_account.ts new file mode 100644 index 00000000000..47b3b1056a7 --- /dev/null +++ b/yarn-project/cli/src/cmds/get_account.ts @@ -0,0 +1,18 @@ +import { AztecAddress } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getAccount(aztecAddress: AztecAddress, rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const account = await client.getRegisteredAccount(aztecAddress); + + if (!account) { + log(`Unknown account ${aztecAddress.toString()}`); + } else { + log(account.toReadableString()); + } +} diff --git a/yarn-project/cli/src/cmds/get_accounts.ts b/yarn-project/cli/src/cmds/get_accounts.ts new file mode 100644 index 00000000000..155e92d5a4e --- /dev/null +++ b/yarn-project/cli/src/cmds/get_accounts.ts @@ -0,0 +1,19 @@ +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getAccounts(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const accounts = await client.getRegisteredAccounts(); + if (!accounts.length) { + log('No accounts found.'); + } else { + log(`Accounts found: \n`); + for (const account of accounts) { + log(account.toReadableString()); + } + } +} diff --git a/yarn-project/cli/src/cmds/get_contract_data.ts b/yarn-project/cli/src/cmds/get_contract_data.ts new file mode 
100644 index 00000000000..16d13047972 --- /dev/null +++ b/yarn-project/cli/src/cmds/get_contract_data.ts @@ -0,0 +1,39 @@ +import { AztecAddress } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; +import { ContractData } from '@aztec/types'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getContractData( + rpcUrl: string, + contractAddress: AztecAddress, + includeBytecode: boolean, + debugLogger: DebugLogger, + log: LogFn, +) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const contractDataWithOrWithoutBytecode = includeBytecode + ? await client.getExtendedContractData(contractAddress) + : await client.getContractData(contractAddress); + + if (!contractDataWithOrWithoutBytecode) { + log(`No contract data found at ${contractAddress}`); + return; + } + let contractData: ContractData; + + if ('contractData' in contractDataWithOrWithoutBytecode) { + contractData = contractDataWithOrWithoutBytecode.contractData; + } else { + contractData = contractDataWithOrWithoutBytecode; + } + log(`\nContract Data: \nAddress: ${contractData.contractAddress.toString()}`); + log(`Portal: ${contractData.portalContractAddress.toString()}`); + if ('bytecode' in contractDataWithOrWithoutBytecode) { + log(`Bytecode: ${contractDataWithOrWithoutBytecode.bytecode}`); + } + log('\n'); +} diff --git a/yarn-project/cli/src/cmds/get_logs.ts b/yarn-project/cli/src/cmds/get_logs.ts new file mode 100644 index 00000000000..73a6501b9cf --- /dev/null +++ b/yarn-project/cli/src/cmds/get_logs.ts @@ -0,0 +1,71 @@ +import { AztecAddress, FunctionSelector, LogFilter, LogId, TxHash } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; +import { sleep } from '@aztec/foundation/sleep'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getLogs( + txHash: TxHash, + fromBlock: number, + toBlock: number, + afterLog: LogId, + 
contractAddress: AztecAddress, + selector: FunctionSelector, + rpcUrl: string, + follow: boolean, + debugLogger: DebugLogger, + log: LogFn, +) { + const pxe = await createCompatibleClient(rpcUrl, debugLogger); + + if (follow) { + if (txHash) { + throw Error('Cannot use --follow with --tx-hash'); + } + if (toBlock) { + throw Error('Cannot use --follow with --to-block'); + } + } + + const filter: LogFilter = { txHash, fromBlock, toBlock, afterLog, contractAddress, selector }; + + const fetchLogs = async () => { + const response = await pxe.getUnencryptedLogs(filter); + const logs = response.logs; + + if (!logs.length) { + const filterOptions = Object.entries(filter) + .filter(([, value]) => value !== undefined) + .map(([key, value]) => `${key}: ${value}`) + .join(', '); + if (!follow) { + log(`No logs found for filter: {${filterOptions}}`); + } + } else { + if (!follow && !filter.afterLog) { + log('Logs found: \n'); + } + logs.forEach(unencryptedLog => log(unencryptedLog.toHumanReadable())); + // Set the continuation parameter for the following requests + filter.afterLog = logs[logs.length - 1].id; + } + return response.maxLogsHit; + }; + + if (follow) { + log('Fetching logs...'); + while (true) { + const maxLogsHit = await fetchLogs(); + if (!maxLogsHit) { + await sleep(1000); + } + } + } else { + while (await fetchLogs()) { + // Keep fetching logs until we reach the end. 
+ } + } +} diff --git a/yarn-project/cli/src/cmds/get_node_info.ts b/yarn-project/cli/src/cmds/get_node_info.ts new file mode 100644 index 00000000000..bc71a7bc8cc --- /dev/null +++ b/yarn-project/cli/src/cmds/get_node_info.ts @@ -0,0 +1,17 @@ +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getNodeInfo(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const info = await client.getNodeInfo(); + log(`\nNode Info:\n`); + log(`Sandbox Version: ${info.sandboxVersion}\n`); + log(`Compatible Nargo Version: ${info.compatibleNargoVersion}\n`); + log(`Chain Id: ${info.chainId}\n`); + log(`Protocol Version: ${info.protocolVersion}\n`); + log(`Rollup Address: ${info.l1ContractAddresses.rollupAddress.toString()}`); +} diff --git a/yarn-project/cli/src/cmds/get_recipient.ts b/yarn-project/cli/src/cmds/get_recipient.ts new file mode 100644 index 00000000000..9edf6edecfc --- /dev/null +++ b/yarn-project/cli/src/cmds/get_recipient.ts @@ -0,0 +1,18 @@ +import { AztecAddress } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getRecipient(aztecAddress: AztecAddress, rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const recipient = await client.getRecipient(aztecAddress); + + if (!recipient) { + log(`Unknown recipient ${aztecAddress.toString()}`); + } else { + log(recipient.toReadableString()); + } +} diff --git a/yarn-project/cli/src/cmds/get_recipients.ts b/yarn-project/cli/src/cmds/get_recipients.ts new file mode 100644 index 00000000000..92bc9fad973 --- /dev/null +++ b/yarn-project/cli/src/cmds/get_recipients.ts @@ -0,0 +1,19 @@ +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + 
+import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getRecipients(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const recipients = await client.getRecipients(); + if (!recipients.length) { + log('No recipients found.'); + } else { + log(`Recipients found: \n`); + for (const recipient of recipients) { + log(recipient.toReadableString()); + } + } +} diff --git a/yarn-project/cli/src/cmds/get_tx_receipt.ts b/yarn-project/cli/src/cmds/get_tx_receipt.ts new file mode 100644 index 00000000000..fe133608820 --- /dev/null +++ b/yarn-project/cli/src/cmds/get_tx_receipt.ts @@ -0,0 +1,18 @@ +import { TxHash } from '@aztec/aztec.js'; +import { JsonStringify } from '@aztec/foundation/json-rpc'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getTxReceipt(rpcUrl: string, txHash: TxHash, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const receipt = await client.getTxReceipt(txHash); + if (!receipt) { + log(`No receipt found for transaction hash ${txHash.toString()}`); + } else { + log(`\nTransaction receipt: \n${JsonStringify(receipt, true)}\n`); + } +} diff --git a/yarn-project/cli/src/cmds/inspect_contract.ts b/yarn-project/cli/src/cmds/inspect_contract.ts new file mode 100644 index 00000000000..e55954adc1e --- /dev/null +++ b/yarn-project/cli/src/cmds/inspect_contract.ts @@ -0,0 +1,29 @@ +import { + FunctionSelector, + decodeFunctionSignature, + decodeFunctionSignatureWithParameterNames, +} from '@aztec/foundation/abi'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { getContractArtifact } from '../utils.js'; + +/** + * + */ +export async function inspectContract(contractArtifactFile: string, debugLogger: DebugLogger, log: LogFn) { + const contractArtifact = 
await getContractArtifact(contractArtifactFile, debugLogger); + const contractFns = contractArtifact.functions.filter( + f => !f.isInternal && f.name !== 'compute_note_hash_and_nullifier', + ); + if (contractFns.length === 0) { + log(`No external functions found for contract ${contractArtifact.name}`); + } + for (const fn of contractFns) { + const signatureWithParameterNames = decodeFunctionSignatureWithParameterNames(fn.name, fn.parameters); + const signature = decodeFunctionSignature(fn.name, fn.parameters); + const selector = FunctionSelector.fromSignature(signature); + log( + `${fn.functionType} ${signatureWithParameterNames} \n\tfunction signature: ${signature}\n\tselector: ${selector}`, + ); + } +} diff --git a/yarn-project/cli/src/cmds/parse_parameter_struct.ts b/yarn-project/cli/src/cmds/parse_parameter_struct.ts new file mode 100644 index 00000000000..1ef572fd5ce --- /dev/null +++ b/yarn-project/cli/src/cmds/parse_parameter_struct.ts @@ -0,0 +1,30 @@ +import { StructType } from '@aztec/foundation/abi'; +import { JsonStringify } from '@aztec/foundation/json-rpc'; +import { LogFn } from '@aztec/foundation/log'; + +import { parseStructString } from '../encoding.js'; +import { getContractArtifact } from '../utils.js'; + +/** + * + */ +export async function parseParameterStruct( + encodedString: string, + contractArtifactPath: string, + parameterName: string, + log: LogFn, +) { + const contractArtifact = await getContractArtifact(contractArtifactPath, log); + const parameterAbitype = contractArtifact.functions + .map(({ parameters }) => parameters) + .flat() + .find(({ name, type }) => name === parameterName && type.kind === 'struct'); + + if (!parameterAbitype) { + log(`No struct parameter found with name ${parameterName}`); + return; + } + + const data = parseStructString(encodedString, parameterAbitype.type as StructType); + log(`\nStruct Data: \n${JsonStringify(data, true)}\n`); +} diff --git a/yarn-project/cli/src/cmds/register_account.ts 
b/yarn-project/cli/src/cmds/register_account.ts new file mode 100644 index 00000000000..fae880f81a1 --- /dev/null +++ b/yarn-project/cli/src/cmds/register_account.ts @@ -0,0 +1,24 @@ +import { Fq, Fr } from '@aztec/foundation/fields'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function registerAccount( + rpcUrl: string, + privateKey: Fq, + partialAddress: Fr, + debugLogger: DebugLogger, + log: LogFn, +) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + + const { address, publicKey } = await client.registerAccount(privateKey, partialAddress); + + log(`\nRegistered account:\n`); + log(`Address: ${address.toString()}`); + log(`Public key: ${publicKey.toString()}`); + log(`Partial address: ${partialAddress.toString()}`); +} diff --git a/yarn-project/cli/src/cmds/register_recipient.ts b/yarn-project/cli/src/cmds/register_recipient.ts new file mode 100644 index 00000000000..e2b3aed2f16 --- /dev/null +++ b/yarn-project/cli/src/cmds/register_recipient.ts @@ -0,0 +1,21 @@ +import { AztecAddress, Fr, Point } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; +import { CompleteAddress } from '@aztec/types'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function registerRecipient( + aztecAddress: AztecAddress, + publicKey: Point, + partialAddress: Fr, + rpcUrl: string, + debugLogger: DebugLogger, + log: LogFn, +) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + await client.registerRecipient(CompleteAddress.create(aztecAddress, publicKey, partialAddress)); + log(`\nRegistered details for account with address: ${aztecAddress}\n`); +} diff --git a/yarn-project/cli/src/cmds/send.ts b/yarn-project/cli/src/cmds/send.ts new file mode 100644 index 00000000000..cb8c3bfb413 --- /dev/null +++ b/yarn-project/cli/src/cmds/send.ts @@ -0,0 +1,40 @@ +import { 
AztecAddress, Contract, Fq, Fr, getSchnorrAccount } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; +import { prepTx } from '../utils.js'; + +/** + * + */ +export async function send( + functionName: string, + functionArgsIn: any[], + contractArtifactPath: string, + contractAddress: AztecAddress, + privateKey: Fq, + rpcUrl: string, + wait: boolean, + debugLogger: DebugLogger, + log: LogFn, +) { + const { functionArgs, contractArtifact } = await prepTx(contractArtifactPath, functionName, functionArgsIn, log); + + const client = await createCompatibleClient(rpcUrl, debugLogger); + const wallet = await getSchnorrAccount(client, privateKey, privateKey, Fr.ZERO).getWallet(); + const contract = await Contract.at(contractAddress, contractArtifact, wallet); + const tx = contract.methods[functionName](...functionArgs).send(); + log(`\nTransaction hash: ${(await tx.getTxHash()).toString()}`); + if (wait) { + await tx.wait(); + + log('Transaction has been mined'); + + const receipt = await tx.getReceipt(); + log(`Status: ${receipt.status}\n`); + log(`Block number: ${receipt.blockNumber}`); + log(`Block hash: ${receipt.blockHash?.toString('hex')}`); + } else { + log('Transaction pending. Check status with get-tx-receipt'); + } +} diff --git a/yarn-project/cli/src/cmds/unbox.ts b/yarn-project/cli/src/cmds/unbox.ts new file mode 100644 index 00000000000..b84694e2608 --- /dev/null +++ b/yarn-project/cli/src/cmds/unbox.ts @@ -0,0 +1,11 @@ +import { LogFn } from '@aztec/foundation/log'; + +import { unboxContract } from '../unbox.js'; + +/** + * + */ +export async function unbox(contractName: string, localDirectory: string | undefined, cliVersion: string, log: LogFn) { + const unboxTo: string = localDirectory ? 
localDirectory : contractName; + await unboxContract(contractName, unboxTo, cliVersion, log); +} diff --git a/yarn-project/cli/src/index.ts b/yarn-project/cli/src/index.ts index 629efbe4eaa..6f44a6ca8b3 100644 --- a/yarn-project/cli/src/index.ts +++ b/yarn-project/cli/src/index.ts @@ -1,50 +1,16 @@ -import { - AztecAddress, - Contract, - ContractDeployer, - EthAddress, - Fr, - GrumpkinScalar, - Note, - generatePublicKey, - getSchnorrAccount, - isContractDeployed, -} from '@aztec/aztec.js'; -import { - FunctionSelector, - StructType, - decodeFunctionSignature, - decodeFunctionSignatureWithParameterNames, -} from '@aztec/foundation/abi'; -import { JsonStringify } from '@aztec/foundation/json-rpc'; +import { initAztecJs } from '@aztec/aztec.js/init'; import { DebugLogger, LogFn } from '@aztec/foundation/log'; -import { sleep } from '@aztec/foundation/sleep'; import { fileURLToPath } from '@aztec/foundation/url'; -import { compileNoir, generateNoirInterface, generateTypescriptInterface } from '@aztec/noir-compiler/cli'; -import { CompleteAddress, ContractData, ExtendedNote, LogFilter } from '@aztec/types'; +import { addNoirCompilerCommanderActions } from '@aztec/noir-compiler/cli'; -import { createSecp256k1PeerId } from '@libp2p/peer-id-factory'; import { Command, Option } from 'commander'; import { readFileSync } from 'fs'; import { dirname, resolve } from 'path'; -import { format } from 'util'; -import { mnemonicToAccount } from 'viem/accounts'; - -import { createCompatibleClient } from './client.js'; -import { encodeArgs, parseStructString } from './encoding.js'; -import { GITHUB_TAG_PREFIX } from './github.js'; -import { unboxContract } from './unbox.js'; -import { update } from './update/update.js'; + import { - deployAztecContracts, - getContractArtifact, - getExampleContractArtifacts, - getFunctionArtifact, - getTxSender, parseAztecAddress, parseEthereumAddress, parseField, - parseFields, parseOptionalAztecAddress, parseOptionalInteger, parseOptionalLogId, @@ 
-55,11 +21,8 @@ import { parsePublicKey, parseSaltFromHexString, parseTxHash, - prepTx, } from './utils.js'; -const accountCreationSalt = Fr.ZERO; - const { ETHEREUM_HOST = 'http://localhost:8545', PRIVATE_KEY, API_KEY } = process.env; /** @@ -88,6 +51,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .argParser(parsePrivateKey) .makeOptionMandatory(mandatory); + program.hook('preAction', initAztecJs); + program .command('deploy-l1-contracts') .description('Deploys all necessary Ethereum contracts for Aztec.') @@ -104,20 +69,15 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { 'test test test test test test test test test test test junk', ) .action(async options => { - const { l1ContractAddresses } = await deployAztecContracts( + const { deployL1Contracts } = await import('./cmds/deploy_l1_contracts.js'); + await deployL1Contracts( options.rpcUrl, options.apiKey ?? '', options.privateKey, options.mnemonic, + log, debugLogger, ); - log('\n'); - log(`Rollup Address: ${l1ContractAddresses.rollupAddress.toString()}`); - log(`Registry Address: ${l1ContractAddresses.registryAddress.toString()}`); - log(`L1 -> L2 Inbox Address: ${l1ContractAddresses.inboxAddress.toString()}`); - log(`L2 -> L1 Outbox address: ${l1ContractAddresses.outboxAddress.toString()}`); - log(`Contract Deployment Emitter Address: ${l1ContractAddresses.contractDeploymentEmitterAddress.toString()}`); - log('\n'); }); program @@ -130,20 +90,9 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { '-m, --mnemonic', 'An optional mnemonic string used for the private key generation. If not provided, random private key will be generated.', ) - .action(options => { - let privKey; - let publicKey; - if (options.mnemonic) { - const acc = mnemonicToAccount(options.mnemonic); - // TODO(#2052): This reduction is not secure enough. TACKLE THIS ISSUE BEFORE MAINNET. 
- const key = GrumpkinScalar.fromBufferReduce(Buffer.from(acc.getHdKey().privateKey!)); - publicKey = generatePublicKey(key); - } else { - const key = GrumpkinScalar.random(); - privKey = key.toString(); - publicKey = generatePublicKey(key); - } - log(`\nPrivate Key: ${privKey}\nPublic Key: ${publicKey.toString()}\n`); + .action(async options => { + const { generatePrivateKey } = await import('./cmds/generate_private_key.js'); + generatePrivateKey(options.mnemonic, log); }); program @@ -151,10 +100,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .summary('Generates a LibP2P peer private key.') .description('Generates a private key that can be used for running a node on a LibP2P network.') .action(async () => { - const peerId = await createSecp256k1PeerId(); - const exportedPeerId = Buffer.from(peerId.privateKey!).toString('hex'); - log(`Private key: ${exportedPeerId}`); - log(`Peer Id: ${peerId}`); + const { generateP2PPrivateKey } = await import('./cmds/generate_p2p_private_key.js'); + await generateP2PPrivateKey(log); }); program @@ -171,28 +118,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { // https://github.com/tj/commander.js#other-option-types-negatable-boolean-and-booleanvalue .option('--no-wait', 'Skip waiting for the contract to be deployed. Print the hash of deployment transaction') .action(async ({ rpcUrl, privateKey, wait }) => { - const client = await createCompatibleClient(rpcUrl, debugLogger); - const actualPrivateKey = privateKey ?? 
GrumpkinScalar.random(); - - const account = getSchnorrAccount(client, actualPrivateKey, actualPrivateKey, accountCreationSalt); - const { address, publicKey, partialAddress } = account.getCompleteAddress(); - const tx = await account.deploy(); - const txHash = await tx.getTxHash(); - debugLogger(`Account contract tx sent with hash ${txHash}`); - if (wait) { - log(`\nWaiting for account contract deployment...`); - await tx.wait(); - } else { - log(`\nAccount deployment transaction hash: ${txHash}\n`); - } - - log(`\nNew account:\n`); - log(`Address: ${address.toString()}`); - log(`Public key: ${publicKey.toString()}`); - if (!privateKey) { - log(`Private key: ${actualPrivateKey.toString()}`); - } - log(`Partial address: ${partialAddress.toString()}`); + const { createAccount } = await import('./cmds/create_account.js'); + await createAccount(rpcUrl, privateKey, wait, debugLogger, log); }); program @@ -209,14 +136,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { ) .addOption(pxeOption) .action(async ({ rpcUrl, privateKey, partialAddress }) => { - const client = await createCompatibleClient(rpcUrl, debugLogger); - - const { address, publicKey } = await client.registerAccount(privateKey, partialAddress); - - log(`\nRegistered account:\n`); - log(`Address: ${address.toString()}`); - log(`Public key: ${publicKey.toString()}`); - log(`Partial address: ${partialAddress.toString()}`); + const { registerAccount } = await import('./cmds/register_account.js'); + await registerAccount(rpcUrl, privateKey, partialAddress, debugLogger, log); }); program @@ -248,58 +169,20 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { // https://github.com/tj/commander.js#other-option-types-negatable-boolean-and-booleanvalue .option('--no-wait', 'Skip waiting for the contract to be deployed. 
Print the hash of deployment transaction') .action(async (artifactPath, { json, rpcUrl, publicKey, args: rawArgs, portalAddress, salt, wait }) => { - const contractArtifact = await getContractArtifact(artifactPath, log); - const constructorArtifact = contractArtifact.functions.find(({ name }) => name === 'constructor'); - - const client = await createCompatibleClient(rpcUrl, debugLogger); - const nodeInfo = await client.getNodeInfo(); - const expectedAztecNrVersion = `${GITHUB_TAG_PREFIX}-v${nodeInfo.sandboxVersion}`; - if (contractArtifact.aztecNrVersion && contractArtifact.aztecNrVersion !== expectedAztecNrVersion) { - log( - `\nWarning: Contract was compiled with a different version of Aztec.nr: ${contractArtifact.aztecNrVersion}. Consider updating Aztec.nr to ${expectedAztecNrVersion}\n`, - ); - } - - const deployer = new ContractDeployer(contractArtifact, client, publicKey); - - const constructor = getFunctionArtifact(contractArtifact, 'constructor'); - if (!constructor) { - throw new Error(`Constructor not found in contract ABI`); - } - - debugLogger(`Input arguments: ${rawArgs.map((x: any) => `"${x}"`).join(', ')}`); - const args = encodeArgs(rawArgs, constructorArtifact!.parameters); - debugLogger(`Encoded arguments: ${args.join(', ')}`); - - const deploy = deployer.deploy(...args); - - await deploy.create({ contractAddressSalt: salt, portalContract: portalAddress }); - const tx = deploy.send({ contractAddressSalt: salt, portalContract: portalAddress }); - const txHash = await tx.getTxHash(); - debugLogger(`Deploy tx sent with hash ${txHash}`); - if (wait) { - const deployed = await tx.wait(); - const { address, partialAddress } = deployed.contract.completeAddress; - if (json) { - logJson({ address: address.toString(), partialAddress: partialAddress.toString() }); - } else { - log(`\nContract deployed at ${address.toString()}\n`); - log(`Contract partial address ${partialAddress.toString()}\n`); - } - } else { - const { address, partialAddress } = 
deploy.completeAddress ?? {}; - if (json) { - logJson({ - address: address?.toString() ?? 'N/A', - partialAddress: partialAddress?.toString() ?? 'N/A', - txHash: txHash.toString(), - }); - } else { - log(`\nContract Address: ${deploy.completeAddress?.address.toString() ?? 'N/A'}`); - log(`Contract Partial Address: ${deploy.completeAddress?.partialAddress.toString() ?? 'N/A'}`); - log(`Deployment transaction hash: ${txHash}\n`); - } - } + const { deploy } = await import('./cmds/deploy.js'); + await deploy( + artifactPath, + json, + rpcUrl, + publicKey, + rawArgs, + portalAddress, + salt, + wait, + debugLogger, + log, + logJson, + ); }); program @@ -312,14 +195,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { ) .addOption(pxeOption) .action(async options => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const address = options.contractAddress; - const isDeployed = await isContractDeployed(client, address); - if (isDeployed) { - log(`\nContract found at ${address.toString()}\n`); - } else { - log(`\nNo contract found at ${address.toString()}\n`); - } + const { checkDeploy } = await import('./cmds/check_deploy.js'); + await checkDeploy(options.rpcUrl, options.contractAddress, debugLogger, log); }); program @@ -337,32 +214,27 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .option('--portal-address
', 'Optional address to a portal contract on L1', parseEthereumAddress) .addOption(pxeOption) .action(async options => { - const artifact = await getContractArtifact(options.contractArtifact, log); - const contractAddress: AztecAddress = options.contractAddress; - const completeAddress = new CompleteAddress( - contractAddress, - options.publicKey ?? Fr.ZERO, + const { addContract } = await import('./cmds/add_contract.js'); + await addContract( + options.rpcUrl, + options.contractArtifact, + options.contractAddress, options.partialAddress, + options.publicKey, + options.portalContract, + debugLogger, + log, ); - const portalContract: EthAddress = options.portalContract ?? EthAddress.ZERO; - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - - await client.addContracts([{ artifact, completeAddress, portalContract }]); - log(`\nContract added to PXE at ${contractAddress.toString()}\n`); }); + program .command('get-tx-receipt') .description('Gets the receipt for the specified transaction hash.') .argument('', 'A transaction hash to get the receipt for.', parseTxHash) .addOption(pxeOption) .action(async (txHash, options) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const receipt = await client.getTxReceipt(txHash); - if (!receipt) { - log(`No receipt found for transaction hash ${txHash.toString()}`); - } else { - log(`\nTransaction receipt: \n${JsonStringify(receipt, true)}\n`); - } + const { getTxReceipt } = await import('./cmds/get_tx_receipt.js'); + await getTxReceipt(options.rpcUrl, txHash, debugLogger, log); }); program @@ -372,28 +244,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .addOption(pxeOption) .option('-b, --include-bytecode ', "Include the contract's public function bytecode, if any.", false) .action(async (contractAddress, options) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const contractDataWithOrWithoutBytecode = 
options.includeBytecode - ? await client.getExtendedContractData(contractAddress) - : await client.getContractData(contractAddress); - - if (!contractDataWithOrWithoutBytecode) { - log(`No contract data found at ${contractAddress}`); - return; - } - let contractData: ContractData; - - if ('contractData' in contractDataWithOrWithoutBytecode) { - contractData = contractDataWithOrWithoutBytecode.contractData; - } else { - contractData = contractDataWithOrWithoutBytecode; - } - log(`\nContract Data: \nAddress: ${contractData.contractAddress.toString()}`); - log(`Portal: ${contractData.portalContractAddress.toString()}`); - if ('bytecode' in contractDataWithOrWithoutBytecode) { - log(`Bytecode: ${contractDataWithOrWithoutBytecode.bytecode}`); - } - log('\n'); + const { getContractData } = await import('./cmds/get_contract_data.js'); + await getContractData(options.rpcUrl, contractAddress, options.includeBytecode, debugLogger, log); }); program @@ -412,55 +264,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .addOption(pxeOption) .option('--follow', 'If set, will keep polling for new logs until interrupted.') .action(async ({ txHash, fromBlock, toBlock, afterLog, contractAddress, selector, rpcUrl, follow }) => { - const pxe = await createCompatibleClient(rpcUrl, debugLogger); - - if (follow) { - if (txHash) { - throw Error('Cannot use --follow with --tx-hash'); - } - if (toBlock) { - throw Error('Cannot use --follow with --to-block'); - } - } - - const filter: LogFilter = { txHash, fromBlock, toBlock, afterLog, contractAddress, selector }; - - const fetchLogs = async () => { - const response = await pxe.getUnencryptedLogs(filter); - const logs = response.logs; - - if (!logs.length) { - const filterOptions = Object.entries(filter) - .filter(([, value]) => value !== undefined) - .map(([key, value]) => `${key}: ${value}`) - .join(', '); - if (!follow) { - log(`No logs found for filter: {${filterOptions}}`); - } - } else { - if (!follow && 
!filter.afterLog) { - log('Logs found: \n'); - } - logs.forEach(unencryptedLog => log(unencryptedLog.toHumanReadable())); - // Set the continuation parameter for the following requests - filter.afterLog = logs[logs.length - 1].id; - } - return response.maxLogsHit; - }; - - if (follow) { - log('Fetching logs...'); - while (true) { - const maxLogsHit = await fetchLogs(); - if (!maxLogsHit) { - await sleep(1000); - } - } - } else { - while (await fetchLogs()) { - // Keep fetching logs until we reach the end. - } - } + const { getLogs } = await import('./cmds/get_logs.js'); + await getLogs(txHash, fromBlock, toBlock, afterLog, contractAddress, selector, rpcUrl, follow, debugLogger, log); }); program @@ -475,9 +280,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { ) .addOption(pxeOption) .action(async ({ address, publicKey, partialAddress, rpcUrl }) => { - const client = await createCompatibleClient(rpcUrl, debugLogger); - await client.registerRecipient(CompleteAddress.create(address, publicKey, partialAddress)); - log(`\nRegistered details for account with address: ${address}\n`); + const { registerRecipient } = await import('./cmds/register_recipient.js'); + await registerRecipient(address, publicKey, partialAddress, rpcUrl, debugLogger, log); }); program @@ -485,16 +289,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .description('Gets all the Aztec accounts stored in the PXE.') .addOption(pxeOption) .action(async (options: any) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const accounts = await client.getRegisteredAccounts(); - if (!accounts.length) { - log('No accounts found.'); - } else { - log(`Accounts found: \n`); - for (const account of accounts) { - log(account.toReadableString()); - } - } + const { getAccounts } = await import('./cmds/get_accounts.js'); + await getAccounts(options.rpcUrl, debugLogger, log); }); program @@ -503,14 +299,8 @@ export function 
getProgram(log: LogFn, debugLogger: DebugLogger): Command { .argument('
', 'The Aztec address to get account for', parseAztecAddress) .addOption(pxeOption) .action(async (address, options) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const account = await client.getRegisteredAccount(address); - - if (!account) { - log(`Unknown account ${address.toString()}`); - } else { - log(account.toReadableString()); - } + const { getAccount } = await import('./cmds/get_account.js'); + await getAccount(address, options.rpcUrl, debugLogger, log); }); program @@ -518,16 +308,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .description('Gets all the recipients stored in the PXE.') .addOption(pxeOption) .action(async (options: any) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const recipients = await client.getRecipients(); - if (!recipients.length) { - log('No recipients found.'); - } else { - log(`Recipients found: \n`); - for (const recipient of recipients) { - log(recipient.toReadableString()); - } - } + const { getRecipients } = await import('./cmds/get_recipients.js'); + await getRecipients(options.rpcUrl, debugLogger, log); }); program @@ -536,14 +318,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .argument('
', 'The Aztec address to get recipient for', parseAztecAddress) .addOption(pxeOption) .action(async (address, options) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const recipient = await client.getRecipient(address); - - if (!recipient) { - log(`Unknown recipient ${address.toString()}`); - } else { - log(recipient.toReadableString()); - } + const { getRecipient } = await import('./cmds/get_recipient.js'); + await getRecipient(address, options.rpcUrl, debugLogger, log); }); program @@ -560,31 +336,18 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .addOption(pxeOption) .option('--no-wait', 'Print transaction hash without waiting for it to be mined') .action(async (functionName, options) => { - const { functionArgs, contractArtifact } = await prepTx( - options.contractArtifact, + const { send } = await import('./cmds/send.js'); + await send( functionName, options.args, + options.contractArtifact, + options.contractAddress, + options.privateKey, + options.rpcUrl, + !options.noWait, + debugLogger, log, ); - const { contractAddress, privateKey } = options; - - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const wallet = await getSchnorrAccount(client, privateKey, privateKey, accountCreationSalt).getWallet(); - const contract = await Contract.at(contractAddress, contractArtifact, wallet); - const tx = contract.methods[functionName](...functionArgs).send(); - log(`\nTransaction hash: ${(await tx.getTxHash()).toString()}`); - if (options.wait) { - await tx.wait(); - - log('Transaction has been mined'); - - const receipt = await tx.getReceipt(); - log(`Status: ${receipt.status}\n`); - log(`Block number: ${receipt.blockNumber}`); - log(`Block hash: ${receipt.blockHash?.toString('hex')}`); - } else { - log('Transaction pending. 
Check status with get-tx-receipt'); - } }); program @@ -602,23 +365,17 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .option('-f, --from ', 'Aztec address of the caller. If empty, will use the first account from RPC.') .addOption(pxeOption) .action(async (functionName, options) => { - const { functionArgs, contractArtifact } = await prepTx( - options.contractArtifact, + const { call } = await import('./cmds/call.js'); + await call( functionName, options.args, + options.contractArtifact, + options.contractAddress, + options.from, + options.rpcUrl, + debugLogger, log, ); - - const fnArtifact = getFunctionArtifact(contractArtifact, functionName); - if (fnArtifact.parameters.length !== options.args.length) { - throw Error( - `Invalid number of args passed. Expected ${fnArtifact.parameters.length}; Received: ${options.args.length}`, - ); - } - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const from = await getTxSender(client, options.from); - const result = await client.viewTx(functionName, functionArgs, options.contractAddress, from); - log(format('\nView result: ', result, '\n')); }); program @@ -631,10 +388,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .requiredOption('-n, --note [note...]', 'The members of a Note serialized as hex strings.', []) .addOption(pxeOption) .action(async (address, contractAddress, storageSlot, txHash, options) => { - const note = new Note(parseFields(options.note)); - const extendedNote = new ExtendedNote(note, address, contractAddress, storageSlot, txHash); - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - await client.addNote(extendedNote); + const { addNote } = await import('./cmds/add_note.js'); + await addNote(address, contractAddress, storageSlot, txHash, options.note, options.rpcUrl, debugLogger); }); // Helper for users to decode hex strings into structs if needed. 
@@ -648,17 +403,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { ) .requiredOption('-p, --parameter ', 'The name of the struct parameter to decode into') .action(async (encodedString, options) => { - const contractArtifact = await getContractArtifact(options.contractArtifact, log); - const parameterAbitype = contractArtifact.functions - .map(({ parameters }) => parameters) - .flat() - .find(({ name, type }) => name === options.parameter && type.kind === 'struct'); - if (!parameterAbitype) { - log(`No struct parameter found with name ${options.parameter}`); - return; - } - const data = parseStructString(encodedString, parameterAbitype.type as StructType); - log(`\nStruct Data: \n${JsonStringify(data, true)}\n`); + const { parseParameterStruct } = await import('./cmds/parse_parameter_struct.js'); + await parseParameterStruct(encodedString, options.contractArtifact, options.parameter, log); }); program @@ -666,18 +412,16 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .description('Gets the current Aztec L2 block number.') .addOption(pxeOption) .action(async (options: any) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const num = await client.getBlockNumber(); - log(`${num}\n`); + const { blockNumber } = await import('./cmds/block_number.js'); + await blockNumber(options.rpcUrl, debugLogger, log); }); program .command('example-contracts') .description('Lists the example contracts available to deploy from @aztec/noir-contracts') .action(async () => { - const abisList = await getExampleContractArtifacts(); - const names = Object.keys(abisList); - names.forEach(name => log(name)); + const { exampleContracts } = await import('./cmds/example_contracts.js'); + await exampleContracts(log); }); program @@ -691,8 +435,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { 'Local directory to unbox source folder to (relative or absolute), optional - 
defaults to `/`', ) .action(async (contractName, localDirectory) => { - const unboxTo: string = localDirectory ? localDirectory : contractName; - await unboxContract(contractName, unboxTo, cliVersion, log); + const { unbox } = await import('./cmds/unbox.js'); + await unbox(contractName, localDirectory, cliVersion, log); }); program @@ -700,14 +444,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .description('Gets the information of an aztec node at a URL.') .addOption(pxeOption) .action(async options => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const info = await client.getNodeInfo(); - log(`\nNode Info:\n`); - log(`Sandbox Version: ${info.sandboxVersion}\n`); - log(`Compatible Nargo Version: ${info.compatibleNargoVersion}\n`); - log(`Chain Id: ${info.chainId}\n`); - log(`Protocol Version: ${info.protocolVersion}\n`); - log(`Rollup Address: ${info.l1ContractAddresses.rollupAddress.toString()}`); + const { getNodeInfo } = await import('./cmds/get_node_info.js'); + await getNodeInfo(options.rpcUrl, debugLogger, log); }); program @@ -718,30 +456,17 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { `A compiled Noir contract's artifact in JSON format or name of a contract artifact exported by @aztec/noir-contracts`, ) .action(async (contractArtifactFile: string) => { - const contractArtifact = await getContractArtifact(contractArtifactFile, debugLogger); - const contractFns = contractArtifact.functions.filter( - f => !f.isInternal && f.name !== 'compute_note_hash_and_nullifier', - ); - if (contractFns.length === 0) { - log(`No external functions found for contract ${contractArtifact.name}`); - } - for (const fn of contractFns) { - const signatureWithParameterNames = decodeFunctionSignatureWithParameterNames(fn.name, fn.parameters); - const signature = decodeFunctionSignature(fn.name, fn.parameters); - const selector = FunctionSelector.fromSignature(signature); - log( - 
`${fn.functionType} ${signatureWithParameterNames} \n\tfunction signature: ${signature}\n\tselector: ${selector}`, - ); - } + const { inspectContract } = await import('./cmds/inspect_contract.js'); + await inspectContract(contractArtifactFile, debugLogger, log); }); program .command('compute-selector') .description('Given a function signature, it computes a selector') .argument('', 'Function signature to compute selector for e.g. foo(Field)') - .action((functionSignature: string) => { - const selector = FunctionSelector.fromSignature(functionSignature); - log(`${selector}`); + .action(async (functionSignature: string) => { + const { computeSelector } = await import('./cmds/compute_selector.js'); + computeSelector(functionSignature, log); }); program @@ -752,13 +477,11 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .option('--sandbox-version ', 'The sandbox version to update to. Defaults to latest', 'latest') .addOption(pxeOption) .action(async (projectPath: string, options) => { - const { contract } = options; - await update(projectPath, contract, options.rpcUrl, options.sandboxVersion, log, debugLogger); + const { update } = await import('./update/update.js'); + await update(projectPath, options.contract, options.rpcUrl, options.sandboxVersion, log, debugLogger); }); - compileNoir(program, 'compile', log); - generateTypescriptInterface(program, 'generate-typescript', log); - generateNoirInterface(program, 'generate-noir-interface', log); + addNoirCompilerCommanderActions(program, log); return program; } diff --git a/yarn-project/cli/src/utils.ts b/yarn-project/cli/src/utils.ts index dc4f8732617..ed8dd3cc017 100644 --- a/yarn-project/cli/src/utils.ts +++ b/yarn-project/cli/src/utils.ts @@ -1,24 +1,15 @@ -import { AztecAddress, EthAddress, Fr, FunctionSelector, GrumpkinScalar, PXE, Point, TxHash } from '@aztec/aztec.js'; -import { L1ContractArtifactsForDeployment, createEthereumChain, deployL1Contracts } from '@aztec/ethereum'; 
-import { ContractArtifact } from '@aztec/foundation/abi'; +import { type ContractArtifact, type FunctionArtifact, FunctionSelector } from '@aztec/aztec.js/abi'; +import { AztecAddress } from '@aztec/aztec.js/aztec_address'; +import { EthAddress } from '@aztec/aztec.js/eth_address'; +import { type L1ContractArtifactsForDeployment } from '@aztec/aztec.js/ethereum'; +import { Fr, GrumpkinScalar, Point } from '@aztec/aztec.js/fields'; +import { type PXE } from '@aztec/aztec.js/interfaces/pxe'; +import { LogId } from '@aztec/aztec.js/log_id'; +import { TxHash } from '@aztec/aztec.js/tx_hash'; import { DebugLogger, LogFn } from '@aztec/foundation/log'; -import { - ContractDeploymentEmitterAbi, - ContractDeploymentEmitterBytecode, - InboxAbi, - InboxBytecode, - OutboxAbi, - OutboxBytecode, - RegistryAbi, - RegistryBytecode, - RollupAbi, - RollupBytecode, -} from '@aztec/l1-artifacts'; -import { LogId } from '@aztec/types'; import { CommanderError, InvalidArgumentError } from 'commander'; import { readFile, rename, writeFile } from 'fs/promises'; -import { mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; import { encodeArgs } from './encoding.js'; @@ -35,7 +26,7 @@ interface ArtifactsType { * @param fnName - Function name to be found. * @returns The function's ABI. 
*/ -export function getFunctionArtifact(artifact: ContractArtifact, fnName: string) { +export function getFunctionArtifact(artifact: ContractArtifact, fnName: string): FunctionArtifact { const fn = artifact.functions.find(({ name }) => name === fnName); if (!fn) { throw Error(`Function ${fnName} not found in contract ABI.`); @@ -57,6 +48,21 @@ export async function deployAztecContracts( mnemonic: string, debugLogger: DebugLogger, ) { + const { + ContractDeploymentEmitterAbi, + ContractDeploymentEmitterBytecode, + InboxAbi, + InboxBytecode, + OutboxAbi, + OutboxBytecode, + RegistryAbi, + RegistryBytecode, + RollupAbi, + RollupBytecode, + } = await import('@aztec/l1-artifacts'); + const { createEthereumChain, deployL1Contracts } = await import('@aztec/ethereum'); + const { mnemonicToAccount, privateKeyToAccount } = await import('viem/accounts'); + const account = !privateKey ? mnemonicToAccount(mnemonic!) : privateKeyToAccount(`0x${privateKey}`); const chain = createEthereumChain(rpcUrl, apiKey); const l1Artifacts: L1ContractArtifactsForDeployment = { diff --git a/yarn-project/end-to-end/src/shared/browser.ts b/yarn-project/end-to-end/src/shared/browser.ts index 72b064bc903..9c7cfdfc154 100644 --- a/yarn-project/end-to-end/src/shared/browser.ts +++ b/yarn-project/end-to-end/src/shared/browser.ts @@ -84,8 +84,9 @@ export const browserTestSuite = (setup: () => Server, pageLogger: AztecJs.DebugL }); it('Loads Aztec.js in the browser', async () => { - const generatePublicKeyExists = await page.evaluate(() => { - const { generatePublicKey } = window.AztecJs; + const generatePublicKeyExists = await page.evaluate(async () => { + const { generatePublicKey, init } = window.AztecJs; + await init(); return typeof generatePublicKey === 'function'; }); expect(generatePublicKeyExists).toBe(true); diff --git a/yarn-project/foundation/src/crypto/index.ts b/yarn-project/foundation/src/crypto/index.ts index 98abed1dacd..f574fb4d2f5 100644 --- 
a/yarn-project/foundation/src/crypto/index.ts +++ b/yarn-project/foundation/src/crypto/index.ts @@ -1,4 +1,16 @@ +import { BarretenbergSync } from '@aztec/bb.js'; + export * from './keccak/index.js'; export * from './random/index.js'; export * from './sha256/index.js'; export * from './pedersen/index.js'; + +/** + * Init the bb singleton. This constructs (if not already) the barretenberg sync api within bb.js itself. + * It takes about 100-200ms to initialize. It may not seem like much, but when in conjunction with many other things + * initializing, developers may want to pick precisely when to incur this cost. + * If in a test environment, we'll just do it on module load. + */ +export async function init() { + await BarretenbergSync.initSingleton(); +} diff --git a/yarn-project/foundation/src/crypto/pedersen/index.test.ts b/yarn-project/foundation/src/crypto/pedersen/index.test.ts index 1d152a917d0..412b07ec962 100644 --- a/yarn-project/foundation/src/crypto/pedersen/index.test.ts +++ b/yarn-project/foundation/src/crypto/pedersen/index.test.ts @@ -1,7 +1,13 @@ +import { BarretenbergSync } from '@aztec/bb.js'; + import { toBufferBE } from '../../bigint-buffer/index.js'; import { pedersenCommit, pedersenHash, pedersenHashBuffer } from './index.js'; describe('pedersen', () => { + beforeAll(async () => { + await BarretenbergSync.initSingleton(); + }); + it('pedersen commit', () => { const r = pedersenCommit([toBufferBE(1n, 32), toBufferBE(1n, 32)]); expect(r).toEqual([ diff --git a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts index 2a117ea5519..6793b368c8f 100644 --- a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts +++ b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts @@ -1,9 +1,5 @@ import { BarretenbergSync, Fr } from '@aztec/bb.js'; -// Get the singleton. This constructs (if not already) the barretenberg sync api within bb.js itself. 
-// This can be called from multiple other modules as needed, and it ensures it's only constructed once. -const api = await BarretenbergSync.getSingleton(); - /** * Create a pedersen commitment (point) from an array of input fields. * Left pads any inputs less than 32 bytes. @@ -13,7 +9,7 @@ export function pedersenCommit(input: Buffer[]) { throw new Error('All input buffers must be <= 32 bytes.'); } input = input.map(i => (i.length < 32 ? Buffer.concat([Buffer.alloc(32 - i.length, 0), i]) : i)); - const point = api.pedersenCommit(input.map(i => new Fr(i))); + const point = BarretenbergSync.getSingleton().pedersenCommit(input.map(i => new Fr(i))); // toBuffer returns Uint8Arrays (browser/worker-boundary friendly). // TODO: rename toTypedArray()? return [Buffer.from(point.x.toBuffer()), Buffer.from(point.y.toBuffer())]; @@ -29,7 +25,7 @@ export function pedersenHash(input: Buffer[], index = 0) { } input = input.map(i => (i.length < 32 ? Buffer.concat([Buffer.alloc(32 - i.length, 0), i]) : i)); return Buffer.from( - api + BarretenbergSync.getSingleton() .pedersenHash( input.map(i => new Fr(i)), index, @@ -42,5 +38,5 @@ export function pedersenHash(input: Buffer[], index = 0) { * Create a pedersen hash from an arbitrary length buffer. 
*/ export function pedersenHashBuffer(input: Buffer, index = 0) { - return Buffer.from(api.pedersenHashBuffer(input, index).toBuffer()); + return Buffer.from(BarretenbergSync.getSingleton().pedersenHashBuffer(input, index).toBuffer()); } diff --git a/yarn-project/noir-compiler/src/cli.ts b/yarn-project/noir-compiler/src/cli.ts index 837975c8075..7416abed4cc 100644 --- a/yarn-project/noir-compiler/src/cli.ts +++ b/yarn-project/noir-compiler/src/cli.ts @@ -3,18 +3,14 @@ import { createConsoleLogger } from '@aztec/foundation/log'; import { Command } from 'commander'; -import { compileNoir } from './cli/compileNoir.js'; -import { generateNoirInterface } from './cli/noir-interface.js'; -import { generateTypescriptInterface } from './cli/typescript.js'; +import { addNoirCompilerCommanderActions } from './cli/add_noir_compiler_commander_actions.js'; const program = new Command(); const log = createConsoleLogger('aztec:compiler-cli'); const main = async () => { program.name('aztec-compile'); - compileNoir(program, 'compile', log); - generateTypescriptInterface(program, 'typescript', log); - generateNoirInterface(program, 'interface', log); + addNoirCompilerCommanderActions(program, log); await program.parseAsync(process.argv); }; diff --git a/yarn-project/noir-compiler/src/cli/add_noir_compiler_commander_actions.ts b/yarn-project/noir-compiler/src/cli/add_noir_compiler_commander_actions.ts new file mode 100644 index 00000000000..f651eecd996 --- /dev/null +++ b/yarn-project/noir-compiler/src/cli/add_noir_compiler_commander_actions.ts @@ -0,0 +1,67 @@ +import { LogFn } from '@aztec/foundation/log'; + +import { Command } from 'commander'; + +/** + * CLI options for configuring behavior + */ +interface Options { + // eslint-disable-next-line jsdoc/require-jsdoc + outdir: string; + // eslint-disable-next-line jsdoc/require-jsdoc + typescript: string | undefined; + // eslint-disable-next-line jsdoc/require-jsdoc + interface: string | undefined; + // eslint-disable-next-line 
jsdoc/require-jsdoc + compiler: string | undefined; +} + +/** + * + */ +export function addNoirCompilerCommanderActions(program: Command, log: LogFn = () => {}) { + program + .command('compile') + .argument('', 'Path to the bin or Aztec.nr project to compile') + .option('-o, --outdir ', 'Output folder for the binary artifacts, relative to the project path', 'target') + .option('-ts, --typescript ', 'Optional output folder for generating typescript wrappers', undefined) + .option('-i, --interface ', 'Optional output folder for generating an Aztec.nr contract interface', undefined) + .option('-c --compiler ', 'Which compiler to use. Either nargo or wasm. Defaults to nargo', 'wasm') + .description('Compiles the Noir Source in the target project') + + .action(async (projectPath: string, options: Options) => { + const { compileNoir } = await import('./compile_noir.js'); + await compileNoir(projectPath, options, log); + }); + + program + .command('generate-typescript') + .argument('', 'Path to the noir project') + .option('--artifacts ', 'Folder containing the compiled artifacts, relative to the project path', 'target') + .option( + '-o, --outdir ', + 'Output folder for the generated noir interfaces, relative to the project path', + 'interfaces', + ) + .description('Generates Noir interfaces from the artifacts in the given project') + + .action(async (projectPath: string, options) => { + const { generateTypescriptInterface } = await import('./generate_typescript_interface.js'); + generateTypescriptInterface(projectPath, options, log); + }); + + return program + .command('generate-noir-interface') + .argument('', 'Path to the noir project') + .option('--artifacts ', 'Folder containing the compiled artifacts, relative to the project path', 'target') + .option( + '-o, --outdir ', + 'Output folder for the generated noir interfaces, relative to the project path', + 'interfaces', + ) + .description('Generates Noir interfaces from the artifacts in the given project') + 
.action(async (projectPath: string, options) => { + const { generateNoirInterface } = await import('./generate_noir_interface.js'); + generateNoirInterface(projectPath, options, log); + }); +} diff --git a/yarn-project/noir-compiler/src/cli/compileNoir.ts b/yarn-project/noir-compiler/src/cli/compile_noir.ts similarity index 75% rename from yarn-project/noir-compiler/src/cli/compileNoir.ts rename to yarn-project/noir-compiler/src/cli/compile_noir.ts index 7e00fed39fa..f87055464a9 100644 --- a/yarn-project/noir-compiler/src/cli/compileNoir.ts +++ b/yarn-project/noir-compiler/src/cli/compile_noir.ts @@ -1,7 +1,6 @@ import { ContractArtifact } from '@aztec/foundation/abi'; import { LogFn } from '@aztec/foundation/log'; -import { Command } from 'commander'; import { mkdirSync, writeFileSync } from 'fs'; import { mkdirpSync } from 'fs-extra'; import path, { resolve } from 'path'; @@ -34,32 +33,21 @@ interface Options { * @param log - Optional logging function. * @returns The program with the command registered. */ -export function compileNoir(program: Command, name = 'compile', log: LogFn = () => {}): Command { - return program - .command(name) - .argument('', 'Path to the bin or Aztec.nr project to compile') - .option('-o, --outdir ', 'Output folder for the binary artifacts, relative to the project path', 'target') - .option('-ts, --typescript ', 'Optional output folder for generating typescript wrappers', undefined) - .option('-i, --interface ', 'Optional output folder for generating an Aztec.nr contract interface', undefined) - .option('-c --compiler ', 'Which compiler to use. Either nargo or wasm. 
Defaults to nargo', 'wasm') - .description('Compiles the Noir Source in the target project') - - .action(async (projectPath: string, options: Options) => { - const { compiler } = options; - if (typeof projectPath !== 'string') { - throw new Error(`Missing project path argument`); - } - if (compiler !== 'nargo' && compiler !== 'wasm') { - throw new Error(`Invalid compiler: ${compiler}`); - } +export async function compileNoir(projectPath: string, options: Options, log: LogFn = () => {}) { + const { compiler } = options; + if (typeof projectPath !== 'string') { + throw new Error(`Missing project path argument`); + } + if (compiler !== 'nargo' && compiler !== 'wasm') { + throw new Error(`Invalid compiler: ${compiler}`); + } - const compile = compiler === 'wasm' ? compileUsingNoirWasm : compileUsingNargo; - log(`Compiling ${projectPath} with ${compiler} backend...`); - const results = await compile(projectPath, { log }); - for (const result of results) { - generateOutput(projectPath, result, options, log); - } - }); + const compile = compiler === 'wasm' ? 
compileUsingNoirWasm : compileUsingNargo; + log(`Compiling ${projectPath} with ${compiler} backend...`); + const results = await compile(projectPath, { log }); + for (const result of results) { + generateOutput(projectPath, result, options, log); + } } /** diff --git a/yarn-project/noir-compiler/src/cli/generate_noir_interface.ts b/yarn-project/noir-compiler/src/cli/generate_noir_interface.ts new file mode 100644 index 00000000000..4a13a1c756f --- /dev/null +++ b/yarn-project/noir-compiler/src/cli/generate_noir_interface.ts @@ -0,0 +1,48 @@ +import { LogFn } from '@aztec/foundation/log'; + +import { readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; +import { mkdirpSync } from 'fs-extra'; +import path, { resolve } from 'path'; + +import { generateNoirContractInterface } from '../index.js'; +import { isContractArtifact } from '../utils.js'; + +/** + * + */ +export function generateNoirInterface( + projectPath: string, + options: { + // eslint-disable-next-line jsdoc/require-jsdoc + outdir: string; + // eslint-disable-next-line jsdoc/require-jsdoc + artifacts: string; + }, + log: LogFn, +) { + const { outdir, artifacts } = options; + if (typeof projectPath !== 'string') { + throw new Error(`Missing project path argument`); + } + const currentDir = process.cwd(); + + const artifactsDir = resolve(projectPath, artifacts); + for (const artifactsDirItem of readdirSync(artifactsDir)) { + const artifactPath = resolve(artifactsDir, artifactsDirItem); + if (statSync(artifactPath).isFile() && artifactPath.endsWith('.json')) { + const contract = JSON.parse(readFileSync(artifactPath).toString()); + if (!isContractArtifact(contract)) { + continue; + } + const interfacePath = resolve(projectPath, outdir, `${contract.name}_interface.nr`); + log(`Writing ${contract.name} Noir external interface to ${path.relative(currentDir, interfacePath)}`); + try { + const noirInterface = generateNoirContractInterface(contract); + mkdirpSync(path.dirname(interfacePath)); + 
writeFileSync(interfacePath, noirInterface); + } catch (err) { + log(`Error generating interface for ${artifactPath}: ${err}`); + } + } + } +} diff --git a/yarn-project/noir-compiler/src/cli/generate_typescript_interface.ts b/yarn-project/noir-compiler/src/cli/generate_typescript_interface.ts new file mode 100644 index 00000000000..d004706c257 --- /dev/null +++ b/yarn-project/noir-compiler/src/cli/generate_typescript_interface.ts @@ -0,0 +1,57 @@ +import { LogFn } from '@aztec/foundation/log'; + +import { readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; +import { mkdirpSync } from 'fs-extra'; +import path, { resolve } from 'path'; + +import { generateTypescriptContractInterface } from '../index.js'; +import { isContractArtifact } from '../utils.js'; + +/** + * Registers a 'typescript' command on the given commander program that generates typescript interface out of an ABI. + * @param program - Commander program. + * @param log - Optional logging function. + * @returns The program with the command registered. 
+ */ +export function generateTypescriptInterface( + projectPath: string, + options: { + /* eslint-disable jsdoc/require-jsdoc */ + outdir: string; + /* eslint-disable jsdoc/require-jsdoc */ + artifacts: string; + }, + log: LogFn, +) { + const { outdir, artifacts } = options; + if (typeof projectPath !== 'string') { + throw new Error(`Missing project path argument`); + } + const currentDir = process.cwd(); + + const artifactsDir = resolve(projectPath, artifacts); + for (const artifactsDirItem of readdirSync(artifactsDir)) { + const artifactPath = resolve(artifactsDir, artifactsDirItem); + if (statSync(artifactPath).isFile() && artifactPath.endsWith('.json')) { + const contract = JSON.parse(readFileSync(artifactPath).toString()); + if (!isContractArtifact(contract)) { + continue; + } + const tsPath = resolve(projectPath, outdir, `${contract.name}.ts`); + log(`Writing ${contract.name} typescript interface to ${path.relative(currentDir, tsPath)}`); + let relativeArtifactPath = path.relative(path.dirname(tsPath), artifactPath); + if (relativeArtifactPath === `${contract.name}.json`) { + // relative path edge case, prepending ./ for local import - the above logic just does + // `${contract.name}.json`, which is not a valid import for a file in the same directory + relativeArtifactPath = `./${contract.name}.json`; + } + try { + const tsWrapper = generateTypescriptContractInterface(contract, relativeArtifactPath); + mkdirpSync(path.dirname(tsPath)); + writeFileSync(tsPath, tsWrapper); + } catch (err) { + log(`Error generating interface for ${artifactPath}: ${err}`); + } + } + } +} diff --git a/yarn-project/noir-compiler/src/cli/index.ts b/yarn-project/noir-compiler/src/cli/index.ts index 0687e01706f..df1ae3d0880 100644 --- a/yarn-project/noir-compiler/src/cli/index.ts +++ b/yarn-project/noir-compiler/src/cli/index.ts @@ -1,3 +1 @@ -export { compileNoir } from './compileNoir.js'; -export { generateNoirInterface } from './noir-interface.js'; -export { 
generateTypescriptInterface } from './typescript.js'; +export { addNoirCompilerCommanderActions } from './add_noir_compiler_commander_actions.js'; diff --git a/yarn-project/noir-compiler/src/cli/noir-interface.ts b/yarn-project/noir-compiler/src/cli/noir-interface.ts deleted file mode 100644 index 54db5b7b86b..00000000000 --- a/yarn-project/noir-compiler/src/cli/noir-interface.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { LogFn } from '@aztec/foundation/log'; - -import { Command } from 'commander'; -import { readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; -import { mkdirpSync } from 'fs-extra'; -import path, { resolve } from 'path'; - -import { generateNoirContractInterface } from '../index.js'; -import { isContractArtifact } from '../utils.js'; - -/** - * Registers a 'interface' command on the given commander program that generates a Noir interface out of an ABI. - * @param program - Commander program. - * @param log - Optional logging function. - * @returns The program with the command registered. 
- */ -export function generateNoirInterface(program: Command, name = 'interface', log: LogFn = () => {}): Command { - return program - .command(name) - .argument('', 'Path to the noir project') - .option('--artifacts ', 'Folder containing the compiled artifacts, relative to the project path', 'target') - .option( - '-o, --outdir ', - 'Output folder for the generated noir interfaces, relative to the project path', - 'interfaces', - ) - .description('Generates Noir interfaces from the artifacts in the given project') - - .action( - ( - projectPath: string, - /* eslint-disable jsdoc/require-jsdoc */ - options: { - outdir: string; - artifacts: string; - }, - /* eslint-enable jsdoc/require-jsdoc */ - ) => { - const { outdir, artifacts } = options; - if (typeof projectPath !== 'string') { - throw new Error(`Missing project path argument`); - } - const currentDir = process.cwd(); - - const artifactsDir = resolve(projectPath, artifacts); - for (const artifactsDirItem of readdirSync(artifactsDir)) { - const artifactPath = resolve(artifactsDir, artifactsDirItem); - if (statSync(artifactPath).isFile() && artifactPath.endsWith('.json')) { - const contract = JSON.parse(readFileSync(artifactPath).toString()); - if (!isContractArtifact(contract)) { - continue; - } - const interfacePath = resolve(projectPath, outdir, `${contract.name}_interface.nr`); - log(`Writing ${contract.name} Noir external interface to ${path.relative(currentDir, interfacePath)}`); - try { - const noirInterface = generateNoirContractInterface(contract); - mkdirpSync(path.dirname(interfacePath)); - writeFileSync(interfacePath, noirInterface); - } catch (err) { - log(`Error generating interface for ${artifactPath}: ${err}`); - } - } - } - }, - ); -} diff --git a/yarn-project/noir-compiler/src/cli/typescript.ts b/yarn-project/noir-compiler/src/cli/typescript.ts deleted file mode 100644 index cf107cc0338..00000000000 --- a/yarn-project/noir-compiler/src/cli/typescript.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { 
LogFn } from '@aztec/foundation/log'; - -import { Command } from 'commander'; -import { readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; -import { mkdirpSync } from 'fs-extra'; -import path, { resolve } from 'path'; - -import { generateTypescriptContractInterface } from '../index.js'; -import { isContractArtifact } from '../utils.js'; - -/** - * Registers a 'typescript' command on the given commander program that generates typescript interface out of an ABI. - * @param program - Commander program. - * @param log - Optional logging function. - * @returns The program with the command registered. - */ -export function generateTypescriptInterface(program: Command, name = 'typescript', log: LogFn = () => {}): Command { - return program - .command(name) - .argument('', 'Path to the noir project') - .option('--artifacts ', 'Folder containing the compiled artifacts, relative to the project path', 'target') - .option( - '-o, --outdir ', - 'Output folder for the generated typescript wrappers, relative to the project path', - 'types', - ) - .description('Generates typescript interfaces from the artifacts in the given project') - - .action( - ( - projectPath: string, - /* eslint-disable jsdoc/require-jsdoc */ - options: { - outdir: string; - artifacts: string; - }, - /* eslint-enable jsdoc/require-jsdoc */ - ) => { - const { outdir, artifacts } = options; - if (typeof projectPath !== 'string') { - throw new Error(`Missing project path argument`); - } - const currentDir = process.cwd(); - - const artifactsDir = resolve(projectPath, artifacts); - for (const artifactsDirItem of readdirSync(artifactsDir)) { - const artifactPath = resolve(artifactsDir, artifactsDirItem); - if (statSync(artifactPath).isFile() && artifactPath.endsWith('.json')) { - const contract = JSON.parse(readFileSync(artifactPath).toString()); - if (!isContractArtifact(contract)) { - continue; - } - const tsPath = resolve(projectPath, outdir, `${contract.name}.ts`); - log(`Writing ${contract.name} 
typescript interface to ${path.relative(currentDir, tsPath)}`); - let relativeArtifactPath = path.relative(path.dirname(tsPath), artifactPath); - if (relativeArtifactPath === `${contract.name}.json`) { - // relative path edge case, prepending ./ for local import - the above logic just does - // `${contract.name}.json`, which is not a valid import for a file in the same directory - relativeArtifactPath = `./${contract.name}.json`; - } - try { - const tsWrapper = generateTypescriptContractInterface(contract, relativeArtifactPath); - mkdirpSync(path.dirname(tsPath)); - writeFileSync(tsPath, tsWrapper); - } catch (err) { - log(`Error generating interface for ${artifactPath}: ${err}`); - } - } - } - }, - ); -} diff --git a/yarn-project/noir-contracts/scripts/compile.sh b/yarn-project/noir-contracts/scripts/compile.sh index fe2096f4cf4..bc217a6156b 100755 --- a/yarn-project/noir-contracts/scripts/compile.sh +++ b/yarn-project/noir-contracts/scripts/compile.sh @@ -19,4 +19,4 @@ build() { export -f build # run 4 builds at a time -echo "$@" | xargs -n 1 -P 4 bash -c 'build "$0"' +echo "$@" | xargs -n 1 -P $(nproc) bash -c 'build "$0"' diff --git a/yarn-project/noir-protocol-circuits/package.json b/yarn-project/noir-protocol-circuits/package.json index 2b4b8351e53..f5eccb835a5 100644 --- a/yarn-project/noir-protocol-circuits/package.json +++ b/yarn-project/noir-protocol-circuits/package.json @@ -12,7 +12,7 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", - "noir:build": "cd src && ../../../noir/target/release/nargo compile && rm -rf ./target/debug_*", + "noir:build": "cd src && ../../../noir/target/release/nargo compile --silence-warnings && rm -rf ./target/debug_*", "noir:types": "yarn ts-node --esm src/scripts/generate_ts_from_abi.ts && yarn formatting:fix", "noir:test": "cd src && ../../../noir/target/release/nargo test", "test": 
"NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --passWithNoTests" diff --git a/yarn-project/package.json b/yarn-project/package.json index c123e44e1b3..089ce942376 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -64,7 +64,7 @@ "resolutions": { "ts-jest@^29.1.0": "patch:ts-jest@npm%3A29.1.1#./.yarn/patches/ts-jest-npm-29.1.1-04e888e48e.patch", "ts-jest@^29.1.1": "patch:ts-jest@npm%3A29.1.1#./.yarn/patches/ts-jest-npm-29.1.1-04e888e48e.patch", - "@aztec/bb.js": "portal:../barretenberg/ts/package", + "@aztec/bb.js": "portal:../barretenberg/ts", "@noir-lang/acvm_js": "portal:../noir/packages/acvm_js", "@noir-lang/backend_barretenberg": "portal:../noir/packages/backend_barretenberg", "@noir-lang/types": "portal:../noir/packages/types", diff --git a/yarn-project/types/package.json b/yarn-project/types/package.json index 17e999a421f..5c472ea84dc 100644 --- a/yarn-project/types/package.json +++ b/yarn-project/types/package.json @@ -5,7 +5,10 @@ "exports": { ".": "./dest/index.js", "./stats": "./dest/stats/index.js", - "./jest": "./dest/jest/index.js" + "./jest": "./dest/jest/index.js", + "./interfaces": "./dest/interfaces/index.js", + "./log_id": "./dest/logs/log_id.js", + "./tx_hash": "./dest/tx/tx_hash.js" }, "typedocOptions": { "entryPoints": [ diff --git a/yarn-project/types/src/index.ts b/yarn-project/types/src/index.ts index 3ffc8125103..149ec5a7a6a 100644 --- a/yarn-project/types/src/index.ts +++ b/yarn-project/types/src/index.ts @@ -23,5 +23,4 @@ export * from './interfaces/index.js'; export * from './sibling_path.js'; export * from './auth_witness.js'; export * from './aztec_node/rpc/index.js'; -export * from '@aztec/circuits.js/types'; -export { CompleteAddress } from '@aztec/circuits.js'; +export { CompleteAddress, PublicKey, PartialAddress, GrumpkinPrivateKey } from '@aztec/circuits.js'; diff --git a/yarn-project/types/src/interfaces/deployed-contract.ts b/yarn-project/types/src/interfaces/deployed-contract.ts 
index 1e5fd058197..784b162ef67 100644 --- a/yarn-project/types/src/interfaces/deployed-contract.ts +++ b/yarn-project/types/src/interfaces/deployed-contract.ts @@ -1,6 +1,6 @@ -import { EthAddress } from '@aztec/circuits.js'; +import { CompleteAddress } from '@aztec/circuits.js'; import { ContractArtifact } from '@aztec/foundation/abi'; -import { CompleteAddress } from '@aztec/types'; +import { EthAddress } from '@aztec/foundation/eth-address'; /** * Represents a deployed contract on the Aztec network. diff --git a/yarn-project/types/src/interfaces/pxe.ts b/yarn-project/types/src/interfaces/pxe.ts index 1a2e52c1262..a78cf957bbd 100644 --- a/yarn-project/types/src/interfaces/pxe.ts +++ b/yarn-project/types/src/interfaces/pxe.ts @@ -1,7 +1,6 @@ -import { AztecAddress, Fr, GrumpkinPrivateKey, PartialAddress } from '@aztec/circuits.js'; +import { AztecAddress, CompleteAddress, Fr, GrumpkinPrivateKey, PartialAddress } from '@aztec/circuits.js'; import { AuthWitness, - CompleteAddress, ContractData, ExtendedContractData, ExtendedNote, diff --git a/yarn-project/yarn-project-base/Dockerfile b/yarn-project/yarn-project-base/Dockerfile index af266dceb4a..ee9c4f4f6b7 100644 --- a/yarn-project/yarn-project-base/Dockerfile +++ b/yarn-project/yarn-project-base/Dockerfile @@ -53,7 +53,7 @@ RUN apk update && apk add --no-cache bash jq curl # Copy L1 contracts. COPY --from=contracts /usr/src/l1-contracts /usr/src/l1-contracts # Copy in bb.js -COPY --from=bb.js /usr/src/barretenberg/ts/package /usr/src/barretenberg/ts/package +COPY --from=bb.js /usr/src/barretenberg/ts /usr/src/barretenberg/ts # Copy in nargo COPY --from=noir /usr/src/noir/target/release/nargo /usr/src/noir/target/release/nargo # Copy in noir packages @@ -75,15 +75,19 @@ WORKDIR /usr/src/yarn-project # The dockerignore file ensures the context only contains package.json and tsconfig.json files. COPY . . +# List all included files and hash for debugging. +RUN echo "Context files: " && find . 
-type f | sort && \ + echo -n "Context hash: " && find . -type f -print0 | sort -z | xargs -0 sha256sum | sha256sum + # Install packages and rebuild the global cache with hard links. # TODO: Puppeteer is adding ~300MB to this image due to chrome download (part of e2e). # Switch to using puppeteer-core then it won't download chrome. For now just erase. RUN yarn --immutable && rm -rf /root/.cache/puppeteer && /bin/bash -c '\ - rm -rf /root/.yarn/berry/cache/* && \ - cd .yarn/cache && \ - for F in *; do \ - [[ $F =~ (.*-) ]] && ln $F /root/.yarn/berry/cache/${BASH_REMATCH[1]}8.zip; \ - done' + rm -rf /root/.yarn/berry/cache/* && \ + cd .yarn/cache && \ + for F in *; do \ + [[ $F =~ (.*-) ]] && ln $F /root/.yarn/berry/cache/${BASH_REMATCH[1]}8.zip; \ + done' # If everything's worked properly, we should no longer need access to the network. RUN echo "enableNetwork: false" >> .yarnrc.yml diff --git a/yarn-project/yarn-project-base/Dockerfile.dockerignore b/yarn-project/yarn-project-base/Dockerfile.dockerignore index a6ba1856c45..257a2d74457 100644 --- a/yarn-project/yarn-project-base/Dockerfile.dockerignore +++ b/yarn-project/yarn-project-base/Dockerfile.dockerignore @@ -6,7 +6,9 @@ .* README.md bootstrap.sh -Dockerfile +Dockerfile* +*.tsbuildinfo +node_modules # This is a sticking point, due to the project being under it's own dir. # Need to unexclude the dir and then exclude it's files. @@ -17,10 +19,12 @@ Dockerfile !boxes/blank !boxes/blank-react boxes/*/* +!boxes/*/package.json +!boxes/*/tsconfig.json # Unexclude package.json and yarn.lock files, for detecting any dependency changes. -!**/package.json -!**/package.*.json +!*/package.json +!*/package.*.json !yarn.lock # Unexclude parts of yarn related config as this also affects how dependencies are installed. @@ -30,7 +34,7 @@ boxes/*/* !.yarn/patches # Unexclude tsconfig files for running project reference checks. -!**/tsconfig.json +!*/tsconfig.json # Unexclude scripts we use in the Dockerfile. 
!yarn-project-base/scripts diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 6dc27eb04c8..9ccbcf8dea0 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -277,9 +277,9 @@ __metadata: languageName: unknown linkType: soft -"@aztec/bb.js@portal:../barretenberg/ts/package::locator=%40aztec%2Faztec3-packages%40workspace%3A.": +"@aztec/bb.js@portal:../barretenberg/ts::locator=%40aztec%2Faztec3-packages%40workspace%3A.": version: 0.0.0-use.local - resolution: "@aztec/bb.js@portal:../barretenberg/ts/package::locator=%40aztec%2Faztec3-packages%40workspace%3A." + resolution: "@aztec/bb.js@portal:../barretenberg/ts::locator=%40aztec%2Faztec3-packages%40workspace%3A." dependencies: comlink: ^4.4.1 commander: ^10.0.1