From e3cc7d0fa0d842edcd24f1981b687cbdf057ce1a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Wed, 7 Aug 2024 12:27:50 +0200 Subject: [PATCH 01/61] fix: commonly occurring typo (#7807) --- .github/cache-success/restore-dist/index.js | 3446 ++++++++--------- .github/cache-success/save-dist/index.js | 3446 ++++++++--------- .../crypto/blake3s_full/blake3s.cpp | 2 +- .../merkle_tree/indexed_tree/indexed_tree.hpp | 2 +- .../src/barretenberg/eccvm/eccvm_flavor.hpp | 2 +- .../eccvm_recursive_flavor.hpp | 2 +- .../barretenberg/polynomials/univariate.hpp | 4 +- .../barretenberg/smt_verification/README.md | 4 +- .../stdlib/encryption/ecdsa/ecdsa_impl.hpp | 2 +- .../stdlib/hash/blake2s/blake2s_plookup.cpp | 2 +- .../primitives/bigfield/bigfield_impl.hpp | 4 +- .../primitives/plookup/plookup.test.cpp | 2 +- .../stdlib/primitives/uint/uint.test.cpp | 2 +- .../stdlib_circuit_builders/mega_flavor.hpp | 2 +- .../mega_recursive_flavor.hpp | 2 +- .../ultra_circuit_builder.cpp | 2 +- .../stdlib_circuit_builders/ultra_flavor.hpp | 2 +- .../stdlib_circuit_builders/ultra_keccak.hpp | 2 +- .../ultra_recursive_flavor.hpp | 2 +- .../translator_vm/translator_flavor.hpp | 2 +- .../translator_recursive_flavor.hpp | 2 +- .../src/barretenberg/vm/avm/trace/trace.cpp | 2 +- barretenberg/ts/scripts/build_wasm.sh | 2 +- .../portals/outbox.md | 2 +- noir-projects/aztec-nr/authwit/src/auth.nr | 4 +- .../src/types/balances_map.nr | 2 +- .../token_contract/src/types/balances_map.nr | 2 +- .../src/types/balances_map.nr | 2 +- .../kv_archiver_store/block_body_store.ts | 2 +- .../memory_archiver_store.ts | 2 +- .../src/logs/l1_payload/l1_payload.ts | 2 +- yarn-project/key-store/src/key_store.ts | 2 +- 32 files changed, 3480 insertions(+), 3480 deletions(-) diff --git a/.github/cache-success/restore-dist/index.js b/.github/cache-success/restore-dist/index.js index 995fc0f1a6b..4bddffd644e 100644 --- a/.github/cache-success/restore-dist/index.js +++ 
b/.github/cache-success/restore-dist/index.js @@ -33636,7 +33636,7 @@ class BlobClient extends StorageClient { * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Downloads an Azure Blob to a local file. - * Fails if the the given file path already exits. + * Fails if the given file path already exits. * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. * * @param filePath - @@ -35023,7 +35023,7 @@ class PageBlobClient extends BlobClient { * getPageRangesSegment returns a single segment of page ranges starting from the * specified Marker. Use an empty Marker to start enumeration from the beginning. * After getting a segment, process it, and then call getPageRangesSegment again - * (passing the the previously-returned Marker) to get the next segment. + * (passing the previously-returned Marker) to get the next segment. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page ranges. @@ -35231,7 +35231,7 @@ class PageBlobClient extends BlobClient { * specified Marker for difference between previous snapshot and the target page blob. * Use an empty Marker to start enumeration from the beginning. * After getting a segment, process it, and then call getPageRangesDiffSegment again - * (passing the the previously-returned Marker) to get the next segment. + * (passing the previously-returned Marker) to get the next segment. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page ranges. @@ -36687,7 +36687,7 @@ class ContainerClient extends StorageClient { * listBlobFlatSegment returns a single segment of blobs starting from the * specified Marker. Use an empty Marker to start enumeration from the beginning. * After getting a segment, process it, and then call listBlobsFlatSegment again - * (passing the the previously-returned Marker) to get the next segment. 
+ * (passing the previously-returned Marker) to get the next segment. * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs * * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. @@ -36718,7 +36718,7 @@ class ContainerClient extends StorageClient { * listBlobHierarchySegment returns a single segment of blobs starting from * the specified Marker. Use an empty Marker to start enumeration from the * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment - * again (passing the the previously-returned Marker) to get the next segment. + * again (passing the previously-returned Marker) to get the next segment. * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs * * @param delimiter - The character or string used to define the virtual hierarchy @@ -48872,427 +48872,427 @@ module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; /***/ 4351: /***/ ((module) => { -/****************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. 
-***************************************************************************** */ -/* global global, define, Symbol, Reflect, Promise, SuppressedError */ -var __extends; -var __assign; -var __rest; -var __decorate; -var __param; -var __esDecorate; -var __runInitializers; -var __propKey; -var __setFunctionName; -var __metadata; -var __awaiter; -var __generator; -var __exportStar; -var __values; -var __read; -var __spread; -var __spreadArrays; -var __spreadArray; -var __await; -var __asyncGenerator; -var __asyncDelegator; -var __asyncValues; -var __makeTemplateObject; -var __importStar; -var __importDefault; -var __classPrivateFieldGet; -var __classPrivateFieldSet; -var __classPrivateFieldIn; -var __createBinding; -var __addDisposableResource; -var __disposeResources; -(function (factory) { - var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; - if (typeof define === "function" && define.amd) { - define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); - } - else if ( true && typeof module.exports === "object") { - factory(createExporter(root, createExporter(module.exports))); - } - else { - factory(createExporter(root)); - } - function createExporter(exports, previous) { - if (exports !== root) { - if (typeof Object.create === "function") { - Object.defineProperty(exports, "__esModule", { value: true }); - } - else { - exports.__esModule = true; - } - } - return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; - } -}) -(function (exporter) { - var extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - - __extends = function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); - }; - - __assign = Object.assign || function (t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - - __rest = function (s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; - }; - - __decorate = function (decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - - __param = function (paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } - }; - - __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { - function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } - var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; - var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; - var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); - var _, done = false; - for (var i = decorators.length - 1; i >= 0; i--) { - var context = {}; - for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context.access[p] = contextIn.access[p]; - context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); - if (kind === "accessor") { - if (result === void 0) continue; - if (result === null || typeof result !== "object") throw new TypeError("Object expected"); - if (_ = accept(result.get)) descriptor.get = _; - if (_ = accept(result.set)) descriptor.set = _; - if (_ = accept(result.init)) initializers.unshift(_); - } - else if (_ = accept(result)) { - if (kind === "field") initializers.unshift(_); - else descriptor[key] = _; - } - } - if (target) Object.defineProperty(target, contextIn.name, descriptor); - done = true; - }; - - __runInitializers = function (thisArg, initializers, value) { - var useValue = arguments.length > 2; - for (var i = 0; i < initializers.length; i++) { - value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); - } - return useValue ? value : void 0; - }; - - __propKey = function (x) { - return typeof x === "symbol" ? x : "".concat(x); - }; - - __setFunctionName = function (f, name, prefix) { - if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; - return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); - }; - - __metadata = function (metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); - }; - - __awaiter = function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - - __generator = function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } - }; - - __exportStar = function(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); - }; - - __createBinding = Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); - - __values = function (o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); - }; - - __read = function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; - }; - - /** @deprecated */ - __spread = function () { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; - }; - - /** @deprecated */ - __spreadArrays = function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; - }; - - __spreadArray = function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); - }; - - __await = function (v) { 
- return this instanceof __await ? (this.v = v, this) : new __await(v); - }; - - __asyncGenerator = function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - }; - - __asyncDelegator = function (o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } - }; - - __asyncValues = function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } - }; - - __makeTemplateObject = function (cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; - }; - - var __setModuleDefault = Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - - __importStar = function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; - - __importDefault = function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; - }; - - __classPrivateFieldGet = function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); - }; - - __classPrivateFieldSet = function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; - }; - - __classPrivateFieldIn = function (state, receiver) { - if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); - return typeof state === "function" ? receiver === state : state.has(receiver); - }; - - __addDisposableResource = function (env, value, async) { - if (value !== null && value !== void 0) { - if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); - var dispose; - if (async) { - if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); - dispose = value[Symbol.asyncDispose]; - } - if (dispose === void 0) { - if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); - dispose = value[Symbol.dispose]; - } - if (typeof dispose !== "function") throw new TypeError("Object not disposable."); - env.stack.push({ value: value, dispose: dispose, async: async }); - } - else if (async) { - env.stack.push({ async: true }); - } - return value; - }; - - var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { - var e = new Error(message); - return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; - }; - - __disposeResources = function (env) { - function fail(e) { - env.error = env.hasError ? 
new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; - env.hasError = true; - } - function next() { - while (env.stack.length) { - var rec = env.stack.pop(); - try { - var result = rec.dispose && rec.dispose.call(rec.value); - if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); - } - catch (e) { - fail(e); - } - } - if (env.hasError) throw env.error; - } - return next(); - }; - - exporter("__extends", __extends); - exporter("__assign", __assign); - exporter("__rest", __rest); - exporter("__decorate", __decorate); - exporter("__param", __param); - exporter("__esDecorate", __esDecorate); - exporter("__runInitializers", __runInitializers); - exporter("__propKey", __propKey); - exporter("__setFunctionName", __setFunctionName); - exporter("__metadata", __metadata); - exporter("__awaiter", __awaiter); - exporter("__generator", __generator); - exporter("__exportStar", __exportStar); - exporter("__createBinding", __createBinding); - exporter("__values", __values); - exporter("__read", __read); - exporter("__spread", __spread); - exporter("__spreadArrays", __spreadArrays); - exporter("__spreadArray", __spreadArray); - exporter("__await", __await); - exporter("__asyncGenerator", __asyncGenerator); - exporter("__asyncDelegator", __asyncDelegator); - exporter("__asyncValues", __asyncValues); - exporter("__makeTemplateObject", __makeTemplateObject); - exporter("__importStar", __importStar); - exporter("__importDefault", __importDefault); - exporter("__classPrivateFieldGet", __classPrivateFieldGet); - exporter("__classPrivateFieldSet", __classPrivateFieldSet); - exporter("__classPrivateFieldIn", __classPrivateFieldIn); - exporter("__addDisposableResource", __addDisposableResource); - exporter("__disposeResources", __disposeResources); -}); +/****************************************************************************** +Copyright (c) Microsoft Corporation. 
+ +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, Symbol, Reflect, Promise, SuppressedError */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __esDecorate; +var __runInitializers; +var __propKey; +var __setFunctionName; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +var __addDisposableResource; +var __disposeResources; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? 
this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? 
Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; + }; + + __runInitializers = function (thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; + }; + + __propKey = function (x) { + return typeof x === "symbol" ? x : "".concat(x); + }; + + __setFunctionName = function (f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? 
"".concat(prefix, " ", name) : name }); + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? 
receiver === state : state.has(receiver); + }; + + __addDisposableResource = function (env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; + }; + + var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; + }; + + __disposeResources = function (env) { + function fail(e) { + env.error = env.hasError ? 
new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + function next() { + while (env.stack.length) { + var rec = env.stack.pop(); + try { + var result = rec.dispose && rec.dispose.call(rec.value); + if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + catch (e) { + fail(e); + } + } + if (env.hasError) throw env.error; + } + return next(); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__esDecorate", __esDecorate); + exporter("__runInitializers", __runInitializers); + exporter("__propKey", __propKey); + exporter("__setFunctionName", __setFunctionName); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); + exporter("__addDisposableResource", __addDisposableResource); + exporter("__disposeResources", __disposeResources); +}); /***/ }), @@ -72584,1303 +72584,1303 @@ exports.parseURL = __nccwpck_require__(2158).parseURL; /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { 
"use strict"; - -const punycode = __nccwpck_require__(5477); -const tr46 = __nccwpck_require__(4256); - -const specialSchemes = { - ftp: 21, - file: null, - gopher: 70, - http: 80, - https: 443, - ws: 80, - wss: 443 -}; - -const failure = Symbol("failure"); - -function countSymbols(str) { - return punycode.ucs2.decode(str).length; -} - -function at(input, idx) { - const c = input[idx]; - return isNaN(c) ? undefined : String.fromCodePoint(c); -} - -function isASCIIDigit(c) { - return c >= 0x30 && c <= 0x39; -} - -function isASCIIAlpha(c) { - return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); -} - -function isASCIIAlphanumeric(c) { - return isASCIIAlpha(c) || isASCIIDigit(c); -} - -function isASCIIHex(c) { - return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); -} - -function isSingleDot(buffer) { - return buffer === "." || buffer.toLowerCase() === "%2e"; -} - -function isDoubleDot(buffer) { - buffer = buffer.toLowerCase(); - return buffer === ".." || buffer === "%2e." 
|| buffer === ".%2e" || buffer === "%2e%2e"; -} - -function isWindowsDriveLetterCodePoints(cp1, cp2) { - return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); -} - -function isWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); -} - -function isNormalizedWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; -} - -function containsForbiddenHostCodePoint(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; -} - -function containsForbiddenHostCodePointExcludingPercent(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; -} - -function isSpecialScheme(scheme) { - return specialSchemes[scheme] !== undefined; -} - -function isSpecial(url) { - return isSpecialScheme(url.scheme); -} - -function defaultPort(scheme) { - return specialSchemes[scheme]; -} - -function percentEncode(c) { - let hex = c.toString(16).toUpperCase(); - if (hex.length === 1) { - hex = "0" + hex; - } - - return "%" + hex; -} - -function utf8PercentEncode(c) { - const buf = new Buffer(c); - - let str = ""; - - for (let i = 0; i < buf.length; ++i) { - str += percentEncode(buf[i]); - } - - return str; -} - -function utf8PercentDecode(str) { - const input = new Buffer(str); - const output = []; - for (let i = 0; i < input.length; ++i) { - if (input[i] !== 37) { - output.push(input[i]); - } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { - output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); - i += 2; - } else { - output.push(input[i]); - } - } - return new Buffer(output).toString(); -} - -function isC0ControlPercentEncode(c) { - return c <= 0x1F || c > 0x7E; -} - -const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); -function isPathPercentEncode(c) { - return 
isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); -} - -const extraUserinfoPercentEncodeSet = - new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); -function isUserinfoPercentEncode(c) { - return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); -} - -function percentEncodeChar(c, encodeSetPredicate) { - const cStr = String.fromCodePoint(c); - - if (encodeSetPredicate(c)) { - return utf8PercentEncode(cStr); - } - - return cStr; -} - -function parseIPv4Number(input) { - let R = 10; - - if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { - input = input.substring(2); - R = 16; - } else if (input.length >= 2 && input.charAt(0) === "0") { - input = input.substring(1); - R = 8; - } - - if (input === "") { - return 0; - } - - const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/); - if (regex.test(input)) { - return failure; - } - - return parseInt(input, R); -} - -function parseIPv4(input) { - const parts = input.split("."); - if (parts[parts.length - 1] === "") { - if (parts.length > 1) { - parts.pop(); - } - } - - if (parts.length > 4) { - return input; - } - - const numbers = []; - for (const part of parts) { - if (part === "") { - return input; - } - const n = parseIPv4Number(part); - if (n === failure) { - return input; - } - - numbers.push(n); - } - - for (let i = 0; i < numbers.length - 1; ++i) { - if (numbers[i] > 255) { - return failure; - } - } - if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { - return failure; - } - - let ipv4 = numbers.pop(); - let counter = 0; - - for (const n of numbers) { - ipv4 += n * Math.pow(256, 3 - counter); - ++counter; - } - - return ipv4; -} - -function serializeIPv4(address) { - let output = ""; - let n = address; - - for (let i = 1; i <= 4; ++i) { - output = String(n % 256) + output; - if (i !== 4) { - output = "." 
+ output; - } - n = Math.floor(n / 256); - } - - return output; -} - -function parseIPv6(input) { - const address = [0, 0, 0, 0, 0, 0, 0, 0]; - let pieceIndex = 0; - let compress = null; - let pointer = 0; - - input = punycode.ucs2.decode(input); - - if (input[pointer] === 58) { - if (input[pointer + 1] !== 58) { - return failure; - } - - pointer += 2; - ++pieceIndex; - compress = pieceIndex; - } - - while (pointer < input.length) { - if (pieceIndex === 8) { - return failure; - } - - if (input[pointer] === 58) { - if (compress !== null) { - return failure; - } - ++pointer; - ++pieceIndex; - compress = pieceIndex; - continue; - } - - let value = 0; - let length = 0; - - while (length < 4 && isASCIIHex(input[pointer])) { - value = value * 0x10 + parseInt(at(input, pointer), 16); - ++pointer; - ++length; - } - - if (input[pointer] === 46) { - if (length === 0) { - return failure; - } - - pointer -= length; - - if (pieceIndex > 6) { - return failure; - } - - let numbersSeen = 0; - - while (input[pointer] !== undefined) { - let ipv4Piece = null; - - if (numbersSeen > 0) { - if (input[pointer] === 46 && numbersSeen < 4) { - ++pointer; - } else { - return failure; - } - } - - if (!isASCIIDigit(input[pointer])) { - return failure; - } - - while (isASCIIDigit(input[pointer])) { - const number = parseInt(at(input, pointer)); - if (ipv4Piece === null) { - ipv4Piece = number; - } else if (ipv4Piece === 0) { - return failure; - } else { - ipv4Piece = ipv4Piece * 10 + number; - } - if (ipv4Piece > 255) { - return failure; - } - ++pointer; - } - - address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; - - ++numbersSeen; - - if (numbersSeen === 2 || numbersSeen === 4) { - ++pieceIndex; - } - } - - if (numbersSeen !== 4) { - return failure; - } - - break; - } else if (input[pointer] === 58) { - ++pointer; - if (input[pointer] === undefined) { - return failure; - } - } else if (input[pointer] !== undefined) { - return failure; - } - - address[pieceIndex] = value; - 
++pieceIndex; - } - - if (compress !== null) { - let swaps = pieceIndex - compress; - pieceIndex = 7; - while (pieceIndex !== 0 && swaps > 0) { - const temp = address[compress + swaps - 1]; - address[compress + swaps - 1] = address[pieceIndex]; - address[pieceIndex] = temp; - --pieceIndex; - --swaps; - } - } else if (compress === null && pieceIndex !== 8) { - return failure; - } - - return address; -} - -function serializeIPv6(address) { - let output = ""; - const seqResult = findLongestZeroSequence(address); - const compress = seqResult.idx; - let ignore0 = false; - - for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { - if (ignore0 && address[pieceIndex] === 0) { - continue; - } else if (ignore0) { - ignore0 = false; - } - - if (compress === pieceIndex) { - const separator = pieceIndex === 0 ? "::" : ":"; - output += separator; - ignore0 = true; - continue; - } - - output += address[pieceIndex].toString(16); - - if (pieceIndex !== 7) { - output += ":"; - } - } - - return output; -} - -function parseHost(input, isSpecialArg) { - if (input[0] === "[") { - if (input[input.length - 1] !== "]") { - return failure; - } - - return parseIPv6(input.substring(1, input.length - 1)); - } - - if (!isSpecialArg) { - return parseOpaqueHost(input); - } - - const domain = utf8PercentDecode(input); - const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); - if (asciiDomain === null) { - return failure; - } - - if (containsForbiddenHostCodePoint(asciiDomain)) { - return failure; - } - - const ipv4Host = parseIPv4(asciiDomain); - if (typeof ipv4Host === "number" || ipv4Host === failure) { - return ipv4Host; - } - - return asciiDomain; -} - -function parseOpaqueHost(input) { - if (containsForbiddenHostCodePointExcludingPercent(input)) { - return failure; - } - - let output = ""; - const decoded = punycode.ucs2.decode(input); - for (let i = 0; i < decoded.length; ++i) { - output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); 
- } - return output; -} - -function findLongestZeroSequence(arr) { - let maxIdx = null; - let maxLen = 1; // only find elements > 1 - let currStart = null; - let currLen = 0; - - for (let i = 0; i < arr.length; ++i) { - if (arr[i] !== 0) { - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } - - currStart = null; - currLen = 0; - } else { - if (currStart === null) { - currStart = i; - } - ++currLen; - } - } - - // if trailing zeros - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } - - return { - idx: maxIdx, - len: maxLen - }; -} - -function serializeHost(host) { - if (typeof host === "number") { - return serializeIPv4(host); - } - - // IPv6 serializer - if (host instanceof Array) { - return "[" + serializeIPv6(host) + "]"; - } - - return host; -} - -function trimControlChars(url) { - return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); -} - -function trimTabAndNewline(url) { - return url.replace(/\u0009|\u000A|\u000D/g, ""); -} - -function shortenPath(url) { - const path = url.path; - if (path.length === 0) { - return; - } - if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { - return; - } - - path.pop(); -} - -function includesCredentials(url) { - return url.username !== "" || url.password !== ""; -} - -function cannotHaveAUsernamePasswordPort(url) { - return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; -} - -function isNormalizedWindowsDriveLetter(string) { - return /^[A-Za-z]:$/.test(string); -} - -function URLStateMachine(input, base, encodingOverride, url, stateOverride) { - this.pointer = 0; - this.input = input; - this.base = base || null; - this.encodingOverride = encodingOverride || "utf-8"; - this.stateOverride = stateOverride; - this.url = url; - this.failure = false; - this.parseError = false; - - if (!this.url) { - this.url = { - scheme: "", - username: "", - password: "", - host: null, - port: null, - 
path: [], - query: null, - fragment: null, - - cannotBeABaseURL: false - }; - - const res = trimControlChars(this.input); - if (res !== this.input) { - this.parseError = true; - } - this.input = res; - } - - const res = trimTabAndNewline(this.input); - if (res !== this.input) { - this.parseError = true; - } - this.input = res; - - this.state = stateOverride || "scheme start"; - - this.buffer = ""; - this.atFlag = false; - this.arrFlag = false; - this.passwordTokenSeenFlag = false; - - this.input = punycode.ucs2.decode(this.input); - - for (; this.pointer <= this.input.length; ++this.pointer) { - const c = this.input[this.pointer]; - const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); - - // exec state machine - const ret = this["parse " + this.state](c, cStr); - if (!ret) { - break; // terminate algorithm - } else if (ret === failure) { - this.failure = true; - break; - } - } -} - -URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { - if (isASCIIAlpha(c)) { - this.buffer += cStr.toLowerCase(); - this.state = "scheme"; - } else if (!this.stateOverride) { - this.state = "no scheme"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { - if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { - this.buffer += cStr.toLowerCase(); - } else if (c === 58) { - if (this.stateOverride) { - if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { - return false; - } - - if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { - return false; - } - - if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { - return false; - } - - if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { - return false; - } - } - this.url.scheme = this.buffer; - this.buffer = ""; - if (this.stateOverride) { - return false; - } - if (this.url.scheme === 
"file") { - if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { - this.parseError = true; - } - this.state = "file"; - } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { - this.state = "special relative or authority"; - } else if (isSpecial(this.url)) { - this.state = "special authority slashes"; - } else if (this.input[this.pointer + 1] === 47) { - this.state = "path or authority"; - ++this.pointer; - } else { - this.url.cannotBeABaseURL = true; - this.url.path.push(""); - this.state = "cannot-be-a-base-URL path"; - } - } else if (!this.stateOverride) { - this.buffer = ""; - this.state = "no scheme"; - this.pointer = -1; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { - if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { - return failure; - } else if (this.base.cannotBeABaseURL && c === 35) { - this.url.scheme = this.base.scheme; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.url.cannotBeABaseURL = true; - this.state = "fragment"; - } else if (this.base.scheme === "file") { - this.state = "file"; - --this.pointer; - } else { - this.state = "relative"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { - if (c === 47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - this.parseError = true; - this.state = "relative"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { - if (c === 47) { - this.state = "authority"; - } else { - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse relative"] = 
function parseRelative(c) { - this.url.scheme = this.base.scheme; - if (isNaN(c)) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 47) { - this.state = "relative slash"; - } else if (c === 63) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else if (isSpecial(this.url) && c === 92) { - this.parseError = true; - this.state = "relative slash"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(0, this.base.path.length - 1); - - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { - if (isSpecial(this.url) && (c === 47 || c === 92)) { - if (c === 92) { - this.parseError = true; - } - this.state = "special authority ignore slashes"; - } else if (c === 47) { - this.state = "authority"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { - if (c === 
47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - this.parseError = true; - this.state = "special authority ignore slashes"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { - if (c !== 47 && c !== 92) { - this.state = "authority"; - --this.pointer; - } else { - this.parseError = true; - } - - return true; -}; - -URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { - if (c === 64) { - this.parseError = true; - if (this.atFlag) { - this.buffer = "%40" + this.buffer; - } - this.atFlag = true; - - // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars - const len = countSymbols(this.buffer); - for (let pointer = 0; pointer < len; ++pointer) { - const codePoint = this.buffer.codePointAt(pointer); - - if (codePoint === 58 && !this.passwordTokenSeenFlag) { - this.passwordTokenSeenFlag = true; - continue; - } - const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); - if (this.passwordTokenSeenFlag) { - this.url.password += encodedCodePoints; - } else { - this.url.username += encodedCodePoints; - } - } - this.buffer = ""; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - if (this.atFlag && this.buffer === "") { - this.parseError = true; - return failure; - } - this.pointer -= countSymbols(this.buffer) + 1; - this.buffer = ""; - this.state = "host"; - } else { - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse hostname"] = -URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { - if (this.stateOverride && this.url.scheme === "file") { - --this.pointer; - this.state = "file host"; - } else if (c === 58 && !this.arrFlag) { - if (this.buffer === "") { - this.parseError = true; - 
return failure; - } - - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - - this.url.host = host; - this.buffer = ""; - this.state = "port"; - if (this.stateOverride === "hostname") { - return false; - } - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - --this.pointer; - if (isSpecial(this.url) && this.buffer === "") { - this.parseError = true; - return failure; - } else if (this.stateOverride && this.buffer === "" && - (includesCredentials(this.url) || this.url.port !== null)) { - this.parseError = true; - return false; - } - - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - - this.url.host = host; - this.buffer = ""; - this.state = "path start"; - if (this.stateOverride) { - return false; - } - } else { - if (c === 91) { - this.arrFlag = true; - } else if (c === 93) { - this.arrFlag = false; - } - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { - if (isASCIIDigit(c)) { - this.buffer += cStr; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92) || - this.stateOverride) { - if (this.buffer !== "") { - const port = parseInt(this.buffer); - if (port > Math.pow(2, 16) - 1) { - this.parseError = true; - return failure; - } - this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; - this.buffer = ""; - } - if (this.stateOverride) { - return false; - } - this.state = "path start"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); - -URLStateMachine.prototype["parse file"] = function parseFile(c) { - this.url.scheme = "file"; - - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; - } - this.state = "file slash"; - } else if (this.base !== null && this.base.scheme === "file") { - if (isNaN(c)) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 63) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else { - if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points - !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || - (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points - !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - shortenPath(this.url); - } else { - this.parseError = true; - } - - this.state = "path"; - --this.pointer; - } - } else { - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; - } - this.state = "file host"; - } else { - if (this.base !== null && this.base.scheme === "file") { - if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { - this.url.path.push(this.base.path[0]); - } else { - this.url.host = 
this.base.host; - } - } - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { - if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { - --this.pointer; - if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { - this.parseError = true; - this.state = "path"; - } else if (this.buffer === "") { - this.url.host = ""; - if (this.stateOverride) { - return false; - } - this.state = "path start"; - } else { - let host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - if (host === "localhost") { - host = ""; - } - this.url.host = host; - - if (this.stateOverride) { - return false; - } - - this.buffer = ""; - this.state = "path start"; - } - } else { - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { - if (isSpecial(this.url)) { - if (c === 92) { - this.parseError = true; - } - this.state = "path"; - - if (c !== 47 && c !== 92) { - --this.pointer; - } - } else if (!this.stateOverride && c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (!this.stateOverride && c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else if (c !== undefined) { - this.state = "path"; - if (c !== 47) { - --this.pointer; - } - } - - return true; -}; - -URLStateMachine.prototype["parse path"] = function parsePath(c) { - if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || - (!this.stateOverride && (c === 63 || c === 35))) { - if (isSpecial(this.url) && c === 92) { - this.parseError = true; - } - - if (isDoubleDot(this.buffer)) { - shortenPath(this.url); - if (c !== 47 && !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } - } else if (isSingleDot(this.buffer) && c !== 47 && - !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } else if (!isSingleDot(this.buffer)) { - if 
(this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { - if (this.url.host !== "" && this.url.host !== null) { - this.parseError = true; - this.url.host = ""; - } - this.buffer = this.buffer[0] + ":"; - } - this.url.path.push(this.buffer); - } - this.buffer = ""; - if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { - while (this.url.path.length > 1 && this.url.path[0] === "") { - this.parseError = true; - this.url.path.shift(); - } - } - if (c === 63) { - this.url.query = ""; - this.state = "query"; - } - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } - } else { - // TODO: If c is not a URL code point and not "%", parse error. - - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.buffer += percentEncodeChar(c, isPathPercentEncode); - } - - return true; -}; - -URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { - if (c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else { - // TODO: Add: not a URL code point - if (!isNaN(c) && c !== 37) { - this.parseError = true; - } - - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - if (!isNaN(c)) { - this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); - } - } - - return true; -}; - -URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { - if (isNaN(c) || (!this.stateOverride && c === 35)) { - if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { - this.encodingOverride = "utf-8"; - } - - const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead - for (let i = 0; i < buffer.length; ++i) { - 
if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || - buffer[i] === 0x3C || buffer[i] === 0x3E) { - this.url.query += percentEncode(buffer[i]); - } else { - this.url.query += String.fromCodePoint(buffer[i]); - } - } - - this.buffer = ""; - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } - } else { - // TODO: If c is not a URL code point and not "%", parse error. - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { - if (isNaN(c)) { // do nothing - } else if (c === 0x0) { - this.parseError = true; - } else { - // TODO: If c is not a URL code point and not "%", parse error. - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); - } - - return true; -}; - -function serializeURL(url, excludeFragment) { - let output = url.scheme + ":"; - if (url.host !== null) { - output += "//"; - - if (url.username !== "" || url.password !== "") { - output += url.username; - if (url.password !== "") { - output += ":" + url.password; - } - output += "@"; - } - - output += serializeHost(url.host); - - if (url.port !== null) { - output += ":" + url.port; - } - } else if (url.host === null && url.scheme === "file") { - output += "//"; - } - - if (url.cannotBeABaseURL) { - output += url.path[0]; - } else { - for (const string of url.path) { - output += "/" + string; - } - } - - if (url.query !== null) { - output += "?" 
+ url.query; - } - - if (!excludeFragment && url.fragment !== null) { - output += "#" + url.fragment; - } - - return output; -} - -function serializeOrigin(tuple) { - let result = tuple.scheme + "://"; - result += serializeHost(tuple.host); - - if (tuple.port !== null) { - result += ":" + tuple.port; - } - - return result; -} - -module.exports.serializeURL = serializeURL; - -module.exports.serializeURLOrigin = function (url) { - // https://url.spec.whatwg.org/#concept-url-origin - switch (url.scheme) { - case "blob": - try { - return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); - } catch (e) { - // serializing an opaque origin returns "null" - return "null"; - } - case "ftp": - case "gopher": - case "http": - case "https": - case "ws": - case "wss": - return serializeOrigin({ - scheme: url.scheme, - host: url.host, - port: url.port - }); - case "file": - // spec says "exercise to the reader", chrome says "file://" - return "file://"; - default: - // serializing an opaque origin returns "null" - return "null"; - } -}; - -module.exports.basicURLParse = function (input, options) { - if (options === undefined) { - options = {}; - } - - const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); - if (usm.failure) { - return "failure"; - } - - return usm.url; -}; - -module.exports.setTheUsername = function (url, username) { - url.username = ""; - const decoded = punycode.ucs2.decode(username); - for (let i = 0; i < decoded.length; ++i) { - url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; - -module.exports.setThePassword = function (url, password) { - url.password = ""; - const decoded = punycode.ucs2.decode(password); - for (let i = 0; i < decoded.length; ++i) { - url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; - -module.exports.serializeHost = serializeHost; - -module.exports.cannotHaveAUsernamePasswordPort = 
cannotHaveAUsernamePasswordPort; - -module.exports.serializeInteger = function (integer) { - return String(integer); -}; - -module.exports.parseURL = function (input, options) { - if (options === undefined) { - options = {}; - } - - // We don't handle blobs, so this just delegates: - return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); -}; + +const punycode = __nccwpck_require__(5477); +const tr46 = __nccwpck_require__(4256); + +const specialSchemes = { + ftp: 21, + file: null, + gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443 +}; + +const failure = Symbol("failure"); + +function countSymbols(str) { + return punycode.ucs2.decode(str).length; +} + +function at(input, idx) { + const c = input[idx]; + return isNaN(c) ? undefined : String.fromCodePoint(c); +} + +function isASCIIDigit(c) { + return c >= 0x30 && c <= 0x39; +} + +function isASCIIAlpha(c) { + return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); +} + +function isASCIIAlphanumeric(c) { + return isASCIIAlpha(c) || isASCIIDigit(c); +} + +function isASCIIHex(c) { + return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); +} + +function isSingleDot(buffer) { + return buffer === "." || buffer.toLowerCase() === "%2e"; +} + +function isDoubleDot(buffer) { + buffer = buffer.toLowerCase(); + return buffer === ".." || buffer === "%2e." 
|| buffer === ".%2e" || buffer === "%2e%2e"; +} + +function isWindowsDriveLetterCodePoints(cp1, cp2) { + return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); +} + +function isWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); +} + +function isNormalizedWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; +} + +function containsForbiddenHostCodePoint(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function containsForbiddenHostCodePointExcludingPercent(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function isSpecialScheme(scheme) { + return specialSchemes[scheme] !== undefined; +} + +function isSpecial(url) { + return isSpecialScheme(url.scheme); +} + +function defaultPort(scheme) { + return specialSchemes[scheme]; +} + +function percentEncode(c) { + let hex = c.toString(16).toUpperCase(); + if (hex.length === 1) { + hex = "0" + hex; + } + + return "%" + hex; +} + +function utf8PercentEncode(c) { + const buf = new Buffer(c); + + let str = ""; + + for (let i = 0; i < buf.length; ++i) { + str += percentEncode(buf[i]); + } + + return str; +} + +function utf8PercentDecode(str) { + const input = new Buffer(str); + const output = []; + for (let i = 0; i < input.length; ++i) { + if (input[i] !== 37) { + output.push(input[i]); + } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { + output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); + i += 2; + } else { + output.push(input[i]); + } + } + return new Buffer(output).toString(); +} + +function isC0ControlPercentEncode(c) { + return c <= 0x1F || c > 0x7E; +} + +const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); +function isPathPercentEncode(c) { + return 
isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); +} + +const extraUserinfoPercentEncodeSet = + new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); +function isUserinfoPercentEncode(c) { + return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); +} + +function percentEncodeChar(c, encodeSetPredicate) { + const cStr = String.fromCodePoint(c); + + if (encodeSetPredicate(c)) { + return utf8PercentEncode(cStr); + } + + return cStr; +} + +function parseIPv4Number(input) { + let R = 10; + + if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { + input = input.substring(2); + R = 16; + } else if (input.length >= 2 && input.charAt(0) === "0") { + input = input.substring(1); + R = 8; + } + + if (input === "") { + return 0; + } + + const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/); + if (regex.test(input)) { + return failure; + } + + return parseInt(input, R); +} + +function parseIPv4(input) { + const parts = input.split("."); + if (parts[parts.length - 1] === "") { + if (parts.length > 1) { + parts.pop(); + } + } + + if (parts.length > 4) { + return input; + } + + const numbers = []; + for (const part of parts) { + if (part === "") { + return input; + } + const n = parseIPv4Number(part); + if (n === failure) { + return input; + } + + numbers.push(n); + } + + for (let i = 0; i < numbers.length - 1; ++i) { + if (numbers[i] > 255) { + return failure; + } + } + if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { + return failure; + } + + let ipv4 = numbers.pop(); + let counter = 0; + + for (const n of numbers) { + ipv4 += n * Math.pow(256, 3 - counter); + ++counter; + } + + return ipv4; +} + +function serializeIPv4(address) { + let output = ""; + let n = address; + + for (let i = 1; i <= 4; ++i) { + output = String(n % 256) + output; + if (i !== 4) { + output = "." 
+ output; + } + n = Math.floor(n / 256); + } + + return output; +} + +function parseIPv6(input) { + const address = [0, 0, 0, 0, 0, 0, 0, 0]; + let pieceIndex = 0; + let compress = null; + let pointer = 0; + + input = punycode.ucs2.decode(input); + + if (input[pointer] === 58) { + if (input[pointer + 1] !== 58) { + return failure; + } + + pointer += 2; + ++pieceIndex; + compress = pieceIndex; + } + + while (pointer < input.length) { + if (pieceIndex === 8) { + return failure; + } + + if (input[pointer] === 58) { + if (compress !== null) { + return failure; + } + ++pointer; + ++pieceIndex; + compress = pieceIndex; + continue; + } + + let value = 0; + let length = 0; + + while (length < 4 && isASCIIHex(input[pointer])) { + value = value * 0x10 + parseInt(at(input, pointer), 16); + ++pointer; + ++length; + } + + if (input[pointer] === 46) { + if (length === 0) { + return failure; + } + + pointer -= length; + + if (pieceIndex > 6) { + return failure; + } + + let numbersSeen = 0; + + while (input[pointer] !== undefined) { + let ipv4Piece = null; + + if (numbersSeen > 0) { + if (input[pointer] === 46 && numbersSeen < 4) { + ++pointer; + } else { + return failure; + } + } + + if (!isASCIIDigit(input[pointer])) { + return failure; + } + + while (isASCIIDigit(input[pointer])) { + const number = parseInt(at(input, pointer)); + if (ipv4Piece === null) { + ipv4Piece = number; + } else if (ipv4Piece === 0) { + return failure; + } else { + ipv4Piece = ipv4Piece * 10 + number; + } + if (ipv4Piece > 255) { + return failure; + } + ++pointer; + } + + address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; + + ++numbersSeen; + + if (numbersSeen === 2 || numbersSeen === 4) { + ++pieceIndex; + } + } + + if (numbersSeen !== 4) { + return failure; + } + + break; + } else if (input[pointer] === 58) { + ++pointer; + if (input[pointer] === undefined) { + return failure; + } + } else if (input[pointer] !== undefined) { + return failure; + } + + address[pieceIndex] = value; + 
++pieceIndex; + } + + if (compress !== null) { + let swaps = pieceIndex - compress; + pieceIndex = 7; + while (pieceIndex !== 0 && swaps > 0) { + const temp = address[compress + swaps - 1]; + address[compress + swaps - 1] = address[pieceIndex]; + address[pieceIndex] = temp; + --pieceIndex; + --swaps; + } + } else if (compress === null && pieceIndex !== 8) { + return failure; + } + + return address; +} + +function serializeIPv6(address) { + let output = ""; + const seqResult = findLongestZeroSequence(address); + const compress = seqResult.idx; + let ignore0 = false; + + for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { + if (ignore0 && address[pieceIndex] === 0) { + continue; + } else if (ignore0) { + ignore0 = false; + } + + if (compress === pieceIndex) { + const separator = pieceIndex === 0 ? "::" : ":"; + output += separator; + ignore0 = true; + continue; + } + + output += address[pieceIndex].toString(16); + + if (pieceIndex !== 7) { + output += ":"; + } + } + + return output; +} + +function parseHost(input, isSpecialArg) { + if (input[0] === "[") { + if (input[input.length - 1] !== "]") { + return failure; + } + + return parseIPv6(input.substring(1, input.length - 1)); + } + + if (!isSpecialArg) { + return parseOpaqueHost(input); + } + + const domain = utf8PercentDecode(input); + const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); + if (asciiDomain === null) { + return failure; + } + + if (containsForbiddenHostCodePoint(asciiDomain)) { + return failure; + } + + const ipv4Host = parseIPv4(asciiDomain); + if (typeof ipv4Host === "number" || ipv4Host === failure) { + return ipv4Host; + } + + return asciiDomain; +} + +function parseOpaqueHost(input) { + if (containsForbiddenHostCodePointExcludingPercent(input)) { + return failure; + } + + let output = ""; + const decoded = punycode.ucs2.decode(input); + for (let i = 0; i < decoded.length; ++i) { + output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); 
+ } + return output; +} + +function findLongestZeroSequence(arr) { + let maxIdx = null; + let maxLen = 1; // only find elements > 1 + let currStart = null; + let currLen = 0; + + for (let i = 0; i < arr.length; ++i) { + if (arr[i] !== 0) { + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + currStart = null; + currLen = 0; + } else { + if (currStart === null) { + currStart = i; + } + ++currLen; + } + } + + // if trailing zeros + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + return { + idx: maxIdx, + len: maxLen + }; +} + +function serializeHost(host) { + if (typeof host === "number") { + return serializeIPv4(host); + } + + // IPv6 serializer + if (host instanceof Array) { + return "[" + serializeIPv6(host) + "]"; + } + + return host; +} + +function trimControlChars(url) { + return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); +} + +function trimTabAndNewline(url) { + return url.replace(/\u0009|\u000A|\u000D/g, ""); +} + +function shortenPath(url) { + const path = url.path; + if (path.length === 0) { + return; + } + if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { + return; + } + + path.pop(); +} + +function includesCredentials(url) { + return url.username !== "" || url.password !== ""; +} + +function cannotHaveAUsernamePasswordPort(url) { + return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; +} + +function isNormalizedWindowsDriveLetter(string) { + return /^[A-Za-z]:$/.test(string); +} + +function URLStateMachine(input, base, encodingOverride, url, stateOverride) { + this.pointer = 0; + this.input = input; + this.base = base || null; + this.encodingOverride = encodingOverride || "utf-8"; + this.stateOverride = stateOverride; + this.url = url; + this.failure = false; + this.parseError = false; + + if (!this.url) { + this.url = { + scheme: "", + username: "", + password: "", + host: null, + port: null, + 
path: [], + query: null, + fragment: null, + + cannotBeABaseURL: false + }; + + const res = trimControlChars(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + } + + const res = trimTabAndNewline(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + + this.state = stateOverride || "scheme start"; + + this.buffer = ""; + this.atFlag = false; + this.arrFlag = false; + this.passwordTokenSeenFlag = false; + + this.input = punycode.ucs2.decode(this.input); + + for (; this.pointer <= this.input.length; ++this.pointer) { + const c = this.input[this.pointer]; + const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); + + // exec state machine + const ret = this["parse " + this.state](c, cStr); + if (!ret) { + break; // terminate algorithm + } else if (ret === failure) { + this.failure = true; + break; + } + } +} + +URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { + if (isASCIIAlpha(c)) { + this.buffer += cStr.toLowerCase(); + this.state = "scheme"; + } else if (!this.stateOverride) { + this.state = "no scheme"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { + if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { + this.buffer += cStr.toLowerCase(); + } else if (c === 58) { + if (this.stateOverride) { + if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { + return false; + } + + if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { + return false; + } + + if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { + return false; + } + + if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { + return false; + } + } + this.url.scheme = this.buffer; + this.buffer = ""; + if (this.stateOverride) { + return false; + } + if (this.url.scheme === 
"file") { + if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { + this.parseError = true; + } + this.state = "file"; + } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { + this.state = "special relative or authority"; + } else if (isSpecial(this.url)) { + this.state = "special authority slashes"; + } else if (this.input[this.pointer + 1] === 47) { + this.state = "path or authority"; + ++this.pointer; + } else { + this.url.cannotBeABaseURL = true; + this.url.path.push(""); + this.state = "cannot-be-a-base-URL path"; + } + } else if (!this.stateOverride) { + this.buffer = ""; + this.state = "no scheme"; + this.pointer = -1; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { + if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { + return failure; + } else if (this.base.cannotBeABaseURL && c === 35) { + this.url.scheme = this.base.scheme; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.url.cannotBeABaseURL = true; + this.state = "fragment"; + } else if (this.base.scheme === "file") { + this.state = "file"; + --this.pointer; + } else { + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { + if (c === 47) { + this.state = "authority"; + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative"] = 
function parseRelative(c) { + this.url.scheme = this.base.scheme; + if (isNaN(c)) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 47) { + this.state = "relative slash"; + } else if (c === 63) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else if (isSpecial(this.url) && c === 92) { + this.parseError = true; + this.state = "relative slash"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(0, this.base.path.length - 1); + + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { + if (isSpecial(this.url) && (c === 47 || c === 92)) { + if (c === 92) { + this.parseError = true; + } + this.state = "special authority ignore slashes"; + } else if (c === 47) { + this.state = "authority"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { + if (c === 
47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "special authority ignore slashes"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { + if (c !== 47 && c !== 92) { + this.state = "authority"; + --this.pointer; + } else { + this.parseError = true; + } + + return true; +}; + +URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { + if (c === 64) { + this.parseError = true; + if (this.atFlag) { + this.buffer = "%40" + this.buffer; + } + this.atFlag = true; + + // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars + const len = countSymbols(this.buffer); + for (let pointer = 0; pointer < len; ++pointer) { + const codePoint = this.buffer.codePointAt(pointer); + + if (codePoint === 58 && !this.passwordTokenSeenFlag) { + this.passwordTokenSeenFlag = true; + continue; + } + const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); + if (this.passwordTokenSeenFlag) { + this.url.password += encodedCodePoints; + } else { + this.url.username += encodedCodePoints; + } + } + this.buffer = ""; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + if (this.atFlag && this.buffer === "") { + this.parseError = true; + return failure; + } + this.pointer -= countSymbols(this.buffer) + 1; + this.buffer = ""; + this.state = "host"; + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse hostname"] = +URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { + if (this.stateOverride && this.url.scheme === "file") { + --this.pointer; + this.state = "file host"; + } else if (c === 58 && !this.arrFlag) { + if (this.buffer === "") { + this.parseError = true; + 
return failure; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "port"; + if (this.stateOverride === "hostname") { + return false; + } + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + --this.pointer; + if (isSpecial(this.url) && this.buffer === "") { + this.parseError = true; + return failure; + } else if (this.stateOverride && this.buffer === "" && + (includesCredentials(this.url) || this.url.port !== null)) { + this.parseError = true; + return false; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "path start"; + if (this.stateOverride) { + return false; + } + } else { + if (c === 91) { + this.arrFlag = true; + } else if (c === 93) { + this.arrFlag = false; + } + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { + if (isASCIIDigit(c)) { + this.buffer += cStr; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92) || + this.stateOverride) { + if (this.buffer !== "") { + const port = parseInt(this.buffer); + if (port > Math.pow(2, 16) - 1) { + this.parseError = true; + return failure; + } + this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; + this.buffer = ""; + } + if (this.stateOverride) { + return false; + } + this.state = "path start"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); + +URLStateMachine.prototype["parse file"] = function parseFile(c) { + this.url.scheme = "file"; + + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file slash"; + } else if (this.base !== null && this.base.scheme === "file") { + if (isNaN(c)) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 63) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else { + if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points + !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || + (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points + !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + shortenPath(this.url); + } else { + this.parseError = true; + } + + this.state = "path"; + --this.pointer; + } + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file host"; + } else { + if (this.base !== null && this.base.scheme === "file") { + if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { + this.url.path.push(this.base.path[0]); + } else { + this.url.host = 
this.base.host; + } + } + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { + if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { + --this.pointer; + if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { + this.parseError = true; + this.state = "path"; + } else if (this.buffer === "") { + this.url.host = ""; + if (this.stateOverride) { + return false; + } + this.state = "path start"; + } else { + let host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + if (host === "localhost") { + host = ""; + } + this.url.host = host; + + if (this.stateOverride) { + return false; + } + + this.buffer = ""; + this.state = "path start"; + } + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { + if (isSpecial(this.url)) { + if (c === 92) { + this.parseError = true; + } + this.state = "path"; + + if (c !== 47 && c !== 92) { + --this.pointer; + } + } else if (!this.stateOverride && c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (!this.stateOverride && c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else if (c !== undefined) { + this.state = "path"; + if (c !== 47) { + --this.pointer; + } + } + + return true; +}; + +URLStateMachine.prototype["parse path"] = function parsePath(c) { + if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || + (!this.stateOverride && (c === 63 || c === 35))) { + if (isSpecial(this.url) && c === 92) { + this.parseError = true; + } + + if (isDoubleDot(this.buffer)) { + shortenPath(this.url); + if (c !== 47 && !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } + } else if (isSingleDot(this.buffer) && c !== 47 && + !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } else if (!isSingleDot(this.buffer)) { + if 
(this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { + if (this.url.host !== "" && this.url.host !== null) { + this.parseError = true; + this.url.host = ""; + } + this.buffer = this.buffer[0] + ":"; + } + this.url.path.push(this.buffer); + } + this.buffer = ""; + if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { + while (this.url.path.length > 1 && this.url.path[0] === "") { + this.parseError = true; + this.url.path.shift(); + } + } + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += percentEncodeChar(c, isPathPercentEncode); + } + + return true; +}; + +URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else { + // TODO: Add: not a URL code point + if (!isNaN(c) && c !== 37) { + this.parseError = true; + } + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + if (!isNaN(c)) { + this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); + } + } + + return true; +}; + +URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { + if (isNaN(c) || (!this.stateOverride && c === 35)) { + if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { + this.encodingOverride = "utf-8"; + } + + const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead + for (let i = 0; i < buffer.length; ++i) { + 
if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || + buffer[i] === 0x3C || buffer[i] === 0x3E) { + this.url.query += percentEncode(buffer[i]); + } else { + this.url.query += String.fromCodePoint(buffer[i]); + } + } + + this.buffer = ""; + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { + if (isNaN(c)) { // do nothing + } else if (c === 0x0) { + this.parseError = true; + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); + } + + return true; +}; + +function serializeURL(url, excludeFragment) { + let output = url.scheme + ":"; + if (url.host !== null) { + output += "//"; + + if (url.username !== "" || url.password !== "") { + output += url.username; + if (url.password !== "") { + output += ":" + url.password; + } + output += "@"; + } + + output += serializeHost(url.host); + + if (url.port !== null) { + output += ":" + url.port; + } + } else if (url.host === null && url.scheme === "file") { + output += "//"; + } + + if (url.cannotBeABaseURL) { + output += url.path[0]; + } else { + for (const string of url.path) { + output += "/" + string; + } + } + + if (url.query !== null) { + output += "?" 
+ url.query; + } + + if (!excludeFragment && url.fragment !== null) { + output += "#" + url.fragment; + } + + return output; +} + +function serializeOrigin(tuple) { + let result = tuple.scheme + "://"; + result += serializeHost(tuple.host); + + if (tuple.port !== null) { + result += ":" + tuple.port; + } + + return result; +} + +module.exports.serializeURL = serializeURL; + +module.exports.serializeURLOrigin = function (url) { + // https://url.spec.whatwg.org/#concept-url-origin + switch (url.scheme) { + case "blob": + try { + return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); + } catch (e) { + // serializing an opaque origin returns "null" + return "null"; + } + case "ftp": + case "gopher": + case "http": + case "https": + case "ws": + case "wss": + return serializeOrigin({ + scheme: url.scheme, + host: url.host, + port: url.port + }); + case "file": + // spec says "exercise to the reader", chrome says "file://" + return "file://"; + default: + // serializing an opaque origin returns "null" + return "null"; + } +}; + +module.exports.basicURLParse = function (input, options) { + if (options === undefined) { + options = {}; + } + + const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); + if (usm.failure) { + return "failure"; + } + + return usm.url; +}; + +module.exports.setTheUsername = function (url, username) { + url.username = ""; + const decoded = punycode.ucs2.decode(username); + for (let i = 0; i < decoded.length; ++i) { + url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.setThePassword = function (url, password) { + url.password = ""; + const decoded = punycode.ucs2.decode(password); + for (let i = 0; i < decoded.length; ++i) { + url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.serializeHost = serializeHost; + +module.exports.cannotHaveAUsernamePasswordPort = 
cannotHaveAUsernamePasswordPort; + +module.exports.serializeInteger = function (integer) { + return String(integer); +}; + +module.exports.parseURL = function (input, options) { + if (options === undefined) { + options = {}; + } + + // We don't handle blobs, so this just delegates: + return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); +}; /***/ }), diff --git a/.github/cache-success/save-dist/index.js b/.github/cache-success/save-dist/index.js index 3c41ee6a5ce..889a4136678 100644 --- a/.github/cache-success/save-dist/index.js +++ b/.github/cache-success/save-dist/index.js @@ -33636,7 +33636,7 @@ class BlobClient extends StorageClient { * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Downloads an Azure Blob to a local file. - * Fails if the the given file path already exits. + * Fails if the given file path already exits. * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. * * @param filePath - @@ -35023,7 +35023,7 @@ class PageBlobClient extends BlobClient { * getPageRangesSegment returns a single segment of page ranges starting from the * specified Marker. Use an empty Marker to start enumeration from the beginning. * After getting a segment, process it, and then call getPageRangesSegment again - * (passing the the previously-returned Marker) to get the next segment. + * (passing the previously-returned Marker) to get the next segment. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page ranges. @@ -35231,7 +35231,7 @@ class PageBlobClient extends BlobClient { * specified Marker for difference between previous snapshot and the target page blob. * Use an empty Marker to start enumeration from the beginning. * After getting a segment, process it, and then call getPageRangesDiffSegment again - * (passing the the previously-returned Marker) to get the next segment. 
+ * (passing the previously-returned Marker) to get the next segment. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page ranges. @@ -36687,7 +36687,7 @@ class ContainerClient extends StorageClient { * listBlobFlatSegment returns a single segment of blobs starting from the * specified Marker. Use an empty Marker to start enumeration from the beginning. * After getting a segment, process it, and then call listBlobsFlatSegment again - * (passing the the previously-returned Marker) to get the next segment. + * (passing the previously-returned Marker) to get the next segment. * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs * * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. @@ -36718,7 +36718,7 @@ class ContainerClient extends StorageClient { * listBlobHierarchySegment returns a single segment of blobs starting from * the specified Marker. Use an empty Marker to start enumeration from the * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment - * again (passing the the previously-returned Marker) to get the next segment. + * again (passing the previously-returned Marker) to get the next segment. * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs * * @param delimiter - The character or string used to define the virtual hierarchy @@ -48872,427 +48872,427 @@ module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; /***/ 4351: /***/ ((module) => { -/****************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -/* global global, define, Symbol, Reflect, Promise, SuppressedError */ -var __extends; -var __assign; -var __rest; -var __decorate; -var __param; -var __esDecorate; -var __runInitializers; -var __propKey; -var __setFunctionName; -var __metadata; -var __awaiter; -var __generator; -var __exportStar; -var __values; -var __read; -var __spread; -var __spreadArrays; -var __spreadArray; -var __await; -var __asyncGenerator; -var __asyncDelegator; -var __asyncValues; -var __makeTemplateObject; -var __importStar; -var __importDefault; -var __classPrivateFieldGet; -var __classPrivateFieldSet; -var __classPrivateFieldIn; -var __createBinding; -var __addDisposableResource; -var __disposeResources; -(function (factory) { - var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? 
this : {}; - if (typeof define === "function" && define.amd) { - define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); - } - else if ( true && typeof module.exports === "object") { - factory(createExporter(root, createExporter(module.exports))); - } - else { - factory(createExporter(root)); - } - function createExporter(exports, previous) { - if (exports !== root) { - if (typeof Object.create === "function") { - Object.defineProperty(exports, "__esModule", { value: true }); - } - else { - exports.__esModule = true; - } - } - return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; - } -}) -(function (exporter) { - var extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - - __extends = function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); - }; - - __assign = Object.assign || function (t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - - __rest = function (s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; - }; - - __decorate = function (decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - - __param = function (paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } - }; - - __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { - function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } - var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; - var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; - var descriptor = descriptorIn || (target ? 
Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); - var _, done = false; - for (var i = decorators.length - 1; i >= 0; i--) { - var context = {}; - for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context.access[p] = contextIn.access[p]; - context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context); - if (kind === "accessor") { - if (result === void 0) continue; - if (result === null || typeof result !== "object") throw new TypeError("Object expected"); - if (_ = accept(result.get)) descriptor.get = _; - if (_ = accept(result.set)) descriptor.set = _; - if (_ = accept(result.init)) initializers.unshift(_); - } - else if (_ = accept(result)) { - if (kind === "field") initializers.unshift(_); - else descriptor[key] = _; - } - } - if (target) Object.defineProperty(target, contextIn.name, descriptor); - done = true; - }; - - __runInitializers = function (thisArg, initializers, value) { - var useValue = arguments.length > 2; - for (var i = 0; i < initializers.length; i++) { - value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); - } - return useValue ? value : void 0; - }; - - __propKey = function (x) { - return typeof x === "symbol" ? x : "".concat(x); - }; - - __setFunctionName = function (f, name, prefix) { - if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; - return Object.defineProperty(f, "name", { configurable: true, value: prefix ? 
"".concat(prefix, " ", name) : name }); - }; - - __metadata = function (metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); - }; - - __awaiter = function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - - __generator = function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } - }; - - __exportStar = function(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); - }; - - __createBinding = Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); - - __values = function (o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); - }; - - __read = function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; - }; - - /** @deprecated */ - __spread = function () { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; - }; - - /** @deprecated */ - __spreadArrays = function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; - }; - - __spreadArray = function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); - }; - - __await = function (v) { - return this instanceof __await ? (this.v = v, this) : new __await(v); - }; - - __asyncGenerator = function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - }; - - __asyncDelegator = function (o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } - }; - - __asyncValues = function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } - }; - - __makeTemplateObject = function (cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; - }; - - var __setModuleDefault = Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - - __importStar = function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; - - __importDefault = function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; - }; - - __classPrivateFieldGet = function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); - }; - - __classPrivateFieldSet = function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; - }; - - __classPrivateFieldIn = function (state, receiver) { - if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); - return typeof state === "function" ? 
receiver === state : state.has(receiver); - }; - - __addDisposableResource = function (env, value, async) { - if (value !== null && value !== void 0) { - if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); - var dispose; - if (async) { - if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); - dispose = value[Symbol.asyncDispose]; - } - if (dispose === void 0) { - if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); - dispose = value[Symbol.dispose]; - } - if (typeof dispose !== "function") throw new TypeError("Object not disposable."); - env.stack.push({ value: value, dispose: dispose, async: async }); - } - else if (async) { - env.stack.push({ async: true }); - } - return value; - }; - - var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { - var e = new Error(message); - return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; - }; - - __disposeResources = function (env) { - function fail(e) { - env.error = env.hasError ? 
new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; - env.hasError = true; - } - function next() { - while (env.stack.length) { - var rec = env.stack.pop(); - try { - var result = rec.dispose && rec.dispose.call(rec.value); - if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); - } - catch (e) { - fail(e); - } - } - if (env.hasError) throw env.error; - } - return next(); - }; - - exporter("__extends", __extends); - exporter("__assign", __assign); - exporter("__rest", __rest); - exporter("__decorate", __decorate); - exporter("__param", __param); - exporter("__esDecorate", __esDecorate); - exporter("__runInitializers", __runInitializers); - exporter("__propKey", __propKey); - exporter("__setFunctionName", __setFunctionName); - exporter("__metadata", __metadata); - exporter("__awaiter", __awaiter); - exporter("__generator", __generator); - exporter("__exportStar", __exportStar); - exporter("__createBinding", __createBinding); - exporter("__values", __values); - exporter("__read", __read); - exporter("__spread", __spread); - exporter("__spreadArrays", __spreadArrays); - exporter("__spreadArray", __spreadArray); - exporter("__await", __await); - exporter("__asyncGenerator", __asyncGenerator); - exporter("__asyncDelegator", __asyncDelegator); - exporter("__asyncValues", __asyncValues); - exporter("__makeTemplateObject", __makeTemplateObject); - exporter("__importStar", __importStar); - exporter("__importDefault", __importDefault); - exporter("__classPrivateFieldGet", __classPrivateFieldGet); - exporter("__classPrivateFieldSet", __classPrivateFieldSet); - exporter("__classPrivateFieldIn", __classPrivateFieldIn); - exporter("__addDisposableResource", __addDisposableResource); - exporter("__disposeResources", __disposeResources); -}); +/****************************************************************************** +Copyright (c) Microsoft Corporation. 
+ +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, Symbol, Reflect, Promise, SuppressedError */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __esDecorate; +var __runInitializers; +var __propKey; +var __setFunctionName; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +var __addDisposableResource; +var __disposeResources; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? 
this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? 
Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; + }; + + __runInitializers = function (thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; + }; + + __propKey = function (x) { + return typeof x === "symbol" ? x : "".concat(x); + }; + + __setFunctionName = function (f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? 
"".concat(prefix, " ", name) : name }); + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? 
receiver === state : state.has(receiver); + }; + + __addDisposableResource = function (env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; + }; + + var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; + }; + + __disposeResources = function (env) { + function fail(e) { + env.error = env.hasError ? 
new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + function next() { + while (env.stack.length) { + var rec = env.stack.pop(); + try { + var result = rec.dispose && rec.dispose.call(rec.value); + if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + catch (e) { + fail(e); + } + } + if (env.hasError) throw env.error; + } + return next(); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__esDecorate", __esDecorate); + exporter("__runInitializers", __runInitializers); + exporter("__propKey", __propKey); + exporter("__setFunctionName", __setFunctionName); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); + exporter("__addDisposableResource", __addDisposableResource); + exporter("__disposeResources", __disposeResources); +}); /***/ }), @@ -72584,1303 +72584,1303 @@ exports.parseURL = __nccwpck_require__(2158).parseURL; /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { 
"use strict"; - -const punycode = __nccwpck_require__(5477); -const tr46 = __nccwpck_require__(4256); - -const specialSchemes = { - ftp: 21, - file: null, - gopher: 70, - http: 80, - https: 443, - ws: 80, - wss: 443 -}; - -const failure = Symbol("failure"); - -function countSymbols(str) { - return punycode.ucs2.decode(str).length; -} - -function at(input, idx) { - const c = input[idx]; - return isNaN(c) ? undefined : String.fromCodePoint(c); -} - -function isASCIIDigit(c) { - return c >= 0x30 && c <= 0x39; -} - -function isASCIIAlpha(c) { - return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); -} - -function isASCIIAlphanumeric(c) { - return isASCIIAlpha(c) || isASCIIDigit(c); -} - -function isASCIIHex(c) { - return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); -} - -function isSingleDot(buffer) { - return buffer === "." || buffer.toLowerCase() === "%2e"; -} - -function isDoubleDot(buffer) { - buffer = buffer.toLowerCase(); - return buffer === ".." || buffer === "%2e." 
|| buffer === ".%2e" || buffer === "%2e%2e"; -} - -function isWindowsDriveLetterCodePoints(cp1, cp2) { - return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); -} - -function isWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); -} - -function isNormalizedWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; -} - -function containsForbiddenHostCodePoint(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; -} - -function containsForbiddenHostCodePointExcludingPercent(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; -} - -function isSpecialScheme(scheme) { - return specialSchemes[scheme] !== undefined; -} - -function isSpecial(url) { - return isSpecialScheme(url.scheme); -} - -function defaultPort(scheme) { - return specialSchemes[scheme]; -} - -function percentEncode(c) { - let hex = c.toString(16).toUpperCase(); - if (hex.length === 1) { - hex = "0" + hex; - } - - return "%" + hex; -} - -function utf8PercentEncode(c) { - const buf = new Buffer(c); - - let str = ""; - - for (let i = 0; i < buf.length; ++i) { - str += percentEncode(buf[i]); - } - - return str; -} - -function utf8PercentDecode(str) { - const input = new Buffer(str); - const output = []; - for (let i = 0; i < input.length; ++i) { - if (input[i] !== 37) { - output.push(input[i]); - } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { - output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); - i += 2; - } else { - output.push(input[i]); - } - } - return new Buffer(output).toString(); -} - -function isC0ControlPercentEncode(c) { - return c <= 0x1F || c > 0x7E; -} - -const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); -function isPathPercentEncode(c) { - return 
isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); -} - -const extraUserinfoPercentEncodeSet = - new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); -function isUserinfoPercentEncode(c) { - return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); -} - -function percentEncodeChar(c, encodeSetPredicate) { - const cStr = String.fromCodePoint(c); - - if (encodeSetPredicate(c)) { - return utf8PercentEncode(cStr); - } - - return cStr; -} - -function parseIPv4Number(input) { - let R = 10; - - if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { - input = input.substring(2); - R = 16; - } else if (input.length >= 2 && input.charAt(0) === "0") { - input = input.substring(1); - R = 8; - } - - if (input === "") { - return 0; - } - - const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/); - if (regex.test(input)) { - return failure; - } - - return parseInt(input, R); -} - -function parseIPv4(input) { - const parts = input.split("."); - if (parts[parts.length - 1] === "") { - if (parts.length > 1) { - parts.pop(); - } - } - - if (parts.length > 4) { - return input; - } - - const numbers = []; - for (const part of parts) { - if (part === "") { - return input; - } - const n = parseIPv4Number(part); - if (n === failure) { - return input; - } - - numbers.push(n); - } - - for (let i = 0; i < numbers.length - 1; ++i) { - if (numbers[i] > 255) { - return failure; - } - } - if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { - return failure; - } - - let ipv4 = numbers.pop(); - let counter = 0; - - for (const n of numbers) { - ipv4 += n * Math.pow(256, 3 - counter); - ++counter; - } - - return ipv4; -} - -function serializeIPv4(address) { - let output = ""; - let n = address; - - for (let i = 1; i <= 4; ++i) { - output = String(n % 256) + output; - if (i !== 4) { - output = "." 
+ output; - } - n = Math.floor(n / 256); - } - - return output; -} - -function parseIPv6(input) { - const address = [0, 0, 0, 0, 0, 0, 0, 0]; - let pieceIndex = 0; - let compress = null; - let pointer = 0; - - input = punycode.ucs2.decode(input); - - if (input[pointer] === 58) { - if (input[pointer + 1] !== 58) { - return failure; - } - - pointer += 2; - ++pieceIndex; - compress = pieceIndex; - } - - while (pointer < input.length) { - if (pieceIndex === 8) { - return failure; - } - - if (input[pointer] === 58) { - if (compress !== null) { - return failure; - } - ++pointer; - ++pieceIndex; - compress = pieceIndex; - continue; - } - - let value = 0; - let length = 0; - - while (length < 4 && isASCIIHex(input[pointer])) { - value = value * 0x10 + parseInt(at(input, pointer), 16); - ++pointer; - ++length; - } - - if (input[pointer] === 46) { - if (length === 0) { - return failure; - } - - pointer -= length; - - if (pieceIndex > 6) { - return failure; - } - - let numbersSeen = 0; - - while (input[pointer] !== undefined) { - let ipv4Piece = null; - - if (numbersSeen > 0) { - if (input[pointer] === 46 && numbersSeen < 4) { - ++pointer; - } else { - return failure; - } - } - - if (!isASCIIDigit(input[pointer])) { - return failure; - } - - while (isASCIIDigit(input[pointer])) { - const number = parseInt(at(input, pointer)); - if (ipv4Piece === null) { - ipv4Piece = number; - } else if (ipv4Piece === 0) { - return failure; - } else { - ipv4Piece = ipv4Piece * 10 + number; - } - if (ipv4Piece > 255) { - return failure; - } - ++pointer; - } - - address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; - - ++numbersSeen; - - if (numbersSeen === 2 || numbersSeen === 4) { - ++pieceIndex; - } - } - - if (numbersSeen !== 4) { - return failure; - } - - break; - } else if (input[pointer] === 58) { - ++pointer; - if (input[pointer] === undefined) { - return failure; - } - } else if (input[pointer] !== undefined) { - return failure; - } - - address[pieceIndex] = value; - 
++pieceIndex; - } - - if (compress !== null) { - let swaps = pieceIndex - compress; - pieceIndex = 7; - while (pieceIndex !== 0 && swaps > 0) { - const temp = address[compress + swaps - 1]; - address[compress + swaps - 1] = address[pieceIndex]; - address[pieceIndex] = temp; - --pieceIndex; - --swaps; - } - } else if (compress === null && pieceIndex !== 8) { - return failure; - } - - return address; -} - -function serializeIPv6(address) { - let output = ""; - const seqResult = findLongestZeroSequence(address); - const compress = seqResult.idx; - let ignore0 = false; - - for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { - if (ignore0 && address[pieceIndex] === 0) { - continue; - } else if (ignore0) { - ignore0 = false; - } - - if (compress === pieceIndex) { - const separator = pieceIndex === 0 ? "::" : ":"; - output += separator; - ignore0 = true; - continue; - } - - output += address[pieceIndex].toString(16); - - if (pieceIndex !== 7) { - output += ":"; - } - } - - return output; -} - -function parseHost(input, isSpecialArg) { - if (input[0] === "[") { - if (input[input.length - 1] !== "]") { - return failure; - } - - return parseIPv6(input.substring(1, input.length - 1)); - } - - if (!isSpecialArg) { - return parseOpaqueHost(input); - } - - const domain = utf8PercentDecode(input); - const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); - if (asciiDomain === null) { - return failure; - } - - if (containsForbiddenHostCodePoint(asciiDomain)) { - return failure; - } - - const ipv4Host = parseIPv4(asciiDomain); - if (typeof ipv4Host === "number" || ipv4Host === failure) { - return ipv4Host; - } - - return asciiDomain; -} - -function parseOpaqueHost(input) { - if (containsForbiddenHostCodePointExcludingPercent(input)) { - return failure; - } - - let output = ""; - const decoded = punycode.ucs2.decode(input); - for (let i = 0; i < decoded.length; ++i) { - output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); 
- } - return output; -} - -function findLongestZeroSequence(arr) { - let maxIdx = null; - let maxLen = 1; // only find elements > 1 - let currStart = null; - let currLen = 0; - - for (let i = 0; i < arr.length; ++i) { - if (arr[i] !== 0) { - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } - - currStart = null; - currLen = 0; - } else { - if (currStart === null) { - currStart = i; - } - ++currLen; - } - } - - // if trailing zeros - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } - - return { - idx: maxIdx, - len: maxLen - }; -} - -function serializeHost(host) { - if (typeof host === "number") { - return serializeIPv4(host); - } - - // IPv6 serializer - if (host instanceof Array) { - return "[" + serializeIPv6(host) + "]"; - } - - return host; -} - -function trimControlChars(url) { - return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); -} - -function trimTabAndNewline(url) { - return url.replace(/\u0009|\u000A|\u000D/g, ""); -} - -function shortenPath(url) { - const path = url.path; - if (path.length === 0) { - return; - } - if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { - return; - } - - path.pop(); -} - -function includesCredentials(url) { - return url.username !== "" || url.password !== ""; -} - -function cannotHaveAUsernamePasswordPort(url) { - return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; -} - -function isNormalizedWindowsDriveLetter(string) { - return /^[A-Za-z]:$/.test(string); -} - -function URLStateMachine(input, base, encodingOverride, url, stateOverride) { - this.pointer = 0; - this.input = input; - this.base = base || null; - this.encodingOverride = encodingOverride || "utf-8"; - this.stateOverride = stateOverride; - this.url = url; - this.failure = false; - this.parseError = false; - - if (!this.url) { - this.url = { - scheme: "", - username: "", - password: "", - host: null, - port: null, - 
path: [], - query: null, - fragment: null, - - cannotBeABaseURL: false - }; - - const res = trimControlChars(this.input); - if (res !== this.input) { - this.parseError = true; - } - this.input = res; - } - - const res = trimTabAndNewline(this.input); - if (res !== this.input) { - this.parseError = true; - } - this.input = res; - - this.state = stateOverride || "scheme start"; - - this.buffer = ""; - this.atFlag = false; - this.arrFlag = false; - this.passwordTokenSeenFlag = false; - - this.input = punycode.ucs2.decode(this.input); - - for (; this.pointer <= this.input.length; ++this.pointer) { - const c = this.input[this.pointer]; - const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); - - // exec state machine - const ret = this["parse " + this.state](c, cStr); - if (!ret) { - break; // terminate algorithm - } else if (ret === failure) { - this.failure = true; - break; - } - } -} - -URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { - if (isASCIIAlpha(c)) { - this.buffer += cStr.toLowerCase(); - this.state = "scheme"; - } else if (!this.stateOverride) { - this.state = "no scheme"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { - if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { - this.buffer += cStr.toLowerCase(); - } else if (c === 58) { - if (this.stateOverride) { - if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { - return false; - } - - if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { - return false; - } - - if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { - return false; - } - - if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { - return false; - } - } - this.url.scheme = this.buffer; - this.buffer = ""; - if (this.stateOverride) { - return false; - } - if (this.url.scheme === 
"file") { - if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { - this.parseError = true; - } - this.state = "file"; - } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { - this.state = "special relative or authority"; - } else if (isSpecial(this.url)) { - this.state = "special authority slashes"; - } else if (this.input[this.pointer + 1] === 47) { - this.state = "path or authority"; - ++this.pointer; - } else { - this.url.cannotBeABaseURL = true; - this.url.path.push(""); - this.state = "cannot-be-a-base-URL path"; - } - } else if (!this.stateOverride) { - this.buffer = ""; - this.state = "no scheme"; - this.pointer = -1; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { - if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { - return failure; - } else if (this.base.cannotBeABaseURL && c === 35) { - this.url.scheme = this.base.scheme; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.url.cannotBeABaseURL = true; - this.state = "fragment"; - } else if (this.base.scheme === "file") { - this.state = "file"; - --this.pointer; - } else { - this.state = "relative"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { - if (c === 47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - this.parseError = true; - this.state = "relative"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { - if (c === 47) { - this.state = "authority"; - } else { - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse relative"] = 
function parseRelative(c) { - this.url.scheme = this.base.scheme; - if (isNaN(c)) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 47) { - this.state = "relative slash"; - } else if (c === 63) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else if (isSpecial(this.url) && c === 92) { - this.parseError = true; - this.state = "relative slash"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(0, this.base.path.length - 1); - - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { - if (isSpecial(this.url) && (c === 47 || c === 92)) { - if (c === 92) { - this.parseError = true; - } - this.state = "special authority ignore slashes"; - } else if (c === 47) { - this.state = "authority"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { - if (c === 
47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - this.parseError = true; - this.state = "special authority ignore slashes"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { - if (c !== 47 && c !== 92) { - this.state = "authority"; - --this.pointer; - } else { - this.parseError = true; - } - - return true; -}; - -URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { - if (c === 64) { - this.parseError = true; - if (this.atFlag) { - this.buffer = "%40" + this.buffer; - } - this.atFlag = true; - - // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars - const len = countSymbols(this.buffer); - for (let pointer = 0; pointer < len; ++pointer) { - const codePoint = this.buffer.codePointAt(pointer); - - if (codePoint === 58 && !this.passwordTokenSeenFlag) { - this.passwordTokenSeenFlag = true; - continue; - } - const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); - if (this.passwordTokenSeenFlag) { - this.url.password += encodedCodePoints; - } else { - this.url.username += encodedCodePoints; - } - } - this.buffer = ""; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - if (this.atFlag && this.buffer === "") { - this.parseError = true; - return failure; - } - this.pointer -= countSymbols(this.buffer) + 1; - this.buffer = ""; - this.state = "host"; - } else { - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse hostname"] = -URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { - if (this.stateOverride && this.url.scheme === "file") { - --this.pointer; - this.state = "file host"; - } else if (c === 58 && !this.arrFlag) { - if (this.buffer === "") { - this.parseError = true; - 
return failure; - } - - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - - this.url.host = host; - this.buffer = ""; - this.state = "port"; - if (this.stateOverride === "hostname") { - return false; - } - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - --this.pointer; - if (isSpecial(this.url) && this.buffer === "") { - this.parseError = true; - return failure; - } else if (this.stateOverride && this.buffer === "" && - (includesCredentials(this.url) || this.url.port !== null)) { - this.parseError = true; - return false; - } - - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - - this.url.host = host; - this.buffer = ""; - this.state = "path start"; - if (this.stateOverride) { - return false; - } - } else { - if (c === 91) { - this.arrFlag = true; - } else if (c === 93) { - this.arrFlag = false; - } - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { - if (isASCIIDigit(c)) { - this.buffer += cStr; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92) || - this.stateOverride) { - if (this.buffer !== "") { - const port = parseInt(this.buffer); - if (port > Math.pow(2, 16) - 1) { - this.parseError = true; - return failure; - } - this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; - this.buffer = ""; - } - if (this.stateOverride) { - return false; - } - this.state = "path start"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); - -URLStateMachine.prototype["parse file"] = function parseFile(c) { - this.url.scheme = "file"; - - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; - } - this.state = "file slash"; - } else if (this.base !== null && this.base.scheme === "file") { - if (isNaN(c)) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 63) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else { - if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points - !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || - (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points - !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - shortenPath(this.url); - } else { - this.parseError = true; - } - - this.state = "path"; - --this.pointer; - } - } else { - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; - } - this.state = "file host"; - } else { - if (this.base !== null && this.base.scheme === "file") { - if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { - this.url.path.push(this.base.path[0]); - } else { - this.url.host = 
this.base.host; - } - } - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { - if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { - --this.pointer; - if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { - this.parseError = true; - this.state = "path"; - } else if (this.buffer === "") { - this.url.host = ""; - if (this.stateOverride) { - return false; - } - this.state = "path start"; - } else { - let host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - if (host === "localhost") { - host = ""; - } - this.url.host = host; - - if (this.stateOverride) { - return false; - } - - this.buffer = ""; - this.state = "path start"; - } - } else { - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { - if (isSpecial(this.url)) { - if (c === 92) { - this.parseError = true; - } - this.state = "path"; - - if (c !== 47 && c !== 92) { - --this.pointer; - } - } else if (!this.stateOverride && c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (!this.stateOverride && c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else if (c !== undefined) { - this.state = "path"; - if (c !== 47) { - --this.pointer; - } - } - - return true; -}; - -URLStateMachine.prototype["parse path"] = function parsePath(c) { - if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || - (!this.stateOverride && (c === 63 || c === 35))) { - if (isSpecial(this.url) && c === 92) { - this.parseError = true; - } - - if (isDoubleDot(this.buffer)) { - shortenPath(this.url); - if (c !== 47 && !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } - } else if (isSingleDot(this.buffer) && c !== 47 && - !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } else if (!isSingleDot(this.buffer)) { - if 
(this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { - if (this.url.host !== "" && this.url.host !== null) { - this.parseError = true; - this.url.host = ""; - } - this.buffer = this.buffer[0] + ":"; - } - this.url.path.push(this.buffer); - } - this.buffer = ""; - if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { - while (this.url.path.length > 1 && this.url.path[0] === "") { - this.parseError = true; - this.url.path.shift(); - } - } - if (c === 63) { - this.url.query = ""; - this.state = "query"; - } - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } - } else { - // TODO: If c is not a URL code point and not "%", parse error. - - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.buffer += percentEncodeChar(c, isPathPercentEncode); - } - - return true; -}; - -URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { - if (c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else { - // TODO: Add: not a URL code point - if (!isNaN(c) && c !== 37) { - this.parseError = true; - } - - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - if (!isNaN(c)) { - this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); - } - } - - return true; -}; - -URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { - if (isNaN(c) || (!this.stateOverride && c === 35)) { - if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { - this.encodingOverride = "utf-8"; - } - - const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead - for (let i = 0; i < buffer.length; ++i) { - 
if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || - buffer[i] === 0x3C || buffer[i] === 0x3E) { - this.url.query += percentEncode(buffer[i]); - } else { - this.url.query += String.fromCodePoint(buffer[i]); - } - } - - this.buffer = ""; - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } - } else { - // TODO: If c is not a URL code point and not "%", parse error. - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { - if (isNaN(c)) { // do nothing - } else if (c === 0x0) { - this.parseError = true; - } else { - // TODO: If c is not a URL code point and not "%", parse error. - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); - } - - return true; -}; - -function serializeURL(url, excludeFragment) { - let output = url.scheme + ":"; - if (url.host !== null) { - output += "//"; - - if (url.username !== "" || url.password !== "") { - output += url.username; - if (url.password !== "") { - output += ":" + url.password; - } - output += "@"; - } - - output += serializeHost(url.host); - - if (url.port !== null) { - output += ":" + url.port; - } - } else if (url.host === null && url.scheme === "file") { - output += "//"; - } - - if (url.cannotBeABaseURL) { - output += url.path[0]; - } else { - for (const string of url.path) { - output += "/" + string; - } - } - - if (url.query !== null) { - output += "?" 
+ url.query; - } - - if (!excludeFragment && url.fragment !== null) { - output += "#" + url.fragment; - } - - return output; -} - -function serializeOrigin(tuple) { - let result = tuple.scheme + "://"; - result += serializeHost(tuple.host); - - if (tuple.port !== null) { - result += ":" + tuple.port; - } - - return result; -} - -module.exports.serializeURL = serializeURL; - -module.exports.serializeURLOrigin = function (url) { - // https://url.spec.whatwg.org/#concept-url-origin - switch (url.scheme) { - case "blob": - try { - return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); - } catch (e) { - // serializing an opaque origin returns "null" - return "null"; - } - case "ftp": - case "gopher": - case "http": - case "https": - case "ws": - case "wss": - return serializeOrigin({ - scheme: url.scheme, - host: url.host, - port: url.port - }); - case "file": - // spec says "exercise to the reader", chrome says "file://" - return "file://"; - default: - // serializing an opaque origin returns "null" - return "null"; - } -}; - -module.exports.basicURLParse = function (input, options) { - if (options === undefined) { - options = {}; - } - - const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); - if (usm.failure) { - return "failure"; - } - - return usm.url; -}; - -module.exports.setTheUsername = function (url, username) { - url.username = ""; - const decoded = punycode.ucs2.decode(username); - for (let i = 0; i < decoded.length; ++i) { - url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; - -module.exports.setThePassword = function (url, password) { - url.password = ""; - const decoded = punycode.ucs2.decode(password); - for (let i = 0; i < decoded.length; ++i) { - url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; - -module.exports.serializeHost = serializeHost; - -module.exports.cannotHaveAUsernamePasswordPort = 
cannotHaveAUsernamePasswordPort; - -module.exports.serializeInteger = function (integer) { - return String(integer); -}; - -module.exports.parseURL = function (input, options) { - if (options === undefined) { - options = {}; - } - - // We don't handle blobs, so this just delegates: - return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); -}; + +const punycode = __nccwpck_require__(5477); +const tr46 = __nccwpck_require__(4256); + +const specialSchemes = { + ftp: 21, + file: null, + gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443 +}; + +const failure = Symbol("failure"); + +function countSymbols(str) { + return punycode.ucs2.decode(str).length; +} + +function at(input, idx) { + const c = input[idx]; + return isNaN(c) ? undefined : String.fromCodePoint(c); +} + +function isASCIIDigit(c) { + return c >= 0x30 && c <= 0x39; +} + +function isASCIIAlpha(c) { + return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); +} + +function isASCIIAlphanumeric(c) { + return isASCIIAlpha(c) || isASCIIDigit(c); +} + +function isASCIIHex(c) { + return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); +} + +function isSingleDot(buffer) { + return buffer === "." || buffer.toLowerCase() === "%2e"; +} + +function isDoubleDot(buffer) { + buffer = buffer.toLowerCase(); + return buffer === ".." || buffer === "%2e." 
|| buffer === ".%2e" || buffer === "%2e%2e"; +} + +function isWindowsDriveLetterCodePoints(cp1, cp2) { + return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); +} + +function isWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); +} + +function isNormalizedWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; +} + +function containsForbiddenHostCodePoint(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function containsForbiddenHostCodePointExcludingPercent(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function isSpecialScheme(scheme) { + return specialSchemes[scheme] !== undefined; +} + +function isSpecial(url) { + return isSpecialScheme(url.scheme); +} + +function defaultPort(scheme) { + return specialSchemes[scheme]; +} + +function percentEncode(c) { + let hex = c.toString(16).toUpperCase(); + if (hex.length === 1) { + hex = "0" + hex; + } + + return "%" + hex; +} + +function utf8PercentEncode(c) { + const buf = new Buffer(c); + + let str = ""; + + for (let i = 0; i < buf.length; ++i) { + str += percentEncode(buf[i]); + } + + return str; +} + +function utf8PercentDecode(str) { + const input = new Buffer(str); + const output = []; + for (let i = 0; i < input.length; ++i) { + if (input[i] !== 37) { + output.push(input[i]); + } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { + output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); + i += 2; + } else { + output.push(input[i]); + } + } + return new Buffer(output).toString(); +} + +function isC0ControlPercentEncode(c) { + return c <= 0x1F || c > 0x7E; +} + +const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); +function isPathPercentEncode(c) { + return 
isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); +} + +const extraUserinfoPercentEncodeSet = + new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); +function isUserinfoPercentEncode(c) { + return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); +} + +function percentEncodeChar(c, encodeSetPredicate) { + const cStr = String.fromCodePoint(c); + + if (encodeSetPredicate(c)) { + return utf8PercentEncode(cStr); + } + + return cStr; +} + +function parseIPv4Number(input) { + let R = 10; + + if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { + input = input.substring(2); + R = 16; + } else if (input.length >= 2 && input.charAt(0) === "0") { + input = input.substring(1); + R = 8; + } + + if (input === "") { + return 0; + } + + const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/); + if (regex.test(input)) { + return failure; + } + + return parseInt(input, R); +} + +function parseIPv4(input) { + const parts = input.split("."); + if (parts[parts.length - 1] === "") { + if (parts.length > 1) { + parts.pop(); + } + } + + if (parts.length > 4) { + return input; + } + + const numbers = []; + for (const part of parts) { + if (part === "") { + return input; + } + const n = parseIPv4Number(part); + if (n === failure) { + return input; + } + + numbers.push(n); + } + + for (let i = 0; i < numbers.length - 1; ++i) { + if (numbers[i] > 255) { + return failure; + } + } + if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { + return failure; + } + + let ipv4 = numbers.pop(); + let counter = 0; + + for (const n of numbers) { + ipv4 += n * Math.pow(256, 3 - counter); + ++counter; + } + + return ipv4; +} + +function serializeIPv4(address) { + let output = ""; + let n = address; + + for (let i = 1; i <= 4; ++i) { + output = String(n % 256) + output; + if (i !== 4) { + output = "." 
+ output; + } + n = Math.floor(n / 256); + } + + return output; +} + +function parseIPv6(input) { + const address = [0, 0, 0, 0, 0, 0, 0, 0]; + let pieceIndex = 0; + let compress = null; + let pointer = 0; + + input = punycode.ucs2.decode(input); + + if (input[pointer] === 58) { + if (input[pointer + 1] !== 58) { + return failure; + } + + pointer += 2; + ++pieceIndex; + compress = pieceIndex; + } + + while (pointer < input.length) { + if (pieceIndex === 8) { + return failure; + } + + if (input[pointer] === 58) { + if (compress !== null) { + return failure; + } + ++pointer; + ++pieceIndex; + compress = pieceIndex; + continue; + } + + let value = 0; + let length = 0; + + while (length < 4 && isASCIIHex(input[pointer])) { + value = value * 0x10 + parseInt(at(input, pointer), 16); + ++pointer; + ++length; + } + + if (input[pointer] === 46) { + if (length === 0) { + return failure; + } + + pointer -= length; + + if (pieceIndex > 6) { + return failure; + } + + let numbersSeen = 0; + + while (input[pointer] !== undefined) { + let ipv4Piece = null; + + if (numbersSeen > 0) { + if (input[pointer] === 46 && numbersSeen < 4) { + ++pointer; + } else { + return failure; + } + } + + if (!isASCIIDigit(input[pointer])) { + return failure; + } + + while (isASCIIDigit(input[pointer])) { + const number = parseInt(at(input, pointer)); + if (ipv4Piece === null) { + ipv4Piece = number; + } else if (ipv4Piece === 0) { + return failure; + } else { + ipv4Piece = ipv4Piece * 10 + number; + } + if (ipv4Piece > 255) { + return failure; + } + ++pointer; + } + + address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; + + ++numbersSeen; + + if (numbersSeen === 2 || numbersSeen === 4) { + ++pieceIndex; + } + } + + if (numbersSeen !== 4) { + return failure; + } + + break; + } else if (input[pointer] === 58) { + ++pointer; + if (input[pointer] === undefined) { + return failure; + } + } else if (input[pointer] !== undefined) { + return failure; + } + + address[pieceIndex] = value; + 
++pieceIndex; + } + + if (compress !== null) { + let swaps = pieceIndex - compress; + pieceIndex = 7; + while (pieceIndex !== 0 && swaps > 0) { + const temp = address[compress + swaps - 1]; + address[compress + swaps - 1] = address[pieceIndex]; + address[pieceIndex] = temp; + --pieceIndex; + --swaps; + } + } else if (compress === null && pieceIndex !== 8) { + return failure; + } + + return address; +} + +function serializeIPv6(address) { + let output = ""; + const seqResult = findLongestZeroSequence(address); + const compress = seqResult.idx; + let ignore0 = false; + + for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { + if (ignore0 && address[pieceIndex] === 0) { + continue; + } else if (ignore0) { + ignore0 = false; + } + + if (compress === pieceIndex) { + const separator = pieceIndex === 0 ? "::" : ":"; + output += separator; + ignore0 = true; + continue; + } + + output += address[pieceIndex].toString(16); + + if (pieceIndex !== 7) { + output += ":"; + } + } + + return output; +} + +function parseHost(input, isSpecialArg) { + if (input[0] === "[") { + if (input[input.length - 1] !== "]") { + return failure; + } + + return parseIPv6(input.substring(1, input.length - 1)); + } + + if (!isSpecialArg) { + return parseOpaqueHost(input); + } + + const domain = utf8PercentDecode(input); + const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); + if (asciiDomain === null) { + return failure; + } + + if (containsForbiddenHostCodePoint(asciiDomain)) { + return failure; + } + + const ipv4Host = parseIPv4(asciiDomain); + if (typeof ipv4Host === "number" || ipv4Host === failure) { + return ipv4Host; + } + + return asciiDomain; +} + +function parseOpaqueHost(input) { + if (containsForbiddenHostCodePointExcludingPercent(input)) { + return failure; + } + + let output = ""; + const decoded = punycode.ucs2.decode(input); + for (let i = 0; i < decoded.length; ++i) { + output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); 
+ } + return output; +} + +function findLongestZeroSequence(arr) { + let maxIdx = null; + let maxLen = 1; // only find elements > 1 + let currStart = null; + let currLen = 0; + + for (let i = 0; i < arr.length; ++i) { + if (arr[i] !== 0) { + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + currStart = null; + currLen = 0; + } else { + if (currStart === null) { + currStart = i; + } + ++currLen; + } + } + + // if trailing zeros + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + return { + idx: maxIdx, + len: maxLen + }; +} + +function serializeHost(host) { + if (typeof host === "number") { + return serializeIPv4(host); + } + + // IPv6 serializer + if (host instanceof Array) { + return "[" + serializeIPv6(host) + "]"; + } + + return host; +} + +function trimControlChars(url) { + return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); +} + +function trimTabAndNewline(url) { + return url.replace(/\u0009|\u000A|\u000D/g, ""); +} + +function shortenPath(url) { + const path = url.path; + if (path.length === 0) { + return; + } + if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { + return; + } + + path.pop(); +} + +function includesCredentials(url) { + return url.username !== "" || url.password !== ""; +} + +function cannotHaveAUsernamePasswordPort(url) { + return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; +} + +function isNormalizedWindowsDriveLetter(string) { + return /^[A-Za-z]:$/.test(string); +} + +function URLStateMachine(input, base, encodingOverride, url, stateOverride) { + this.pointer = 0; + this.input = input; + this.base = base || null; + this.encodingOverride = encodingOverride || "utf-8"; + this.stateOverride = stateOverride; + this.url = url; + this.failure = false; + this.parseError = false; + + if (!this.url) { + this.url = { + scheme: "", + username: "", + password: "", + host: null, + port: null, + 
path: [], + query: null, + fragment: null, + + cannotBeABaseURL: false + }; + + const res = trimControlChars(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + } + + const res = trimTabAndNewline(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + + this.state = stateOverride || "scheme start"; + + this.buffer = ""; + this.atFlag = false; + this.arrFlag = false; + this.passwordTokenSeenFlag = false; + + this.input = punycode.ucs2.decode(this.input); + + for (; this.pointer <= this.input.length; ++this.pointer) { + const c = this.input[this.pointer]; + const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); + + // exec state machine + const ret = this["parse " + this.state](c, cStr); + if (!ret) { + break; // terminate algorithm + } else if (ret === failure) { + this.failure = true; + break; + } + } +} + +URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { + if (isASCIIAlpha(c)) { + this.buffer += cStr.toLowerCase(); + this.state = "scheme"; + } else if (!this.stateOverride) { + this.state = "no scheme"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { + if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { + this.buffer += cStr.toLowerCase(); + } else if (c === 58) { + if (this.stateOverride) { + if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { + return false; + } + + if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { + return false; + } + + if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { + return false; + } + + if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { + return false; + } + } + this.url.scheme = this.buffer; + this.buffer = ""; + if (this.stateOverride) { + return false; + } + if (this.url.scheme === 
"file") { + if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { + this.parseError = true; + } + this.state = "file"; + } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { + this.state = "special relative or authority"; + } else if (isSpecial(this.url)) { + this.state = "special authority slashes"; + } else if (this.input[this.pointer + 1] === 47) { + this.state = "path or authority"; + ++this.pointer; + } else { + this.url.cannotBeABaseURL = true; + this.url.path.push(""); + this.state = "cannot-be-a-base-URL path"; + } + } else if (!this.stateOverride) { + this.buffer = ""; + this.state = "no scheme"; + this.pointer = -1; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { + if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { + return failure; + } else if (this.base.cannotBeABaseURL && c === 35) { + this.url.scheme = this.base.scheme; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.url.cannotBeABaseURL = true; + this.state = "fragment"; + } else if (this.base.scheme === "file") { + this.state = "file"; + --this.pointer; + } else { + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { + if (c === 47) { + this.state = "authority"; + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative"] = 
function parseRelative(c) { + this.url.scheme = this.base.scheme; + if (isNaN(c)) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 47) { + this.state = "relative slash"; + } else if (c === 63) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else if (isSpecial(this.url) && c === 92) { + this.parseError = true; + this.state = "relative slash"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(0, this.base.path.length - 1); + + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { + if (isSpecial(this.url) && (c === 47 || c === 92)) { + if (c === 92) { + this.parseError = true; + } + this.state = "special authority ignore slashes"; + } else if (c === 47) { + this.state = "authority"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { + if (c === 
47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "special authority ignore slashes"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { + if (c !== 47 && c !== 92) { + this.state = "authority"; + --this.pointer; + } else { + this.parseError = true; + } + + return true; +}; + +URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { + if (c === 64) { + this.parseError = true; + if (this.atFlag) { + this.buffer = "%40" + this.buffer; + } + this.atFlag = true; + + // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars + const len = countSymbols(this.buffer); + for (let pointer = 0; pointer < len; ++pointer) { + const codePoint = this.buffer.codePointAt(pointer); + + if (codePoint === 58 && !this.passwordTokenSeenFlag) { + this.passwordTokenSeenFlag = true; + continue; + } + const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); + if (this.passwordTokenSeenFlag) { + this.url.password += encodedCodePoints; + } else { + this.url.username += encodedCodePoints; + } + } + this.buffer = ""; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + if (this.atFlag && this.buffer === "") { + this.parseError = true; + return failure; + } + this.pointer -= countSymbols(this.buffer) + 1; + this.buffer = ""; + this.state = "host"; + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse hostname"] = +URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { + if (this.stateOverride && this.url.scheme === "file") { + --this.pointer; + this.state = "file host"; + } else if (c === 58 && !this.arrFlag) { + if (this.buffer === "") { + this.parseError = true; + 
return failure; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "port"; + if (this.stateOverride === "hostname") { + return false; + } + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + --this.pointer; + if (isSpecial(this.url) && this.buffer === "") { + this.parseError = true; + return failure; + } else if (this.stateOverride && this.buffer === "" && + (includesCredentials(this.url) || this.url.port !== null)) { + this.parseError = true; + return false; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "path start"; + if (this.stateOverride) { + return false; + } + } else { + if (c === 91) { + this.arrFlag = true; + } else if (c === 93) { + this.arrFlag = false; + } + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { + if (isASCIIDigit(c)) { + this.buffer += cStr; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92) || + this.stateOverride) { + if (this.buffer !== "") { + const port = parseInt(this.buffer); + if (port > Math.pow(2, 16) - 1) { + this.parseError = true; + return failure; + } + this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; + this.buffer = ""; + } + if (this.stateOverride) { + return false; + } + this.state = "path start"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); + +URLStateMachine.prototype["parse file"] = function parseFile(c) { + this.url.scheme = "file"; + + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file slash"; + } else if (this.base !== null && this.base.scheme === "file") { + if (isNaN(c)) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 63) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else { + if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points + !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || + (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points + !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + shortenPath(this.url); + } else { + this.parseError = true; + } + + this.state = "path"; + --this.pointer; + } + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file host"; + } else { + if (this.base !== null && this.base.scheme === "file") { + if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { + this.url.path.push(this.base.path[0]); + } else { + this.url.host = 
this.base.host; + } + } + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { + if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { + --this.pointer; + if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { + this.parseError = true; + this.state = "path"; + } else if (this.buffer === "") { + this.url.host = ""; + if (this.stateOverride) { + return false; + } + this.state = "path start"; + } else { + let host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + if (host === "localhost") { + host = ""; + } + this.url.host = host; + + if (this.stateOverride) { + return false; + } + + this.buffer = ""; + this.state = "path start"; + } + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { + if (isSpecial(this.url)) { + if (c === 92) { + this.parseError = true; + } + this.state = "path"; + + if (c !== 47 && c !== 92) { + --this.pointer; + } + } else if (!this.stateOverride && c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (!this.stateOverride && c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else if (c !== undefined) { + this.state = "path"; + if (c !== 47) { + --this.pointer; + } + } + + return true; +}; + +URLStateMachine.prototype["parse path"] = function parsePath(c) { + if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || + (!this.stateOverride && (c === 63 || c === 35))) { + if (isSpecial(this.url) && c === 92) { + this.parseError = true; + } + + if (isDoubleDot(this.buffer)) { + shortenPath(this.url); + if (c !== 47 && !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } + } else if (isSingleDot(this.buffer) && c !== 47 && + !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } else if (!isSingleDot(this.buffer)) { + if 
(this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { + if (this.url.host !== "" && this.url.host !== null) { + this.parseError = true; + this.url.host = ""; + } + this.buffer = this.buffer[0] + ":"; + } + this.url.path.push(this.buffer); + } + this.buffer = ""; + if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { + while (this.url.path.length > 1 && this.url.path[0] === "") { + this.parseError = true; + this.url.path.shift(); + } + } + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += percentEncodeChar(c, isPathPercentEncode); + } + + return true; +}; + +URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else { + // TODO: Add: not a URL code point + if (!isNaN(c) && c !== 37) { + this.parseError = true; + } + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + if (!isNaN(c)) { + this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); + } + } + + return true; +}; + +URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { + if (isNaN(c) || (!this.stateOverride && c === 35)) { + if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { + this.encodingOverride = "utf-8"; + } + + const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead + for (let i = 0; i < buffer.length; ++i) { + 
if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || + buffer[i] === 0x3C || buffer[i] === 0x3E) { + this.url.query += percentEncode(buffer[i]); + } else { + this.url.query += String.fromCodePoint(buffer[i]); + } + } + + this.buffer = ""; + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { + if (isNaN(c)) { // do nothing + } else if (c === 0x0) { + this.parseError = true; + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); + } + + return true; +}; + +function serializeURL(url, excludeFragment) { + let output = url.scheme + ":"; + if (url.host !== null) { + output += "//"; + + if (url.username !== "" || url.password !== "") { + output += url.username; + if (url.password !== "") { + output += ":" + url.password; + } + output += "@"; + } + + output += serializeHost(url.host); + + if (url.port !== null) { + output += ":" + url.port; + } + } else if (url.host === null && url.scheme === "file") { + output += "//"; + } + + if (url.cannotBeABaseURL) { + output += url.path[0]; + } else { + for (const string of url.path) { + output += "/" + string; + } + } + + if (url.query !== null) { + output += "?" 
+ url.query; + } + + if (!excludeFragment && url.fragment !== null) { + output += "#" + url.fragment; + } + + return output; +} + +function serializeOrigin(tuple) { + let result = tuple.scheme + "://"; + result += serializeHost(tuple.host); + + if (tuple.port !== null) { + result += ":" + tuple.port; + } + + return result; +} + +module.exports.serializeURL = serializeURL; + +module.exports.serializeURLOrigin = function (url) { + // https://url.spec.whatwg.org/#concept-url-origin + switch (url.scheme) { + case "blob": + try { + return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); + } catch (e) { + // serializing an opaque origin returns "null" + return "null"; + } + case "ftp": + case "gopher": + case "http": + case "https": + case "ws": + case "wss": + return serializeOrigin({ + scheme: url.scheme, + host: url.host, + port: url.port + }); + case "file": + // spec says "exercise to the reader", chrome says "file://" + return "file://"; + default: + // serializing an opaque origin returns "null" + return "null"; + } +}; + +module.exports.basicURLParse = function (input, options) { + if (options === undefined) { + options = {}; + } + + const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); + if (usm.failure) { + return "failure"; + } + + return usm.url; +}; + +module.exports.setTheUsername = function (url, username) { + url.username = ""; + const decoded = punycode.ucs2.decode(username); + for (let i = 0; i < decoded.length; ++i) { + url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.setThePassword = function (url, password) { + url.password = ""; + const decoded = punycode.ucs2.decode(password); + for (let i = 0; i < decoded.length; ++i) { + url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.serializeHost = serializeHost; + +module.exports.cannotHaveAUsernamePasswordPort = 
cannotHaveAUsernamePasswordPort; + +module.exports.serializeInteger = function (integer) { + return String(integer); +}; + +module.exports.parseURL = function (input, options) { + if (options === undefined) { + options = {}; + } + + // We don't handle blobs, so this just delegates: + return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); +}; /***/ }), diff --git a/barretenberg/cpp/src/barretenberg/crypto/blake3s_full/blake3s.cpp b/barretenberg/cpp/src/barretenberg/crypto/blake3s_full/blake3s.cpp index e6545f3a83e..c00419e9de6 100644 --- a/barretenberg/cpp/src/barretenberg/crypto/blake3s_full/blake3s.cpp +++ b/barretenberg/cpp/src/barretenberg/crypto/blake3s_full/blake3s.cpp @@ -493,7 +493,7 @@ INLINE void hasher_merge_cv_stack(blake3_hasher* self, uint64_t total_len) // of the whole tree, and it would need to be ROOT finalized. We can't // compress it until we know. // 2) This 64 KiB input might complete a larger tree, whose root node is -// similarly going to be the the root of the whole tree. For example, maybe +// similarly going to be the root of the whole tree. For example, maybe // we have 196 KiB (that is, 128 + 64) hashed so far. We can't compress the // node at the root of the 256 KiB subtree until we know how to finalize it. // diff --git a/barretenberg/cpp/src/barretenberg/crypto/merkle_tree/indexed_tree/indexed_tree.hpp b/barretenberg/cpp/src/barretenberg/crypto/merkle_tree/indexed_tree/indexed_tree.hpp index b7112be5447..e87b4438508 100644 --- a/barretenberg/cpp/src/barretenberg/crypto/merkle_tree/indexed_tree/indexed_tree.hpp +++ b/barretenberg/cpp/src/barretenberg/crypto/merkle_tree/indexed_tree/indexed_tree.hpp @@ -10,7 +10,7 @@ namespace bb::crypto::merkle_tree { using index_t = uint256_t; /** - * @brief Used in parallel insertions in the the IndexedTree. Workers signal to other following workes as they move up + * @brief Used in parallel insertions in the IndexedTree. 
Workers signal to other following workes as they move up * the level of the tree. * */ diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp index bdb969415cb..b57873289f4 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp @@ -652,7 +652,7 @@ class ECCVMFlavor { }; /** - * @brief The verification key is responsible for storing the the commitments to the precomputed (non-witnessk) + * @brief The verification key is responsible for storing the commitments to the precomputed (non-witnessk) * polynomials used by the verifier. * * @note Note the discrepancy with what sort of data is stored here vs in the proving key. We may want to diff --git a/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_flavor.hpp b/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_flavor.hpp index 131c5fcb686..e2aacc1d532 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_flavor.hpp @@ -77,7 +77,7 @@ template class ECCVMRecursiveFlavor_ { using VerifierCommitmentKey = bb::VerifierCommitmentKey; /** - * @brief The verification key is responsible for storing the the commitments to the precomputed (non-witness) + * @brief The verification key is responsible for storing the commitments to the precomputed (non-witness) * polynomials used by the verifier. * * @note Note the discrepancy with what sort of data is stored here vs in the proving key. 
We may want to diff --git a/barretenberg/cpp/src/barretenberg/polynomials/univariate.hpp b/barretenberg/cpp/src/barretenberg/polynomials/univariate.hpp index 2a64416f21e..69ebd9765a3 100644 --- a/barretenberg/cpp/src/barretenberg/polynomials/univariate.hpp +++ b/barretenberg/cpp/src/barretenberg/polynomials/univariate.hpp @@ -459,7 +459,7 @@ template STerm` and the inverse of it. Where `term_name` is the the name you provided earlier. + It will generate all the symbolic values of the circuit wires, add all the gate constrains, create a map `term_name->STerm` and the inverse of it. Where `term_name` is the name you provided earlier. In case you want to create two similar circuits with the same `solver` and `schema`, then you should specify the `tag`(name) of a circuit. @@ -209,7 +209,7 @@ Another one is - `pair StandardCircuit::unique_witness_ext(CircuitSchema circuit_info, Solver* s, TermType type, vector equal_variables, vector nequal_variables, vector at_least_one_equal_variable, vector at_least_one_nequal_variable)` that does the same but provides you with more flexible settings. - Same in `UltraCircuit` -The return circuits can be useful, if you want to define some additional constraints, that are not covered by the the above functions. +The return circuits can be useful, if you want to define some additional constraints, that are not covered by the above functions. You can call `s.check`, `s.model`, `smt_timer` or `default_model` further. ## 5. smt_util.hpp diff --git a/barretenberg/cpp/src/barretenberg/stdlib/encryption/ecdsa/ecdsa_impl.hpp b/barretenberg/cpp/src/barretenberg/stdlib/encryption/ecdsa/ecdsa_impl.hpp index a40ba91f45d..02c69d8f770 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/encryption/ecdsa/ecdsa_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/encryption/ecdsa/ecdsa_impl.hpp @@ -38,7 +38,7 @@ bool_t ecdsa_verify_signature(const stdlib::byte_array& messag * It is used to recover signing public key from an ecdsa signature. 
In practice, the value * of v is offset by 27 following the convention from the original bitcoin whitepaper. * - * The value of v depends on the the point R = (x, y) s.t. r = x % |Fr| + * The value of v depends on the point R = (x, y) s.t. r = x % |Fr| * 0: y is even && x < |Fr| (x = r) * 1: y is odd && x < |Fr| (x = r) * 2: y is even && |Fr| <= x < |Fq| (x = r + |Fr|) diff --git a/barretenberg/cpp/src/barretenberg/stdlib/hash/blake2s/blake2s_plookup.cpp b/barretenberg/cpp/src/barretenberg/stdlib/hash/blake2s/blake2s_plookup.cpp index 744fafa2d4b..3b939e5f042 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/hash/blake2s/blake2s_plookup.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/hash/blake2s/blake2s_plookup.cpp @@ -40,7 +40,7 @@ enum blake2s_constant { * h: A 64-byte chain value denoted decomposed as (h_0, h_1, ..., h_7), each h_i is a 32-bit number. * It form the first two rows on the internal state matrix v of the compression function G. * - * t: It is a counter (t_0 lsb and t_1 msb) used in the the initialization of the internal state v. + * t: It is a counter (t_0 lsb and t_1 msb) used in the initialization of the internal state v. * * f: f_0 and f_1 are finalization flags used in the initialization of the internal state v. 
* / 0xfff...ff if the block processed is the last diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield_impl.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield_impl.hpp index b77a4358518..be51db0b7bb 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield_impl.hpp @@ -1223,7 +1223,7 @@ void bigfield::perform_reductions_for_mult_madd(std::vector::unsafe_evaluate_multiple_multiply_add(const std::vect ctx->decompose_into_default_range(carry_lo.witness_index, static_cast(carry_lo_msb)); ctx->decompose_into_default_range(carry_hi.witness_index, static_cast(carry_hi_msb)); } - was removed from the the `else` block below. See the conversation at + was removed from the `else` block below. See the conversation at https://github.com/AztecProtocol/aztec2-internal/pull/1023 We should make sure that no constraint like this is needed but missing (e.g., an equivalent constraint was just imposed?). */ diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/plookup/plookup.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/plookup/plookup.test.cpp index 5d96a94293e..9c36c59a809 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/plookup/plookup.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/plookup/plookup.test.cpp @@ -257,7 +257,7 @@ TEST(stdlib_plookup, blake2s_xor_rotate_16) } /* - * The following out coefficients are the the ones multiplied for computing the cumulative intermediate terms + * The following out coefficients are the ones multiplied for computing the cumulative intermediate terms * in the expected output. If the column_3_coefficients for this table are (a0, a1, ..., a5), then the * out_coefficients must be (a5/a4, a4/a3, a3/a2, a2/a1, a1/a0). Note that these are stored in reverse orde * for simplicity. 
diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/uint/uint.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/uint/uint.test.cpp index 08ec8b849a2..6a0c281f05d 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/uint/uint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/uint/uint.test.cpp @@ -1705,7 +1705,7 @@ template class stdlib_uint : public testing::Test { } /** - * @brief Test the the function uint_ct::at used to extract bits. + * @brief Test the function uint_ct::at used to extract bits. */ static void test_at() { diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp index 64409b23361..ec085459272 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp @@ -455,7 +455,7 @@ class MegaFlavor { }; /** - * @brief The verification key is responsible for storing the the commitments to the precomputed (non-witnessk) + * @brief The verification key is responsible for storing the commitments to the precomputed (non-witnessk) * polynomials used by the verifier. * * @note Note the discrepancy with what sort of data is stored here vs in the proving key. 
We may want to resolve diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_recursive_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_recursive_flavor.hpp index fde032f061e..d0beed7bed2 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_recursive_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_recursive_flavor.hpp @@ -88,7 +88,7 @@ template class MegaRecursiveFlavor_ { using Base::Base; }; /** - * @brief The verification key is responsible for storing the the commitments to the precomputed (non-witnessk) + * @brief The verification key is responsible for storing the commitments to the precomputed (non-witnessk) * polynomials used by the verifier. * * @note Note the discrepancy with what sort of data is stored here vs in the proving key. We may want to resolve diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.cpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.cpp index 2797c1b8fcf..42e24195960 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.cpp @@ -457,7 +457,7 @@ void UltraCircuitBuilder_::create_poly_gate(const poly_triple_< * * @details x and y are defined over scalar field. * - * @param in Elliptic curve point addition gate parameters, including the the affine coordinates of the two points being + * @param in Elliptic curve point addition gate parameters, including the affine coordinates of the two points being * added, the resulting point coordinates and the selector values that describe whether the second point is negated. 
*/ template diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp index 7d501ae0fdf..dfda4a2a24e 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp @@ -351,7 +351,7 @@ class UltraFlavor { }; /** - * @brief The verification key is responsible for storing the the commitments to the precomputed (non-witnessk) + * @brief The verification key is responsible for storing the commitments to the precomputed (non-witnessk) * polynomials used by the verifier. * * @note Note the discrepancy with what sort of data is stored here vs in the proving key. We may want to resolve diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_keccak.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_keccak.hpp index 9b8d8dd6070..30db8636251 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_keccak.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_keccak.hpp @@ -352,7 +352,7 @@ class UltraKeccakFlavor { }; /** - * @brief The verification key is responsible for storing the the commitments to the precomputed (non-witnessk) + * @brief The verification key is responsible for storing the commitments to the precomputed (non-witnessk) * polynomials used by the verifier. * * @note Note the discrepancy with what sort of data is stored here vs in the proving key. 
We may want to resolve diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_recursive_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_recursive_flavor.hpp index f46d4d60f47..b0228d4748b 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_recursive_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_recursive_flavor.hpp @@ -95,7 +95,7 @@ template class UltraRecursiveFlavor_ { public: /** - * @brief The verification key is responsible for storing the the commitments to the precomputed (non-witnessk) + * @brief The verification key is responsible for storing the commitments to the precomputed (non-witnessk) * polynomials used by the verifier. * * @note Note the discrepancy with what sort of data is stored here vs in the proving key. We may want to resolve diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_flavor.hpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_flavor.hpp index a8d4d709100..6abd04d0661 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_flavor.hpp @@ -843,7 +843,7 @@ class TranslatorFlavor { }; /** - * @brief The verification key is responsible for storing the the commitments to the precomputed (non-witnessk) + * @brief The verification key is responsible for storing the commitments to the precomputed (non-witnessk) * polynomials used by the verifier. * * @note Note the discrepancy with what sort of data is stored here vs in the proving key. 
We may want to diff --git a/barretenberg/cpp/src/barretenberg/translator_vm_recursion/translator_recursive_flavor.hpp b/barretenberg/cpp/src/barretenberg/translator_vm_recursion/translator_recursive_flavor.hpp index d922e941bfd..054874779ef 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm_recursion/translator_recursive_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm_recursion/translator_recursive_flavor.hpp @@ -110,7 +110,7 @@ template class TranslatorRecursiveFlavor_ { using Base::Base; }; /** - * @brief The verification key is responsible for storing the the commitments to the precomputed (non-witnessk) + * @brief The verification key is responsible for storing the commitments to the precomputed (non-witnessk) * polynomials used by the verifier. * * @note Note the discrepancy with what sort of data is stored here vs in the proving key. We may want to diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp index 0e0954ee2e8..4d83635b8a8 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp @@ -2760,7 +2760,7 @@ void AvmTraceBuilder::op_call(uint8_t indirect, { hint.success }); external_call_counter++; pc++; - // Adjust the side_effect_counter to the the value at the end of the external call. + // Adjust the side_effect_counter to the value at the end of the external call. side_effect_counter = static_cast(hint.end_side_effect_counter); } diff --git a/barretenberg/ts/scripts/build_wasm.sh b/barretenberg/ts/scripts/build_wasm.sh index 77a9fa8a314..d7aa1e0dee0 100755 --- a/barretenberg/ts/scripts/build_wasm.sh +++ b/barretenberg/ts/scripts/build_wasm.sh @@ -14,7 +14,7 @@ fi # We only need the threads wasm, as node always uses threads. # We need to take two copies for both esm and cjs builds. You can't use symlinks when publishing. 
# This probably isn't a big deal however due to compression. -# When building the the browser bundle, both wasms are inlined directly. +# When building the browser bundle, both wasms are inlined directly. mkdir -p ./dest/node/barretenberg_wasm mkdir -p ./dest/node-cjs/barretenberg_wasm cp ../cpp/build-wasm-threads/bin/barretenberg.wasm ./dest/node/barretenberg_wasm/barretenberg-threads.wasm diff --git a/docs/docs/reference/smart_contract_reference/portals/outbox.md b/docs/docs/reference/smart_contract_reference/portals/outbox.md index 80de81ffaa1..daf37b473a4 100644 --- a/docs/docs/reference/smart_contract_reference/portals/outbox.md +++ b/docs/docs/reference/smart_contract_reference/portals/outbox.md @@ -45,7 +45,7 @@ Allows a recipient to consume a message from the `Outbox`. - Will revert with `Outbox__InvalidChainId()` if `block.chainid != _message.recipient.chainId`. - Will revert with `Outbox__NothingToConsumeAtBlock(uint256 l2BlockNumber)` if the root for the block has not been set yet. - Will revert with `Outbox__AlreadyNullified(uint256 l2BlockNumber, uint256 leafIndex)` if the message at leafIndex for the block has already been consumed. -- Will revert with `Outbox__InvalidPathLength(uint256 expected, uint256 actual)` if the the supplied height is less than the existing minimum height of the L2 to L1 message tree, or the supplied height is greater than the maximum (minimum height + log2(maximum messages)). +- Will revert with `Outbox__InvalidPathLength(uint256 expected, uint256 actual)` if the supplied height is less than the existing minimum height of the L2 to L1 message tree, or the supplied height is greater than the maximum (minimum height + log2(maximum messages)). - Will revert with `MerkleLib__InvalidRoot(bytes32 expected, bytes32 actual, bytes32 leaf, uint256 leafIndex)` if unable to verify the message existence in the tree. It returns the message as a leaf, as well as the index of the leaf to expose more info about the error. 
diff --git a/noir-projects/aztec-nr/authwit/src/auth.nr b/noir-projects/aztec-nr/authwit/src/auth.nr index a6c2d91f9cb..dd3cbb900f1 100644 --- a/noir-projects/aztec-nr/authwit/src/auth.nr +++ b/noir-projects/aztec-nr/authwit/src/auth.nr @@ -258,7 +258,7 @@ pub fn assert_current_call_valid_authwit_public(context: &mut PublicContext, on_ * Note that the authentication registry will take the `msg_sender` into account as the consumer, so this will only * work if the `msg_sender` is the same as the `consumer` when the `message_hash` was inserted into the registry. * - * @param on_behalf_of The address that have authorized the the `inner_hash` + * @param on_behalf_of The address that have authorized the `inner_hash` */ pub fn assert_inner_hash_valid_authwit_public(context: &mut PublicContext, on_behalf_of: AztecAddress, inner_hash: Field) { let result: Field = context.call_public_function( @@ -316,7 +316,7 @@ pub fn compute_inner_authwit_hash(args: [Field; N]) -> Field { * * Using the `on_behalf_of` and the `inner_hash` to ensure that the nullifier is siloed for a specific `on_behalf_of`. * - * @param on_behalf_of The address that have authorized the the `inner_hash` + * @param on_behalf_of The address that have authorized the `inner_hash` * @param inner_hash The hash of the message to authorize */ pub fn compute_authwit_nullifier(on_behalf_of: AztecAddress, inner_hash: Field) -> Field { diff --git a/noir-projects/noir-contracts/contracts/token_blacklist_contract/src/types/balances_map.nr b/noir-projects/noir-contracts/contracts/token_blacklist_contract/src/types/balances_map.nr index 882364d9f65..c2a43a8b0f6 100644 --- a/noir-projects/noir-contracts/contracts/token_blacklist_contract/src/types/balances_map.nr +++ b/noir-projects/noir-contracts/contracts/token_blacklist_contract/src/types/balances_map.nr @@ -92,7 +92,7 @@ impl BalancesMap { let note = notes.get_unchecked(i); // Removes the note from the owner's set of notes. 
- // This will call the the `compute_nullifer` function of the `token_note` + // This will call the `compute_nullifer` function of the `token_note` // which require knowledge of the secret key (currently the users encryption key). // The contract logic must ensure that the spending key is used as well. // docs:start:remove diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/types/balances_map.nr b/noir-projects/noir-contracts/contracts/token_contract/src/types/balances_map.nr index b357b93bf1a..2035976606f 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/types/balances_map.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/types/balances_map.nr @@ -118,7 +118,7 @@ impl BalancesMap { let note = notes.get_unchecked(i); // Removes the note from the owner's set of notes. - // This will call the the `compute_nullifer` function of the `token_note` + // This will call the `compute_nullifer` function of the `token_note` // which require knowledge of the secret key (currently the users encryption key). // The contract logic must ensure that the spending key is used as well. // docs:start:remove diff --git a/noir-projects/noir-contracts/contracts/token_with_refunds_contract/src/types/balances_map.nr b/noir-projects/noir-contracts/contracts/token_with_refunds_contract/src/types/balances_map.nr index 63a0092ea5c..1e16e80f616 100644 --- a/noir-projects/noir-contracts/contracts/token_with_refunds_contract/src/types/balances_map.nr +++ b/noir-projects/noir-contracts/contracts/token_with_refunds_contract/src/types/balances_map.nr @@ -95,7 +95,7 @@ impl BalancesMap { let note = notes.get_unchecked(i); // Removes the note from the owner's set of notes. - // This will call the the `compute_nullifer` function of the `token_note` + // This will call the `compute_nullifer` function of the `token_note` // which require knowledge of the secret key (currently the users encryption key). 
// The contract logic must ensure that the spending key is used as well. // docs:start:remove diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts index 4dce18fca50..62c718d5ae6 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts @@ -43,7 +43,7 @@ export class BlockBodyStore { /** * Gets an L2 block body. - * @param txsEffectsHash - The txHash of the the block body to return + * @param txsEffectsHash - The txHash of the block body to return * @returns The requested L2 block body */ getBlockBody(txsEffectsHash: Buffer): Body | undefined { diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts index 8cad7257baa..827f0fbd225 100644 --- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts @@ -45,7 +45,7 @@ export class MemoryArchiverStore implements ArchiverDataStore { private l2BlockBodies: Map = new Map(); /** - * An array containing all the the tx effects in the L2 blocks that have been fetched so far. + * An array containing all the tx effects in the L2 blocks that have been fetched so far. */ private txEffects: TxEffect[] = []; diff --git a/yarn-project/circuit-types/src/logs/l1_payload/l1_payload.ts b/yarn-project/circuit-types/src/logs/l1_payload/l1_payload.ts index 5531b011675..d8e12624c63 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/l1_payload.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/l1_payload.ts @@ -33,7 +33,7 @@ export abstract class L1Payload { /** * Encrypts an event payload for a given recipient and sender. 
- * Creates an incoming log the the recipient using the recipient's ivsk, and + * Creates an incoming log for the recipient using the recipient's ivsk, and * an outgoing log for the sender using the sender's ovsk. * * @param ephSk - An ephemeral secret key used for the encryption diff --git a/yarn-project/key-store/src/key_store.ts b/yarn-project/key-store/src/key_store.ts index 34a54548d27..6914c2fc83d 100644 --- a/yarn-project/key-store/src/key_store.ts +++ b/yarn-project/key-store/src/key_store.ts @@ -101,7 +101,7 @@ export class KeyStore { * Gets the key validation request for a given master public key hash and contract address. * @throws If the account corresponding to the master public key hash does not exist in the key store. * @param pkMHash - The master public key hash. - * @param contractAddress - The contract address to silo the secret key in the the key validation request with. + * @param contractAddress - The contract address to silo the secret key in the key validation request with. * @returns The key validation request. 
*/ public getKeyValidationRequest(pkMHash: Fr, contractAddress: AztecAddress): Promise { From 24b3e05a9bfca29f7741de49fe12f73cc219953b Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Wed, 7 Aug 2024 14:08:52 +0100 Subject: [PATCH 02/61] chore!: rename fee juice (#7793) Fix #7570 --- .github/workflows/devnet-deploys.yml | 6 +- docker-compose.yml | 4 +- docs/docs/aztec/concepts/accounts/index.md | 2 +- .../run_more_than_one_pxe_sandbox.md | 2 +- docs/docs/migration_notes.md | 32 +++++++-- .../{fee-payment-asset.md => fee-juice.md} | 14 ++-- .../gas-and-fees/fee-schedule.md | 25 ++++--- .../docs/protocol-specs/gas-and-fees/index.md | 3 +- .../gas-and-fees/kernel-tracking.md | 4 +- .../published-gas-and-fee-data.md | 8 +-- .../gas-and-fees/specifying-gas-fee-info.md | 7 +- .../sandbox_reference/cli_reference.md | 8 +-- .../sandbox_reference/sandbox-reference.md | 2 +- .../write_accounts_contract.md | 4 +- docs/netlify.toml | 6 +- docs/sidebars.js | 2 +- l1-contracts/src/core/Rollup.sol | 10 +-- .../src/core/libraries/ConstantsGen.sol | 12 ++-- l1-contracts/terraform/main.tf | 12 ++-- .../{GasPortal.sol => FeeJuicePortal.sol} | 2 +- noir-projects/noir-contracts/Nargo.toml | 2 +- .../app_subscription_contract/Nargo.toml | 3 +- .../app_subscription_contract/src/main.nr | 14 ++-- .../Nargo.toml | 4 +- .../src/lib.nr | 0 .../src/main.nr | 14 ++-- .../contracts/fpc_contract/Nargo.toml | 1 - .../contracts/fpc_contract/src/main.nr | 5 +- .../rollup-lib/src/base/base_rollup_inputs.nr | 64 ++++++++--------- .../crates/types/src/constants.nr | 2 +- yarn-project/aztec-faucet/terraform/main.tf | 2 +- .../aztec-faucet/terraform/variables.tf | 2 +- .../aztec-node/src/aztec-node/server.ts | 4 +- yarn-project/aztec.js/src/api/fee.ts | 4 +- .../aztec.js/src/contract/contract.test.ts | 6 +- ..._method.ts => fee_juice_payment_method.ts} | 8 +-- ...=> fee_juice_payment_method_with_claim.ts} | 12 ++-- yarn-project/aztec/src/cli/texts.ts | 4 +- yarn-project/aztec/src/sandbox.ts | 22 
+++--- yarn-project/aztec/terraform/node/main.tf | 8 +-- yarn-project/bot/src/bot.ts | 4 +- yarn-project/circuits.js/src/constants.gen.ts | 10 +-- .../circuits.js/src/contract/artifact_hash.ts | 2 +- .../kernel/kernel_circuit_public_inputs.ts | 2 +- .../structs/public_circuit_public_inputs.ts | 2 +- .../src/structs/rollup/base_rollup.ts | 4 +- .../circuits.js/src/tests/factories.ts | 4 +- yarn-project/cli-wallet/src/utils/fees.ts | 8 +-- .../cli/src/cmds/devnet/bootstrap_network.ts | 13 ++-- .../deploy_protocol_contract.ts | 6 +- .../cli/src/cmds/l1/deploy_l1_contracts.ts | 4 +- yarn-project/cli/src/cmds/l1/index.ts | 6 +- .../cli/src/cmds/misc/deploy_contracts.ts | 38 +++++----- .../cli/src/cmds/pxe/get_node_info.ts | 2 +- yarn-project/cli/src/cmds/pxe/get_pxe_info.ts | 2 +- yarn-project/cli/src/portal_manager.ts | 10 +-- yarn-project/cli/src/utils/aztec.ts | 16 ++--- yarn-project/end-to-end/Earthfile | 4 +- .../src/benchmarks/bench_prover.test.ts | 16 ++--- .../src/benchmarks/bench_tx_size_fees.test.ts | 18 ++--- .../end-to-end/src/devnet/e2e_smoke.test.ts | 14 ++-- .../src/e2e_fees/account_init.test.ts | 26 +++---- ...nts.test.ts => fee_juice_payments.test.ts} | 36 +++++----- .../end-to-end/src/e2e_fees/fees_test.ts | 70 +++++++++---------- .../src/e2e_fees/gas_estimation.test.ts | 8 +-- .../src/e2e_fees/private_payments.test.ts | 10 +-- .../src/e2e_fees/private_refunds.test.ts | 2 +- .../src/fixtures/setup_l1_contracts.ts | 16 ++--- yarn-project/end-to-end/src/fixtures/utils.ts | 64 ++++++++--------- .../src/shared/gas_portal_test_harness.ts | 62 ++++++++-------- .../ethereum/src/deploy_l1_contracts.ts | 58 +++++++-------- .../ethereum/src/l1_contract_addresses.ts | 14 ++-- .../scripts/generate-artifacts.sh | 2 +- .../src/type_conversion.ts | 2 +- .../scripts/copy-contracts.sh | 2 +- .../src/{gas-token => fee-juice}/artifact.ts | 4 +- .../{gas-token => fee-juice}/index.test.ts | 6 +- .../protocol-contracts/src/fee-juice/index.ts | 19 +++++ 
.../protocol-contracts/src/gas-token/index.ts | 19 ----- .../orchestrator/block-building-helpers.ts | 6 +- .../pxe/src/pxe_service/create_pxe_service.ts | 4 +- .../pxe/src/pxe_service/pxe_service.ts | 4 +- .../src/pxe_service/test/pxe_service.test.ts | 4 +- yarn-project/sequencer-client/src/config.ts | 4 +- .../src/tx_validator/gas_validator.test.ts | 18 ++--- .../src/tx_validator/gas_validator.ts | 18 ++--- .../src/tx_validator/tx_validator_factory.ts | 4 +- .../simulator/src/public/fee_payment.ts | 14 ++-- .../simulator/src/public/public_processor.ts | 10 +-- .../contracts/protocol_contract_addresses.ts | 2 +- 90 files changed, 521 insertions(+), 518 deletions(-) rename docs/docs/protocol-specs/gas-and-fees/{fee-payment-asset.md => fee-juice.md} (59%) rename l1-contracts/test/portals/{GasPortal.sol => FeeJuicePortal.sol} (98%) rename noir-projects/noir-contracts/contracts/{gas_token_contract => fee_juice_contract}/Nargo.toml (72%) rename noir-projects/noir-contracts/contracts/{gas_token_contract => fee_juice_contract}/src/lib.nr (100%) rename noir-projects/noir-contracts/contracts/{gas_token_contract => fee_juice_contract}/src/main.nr (92%) rename yarn-project/aztec.js/src/fee/{native_fee_payment_method.ts => fee_juice_payment_method.ts} (67%) rename yarn-project/aztec.js/src/fee/{native_fee_payment_method_with_claim.ts => fee_juice_payment_method_with_claim.ts} (66%) rename yarn-project/end-to-end/src/e2e_fees/{native_payments.test.ts => fee_juice_payments.test.ts} (72%) rename yarn-project/protocol-contracts/src/{gas-token => fee-juice}/artifact.ts (56%) rename yarn-project/protocol-contracts/src/{gas-token => fee-juice}/index.test.ts (74%) create mode 100644 yarn-project/protocol-contracts/src/fee-juice/index.ts delete mode 100644 yarn-project/protocol-contracts/src/gas-token/index.ts diff --git a/.github/workflows/devnet-deploys.yml b/.github/workflows/devnet-deploys.yml index 7280857a12d..cb297afb6b2 100644 --- a/.github/workflows/devnet-deploys.yml +++ 
b/.github/workflows/devnet-deploys.yml @@ -468,8 +468,8 @@ jobs: echo "TF_VAR_INBOX_CONTRACT_ADDRESS=$(extract inboxAddress)" >>$GITHUB_ENV echo "TF_VAR_OUTBOX_CONTRACT_ADDRESS=$(extract outboxAddress)" >>$GITHUB_ENV echo "TF_VAR_AVAILABILITY_ORACLE_CONTRACT_ADDRESS=$(extract availabilityOracleAddress)" >>$GITHUB_ENV - echo "TF_VAR_GAS_TOKEN_CONTRACT_ADDRESS=$(extract gasTokenAddress)" >>$GITHUB_ENV - echo "TF_VAR_GAS_PORTAL_CONTRACT_ADDRESS=$(extract gasPortalAddress)" >>$GITHUB_ENV + echo "TF_VAR_FEE_JUICE_CONTRACT_ADDRESS=$(extract feeJuiceAddress)" >>$GITHUB_ENV + echo "TF_VAR_FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$(extract feeJuicePortalAddress)" >>$GITHUB_ENV - name: Apply l1-contracts Terraform working-directory: ./l1-contracts/terraform @@ -591,7 +591,7 @@ jobs: aws s3 cp ${{ env.CONTRACT_S3_BUCKET }}/${{ env.DEPLOY_TAG }}/l1_contracts.json ./l1_contracts.json aws s3 cp ${{ env.CONTRACT_S3_BUCKET }}/${{ env.DEPLOY_TAG }}/basic_contracts.json ./basic_contracts.json - echo "TF_VAR_GAS_TOKEN_CONTRACT_ADDRESS=$(jq -r '.gasTokenAddress' ./l1_contracts.json)" >>$GITHUB_ENV + echo "TF_VAR_FEE_JUICE_CONTRACT_ADDRESS=$(jq -r '.feeJuiceAddress' ./l1_contracts.json)" >>$GITHUB_ENV echo "TF_VAR_DEV_COIN_CONTRACT_ADDRESS=$(jq -r '.devCoinL1' ./basic_contracts.json)" >>$GITHUB_ENV - name: Deploy Faucet diff --git a/docker-compose.yml b/docker-compose.yml index 09ad44729ad..8cc9bda4c66 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -50,8 +50,8 @@ services: INBOX_CONTRACT_ADDRESS: "0x12d9b5effc69bf5c0c29c8258c6b6fa95a08de74" OUTBOX_CONTRACT_ADDRESS: "0x3ec4b6c68a8c2ce4c78cdd465b3019b11a568d1d" AVAILABILITY_ORACLE_CONTRACT_ADDRESS: "0x98a4089127f3f5d555656f1c9b1801342c9d6bce" - GAS_TOKEN_CONTRACT_ADDRESS: "0x73c43b919973711e096bfc04c9d4b3be511ffc0b" - GAS_PORTAL_CONTRACT_ADDRESS: "0xdf25b0a34dbee9f25518f7a4d63bab8b3bb3e496" + FEE_JUICE_CONTRACT_ADDRESS: "0x73c43b919973711e096bfc04c9d4b3be511ffc0b" + FEE_JUICE_PORTAL_CONTRACT_ADDRESS: 
"0xdf25b0a34dbee9f25518f7a4d63bab8b3bb3e496" ETHEREUM_HOST: P2P_TCP_LISTEN_ADDR: "0.0.0.0:9000" P2P_UDP_LISTEN_ADDR: "0.0.0.0:9001" diff --git a/docs/docs/aztec/concepts/accounts/index.md b/docs/docs/aztec/concepts/accounts/index.md index 027427f9bf3..05c405ed950 100644 --- a/docs/docs/aztec/concepts/accounts/index.md +++ b/docs/docs/aztec/concepts/accounts/index.md @@ -117,7 +117,7 @@ However, this is not required when sitting on the receiving end. A user can dete ### Account contract deployment -Users will need to pay transaction fees in order to deploy their account contract. This can be done by sending a fee paying asset to their account contract address (which can be derived deterministically, as mentioned above), so they have funds to pay for the deployment. Alternatively, the fee can be paid for by another account, using [fee abstraction](#fee-management). +Users will need to pay transaction fees in order to deploy their account contract. This can be done by sending Fee Juice to their account contract address (which can be derived deterministically, as mentioned above), so they have funds to pay for the deployment. Alternatively, the fee can be paid for by another account, using [fee abstraction](#fee-management). 
### Authorizing actions diff --git a/docs/docs/guides/local_env/run_more_than_one_pxe_sandbox.md b/docs/docs/guides/local_env/run_more_than_one_pxe_sandbox.md index 0bf2c4f12f3..53c4b3cb8be 100644 --- a/docs/docs/guides/local_env/run_more_than_one_pxe_sandbox.md +++ b/docs/docs/guides/local_env/run_more_than_one_pxe_sandbox.md @@ -43,7 +43,7 @@ You should see something like this: pxe_service Added contract ContractClassRegisterer at 0x030c6b23cf81a1c1387674e7d180ef04abc19387eb0ec71eea67c2b602b517b7 pxe_service Added contract ContractInstanceDeployer at 0x2d8e7aedc70b65d49e6aa0794d8d12721896c177e87126701f6e60d184358e74 pxe_service Added contract MultiCallEntrypoint at 0x0325a7874e168991a060b7f54e7324a42f87f48ffa592a903a5ce170b9d99e20 - pxe_service Added contract GasToken at 0x0f0be9c2e88fe0a7baa0823fbf7cfba98a6ba71558d6b5a4ee497e3b38f0aa7c + pxe_service Added contract FeeJuice at 0x0f0be9c2e88fe0a7baa0823fbf7cfba98a6ba71558d6b5a4ee497e3b38f0aa7c pxe_synchronizer Initial sync complete pxe_service Started PXE connected to chain 31337 version 1 Aztec Server listening on port 8080 diff --git a/docs/docs/migration_notes.md b/docs/docs/migration_notes.md index 685ba62b85c..816bd834e43 100644 --- a/docs/docs/migration_notes.md +++ b/docs/docs/migration_notes.md @@ -6,6 +6,19 @@ keywords: [sandbox, aztec, notes, migration, updating, upgrading] Aztec is in full-speed development. Literally every version breaks compatibility with the previous ones. This page attempts to target errors and difficulties you might encounter when upgrading, and how to resolve them. +## 0.48.0 + +### Fee Juice rename + +The name of the canonical Gas contract has changed to Fee Juice. Update noir code: + +```diff +-GasToken::at(contract_address) ++FeeJuice::at(contract_address) +``` + +Additionally, `NativePaymentMethod` and `NativePaymentMethodWithClaim` have been renamed to `FeeJuicePaymentMethod` and `FeeJuicePaymentMethodWithClaim`. 
+ ## 0.47.0 # [Aztec sandbox] TXE deployment changes @@ -38,19 +51,23 @@ Sandbox commands have been cleaned up and simplified. Doing `aztec-up` now gets **REMOVED/RENAMED**: -* `aztec-sandbox` and `aztec sandbox`: now `aztec start --sandbox` -* `aztec-builder`: now `aztec codegen` and `aztec update` +- `aztec-sandbox` and `aztec sandbox`: now `aztec start --sandbox` +- `aztec-builder`: now `aztec codegen` and `aztec update` **ADDED**: -* `aztec test [options]`: runs `aztec start --txe && aztec-nargo test --oracle-resolver http://aztec:8081 --silence-warnings [options]` via docker-compose allowing users to easily run contract tests using TXE +- `aztec test [options]`: runs `aztec start --txe && aztec-nargo test --oracle-resolver http://aztec:8081 --silence-warnings [options]` via docker-compose allowing users to easily run contract tests using TXE ## 0.45.0 + ### [Aztec.nr] Remove unencrypted logs from private + They leak privacy so is a footgun! ## 0.44.0 + ### [Aztec.nr] Autogenerate Serialize methods for events + ```diff #[aztec(event)] struct WithdrawalProcessed { @@ -66,10 +83,11 @@ struct WithdrawalProcessed { ``` ### [Aztec.nr] rename `encode_and_encrypt_with_keys` to `encode_and_encrypt_note_with_keys` -```diff + +````diff contract XYZ { - use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_with_keys; -+ use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note_with_keys; ++ use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note_with_keys; .... 
- numbers.at(owner).initialize(&mut new_number).emit(encode_and_encrypt_with_keys(&mut context, owner_ovpk_m, owner_ivpk_m)); @@ -129,7 +147,7 @@ These changes were done because having the note hash exposed allowed us to not h + (note_hash_for_nullify, nullifier) + } + } -``` +```` ### [Aztec.nr] `note_getter` returns `BoundedVec` @@ -1708,4 +1726,4 @@ Now, just remove the `src` folder,: ```rust easy_private_token_contract = {git = "https://github.com/AztecProtocol/aztec-packages/", tag ="v0.17.0", directory = "noir-projects/noir-contracts/contracts/easy_private_token_contract"} -``` \ No newline at end of file +``` diff --git a/docs/docs/protocol-specs/gas-and-fees/fee-payment-asset.md b/docs/docs/protocol-specs/gas-and-fees/fee-juice.md similarity index 59% rename from docs/docs/protocol-specs/gas-and-fees/fee-payment-asset.md rename to docs/docs/protocol-specs/gas-and-fees/fee-juice.md index d5dfb648f4b..175042f6f02 100644 --- a/docs/docs/protocol-specs/gas-and-fees/fee-payment-asset.md +++ b/docs/docs/protocol-specs/gas-and-fees/fee-juice.md @@ -1,22 +1,22 @@ --- -title: Fee Payment Asset +title: Fee Juice --- -# Fee Payment Asset +# Fee Juice -The Fee Payment Asset (FPA) is an enshrined asset in the Aztec network that is used to pay fees. +Fee Juice is an enshrined asset in the Aztec network that is used to pay fees. -The FPA has several important properties: +It has several important properties: - It is fungible - It cannot be transferred between accounts on the Aztec network - It is obtained on Aztec via a bridge from Ethereum - It only has public balances -All transactions on the Aztec network have a [non-zero transaction_fee](./fee-schedule.md#da-gas), denominated in FPA, which must be paid for the transaction to be included in the block. +All transactions on the Aztec network have a [non-zero transaction_fee](./fee-schedule.md#da-gas), denominated in FPA, which must be paid for the transaction to be included in the block. 
When a block is successfully published on L1, the sequencer is paid on L1 the sum of all transaction fees in the block, denominated in FPA. -:::danger -We need a definition of the L1 fee payment asset. +:::danger +We need a definition of the L1 fee juice. ::: diff --git a/docs/docs/protocol-specs/gas-and-fees/fee-schedule.md b/docs/docs/protocol-specs/gas-and-fees/fee-schedule.md index 3bb448e906d..a548bd540f8 100644 --- a/docs/docs/protocol-specs/gas-and-fees/fee-schedule.md +++ b/docs/docs/protocol-specs/gas-and-fees/fee-schedule.md @@ -1,18 +1,19 @@ # Fee Schedule -The [transaction fee](./specifying-gas-fee-info.md#transaction-fee) is comprised of a DA component, an L2 component, and an inclusion fee. The DA and L2 components are calculated by multiplying the gas consumed in each dimension by the respective `feePerGas` value. The inclusion fee is a fixed cost associated with the transaction, which is used to cover the cost of verifying the encompassing rollup proof on L1. +The [transaction fee](./specifying-gas-fee-info.md#transaction-fee) is comprised of a DA component, an L2 component, and an inclusion fee. The DA and L2 components are calculated by multiplying the gas consumed in each dimension by the respective `feePerGas` value. The inclusion fee is a fixed cost associated with the transaction, which is used to cover the cost of verifying the encompassing rollup proof on L1. ## DA Gas DA gas is consumed to cover the costs associated with publishing data associated with a transaction. These data include: - - new note hashes - - new nullifiers - - new l2 -> l1 message hashes - - new public data writes - - new logs - - protocol metadata (e.g. the amount of gas consumed, revert code, etc.) + +- new note hashes +- new nullifiers +- new l2 -> l1 message hashes +- new public data writes +- new logs +- protocol metadata (e.g. the amount of gas consumed, revert code, etc.) 
The DA gas used is then calculated as: @@ -75,16 +76,19 @@ l2_gas_used = FIXED_L2_GAS ``` ### L2 Gas from Private + Private execution also consumes L2 gas, because there is still work that needs to be performed by the sequencer correspondent to the private outputs, which is effectively L2 gas. The following operations performed in private execution will consume L2 gas: + - 32 L2 gas per note hash - 64 L2 gas per nullifier - 4 L2 gas per byte of logs (note encrypted, encrypted, and unencrypted) ## Max Inclusion Fee -Each transaction, and each block, has inescapable overhead costs associated with it which are not directly related to the amount of data or computation performed. +Each transaction, and each block, has inescapable overhead costs associated with it which are not directly related to the amount of data or computation performed. These costs include: + - verifying the private kernel proof of each transaction - executing/proving the base/merge/root rollup circuits - includes verifying that every new nullifier is unique across the tx/block @@ -99,12 +103,11 @@ These costs include: See [the l1 contracts section](../l1-smart-contracts/index.md) for more information on the L1 Inbox and Outbox. -Users cover these costs by [specifying an inclusion fee](./specifying-gas-fee-info.md#specifying-gas--fee-info), which is different from other parameters specified in that it is a fixed fee offered to the sequencer, denominated in [FPA](./fee-payment-asset.md). +Users cover these costs by [specifying an inclusion fee](./specifying-gas-fee-info.md#specifying-gas--fee-info), which is different from other parameters specified in that it is a fixed fee offered to the sequencer, denominated in [Fee Juice](./fee-juice.md). 
Even though these line items will be the same for every transaction in a block, the **cost** to the sequencer will vary, particularly based on: + - congestion on L1 - prevailing price of proof generation A price discovery mechanism is being developed to help users set the inclusion fee appropriately. - - diff --git a/docs/docs/protocol-specs/gas-and-fees/index.md b/docs/docs/protocol-specs/gas-and-fees/index.md index 55e2808eeef..469f6974504 100644 --- a/docs/docs/protocol-specs/gas-and-fees/index.md +++ b/docs/docs/protocol-specs/gas-and-fees/index.md @@ -7,7 +7,8 @@ title: Gas & Fees The Aztec network uses a fee system to incentivize sequencers to process transactions and publish blocks. This section breaks down: -- [the fee payment asset](./fee-payment-asset.md) + +- [fee juice](./fee-juice.md) - [how users specify gas/fee parameters in their transactions](./specifying-gas-fee-info.md) - [fee abstraction](./tx-setup-and-teardown.md) - [tracking gas/fee information in the kernel circuits](./kernel-tracking.md) diff --git a/docs/docs/protocol-specs/gas-and-fees/kernel-tracking.md b/docs/docs/protocol-specs/gas-and-fees/kernel-tracking.md index 3aaa83f7d8d..738c4ba5ffe 100644 --- a/docs/docs/protocol-specs/gas-and-fees/kernel-tracking.md +++ b/docs/docs/protocol-specs/gas-and-fees/kernel-tracking.md @@ -256,7 +256,7 @@ Regardless, it has a `fee_payer` set. When a node receives a transaction, it must check that: 1. the `fee_payer` is set -2. the `fee_payer` has a balance of [FPA](./fee-payment-asset.md) greater than the computed [transaction fee](./specifying-gas-fee-info.md#transaction-fee) if the transaction has no public component +2. the `fee_payer` has a balance of [Fee Juice](./fee-juice.md) greater than the computed [transaction fee](./specifying-gas-fee-info.md#transaction-fee) if the transaction has no public component 3. 
the `fee_payer` has a balance of FPA greater than the computed [max transaction fee](./specifying-gas-fee-info.md#maximum-transaction-fee) if the transaction has a public component See other [validity conditions](../transactions/validity.md). @@ -547,4 +547,4 @@ Additionally, it verifies that the max fees per gas specified by the user are gr After the public data writes specific to this transaction have been processed, and a new tree root is produced, the kernel circuit injects an additional public data write based upon that root which deducts the transaction fee from the `fee_payer`'s balance. -The calculated trasaction fee is set as output on the base rollup as `accumulated_fees`. Each subsequent merge rollup circuit sums this value from both of its inputs. The root rollup circuit then uses this value to set the `total_fees` in the `Header`. \ No newline at end of file +The calculated transaction fee is set as output on the base rollup as `accumulated_fees`. Each subsequent merge rollup circuit sums this value from both of its inputs. The root rollup circuit then uses this value to set the `total_fees` in the `Header`. diff --git a/docs/docs/protocol-specs/gas-and-fees/published-gas-and-fee-data.md b/docs/docs/protocol-specs/gas-and-fees/published-gas-and-fee-data.md index 3c69c036214..1aded7d8a3e 100644 --- a/docs/docs/protocol-specs/gas-and-fees/published-gas-and-fee-data.md +++ b/docs/docs/protocol-specs/gas-and-fees/published-gas-and-fee-data.md @@ -9,10 +9,11 @@ When a block is published to L1, it includes information about the gas and fees ## Block-level Data The block header contains a `GlobalVariables`, which contains a `GasFees` object. This object contains the following fields: -- `feePerDaGas`: The fee in [FPA](./fee-payment-asset.md) per unit of DA gas consumed for transactions in the block. + +- `feePerDaGas`: The fee in [Fee Juice](./fee-juice.md) per unit of DA gas consumed for transactions in the block. 
- `feePerL2Gas`: The fee in FPA per unit of L2 gas consumed for transactions in the block. -`GlobalVariables` also includes a `coinbase` field, which is the L1 address that receives the fees. +`GlobalVariables` also includes a `coinbase` field, which is the L1 address that receives the fees. The block header also contains a `totalFees` field, which is the total fees collected in the block in FPA. @@ -29,6 +30,5 @@ Should we move to a 1559-style fee market with block-level gas targets, there is ## Transaction-level Data The transaction data which is published to L1 is a `TxEffects` object, which includes -- `transaction_fee`: the fee paid by the transaction in FPA - +- `transaction_fee`: the fee paid by the transaction in FPA diff --git a/docs/docs/protocol-specs/gas-and-fees/specifying-gas-fee-info.md b/docs/docs/protocol-specs/gas-and-fees/specifying-gas-fee-info.md index 30a8a97d83d..936559390ec 100644 --- a/docs/docs/protocol-specs/gas-and-fees/specifying-gas-fee-info.md +++ b/docs/docs/protocol-specs/gas-and-fees/specifying-gas-fee-info.md @@ -40,8 +40,8 @@ GasSettings --> Gas GasSettings --> GasFees ``` -:::note -All fees are denominated in the [Fee Payment Asset (FPA)](./fee-payment-asset.md). +:::note +All fees are denominated in [Fee Juice](./fee-juice.md). ::: ## Gas Dimensions and Max Inclusion Fee @@ -57,7 +57,6 @@ Separately, every transaction has overhead costs associated with it, e.g. verify See the [Fee Schedule](./fee-schedule.md) for a detailed breakdown of costs associated with different actions. - ## `gasLimits` and `teardownGasLimits` Transactions can optionally have a "teardown" phase as part of their public execution, during which the "transaction fee" is available to public functions. This is useful to transactions/contracts that need to compute a "refund", e.g. contracts that facilitate [fee abstraction](./tx-setup-and-teardown.md). 
@@ -129,7 +128,7 @@ This is useful for imposing [validity conditions](./kernel-tracking.md#mempoolno ## `fee_payer` -The `fee_payer` is the entity that pays the transaction fee. +The `fee_payer` is the entity that pays the transaction fee. It is effectively set in private by the contract that calls `context.set_as_fee_payer()`. diff --git a/docs/docs/reference/sandbox_reference/cli_reference.md b/docs/docs/reference/sandbox_reference/cli_reference.md index 808e83ea9c2..1747303700b 100644 --- a/docs/docs/reference/sandbox_reference/cli_reference.md +++ b/docs/docs/reference/sandbox_reference/cli_reference.md @@ -301,18 +301,18 @@ Required options: - `-pk, --private-key `: The private key to use for deployment. - `--verifier `: Either 'mock' or 'real'. -### bridge-l1-gas -Mints L1 gas tokens and pushes them to L2. +### bridge-fee-juice +Bridges (and optionally mints) L1 Fee Juice and pushes them to L2. ``` -aztec bridge-l1-gas [options] +aztec bridge-fee-juice [options] ``` Required option: - `--l1-rpc-url `: URL of the Ethereum host. ### get-l1-balance -Gets the balance of gas tokens in L1 for a given Ethereum address. +Gets the balance of ETH or an ERC20 token on L1 for a given Ethereum address. 
``` aztec get-l1-balance [options] diff --git a/docs/docs/reference/sandbox_reference/sandbox-reference.md b/docs/docs/reference/sandbox_reference/sandbox-reference.md index 00496d916f5..0820b19833c 100644 --- a/docs/docs/reference/sandbox_reference/sandbox-reference.md +++ b/docs/docs/reference/sandbox_reference/sandbox-reference.md @@ -131,7 +131,7 @@ EasyPrivateVotingContractArtifact EcdsaAccountContractArtifact EscrowContractArtifact FPCContractArtifact -GasTokenContractArtifact +FeeJuiceContractArtifact ImportTestContractArtifact InclusionProofsContractArtifact LendingContractArtifact diff --git a/docs/docs/tutorials/contract_tutorials/write_accounts_contract.md b/docs/docs/tutorials/contract_tutorials/write_accounts_contract.md index 82e32e53173..a8c4219af96 100644 --- a/docs/docs/tutorials/contract_tutorials/write_accounts_contract.md +++ b/docs/docs/tutorials/contract_tutorials/write_accounts_contract.md @@ -51,7 +51,7 @@ For our account contract, we will take the hash of the action to authorize, requ ### Fee Abstraction -The `FeePayload`, being distinct from the `AppPayload`, allows for fee abstraction, meaning the account paying the fee for the transaction can be different than the account that is initiating the transaction. This is also useful for maintaining privacy, as fee payments on the network must be public. For example, Alice could pay a relayer transaction fees in private, and the relayer could pay the transaction fee in public. This also allows for accounts without a fee paying asset to use a non-fee paying asset to pay for fees, provided they can find a relayer willing to accept a non-fee paying asset as payment (or do it for free). +The `FeePayload`, being distinct from the `AppPayload`, allows for fee abstraction, meaning the account paying the fee for the transaction can be different than the account that is initiating the transaction. This is also useful for maintaining privacy, as fee payments on the network must be public. 
For example, Alice could pay a relayer transaction fees in private, and the relayer could pay the transaction fee in public. This also allows for accounts without Fee Juice to use another asset to pay for fees, provided they can find a relayer willing to accept the asset as payment (or do it for free). ### Nonce Abstraction @@ -113,4 +113,4 @@ Follow the token contract tutorial on the [next page](./token_contract.md) and l - [Schnorr signer account contract (GitHub link)](https://github.com/AztecProtocol/aztec-packages/tree/#include_aztec_version/noir-projects/noir-contracts/contracts/schnorr_account_contract) - [Account abstraction](../../aztec/concepts/accounts/index.md#what-is-account-abstraction) - [Authentication witness](../../aztec/concepts/accounts/authwit.md) -- [Fee abstraction](../../protocol-specs/gas-and-fees/tx-setup-and-teardown.md). \ No newline at end of file +- [Fee abstraction](../../protocol-specs/gas-and-fees/tx-setup-and-teardown.md). diff --git a/docs/netlify.toml b/docs/netlify.toml index 60322008128..9dc2eb71d85 100644 --- a/docs/netlify.toml +++ b/docs/netlify.toml @@ -56,4 +56,8 @@ [[redirects]] from = "/compliance" - to = "/" \ No newline at end of file + to = "/" + +[[redirects]] + from = "/protocol-specs/gas-and-fees/fee-payment-asset" + to = "/protocol-specs/gas-and-fees/fee-juice" diff --git a/docs/sidebars.js b/docs/sidebars.js index 0093b1ab1b0..b5b0a8fe385 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -199,7 +199,7 @@ export default { type: "category", link: { type: "doc", id: "protocol-specs/gas-and-fees/index" }, items: [ - "protocol-specs/gas-and-fees/fee-payment-asset", + "protocol-specs/gas-and-fees/fee-juice", "protocol-specs/gas-and-fees/specifying-gas-fee-info", "protocol-specs/gas-and-fees/tx-setup-and-teardown", "protocol-specs/gas-and-fees/kernel-tracking", diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 7d73451658a..7da3e120ff0 100644 --- 
a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -42,7 +42,7 @@ contract Rollup is Leonidas, IRollup { IInbox public immutable INBOX; IOutbox public immutable OUTBOX; uint256 public immutable VERSION; - IERC20 public immutable GAS_TOKEN; + IERC20 public immutable FEE_JUICE; IVerifier public verifier; @@ -66,13 +66,13 @@ contract Rollup is Leonidas, IRollup { constructor( IRegistry _registry, IAvailabilityOracle _availabilityOracle, - IERC20 _gasToken, + IERC20 _fpcJuice, bytes32 _vkTreeRoot ) Leonidas(msg.sender) { verifier = new MockVerifier(); REGISTRY = _registry; AVAILABILITY_ORACLE = _availabilityOracle; - GAS_TOKEN = _gasToken; + FEE_JUICE = _fpcJuice; INBOX = new Inbox(address(this), Constants.L1_TO_L2_MSG_SUBTREE_HEIGHT); OUTBOX = new Outbox(address(this)); vkTreeRoot = _vkTreeRoot; @@ -150,9 +150,9 @@ contract Rollup is Leonidas, IRollup { header.globalVariables.blockNumber, header.contentCommitment.outHash, l2ToL1TreeMinHeight ); - // pay the coinbase 1 gas token if it is not empty and header.totalFees is not zero + // pay the coinbase 1 Fee Juice if it is not empty and header.totalFees is not zero if (header.globalVariables.coinbase != address(0) && header.totalFees > 0) { - GAS_TOKEN.transfer(address(header.globalVariables.coinbase), header.totalFees); + FEE_JUICE.transfer(address(header.globalVariables.coinbase), header.totalFees); } emit L2BlockProcessed(header.globalVariables.blockNumber); diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index 8f8c1a8c45b..fa281063301 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -131,8 +131,8 @@ library Constants { 19310994760783330368337163480198602393920956587162708699802190083077641908361; uint256 internal constant REGISTERER_CONTRACT_ADDRESS = 2631409926445785927331173506476539962589925110142857699603561302478860342858; - uint256 internal constant 
GAS_TOKEN_ADDRESS = - 5232557136129983235483007223848855544006161252993804924983404332852577870185; + uint256 internal constant FEE_JUICE_ADDRESS = + 10248142274714515101077825679585135641434041564851038865006795089686437446849; uint256 internal constant AZTEC_ADDRESS_LENGTH = 1; uint256 internal constant GAS_FEES_LENGTH = 2; uint256 internal constant GAS_LENGTH = 2; @@ -184,13 +184,13 @@ library Constants { uint256 internal constant PRIVATE_VALIDATION_REQUESTS_LENGTH = 772; uint256 internal constant PUBLIC_VALIDATION_REQUESTS_LENGTH = 514; uint256 internal constant PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 3; - uint256 internal constant COMBINED_ACCUMULATED_DATA_LENGTH = 364; + uint256 internal constant COMBINED_ACCUMULATED_DATA_LENGTH = 388; uint256 internal constant COMBINED_CONSTANT_DATA_LENGTH = 43; uint256 internal constant PRIVATE_ACCUMULATED_DATA_LENGTH = 1336; uint256 internal constant PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 2167; - uint256 internal constant PUBLIC_ACCUMULATED_DATA_LENGTH = 1279; - uint256 internal constant PUBLIC_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 3565; - uint256 internal constant KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 417; + uint256 internal constant PUBLIC_ACCUMULATED_DATA_LENGTH = 1303; + uint256 internal constant PUBLIC_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 3613; + uint256 internal constant KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 441; uint256 internal constant CONSTANT_ROLLUP_DATA_LENGTH = 12; uint256 internal constant BASE_OR_MERGE_PUBLIC_INPUTS_LENGTH = 29; uint256 internal constant GET_NOTES_ORACLE_RETURN_LENGTH = 674; diff --git a/l1-contracts/terraform/main.tf b/l1-contracts/terraform/main.tf index b5d237b047e..4b7a1877459 100644 --- a/l1-contracts/terraform/main.tf +++ b/l1-contracts/terraform/main.tf @@ -57,20 +57,20 @@ output "outbox_contract_address" { } -variable "GAS_TOKEN_CONTRACT_ADDRESS" { +variable "FEE_JUICE_CONTRACT_ADDRESS" { type = string default = "" } -output "gas_token_contract_address" { - value = 
var.GAS_TOKEN_CONTRACT_ADDRESS +output "fee_juice_contract_address" { + value = var.FEE_JUICE_CONTRACT_ADDRESS } -variable "GAS_PORTAL_CONTRACT_ADDRESS" { +variable "FEE_JUICE_PORTAL_CONTRACT_ADDRESS" { type = string default = "" } -output "gas_portal_contract_address" { - value = var.GAS_PORTAL_CONTRACT_ADDRESS +output "FEE_JUICE_PORTAL_CONTRACT_ADDRESS" { + value = var.FEE_JUICE_PORTAL_CONTRACT_ADDRESS } diff --git a/l1-contracts/test/portals/GasPortal.sol b/l1-contracts/test/portals/FeeJuicePortal.sol similarity index 98% rename from l1-contracts/test/portals/GasPortal.sol rename to l1-contracts/test/portals/FeeJuicePortal.sol index 5f4eb2dda54..da5e40b5e68 100644 --- a/l1-contracts/test/portals/GasPortal.sol +++ b/l1-contracts/test/portals/FeeJuicePortal.sol @@ -12,7 +12,7 @@ import {Hash} from "../../src/core/libraries/Hash.sol"; // docs:end:content_hash_sol_import // docs:start:init -contract GasPortal { +contract FeeJuicePortal { using SafeERC20 for IERC20; IRegistry public registry; diff --git a/noir-projects/noir-contracts/Nargo.toml b/noir-projects/noir-contracts/Nargo.toml index bbd466a5585..7ab5fb07ec7 100644 --- a/noir-projects/noir-contracts/Nargo.toml +++ b/noir-projects/noir-contracts/Nargo.toml @@ -22,7 +22,7 @@ members = [ "contracts/easy_private_voting_contract", "contracts/ecdsa_account_contract", "contracts/escrow_contract", - "contracts/gas_token_contract", + "contracts/fee_juice_contract", "contracts/import_test_contract", "contracts/key_registry_contract", "contracts/inclusion_proofs_contract", diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/app_subscription_contract/Nargo.toml index cf460fd9497..956f7855db0 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/Nargo.toml +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/Nargo.toml @@ -7,5 +7,4 @@ type = "contract" [dependencies] aztec = { path = 
"../../../aztec-nr/aztec" } authwit = { path = "../../../aztec-nr/authwit" } -gas_token = { path = "../gas_token_contract" } -token = { path = "../token_contract" } \ No newline at end of file +token = { path = "../token_contract" } diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr index ab5054e44b0..9547ff2c0a2 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr @@ -13,7 +13,6 @@ contract AppSubscription { protocol_types::constants::MAX_FIELD_VALUE }; use authwit::{auth_witness::get_auth_witness, auth::assert_current_call_valid_authwit}; - use gas_token::GasToken; use token::Token; #[aztec(storage)] @@ -26,8 +25,7 @@ contract AppSubscription { subscription_recipient_address: SharedImmutable, subscription_price: SharedImmutable, subscriptions: Map>, - gas_token_address: SharedImmutable, - gas_token_limit_per_tx: SharedImmutable, + fee_juice_limit_per_tx: SharedImmutable, } global SUBSCRIPTION_DURATION_IN_BLOCKS = 5; @@ -50,8 +48,8 @@ contract AppSubscription { context.set_as_fee_payer(); - // TODO(palla/gas) Assert gas_token_limit_per_tx is less than this tx gas_limit - let _gas_limit = storage.gas_token_limit_per_tx.read_private(); + // TODO(palla/gas) Assert fee_juice_limit_per_tx is less than this tx gas_limit + let _gas_limit = storage.fee_juice_limit_per_tx.read_private(); context.end_setup(); @@ -67,15 +65,13 @@ contract AppSubscription { subscription_recipient_address: AztecAddress, subscription_token_address: AztecAddress, subscription_price: Field, - gas_token_address: AztecAddress, - gas_token_limit_per_tx: Field + fee_juice_limit_per_tx: Field ) { storage.target_address.initialize(target_address); storage.subscription_token_address.initialize(subscription_token_address); 
storage.subscription_recipient_address.initialize(subscription_recipient_address); storage.subscription_price.initialize(subscription_price); - storage.gas_token_address.initialize(gas_token_address); - storage.gas_token_limit_per_tx.initialize(gas_token_limit_per_tx); + storage.fee_juice_limit_per_tx.initialize(fee_juice_limit_per_tx); } #[aztec(public)] diff --git a/noir-projects/noir-contracts/contracts/gas_token_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/fee_juice_contract/Nargo.toml similarity index 72% rename from noir-projects/noir-contracts/contracts/gas_token_contract/Nargo.toml rename to noir-projects/noir-contracts/contracts/fee_juice_contract/Nargo.toml index 4ae5a749f3b..bab26316c4e 100644 --- a/noir-projects/noir-contracts/contracts/gas_token_contract/Nargo.toml +++ b/noir-projects/noir-contracts/contracts/fee_juice_contract/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "gas_token_contract" +name = "fee_juice_contract" authors = [""] compiler_version = ">=0.25.0" type = "contract" @@ -8,4 +8,4 @@ type = "contract" aztec = { path = "../../../aztec-nr/aztec" } authwit = { path = "../../../aztec-nr/authwit" } deployer = { path = "../contract_instance_deployer_contract" } -registerer = { path = "../contract_class_registerer_contract" } \ No newline at end of file +registerer = { path = "../contract_class_registerer_contract" } diff --git a/noir-projects/noir-contracts/contracts/gas_token_contract/src/lib.nr b/noir-projects/noir-contracts/contracts/fee_juice_contract/src/lib.nr similarity index 100% rename from noir-projects/noir-contracts/contracts/gas_token_contract/src/lib.nr rename to noir-projects/noir-contracts/contracts/fee_juice_contract/src/lib.nr diff --git a/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/fee_juice_contract/src/main.nr similarity index 92% rename from noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr rename to 
noir-projects/noir-contracts/contracts/fee_juice_contract/src/main.nr index 96b25801626..b753fa9cecd 100644 --- a/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/fee_juice_contract/src/main.nr @@ -1,6 +1,6 @@ mod lib; -contract GasToken { +contract FeeJuice { use dep::aztec::{ protocol_types::{ contract_class_id::ContractClassId, abis::function_selector::FunctionSelector, @@ -42,12 +42,12 @@ contract GasToken { public_bytecode_commitment ); assert( - instance.contract_class_id == contract_class_id, "Invalid contract class id computed for gas token" + instance.contract_class_id == contract_class_id, "Invalid contract class id computed for Fee Juice" ); // Increase self balance and set as fee payer, and end setup let deploy_fees = 20000000000; - GasToken::at(self)._increase_public_balance(self, deploy_fees).enqueue(&mut context); + FeeJuice::at(self)._increase_public_balance(self, deploy_fees).enqueue(&mut context); context.set_as_fee_payer(); context.end_setup(); @@ -66,11 +66,11 @@ contract GasToken { ).call(&mut context); // Enqueue call to set the portal address - GasToken::at(self).set_portal(portal_address).enqueue(&mut context); + FeeJuice::at(self).set_portal(portal_address).enqueue(&mut context); } // We purposefully not set this function as an initializer so we do not bind - // the contract to a specific L1 portal address, since the gas token address + // the contract to a specific L1 portal address, since the Fee Juice address // is a hardcoded constant in the rollup circuits. 
#[aztec(public)] fn set_portal(portal_address: EthAddress) { @@ -90,7 +90,7 @@ contract GasToken { // TODO(palla/gas) Emit an unencrypted log to announce which L1 to L2 message has been claimed // Otherwise, we cannot trace L1 deposits to their corresponding claims on L2 - GasToken::at(context.this_address())._increase_public_balance(to, amount).enqueue(&mut context); + FeeJuice::at(context.this_address())._increase_public_balance(to, amount).enqueue(&mut context); } #[aztec(public)] @@ -116,7 +116,7 @@ contract GasToken { // TODO(@just-mitch): remove this function before mainnet deployment // convenience function for testing - // the true canonical gas token contract will not have this function + // the true canonical Fee Juice contract will not have this function #[aztec(public)] fn mint_public(to: AztecAddress, amount: Field) { let amount = U128::from_integer(amount); diff --git a/noir-projects/noir-contracts/contracts/fpc_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/fpc_contract/Nargo.toml index 96bd8bacba7..b1f9481c44e 100644 --- a/noir-projects/noir-contracts/contracts/fpc_contract/Nargo.toml +++ b/noir-projects/noir-contracts/contracts/fpc_contract/Nargo.toml @@ -8,4 +8,3 @@ type = "contract" aztec = { path = "../../../aztec-nr/aztec" } authwit = { path = "../../../aztec-nr/authwit" } token = { path = "../token_contract" } -gas_token = { path = "../gas_token_contract" } diff --git a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr index 68be91d4dde..2aab1481669 100644 --- a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr @@ -6,20 +6,17 @@ contract FPC { state_vars::SharedImmutable, context::gas::GasOpts }; use dep::token::Token; - use dep::gas_token::GasToken; use crate::lib::compute_rebate; #[aztec(storage)] struct Storage { other_asset: SharedImmutable, - gas_token_address: 
SharedImmutable, } #[aztec(public)] #[aztec(initializer)] - fn constructor(other_asset: AztecAddress, gas_token_address: AztecAddress) { + fn constructor(other_asset: AztecAddress) { storage.other_asset.initialize(other_asset); - storage.gas_token_address.initialize(gas_token_address); } #[aztec(private)] diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr index 70b9cd5438b..005df73f7d0 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr @@ -25,7 +25,7 @@ use dep::types::{ MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_PUBLIC_DATA_READS_PER_TX, MAX_NULLIFIERS_PER_TX, MAX_L2_TO_L1_MSGS_PER_TX, NULLIFIER_SUBTREE_HEIGHT, NULLIFIER_TREE_HEIGHT, PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, PUBLIC_DATA_SUBTREE_HEIGHT, - PROTOCOL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, ARCHIVE_HEIGHT, GAS_TOKEN_ADDRESS, + PROTOCOL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, ARCHIVE_HEIGHT, FEE_JUICE_ADDRESS, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, PUBLIC_KERNEL_TAIL_INDEX, PRIVATE_KERNEL_EMPTY_INDEX, PRIVATE_KERNEL_TAIL_INDEX }, @@ -49,7 +49,7 @@ struct BaseRollupInputs { start: PartialStateReference, state_diff_hints: StateDiffHints, - fee_payer_gas_token_balance_read_hint: PublicDataHint, + fee_payer_fee_juice_balance_read_hint: PublicDataHint, // TODO: The following 6 values are eventually going to be nuked from here. See discussion: // https://aztecprotocol.slack.com/archives/C060PU5R327/p1701965354071269 @@ -264,10 +264,10 @@ impl BaseRollupInputs { all_update_requests } - // Deducts the tx_fee from the GasToken balance of the fee_payer. If there is already a PublicDataUpdateRequest + // Deducts the tx_fee from the FeeJuice balance of the fee_payer. 
If there is already a PublicDataUpdateRequest // in this tx for their balance (because they issued a 'claim' to increase their balance by bridging from L1), // update it by subtracting the tx_fee. Otherwise, build a new PublicDataUpdateRequest to subtract the tx_fee - // from the balance of the fee_payer, using the fee_payer_gas_token_balance_read_hint to read the current balance. + // from the balance of the fee_payer, using the fee_payer_fee_juice_balance_read_hint to read the current balance. // Returns the data update request that subtracts the tx_fee from the fee_payer's balance, and the index where it // should be inserted in the public data update requests array. fn build_or_patch_payment_update_request(self, tx_fee: Field) -> (PublicDataUpdateRequest, u32) { @@ -275,15 +275,15 @@ impl BaseRollupInputs { // TODO(@spalladino) Eventually remove the is_zero condition as we should always charge fees to every tx if !fee_payer.is_zero() { - let read_hint = self.fee_payer_gas_token_balance_read_hint; - let leaf_slot = compute_fee_payer_gas_token_balance_leaf_slot(fee_payer); + let read_hint = self.fee_payer_fee_juice_balance_read_hint; + let leaf_slot = compute_fee_payer_fee_juice_balance_leaf_slot(fee_payer); if read_hint.leaf_slot == 0 { // Is there a balance update already in this tx? If so, update it and return its index. 
- let existing_update_index = self.find_fee_payer_gas_token_update_index(leaf_slot); + let existing_update_index = self.find_fee_payer_fee_juice_update_index(leaf_slot); let existing_update = self.kernel_data.public_inputs.end.public_data_update_requests[existing_update_index]; assert( - existing_update.leaf_slot == leaf_slot, "Wrong leaf slot for gas token balance update request" + existing_update.leaf_slot == leaf_slot, "Wrong leaf slot for Fee Juice balance update request" ); assert( !existing_update.new_value.lt(tx_fee), "Not enough balance for fee payer after claim to pay for transaction" @@ -297,7 +297,7 @@ impl BaseRollupInputs { read_hint.validate(self.start.public_data_tree.root); let balance = read_hint.value; - assert(read_hint.leaf_slot == leaf_slot, "Wrong leaf slot for gas token balance read hint"); + assert(read_hint.leaf_slot == leaf_slot, "Wrong leaf slot for Fee Juice balance read hint"); assert(!balance.lt(tx_fee), "Not enough balance for fee payer to pay for transaction"); let new_value = compute_public_data_tree_value(balance - tx_fee); @@ -332,7 +332,7 @@ impl BaseRollupInputs { ); } - unconstrained fn find_fee_payer_gas_token_update_index(self, leaf_slot: Field) -> u32 { + unconstrained fn find_fee_payer_fee_juice_update_index(self, leaf_slot: Field) -> u32 { let mut update_index = MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX + 1; for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX { if self.kernel_data.public_inputs.end.public_data_update_requests[i].leaf_slot == leaf_slot { @@ -340,7 +340,7 @@ impl BaseRollupInputs { } } assert( - update_index < MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, "Could not find fee payer gas token update request" + update_index < MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, "Could not find fee payer Fee Juice update request" ); update_index } @@ -418,11 +418,11 @@ fn insert_public_data_update_requests( ) } -fn compute_fee_payer_gas_token_balance_leaf_slot(fee_payer: AztecAddress) -> Field { - let 
balances_slot_in_gas_token_contract = 1; - let gas_token = AztecAddress::from_field(GAS_TOKEN_ADDRESS); - let fee_payer_balance_slot_in_gas_token_contract = derive_storage_slot_in_map(balances_slot_in_gas_token_contract, fee_payer); - compute_public_data_tree_index(gas_token, fee_payer_balance_slot_in_gas_token_contract) +fn compute_fee_payer_fee_juice_balance_leaf_slot(fee_payer: AztecAddress) -> Field { + let balances_slot_in_fee_juice_contract = 1; + let fee_juice = AztecAddress::from_field(FEE_JUICE_ADDRESS); + let fee_payer_balance_slot_in_fee_juice_contract = derive_storage_slot_in_map(balances_slot_in_fee_juice_contract, fee_payer); + compute_public_data_tree_index(fee_juice, fee_payer_balance_slot_in_fee_juice_contract) } #[test] @@ -458,7 +458,7 @@ mod tests { }, base::{ state_diff_hints::StateDiffHints, - base_rollup_inputs::{BaseRollupInputs, compute_fee_payer_gas_token_balance_leaf_slot, ALLOWED_PREVIOUS_CIRCUITS} + base_rollup_inputs::{BaseRollupInputs, compute_fee_payer_fee_juice_balance_leaf_slot, ALLOWED_PREVIOUS_CIRCUITS} }, components::{TX_EFFECTS_HASH_INPUT_FIELDS, compute_kernel_out_hash} }; @@ -605,9 +605,9 @@ mod tests { final_public_data_writes: BoundedVec<(u32, PublicDataTreeLeaf), MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX>, nullifiers: BoundedVec, constants: ConstantRollupData, - // Index of the item in the pre_existing_public_data array that contains the fee payer's gas token balance. + // Index of the item in the pre_existing_public_data array that contains the fee payer's Fee Juice balance. // Used for building the public data hint read for the payment update request. If set to none, no hint is built. 
- fee_payer_gas_token_balance_pre_existing_public_data_index: Option + fee_payer_fee_juice_balance_pre_existing_public_data_index: Option } fn test_compute_empty_root(size: [Field; N]) -> Field { @@ -627,11 +627,11 @@ mod tests { inputs } - fn build_fee_payer_gas_token_balance_read_hint( + fn build_fee_payer_fee_juice_balance_read_hint( self, start_public_data_tree: NonEmptyMerkleTree ) -> PublicDataHint { - self.fee_payer_gas_token_balance_pre_existing_public_data_index.map_or( + self.fee_payer_fee_juice_balance_pre_existing_public_data_index.map_or( PublicDataHint::empty(), |leaf_index_u32: u32| { let leaf_index = leaf_index_u32 as Field; @@ -759,7 +759,7 @@ mod tests { next_available_leaf_index: start_public_data_tree.get_next_available_index() as u32 }; - let fee_payer_gas_token_balance_read_hint = self.build_fee_payer_gas_token_balance_read_hint(start_public_data_tree); + let fee_payer_fee_juice_balance_read_hint = self.build_fee_payer_fee_juice_balance_read_hint(start_public_data_tree); let start_archive = NonEmptyMerkleTree::new( self.pre_existing_blocks, @@ -831,7 +831,7 @@ mod tests { low_public_data_writes_witnesses, archive_root_membership_witness: MembershipWitness { leaf_index: 0, sibling_path: start_archive.get_sibling_path(0) }, constants: self.constants, - fee_payer_gas_token_balance_read_hint + fee_payer_fee_juice_balance_read_hint } } @@ -863,7 +863,7 @@ mod tests { final_public_data_writes: BoundedVec::new(), nullifiers: BoundedVec::new(), constants: ConstantRollupData::empty(), - fee_payer_gas_token_balance_pre_existing_public_data_index: Option::none() + fee_payer_fee_juice_balance_pre_existing_public_data_index: Option::none() } } } @@ -1281,7 +1281,7 @@ mod tests { #[test] unconstrained fn updates_fee_payer_balance_with_new_data_write() { let fee_payer = AztecAddress::from_field(0x1234); - let balance_slot = compute_fee_payer_gas_token_balance_leaf_slot(fee_payer); + let balance_slot = compute_fee_payer_fee_juice_balance_leaf_slot(fee_payer); 
let initial_balance = 300_000; let tx_fee = 100_000; let expected_balance = 200_000; @@ -1298,7 +1298,7 @@ mod tests { next_slot: 0, next_index: 0, }; - builder.fee_payer_gas_token_balance_pre_existing_public_data_index = Option::some(0); + builder.fee_payer_fee_juice_balance_pre_existing_public_data_index = Option::some(0); // Set values for computing exact tx_fee builder.kernel_data.tx_context.gas_settings.inclusion_fee = tx_fee; @@ -1323,7 +1323,7 @@ mod tests { #[test] unconstrained fn updates_fee_payer_balance_in_existing_data_write() { let fee_payer = AztecAddress::from_field(0x1234); - let balance_slot = compute_fee_payer_gas_token_balance_leaf_slot(fee_payer); + let balance_slot = compute_fee_payer_fee_juice_balance_leaf_slot(fee_payer); let initial_balance = 100_000; let after_claim_balance = 300_000; let tx_fee = 100_000; @@ -1369,7 +1369,7 @@ mod tests { #[test(should_fail_with="Not enough balance for fee payer to pay for transaction")] unconstrained fn fails_to_update_fee_payer_balance_if_not_enough_funds() { let fee_payer = AztecAddress::from_field(0x1234); - let balance_slot = compute_fee_payer_gas_token_balance_leaf_slot(fee_payer); + let balance_slot = compute_fee_payer_fee_juice_balance_leaf_slot(fee_payer); // Set low initial balance so it fails! 
let initial_balance = 10_000; let tx_fee = 100_000; @@ -1386,7 +1386,7 @@ mod tests { next_slot: 0, next_index: 0, }; - builder.fee_payer_gas_token_balance_pre_existing_public_data_index = Option::some(0); + builder.fee_payer_fee_juice_balance_pre_existing_public_data_index = Option::some(0); // Set values for computing exact tx_fee builder.kernel_data.tx_context.gas_settings.inclusion_fee = tx_fee; @@ -1397,10 +1397,10 @@ mod tests { builder.fails(); } - #[test(should_fail_with="Wrong leaf slot for gas token balance read hint")] + #[test(should_fail_with="Wrong leaf slot for Fee Juice balance read hint")] unconstrained fn fails_to_update_fee_payer_balance_if_wrong_read_hint() { let fee_payer = AztecAddress::from_field(0x1234); - let balance_slot = compute_fee_payer_gas_token_balance_leaf_slot(fee_payer); + let balance_slot = compute_fee_payer_fee_juice_balance_leaf_slot(fee_payer); let initial_balance = 300_000; let expected_balance = 200_000; let tx_fee = 100_000; @@ -1426,7 +1426,7 @@ mod tests { }; // But point the read hint to the wrong one! 
- builder.fee_payer_gas_token_balance_pre_existing_public_data_index = Option::some(1); + builder.fee_payer_fee_juice_balance_pre_existing_public_data_index = Option::some(1); // Set values for computing exact tx_fee builder.kernel_data.tx_context.gas_settings.inclusion_fee = tx_fee; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index f3cb9300fc8..a08e052ebc3 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -179,7 +179,7 @@ global CANONICAL_KEY_REGISTRY_ADDRESS = 0x1dc0848be99ba522c157b46ab5ed64d86703a7 global CANONICAL_AUTH_REGISTRY_ADDRESS = 0x24877c50868f86712240eb535d90d1c97403d074805dd3758c3aecb02958f8d4; global DEPLOYER_CONTRACT_ADDRESS = 0x2ab1a2bd6d07d8d61ea56d85861446349e52c6b7c0612b702cb1e6db6ad0b089; global REGISTERER_CONTRACT_ADDRESS = 0x05d15342d76e46e5be07d3cda0d753158431cdc5e39d29ce4e8fe1f5c070564a; -global GAS_TOKEN_ADDRESS = 0x0b9185bb21ae5ec07011d459dca15a226712e49d9f6238ba0799bd6c2768a169; +global FEE_JUICE_ADDRESS = 0x16a83e3395bc921a2441db55dce24f0e0932636901a2e676fa68b9b2b9a644c1; // LENGTH OF STRUCTS SERIALIZED TO FIELDS global AZTEC_ADDRESS_LENGTH = 1; diff --git a/yarn-project/aztec-faucet/terraform/main.tf b/yarn-project/aztec-faucet/terraform/main.tf index ab2e8630665..691401b4ef1 100644 --- a/yarn-project/aztec-faucet/terraform/main.tf +++ b/yarn-project/aztec-faucet/terraform/main.tf @@ -144,7 +144,7 @@ resource "aws_ecs_task_definition" "aztec-faucet" { }, { name = "EXTRA_ASSETS", - value = "fee_juice:${var.GAS_TOKEN_CONTRACT_ADDRESS},dev_coin:${var.DEV_COIN_CONTRACT_ADDRESS}" + value = "fee_juice:${var.FEE_JUICE_CONTRACT_ADDRESS},dev_coin:${var.DEV_COIN_CONTRACT_ADDRESS}" }, { name = "EXTRA_ASSET_AMOUNT", diff --git a/yarn-project/aztec-faucet/terraform/variables.tf b/yarn-project/aztec-faucet/terraform/variables.tf index 
0151c28bdf8..f1d2fbf5c86 100644 --- a/yarn-project/aztec-faucet/terraform/variables.tf +++ b/yarn-project/aztec-faucet/terraform/variables.tf @@ -31,7 +31,7 @@ variable "FAUCET_ACCOUNT_INDEX" { type = string } -variable "GAS_TOKEN_CONTRACT_ADDRESS" { +variable "FEE_JUICE_CONTRACT_ADDRESS" { type = string } diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 1cb78ba9b89..8fa83268f59 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -55,7 +55,7 @@ import { createStore, openTmpStore } from '@aztec/kv-store/utils'; import { SHA256Trunc, StandardTree, UnbalancedTree } from '@aztec/merkle-tree'; import { AztecKVTxPool, type P2P, createP2PClient } from '@aztec/p2p'; import { getCanonicalClassRegisterer } from '@aztec/protocol-contracts/class-registerer'; -import { getCanonicalGasToken } from '@aztec/protocol-contracts/gas-token'; +import { getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; import { getCanonicalInstanceDeployer } from '@aztec/protocol-contracts/instance-deployer'; import { getCanonicalKeyRegistryAddress } from '@aztec/protocol-contracts/key-registry'; import { getCanonicalMultiCallEntrypointAddress } from '@aztec/protocol-contracts/multi-call-entrypoint'; @@ -787,7 +787,7 @@ export class AztecNodeService implements AztecNode { public getProtocolContractAddresses(): Promise { return Promise.resolve({ classRegisterer: getCanonicalClassRegisterer().address, - gasToken: getCanonicalGasToken().address, + feeJuice: getCanonicalFeeJuice().address, instanceDeployer: getCanonicalInstanceDeployer().address, keyRegistry: getCanonicalKeyRegistryAddress(), multiCallEntrypoint: getCanonicalMultiCallEntrypointAddress(), diff --git a/yarn-project/aztec.js/src/api/fee.ts b/yarn-project/aztec.js/src/api/fee.ts index 13dfffad38d..19d77c5c0fb 100644 --- a/yarn-project/aztec.js/src/api/fee.ts +++ 
b/yarn-project/aztec.js/src/api/fee.ts @@ -1,6 +1,6 @@ export type { FeePaymentMethod } from '../fee/fee_payment_method.js'; -export { NativeFeePaymentMethod } from '../fee/native_fee_payment_method.js'; +export { FeeJuicePaymentMethod } from '../fee/fee_juice_payment_method.js'; export { PrivateFeePaymentMethod } from '../fee/private_fee_payment_method.js'; export { PublicFeePaymentMethod } from '../fee/public_fee_payment_method.js'; -export { NativeFeePaymentMethodWithClaim } from '../fee/native_fee_payment_method_with_claim.js'; +export { FeeJuicePaymentMethodWithClaim } from '../fee/fee_juice_payment_method_with_claim.js'; export { NoFeePaymentMethod } from '../fee/no_fee_payment_method.js'; diff --git a/yarn-project/aztec.js/src/contract/contract.test.ts b/yarn-project/aztec.js/src/contract/contract.test.ts index f6801fe4ec2..49aad6296de 100644 --- a/yarn-project/aztec.js/src/contract/contract.test.ts +++ b/yarn-project/aztec.js/src/contract/contract.test.ts @@ -27,8 +27,8 @@ describe('Contract Class', () => { registryAddress: EthAddress.random(), inboxAddress: EthAddress.random(), outboxAddress: EthAddress.random(), - gasTokenAddress: EthAddress.random(), - gasPortalAddress: EthAddress.random(), + feeJuiceAddress: EthAddress.random(), + feeJuicePortalAddress: EthAddress.random(), }; const mockNodeInfo: NodeInfo = { nodeVersion: 'vx.x.x', @@ -37,7 +37,7 @@ describe('Contract Class', () => { l1ContractAddresses: l1Addresses, protocolContractAddresses: { classRegisterer: AztecAddress.random(), - gasToken: AztecAddress.random(), + feeJuice: AztecAddress.random(), instanceDeployer: AztecAddress.random(), keyRegistry: AztecAddress.random(), multiCallEntrypoint: AztecAddress.random(), diff --git a/yarn-project/aztec.js/src/fee/native_fee_payment_method.ts b/yarn-project/aztec.js/src/fee/fee_juice_payment_method.ts similarity index 67% rename from yarn-project/aztec.js/src/fee/native_fee_payment_method.ts rename to 
yarn-project/aztec.js/src/fee/fee_juice_payment_method.ts index 49a1567482f..d8d24f31b70 100644 --- a/yarn-project/aztec.js/src/fee/native_fee_payment_method.ts +++ b/yarn-project/aztec.js/src/fee/fee_juice_payment_method.ts @@ -1,17 +1,17 @@ import { type FunctionCall } from '@aztec/circuit-types'; import { type AztecAddress } from '@aztec/circuits.js'; -import { GasTokenAddress } from '@aztec/protocol-contracts/gas-token'; +import { FeeJuiceAddress } from '@aztec/protocol-contracts/fee-juice'; import { type FeePaymentMethod } from './fee_payment_method.js'; /** - * Pay fee directly in the native gas token. + * Pay fee directly in the native Fee Juice. */ -export class NativeFeePaymentMethod implements FeePaymentMethod { +export class FeeJuicePaymentMethod implements FeePaymentMethod { constructor(protected sender: AztecAddress) {} getAsset() { - return GasTokenAddress; + return FeeJuiceAddress; } getFunctionCalls(): Promise { diff --git a/yarn-project/aztec.js/src/fee/native_fee_payment_method_with_claim.ts b/yarn-project/aztec.js/src/fee/fee_juice_payment_method_with_claim.ts similarity index 66% rename from yarn-project/aztec.js/src/fee/native_fee_payment_method_with_claim.ts rename to yarn-project/aztec.js/src/fee/fee_juice_payment_method_with_claim.ts index 39e82d28663..c7d4adf18c5 100644 --- a/yarn-project/aztec.js/src/fee/native_fee_payment_method_with_claim.ts +++ b/yarn-project/aztec.js/src/fee/fee_juice_payment_method_with_claim.ts @@ -1,26 +1,26 @@ import { type FunctionCall } from '@aztec/circuit-types'; import { type AztecAddress, Fr, FunctionSelector } from '@aztec/circuits.js'; import { FunctionType } from '@aztec/foundation/abi'; -import { GasTokenAddress } from '@aztec/protocol-contracts/gas-token'; +import { FeeJuiceAddress } from '@aztec/protocol-contracts/fee-juice'; -import { NativeFeePaymentMethod } from './native_fee_payment_method.js'; +import { FeeJuicePaymentMethod } from './fee_juice_payment_method.js'; /** - * Pay fee directly with 
native gas token claimed on the same tx. + * Pay fee directly with native Fee Juice claimed on the same tx. */ -export class NativeFeePaymentMethodWithClaim extends NativeFeePaymentMethod { +export class FeeJuicePaymentMethodWithClaim extends FeeJuicePaymentMethod { constructor(sender: AztecAddress, private claimAmount: bigint | Fr, private claimSecret: Fr) { super(sender); } /** - * Creates a function call to pay the fee in gas token. + * Creates a function call to pay the fee in Fee Juice. * @returns A function call */ override getFunctionCalls(): Promise { return Promise.resolve([ { - to: GasTokenAddress, + to: FeeJuiceAddress, name: 'claim', selector: FunctionSelector.fromSignature('claim((Field),Field,Field)'), isStatic: false, diff --git a/yarn-project/aztec/src/cli/texts.ts b/yarn-project/aztec/src/cli/texts.ts index fe1903dcd8b..d0fcc1896a0 100644 --- a/yarn-project/aztec/src/cli/texts.ts +++ b/yarn-project/aztec/src/cli/texts.ts @@ -5,8 +5,8 @@ const contractAddresses = 'inboxAddress:INBOX_CONTRACT_ADDRESS - string - The deployed L1 inbox contract address.\n' + 'outboxAddress:OUTBOX_CONTRACT_ADDRESS - string - The deployed L1 outbox contract address.\n' + 'availabilityOracleAddress:AVAILABILITY_ORACLE_CONTRACT_ADDRESS - string - The deployed L1 availability oracle contract address.\n' + - 'gasTokenAddress:GAS_TOKEN_CONTRACT_ADDRESS - string - The deployed L1 gas token contract address.\n' + - 'gasPortalAddress:GAS_PORTAL_CONTRACT_ADDRESS - string - The deployed L1 gas portal contract address.\n'; + 'feeJuiceAddress:FEE_JUICE_TOKEN_CONTRACT_ADDRESS - string - The deployed L1 Fee Juice contract address.\n' + + 'feeJuicePortalAddress:FEE_JUICE_PORTAL_CONTRACT_ADDRESS - string - The deployed L1 gas portal contract address.\n'; const p2pOptions = 'p2pBlockCheckIntervalMS:P2P_BLOCK_CHECK_INTERVAL_MS - number - The frequency in which to check for blocks. 
Default: 100\n' + 'p2pPeerCheckIntervalMS:P2P_PEER_CHECK_INTERVAL_MS - number - The frequency in which to check for peers. Default: 1000\n' + diff --git a/yarn-project/aztec/src/sandbox.ts b/yarn-project/aztec/src/sandbox.ts index 57c7a25f3e5..8d843b7b4d2 100644 --- a/yarn-project/aztec/src/sandbox.ts +++ b/yarn-project/aztec/src/sandbox.ts @@ -3,7 +3,7 @@ import { type AztecNodeConfig, AztecNodeService, getConfigEnvVars } from '@aztec import { SignerlessWallet } from '@aztec/aztec.js'; import { DefaultMultiCallEntrypoint } from '@aztec/aztec.js/entrypoint'; import { type AztecNode } from '@aztec/circuit-types'; -import { deployCanonicalAuthRegistry, deployCanonicalKeyRegistry, deployCanonicalL2GasToken } from '@aztec/cli/misc'; +import { deployCanonicalAuthRegistry, deployCanonicalKeyRegistry, deployCanonicalL2FeeJuice } from '@aztec/cli/misc'; import { type DeployL1Contracts, type L1ContractArtifactsForDeployment, @@ -16,8 +16,8 @@ import { retryUntil } from '@aztec/foundation/retry'; import { AvailabilityOracleAbi, AvailabilityOracleBytecode, - GasPortalAbi, - GasPortalBytecode, + FeeJuicePortalAbi, + FeeJuicePortalBytecode, InboxAbi, InboxBytecode, OutboxAbi, @@ -30,7 +30,7 @@ import { RollupBytecode, } from '@aztec/l1-artifacts'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; -import { GasTokenAddress } from '@aztec/protocol-contracts/gas-token'; +import { FeeJuiceAddress } from '@aztec/protocol-contracts/fee-juice'; import { type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { @@ -112,19 +112,19 @@ export async function deployContractsToL1( contractAbi: RollupAbi, contractBytecode: RollupBytecode, }, - gasToken: { + feeJuice: { contractAbi: PortalERC20Abi, contractBytecode: PortalERC20Bytecode, }, - gasPortal: { - contractAbi: GasPortalAbi, - contractBytecode: GasPortalBytecode, + feeJuicePortal: { + contractAbi: FeeJuicePortalAbi, + 
contractBytecode: FeeJuicePortalBytecode, }, }; const l1Contracts = await waitThenDeploy(aztecNodeConfig, () => deployL1Contracts(aztecNodeConfig.rpcUrl, hdAccount, localAnvil, contractDeployLogger, l1Artifacts, { - l2GasTokenAddress: GasTokenAddress, + l2FeeJuiceAddress: FeeJuiceAddress, vkTreeRoot: getVKTreeRoot(), }), ); @@ -171,9 +171,9 @@ export async function createSandbox(config: Partial = {}) { ); if (config.enableGas) { - await deployCanonicalL2GasToken( + await deployCanonicalL2FeeJuice( new SignerlessWallet(pxe, new DefaultMultiCallEntrypoint(aztecNodeConfig.l1ChainId, aztecNodeConfig.version)), - aztecNodeConfig.l1Contracts.gasPortalAddress, + aztecNodeConfig.l1Contracts.feeJuicePortalAddress, ); } diff --git a/yarn-project/aztec/terraform/node/main.tf b/yarn-project/aztec/terraform/node/main.tf index d4d8ffd881e..b3c4aabc906 100644 --- a/yarn-project/aztec/terraform/node/main.tf +++ b/yarn-project/aztec/terraform/node/main.tf @@ -257,12 +257,12 @@ resource "aws_ecs_task_definition" "aztec-node" { value = data.terraform_remote_state.l1_contracts.outputs.availability_oracle_contract_address }, { - name = "GAS_TOKEN_CONTRACT_ADDRESS" - value = data.terraform_remote_state.l1_contracts.outputs.gas_token_contract_address + name = "FEE_JUICE_CONTRACT_ADDRESS" + value = data.terraform_remote_state.l1_contracts.outputs.fee_juice_contract_address }, { - name = "GAS_PORTAL_CONTRACT_ADDRESS" - value = data.terraform_remote_state.l1_contracts.outputs.gas_portal_contract_address + name = "FEE_JUICE_PORTAL_CONTRACT_ADDRESS" + value = data.terraform_remote_state.l1_contracts.outputs.FEE_JUICE_PORTAL_CONTRACT_ADDRESS }, { name = "API_KEY" diff --git a/yarn-project/bot/src/bot.ts b/yarn-project/bot/src/bot.ts index ca0543ebccc..a79bc342627 100644 --- a/yarn-project/bot/src/bot.ts +++ b/yarn-project/bot/src/bot.ts @@ -1,7 +1,7 @@ import { type AztecAddress, BatchCall, - NativeFeePaymentMethod, + FeeJuicePaymentMethod, NoFeePaymentMethod, type SendMethodOptions, type 
Wallet, @@ -51,7 +51,7 @@ export class Bot { ), ]; - const paymentMethod = feePaymentMethod === 'native' ? new NativeFeePaymentMethod(sender) : new NoFeePaymentMethod(); + const paymentMethod = feePaymentMethod === 'native' ? new FeeJuicePaymentMethod(sender) : new NoFeePaymentMethod(); const gasSettings = GasSettings.default(); const opts: SendMethodOptions = { estimateGas: true, fee: { paymentMethod, gasSettings } }; diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index e3efc23cd07..8ba3d76e437 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -116,7 +116,7 @@ export const CANONICAL_AUTH_REGISTRY_ADDRESS = export const DEPLOYER_CONTRACT_ADDRESS = 19310994760783330368337163480198602393920956587162708699802190083077641908361n; export const REGISTERER_CONTRACT_ADDRESS = 2631409926445785927331173506476539962589925110142857699603561302478860342858n; -export const GAS_TOKEN_ADDRESS = 5232557136129983235483007223848855544006161252993804924983404332852577870185n; +export const FEE_JUICE_ADDRESS = 10248142274714515101077825679585135641434041564851038865006795089686437446849n; export const AZTEC_ADDRESS_LENGTH = 1; export const GAS_FEES_LENGTH = 2; export const GAS_LENGTH = 2; @@ -168,13 +168,13 @@ export const PUBLIC_DATA_READ_LENGTH = 2; export const PRIVATE_VALIDATION_REQUESTS_LENGTH = 772; export const PUBLIC_VALIDATION_REQUESTS_LENGTH = 514; export const PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 3; -export const COMBINED_ACCUMULATED_DATA_LENGTH = 364; +export const COMBINED_ACCUMULATED_DATA_LENGTH = 388; export const COMBINED_CONSTANT_DATA_LENGTH = 43; export const PRIVATE_ACCUMULATED_DATA_LENGTH = 1336; export const PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 2167; -export const PUBLIC_ACCUMULATED_DATA_LENGTH = 1279; -export const PUBLIC_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 3565; -export const KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 417; +export const 
PUBLIC_ACCUMULATED_DATA_LENGTH = 1303; +export const PUBLIC_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 3613; +export const KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 441; export const CONSTANT_ROLLUP_DATA_LENGTH = 12; export const BASE_OR_MERGE_PUBLIC_INPUTS_LENGTH = 29; export const GET_NOTES_ORACLE_RETURN_LENGTH = 674; diff --git a/yarn-project/circuits.js/src/contract/artifact_hash.ts b/yarn-project/circuits.js/src/contract/artifact_hash.ts index cbaadfdc7d9..a5d6f7673af 100644 --- a/yarn-project/circuits.js/src/contract/artifact_hash.ts +++ b/yarn-project/circuits.js/src/contract/artifact_hash.ts @@ -69,7 +69,7 @@ export function computeArtifactMetadataHash(artifact: ContractArtifact) { const exceptions: string[] = [ 'AuthRegistry', 'KeyRegistry', - 'GasToken', + 'FeeJuice', 'ContractInstanceDeployer', 'ContractClassRegisterer', ]; diff --git a/yarn-project/circuits.js/src/structs/kernel/kernel_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/kernel_circuit_public_inputs.ts index e729c2accd9..3c001d19860 100644 --- a/yarn-project/circuits.js/src/structs/kernel/kernel_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/kernel_circuit_public_inputs.ts @@ -46,7 +46,7 @@ export class KernelCircuitPublicInputs { * Computes the transaction fee for the transaction. * @param gasFees - Gas fees for the block. We cannot source this from the constants * since they may be unset if this comes from a private kernel directly. - * @returns The amount in gas tokens to pay for this tx. + * @returns The amount in Fee Juice to pay for this tx. * @remarks It is safe to compute this method in typescript because we compute the * transaction_fee ourselves in the base rollup. This value must match the value * computed in the base rollup, otherwise the content commitment of the block will be invalid. 
diff --git a/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts index 86ef8e72928..270e21727ef 100644 --- a/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts @@ -140,7 +140,7 @@ export class PublicCircuitPublicInputs { /** How much gas was left after execution. */ public endGasLeft: Gas, - /** Transaction fee in the fee-payment asset. Zero in all phases except teardown. */ + /** Transaction fee in fee juice. Zero in all phases except teardown. */ public transactionFee: Fr, ) {} diff --git a/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts b/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts index 4e7ef406af3..e7a9835974b 100644 --- a/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts +++ b/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts @@ -75,7 +75,7 @@ export class BaseRollupInputs { /** Hints used while proving state diff validity. */ public stateDiffHints: StateDiffHints, /** Public data read hint for accessing the balance of the fee payer. */ - public feePayerGasTokenBalanceReadHint: PublicDataHint, + public feePayerFeeJuiceBalanceReadHint: PublicDataHint, /** * The public data writes to be inserted in the tree, sorted high slot to low slot. 
@@ -122,7 +122,7 @@ export class BaseRollupInputs { fields.kernelData, fields.start, fields.stateDiffHints, - fields.feePayerGasTokenBalanceReadHint, + fields.feePayerFeeJuiceBalanceReadHint, fields.sortedPublicDataWrites, fields.sortedPublicDataWritesIndexes, fields.lowPublicDataWritesPreimages, diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index dc713a88f37..f855be3294f 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -1134,7 +1134,7 @@ export function makeBaseRollupInputs(seed = 0): BaseRollupInputs { const constants = makeConstantBaseRollupData(0x100); - const feePayerGasTokenBalanceReadHint = PublicDataHint.empty(); + const feePayerFeeJuiceBalanceReadHint = PublicDataHint.empty(); return BaseRollupInputs.from({ kernelData, @@ -1146,7 +1146,7 @@ export function makeBaseRollupInputs(seed = 0): BaseRollupInputs { lowPublicDataWritesMembershipWitnesses, archiveRootMembershipWitness, constants, - feePayerGasTokenBalanceReadHint, + feePayerFeeJuiceBalanceReadHint: feePayerFeeJuiceBalanceReadHint, }); } diff --git a/yarn-project/cli-wallet/src/utils/fees.ts b/yarn-project/cli-wallet/src/utils/fees.ts index 72dadb2ef42..a3836151021 100644 --- a/yarn-project/cli-wallet/src/utils/fees.ts +++ b/yarn-project/cli-wallet/src/utils/fees.ts @@ -1,8 +1,8 @@ import { type AccountWallet, + FeeJuicePaymentMethod, + FeeJuicePaymentMethodWithClaim, type FeePaymentMethod, - NativeFeePaymentMethod, - NativeFeePaymentMethodWithClaim, NoFeePaymentMethod, PrivateFeePaymentMethod, PublicFeePaymentMethod, @@ -139,14 +139,14 @@ function parsePaymentMethod(payment: string, log: LogFn): (sender: AccountWallet case 'native': if (parsed.claimSecret && parsed.claimAmount) { log(`Using native fee payment method with claim for ${parsed.claimAmount} tokens`); - return new NativeFeePaymentMethodWithClaim( + return new FeeJuicePaymentMethodWithClaim( 
sender.getAddress(), BigInt(parsed.claimAmount), Fr.fromString(parsed.claimSecret), ); } else { log(`Using native fee payment`); - return new NativeFeePaymentMethod(sender.getAddress()); + return new FeeJuicePaymentMethod(sender.getAddress()); } case 'fpc-public': { const [asset, fpc] = getFpcOpts(parsed); diff --git a/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts b/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts index 90c21da829d..b65c4d8c7a7 100644 --- a/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts +++ b/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts @@ -198,10 +198,7 @@ async function deployFPC(wallet: Wallet, tokenAddress: AztecAddress): Promise L2 Inbox Address: ${l1ContractAddresses.inboxAddress.toString()}`); log(`L2 -> L1 Outbox Address: ${l1ContractAddresses.outboxAddress.toString()}`); log(`Availability Oracle Address: ${l1ContractAddresses.availabilityOracleAddress.toString()}`); - log(`Gas Token Address: ${l1ContractAddresses.gasTokenAddress.toString()}`); - log(`Gas Portal Address: ${l1ContractAddresses.gasPortalAddress.toString()}`); + log(`Fee Juice Address: ${l1ContractAddresses.feeJuiceAddress.toString()}`); + log(`Gas Portal Address: ${l1ContractAddresses.feeJuicePortalAddress.toString()}`); } } diff --git a/yarn-project/cli/src/cmds/l1/index.ts b/yarn-project/cli/src/cmds/l1/index.ts index 4607af02c20..40774fddf0b 100644 --- a/yarn-project/cli/src/cmds/l1/index.ts +++ b/yarn-project/cli/src/cmds/l1/index.ts @@ -87,8 +87,8 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL program .command('bridge-fee-juice') - .description('Mints L1 gas tokens and pushes them to L2.') - .argument('', 'The amount of gas tokens to mint and bridge.', parseBigint) + .description('Mints L1 Fee Juice and pushes them to L2.') + .argument('', 'The amount of Fee Juice to mint and bridge.', parseBigint) .argument('', 'Aztec address of the recipient.', parseAztecAddress) .requiredOption( '--l1-rpc-url ', @@ 
-125,7 +125,7 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL program .command('bridge-erc20') .description('Bridges ERC20 tokens to L2.') - .argument('', 'The amount of gas tokens to mint and bridge.', parseBigint) + .argument('', 'The amount of Fee Juice to mint and bridge.', parseBigint) .argument('', 'Aztec address of the recipient.', parseAztecAddress) .requiredOption( '--l1-rpc-url ', diff --git a/yarn-project/cli/src/cmds/misc/deploy_contracts.ts b/yarn-project/cli/src/cmds/misc/deploy_contracts.ts index 70c1bf9599b..8aeaed0089e 100644 --- a/yarn-project/cli/src/cmds/misc/deploy_contracts.ts +++ b/yarn-project/cli/src/cmds/misc/deploy_contracts.ts @@ -8,52 +8,52 @@ import { } from '@aztec/circuits.js'; import { bufferAsFields } from '@aztec/foundation/abi'; import { getCanonicalAuthRegistry } from '@aztec/protocol-contracts/auth-registry'; -import { getCanonicalGasToken } from '@aztec/protocol-contracts/gas-token'; +import { getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; import { getCanonicalKeyRegistry } from '@aztec/protocol-contracts/key-registry'; /** * Deploys the contract to pay for gas on L2. */ -export async function deployCanonicalL2GasToken( +export async function deployCanonicalL2FeeJuice( deployer: Wallet, - gasPortalAddress: EthAddress, + feeJuicePortalAddress: EthAddress, waitOpts = DefaultWaitOpts, ): Promise { // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore - Importing noir-contracts.js even in devDeps results in a circular dependency error. 
Need to ignore because this line doesn't cause an error in a dev environment - const { GasTokenContract } = await import('@aztec/noir-contracts.js'); + const { FeeJuiceContract } = await import('@aztec/noir-contracts.js'); - const canonicalGasToken = getCanonicalGasToken(); + const canonicalFeeJuice = getCanonicalFeeJuice(); - if (await deployer.isContractClassPubliclyRegistered(canonicalGasToken.contractClass.id)) { - return canonicalGasToken.address; + if (await deployer.isContractClassPubliclyRegistered(canonicalFeeJuice.contractClass.id)) { + return canonicalFeeJuice.address; } - const publicBytecode = canonicalGasToken.contractClass.packedBytecode; + const publicBytecode = canonicalFeeJuice.contractClass.packedBytecode; const encodedBytecode = bufferAsFields(publicBytecode, MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS); await deployer.addCapsule(encodedBytecode); - const gasToken = await GasTokenContract.at(canonicalGasToken.address, deployer); - await gasToken.methods + const feeJuiceContract = await FeeJuiceContract.at(canonicalFeeJuice.address, deployer); + await feeJuiceContract.methods .deploy( - canonicalGasToken.contractClass.artifactHash, - canonicalGasToken.contractClass.privateFunctionsRoot, - canonicalGasToken.contractClass.publicBytecodeCommitment, - gasPortalAddress, + canonicalFeeJuice.contractClass.artifactHash, + canonicalFeeJuice.contractClass.privateFunctionsRoot, + canonicalFeeJuice.contractClass.publicBytecodeCommitment, + feeJuicePortalAddress, ) .send({ fee: { paymentMethod: new NoFeePaymentMethod(), gasSettings: GasSettings.teardownless() } }) .wait(waitOpts); - if (!gasToken.address.equals(canonicalGasToken.address)) { + if (!feeJuiceContract.address.equals(canonicalFeeJuice.address)) { throw new Error( - `Deployed Gas Token address ${gasToken.address} does not match expected address ${canonicalGasToken.address}`, + `Deployed Fee Juice address ${feeJuiceContract.address} does not match expected address ${canonicalFeeJuice.address}`, ); } - 
if (!(await deployer.isContractPubliclyDeployed(canonicalGasToken.address))) { - throw new Error(`Failed to deploy Gas Token to ${canonicalGasToken.address}`); + if (!(await deployer.isContractPubliclyDeployed(canonicalFeeJuice.address))) { + throw new Error(`Failed to deploy Fee Juice to ${canonicalFeeJuice.address}`); } - return canonicalGasToken.address; + return canonicalFeeJuice.address; } /** diff --git a/yarn-project/cli/src/cmds/pxe/get_node_info.ts b/yarn-project/cli/src/cmds/pxe/get_node_info.ts index 9f9581a0d4a..55084e76898 100644 --- a/yarn-project/cli/src/cmds/pxe/get_node_info.ts +++ b/yarn-project/cli/src/cmds/pxe/get_node_info.ts @@ -10,7 +10,7 @@ export async function getNodeInfo(rpcUrl: string, debugLogger: DebugLogger, log: log(`Rollup Address: ${info.l1ContractAddresses.rollupAddress.toString()}`); log(`Protocol Contract Addresses:`); log(` Class Registerer: ${info.protocolContractAddresses.classRegisterer.toString()}`); - log(` Gas Token: ${info.protocolContractAddresses.gasToken.toString()}`); + log(` Fee Juice: ${info.protocolContractAddresses.feeJuice.toString()}`); log(` Instance Deployer: ${info.protocolContractAddresses.instanceDeployer.toString()}`); log(` Key Registry: ${info.protocolContractAddresses.keyRegistry.toString()}`); log(` MultiCall: ${info.protocolContractAddresses.multiCallEntrypoint.toString()}`); diff --git a/yarn-project/cli/src/cmds/pxe/get_pxe_info.ts b/yarn-project/cli/src/cmds/pxe/get_pxe_info.ts index 21674f9ed2a..c448afa4613 100644 --- a/yarn-project/cli/src/cmds/pxe/get_pxe_info.ts +++ b/yarn-project/cli/src/cmds/pxe/get_pxe_info.ts @@ -7,7 +7,7 @@ export async function getPXEInfo(rpcUrl: string, debugLogger: DebugLogger, log: log(`PXE Version: ${info.pxeVersion}`); log(`Protocol Contract Addresses:`); log(` Class Registerer: ${info.protocolContractAddresses.classRegisterer.toString()}`); - log(` Gas Token: ${info.protocolContractAddresses.gasToken.toString()}`); + log(` Fee Juice: 
${info.protocolContractAddresses.feeJuice.toString()}`); log(` Instance Deployer: ${info.protocolContractAddresses.instanceDeployer.toString()}`); log(` Key Registry: ${info.protocolContractAddresses.keyRegistry.toString()}`); log(` Multi Call Entrypoint: ${info.protocolContractAddresses.multiCallEntrypoint.toString()}`); diff --git a/yarn-project/cli/src/portal_manager.ts b/yarn-project/cli/src/portal_manager.ts index 9b1808c406c..19a0829b2f0 100644 --- a/yarn-project/cli/src/portal_manager.ts +++ b/yarn-project/cli/src/portal_manager.ts @@ -1,7 +1,7 @@ // REFACTOR: This file has been shamelessly copied from yarn-project/end-to-end/src/shared/gas_portal_test_harness.ts // We should make this a shared utility in the aztec.js package. import { type AztecAddress, type DebugLogger, type EthAddress, Fr, type PXE, computeSecretHash } from '@aztec/aztec.js'; -import { GasPortalAbi, PortalERC20Abi, TokenPortalAbi } from '@aztec/l1-artifacts'; +import { FeeJuicePortalAbi, PortalERC20Abi, TokenPortalAbi } from '@aztec/l1-artifacts'; import { type Account, @@ -90,7 +90,7 @@ export class FeeJuicePortalManager extends PortalManager { async bridgeTokens(to: AztecAddress, amount: bigint, secretHash: Fr): Promise { const portal = getContract({ address: this.tokenPortalAddress.toString(), - abi: GasPortalAbi, + abi: FeeJuicePortalAbi, client: this.walletClient, }); @@ -115,14 +115,14 @@ export class FeeJuicePortalManager extends PortalManager { logger: DebugLogger, ): Promise { const { - l1ContractAddresses: { gasTokenAddress, gasPortalAddress }, + l1ContractAddresses: { feeJuiceAddress, feeJuicePortalAddress }, } = await pxe.getNodeInfo(); - if (gasTokenAddress.isZero() || gasPortalAddress.isZero()) { + if (feeJuiceAddress.isZero() || feeJuicePortalAddress.isZero()) { throw new Error('Portal or token not deployed on L1'); } - return new FeeJuicePortalManager(gasTokenAddress, gasPortalAddress, publicClient, walletClient, logger); + return new FeeJuicePortalManager(feeJuiceAddress, 
feeJuicePortalAddress, publicClient, walletClient, logger); } } diff --git a/yarn-project/cli/src/utils/aztec.ts b/yarn-project/cli/src/utils/aztec.ts index b7d494150c1..3449e8d9f6a 100644 --- a/yarn-project/cli/src/utils/aztec.ts +++ b/yarn-project/cli/src/utils/aztec.ts @@ -4,7 +4,7 @@ import { type PXE } from '@aztec/circuit-types'; import { type DeployL1Contracts } from '@aztec/ethereum'; import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; import { type NoirPackageConfig } from '@aztec/foundation/noir'; -import { GasTokenAddress } from '@aztec/protocol-contracts/gas-token'; +import { FeeJuiceAddress } from '@aztec/protocol-contracts/fee-juice'; import TOML from '@iarna/toml'; import { readFile } from 'fs/promises'; @@ -58,8 +58,8 @@ export async function deployAztecContracts( RollupBytecode, AvailabilityOracleAbi, AvailabilityOracleBytecode, - GasPortalAbi, - GasPortalBytecode, + FeeJuicePortalAbi, + FeeJuicePortalBytecode, PortalERC20Abi, PortalERC20Bytecode, } = await import('@aztec/l1-artifacts'); @@ -91,19 +91,19 @@ export async function deployAztecContracts( contractAbi: RollupAbi, contractBytecode: RollupBytecode, }, - gasToken: { + feeJuice: { contractAbi: PortalERC20Abi, contractBytecode: PortalERC20Bytecode, }, - gasPortal: { - contractAbi: GasPortalAbi, - contractBytecode: GasPortalBytecode, + feeJuicePortal: { + contractAbi: FeeJuicePortalAbi, + contractBytecode: FeeJuicePortalBytecode, }, }; const { getVKTreeRoot } = await import('@aztec/noir-protocol-circuits-types'); return await deployL1Contracts(chain.rpcUrl, account, chain.chainInfo, debugLogger, l1Artifacts, { - l2GasTokenAddress: GasTokenAddress, + l2FeeJuiceAddress: FeeJuiceAddress, vkTreeRoot: getVKTreeRoot(), }); } diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index 4d91c75af46..98f9eb6596a 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -160,8 +160,8 @@ e2e-fees-dapp-subscription: 
e2e-fees-failures: DO +E2E_TEST --test=./src/e2e_fees/failures.test.ts -e2e-fees-native-payments: - DO +E2E_TEST --test=./src/e2e_fees/native_payments.test.ts +e2e-fees-fee-juice-payments: + DO +E2E_TEST --test=./src/e2e_fees/fee_juice_payments.test.ts e2e-fees-account-init: DO +E2E_TEST --test=./src/e2e_fees/account_init.test.ts diff --git a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts index c3b04643c88..5d435fed52e 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts @@ -3,8 +3,8 @@ import { PublicFeePaymentMethod, TxStatus, sleep } from '@aztec/aztec.js'; import { type AccountWallet } from '@aztec/aztec.js/wallet'; import { BBCircuitVerifier } from '@aztec/bb-prover'; import { CompleteAddress, Fq, Fr, GasSettings } from '@aztec/circuits.js'; -import { FPCContract, GasTokenContract, TestContract, TokenContract } from '@aztec/noir-contracts.js'; -import { GasTokenAddress } from '@aztec/protocol-contracts/gas-token'; +import { FPCContract, FeeJuiceContract, TestContract, TokenContract } from '@aztec/noir-contracts.js'; +import { FeeJuiceAddress } from '@aztec/protocol-contracts/fee-juice'; import { type PXEService, createPXEService } from '@aztec/pxe'; import { jest } from '@jest/globals'; @@ -37,7 +37,7 @@ describe('benchmarks/proving', () => { let recipient: CompleteAddress; - let initialGasContract: GasTokenContract; + let initialGasContract: FeeJuiceContract; let initialTestContract: TestContract; let initialTokenContract: TokenContract; let initialFpContract: FPCContract; @@ -88,14 +88,8 @@ describe('benchmarks/proving', () => { ) .send() .deployed(); - initialGasContract = await GasTokenContract.at(GasTokenAddress, initialSchnorrWallet); - initialFpContract = await FPCContract.deploy( - initialSchnorrWallet, - initialTokenContract.address, - initialGasContract.address, - ) - .send() - .deployed(); + 
initialGasContract = await FeeJuiceContract.at(FeeJuiceAddress, initialSchnorrWallet); + initialFpContract = await FPCContract.deploy(initialSchnorrWallet, initialTokenContract.address).send().deployed(); await Promise.all([ initialGasContract.methods.mint_public(initialFpContract.address, 1e12).send().wait(), diff --git a/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts index cc818d8cb31..3aa63524f55 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts @@ -1,15 +1,15 @@ import { type AccountWalletWithSecretKey, type AztecAddress, + FeeJuicePaymentMethod, type FeePaymentMethod, - NativeFeePaymentMethod, PrivateFeePaymentMethod, PublicFeePaymentMethod, TxStatus, } from '@aztec/aztec.js'; import { GasSettings } from '@aztec/circuits.js'; -import { FPCContract, GasTokenContract, TokenContract } from '@aztec/noir-contracts.js'; -import { GasTokenAddress } from '@aztec/protocol-contracts/gas-token'; +import { FPCContract, FeeJuiceContract, TokenContract } from '@aztec/noir-contracts.js'; +import { FeeJuiceAddress } from '@aztec/protocol-contracts/fee-juice'; import { jest } from '@jest/globals'; @@ -21,7 +21,7 @@ describe('benchmarks/tx_size_fees', () => { let aliceWallet: AccountWalletWithSecretKey; let bobAddress: AztecAddress; let sequencerAddress: AztecAddress; - let gas: GasTokenContract; + let feeJuice: FeeJuiceContract; let fpc: FPCContract; let token: TokenContract; @@ -42,16 +42,16 @@ describe('benchmarks/tx_size_fees', () => { // deploy the contracts beforeAll(async () => { - gas = await GasTokenContract.at(GasTokenAddress, aliceWallet); + feeJuice = await FeeJuiceContract.at(FeeJuiceAddress, aliceWallet); token = await TokenContract.deploy(aliceWallet, aliceWallet.getAddress(), 'test', 'test', 18).send().deployed(); - fpc = await FPCContract.deploy(aliceWallet, token.address, 
gas.address).send().deployed(); + fpc = await FPCContract.deploy(aliceWallet, token.address).send().deployed(); }); // mint tokens beforeAll(async () => { await Promise.all([ - gas.methods.mint_public(aliceWallet.getAddress(), 100e9).send().wait(), - gas.methods.mint_public(fpc.address, 100e9).send().wait(), + feeJuice.methods.mint_public(aliceWallet.getAddress(), 100e9).send().wait(), + feeJuice.methods.mint_public(fpc.address, 100e9).send().wait(), ]); await token.methods.privately_mint_private_note(100e9).send().wait(); await token.methods.mint_public(aliceWallet.getAddress(), 100e9).send().wait(); @@ -61,7 +61,7 @@ describe('benchmarks/tx_size_fees', () => { ['no', () => undefined /*200021120n*/], [ 'native fee', - () => new NativeFeePaymentMethod(aliceWallet.getAddress()), + () => new FeeJuicePaymentMethod(aliceWallet.getAddress()), // Same cost as no fee payment, since payment is done natively // 200021120n, ], diff --git a/yarn-project/end-to-end/src/devnet/e2e_smoke.test.ts b/yarn-project/end-to-end/src/devnet/e2e_smoke.test.ts index a0f8cfba405..5e50a80a9bd 100644 --- a/yarn-project/end-to-end/src/devnet/e2e_smoke.test.ts +++ b/yarn-project/end-to-end/src/devnet/e2e_smoke.test.ts @@ -1,8 +1,8 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { type EthAddress, + FeeJuicePaymentMethodWithClaim, Fr, - NativeFeePaymentMethodWithClaim, type PXE, SignerlessWallet, TxStatus, @@ -17,7 +17,7 @@ import { GasSettings, deriveSigningKey } from '@aztec/circuits.js'; import { startHttpRpcServer } from '@aztec/foundation/json-rpc/server'; import { type DebugLogger } from '@aztec/foundation/log'; import { promiseWithResolvers } from '@aztec/foundation/promise'; -import { GasTokenContract, TestContract } from '@aztec/noir-contracts.js'; +import { FeeJuiceContract, TestContract } from '@aztec/noir-contracts.js'; import { createPXERpcServer } from '@aztec/pxe'; import getPort from 'get-port'; @@ -100,7 +100,7 @@ describe('End-to-end tests for devnet', () 
=> { expect(nodeInfo.protocolContractAddresses.instanceDeployer).toEqual( pxeInfo.protocolContractAddresses.instanceDeployer, ); - expect(nodeInfo.protocolContractAddresses.gasToken).toEqual(pxeInfo.protocolContractAddresses.gasToken); + expect(nodeInfo.protocolContractAddresses.feeJuice).toEqual(pxeInfo.protocolContractAddresses.feeJuice); expect(nodeInfo.protocolContractAddresses.keyRegistry).toEqual(pxeInfo.protocolContractAddresses.keyRegistry); expect(nodeInfo.protocolContractAddresses.multiCallEntrypoint).toEqual( pxeInfo.protocolContractAddresses.multiCallEntrypoint, @@ -132,7 +132,7 @@ describe('End-to-end tests for devnet', () => { ({ l1ChainId, - l1ContractAddresses: { gasTokenAddress: feeJuiceL1 }, + l1ContractAddresses: { feeJuiceAddress: feeJuiceL1 }, } = await pxe.getNodeInfo()); logger.info(`PXE instance started`); }); @@ -178,7 +178,7 @@ describe('End-to-end tests for devnet', () => { .deploy({ fee: { gasSettings: GasSettings.default(), - paymentMethod: new NativeFeePaymentMethodWithClaim( + paymentMethod: new FeeJuicePaymentMethodWithClaim( l2Account.getAddress(), BigInt(claimAmount), Fr.fromString(claimSecret.value), @@ -208,10 +208,10 @@ describe('End-to-end tests for devnet', () => { // ); expect(txReceipt.status).toBe(TxStatus.SUCCESS); - const feeJuice = await GasTokenContract.at( + const feeJuice = await FeeJuiceContract.at( ( await pxe.getNodeInfo() - ).protocolContractAddresses.gasToken, + ).protocolContractAddresses.feeJuice, await l2Account.getWallet(), ); const balance = await feeJuice.methods.balance_of_public(l2Account.getAddress()).simulate(); diff --git a/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts b/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts index 7a97a9f7d69..3288cef8284 100644 --- a/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts @@ -3,9 +3,9 @@ import { type AccountManager, type AccountWallet, type DebugLogger, + 
FeeJuicePaymentMethod, + FeeJuicePaymentMethodWithClaim, Fr, - NativeFeePaymentMethod, - NativeFeePaymentMethodWithClaim, type PXE, PrivateFeePaymentMethod, PublicFeePaymentMethod, @@ -84,26 +84,26 @@ describe('e2e_fees account_init', () => { }); describe('account pays its own fee', () => { - it('pays natively in the gas token after Alice bridges funds', async () => { - await t.gasTokenContract.methods.mint_public(bobsAddress, t.INITIAL_GAS_BALANCE).send().wait(); + it('pays natively in the Fee Juice after Alice bridges funds', async () => { + await t.feeJuiceContract.methods.mint_public(bobsAddress, t.INITIAL_GAS_BALANCE).send().wait(); const [bobsInitialGas] = await t.getGasBalanceFn(bobsAddress); expect(bobsInitialGas).toEqual(t.INITIAL_GAS_BALANCE); - const paymentMethod = new NativeFeePaymentMethod(bobsAddress); + const paymentMethod = new FeeJuicePaymentMethod(bobsAddress); const tx = await bobsAccountManager.deploy({ fee: { gasSettings, paymentMethod } }).wait(); expect(tx.transactionFee!).toBeGreaterThan(0n); await expect(t.getGasBalanceFn(bobsAddress)).resolves.toEqual([bobsInitialGas - tx.transactionFee!]); }); - it('pays natively in the gas token by bridging funds themselves', async () => { - const { secret } = await t.gasBridgeTestHarness.prepareTokensOnL1( + it('pays natively in the Fee Juice by bridging funds themselves', async () => { + const { secret } = await t.feeJuiceBridgeTestHarness.prepareTokensOnL1( t.INITIAL_GAS_BALANCE, t.INITIAL_GAS_BALANCE, bobsAddress, ); - const paymentMethod = new NativeFeePaymentMethodWithClaim(bobsAddress, t.INITIAL_GAS_BALANCE, secret); + const paymentMethod = new FeeJuicePaymentMethodWithClaim(bobsAddress, t.INITIAL_GAS_BALANCE, secret); const tx = await bobsAccountManager.deploy({ fee: { gasSettings, paymentMethod } }).wait(); expect(tx.transactionFee!).toBeGreaterThan(0n); await expect(t.getGasBalanceFn(bobsAddress)).resolves.toEqual([t.INITIAL_GAS_BALANCE - tx.transactionFee!]); @@ -177,9 +177,9 @@ 
describe('e2e_fees account_init', () => { }); describe('another account pays the fee', () => { - it('pays natively in the gas token', async () => { - // mint gas tokens to alice - await t.gasTokenContract.methods.mint_public(aliceAddress, t.INITIAL_GAS_BALANCE).send().wait(); + it('pays natively in the Fee Juice', async () => { + // mint Fee Juice to alice + await t.feeJuiceContract.methods.mint_public(aliceAddress, t.INITIAL_GAS_BALANCE).send().wait(); const [alicesInitialGas] = await t.getGasBalanceFn(aliceAddress); // bob generates the private keys for his account on his own @@ -191,7 +191,7 @@ describe('e2e_fees account_init', () => { await pxe.registerRecipient(bobsCompleteAddress); // and deploys bob's account, paying the fee from her balance - const paymentMethod = new NativeFeePaymentMethod(aliceAddress); + const paymentMethod = new FeeJuicePaymentMethod(aliceAddress); const tx = await SchnorrAccountContract.deployWithPublicKeysHash( bobsPublicKeysHash, aliceWallet, @@ -208,7 +208,7 @@ describe('e2e_fees account_init', () => { }) .wait(); - // alice paid in gas tokens + // alice paid in Fee Juice expect(tx.transactionFee!).toBeGreaterThan(0n); await expect(t.getGasBalanceFn(aliceAddress)).resolves.toEqual([alicesInitialGas - tx.transactionFee!]); diff --git a/yarn-project/end-to-end/src/e2e_fees/native_payments.test.ts b/yarn-project/end-to-end/src/e2e_fees/fee_juice_payments.test.ts similarity index 72% rename from yarn-project/end-to-end/src/e2e_fees/native_payments.test.ts rename to yarn-project/end-to-end/src/e2e_fees/fee_juice_payments.test.ts index b3837d3de66..ddd1288c518 100644 --- a/yarn-project/end-to-end/src/e2e_fees/native_payments.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/fee_juice_payments.test.ts @@ -1,31 +1,31 @@ import { type AccountWallet, type AztecAddress, - NativeFeePaymentMethod, - NativeFeePaymentMethodWithClaim, + FeeJuicePaymentMethod, + FeeJuicePaymentMethodWithClaim, } from '@aztec/aztec.js'; import { type GasSettings } 
from '@aztec/circuits.js'; -import { type TokenContract as BananaCoin, type GasTokenContract } from '@aztec/noir-contracts.js'; +import { type TokenContract as BananaCoin, type FeeJuiceContract } from '@aztec/noir-contracts.js'; import { FeesTest } from './fees_test.js'; -describe('e2e_fees native_payments', () => { +describe('e2e_fees Fee Juice payments', () => { let aliceAddress: AztecAddress; let aliceWallet: AccountWallet; let bobAddress: AztecAddress; let bananaCoin: BananaCoin; let gasSettings: GasSettings; - let gasTokenContract: GasTokenContract; - let paymentMethod: NativeFeePaymentMethod; + let feeJuiceContract: FeeJuiceContract; + let paymentMethod: FeeJuicePaymentMethod; - const t = new FeesTest('native_payments'); + const t = new FeesTest('fee_juice'); beforeAll(async () => { await t.applyBaseSnapshots(); await t.applyFundAliceWithBananas(); - ({ gasTokenContract, aliceAddress, aliceWallet, bobAddress, bananaCoin, gasSettings } = await t.setup()); + ({ feeJuiceContract, aliceAddress, aliceWallet, bobAddress, bananaCoin, gasSettings } = await t.setup()); - paymentMethod = new NativeFeePaymentMethod(aliceAddress); + paymentMethod = new FeeJuicePaymentMethod(aliceAddress); // We let Alice see Bob's notes because the expect uses Alice's wallet to interact with the contracts to "get" state. 
aliceWallet.setScopes([aliceAddress, bobAddress]); @@ -37,7 +37,7 @@ describe('e2e_fees native_payments', () => { describe('without initial funds', () => { beforeAll(async () => { - expect(await gasTokenContract.methods.balance_of_public(aliceAddress).simulate()).toEqual(0n); + expect(await feeJuiceContract.methods.balance_of_public(aliceAddress).simulate()).toEqual(0n); }); it('fails to send a tx', async () => { @@ -50,17 +50,17 @@ describe('e2e_fees native_payments', () => { }); it('claims bridged funds and pays with them on the same tx', async () => { - const { secret } = await t.gasBridgeTestHarness.prepareTokensOnL1( + const { secret } = await t.feeJuiceBridgeTestHarness.prepareTokensOnL1( t.INITIAL_GAS_BALANCE, t.INITIAL_GAS_BALANCE, aliceAddress, ); - const paymentMethod = new NativeFeePaymentMethodWithClaim(aliceAddress, t.INITIAL_GAS_BALANCE, secret); + const paymentMethod = new FeeJuicePaymentMethodWithClaim(aliceAddress, t.INITIAL_GAS_BALANCE, secret); const receipt = await bananaCoin.methods .transfer_public(aliceAddress, bobAddress, 1n, 0n) .send({ fee: { gasSettings, paymentMethod } }) .wait(); - const endBalance = await gasTokenContract.methods.balance_of_public(aliceAddress).simulate(); + const endBalance = await feeJuiceContract.methods.balance_of_public(aliceAddress).simulate(); expect(endBalance).toBeGreaterThan(0n); expect(endBalance).toBeLessThan(t.INITIAL_GAS_BALANCE); @@ -70,28 +70,28 @@ describe('e2e_fees native_payments', () => { describe('with initial funds', () => { beforeAll(async () => { - await t.applyFundAliceWithGasToken(); + await t.applyFundAliceWithFeeJuice(); }); it('sends tx with native fee payment method with public calls', async () => { - const initialBalance = await gasTokenContract.methods.balance_of_public(aliceAddress).simulate(); + const initialBalance = await feeJuiceContract.methods.balance_of_public(aliceAddress).simulate(); const { transactionFee } = await bananaCoin.methods .transfer_public(aliceAddress, bobAddress, 
1n, 0n) .send({ fee: { gasSettings, paymentMethod } }) .wait(); expect(transactionFee).toBeGreaterThan(0n); - const endBalance = await gasTokenContract.methods.balance_of_public(aliceAddress).simulate(); + const endBalance = await feeJuiceContract.methods.balance_of_public(aliceAddress).simulate(); expect(endBalance).toBeLessThan(initialBalance); }); it('sends tx with native fee payment method with no public calls', async () => { - const initialBalance = await gasTokenContract.methods.balance_of_public(aliceAddress).simulate(); + const initialBalance = await feeJuiceContract.methods.balance_of_public(aliceAddress).simulate(); const { transactionFee } = await bananaCoin.methods .transfer(bobAddress, 1n) .send({ fee: { gasSettings, paymentMethod } }) .wait(); expect(transactionFee).toBeGreaterThan(0n); - const endBalance = await gasTokenContract.methods.balance_of_public(aliceAddress).simulate(); + const endBalance = await feeJuiceContract.methods.balance_of_public(aliceAddress).simulate(); expect(endBalance).toBeLessThan(initialBalance); }); }); diff --git a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts index 77eb81550f3..72ef46ddb24 100644 --- a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts @@ -22,18 +22,21 @@ import { TokenContract as BananaCoin, CounterContract, FPCContract, - GasTokenContract, + FeeJuiceContract, PrivateFPCContract, TokenWithRefundsContract, } from '@aztec/noir-contracts.js'; -import { getCanonicalGasToken } from '@aztec/protocol-contracts/gas-token'; +import { getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; import { getContract } from 'viem'; import { MNEMONIC } from '../fixtures/fixtures.js'; import { type ISnapshotManager, addAccounts, createSnapshotManager } from '../fixtures/snapshot_manager.js'; -import { type BalancesFn, deployCanonicalGasToken, getBalancesFn, publicDeployAccounts } from 
'../fixtures/utils.js'; -import { GasPortalTestingHarnessFactory, type IGasBridgingTestHarness } from '../shared/gas_portal_test_harness.js'; +import { type BalancesFn, deployCanonicalFeeJuice, getBalancesFn, publicDeployAccounts } from '../fixtures/utils.js'; +import { + FeeJuicePortalTestingHarnessFactory, + type IGasBridgingTestHarness, +} from '../shared/gas_portal_test_harness.js'; const { E2E_DATA_PATH: dataPath } = process.env; @@ -41,10 +44,10 @@ const { E2E_DATA_PATH: dataPath } = process.env; * Test fixture for testing fees. Provides the following snapshots: * InitialAccounts: Initializes 3 Schnorr account contracts. * PublicDeployAccounts: Deploys the accounts publicly. - * DeployGasToken: Deploys the gas token contract. + * DeployFeeJuice: Deploys the Fee Juice contract. * FPCSetup: Deploys BananaCoin and FPC contracts, and bridges gas from L1. * FundAlice: Mints private and public bananas to Alice. - * SetupSubscription: Deploys a counter contract and a subscription contract, and mints gas token to the subscription contract. + * SetupSubscription: Deploys a counter contract and a subscription contract, and mints Fee Juice to the subscription contract. 
*/ export class FeesTest { private snapshotManager: ISnapshotManager; @@ -64,14 +67,14 @@ export class FeesTest { public gasSettings = GasSettings.default(); public maxFee = this.gasSettings.getFeeLimit().toBigInt(); - public gasTokenContract!: GasTokenContract; + public feeJuiceContract!: FeeJuiceContract; public bananaCoin!: BananaCoin; public bananaFPC!: FPCContract; public tokenWithRefunds!: TokenWithRefundsContract; public privateFPC!: PrivateFPCContract; public counterContract!: CounterContract; public subscriptionContract!: AppSubscriptionContract; - public gasBridgeTestHarness!: IGasBridgingTestHarness; + public feeJuiceBridgeTestHarness!: IGasBridgingTestHarness; public getCoinbaseBalance!: () => Promise; public getGasBalanceFn!: BalancesFn; @@ -151,7 +154,7 @@ export class FeesTest { public async applyBaseSnapshots() { await this.applyInitialAccountsSnapshot(); await this.applyPublicDeployAccountsSnapshot(); - await this.applyDeployGasTokenSnapshot(); + await this.applyDeployFeeJuiceSnapshot(); await this.applyDeployBananaTokenSnapshot(); } @@ -168,7 +171,7 @@ export class FeesTest { this.wallets.forEach((w, i) => this.logger.verbose(`Wallet ${i} address: ${w.getAddress()}`)); [this.aliceWallet, this.bobWallet] = this.wallets.slice(0, 2); [this.aliceAddress, this.bobAddress, this.sequencerAddress] = this.wallets.map(w => w.getAddress()); - this.gasTokenContract = await GasTokenContract.at(getCanonicalGasToken().address, this.aliceWallet); + this.feeJuiceContract = await FeeJuiceContract.at(getCanonicalFeeJuice().address, this.aliceWallet); const bobInstance = await this.bobWallet.getContractInstance(this.bobAddress); if (!bobInstance) { throw new Error('Bob instance not found'); @@ -177,7 +180,7 @@ export class FeesTest { this.coinbase = EthAddress.random(); const { publicClient, walletClient } = createL1Clients(aztecNodeConfig.rpcUrl, MNEMONIC); - this.gasBridgeTestHarness = await GasPortalTestingHarnessFactory.create({ + this.feeJuiceBridgeTestHarness = 
await FeeJuicePortalTestingHarnessFactory.create({ aztecNode: aztecNode, pxeService: pxe, publicClient: publicClient, @@ -196,11 +199,11 @@ export class FeesTest { ); } - async applyDeployGasTokenSnapshot() { + async applyDeployFeeJuiceSnapshot() { await this.snapshotManager.snapshot( - 'deploy_gas_token', + 'deploy_fee_juice', async context => { - await deployCanonicalGasToken( + await deployCanonicalFeeJuice( new SignerlessWallet( context.pxe, new DefaultMultiCallEntrypoint(context.aztecNodeConfig.l1ChainId, context.aztecNodeConfig.version), @@ -208,12 +211,12 @@ export class FeesTest { ); }, async (_data, context) => { - this.gasTokenContract = await GasTokenContract.at(getCanonicalGasToken().address, this.aliceWallet); + this.feeJuiceContract = await FeeJuiceContract.at(getCanonicalFeeJuice().address, this.aliceWallet); - this.getGasBalanceFn = getBalancesFn('⛽', this.gasTokenContract.methods.balance_of_public, this.logger); + this.getGasBalanceFn = getBalancesFn('⛽', this.feeJuiceContract.methods.balance_of_public, this.logger); const { publicClient, walletClient } = createL1Clients(context.aztecNodeConfig.rpcUrl, MNEMONIC); - this.gasBridgeTestHarness = await GasPortalTestingHarnessFactory.create({ + this.feeJuiceBridgeTestHarness = await FeeJuicePortalTestingHarnessFactory.create({ aztecNode: context.aztecNode, pxeService: context.pxe, publicClient: publicClient, @@ -247,8 +250,8 @@ export class FeesTest { 'token_with_refunds_and_private_fpc', async context => { // Deploy token/fpc flavors for private refunds - const gasTokenContract = this.gasBridgeTestHarness.l2Token; - expect(await context.pxe.isContractPubliclyDeployed(gasTokenContract.address)).toBe(true); + const feeJuiceContract = this.feeJuiceBridgeTestHarness.l2Token; + expect(await context.pxe.isContractPubliclyDeployed(feeJuiceContract.address)).toBe(true); const tokenWithRefunds = await TokenWithRefundsContract.deploy( this.aliceWallet, @@ -270,7 +273,7 @@ export class FeesTest { const privateFPC 
= await privateFPCSent.deployed(); this.logger.info(`PrivateFPC deployed at ${privateFPC.address}`); - await this.gasBridgeTestHarness.bridgeFromL1ToL2( + await this.feeJuiceBridgeTestHarness.bridgeFromL1ToL2( this.INITIAL_GAS_BALANCE, this.INITIAL_GAS_BALANCE, privateFPC.address, @@ -299,17 +302,15 @@ export class FeesTest { await this.snapshotManager.snapshot( 'fpc_setup', async context => { - const gasTokenContract = this.gasBridgeTestHarness.l2Token; - expect(await context.pxe.isContractPubliclyDeployed(gasTokenContract.address)).toBe(true); + const feeJuiceContract = this.feeJuiceBridgeTestHarness.l2Token; + expect(await context.pxe.isContractPubliclyDeployed(feeJuiceContract.address)).toBe(true); const bananaCoin = this.bananaCoin; - const bananaFPC = await FPCContract.deploy(this.aliceWallet, bananaCoin.address, gasTokenContract.address) - .send() - .deployed(); + const bananaFPC = await FPCContract.deploy(this.aliceWallet, bananaCoin.address).send().deployed(); this.logger.info(`BananaPay deployed at ${bananaFPC.address}`); - await this.gasBridgeTestHarness.bridgeFromL1ToL2( + await this.feeJuiceBridgeTestHarness.bridgeFromL1ToL2( this.INITIAL_GAS_BALANCE, this.INITIAL_GAS_BALANCE, bananaFPC.address, @@ -317,8 +318,8 @@ export class FeesTest { return { bananaFPCAddress: bananaFPC.address, - gasTokenAddress: gasTokenContract.address, - l1GasTokenAddress: this.gasBridgeTestHarness.l1GasTokenAddress, + feeJuiceAddress: feeJuiceContract.address, + l1FeeJuiceAddress: this.feeJuiceBridgeTestHarness.l1FeeJuiceAddress, }; }, async (data, context) => { @@ -336,7 +337,7 @@ export class FeesTest { this.getCoinbaseBalance = async () => { const { walletClient } = createL1Clients(context.aztecNodeConfig.rpcUrl, MNEMONIC); const gasL1 = getContract({ - address: data.l1GasTokenAddress.toString(), + address: data.l1FeeJuiceAddress.toString(), abi: PortalERC20Abi, client: walletClient, }); @@ -367,11 +368,11 @@ export class FeesTest { ); } - public async 
applyFundAliceWithGasToken() { + public async applyFundAliceWithFeeJuice() { await this.snapshotManager.snapshot( - 'fund_alice_with_gas_token', + 'fund_alice_with_fee_juice', async () => { - await this.gasTokenContract.methods.mint_public(this.aliceAddress, this.INITIAL_GAS_BALANCE).send().wait(); + await this.feeJuiceContract.methods.mint_public(this.aliceAddress, this.INITIAL_GAS_BALANCE).send().wait(); }, () => Promise.resolve(), ); @@ -394,15 +395,14 @@ export class FeesTest { this.bobAddress, this.bananaCoin.address, this.SUBSCRIPTION_AMOUNT, - this.gasTokenContract.address, this.APP_SPONSORED_TX_GAS_LIMIT, ) .send() .deployed(); - // Mint some gas tokens to the subscription contract + // Mint some Fee Juice to the subscription contract // Could also use bridgeFromL1ToL2 from the harness, but this is more direct - await this.gasTokenContract.methods + await this.feeJuiceContract.methods .mint_public(subscriptionContract.address, this.INITIAL_GAS_BALANCE) .send() .wait(); diff --git a/yarn-project/end-to-end/src/e2e_fees/gas_estimation.test.ts b/yarn-project/end-to-end/src/e2e_fees/gas_estimation.test.ts index 2b5477de80b..fb53b08a58b 100644 --- a/yarn-project/end-to-end/src/e2e_fees/gas_estimation.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/gas_estimation.test.ts @@ -1,8 +1,8 @@ import { type AccountWallet, type AztecAddress, + FeeJuicePaymentMethod, type FeePaymentMethod, - NativeFeePaymentMethod, PublicFeePaymentMethod, } from '@aztec/aztec.js'; import { GasFees, type GasSettings } from '@aztec/circuits.js'; @@ -29,7 +29,7 @@ describe('e2e_fees gas_estimation', () => { await t.applyBaseSnapshots(); await t.applyFPCSetupSnapshot(); await t.applyFundAliceWithBananas(); - await t.applyFundAliceWithGasToken(); + await t.applyFundAliceWithFeeJuice(); ({ aliceWallet, aliceAddress, bobAddress, bananaCoin, bananaFPC, gasSettings, logger } = await t.setup()); teardownFixedFee = gasSettings.teardownGasLimits.computeFee(GasFees.default()).toBigInt(); @@ -65,7 
+65,7 @@ describe('e2e_fees gas_estimation', () => { }); it('estimates gas with native fee payment method', async () => { - const paymentMethod = new NativeFeePaymentMethod(aliceAddress); + const paymentMethod = new FeeJuicePaymentMethod(aliceAddress); const estimatedGas = await makeTransferRequest().estimateGas({ fee: { gasSettings, paymentMethod } }); logGasEstimate(estimatedGas); @@ -112,7 +112,7 @@ describe('e2e_fees gas_estimation', () => { }); it('estimates gas for public contract initialization with native fee payment method', async () => { - const paymentMethod = new NativeFeePaymentMethod(aliceAddress); + const paymentMethod = new FeeJuicePaymentMethod(aliceAddress); const deployMethod = () => BananaCoin.deploy(aliceWallet, aliceAddress, 'TKN', 'TKN', 8); const deployOpts = { fee: { gasSettings, paymentMethod }, skipClassRegistration: true }; const estimatedGas = await deployMethod().estimateGas(deployOpts); diff --git a/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts b/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts index 8ad767e05be..3987c8137d7 100644 --- a/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts @@ -8,7 +8,7 @@ import { computeSecretHash, } from '@aztec/aztec.js'; import { type GasSettings } from '@aztec/circuits.js'; -import { type TokenContract as BananaCoin, FPCContract, type GasTokenContract } from '@aztec/noir-contracts.js'; +import { type TokenContract as BananaCoin, FPCContract } from '@aztec/noir-contracts.js'; import { expectMapping } from '../fixtures/utils.js'; import { FeesTest } from './fees_test.js'; @@ -18,7 +18,6 @@ describe('e2e_fees private_payment', () => { let aliceAddress: AztecAddress; let bobAddress: AztecAddress; let sequencerAddress: AztecAddress; - let gasTokenContract: GasTokenContract; let bananaCoin: BananaCoin; let bananaFPC: FPCContract; let gasSettings: GasSettings; @@ -29,8 +28,7 @@ describe('e2e_fees 
private_payment', () => { await t.applyBaseSnapshots(); await t.applyFPCSetupSnapshot(); await t.applyFundAliceWithBananas(); - ({ aliceWallet, aliceAddress, bobAddress, sequencerAddress, gasTokenContract, bananaCoin, bananaFPC, gasSettings } = - await t.setup()); + ({ aliceWallet, aliceAddress, bobAddress, sequencerAddress, bananaCoin, bananaFPC, gasSettings } = await t.setup()); }); afterAll(async () => { @@ -361,9 +359,7 @@ describe('e2e_fees private_payment', () => { it('rejects txs that dont have enough balance to cover gas costs', async () => { // deploy a copy of bananaFPC but don't fund it! - const bankruptFPC = await FPCContract.deploy(aliceWallet, bananaCoin.address, gasTokenContract.address) - .send() - .deployed(); + const bankruptFPC = await FPCContract.deploy(aliceWallet, bananaCoin.address).send().deployed(); await expectMapping(t.getGasBalanceFn, [bankruptFPC.address], [0n]); diff --git a/yarn-project/end-to-end/src/e2e_fees/private_refunds.test.ts b/yarn-project/end-to-end/src/e2e_fees/private_refunds.test.ts index 8ae4466f20a..a382cd116be 100644 --- a/yarn-project/end-to-end/src/e2e_fees/private_refunds.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/private_refunds.test.ts @@ -33,7 +33,7 @@ describe('e2e_fees/private_refunds', () => { beforeAll(async () => { await t.applyInitialAccountsSnapshot(); await t.applyPublicDeployAccountsSnapshot(); - await t.applyDeployGasTokenSnapshot(); + await t.applyDeployFeeJuiceSnapshot(); await t.applyTokenWithRefundsAndFPC(); await t.applyFundAliceWithTokens(); ({ aliceWallet, aliceAddress, bobAddress, privateFPC, tokenWithRefunds } = await t.setup()); diff --git a/yarn-project/end-to-end/src/fixtures/setup_l1_contracts.ts b/yarn-project/end-to-end/src/fixtures/setup_l1_contracts.ts index c50097d639c..4e37c77f3b6 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_l1_contracts.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_l1_contracts.ts @@ -2,8 +2,8 @@ import { type DebugLogger, type 
L1ContractArtifactsForDeployment, deployL1Contra import { AvailabilityOracleAbi, AvailabilityOracleBytecode, - GasPortalAbi, - GasPortalBytecode, + FeeJuicePortalAbi, + FeeJuicePortalBytecode, InboxAbi, InboxBytecode, OutboxAbi, @@ -16,7 +16,7 @@ import { RollupBytecode, } from '@aztec/l1-artifacts'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; -import { GasTokenAddress } from '@aztec/protocol-contracts/gas-token'; +import { FeeJuiceAddress } from '@aztec/protocol-contracts/fee-juice'; import { type HDAccount, type PrivateKeyAccount } from 'viem'; import { foundry } from 'viem/chains'; @@ -49,18 +49,18 @@ export const setupL1Contracts = async ( contractAbi: RollupAbi, contractBytecode: RollupBytecode, }, - gasToken: { + feeJuice: { contractAbi: PortalERC20Abi, contractBytecode: PortalERC20Bytecode, }, - gasPortal: { - contractAbi: GasPortalAbi, - contractBytecode: GasPortalBytecode, + feeJuicePortal: { + contractAbi: FeeJuicePortalAbi, + contractBytecode: FeeJuicePortalBytecode, }, }; const l1Data = await deployL1Contracts(l1RpcUrl, account, foundry, logger, l1Artifacts, { - l2GasTokenAddress: GasTokenAddress, + l2FeeJuiceAddress: FeeJuiceAddress, vkTreeRoot: getVKTreeRoot(), }); diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 8507db86f00..a9837f73931 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -42,8 +42,8 @@ import { makeBackoff, retry } from '@aztec/foundation/retry'; import { AvailabilityOracleAbi, AvailabilityOracleBytecode, - GasPortalAbi, - GasPortalBytecode, + FeeJuicePortalAbi, + FeeJuicePortalBytecode, InboxAbi, InboxBytecode, OutboxAbi, @@ -56,10 +56,10 @@ import { RollupBytecode, } from '@aztec/l1-artifacts'; import { AuthRegistryContract, KeyRegistryContract } from '@aztec/noir-contracts.js'; -import { GasTokenContract } from '@aztec/noir-contracts.js/GasToken'; +import { FeeJuiceContract } from 
'@aztec/noir-contracts.js/FeeJuice'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; import { getCanonicalAuthRegistry } from '@aztec/protocol-contracts/auth-registry'; -import { GasTokenAddress, getCanonicalGasToken } from '@aztec/protocol-contracts/gas-token'; +import { FeeJuiceAddress, getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; import { getCanonicalKeyRegistry } from '@aztec/protocol-contracts/key-registry'; import { type ProverClient } from '@aztec/prover-client'; import { PXEService, type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; @@ -128,18 +128,18 @@ export const setupL1Contracts = async ( contractAbi: RollupAbi, contractBytecode: RollupBytecode, }, - gasToken: { + feeJuice: { contractAbi: PortalERC20Abi, contractBytecode: PortalERC20Bytecode, }, - gasPortal: { - contractAbi: GasPortalAbi, - contractBytecode: GasPortalBytecode, + feeJuicePortal: { + contractAbi: FeeJuicePortalAbi, + contractBytecode: FeeJuicePortalBytecode, }, }; const l1Data = await deployL1Contracts(l1RpcUrl, account, foundry, logger, l1Artifacts, { - l2GasTokenAddress: GasTokenAddress, + l2FeeJuiceAddress: FeeJuiceAddress, vkTreeRoot: getVKTreeRoot(), }); @@ -245,7 +245,7 @@ async function setupWithRemoteEnvironment( ); if (enableGas) { - await deployCanonicalGasToken( + await deployCanonicalFeeJuice( new SignerlessWallet(pxeClient, new DefaultMultiCallEntrypoint(chainId, protocolVersion)), ); } @@ -398,8 +398,8 @@ export async function setup( ); if (enableGas) { - logger.verbose('Deploying gas token...'); - await deployCanonicalGasToken( + logger.verbose('Deploying Fee Juice...'); + await deployCanonicalFeeJuice( new SignerlessWallet(pxe, new DefaultMultiCallEntrypoint(config.l1ChainId, config.version)), ); } @@ -625,41 +625,41 @@ export async function expectMappingDelta( /** * Deploy the protocol contracts to a running instance. 
*/ -export async function deployCanonicalGasToken(pxe: PXE) { - // "deploy" the Gas token as it contains public functions - const gasPortalAddress = (await pxe.getNodeInfo()).l1ContractAddresses.gasPortalAddress; - const canonicalGasToken = getCanonicalGasToken(); - - if (await pxe.isContractClassPubliclyRegistered(canonicalGasToken.contractClass.id)) { - getLogger().debug('Gas token already deployed'); - await expect(pxe.isContractPubliclyDeployed(canonicalGasToken.address)).resolves.toBe(true); +export async function deployCanonicalFeeJuice(pxe: PXE) { + // "deploy" the Fee Juice as it contains public functions + const feeJuicePortalAddress = (await pxe.getNodeInfo()).l1ContractAddresses.feeJuicePortalAddress; + const canonicalFeeJuice = getCanonicalFeeJuice(); + + if (await pxe.isContractClassPubliclyRegistered(canonicalFeeJuice.contractClass.id)) { + getLogger().debug('Fee Juice already deployed'); + await expect(pxe.isContractPubliclyDeployed(canonicalFeeJuice.address)).resolves.toBe(true); return; } // Capsules will die soon, patience! 
- const publicBytecode = canonicalGasToken.contractClass.packedBytecode; + const publicBytecode = canonicalFeeJuice.contractClass.packedBytecode; const encodedBytecode = bufferAsFields(publicBytecode, MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS); await pxe.addCapsule(encodedBytecode); - await pxe.registerContract(canonicalGasToken); + await pxe.registerContract(canonicalFeeJuice); const wallet = new SignerlessWallet(pxe); - const gasToken = await GasTokenContract.at(canonicalGasToken.address, wallet); + const feeJuice = await FeeJuiceContract.at(canonicalFeeJuice.address, wallet); - await gasToken.methods + await feeJuice.methods .deploy( - canonicalGasToken.contractClass.artifactHash, - canonicalGasToken.contractClass.privateFunctionsRoot, - canonicalGasToken.contractClass.publicBytecodeCommitment, - gasPortalAddress, + canonicalFeeJuice.contractClass.artifactHash, + canonicalFeeJuice.contractClass.privateFunctionsRoot, + canonicalFeeJuice.contractClass.publicBytecodeCommitment, + feeJuicePortalAddress, ) .send({ fee: { paymentMethod: new NoFeePaymentMethod(), gasSettings: GasSettings.teardownless() } }) .wait(); - getLogger().info(`Gas token publicly deployed at ${gasToken.address}`); + getLogger().info(`Fee Juice publicly deployed at ${feeJuice.address}`); - await expect(pxe.isContractClassPubliclyRegistered(gasToken.instance.contractClassId)).resolves.toBe(true); - await expect(pxe.getContractInstance(gasToken.address)).resolves.toBeDefined(); - await expect(pxe.isContractPubliclyDeployed(gasToken.address)).resolves.toBe(true); + await expect(pxe.isContractClassPubliclyRegistered(feeJuice.instance.contractClassId)).resolves.toBe(true); + await expect(pxe.getContractInstance(feeJuice.address)).resolves.toBeDefined(); + await expect(pxe.isContractPubliclyDeployed(feeJuice.address)).resolves.toBe(true); } export async function deployCanonicalKeyRegistry(deployer: Wallet) { diff --git a/yarn-project/end-to-end/src/shared/gas_portal_test_harness.ts 
b/yarn-project/end-to-end/src/shared/gas_portal_test_harness.ts index a81cecbbab3..12c8f801aec 100644 --- a/yarn-project/end-to-end/src/shared/gas_portal_test_harness.ts +++ b/yarn-project/end-to-end/src/shared/gas_portal_test_harness.ts @@ -8,9 +8,9 @@ import { type Wallet, computeSecretHash, } from '@aztec/aztec.js'; -import { GasPortalAbi, OutboxAbi, PortalERC20Abi } from '@aztec/l1-artifacts'; -import { GasTokenContract } from '@aztec/noir-contracts.js'; -import { GasTokenAddress, getCanonicalGasToken } from '@aztec/protocol-contracts/gas-token'; +import { FeeJuicePortalAbi, OutboxAbi, PortalERC20Abi } from '@aztec/l1-artifacts'; +import { FeeJuiceContract } from '@aztec/noir-contracts.js'; +import { FeeJuiceAddress, getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; import { type Account, @@ -23,18 +23,18 @@ import { } from 'viem'; export interface IGasBridgingTestHarness { - getL1GasTokenBalance(address: EthAddress): Promise; + getL1FeeJuiceBalance(address: EthAddress): Promise; prepareTokensOnL1( l1TokenBalance: bigint, bridgeAmount: bigint, owner: AztecAddress, ): Promise<{ secret: Fr; secretHash: Fr; msgHash: Fr }>; bridgeFromL1ToL2(l1TokenBalance: bigint, bridgeAmount: bigint, owner: AztecAddress): Promise; - l2Token: GasTokenContract; - l1GasTokenAddress: EthAddress; + l2Token: FeeJuiceContract; + l1FeeJuiceAddress: EthAddress; } -export interface GasPortalTestingHarnessFactoryConfig { +export interface FeeJuicePortalTestingHarnessFactoryConfig { aztecNode: AztecNode; pxeService: PXE; publicClient: PublicClient; @@ -44,15 +44,15 @@ export interface GasPortalTestingHarnessFactoryConfig { mockL1?: boolean; } -export class GasPortalTestingHarnessFactory { - private constructor(private config: GasPortalTestingHarnessFactoryConfig) {} +export class FeeJuicePortalTestingHarnessFactory { + private constructor(private config: FeeJuicePortalTestingHarnessFactoryConfig) {} private async createMock() { const wallet = this.config.wallet; // In this 
case we are not using a portal we just yolo it. - const gasL2 = await GasTokenContract.deploy(wallet) - .send({ contractAddressSalt: getCanonicalGasToken().instance.salt }) + const gasL2 = await FeeJuiceContract.deploy(wallet) + .send({ contractAddressSalt: getCanonicalFeeJuice().instance.salt }) .deployed(); return Promise.resolve(new MockGasBridgingTestHarness(gasL2, EthAddress.ZERO)); } @@ -63,10 +63,10 @@ export class GasPortalTestingHarnessFactory { const ethAccount = EthAddress.fromString((await walletClient.getAddresses())[0]); const l1ContractAddresses = (await pxeService.getNodeInfo()).l1ContractAddresses; - const gasTokenAddress = l1ContractAddresses.gasTokenAddress; - const gasPortalAddress = l1ContractAddresses.gasPortalAddress; + const feeJuiceAddress = l1ContractAddresses.feeJuiceAddress; + const feeJuicePortalAddress = l1ContractAddresses.feeJuicePortalAddress; - if (gasTokenAddress.isZero() || gasPortalAddress.isZero()) { + if (feeJuiceAddress.isZero() || feeJuicePortalAddress.isZero()) { throw new Error('Gas portal not deployed on L1'); } @@ -77,18 +77,18 @@ export class GasPortalTestingHarnessFactory { }); const gasL1 = getContract({ - address: gasTokenAddress.toString(), + address: feeJuiceAddress.toString(), abi: PortalERC20Abi, client: walletClient, }); - const gasPortal = getContract({ - address: gasPortalAddress.toString(), - abi: GasPortalAbi, + const feeJuicePortal = getContract({ + address: feeJuicePortalAddress.toString(), + abi: FeeJuicePortalAbi, client: walletClient, }); - const gasL2 = await GasTokenContract.at(GasTokenAddress, wallet); + const gasL2 = await FeeJuiceContract.at(FeeJuiceAddress, wallet); return new GasBridgingTestHarness( aztecNode, @@ -96,8 +96,8 @@ export class GasPortalTestingHarnessFactory { logger, gasL2, ethAccount, - gasPortalAddress, - gasPortal, + feeJuicePortalAddress, + feeJuicePortal, gasL1, outbox, publicClient, @@ -105,8 +105,8 @@ export class GasPortalTestingHarnessFactory { ); } - static create(config: 
GasPortalTestingHarnessFactoryConfig): Promise { - const factory = new GasPortalTestingHarnessFactory(config); + static create(config: FeeJuicePortalTestingHarnessFactoryConfig): Promise { + const factory = new FeeJuicePortalTestingHarnessFactory(config); if (config.mockL1) { return factory.createMock(); } else { @@ -116,7 +116,7 @@ export class GasPortalTestingHarnessFactory { } class MockGasBridgingTestHarness implements IGasBridgingTestHarness { - constructor(public l2Token: GasTokenContract, public l1GasTokenAddress: EthAddress) {} + constructor(public l2Token: FeeJuiceContract, public l1FeeJuiceAddress: EthAddress) {} prepareTokensOnL1( _l1TokenBalance: bigint, _bridgeAmount: bigint, @@ -127,8 +127,8 @@ class MockGasBridgingTestHarness implements IGasBridgingTestHarness { async bridgeFromL1ToL2(_l1TokenBalance: bigint, bridgeAmount: bigint, owner: AztecAddress): Promise { await this.l2Token.methods.mint_public(owner, bridgeAmount).send().wait(); } - getL1GasTokenBalance(_address: EthAddress): Promise { - throw new Error('Cannot get gas token balance on mocked L1.'); + getL1FeeJuiceBalance(_address: EthAddress): Promise { + throw new Error('Cannot get Fee Juice balance on mocked L1.'); } } @@ -146,7 +146,7 @@ class GasBridgingTestHarness implements IGasBridgingTestHarness { public logger: DebugLogger, /** L2 Token/Bridge contract. */ - public l2Token: GasTokenContract, + public l2Token: FeeJuiceContract, /** Eth account to interact with. */ public ethAccount: EthAddress, @@ -154,7 +154,7 @@ class GasBridgingTestHarness implements IGasBridgingTestHarness { /** Portal address. */ public tokenPortalAddress: EthAddress, /** Token portal instance. */ - public tokenPortal: GetContractReturnType>, + public tokenPortal: GetContractReturnType>, /** Underlying token for portal tests. */ public underlyingERC20: GetContractReturnType>, /** Message Bridge Outbox. 
*/ @@ -165,7 +165,7 @@ class GasBridgingTestHarness implements IGasBridgingTestHarness { public walletClient: WalletClient, ) {} - get l1GasTokenAddress() { + get l1FeeJuiceAddress() { return EthAddress.fromString(this.underlyingERC20.address); } @@ -185,7 +185,7 @@ class GasBridgingTestHarness implements IGasBridgingTestHarness { expect(await this.underlyingERC20.read.balanceOf([this.ethAccount.toString()])).toBe(amount); } - async getL1GasTokenBalance(address: EthAddress) { + async getL1FeeJuiceBalance(address: EthAddress) { return await this.underlyingERC20.read.balanceOf([address.toString()]); } @@ -230,7 +230,7 @@ class GasBridgingTestHarness implements IGasBridgingTestHarness { // Deposit tokens to the TokenPortal const msgHash = await this.sendTokensToPortalPublic(bridgeAmount, owner, secretHash); - expect(await this.getL1GasTokenBalance(this.ethAccount)).toBe(l1TokenBalance - bridgeAmount); + expect(await this.getL1FeeJuiceBalance(this.ethAccount)).toBe(l1TokenBalance - bridgeAmount); // Perform an unrelated transactions on L2 to progress the rollup by 2 blocks. await this.l2Token.methods.check_balance(0).send().wait(); diff --git a/yarn-project/ethereum/src/deploy_l1_contracts.ts b/yarn-project/ethereum/src/deploy_l1_contracts.ts index 3013cf45795..42f7869cba2 100644 --- a/yarn-project/ethereum/src/deploy_l1_contracts.ts +++ b/yarn-project/ethereum/src/deploy_l1_contracts.ts @@ -79,13 +79,13 @@ export interface L1ContractArtifactsForDeployment { */ rollup: ContractArtifacts; /** - * The token to pay for gas. This will be bridged to L2 via the gasPortal below + * The token to pay for gas. This will be bridged to L2 via the feeJuicePortal below */ - gasToken: ContractArtifacts; + feeJuice: ContractArtifacts; /** * Gas portal contract artifacts. 
Optional for now as gas is not strictly enforced */ - gasPortal: ContractArtifacts; + feeJuicePortal: ContractArtifacts; } export type L1Clients = { @@ -141,7 +141,7 @@ export const deployL1Contracts = async ( chain: Chain, logger: DebugLogger, contractsToDeploy: L1ContractArtifactsForDeployment, - args: { l2GasTokenAddress: AztecAddress; vkTreeRoot: Fr }, + args: { l2FeeJuiceAddress: AztecAddress; vkTreeRoot: Fr }, ): Promise => { logger.debug('Deploying contracts...'); @@ -171,14 +171,14 @@ export const deployL1Contracts = async ( ); logger.info(`Deployed AvailabilityOracle at ${availabilityOracleAddress}`); - const gasTokenAddress = await deployL1Contract( + const feeJuiceAddress = await deployL1Contract( walletClient, publicClient, - contractsToDeploy.gasToken.contractAbi, - contractsToDeploy.gasToken.contractBytecode, + contractsToDeploy.feeJuice.contractAbi, + contractsToDeploy.feeJuice.contractBytecode, ); - logger.info(`Deployed Gas Token at ${gasTokenAddress}`); + logger.info(`Deployed Fee Juice at ${feeJuiceAddress}`); const rollupAddress = await deployL1Contract( walletClient, @@ -188,7 +188,7 @@ export const deployL1Contracts = async ( [ getAddress(registryAddress.toString()), getAddress(availabilityOracleAddress.toString()), - getAddress(gasTokenAddress.toString()), + getAddress(feeJuiceAddress.toString()), args.vkTreeRoot.toString(), ], ); @@ -228,43 +228,43 @@ export const deployL1Contracts = async ( { account }, ); - // this contract remains uninitialized because at this point we don't know the address of the gas token on L2 - const gasPortalAddress = await deployL1Contract( + // this contract remains uninitialized because at this point we don't know the address of the Fee Juice on L2 + const feeJuicePortalAddress = await deployL1Contract( walletClient, publicClient, - contractsToDeploy.gasPortal.contractAbi, - contractsToDeploy.gasPortal.contractBytecode, + contractsToDeploy.feeJuicePortal.contractAbi, + 
contractsToDeploy.feeJuicePortal.contractBytecode, ); - logger.info(`Deployed Gas Portal at ${gasPortalAddress}`); + logger.info(`Deployed Gas Portal at ${feeJuicePortalAddress}`); - const gasPortal = getContract({ - address: gasPortalAddress.toString(), - abi: contractsToDeploy.gasPortal.contractAbi, + const feeJuicePortal = getContract({ + address: feeJuicePortalAddress.toString(), + abi: contractsToDeploy.feeJuicePortal.contractAbi, client: walletClient, }); await publicClient.waitForTransactionReceipt({ - hash: await gasPortal.write.initialize([ + hash: await feeJuicePortal.write.initialize([ registryAddress.toString(), - gasTokenAddress.toString(), - args.l2GasTokenAddress.toString(), + feeJuiceAddress.toString(), + args.l2FeeJuiceAddress.toString(), ]), }); logger.info( - `Initialized Gas Portal at ${gasPortalAddress} to bridge between L1 ${gasTokenAddress} to L2 ${args.l2GasTokenAddress}`, + `Initialized Gas Portal at ${feeJuicePortalAddress} to bridge between L1 ${feeJuiceAddress} to L2 ${args.l2FeeJuiceAddress}`, ); - // fund the rollup contract with gas tokens - const gasToken = getContract({ - address: gasTokenAddress.toString(), - abi: contractsToDeploy.gasToken.contractAbi, + // fund the rollup contract with Fee Juice + const feeJuice = getContract({ + address: feeJuiceAddress.toString(), + abi: contractsToDeploy.feeJuice.contractAbi, client: walletClient, }); - const receipt = await gasToken.write.mint([rollupAddress.toString(), 100000000000000000000n], {} as any); + const receipt = await feeJuice.write.mint([rollupAddress.toString(), 100000000000000000000n], {} as any); await publicClient.waitForTransactionReceipt({ hash: receipt }); - logger.info(`Funded rollup contract with gas tokens`); + logger.info(`Funded rollup contract with Fee Juice`); const l1Contracts: L1ContractAddresses = { availabilityOracleAddress, @@ -272,8 +272,8 @@ export const deployL1Contracts = async ( registryAddress, inboxAddress, outboxAddress, - gasTokenAddress, - 
gasPortalAddress, + feeJuiceAddress, + feeJuicePortalAddress, }; return { diff --git a/yarn-project/ethereum/src/l1_contract_addresses.ts b/yarn-project/ethereum/src/l1_contract_addresses.ts index ec2f1c0be40..ad43dc72d51 100644 --- a/yarn-project/ethereum/src/l1_contract_addresses.ts +++ b/yarn-project/ethereum/src/l1_contract_addresses.ts @@ -12,8 +12,8 @@ export const l1ContractsNames = [ 'registryAddress', 'inboxAddress', 'outboxAddress', - 'gasTokenAddress', - 'gasPortalAddress', + 'feeJuiceAddress', + 'feeJuicePortalAddress', ] as const; /** @@ -30,8 +30,8 @@ export function getL1ContractAddressesFromEnv() { REGISTRY_CONTRACT_ADDRESS, INBOX_CONTRACT_ADDRESS, OUTBOX_CONTRACT_ADDRESS, - GAS_TOKEN_CONTRACT_ADDRESS, - GAS_PORTAL_CONTRACT_ADDRESS, + FEE_JUICE_CONTRACT_ADDRESS, + FEE_JUICE_PORTAL_CONTRACT_ADDRESS, } = process.env; return { @@ -42,9 +42,9 @@ export function getL1ContractAddressesFromEnv() { registryAddress: REGISTRY_CONTRACT_ADDRESS ? EthAddress.fromString(REGISTRY_CONTRACT_ADDRESS) : EthAddress.ZERO, inboxAddress: INBOX_CONTRACT_ADDRESS ? EthAddress.fromString(INBOX_CONTRACT_ADDRESS) : EthAddress.ZERO, outboxAddress: OUTBOX_CONTRACT_ADDRESS ? EthAddress.fromString(OUTBOX_CONTRACT_ADDRESS) : EthAddress.ZERO, - gasTokenAddress: GAS_TOKEN_CONTRACT_ADDRESS ? EthAddress.fromString(GAS_TOKEN_CONTRACT_ADDRESS) : EthAddress.ZERO, - gasPortalAddress: GAS_PORTAL_CONTRACT_ADDRESS - ? EthAddress.fromString(GAS_PORTAL_CONTRACT_ADDRESS) + feeJuiceAddress: FEE_JUICE_CONTRACT_ADDRESS ? EthAddress.fromString(FEE_JUICE_CONTRACT_ADDRESS) : EthAddress.ZERO, + feeJuicePortalAddress: FEE_JUICE_PORTAL_CONTRACT_ADDRESS + ? 
EthAddress.fromString(FEE_JUICE_PORTAL_CONTRACT_ADDRESS) : EthAddress.ZERO, }; } diff --git a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh index af6dec6c058..520619a558b 100755 --- a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh +++ b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh @@ -19,7 +19,7 @@ CONTRACTS=( "l1-contracts:PortalERC20" "l1-contracts:UniswapPortal" "l1-contracts:IERC20" - "l1-contracts:GasPortal" + "l1-contracts:FeeJuicePortal" "l1-contracts:MockVerifier" ) diff --git a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts index d4525bca0e8..8845f2f010c 100644 --- a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts @@ -2340,7 +2340,7 @@ export function mapBaseRollupInputsToNoir(inputs: BaseRollupInputs): BaseRollupI archive_root_membership_witness: mapMembershipWitnessToNoir(inputs.archiveRootMembershipWitness), constants: mapConstantRollupDataToNoir(inputs.constants), - fee_payer_gas_token_balance_read_hint: mapPublicDataHintToNoir(inputs.feePayerGasTokenBalanceReadHint), + fee_payer_fee_juice_balance_read_hint: mapPublicDataHintToNoir(inputs.feePayerFeeJuiceBalanceReadHint), }; } diff --git a/yarn-project/protocol-contracts/scripts/copy-contracts.sh b/yarn-project/protocol-contracts/scripts/copy-contracts.sh index 5001bf8c254..d9f4a60a25c 100755 --- a/yarn-project/protocol-contracts/scripts/copy-contracts.sh +++ b/yarn-project/protocol-contracts/scripts/copy-contracts.sh @@ -5,7 +5,7 @@ mkdir -p ./artifacts contracts=( contract_class_registerer_contract-ContractClassRegisterer contract_instance_deployer_contract-ContractInstanceDeployer - gas_token_contract-GasToken + fee_juice_contract-FeeJuice key_registry_contract-KeyRegistry auth_registry_contract-AuthRegistry 
multi_call_entrypoint_contract-MultiCallEntrypoint diff --git a/yarn-project/protocol-contracts/src/gas-token/artifact.ts b/yarn-project/protocol-contracts/src/fee-juice/artifact.ts similarity index 56% rename from yarn-project/protocol-contracts/src/gas-token/artifact.ts rename to yarn-project/protocol-contracts/src/fee-juice/artifact.ts index 9d83dd2d21b..a1cf8a75119 100644 --- a/yarn-project/protocol-contracts/src/gas-token/artifact.ts +++ b/yarn-project/protocol-contracts/src/fee-juice/artifact.ts @@ -1,6 +1,6 @@ import { loadContractArtifact } from '@aztec/types/abi'; import { type NoirCompiledContract } from '@aztec/types/noir'; -import GasTokenJson from '../../artifacts/GasToken.json' assert { type: 'json' }; +import FeeJuiceJson from '../../artifacts/FeeJuice.json' assert { type: 'json' }; -export const GasTokenArtifact = loadContractArtifact(GasTokenJson as NoirCompiledContract); +export const FeeJuiceArtifact = loadContractArtifact(FeeJuiceJson as NoirCompiledContract); diff --git a/yarn-project/protocol-contracts/src/gas-token/index.test.ts b/yarn-project/protocol-contracts/src/fee-juice/index.test.ts similarity index 74% rename from yarn-project/protocol-contracts/src/gas-token/index.test.ts rename to yarn-project/protocol-contracts/src/fee-juice/index.test.ts index 9cbde768216..f2b8afea016 100644 --- a/yarn-project/protocol-contracts/src/gas-token/index.test.ts +++ b/yarn-project/protocol-contracts/src/fee-juice/index.test.ts @@ -1,10 +1,10 @@ import { computeContractAddressFromInstance, getContractClassFromArtifact } from '@aztec/circuits.js'; -import { getCanonicalGasToken } from './index.js'; +import { getCanonicalFeeJuice } from './index.js'; -describe('GasToken', () => { +describe('FeeJuice', () => { it('returns canonical protocol contract', () => { - const contract = getCanonicalGasToken(); + const contract = getCanonicalFeeJuice(); expect(computeContractAddressFromInstance(contract.instance)).toEqual(contract.address); 
expect(getContractClassFromArtifact(contract.artifact).id).toEqual(contract.contractClass.id); }); diff --git a/yarn-project/protocol-contracts/src/fee-juice/index.ts b/yarn-project/protocol-contracts/src/fee-juice/index.ts new file mode 100644 index 00000000000..1742db9ed96 --- /dev/null +++ b/yarn-project/protocol-contracts/src/fee-juice/index.ts @@ -0,0 +1,19 @@ +import { AztecAddress, FEE_JUICE_ADDRESS } from '@aztec/circuits.js'; + +import { type ProtocolContract, getCanonicalProtocolContract } from '../protocol_contract.js'; +import { FeeJuiceArtifact } from './artifact.js'; + +/** Returns the canonical deployment of the Fee Juice. */ +export function getCanonicalFeeJuice(): ProtocolContract { + const contract = getCanonicalProtocolContract(FeeJuiceArtifact, 1); + if (!contract.address.equals(FeeJuiceAddress)) { + throw new Error( + `Incorrect address for Fee Juice (got ${contract.address.toString()} but expected ${FeeJuiceAddress.toString()}).`, + ); + } + return contract; +} + +export const FeeJuiceAddress = AztecAddress.fromBigInt(FEE_JUICE_ADDRESS); + +export { FeeJuiceArtifact as FeeJuiceArtifact }; diff --git a/yarn-project/protocol-contracts/src/gas-token/index.ts b/yarn-project/protocol-contracts/src/gas-token/index.ts deleted file mode 100644 index b1524a67c2c..00000000000 --- a/yarn-project/protocol-contracts/src/gas-token/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { AztecAddress, GAS_TOKEN_ADDRESS } from '@aztec/circuits.js'; - -import { type ProtocolContract, getCanonicalProtocolContract } from '../protocol_contract.js'; -import { GasTokenArtifact } from './artifact.js'; - -/** Returns the canonical deployment of the gas token. 
*/ -export function getCanonicalGasToken(): ProtocolContract { - const contract = getCanonicalProtocolContract(GasTokenArtifact, 1); - if (!contract.address.equals(GasTokenAddress)) { - throw new Error( - `Incorrect address for gas token (got ${contract.address.toString()} but expected ${GasTokenAddress.toString()}).`, - ); - } - return contract; -} - -export const GasTokenAddress = AztecAddress.fromBigInt(GAS_TOKEN_ADDRESS); - -export { GasTokenArtifact }; diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index 556523a5263..f1d99085771 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -84,13 +84,13 @@ export async function buildBaseRollupInput( i < noteHashSubtreeSiblingPathArray.length ? noteHashSubtreeSiblingPathArray[i] : Fr.ZERO, ); - // Create data hint for reading fee payer initial balance in gas tokens + // Create data hint for reading fee payer initial balance in Fee Juice // If no fee payer is set, read hint should be empty // If there is already a public data write for this slot, also skip the read hint const hintsBuilder = new HintsBuilder(db); const leafSlot = computeFeePayerBalanceLeafSlot(tx.data.feePayer); const existingBalanceWrite = tx.data.end.publicDataUpdateRequests.find(write => write.leafSlot.equals(leafSlot)); - const feePayerGasTokenBalanceReadHint = + const feePayerFeeJuiceBalanceReadHint = leafSlot.isZero() || existingBalanceWrite ? 
PublicDataHint.empty() : await hintsBuilder.getPublicDataHint(leafSlot.toBigInt()); @@ -163,7 +163,7 @@ export async function buildBaseRollupInput( kernelData: getKernelDataFor(tx, kernelVk, proof), start, stateDiffHints, - feePayerGasTokenBalanceReadHint, + feePayerFeeJuiceBalanceReadHint: feePayerFeeJuiceBalanceReadHint, sortedPublicDataWrites: txPublicDataUpdateRequestInfo.sortedPublicDataWrites, sortedPublicDataWritesIndexes: txPublicDataUpdateRequestInfo.sortedPublicDataWritesIndexes, lowPublicDataWritesPreimages: txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages, diff --git a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts index 2b258944701..23234b509c2 100644 --- a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts @@ -7,7 +7,7 @@ import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; import { initStoreForRollup } from '@aztec/kv-store/utils'; import { getCanonicalAuthRegistry } from '@aztec/protocol-contracts/auth-registry'; import { getCanonicalClassRegisterer } from '@aztec/protocol-contracts/class-registerer'; -import { getCanonicalGasToken } from '@aztec/protocol-contracts/gas-token'; +import { getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; import { getCanonicalInstanceDeployer } from '@aztec/protocol-contracts/instance-deployer'; import { getCanonicalKeyRegistry } from '@aztec/protocol-contracts/key-registry'; import { getCanonicalMultiCallEntrypointContract } from '@aztec/protocol-contracts/multi-call-entrypoint'; @@ -66,7 +66,7 @@ export async function createPXEService( getCanonicalClassRegisterer(), getCanonicalInstanceDeployer(), getCanonicalMultiCallEntrypointContract(), - getCanonicalGasToken(), + getCanonicalFeeJuice(), getCanonicalKeyRegistry(), getCanonicalAuthRegistry(), ]) { diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts 
index d6d3e2437a7..1b8708d9582 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -48,7 +48,7 @@ import { SerialQueue } from '@aztec/foundation/fifo'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { type KeyStore } from '@aztec/key-store'; import { ClassRegistererAddress } from '@aztec/protocol-contracts/class-registerer'; -import { getCanonicalGasToken } from '@aztec/protocol-contracts/gas-token'; +import { getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; import { getCanonicalInstanceDeployer } from '@aztec/protocol-contracts/instance-deployer'; import { getCanonicalKeyRegistryAddress } from '@aztec/protocol-contracts/key-registry'; import { getCanonicalMultiCallEntrypointAddress } from '@aztec/protocol-contracts/multi-call-entrypoint'; @@ -618,7 +618,7 @@ export class PXEService implements PXE { pxeVersion: this.packageVersion, protocolContractAddresses: { classRegisterer: ClassRegistererAddress, - gasToken: getCanonicalGasToken().address, + feeJuice: getCanonicalFeeJuice().address, instanceDeployer: getCanonicalInstanceDeployer().address, keyRegistry: getCanonicalKeyRegistryAddress(), multiCallEntrypoint: getCanonicalMultiCallEntrypointAddress(), diff --git a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts b/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts index c649d944533..b8931137f01 100644 --- a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts +++ b/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts @@ -31,8 +31,8 @@ function createPXEService(): Promise { registryAddress: EthAddress.random(), inboxAddress: EthAddress.random(), outboxAddress: EthAddress.random(), - gasTokenAddress: EthAddress.random(), - gasPortalAddress: EthAddress.random(), + feeJuiceAddress: EthAddress.random(), + feeJuicePortalAddress: EthAddress.random(), }; node.getL1ContractAddresses.mockResolvedValue(mockedContracts); diff --git 
a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index cf24b865947..160565ef4c4 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -5,7 +5,7 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import { FPCContract } from '@aztec/noir-contracts.js/FPC'; import { TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; import { AuthRegistryAddress } from '@aztec/protocol-contracts/auth-registry'; -import { GasTokenAddress } from '@aztec/protocol-contracts/gas-token'; +import { FeeJuiceAddress } from '@aztec/protocol-contracts/fee-juice'; import { type GlobalReaderConfig } from './global_variable_builder/index.js'; import { type PublisherConfig, type TxSenderConfig, getTxSenderConfigFromEnv } from './publisher/config.js'; @@ -136,7 +136,7 @@ function getDefaultAllowedSetupFunctions(): AllowedElement[] { }, // needed for claiming on the same tx as a spend { - address: GasTokenAddress, + address: FeeJuiceAddress, selector: FunctionSelector.fromSignature('_increase_public_balance((Field),Field)'), }, // needed for private transfers via FPC diff --git a/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts b/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts index 5cc1219637c..c14b8792083 100644 --- a/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts +++ b/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts @@ -1,8 +1,8 @@ import { type Tx, mockTx } from '@aztec/circuit-types'; import { AztecAddress, Fr, FunctionSelector, GasSettings } from '@aztec/circuits.js'; import { pedersenHash } from '@aztec/foundation/crypto'; -import { GasTokenContract } from '@aztec/noir-contracts.js'; -import { GasTokenAddress } from '@aztec/protocol-contracts/gas-token'; +import { FeeJuiceContract } from '@aztec/noir-contracts.js'; +import { FeeJuiceAddress } from '@aztec/protocol-contracts/fee-juice'; import 
{ type MockProxy, mock, mockFn } from 'jest-mock-extended'; @@ -12,15 +12,15 @@ import { patchNonRevertibleFn, patchRevertibleFn } from './test_utils.js'; describe('GasTxValidator', () => { let validator: GasTxValidator; let publicStateSource: MockProxy; - let gasTokenAddress: AztecAddress; + let feeJuiceAddress: AztecAddress; beforeEach(() => { - gasTokenAddress = GasTokenAddress; + feeJuiceAddress = FeeJuiceAddress; publicStateSource = mock({ storageRead: mockFn().mockImplementation((_address: AztecAddress, _slot: Fr) => Fr.ZERO), }); - validator = new GasTxValidator(publicStateSource, gasTokenAddress, false); + validator = new GasTxValidator(publicStateSource, feeJuiceAddress, false); }); let tx: Tx; @@ -37,14 +37,14 @@ describe('GasTxValidator', () => { inclusionFee: new Fr(TX_FEE), }); payer = tx.data.feePayer; - expectedBalanceSlot = pedersenHash([GasTokenContract.storage.balances.slot, payer]); + expectedBalanceSlot = pedersenHash([FeeJuiceContract.storage.balances.slot, payer]); expect(tx.data.constants.txContext.gasSettings.getFeeLimit()).toEqual(new Fr(TX_FEE)); }); const mockBalance = (balance: bigint) => { publicStateSource.storageRead.mockImplementation((address, slot) => - Promise.resolve(address.equals(gasTokenAddress) && slot.equals(expectedBalanceSlot) ? new Fr(balance) : Fr.ZERO), + Promise.resolve(address.equals(feeJuiceAddress) && slot.equals(expectedBalanceSlot) ? 
new Fr(balance) : Fr.ZERO), ); }; @@ -68,10 +68,10 @@ describe('GasTxValidator', () => { it('allows fee paying txs if fee payer claims enough balance during setup', async () => { mockBalance(TX_FEE - 1n); patchNonRevertibleFn(tx, 0, { - address: GasTokenAddress, + address: FeeJuiceAddress, selector: FunctionSelector.fromSignature('_increase_public_balance((Field),Field)'), args: [payer, new Fr(1n)], - msgSender: GasTokenAddress, + msgSender: FeeJuiceAddress, }); await expectValidateSuccess(tx); }); diff --git a/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts b/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts index b3e1c605812..ea93d51cafa 100644 --- a/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts +++ b/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts @@ -1,7 +1,7 @@ import { PublicKernelType, type Tx, type TxValidator } from '@aztec/circuit-types'; import { type AztecAddress, type Fr } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; -import { GasTokenArtifact } from '@aztec/protocol-contracts/gas-token'; +import { FeeJuiceArtifact } from '@aztec/protocol-contracts/fee-juice'; import { AbstractPhaseManager, computeFeePayerBalanceStorageSlot } from '@aztec/simulator'; /** Provides a view into public contract state */ @@ -12,11 +12,11 @@ export interface PublicStateSource { export class GasTxValidator implements TxValidator { #log = createDebugLogger('aztec:sequencer:tx_validator:tx_gas'); #publicDataSource: PublicStateSource; - #gasTokenAddress: AztecAddress; + #feeJuiceAddress: AztecAddress; - constructor(publicDataSource: PublicStateSource, gasTokenAddress: AztecAddress, public enforceFees: boolean) { + constructor(publicDataSource: PublicStateSource, feeJuiceAddress: AztecAddress, public enforceFees: boolean) { this.#publicDataSource = publicDataSource; - this.#gasTokenAddress = gasTokenAddress; + this.#feeJuiceAddress = feeJuiceAddress; } async validateTxs(txs: 
Tx[]): Promise<[validTxs: Tx[], invalidTxs: Tx[]]> { @@ -50,18 +50,18 @@ export class GasTxValidator implements TxValidator { // Read current balance of the feePayer const initialBalance = await this.#publicDataSource.storageRead( - this.#gasTokenAddress, + this.#feeJuiceAddress, computeFeePayerBalanceStorageSlot(feePayer), ); - // If there is a claim in this tx that increases the fee payer balance in gas token, add it to balance + // If there is a claim in this tx that increases the fee payer balance in Fee Juice, add it to balance const { [PublicKernelType.SETUP]: setupFns } = AbstractPhaseManager.extractEnqueuedPublicCallsByPhase(tx); const claimFunctionCall = setupFns.find( fn => - fn.contractAddress.equals(this.#gasTokenAddress) && - fn.callContext.msgSender.equals(this.#gasTokenAddress) && + fn.contractAddress.equals(this.#feeJuiceAddress) && + fn.callContext.msgSender.equals(this.#feeJuiceAddress) && fn.callContext.functionSelector.equals( - GasTokenArtifact.functions.find(f => f.name === '_increase_public_balance')!, + FeeJuiceArtifact.functions.find(f => f.name === '_increase_public_balance')!, ) && fn.args[0].equals(feePayer) && !fn.callContext.isStaticCall && diff --git a/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts b/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts index 38d44fdf7b2..97f50cf4fae 100644 --- a/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts +++ b/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts @@ -1,6 +1,6 @@ import { type AllowedElement, type ProcessedTx, type Tx, type TxValidator } from '@aztec/circuit-types'; import { type GlobalVariables } from '@aztec/circuits.js'; -import { GasTokenAddress } from '@aztec/protocol-contracts/gas-token'; +import { FeeJuiceAddress } from '@aztec/protocol-contracts/fee-juice'; import { WorldStateDB, WorldStatePublicDB } from '@aztec/simulator'; import { type ContractDataSource } from '@aztec/types/contracts'; 
import { type MerkleTreeOperations } from '@aztec/world-state'; @@ -25,7 +25,7 @@ export class TxValidatorFactory { new MetadataTxValidator(globalVariables), new DoubleSpendTxValidator(new WorldStateDB(this.merkleTreeDb)), new PhasesTxValidator(this.contractDataSource, setupAllowList), - new GasTxValidator(new WorldStatePublicDB(this.merkleTreeDb), GasTokenAddress, this.enforceFees), + new GasTxValidator(new WorldStatePublicDB(this.merkleTreeDb), FeeJuiceAddress, this.enforceFees), ); } diff --git a/yarn-project/simulator/src/public/fee_payment.ts b/yarn-project/simulator/src/public/fee_payment.ts index 48082451122..1483a8a7d7f 100644 --- a/yarn-project/simulator/src/public/fee_payment.ts +++ b/yarn-project/simulator/src/public/fee_payment.ts @@ -1,24 +1,24 @@ -import { GAS_TOKEN_ADDRESS } from '@aztec/circuits.js'; +import { FEE_JUICE_ADDRESS } from '@aztec/circuits.js'; import { computePublicDataTreeLeafSlot, deriveStorageSlotInMap } from '@aztec/circuits.js/hash'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; -import { GasTokenArtifact } from '@aztec/protocol-contracts/gas-token'; +import { FeeJuiceArtifact } from '@aztec/protocol-contracts/fee-juice'; /** - * Computes the storage slot within the gas token contract for the balance of the fee payer. + * Computes the storage slot within the Fee Juice contract for the balance of the fee payer. */ export function computeFeePayerBalanceStorageSlot(feePayer: AztecAddress) { - return deriveStorageSlotInMap(GasTokenArtifact.storageLayout.balances.slot, feePayer); + return deriveStorageSlotInMap(FeeJuiceArtifact.storageLayout.balances.slot, feePayer); } /** - * Computes the leaf slot in the public data tree for the balance of the fee payer in the gas token. + * Computes the leaf slot in the public data tree for the balance of the fee payer in the Fee Juice. 
*/ export function computeFeePayerBalanceLeafSlot(feePayer: AztecAddress): Fr { if (feePayer.isZero()) { return Fr.ZERO; } - const gasToken = AztecAddress.fromBigInt(GAS_TOKEN_ADDRESS); + const feeJuice = AztecAddress.fromBigInt(FEE_JUICE_ADDRESS); const balanceSlot = computeFeePayerBalanceStorageSlot(feePayer); - return computePublicDataTreeLeafSlot(gasToken, balanceSlot); + return computePublicDataTreeLeafSlot(feeJuice, balanceSlot); } diff --git a/yarn-project/simulator/src/public/public_processor.ts b/yarn-project/simulator/src/public/public_processor.ts index e3392c67154..995e46ac032 100644 --- a/yarn-project/simulator/src/public/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor.ts @@ -14,7 +14,7 @@ import { import { AztecAddress, ContractClassRegisteredEvent, - GAS_TOKEN_ADDRESS, + FEE_JUICE_ADDRESS, type GlobalVariables, type Header, type KernelCircuitPublicInputs, @@ -202,12 +202,12 @@ export class PublicProcessor { return finalPublicDataUpdateRequests; } - const gasToken = AztecAddress.fromBigInt(GAS_TOKEN_ADDRESS); + const feeJuiceAddress = AztecAddress.fromBigInt(FEE_JUICE_ADDRESS); const balanceSlot = computeFeePayerBalanceStorageSlot(feePayer); const leafSlot = computeFeePayerBalanceLeafSlot(feePayer); const txFee = tx.data.getTransactionFee(this.globalVariables.gasFees); - this.log.debug(`Deducting ${txFee} balance in gas tokens for ${feePayer}`); + this.log.debug(`Deducting ${txFee} balance in Fee Juice for ${feePayer}`); const existingBalanceWriteIndex = finalPublicDataUpdateRequests.findIndex(request => request.leafSlot.equals(leafSlot), @@ -216,14 +216,14 @@ export class PublicProcessor { const balance = existingBalanceWriteIndex > -1 ? 
finalPublicDataUpdateRequests[existingBalanceWriteIndex].newValue - : await this.publicStateDB.storageRead(gasToken, balanceSlot); + : await this.publicStateDB.storageRead(feeJuiceAddress, balanceSlot); if (balance.lt(txFee)) { throw new Error(`Not enough balance for fee payer to pay for transaction (got ${balance} needs ${txFee})`); } const updatedBalance = balance.sub(txFee); - await this.publicStateDB.storageWrite(gasToken, balanceSlot, updatedBalance); + await this.publicStateDB.storageWrite(feeJuiceAddress, balanceSlot, updatedBalance); finalPublicDataUpdateRequests[ existingBalanceWriteIndex > -1 ? existingBalanceWriteIndex : MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX diff --git a/yarn-project/types/src/contracts/protocol_contract_addresses.ts b/yarn-project/types/src/contracts/protocol_contract_addresses.ts index 84c05ae5d9a..f031a8658f5 100644 --- a/yarn-project/types/src/contracts/protocol_contract_addresses.ts +++ b/yarn-project/types/src/contracts/protocol_contract_addresses.ts @@ -2,7 +2,7 @@ import { type AztecAddress } from '@aztec/foundation/aztec-address'; export type ProtocolContractAddresses = { classRegisterer: AztecAddress; - gasToken: AztecAddress; + feeJuice: AztecAddress; instanceDeployer: AztecAddress; keyRegistry: AztecAddress; multiCallEntrypoint: AztecAddress; From a65f79bff890947052016b6756c3296a5a1f96ce Mon Sep 17 00:00:00 2001 From: josh crites Date: Wed, 7 Aug 2024 09:24:58 -0400 Subject: [PATCH 03/61] chore(docs): Minor update to TXE docs page (#7757) Including some feedback from community members using the TXE. 
--- .../testing_contracts/index.md | 24 +++++++++++++------ 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/docs/docs/guides/smart_contracts/testing_contracts/index.md b/docs/docs/guides/smart_contracts/testing_contracts/index.md index 187035425ef..c0dd73b3f91 100644 --- a/docs/docs/guides/smart_contracts/testing_contracts/index.md +++ b/docs/docs/guides/smart_contracts/testing_contracts/index.md @@ -26,22 +26,26 @@ TXE is a JSON RPC server much like PXE, but provides an extra set of oracle func End-to-end tests are written in typescripts and use compiled Aztec contracts and generated Typescript interfaces, a private execution environment (PXE) and a simulated execution environment to process transactions, create blocks and apply state updates. This allows for advanced checks on state updates like generation the of logs, cross-chain messages and checking transaction status and also enforce the rules of the protocol (e.g. checks in our rollup circuits). If you need the rules of the protocol to be enforced or require complex interactions (such as with L1 contracts), please refer to [Testing Aztec.nr contracts with Typescript](../../js_apps/test.md). -The TXE is a super fast framework in Noir to quickly test your smart contract code. +The TXE is a super fast framework in Noir to quickly test your smart contract code. So to summarize: -* End-to-end tests are written in Typescript. TXE in Noir. -* End-to-end tests are most similar to using mocha + ethers.js to test Solidity Contracts. TXE is like foundry (fast tests in solidity) -### Running TXE +- End-to-end tests are written in Typescript. TXE in Noir. +- End-to-end tests are most similar to using mocha + ethers.js to test Solidity Contracts. TXE is like foundry (fast tests in solidity) -In order to use the TXE, it must be running on a known address. 
+### Running TXE -:::tip If you have [the sandbox](../../../getting_started.md) installed, you can run TXE tests using: `aztec test` -::: +The complete process for running tests: + +1. Compile contracts +2. Start the sandbox +3. Run `aztec test` + +In order to use the TXE, it must be running on a known address. :::warning Since TXE tests are written in Noir and executed with `aztec-nargo`, they all run in parallel. This also means every test creates their own isolated environment, so state modifications are local to each one of them. @@ -147,6 +151,8 @@ Once accounts have been created, you can impersonate them in your test by callin ```rust env.impersonate(account_address); +// or (these are equivalent) +cheatcodes::set_contract_address(contract_address); ``` ### Checking state @@ -202,3 +208,7 @@ For example: You can also use the `assert_public_call_fails` or `assert_private_call_fails` methods on the `TestEnvironment` to check that a call fails. #include_code assert_public_fail /noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_public.nr rust + +### All Cheatcodes + +You can find the full list of cheatcodes available in the TXE [here](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/noir-projects/aztec-nr/aztec/src/test/helpers/cheatcodes.nr) From 9fc38b9147a51b8e5b84ba475ef8b2a3492727d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Wed, 7 Aug 2024 15:48:53 +0200 Subject: [PATCH 04/61] refactor: nicer way to fetch slots in `TokenWithRefunds` (#7797) --- boxes/boxes/react/src/contracts/src/main.nr | 3 +-- boxes/boxes/vanilla/src/contracts/src/main.nr | 3 +-- noir-projects/aztec-nr/aztec/src/prelude.nr | 2 +- .../token_with_refunds_contract/src/main.nr | 21 +++++++------------ 4 files changed, 10 insertions(+), 19 deletions(-) diff --git a/boxes/boxes/react/src/contracts/src/main.nr b/boxes/boxes/react/src/contracts/src/main.nr index 7a3c6578afe..39984e28df7 100644 --- 
a/boxes/boxes/react/src/contracts/src/main.nr +++ b/boxes/boxes/react/src/contracts/src/main.nr @@ -1,6 +1,5 @@ contract BoxReact { - use dep::aztec::prelude::{AztecAddress, PrivateMutable, Map, NoteInterface, NoteHeader}; - use dep::aztec::protocol_types::point::Point; + use dep::aztec::prelude::{AztecAddress, PrivateMutable, Map, NoteInterface, NoteHeader, Point}; use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note_with_keys; use dep::value_note::value_note::{ValueNote, VALUE_NOTE_LEN}; diff --git a/boxes/boxes/vanilla/src/contracts/src/main.nr b/boxes/boxes/vanilla/src/contracts/src/main.nr index 73be92a5b01..9d08c789e32 100644 --- a/boxes/boxes/vanilla/src/contracts/src/main.nr +++ b/boxes/boxes/vanilla/src/contracts/src/main.nr @@ -1,6 +1,5 @@ contract Vanilla { - use dep::aztec::prelude::{AztecAddress, PrivateMutable, Map, NoteInterface, NoteHeader}; - use dep::aztec::protocol_types::point::Point; + use dep::aztec::prelude::{AztecAddress, PrivateMutable, Map, NoteInterface, NoteHeader, Point}; use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note_with_keys; use dep::value_note::value_note::{ValueNote, VALUE_NOTE_LEN}; diff --git a/noir-projects/aztec-nr/aztec/src/prelude.nr b/noir-projects/aztec-nr/aztec/src/prelude.nr index a280ee03154..5b2744d20af 100644 --- a/noir-projects/aztec-nr/aztec/src/prelude.nr +++ b/noir-projects/aztec-nr/aztec/src/prelude.nr @@ -1,6 +1,6 @@ // docs:start:prelude use dep::protocol_types::{ - address::{AztecAddress, EthAddress}, abis::function_selector::FunctionSelector, + address::{AztecAddress, EthAddress}, abis::function_selector::FunctionSelector, point::Point, traits::{Serialize, Deserialize} }; use crate::{ diff --git a/noir-projects/noir-contracts/contracts/token_with_refunds_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_with_refunds_contract/src/main.nr index 527024d7ea3..a7bdec92902 100644 --- 
a/noir-projects/noir-contracts/contracts/token_with_refunds_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_with_refunds_contract/src/main.nr @@ -424,10 +424,7 @@ contract TokenWithRefunds { // docs:end:balance_of_private // REFUNDS SPECIFIC FUNCTIONALITY FOLLOWS - use dep::aztec::{ - prelude::{FunctionSelector, NoteHeader}, - protocol_types::{storage::map::derive_storage_slot_in_map, point::Point} - }; + use dep::aztec::prelude::{FunctionSelector, NoteHeader, Point}; use crate::types::token_note::TokenNoteHidingPoint; /// We need to use different randomness for the user and for the fee payer notes because if the randomness values @@ -469,18 +466,13 @@ contract TokenWithRefunds { // to the user in the `complete_refund(...)` function. storage.balances.sub(user, U128::from_integer(funded_amount)).emit(encode_and_encrypt_note_with_keys(&mut context, user_ovpk, user_ivpk, user)); - // 4. Now we "manually" compute the slots (by setting the slots we insert the notes to the balances map under - // the correct keys) - let fee_payer_balances_slot = derive_storage_slot_in_map(TokenWithRefunds::storage().balances.slot, fee_payer); - let user_balances_slot = derive_storage_slot_in_map(TokenWithRefunds::storage().balances.slot, user); - - // 5. We create the partial notes for the fee payer and the user. + // 4. We create the partial notes for the fee payer and the user. // --> Called "partial" because they don't have the amount set yet (that will be done in `complete_refund(...)`). 
let fee_payer_partial_note = TokenNote { header: NoteHeader { contract_address: AztecAddress::zero(), nonce: 0, - storage_slot: fee_payer_balances_slot, + storage_slot: storage.balances.map.at(fee_payer).storage_slot, note_hash_counter: 0 }, amount: U128::zero(), @@ -491,7 +483,7 @@ contract TokenWithRefunds { header: NoteHeader { contract_address: AztecAddress::zero(), nonce: 0, - storage_slot: user_balances_slot, + storage_slot: storage.balances.map.at(user).storage_slot, note_hash_counter: 0 }, amount: U128::zero(), @@ -499,11 +491,11 @@ contract TokenWithRefunds { randomness: user_randomness }; - // 6. Now we get the note hiding points. + // 5. Now we get the note hiding points. let mut fee_payer_point = fee_payer_partial_note.to_note_hiding_point(); let mut user_point = user_partial_note.to_note_hiding_point(); - // 7. Set the public teardown function to `complete_refund(...)`. Public teardown is the only time when a public + // 6. Set the public teardown function to `complete_refund(...)`. Public teardown is the only time when a public // function has access to the final transaction fee, which is needed to compute the actual refund amount. context.set_public_teardown_function( context.this_address(), @@ -535,6 +527,7 @@ contract TokenWithRefunds { let tx_fee = U128::from_integer(context.transaction_fee()); // 1. We check that user funded the fee payer contract with at least the transaction fee. + // TODO(#7796): we should try to prevent reverts here assert(funded_amount >= tx_fee, "funded amount not enough to cover tx fee"); // 2. We compute the refund amount as the difference between funded amount and tx fee. From 1ecfe1d7273c61d6fef260aed2aed3832149635e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Wed, 7 Aug 2024 10:51:55 -0300 Subject: [PATCH 05/61] [chore] hackily apply no_predicates to call creation (#7804) Ideally we'd do #7729, but as noted there that is currently not working. 
The noir team is looking into improving how `no_predicates` work to eventually solve this. As a temporary measure however, wrapping the construction of the call in a function that has the attribute gets us the expected gate count reduction. --- .../contracts/token_contract/src/main.nr | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr index 03832189f32..8b455041f7d 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr @@ -394,10 +394,22 @@ contract Token { // try_sub failed to nullify enough notes to reach the target amount, so we compute the amount remaining // and try again. let remaining = amount - subtracted; - Token::at(context.this_address())._recurse_subtract_balance(account, remaining.to_field()).call(context) + compute_recurse_subtract_balance_call(*context, account, remaining).call(context) } } + // TODO(#7729): apply no_predicates to the contract interface method directly instead of having to use a wrapper + // like we do here. + #[no_predicates] + #[contract_library_method] + fn compute_recurse_subtract_balance_call( + context: PrivateContext, + account: AztecAddress, + remaining: U128 + ) -> PrivateCallInterface<25, U128, (AztecAddress, Field)> { + Token::at(context.this_address())._recurse_subtract_balance(account, remaining.to_field()) + } + // TODO(#7728): even though the amount should be a U128, we can't have that type in a contract interface due to // serialization issues. 
#[aztec(internal)] From f0f28fc24cfeba18f5c16c77a4505d16dc1e02df Mon Sep 17 00:00:00 2001 From: ledwards2225 <98505400+ledwards2225@users.noreply.github.com> Date: Wed, 7 Aug 2024 09:33:31 -0700 Subject: [PATCH 06/61] feat: Hook up secondary calldata column in dsl (#7759) Previously we could use a single calldata and return_data from noir with support from the bberg backend. With [this](https://github.com/noir-lang/noir/pull/5599/files) PR, noir has an interface for multiple calldata entities. The backend has support for two calldata columns (`calldata` and `secondary_calldata`). This work hooks up a second calldata column in dsl. The main limitation of this work is that there is no way to distinguish between the two calldata columns in dsl. This is OK for the operations within a single circuit because in that context there is no important distinction between the two calldata columns (`calldata`, `secondary_calldata`). It does cause a problem however in the mechanism for linking two circuits via the databus. This is because we need to know which calldata corresponds to app data and which corresponds to previous kernel data in order to prove that the connection was made faithfully. The ideal solution is probably to treat `secondary_calldata` (possibly rename to `app_calldata`?) as a unique entity in noir (similar to how `calldata` and `return_data` are treated as different entities), rather than allowing arbitrarily many individual `calldata` entities. I made an issue [here](https://github.com/AztecProtocol/barretenberg/issues/1070). 
--------- Co-authored-by: sirasistant --- barretenberg/acir_tests/reset_acir_tests.sh | 4 ++ .../dsl/acir_format/acir_format.cpp | 1 + .../dsl/acir_format/acir_integration.test.cpp | 56 ++++++++++++++++++- .../dsl/acir_format/block_constraint.cpp | 56 ++++++++++++++----- .../dsl/acir_format/block_constraint.hpp | 11 ++++ .../databus_two_calldata/Nargo.toml | 6 ++ .../databus_two_calldata/Prover.toml | 3 + .../databus_two_calldata/src/main.nr | 11 ++++ .../databus_two_calldata_simple/Nargo.toml | 6 ++ .../databus_two_calldata_simple/Prover.toml | 3 + .../databus_two_calldata_simple/src/main.nr | 5 ++ 11 files changed, 148 insertions(+), 14 deletions(-) create mode 100644 noir/noir-repo/test_programs/execution_success/databus_two_calldata/Nargo.toml create mode 100644 noir/noir-repo/test_programs/execution_success/databus_two_calldata/Prover.toml create mode 100644 noir/noir-repo/test_programs/execution_success/databus_two_calldata/src/main.nr create mode 100644 noir/noir-repo/test_programs/execution_success/databus_two_calldata_simple/Nargo.toml create mode 100644 noir/noir-repo/test_programs/execution_success/databus_two_calldata_simple/Prover.toml create mode 100644 noir/noir-repo/test_programs/execution_success/databus_two_calldata_simple/src/main.nr diff --git a/barretenberg/acir_tests/reset_acir_tests.sh b/barretenberg/acir_tests/reset_acir_tests.sh index c89731cd79c..dbb21572fe2 100755 --- a/barretenberg/acir_tests/reset_acir_tests.sh +++ b/barretenberg/acir_tests/reset_acir_tests.sh @@ -1,8 +1,12 @@ # Run from within barretenberg/acir_tests + +# clean and rebuild noir then compile the test programs cd ../../noir/noir-repo cargo clean noirup -p . 
cd test_programs && ./rebuild.sh +# remove and repopulate the test artifacts in bberg cd ../../../barretenberg/acir_tests rm -rf acir_tests +./clone_test_vectors.sh \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp index 8fb66b1b522..091dd13752f 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp @@ -190,6 +190,7 @@ void build_constraints(Builder& builder, } // Add block constraints + assign_calldata_ids(constraint_system.block_constraints); for (size_t i = 0; i < constraint_system.block_constraints.size(); ++i) { const auto& constraint = constraint_system.block_constraints.at(i); create_block_constraints(builder, constraint, has_valid_witness_assignments); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_integration.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_integration.test.cpp index c0f889b432f..387c11e7609 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_integration.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_integration.test.cpp @@ -69,6 +69,7 @@ class AcirIntegrationTest : public ::testing::Test { info("log circuit size = ", prover.instance->proving_key.log_circuit_size); #endif auto proof = prover.construct_proof(); + // Verify Honk proof auto verification_key = std::make_shared(prover.instance->proving_key); Verifier verifier{ verification_key }; @@ -430,7 +431,7 @@ INSTANTIATE_TEST_SUITE_P(AcirTests, testing::Values("fold_basic", "fold_basic_nested_call")); /** - *@brief A basic test of a circuit generated in noir that makes use of the databus + * @brief A basic test of a circuit generated in noir that makes use of the databus * */ TEST_F(AcirIntegrationTest, DISABLED_Databus) @@ -452,6 +453,59 @@ TEST_F(AcirIntegrationTest, DISABLED_Databus) 
EXPECT_TRUE(prove_and_verify_honk(builder)); } +/** + * @brief Test a program that uses two databus calldata columns + * @details In addition to checking that a proof of the resulting circuit verfies, check that the specific structure of + * the calldata/return data interaction in the noir program is reflected in the bberg circuit + */ +TEST_F(AcirIntegrationTest, DISABLED_DatabusTwoCalldata) +{ + using Flavor = MegaFlavor; + using Builder = Flavor::CircuitBuilder; + + std::string test_name = "databus_two_calldata"; + info("Test: ", test_name); + acir_format::AcirProgram acir_program = get_program_data_from_test_file(test_name); + + // Construct a bberg circuit from the acir representation + Builder builder = acir_format::create_circuit(acir_program.constraints, 0, acir_program.witness); + + // Check that the databus columns in the builder have been populated as expected + const auto& calldata = builder.get_calldata(); + const auto& secondary_calldata = builder.get_secondary_calldata(); + const auto& return_data = builder.get_return_data(); + + ASSERT(calldata.size() == 4); + ASSERT(secondary_calldata.size() == 3); + ASSERT(return_data.size() == 4); + + // Check that return data was computed from the two calldata inputs as expected + ASSERT_EQ(builder.get_variable(calldata[0]) + builder.get_variable(secondary_calldata[0]), + builder.get_variable(return_data[0])); + ASSERT_EQ(builder.get_variable(calldata[1]) + builder.get_variable(secondary_calldata[1]), + builder.get_variable(return_data[1])); + ASSERT_EQ(builder.get_variable(calldata[2]) + builder.get_variable(secondary_calldata[2]), + builder.get_variable(return_data[2])); + ASSERT_EQ(builder.get_variable(calldata[3]), builder.get_variable(return_data[3])); + + // Ensure that every index of each bus column was read once as expected + for (size_t idx = 0; idx < calldata.size(); ++idx) { + ASSERT_EQ(calldata.get_read_count(idx), 1); + } + for (size_t idx = 0; idx < secondary_calldata.size(); ++idx) { + 
ASSERT_EQ(secondary_calldata.get_read_count(idx), 1); + } + for (size_t idx = 0; idx < return_data.size(); ++idx) { + ASSERT_EQ(return_data.get_read_count(idx), 1); + } + + // This prints a summary of the types of gates in the circuit + builder.blocks.summarize(); + + // Construct and verify Honk proof + EXPECT_TRUE(prove_and_verify_honk(builder)); +} + /** * @brief Ensure that adding gates post-facto to a circuit generated from acir still results in a valid circuit * @details This is a pattern required by e.g. ClientIvc which appends recursive verifiers to acir-generated circuits diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.cpp index 1ba27ff530d..f31501bdfda 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.cpp @@ -163,20 +163,34 @@ void process_call_data_operations(Builder& builder, using databus_ct = stdlib::databus; databus_ct databus; - // Populate the calldata in the databus - databus.calldata.set_values(init); - for (const auto& op : constraint.trace) { - ASSERT(op.access_type == 0); - field_ct value = poly_to_field_ct(op.value, builder); - field_ct index = poly_to_field_ct(op.index, builder); - fr w_value = 0; - if (has_valid_witness_assignments) { - // If witness are assigned, we use the correct value for w - w_value = index.get_value(); + + // Method for processing operations on a generic databus calldata array + auto process_calldata = [&](auto& calldata_array) { + calldata_array.set_values(init); // Initialize the data in the bus array + + for (const auto& op : constraint.trace) { + ASSERT(op.access_type == 0); + field_ct value = poly_to_field_ct(op.value, builder); + field_ct index = poly_to_field_ct(op.index, builder); + fr w_value = 0; + if (has_valid_witness_assignments) { + // If witness are assigned, we use the correct value for w + 
w_value = index.get_value(); + } + field_ct w = field_ct::from_witness(&builder, w_value); + value.assert_equal(calldata_array[w]); + w.assert_equal(index); } - field_ct w = field_ct::from_witness(&builder, w_value); - value.assert_equal(databus.calldata[w]); - w.assert_equal(index); + }; + + // Process primary or secondary calldata based on calldata_id + if (constraint.calldata_id == 0) { + process_calldata(databus.calldata); + } else if (constraint.calldata_id == 1) { + process_calldata(databus.secondary_calldata); + } else { + info("Databus only supports two calldata arrays."); + ASSERT(false); } } @@ -199,4 +213,20 @@ void process_return_data_operations(const BlockConstraint& constraint, std::vect ASSERT(constraint.trace.size() == 0); } +// Do nothing for Ultra since it does not support Databus +template <> void assign_calldata_ids([[maybe_unused]] std::vector& constraints) {} + +template <> void assign_calldata_ids(std::vector& constraints) +{ + // Assign unique ID to each calldata block constraint + uint32_t calldata_id = 0; + for (auto& constraint : constraints) { + if (constraint.type == BlockType::CallData) { + constraint.calldata_id = calldata_id++; + } + } + // The backend only supports 2 calldata columns + ASSERT(calldata_id <= 2); +} + } // namespace acir_format \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.hpp index 8a0da27058b..4fccf1d9998 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.hpp @@ -22,6 +22,7 @@ struct BlockConstraint { std::vector init; std::vector trace; BlockType type; + uint32_t calldata_id{ 0 }; }; template @@ -47,6 +48,16 @@ void process_call_data_operations(Builder& builder, template void process_return_data_operations(const BlockConstraint& constraint, std::vector>& init); +/** + * @brief 
Assign a unique ID to each calldata block constraint based on the order in which it was recieved + * TODO(https://github.com/AztecProtocol/barretenberg/issues/1070): this is a workaround to allow calldata inputs to be + * distinguished by the backend since no identifiers are received from noir. + * + * @tparam Builder + * @param constraints + */ +template void assign_calldata_ids(std::vector& constraints); + template inline void read(B& buf, MemOp& mem_op) { using serialize::read; diff --git a/noir/noir-repo/test_programs/execution_success/databus_two_calldata/Nargo.toml b/noir/noir-repo/test_programs/execution_success/databus_two_calldata/Nargo.toml new file mode 100644 index 00000000000..15d4b01ac44 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/databus_two_calldata/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "databus_two_calldata" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/databus_two_calldata/Prover.toml b/noir/noir-repo/test_programs/execution_success/databus_two_calldata/Prover.toml new file mode 100644 index 00000000000..1229857d3f5 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/databus_two_calldata/Prover.toml @@ -0,0 +1,3 @@ +x = [0,1,2,3] +y = [0,2,4] +z = [1,3,5,7] diff --git a/noir/noir-repo/test_programs/execution_success/databus_two_calldata/src/main.nr b/noir/noir-repo/test_programs/execution_success/databus_two_calldata/src/main.nr new file mode 100644 index 00000000000..75df2a0953c --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/databus_two_calldata/src/main.nr @@ -0,0 +1,11 @@ +// An simple program demonstrating two calldata array inputs and a single return data array. As an arbitrary example, +// the return data is computed as a linear combination of the calldata. 
+fn main(mut x: [u32; 4], y: call_data(0) [u32; 3], z: call_data(1) [u32; 4]) -> return_data [u32; 4] { + let mut result = [0; 4]; + for i in 0..3 { + let idx = x[i]; + result[idx] = y[idx] + z[idx]; + } + result[x[3]] = z[x[3]]; + result +} diff --git a/noir/noir-repo/test_programs/execution_success/databus_two_calldata_simple/Nargo.toml b/noir/noir-repo/test_programs/execution_success/databus_two_calldata_simple/Nargo.toml new file mode 100644 index 00000000000..5104029c08e --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/databus_two_calldata_simple/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "databus_two_calldata_simple" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/databus_two_calldata_simple/Prover.toml b/noir/noir-repo/test_programs/execution_success/databus_two_calldata_simple/Prover.toml new file mode 100644 index 00000000000..58257d1fe14 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/databus_two_calldata_simple/Prover.toml @@ -0,0 +1,3 @@ +idx = "1" +y = [7, 9] +z = [1,2,3,4] diff --git a/noir/noir-repo/test_programs/execution_success/databus_two_calldata_simple/src/main.nr b/noir/noir-repo/test_programs/execution_success/databus_two_calldata_simple/src/main.nr new file mode 100644 index 00000000000..2477f0006c8 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/databus_two_calldata_simple/src/main.nr @@ -0,0 +1,5 @@ +fn main(mut idx: u32, y: call_data(0) [u32; 2], z: call_data(1) [u32; 4]) -> return_data u32 { + let a = y[idx]; + let b = z[idx]; + a + b +} From d3c823705fb167d3e15f2c67bd92efd36716a9a3 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Wed, 7 Aug 2024 13:44:36 -0300 Subject: [PATCH 07/61] feat: Run block-proving jobs in parallel by forking world-state (#7655) # Goal We want to be able to kick off more than one `prove(blocknumber)` job in the prover-node at the same time. 
We currently cannot do it because the prover-node has a single world-state, and building a proof modifies world-state. In particular, preparing the inputs for base rollups modifies state trees, and finalising the block modifies the archive tree. # Why? This'll be needed for the proving integration contest, in case we generate blocks faster than the time it takes to prove them. It may still be useful once we move to epoch proving and sequencer-prover coordination, since the same prover could be picked for generating proofs for two consecutive epochs. # How? ## The easy way Easiest approach is to keep everything as-is today, and clone the world state before kicking off a job. Eventually, once we implement [Phil's world-state](https://github.com/AztecProtocol/engineering-designs/pull/9), we can use the writeable world-state snapshots for this. **This is what we're doing on this PR.** ## The not-so-easy way Another approach is to decouple input-generation from proving. Today the prover-orchestrator is responsible for computing all inputs, but this is not strictly needed. We can have one component that generates all inputs, modifies world-state, and outputs a graph of proving jobs (as Mitch commented [here](https://aztecprotocol.slack.com/archives/C04BTJAA694/p1722195399887399?thread_ts=1722195378.794149&cid=C04BTJAA694)). And then another component that orchestrates the execution of proving jobs exclusively based on their dependencies. Note that this new component would be a good fit for generating a block in the sequencer, which today runs an orchestrator without proving enabled to get to a block header. It's unclear whether this component should run everything serially (like [the old block builder](https://aztecprotocol.slack.com/archives/C04BTJAA694/p1722195399887399?thread_ts=1722195378.794149&cid=C04BTJAA694) did), or it makes more sense to fan out circuit simulation jobs to workers (like the proving orchestrator can do now). 
--- .../aztec-node/src/aztec-node/server.ts | 3 +- .../circuit-types/src/interfaces/index.ts | 1 + .../src/interfaces}/merkle_tree_operations.ts | 54 ++++++-- .../src/interfaces/prover-client.ts | 6 +- .../l2_block_downloader.ts | 33 ++++- .../composed/integration_l1_publisher.test.ts | 15 ++- .../end-to-end/src/e2e_prover_node.test.ts | 29 ++--- yarn-project/kv-store/src/interfaces/store.ts | 5 + yarn-project/kv-store/src/lmdb/store.test.ts | 29 +++++ yarn-project/kv-store/src/lmdb/store.ts | 23 +++- .../src/interfaces/indexed_tree.ts | 42 +------ .../standard_indexed_tree.ts | 9 +- .../prover-client/src/mocks/test_context.ts | 31 +---- .../src/orchestrator/orchestrator.ts | 21 +++- .../prover-client/src/tx-prover/factory.ts | 11 +- .../prover-client/src/tx-prover/tx-prover.ts | 84 +++---------- yarn-project/prover-node/src/factory.ts | 19 ++- .../prover-node/src/job/block-proving-job.ts | 118 +++++++++--------- yarn-project/prover-node/src/prover-node.ts | 58 ++++++--- .../src/client/sequencer-client.ts | 4 +- .../src/sequencer/sequencer.test.ts | 57 +++++---- .../src/sequencer/sequencer.ts | 19 ++- .../simulator/src/public/hints_builder.ts | 4 +- .../src/public/public_processor.test.ts | 3 +- .../simulator/src/public/public_processor.ts | 16 +-- .../src/public/setup_phase_manager.test.ts | 4 +- .../server_world_state_synchronizer.ts | 84 ++++++++++--- .../synchronizer/world_state_synchronizer.ts | 8 ++ .../world-state/src/world-state-db/index.ts | 3 +- .../src/world-state-db/merkle_tree_db.ts | 8 +- .../src/world-state-db/merkle_tree_map.ts | 11 ++ .../merkle_tree_operations_facade.ts | 13 +- .../merkle_tree_snapshot_operations_facade.ts | 13 +- .../src/world-state-db/merkle_trees.ts | 27 ++-- 34 files changed, 495 insertions(+), 370 deletions(-) rename yarn-project/{world-state/src/world-state-db => circuit-types/src/interfaces}/merkle_tree_operations.ts (82%) create mode 100644 yarn-project/kv-store/src/lmdb/store.test.ts create mode 100644 
yarn-project/world-state/src/world-state-db/merkle_tree_map.ts diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 8fa83268f59..931e33dc032 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -169,7 +169,7 @@ export class AztecNodeService implements AztecNode { const simulationProvider = await createSimulationProvider(config, log); - const prover = await createProverClient(config, worldStateSynchronizer, archiver, telemetry); + const prover = await createProverClient(config, telemetry); if (!prover && !config.disableSequencer) { throw new Error("Can't start a sequencer without a prover"); @@ -742,6 +742,7 @@ export class AztecNodeService implements AztecNode { this.telemetry, ); const processor = publicProcessorFactory.create(prevHeader, newGlobalVariables); + // REFACTOR: Consider merging ProcessReturnValues into ProcessedTx const [processedTxs, failedTxs, returns] = await processor.process([tx]); // REFACTOR: Consider returning the error/revert rather than throwing diff --git a/yarn-project/circuit-types/src/interfaces/index.ts b/yarn-project/circuit-types/src/interfaces/index.ts index c898f123b13..1c12714c1a3 100644 --- a/yarn-project/circuit-types/src/interfaces/index.ts +++ b/yarn-project/circuit-types/src/interfaces/index.ts @@ -10,3 +10,4 @@ export * from './block-prover.js'; export * from './server_circuit_prover.js'; export * from './private_kernel_prover.js'; export * from './tx-provider.js'; +export * from './merkle_tree_operations.js'; diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_operations.ts b/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts similarity index 82% rename from yarn-project/world-state/src/world-state-db/merkle_tree_operations.ts rename to yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts index ecddc43dcc6..58b29323712 100644 --- 
a/yarn-project/world-state/src/world-state-db/merkle_tree_operations.ts +++ b/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts @@ -1,14 +1,56 @@ -import { type L2Block, type MerkleTreeId, type SiblingPath } from '@aztec/circuit-types'; import { type Fr, type Header, type NullifierLeafPreimage, type StateReference } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; -import { type AppendOnlyTree, type BatchInsertionResult, type IndexedTree } from '@aztec/merkle-tree'; + +import { type L2Block } from '../l2_block.js'; +import { type MerkleTreeId } from '../merkle_tree_id.js'; +import { type SiblingPath } from '../sibling_path/sibling_path.js'; /** * Type alias for the nullifier tree ID. */ export type IndexedTreeId = MerkleTreeId.NULLIFIER_TREE | MerkleTreeId.PUBLIC_DATA_TREE; +/** + * All of the data to be return during batch insertion. + */ +export interface LowLeafWitnessData { + /** + * Preimage of the low nullifier that proves non membership. + */ + leafPreimage: IndexedTreeLeafPreimage; + /** + * Sibling path to prove membership of low nullifier. + */ + siblingPath: SiblingPath; + /** + * The index of low nullifier. + */ + index: bigint; +} + +/** + * The result of a batch insertion in an indexed merkle tree. + */ +export interface BatchInsertionResult { + /** + * Data for the leaves to be updated when inserting the new ones. + */ + lowLeavesWitnessData?: LowLeafWitnessData[]; + /** + * Sibling path "pointing to" where the new subtree should be inserted into the tree. + */ + newSubtreeSiblingPath: SiblingPath; + /** + * The new leaves being inserted in high to low order. This order corresponds with the order of the low leaves witness. + */ + sortedNewLeaves: Buffer[]; + /** + * The indexes of the sorted new leaves to the original ones. + */ + sortedNewLeavesIndexes: number[]; +} + /** * Defines tree information. 
*/ @@ -32,14 +74,6 @@ export interface TreeInfo { depth: number; } -export type MerkleTreeMap = { - [MerkleTreeId.NULLIFIER_TREE]: IndexedTree; - [MerkleTreeId.NOTE_HASH_TREE]: AppendOnlyTree; - [MerkleTreeId.PUBLIC_DATA_TREE]: IndexedTree; - [MerkleTreeId.L1_TO_L2_MESSAGE_TREE]: AppendOnlyTree; - [MerkleTreeId.ARCHIVE]: AppendOnlyTree; -}; - type LeafTypes = { [MerkleTreeId.NULLIFIER_TREE]: Buffer; [MerkleTreeId.NOTE_HASH_TREE]: Fr; diff --git a/yarn-project/circuit-types/src/interfaces/prover-client.ts b/yarn-project/circuit-types/src/interfaces/prover-client.ts index bdf92e94e2c..e25875b67bd 100644 --- a/yarn-project/circuit-types/src/interfaces/prover-client.ts +++ b/yarn-project/circuit-types/src/interfaces/prover-client.ts @@ -2,6 +2,7 @@ import { type TxHash } from '@aztec/circuit-types'; import { type Fr } from '@aztec/circuits.js'; import { type BlockProver } from './block-prover.js'; +import { type MerkleTreeOperations } from './merkle_tree_operations.js'; import { type ProvingJobSource } from './proving-job.js'; /** @@ -29,8 +30,11 @@ export type ProverConfig = { /** * The interface to the prover client. * Provides the ability to generate proofs and build rollups. + * TODO(palla/prover-node): Rename this interface */ -export interface ProverClient extends BlockProver { +export interface ProverClient { + createBlockProver(db: MerkleTreeOperations): BlockProver; + start(): Promise; stop(): Promise; diff --git a/yarn-project/circuit-types/src/l2_block_downloader/l2_block_downloader.ts b/yarn-project/circuit-types/src/l2_block_downloader/l2_block_downloader.ts index f4d412426a5..8c43ce23925 100644 --- a/yarn-project/circuit-types/src/l2_block_downloader/l2_block_downloader.ts +++ b/yarn-project/circuit-types/src/l2_block_downloader/l2_block_downloader.ts @@ -68,15 +68,36 @@ export class L2BlockDownloader { /** * Repeatedly queries the block source and adds the received blocks to the block queue. * Stops when no further blocks are received. 
+ * @param targetBlockNumber - Optional block number to stop at. + * @param proven - Optional override of the default "proven" setting. * @returns The total number of blocks added to the block queue. */ - private async collectBlocks() { + private async collectBlocks(targetBlockNumber?: number, onlyProven?: boolean) { let totalBlocks = 0; while (true) { - const blocks = await this.l2BlockSource.getBlocks(this.from, 10, this.proven); + // If we have a target and have reached it, return + if (targetBlockNumber !== undefined && this.from > targetBlockNumber) { + log.verbose(`Reached target block number ${targetBlockNumber}`); + return totalBlocks; + } + + // If we have a target, then request at most the number of blocks to get to it + const limit = targetBlockNumber !== undefined ? Math.min(targetBlockNumber - this.from + 1, 10) : 10; + const proven = onlyProven === undefined ? this.proven : onlyProven; + + // Hit the archiver for blocks + const blocks = await this.l2BlockSource.getBlocks(this.from, limit, proven); + + // If there are no more blocks, return if (!blocks.length) { return totalBlocks; } + + log.verbose( + `Received ${blocks.length} blocks from archiver after querying from ${this.from} limit ${limit} (proven ${proven})`, + ); + + // Push new blocks into the queue and loop await this.semaphore.acquire(); this.blockQueue.put(blocks); this.from += blocks.length; @@ -116,9 +137,13 @@ export class L2BlockDownloader { /** * Forces an immediate request for blocks. + * Repeatedly queries the block source and adds the received blocks to the block queue. + * Stops when no further blocks are received. + * @param targetBlockNumber - Optional block number to stop at. + * @param proven - Optional override of the default "proven" setting. 
* @returns A promise that fulfills once the poll is complete */ - public pollImmediate(): Promise { - return this.jobQueue.put(() => this.collectBlocks()); + public pollImmediate(targetBlockNumber?: number, onlyProven?: boolean): Promise { + return this.jobQueue.put(() => this.collectBlocks(targetBlockNumber, onlyProven)); } } diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index 87c5573176d..d638d72a3d3 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -3,6 +3,7 @@ import { getConfigEnvVars } from '@aztec/aztec-node'; import { AztecAddress, Body, Fr, GlobalVariables, type L2Block, createDebugLogger, mockTx } from '@aztec/aztec.js'; // eslint-disable-next-line no-restricted-imports import { + type BlockProver, PROVING_STATUS, type ProcessedTx, makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, @@ -82,6 +83,7 @@ describe('L1Publisher integration', () => { let builder: TxProver; let builderDb: MerkleTrees; + let prover: BlockProver; // The header of the last block let prevHeader: Header; @@ -138,7 +140,8 @@ describe('L1Publisher integration', () => { }; const worldStateSynchronizer = new ServerWorldStateSynchronizer(tmpStore, builderDb, blockSource, worldStateConfig); await worldStateSynchronizer.start(); - builder = await TxProver.new(config, worldStateSynchronizer, blockSource, new NoopTelemetryClient()); + builder = await TxProver.new(config, new NoopTelemetryClient()); + prover = builder.createBlockProver(builderDb.asLatest()); publisher = getL1Publisher( { @@ -285,9 +288,9 @@ describe('L1Publisher integration', () => { }; const buildBlock = async (globalVariables: GlobalVariables, txs: ProcessedTx[], l1ToL2Messages: Fr[]) => { - const blockTicket = await builder.startNewBlock(txs.length, globalVariables, l1ToL2Messages); + const 
blockTicket = await prover.startNewBlock(txs.length, globalVariables, l1ToL2Messages); for (const tx of txs) { - await builder.addNewTx(tx); + await prover.addNewTx(tx); } return blockTicket; }; @@ -360,7 +363,7 @@ describe('L1Publisher integration', () => { const ticket = await buildBlock(globalVariables, txs, currentL1ToL2Messages); const result = await ticket.provingPromise; expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const blockResult = await builder.finaliseBlock(); + const blockResult = await prover.finaliseBlock(); const block = blockResult.block; prevHeader = block.header; blockSource.getL1ToL2Messages.mockResolvedValueOnce(currentL1ToL2Messages); @@ -450,10 +453,10 @@ describe('L1Publisher integration', () => { GasFees.empty(), ); const blockTicket = await buildBlock(globalVariables, txs, l1ToL2Messages); - await builder.setBlockCompleted(); + await prover.setBlockCompleted(); const result = await blockTicket.provingPromise; expect(result.status).toBe(PROVING_STATUS.SUCCESS); - const blockResult = await builder.finaliseBlock(); + const blockResult = await prover.finaliseBlock(); const block = blockResult.block; prevHeader = block.header; blockSource.getL1ToL2Messages.mockResolvedValueOnce(l1ToL2Messages); diff --git a/yarn-project/end-to-end/src/e2e_prover_node.test.ts b/yarn-project/end-to-end/src/e2e_prover_node.test.ts index 3035ff969f3..df5a6207904 100644 --- a/yarn-project/end-to-end/src/e2e_prover_node.test.ts +++ b/yarn-project/end-to-end/src/e2e_prover_node.test.ts @@ -15,7 +15,7 @@ import { sleep, } from '@aztec/aztec.js'; import { StatefulTestContract, TestContract } from '@aztec/noir-contracts.js'; -import { type ProverNode, createProverNode } from '@aztec/prover-node'; +import { createProverNode } from '@aztec/prover-node'; import { type SequencerClientConfig } from '@aztec/sequencer-client'; import { sendL1ToL2Message } from './fixtures/l1_to_l2_messaging.js'; @@ -107,20 +107,12 @@ describe('e2e_prover_node', () => { ctx = await 
snapshotManager.setup(); }); - const prove = async (proverNode: ProverNode, blockNumber: number) => { - logger.info(`Proving block ${blockNumber}`); - await proverNode.prove(blockNumber, blockNumber); - - logger.info(`Proof submitted. Awaiting aztec node to sync...`); - await retryUntil(async () => (await ctx.aztecNode.getProvenBlockNumber()) === blockNumber, 'block-1', 10, 1); - expect(await ctx.aztecNode.getProvenBlockNumber()).toEqual(blockNumber); - }; - it('submits three blocks, then prover proves the first two', async () => { // Check everything went well during setup and txs were mined in two different blocks const [txReceipt1, txReceipt2, txReceipt3] = txReceipts; const firstBlock = txReceipt1.blockNumber!; - expect(txReceipt2.blockNumber).toEqual(firstBlock + 1); + const secondBlock = firstBlock + 1; + expect(txReceipt2.blockNumber).toEqual(secondBlock); expect(txReceipt3.blockNumber).toEqual(firstBlock + 2); expect(await contract.methods.get_public_value(recipient).simulate()).toEqual(20n); expect(await contract.methods.summed_values(recipient).simulate()).toEqual(10n); @@ -141,9 +133,18 @@ describe('e2e_prover_node', () => { const archiver = ctx.aztecNode.getBlockSource() as Archiver; const proverNode = await createProverNode(proverConfig, { aztecNodeTxProvider: ctx.aztecNode, archiver }); - // Prove the first two blocks - await prove(proverNode, firstBlock); - await prove(proverNode, firstBlock + 1); + // Prove the first two blocks simultaneously + logger.info(`Starting proof for first block #${firstBlock}`); + await proverNode.startProof(firstBlock, firstBlock); + logger.info(`Starting proof for second block #${secondBlock}`); + await proverNode.startProof(secondBlock, secondBlock); + + // Confirm that we cannot go back to prove an old one + await expect(proverNode.startProof(firstBlock, firstBlock)).rejects.toThrow(/behind the current world state/i); + + // Await until proofs get submitted + await retryUntil(async () => (await 
ctx.aztecNode.getProvenBlockNumber()) === secondBlock, 'proven', 10, 1); + expect(await ctx.aztecNode.getProvenBlockNumber()).toEqual(secondBlock); // Check that the prover id made it to the emitted event const { publicClient, l1ContractAddresses } = ctx.deployL1ContractsValues; diff --git a/yarn-project/kv-store/src/interfaces/store.ts b/yarn-project/kv-store/src/interfaces/store.ts index f13a3241ab5..076d39da1e0 100644 --- a/yarn-project/kv-store/src/interfaces/store.ts +++ b/yarn-project/kv-store/src/interfaces/store.ts @@ -58,4 +58,9 @@ export interface AztecKVStore { * Clears the store */ clear(): Promise; + + /** + * Forks the store. + */ + fork(): Promise; } diff --git a/yarn-project/kv-store/src/lmdb/store.test.ts b/yarn-project/kv-store/src/lmdb/store.test.ts new file mode 100644 index 00000000000..f6babd0cb67 --- /dev/null +++ b/yarn-project/kv-store/src/lmdb/store.test.ts @@ -0,0 +1,29 @@ +import { mkdtemp } from 'fs/promises'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { AztecLmdbStore } from './store.js'; + +describe('AztecLmdbStore', () => { + const itForks = async (store: AztecLmdbStore) => { + const singleton = store.openSingleton('singleton'); + await singleton.set('foo'); + + const forkedStore = await store.fork(); + const forkedSingleton = forkedStore.openSingleton('singleton'); + expect(forkedSingleton.get()).toEqual('foo'); + await forkedSingleton.set('bar'); + expect(singleton.get()).toEqual('foo'); + }; + + it('forks a persistent store', async () => { + const path = join(await mkdtemp(join(tmpdir(), 'aztec-store-test-')), 'main.mdb'); + const store = AztecLmdbStore.open(path, false); + await itForks(store); + }); + + it('forks an ephemeral store', async () => { + const store = AztecLmdbStore.open(undefined, true); + await itForks(store); + }); +}); diff --git a/yarn-project/kv-store/src/lmdb/store.ts b/yarn-project/kv-store/src/lmdb/store.ts index 4b7a115f2f9..cb6ee87d7d7 100644 --- 
a/yarn-project/kv-store/src/lmdb/store.ts +++ b/yarn-project/kv-store/src/lmdb/store.ts @@ -1,6 +1,9 @@ import { createDebugLogger } from '@aztec/foundation/log'; +import { mkdtemp } from 'fs/promises'; import { type Database, type Key, type RootDatabase, open } from 'lmdb'; +import { tmpdir } from 'os'; +import { join } from 'path'; import { type AztecArray } from '../interfaces/array.js'; import { type AztecCounter } from '../interfaces/counter.js'; @@ -22,7 +25,7 @@ export class AztecLmdbStore implements AztecKVStore { #data: Database; #multiMapData: Database; - constructor(rootDb: RootDatabase) { + constructor(rootDb: RootDatabase, public readonly isEphemeral: boolean) { this.#rootDb = rootDb; // big bucket to store all the data @@ -57,11 +60,19 @@ export class AztecLmdbStore implements AztecKVStore { log = createDebugLogger('aztec:kv-store:lmdb'), ): AztecLmdbStore { log.info(`Opening LMDB database at ${path || 'temporary location'}`); - const rootDb = open({ - path, - noSync: ephemeral, - }); - return new AztecLmdbStore(rootDb); + const rootDb = open({ path, noSync: ephemeral }); + return new AztecLmdbStore(rootDb, ephemeral); + } + + /** + * Forks the current DB into a new DB by backing it up to a temporary location and opening a new lmdb db. + * @returns A new AztecLmdbStore. 
+ */ + async fork() { + const forkPath = join(await mkdtemp(join(tmpdir(), 'aztec-store-fork-')), 'root.mdb'); + await this.#rootDb.backup(forkPath, false); + const forkDb = open(forkPath, { noSync: this.isEphemeral }); + return new AztecLmdbStore(forkDb, this.isEphemeral); } /** diff --git a/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts b/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts index 3e7c1b64c5c..54f30c2a61f 100644 --- a/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts +++ b/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts @@ -1,4 +1,4 @@ -import { type SiblingPath } from '@aztec/circuit-types'; +import { type BatchInsertionResult } from '@aztec/circuit-types'; import { type IndexedTreeLeaf, type IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { @@ -36,46 +36,6 @@ export interface PreimageFactory { clone(preimage: IndexedTreeLeafPreimage): IndexedTreeLeafPreimage; } -/** - * All of the data to be return during batch insertion. - */ -export interface LowLeafWitnessData { - /** - * Preimage of the low nullifier that proves non membership. - */ - leafPreimage: IndexedTreeLeafPreimage; - /** - * Sibling path to prove membership of low nullifier. - */ - siblingPath: SiblingPath; - /** - * The index of low nullifier. - */ - index: bigint; -} - -/** - * The result of a batch insertion in an indexed merkle tree. - */ -export interface BatchInsertionResult { - /** - * Data for the leaves to be updated when inserting the new ones. - */ - lowLeavesWitnessData?: LowLeafWitnessData[]; - /** - * Sibling path "pointing to" where the new subtree should be inserted into the tree. - */ - newSubtreeSiblingPath: SiblingPath; - /** - * The new leaves being inserted in high to low order. This order corresponds with the order of the low leaves witness. - */ - sortedNewLeaves: Buffer[]; - /** - * The indexes of the sorted new leaves to the original ones. - */ - sortedNewLeavesIndexes: number[]; -} - /** * Indexed merkle tree. 
*/ diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts index 06d6d91f6c8..bacfeaa2e02 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts @@ -1,4 +1,4 @@ -import { SiblingPath } from '@aztec/circuit-types'; +import { type BatchInsertionResult, type LowLeafWitnessData, SiblingPath } from '@aztec/circuit-types'; import { type TreeInsertionStats } from '@aztec/circuit-types/stats'; import { toBufferBE } from '@aztec/foundation/bigint-buffer'; import { type FromBuffer } from '@aztec/foundation/serialize'; @@ -7,12 +7,7 @@ import { type IndexedTreeLeaf, type IndexedTreeLeafPreimage } from '@aztec/found import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; import { type Hasher } from '@aztec/types/interfaces'; -import { - type BatchInsertionResult, - type IndexedTree, - type LowLeafWitnessData, - type PreimageFactory, -} from '../interfaces/indexed_tree.js'; +import { type IndexedTree, type PreimageFactory } from '../interfaces/indexed_tree.js'; import { IndexedTreeSnapshotBuilder } from '../snapshots/indexed_tree_snapshot.js'; import { type IndexedTreeSnapshot } from '../snapshots/snapshot_builder.js'; import { TreeBase } from '../tree_base.js'; diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index 99f7738f049..f1a20ccba0d 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -1,9 +1,7 @@ import { type BBProverConfig } from '@aztec/bb-prover'; import { type BlockProver, - type BlockResult, type ProcessedTx, - type ProvingTicket, type PublicExecutionRequest, type ServerCircuitProver, type Tx, @@ -36,30 +34,7 @@ import { MemoryProvingQueue } from 
'../prover-agent/memory-proving-queue.js'; import { ProverAgent } from '../prover-agent/prover-agent.js'; import { getEnvironmentConfig, getSimulationProvider, makeGlobals } from './fixtures.js'; -class DummyProverClient implements BlockProver { - constructor(private orchestrator: ProvingOrchestrator) {} - startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise { - return this.orchestrator.startNewBlock(numTxs, globalVariables, l1ToL2Messages); - } - addNewTx(tx: ProcessedTx): Promise { - return this.orchestrator.addNewTx(tx); - } - cancelBlock(): void { - return this.orchestrator.cancelBlock(); - } - finaliseBlock(): Promise { - return this.orchestrator.finaliseBlock(); - } - setBlockCompleted(): Promise { - return this.orchestrator.setBlockCompleted(); - } - getProverId(): Fr { - return this.orchestrator.proverId; - } -} - export class TestContext { - public blockProver: BlockProver; constructor( public publicExecutor: MockProxy, public publicContractsDB: MockProxy, @@ -74,8 +49,10 @@ export class TestContext { public blockNumber: number, public directoriesToCleanup: string[], public logger: DebugLogger, - ) { - this.blockProver = new DummyProverClient(this.orchestrator); + ) {} + + public get blockProver() { + return this.orchestrator; } static async new( diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index be0f21a8460..78760bc7e3c 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -15,6 +15,7 @@ import { } from '@aztec/circuit-types'; import { BlockProofError, + type BlockProver, type BlockResult, PROVING_STATUS, type ProvingResult, @@ -93,7 +94,7 @@ const logger = createDebugLogger('aztec:prover:proving-orchestrator'); /** * The orchestrator, managing the flow of recursive proving operations required to build the rollup proof tree. 
*/ -export class ProvingOrchestrator { +export class ProvingOrchestrator implements BlockProver { private provingState: ProvingState | undefined = undefined; private pendingProvingJobs: AbortController[] = []; private paddingTx: PaddingProcessedTx | undefined = undefined; @@ -104,7 +105,7 @@ export class ProvingOrchestrator { private db: MerkleTreeOperations, private prover: ServerCircuitProver, telemetryClient: TelemetryClient, - public readonly proverId: Fr = Fr.ZERO, + private readonly proverId: Fr = Fr.ZERO, ) { this.metrics = new ProvingOrchestratorMetrics(telemetryClient, 'ProvingOrchestrator'); } @@ -113,6 +114,10 @@ export class ProvingOrchestrator { return this.metrics.tracer; } + public getProverId(): Fr { + return this.proverId; + } + /** * Resets the orchestrator's cached padding tx. */ @@ -140,8 +145,20 @@ export class ProvingOrchestrator { if (!Number.isInteger(numTxs) || numTxs < 2) { throw new Error(`Length of txs for the block should be at least two (got ${numTxs})`); } + + // TODO(palla/prover-node): Store block number in the db itself to make this check more reliable, + // and turn this warning into an exception that we throw. 
+ const { blockNumber } = globalVariables; + const dbBlockNumber = (await this.db.getTreeInfo(MerkleTreeId.ARCHIVE)).size - 1n; + if (dbBlockNumber !== blockNumber.toBigInt() - 1n) { + logger.warn( + `Database is at wrong block number (starting block ${blockNumber.toBigInt()} with db at ${dbBlockNumber})`, + ); + } + // Cancel any currently proving block before starting a new one this.cancelBlock(); + logger.info(`Starting new block with ${numTxs} transactions`); // we start the block by enqueueing all of the base parity circuits let baseParityInputs: BaseParityInputs[] = []; diff --git a/yarn-project/prover-client/src/tx-prover/factory.ts b/yarn-project/prover-client/src/tx-prover/factory.ts index 6fc40d7f634..742002239a8 100644 --- a/yarn-project/prover-client/src/tx-prover/factory.ts +++ b/yarn-project/prover-client/src/tx-prover/factory.ts @@ -1,16 +1,9 @@ -import { type L2BlockSource } from '@aztec/circuit-types'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; -import { type WorldStateSynchronizer } from '@aztec/world-state'; import { type ProverClientConfig } from '../config.js'; import { TxProver } from './tx-prover.js'; -export function createProverClient( - config: ProverClientConfig, - worldStateSynchronizer: WorldStateSynchronizer, - blockSource: L2BlockSource, - telemetry: TelemetryClient = new NoopTelemetryClient(), -) { - return config.disableProver ? undefined : TxProver.new(config, worldStateSynchronizer, blockSource, telemetry); +export function createProverClient(config: ProverClientConfig, telemetry: TelemetryClient = new NoopTelemetryClient()) { + return config.disableProver ? 
undefined : TxProver.new(config, telemetry); } diff --git a/yarn-project/prover-client/src/tx-prover/tx-prover.ts b/yarn-project/prover-client/src/tx-prover/tx-prover.ts index f57d475d576..7ea62671292 100644 --- a/yarn-project/prover-client/src/tx-prover/tx-prover.ts +++ b/yarn-project/prover-client/src/tx-prover/tx-prover.ts @@ -1,16 +1,14 @@ import { BBNativeRollupProver, TestCircuitProver } from '@aztec/bb-prover'; -import { type L2BlockSource, type ProcessedTx } from '@aztec/circuit-types'; import { - type BlockResult, + type BlockProver, type ProverClient, type ProvingJobSource, - type ProvingTicket, type ServerCircuitProver, } from '@aztec/circuit-types/interfaces'; -import { Fr, type GlobalVariables } from '@aztec/circuits.js'; +import { Fr } from '@aztec/circuits.js'; import { NativeACVMSimulator } from '@aztec/simulator'; import { type TelemetryClient } from '@aztec/telemetry-client'; -import { type WorldStateSynchronizer } from '@aztec/world-state'; +import { type MerkleTreeOperations } from '@aztec/world-state'; import { type ProverClientConfig } from '../config.js'; import { ProvingOrchestrator } from '../orchestrator/orchestrator.js'; @@ -18,26 +16,25 @@ import { MemoryProvingQueue } from '../prover-agent/memory-proving-queue.js'; import { ProverAgent } from '../prover-agent/prover-agent.js'; /** - * A prover accepting individual transaction requests + * A prover factory. + * TODO(palla/prover-node): Rename this class */ export class TxProver implements ProverClient { - private orchestrator: ProvingOrchestrator; private queue: MemoryProvingQueue; private running = false; private constructor( private config: ProverClientConfig, - private worldStateSynchronizer: WorldStateSynchronizer, private telemetry: TelemetryClient, private agent?: ProverAgent, ) { + // TODO(palla/prover-node): Cache the paddingTx here, and not in each proving orchestrator, + // so it can be reused across multiple ones and not recomputed every time. 
this.queue = new MemoryProvingQueue(telemetry, config.proverJobTimeoutMs, config.proverJobPollIntervalMs); - this.orchestrator = new ProvingOrchestrator( - worldStateSynchronizer.getLatest(), - this.queue, - telemetry, - config.proverId, - ); + } + + public createBlockProver(db: MerkleTreeOperations): BlockProver { + return new ProvingOrchestrator(db, this.queue, this.telemetry, this.config.proverId); } public getProverId(): Fr { @@ -57,7 +54,7 @@ export class TxProver implements ProverClient { } if (!this.config.realProofs && newConfig.realProofs) { - this.orchestrator.reset(); + // TODO(palla/prover-node): Reset padding tx here once we cache it at this class } this.config = newConfig; @@ -85,6 +82,8 @@ export class TxProver implements ProverClient { return; } this.running = false; + + // TODO(palla/prover-node): Keep a reference to all proving orchestrators that are alive and stop them? await this.agent?.stop(); await this.queue.stop(); } @@ -96,12 +95,7 @@ export class TxProver implements ProverClient { * @param worldStateSynchronizer - An instance of the world state * @returns An instance of the prover, constructed and started. */ - public static async new( - config: ProverClientConfig, - worldStateSynchronizer: WorldStateSynchronizer, - blockSource: L2BlockSource, - telemetry: TelemetryClient, - ) { + public static async new(config: ProverClientConfig, telemetry: TelemetryClient) { const agent = config.proverAgentEnabled ? 
new ProverAgent( await TxProver.buildCircuitProver(config, telemetry), @@ -110,7 +104,7 @@ export class TxProver implements ProverClient { ) : undefined; - const prover = new TxProver(config, worldStateSynchronizer, telemetry, agent); + const prover = new TxProver(config, telemetry, agent); await prover.start(); return prover; } @@ -130,52 +124,6 @@ export class TxProver implements ProverClient { return new TestCircuitProver(telemetry, simulationProvider); } - /** - * Cancels any block that is currently being built and prepares for a new one to be built - * @param numTxs - The complete size of the block, must be a power of 2 - * @param globalVariables - The global variables for this block - * @param l1ToL2Messages - The set of L1 to L2 messages to be included in this block - */ - public async startNewBlock( - numTxs: number, - globalVariables: GlobalVariables, - newL1ToL2Messages: Fr[], - ): Promise { - const previousBlockNumber = globalVariables.blockNumber.toNumber() - 1; - await this.worldStateSynchronizer.syncImmediate(previousBlockNumber); - return this.orchestrator.startNewBlock(numTxs, globalVariables, newL1ToL2Messages); - } - - /** - * Add a processed transaction to the current block - * @param tx - The transaction to be added - */ - public addNewTx(tx: ProcessedTx): Promise { - return this.orchestrator.addNewTx(tx); - } - - /** - * Cancels the block currently being proven. Proofs already bring built may continue but further proofs should not be started. - */ - public cancelBlock(): void { - this.orchestrator.cancelBlock(); - } - - /** - * Performs the final archive tree insertion for this block and returns the L2Block and Proof instances - */ - public finaliseBlock(): Promise { - return this.orchestrator.finaliseBlock(); - } - - /** - * Mark the block as having all the transactions it is going to contain. - * Will pad the block to it's complete size with empty transactions and prove all the way to the root rollup. 
- */ - public setBlockCompleted(): Promise { - return this.orchestrator.setBlockCompleted(); - } - public getProvingJobSource(): ProvingJobSource { return this.queue; } diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index 26a8bd6cf69..f1e966de091 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -4,7 +4,7 @@ import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { createStore } from '@aztec/kv-store/utils'; import { createProverClient } from '@aztec/prover-client'; import { getL1Publisher } from '@aztec/sequencer-client'; -import { PublicProcessorFactory, createSimulationProvider } from '@aztec/simulator'; +import { createSimulationProvider } from '@aztec/simulator'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { createWorldStateSynchronizer } from '@aztec/world-state'; @@ -40,17 +40,24 @@ export async function createProverNode( const simulationProvider = await createSimulationProvider(config, log); - const prover = await createProverClient(config, worldStateSynchronizer, archiver); + const prover = await createProverClient(config, telemetry); // REFACTOR: Move publisher out of sequencer package and into an L1-related package const publisher = getL1Publisher(config, telemetry); - const latestWorldState = worldStateSynchronizer.getLatest(); - const publicProcessorFactory = new PublicProcessorFactory(latestWorldState, archiver, simulationProvider, telemetry); - const txProvider = deps.aztecNodeTxProvider ? 
new AztecNodeTxProvider(deps.aztecNodeTxProvider) : createTxProvider(config); - return new ProverNode(prover!, publicProcessorFactory, publisher, archiver, archiver, txProvider); + return new ProverNode( + prover!, + publisher, + archiver, + archiver, + archiver, + worldStateSynchronizer, + txProvider, + simulationProvider, + telemetry, + ); } diff --git a/yarn-project/prover-node/src/job/block-proving-job.ts b/yarn-project/prover-node/src/job/block-proving-job.ts index 37c5989a963..111651868aa 100644 --- a/yarn-project/prover-node/src/job/block-proving-job.ts +++ b/yarn-project/prover-node/src/job/block-proving-job.ts @@ -42,66 +42,70 @@ export class BlockProvingJob { } this.log.info(`Starting block proving job`, { fromBlock, toBlock }); - this.state = 'started'; - - // TODO: Fast-forward world state to fromBlock and/or await fromBlock to be published to the unproven chain - this.state = 'processing'; - - let historicalHeader = (await this.l2BlockSource.getBlock(fromBlock - 1))?.header; - for (let blockNumber = fromBlock; blockNumber <= toBlock; blockNumber++) { - const block = await this.getBlock(blockNumber); - const globalVariables = block.header.globalVariables; - const txHashes = block.body.txEffects.map(tx => tx.txHash); - const txCount = block.body.numberOfTxsIncludingPadded; - const l1ToL2Messages = await this.getL1ToL2Messages(block); - - this.log.verbose(`Starting block processing`, { - number: block.number, - blockHash: block.hash().toString(), - lastArchive: block.header.lastArchive.root, - noteHashTreeRoot: block.header.state.partial.noteHashTree.root, - nullifierTreeRoot: block.header.state.partial.nullifierTree.root, - publicDataTreeRoot: block.header.state.partial.publicDataTree.root, - historicalHeader: historicalHeader?.hash(), - ...globalVariables, - }); - const provingTicket = await this.prover.startNewBlock(txCount, globalVariables, l1ToL2Messages); - const publicProcessor = this.publicProcessorFactory.create(historicalHeader, 
globalVariables); - - const txs = await this.getTxs(txHashes); - await this.processTxs(publicProcessor, txs, txCount); - - this.log.verbose(`Processed all txs for block`, { - blockNumber: block.number, - blockHash: block.hash().toString(), - }); - - await this.prover.setBlockCompleted(); - - const result = await provingTicket.provingPromise; - if (result.status === PROVING_STATUS.FAILURE) { - throw new Error(`Block proving failed: ${result.reason}`); + try { + let historicalHeader = (await this.l2BlockSource.getBlock(fromBlock - 1))?.header; + for (let blockNumber = fromBlock; blockNumber <= toBlock; blockNumber++) { + const block = await this.getBlock(blockNumber); + const globalVariables = block.header.globalVariables; + const txHashes = block.body.txEffects.map(tx => tx.txHash); + const txCount = block.body.numberOfTxsIncludingPadded; + const l1ToL2Messages = await this.getL1ToL2Messages(block); + + this.log.verbose(`Starting block processing`, { + number: block.number, + blockHash: block.hash().toString(), + lastArchive: block.header.lastArchive.root, + noteHashTreeRoot: block.header.state.partial.noteHashTree.root, + nullifierTreeRoot: block.header.state.partial.nullifierTree.root, + publicDataTreeRoot: block.header.state.partial.publicDataTree.root, + historicalHeader: historicalHeader?.hash(), + ...globalVariables, + }); + + // When we move to proving epochs, this should change into a startNewEpoch and be lifted outside the loop. 
+ const provingTicket = await this.prover.startNewBlock(txCount, globalVariables, l1ToL2Messages); + + const publicProcessor = this.publicProcessorFactory.create(historicalHeader, globalVariables); + + const txs = await this.getTxs(txHashes); + await this.processTxs(publicProcessor, txs, txCount); + + this.log.verbose(`Processed all txs for block`, { + blockNumber: block.number, + blockHash: block.hash().toString(), + }); + + await this.prover.setBlockCompleted(); + + // This should be moved outside the loop to match the creation of the proving ticket when we move to epochs. + this.state = 'awaiting-prover'; + const result = await provingTicket.provingPromise; + if (result.status === PROVING_STATUS.FAILURE) { + throw new Error(`Block proving failed: ${result.reason}`); + } + + historicalHeader = block.header; } - historicalHeader = block.header; - } + const { block, aggregationObject, proof } = await this.prover.finaliseBlock(); + this.log.info(`Finalised proof for block range`, { fromBlock, toBlock }); - this.state = 'awaiting-prover'; - const { block, aggregationObject, proof } = await this.prover.finaliseBlock(); - this.log.info(`Finalised proof for block range`, { fromBlock, toBlock }); - - this.state = 'publishing-proof'; - await this.publisher.submitProof( - block.header, - block.archive.root, - this.prover.getProverId(), - aggregationObject, - proof, - ); - this.log.info(`Submitted proof for block range`, { fromBlock, toBlock }); + this.state = 'publishing-proof'; + await this.publisher.submitProof( + block.header, + block.archive.root, + this.prover.getProverId(), + aggregationObject, + proof, + ); + this.log.info(`Submitted proof for block range`, { fromBlock, toBlock }); - this.state = 'completed'; + this.state = 'completed'; + } catch (err) { + this.log.error(`Error running block prover job: ${err}`); + this.state = 'failed'; + } } private async getBlock(blockNumber: number): Promise { @@ -151,8 +155,8 @@ export class BlockProvingJob { export type 
BlockProvingJobState = | 'initialized' - | 'started' | 'processing' | 'awaiting-prover' | 'publishing-proof' - | 'completed'; + | 'completed' + | 'failed'; diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts index 1c435e3cfa3..ca4b3042913 100644 --- a/yarn-project/prover-node/src/prover-node.ts +++ b/yarn-project/prover-node/src/prover-node.ts @@ -2,8 +2,11 @@ import { type L1ToL2MessageSource, type L2BlockSource, type ProverClient, type T import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; import { type L1Publisher } from '@aztec/sequencer-client'; -import { type PublicProcessorFactory } from '@aztec/simulator'; +import { PublicProcessorFactory, type SimulationProvider } from '@aztec/simulator'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { type WorldStateSynchronizer } from '@aztec/world-state'; +import { type ContractDataSource } from '../../types/src/contracts/contract_data_source.js'; import { BlockProvingJob } from './job/block-proving-job.js'; /** @@ -14,14 +17,18 @@ import { BlockProvingJob } from './job/block-proving-job.js'; export class ProverNode { private log = createDebugLogger('aztec:prover-node'); private runningPromise: RunningPromise | undefined; + private latestBlockWeAreProving: number | undefined; constructor( private prover: ProverClient, - private publicProcessorFactory: PublicProcessorFactory, private publisher: L1Publisher, private l2BlockSource: L2BlockSource, private l1ToL2MessageSource: L1ToL2MessageSource, + private contractDataSource: ContractDataSource, + private worldState: WorldStateSynchronizer, private txProvider: TxProvider, + private simulator: SimulationProvider, + private telemetryClient: TelemetryClient, private options: { pollingIntervalMs: number; disableAutomaticProving: boolean } = { pollingIntervalMs: 1_000, disableAutomaticProving: false, @@ -48,12 +55,12 @@ export 
class ProverNode { await this.l2BlockSource.stop(); this.publisher.interrupt(); this.log.info('Stopped ProverNode'); + // TODO(palla/prover-node): Keep a reference to all ongoing ProvingJobs and stop them. } /** * Single iteration of recurring work. This method is called periodically by the running promise. * Checks whether there are new blocks to prove, proves them, and submits them. - * Only proves one block per job and one job at a time (for now). */ protected async work() { if (this.options.disableAutomaticProving) { @@ -65,29 +72,35 @@ export class ProverNode { this.l2BlockSource.getProvenBlockNumber(), ]); - if (latestProvenBlockNumber >= latestBlockNumber) { - this.log.debug(`No new blocks to prove`, { latestBlockNumber, latestProvenBlockNumber }); + // Consider both the latest block we are proving and the last block proven on the chain + const latestBlockBeingProven = this.latestBlockWeAreProving ?? 0; + const latestProven = Math.max(latestBlockBeingProven, latestProvenBlockNumber); + if (latestProven >= latestBlockNumber) { + this.log.debug(`No new blocks to prove`, { latestBlockNumber, latestProvenBlockNumber, latestBlockBeingProven }); return; } - const fromBlock = latestProvenBlockNumber + 1; + const fromBlock = latestProven + 1; const toBlock = fromBlock; // We only prove one block at a time for now - await this.prove(fromBlock, toBlock); + + await this.startProof(fromBlock, toBlock); + this.latestBlockWeAreProving = toBlock; } /** * Creates a proof for a block range. Returns once the proof has been submitted to L1. */ - public prove(fromBlock: number, toBlock: number) { - return this.createProvingJob().run(fromBlock, toBlock); + public async prove(fromBlock: number, toBlock: number) { + const job = await this.createProvingJob(fromBlock); + return job.run(fromBlock, toBlock); } /** * Starts a proving process and returns immediately. 
*/ - public startProof(fromBlock: number, toBlock: number) { - void this.createProvingJob().run(fromBlock, toBlock); - return Promise.resolve(); + public async startProof(fromBlock: number, toBlock: number) { + const job = await this.createProvingJob(fromBlock); + void job.run(fromBlock, toBlock); } /** @@ -97,10 +110,25 @@ export class ProverNode { return this.prover; } - private createProvingJob() { + private async createProvingJob(fromBlock: number) { + if ((await this.worldState.status()).syncedToL2Block >= fromBlock) { + throw new Error(`Cannot create proving job for block ${fromBlock} as it is behind the current world state`); + } + + // Fast forward world state to right before the target block and get a fork + const db = await this.worldState.syncImmediateAndFork(fromBlock - 1, true); + + // Create a processor using the forked world state + const publicProcessorFactory = new PublicProcessorFactory( + db, + this.contractDataSource, + this.simulator, + this.telemetryClient, + ); + return new BlockProvingJob( - this.prover, - this.publicProcessorFactory, + this.prover.createBlockProver(db), + publicProcessorFactory, this.publisher, this.l2BlockSource, this.l1ToL2MessageSource, diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index db9a3704978..dcb72921a15 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -1,5 +1,5 @@ import { type L1ToL2MessageSource, type L2BlockSource } from '@aztec/circuit-types'; -import { type BlockProver } from '@aztec/circuit-types/interfaces'; +import { type ProverClient } from '@aztec/circuit-types/interfaces'; import { type P2P } from '@aztec/p2p'; import { PublicProcessorFactory, type SimulationProvider } from '@aztec/simulator'; import { type TelemetryClient } from '@aztec/telemetry-client'; @@ -37,7 +37,7 @@ export class SequencerClient { 
contractDataSource: ContractDataSource, l2BlockSource: L2BlockSource, l1ToL2MessageSource: L1ToL2MessageSource, - prover: BlockProver, + prover: ProverClient, simulationProvider: SimulationProvider, telemetryClient: TelemetryClient, ) { diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index f21cb1eec8e..912d9ebe6b1 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -1,4 +1,5 @@ import { + type BlockProver, type L1ToL2MessageSource, L2Block, type L2BlockSource, @@ -42,6 +43,7 @@ describe('sequencer', () => { let globalVariableBuilder: MockProxy; let p2p: MockProxy; let worldState: MockProxy; + let blockProver: MockProxy; let proverClient: MockProxy; let merkleTreeOps: MockProxy; let publicProcessor: MockProxy; @@ -67,7 +69,11 @@ describe('sequencer', () => { globalVariableBuilder = mock(); merkleTreeOps = mock(); - proverClient = mock(); + blockProver = mock(); + + proverClient = mock({ + createBlockProver: () => blockProver, + }); p2p = mock({ getStatus: () => Promise.resolve({ state: P2PClientState.IDLE, syncedToL2Block: lastBlockNumber }), @@ -87,7 +93,7 @@ describe('sequencer', () => { }); publicProcessorFactory = mock({ - create: (_a, _b_) => publicProcessor, + create: (_a, _b) => publicProcessor, }); l2BlockSource = mock({ @@ -132,8 +138,8 @@ describe('sequencer', () => { }; p2p.getTxs.mockReturnValueOnce([tx]); - proverClient.startNewBlock.mockResolvedValueOnce(ticket); - proverClient.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); + blockProver.startNewBlock.mockResolvedValueOnce(ticket); + blockProver.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); publisher.processL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables( @@ -151,7 +157,7 @@ 
describe('sequencer', () => { await sequencer.initialSync(); await sequencer.work(); - expect(proverClient.startNewBlock).toHaveBeenCalledWith( + expect(blockProver.startNewBlock).toHaveBeenCalledWith( 2, new GlobalVariables( chainId, @@ -166,7 +172,7 @@ describe('sequencer', () => { Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.processL2Block).toHaveBeenCalledWith(block); - expect(proverClient.cancelBlock).toHaveBeenCalledTimes(0); + expect(blockProver.cancelBlock).toHaveBeenCalledTimes(0); }); it('builds a block when it is their turn', async () => { @@ -182,8 +188,8 @@ describe('sequencer', () => { }; p2p.getTxs.mockReturnValueOnce([tx]); - proverClient.startNewBlock.mockResolvedValueOnce(ticket); - proverClient.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); + blockProver.startNewBlock.mockResolvedValueOnce(ticket); + blockProver.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); publisher.processL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables( @@ -202,12 +208,12 @@ describe('sequencer', () => { publisher.isItMyTurnToSubmit.mockClear().mockResolvedValue(false); await sequencer.initialSync(); await sequencer.work(); - expect(proverClient.startNewBlock).not.toHaveBeenCalled(); + expect(blockProver.startNewBlock).not.toHaveBeenCalled(); // Now it is! 
publisher.isItMyTurnToSubmit.mockClear().mockResolvedValue(true); await sequencer.work(); - expect(proverClient.startNewBlock).toHaveBeenCalledWith( + expect(blockProver.startNewBlock).toHaveBeenCalledWith( 2, new GlobalVariables( chainId, @@ -222,7 +228,7 @@ describe('sequencer', () => { Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.processL2Block).toHaveBeenCalledWith(block); - expect(proverClient.cancelBlock).toHaveBeenCalledTimes(0); + expect(blockProver.cancelBlock).toHaveBeenCalledTimes(0); }); it('builds a block out of several txs rejecting double spends', async () => { @@ -241,8 +247,8 @@ describe('sequencer', () => { }; p2p.getTxs.mockReturnValueOnce(txs); - proverClient.startNewBlock.mockResolvedValueOnce(ticket); - proverClient.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); + blockProver.startNewBlock.mockResolvedValueOnce(ticket); + blockProver.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); publisher.processL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables( @@ -268,7 +274,7 @@ describe('sequencer', () => { await sequencer.initialSync(); await sequencer.work(); - expect(proverClient.startNewBlock).toHaveBeenCalledWith( + expect(blockProver.startNewBlock).toHaveBeenCalledWith( 2, new GlobalVariables( chainId, @@ -284,7 +290,7 @@ describe('sequencer', () => { ); expect(publisher.processL2Block).toHaveBeenCalledWith(block); expect(p2p.deleteTxs).toHaveBeenCalledWith([doubleSpendTx.getTxHash()]); - expect(proverClient.cancelBlock).toHaveBeenCalledTimes(0); + expect(blockProver.cancelBlock).toHaveBeenCalledTimes(0); }); it('builds a block out of several txs rejecting incorrect chain ids', async () => { @@ -303,8 +309,8 @@ describe('sequencer', () => { }; p2p.getTxs.mockReturnValueOnce(txs); - proverClient.startNewBlock.mockResolvedValueOnce(ticket); - proverClient.finaliseBlock.mockResolvedValue({ 
block, aggregationObject: [], proof }); + blockProver.startNewBlock.mockResolvedValueOnce(ticket); + blockProver.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); publisher.processL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables( @@ -325,7 +331,7 @@ describe('sequencer', () => { await sequencer.initialSync(); await sequencer.work(); - expect(proverClient.startNewBlock).toHaveBeenCalledWith( + expect(blockProver.startNewBlock).toHaveBeenCalledWith( 2, new GlobalVariables( chainId, @@ -341,7 +347,7 @@ describe('sequencer', () => { ); expect(publisher.processL2Block).toHaveBeenCalledWith(block); expect(p2p.deleteTxs).toHaveBeenCalledWith([invalidChainTx.getTxHash()]); - expect(proverClient.cancelBlock).toHaveBeenCalledTimes(0); + expect(blockProver.cancelBlock).toHaveBeenCalledTimes(0); }); it('builds a block out of several txs dropping the ones that go over max size', async () => { @@ -359,8 +365,8 @@ describe('sequencer', () => { }; p2p.getTxs.mockReturnValueOnce(txs); - proverClient.startNewBlock.mockResolvedValueOnce(ticket); - proverClient.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); + blockProver.startNewBlock.mockResolvedValueOnce(ticket); + blockProver.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); publisher.processL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables( @@ -382,7 +388,7 @@ describe('sequencer', () => { await sequencer.initialSync(); await sequencer.work(); - expect(proverClient.startNewBlock).toHaveBeenCalledWith( + expect(blockProver.startNewBlock).toHaveBeenCalledWith( 2, new GlobalVariables( chainId, @@ -397,7 +403,7 @@ describe('sequencer', () => { Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.processL2Block).toHaveBeenCalledWith(block); - 
expect(proverClient.cancelBlock).toHaveBeenCalledTimes(0); + expect(blockProver.cancelBlock).toHaveBeenCalledTimes(0); }); it('aborts building a block if the chain moves underneath it', async () => { @@ -413,8 +419,8 @@ describe('sequencer', () => { }; p2p.getTxs.mockReturnValueOnce([tx]); - proverClient.startNewBlock.mockResolvedValueOnce(ticket); - proverClient.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); + blockProver.startNewBlock.mockResolvedValueOnce(ticket); + blockProver.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); publisher.processL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables( @@ -442,7 +448,6 @@ describe('sequencer', () => { await sequencer.work(); expect(publisher.processL2Block).not.toHaveBeenCalled(); - expect(proverClient.cancelBlock).toHaveBeenCalledTimes(1); }); }); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 2a08c56dae2..996ca295bca 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -9,8 +9,8 @@ import { import { type AllowedElement, BlockProofError, - type BlockProver, PROVING_STATUS, + type ProverClient, } from '@aztec/circuit-types/interfaces'; import { type L2BlockBuiltStats } from '@aztec/circuit-types/stats'; import { AztecAddress, EthAddress, type GlobalVariables, type Header } from '@aztec/circuits.js'; @@ -61,7 +61,7 @@ export class Sequencer { private globalsBuilder: GlobalVariableBuilder, private p2pClient: P2P, private worldState: WorldStateSynchronizer, - private prover: BlockProver, + private prover: ProverClient, private l2BlockSource: L2BlockSource, private l1ToL2MessageSource: L1ToL2MessageSource, private publicProcessorFactory: PublicProcessorFactory, @@ -264,8 +264,6 @@ export class Sequencer { await 
this.p2pClient.deleteTxs(txHashes); } this.log.error(`Rolling back world state DB due to error assembling block`, (err as any).stack); - // Cancel any further proving on the block - this.prover?.cancelBlock(); await this.worldState.getLatest().rollback(); } } @@ -315,10 +313,11 @@ export class Sequencer { const blockSize = Math.max(2, numRealTxs); const blockBuildingTimer = new Timer(); - const blockTicket = await this.prover.startNewBlock(blockSize, newGlobalVariables, l1ToL2Messages); + const prover = this.prover.createBlockProver(this.worldState.getLatest()); + const blockTicket = await prover.startNewBlock(blockSize, newGlobalVariables, l1ToL2Messages); const [publicProcessorDuration, [processedTxs, failedTxs]] = await elapsed(() => - processor.process(validTxs, blockSize, this.prover, this.txValidatorFactory.validatorForProcessedTxs()), + processor.process(validTxs, blockSize, prover, this.txValidatorFactory.validatorForProcessedTxs()), ); if (failedTxs.length > 0) { const failedTxData = failedTxs.map(fail => fail.tx); @@ -332,14 +331,14 @@ export class Sequencer { // we should bail. if (processedTxs.length === 0 && !this.skipMinTxsPerBlockCheck(elapsedSinceLastBlock)) { this.log.verbose('No txs processed correctly to build block. Exiting'); - this.prover.cancelBlock(); + prover.cancelBlock(); return; } await assertBlockHeight(); // All real transactions have been added, set the block as full and complete the proving. - await this.prover.setBlockCompleted(); + await prover.setBlockCompleted(); // Here we are now waiting for the block to be proven. // TODO(@PhilWindle) We should probably periodically check for things like another @@ -352,7 +351,7 @@ export class Sequencer { await assertBlockHeight(); // Block is proven, now finalise and publish! 
- const { block, aggregationObject, proof } = await this.prover.finaliseBlock(); + const { block, aggregationObject, proof } = await prover.finaliseBlock(); await assertBlockHeight(); @@ -381,7 +380,7 @@ export class Sequencer { await this.publisher.submitProof( block.header, block.archive.root, - this.prover.getProverId(), + prover.getProverId(), aggregationObject, proof, ); diff --git a/yarn-project/simulator/src/public/hints_builder.ts b/yarn-project/simulator/src/public/hints_builder.ts index 7846f10f152..5b0ad19713e 100644 --- a/yarn-project/simulator/src/public/hints_builder.ts +++ b/yarn-project/simulator/src/public/hints_builder.ts @@ -1,4 +1,4 @@ -import { MerkleTreeId } from '@aztec/circuit-types'; +import { type IndexedTreeId, MerkleTreeId } from '@aztec/circuit-types'; import { type Fr, type MAX_NULLIFIERS_PER_TX, @@ -23,7 +23,7 @@ import { buildSiloedNullifierReadRequestHints, } from '@aztec/circuits.js'; import { type Tuple } from '@aztec/foundation/serialize'; -import { type IndexedTreeId, type MerkleTreeOperations } from '@aztec/world-state'; +import { type MerkleTreeOperations } from '@aztec/world-state'; export class HintsBuilder { constructor(private db: MerkleTreeOperations) {} diff --git a/yarn-project/simulator/src/public/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor.test.ts index ec406ecb348..1b79581195b 100644 --- a/yarn-project/simulator/src/public/public_processor.test.ts +++ b/yarn-project/simulator/src/public/public_processor.test.ts @@ -4,6 +4,7 @@ import { PublicDataWrite, PublicKernelType, SimulationError, + type TreeInfo, type TxValidator, mockTx, toTxEffect, @@ -43,7 +44,7 @@ import { computeFeePayerBalanceLeafSlot, } from '@aztec/simulator'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; -import { type MerkleTreeOperations, type TreeInfo } from '@aztec/world-state'; +import { type MerkleTreeOperations } from '@aztec/world-state'; import { jest } from '@jest/globals'; import { 
type MockProxy, mock } from 'jest-mock-extended'; diff --git a/yarn-project/simulator/src/public/public_processor.ts b/yarn-project/simulator/src/public/public_processor.ts index 995e46ac032..f2fbdf92352 100644 --- a/yarn-project/simulator/src/public/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor.ts @@ -60,24 +60,24 @@ export class PublicProcessorFactory { * Creates a new instance of a PublicProcessor. * @param historicalHeader - The header of a block previous to the one in which the tx is included. * @param globalVariables - The global variables for the block being processed. - * @param newContracts - Provides access to contract bytecode for public executions. * @returns A new instance of a PublicProcessor. */ - public create(historicalHeader: Header | undefined, globalVariables: GlobalVariables): PublicProcessor { - historicalHeader = historicalHeader ?? this.merkleTree.getInitialHeader(); - + public create(maybeHistoricalHeader: Header | undefined, globalVariables: GlobalVariables): PublicProcessor { + const { merkleTree, telemetryClient } = this; + const historicalHeader = maybeHistoricalHeader ?? 
merkleTree.getInitialHeader(); const publicContractsDB = new ContractsDataSourcePublicDB(this.contractDataSource); - const worldStatePublicDB = new WorldStatePublicDB(this.merkleTree); - const worldStateDB = new WorldStateDB(this.merkleTree); + + const worldStatePublicDB = new WorldStatePublicDB(merkleTree); + const worldStateDB = new WorldStateDB(merkleTree); const publicExecutor = new PublicExecutor( worldStatePublicDB, publicContractsDB, worldStateDB, historicalHeader, - this.telemetryClient, + telemetryClient, ); return new PublicProcessor( - this.merkleTree, + merkleTree, publicExecutor, new RealPublicKernelCircuitSimulator(this.simulator), globalVariables, diff --git a/yarn-project/simulator/src/public/setup_phase_manager.test.ts b/yarn-project/simulator/src/public/setup_phase_manager.test.ts index 41bfa9ea5a7..a765c59e984 100644 --- a/yarn-project/simulator/src/public/setup_phase_manager.test.ts +++ b/yarn-project/simulator/src/public/setup_phase_manager.test.ts @@ -1,7 +1,7 @@ -import { mockTx } from '@aztec/circuit-types'; +import { type TreeInfo, mockTx } from '@aztec/circuit-types'; import { GlobalVariables, Header } from '@aztec/circuits.js'; import { type PublicExecutor } from '@aztec/simulator'; -import { type MerkleTreeOperations, type TreeInfo } from '@aztec/world-state'; +import { type MerkleTreeOperations } from '@aztec/world-state'; import { it } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts index d7ee9504f71..fd99f6ba9ce 100644 --- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts +++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts @@ -1,19 +1,22 @@ -import { type L1ToL2MessageSource, type L2Block, L2BlockDownloader, type L2BlockSource } from '@aztec/circuit-types'; +import { + type 
HandleL2BlockAndMessagesResult, + type L1ToL2MessageSource, + type L2Block, + L2BlockDownloader, + type L2BlockSource, +} from '@aztec/circuit-types'; import { type L2BlockHandledStats } from '@aztec/circuit-types/stats'; import { L1_TO_L2_MSG_SUBTREE_HEIGHT } from '@aztec/circuits.js/constants'; import { Fr } from '@aztec/foundation/fields'; import { SerialQueue } from '@aztec/foundation/fifo'; import { createDebugLogger } from '@aztec/foundation/log'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; import { elapsed } from '@aztec/foundation/timer'; import { type AztecKVStore, type AztecSingleton } from '@aztec/kv-store'; import { openTmpStore } from '@aztec/kv-store/utils'; import { SHA256Trunc, StandardTree } from '@aztec/merkle-tree'; -import { - type HandleL2BlockAndMessagesResult, - type MerkleTreeOperations, - type MerkleTrees, -} from '../world-state-db/index.js'; +import { type MerkleTreeOperations, type MerkleTrees } from '../world-state-db/index.js'; import { MerkleTreeOperationsFacade } from '../world-state-db/merkle_tree_operations_facade.js'; import { MerkleTreeSnapshotOperationsFacade } from '../world-state-db/merkle_tree_snapshot_operations_facade.js'; import { type WorldStateConfig } from './config.js'; @@ -31,12 +34,16 @@ import { export class ServerWorldStateSynchronizer implements WorldStateSynchronizer { private latestBlockNumberAtStart = 0; + // TODO(palla/prover-node): JobQueue, stopping, runningPromise, pausedPromise, pausedResolve + // should all be hidden under a single abstraction. Also, check if we actually need the jobqueue. 
private l2BlockDownloader: L2BlockDownloader; private syncPromise: Promise = Promise.resolve(); private syncResolve?: () => void = undefined; private jobQueue = new SerialQueue(); private stopping = false; private runningPromise: Promise = Promise.resolve(); + private pausedPromise?: Promise = undefined; + private pausedResolve?: () => void = undefined; private currentState: WorldStateRunningState = WorldStateRunningState.IDLE; private blockNumber: AztecSingleton; @@ -67,6 +74,11 @@ export class ServerWorldStateSynchronizer implements WorldStateSynchronizer { return new MerkleTreeSnapshotOperationsFacade(this.merkleTreeDb, blockNumber); } + private async getFork(includeUncommitted: boolean): Promise { + this.log.verbose(`Forking world state at ${this.blockNumber.get()}`); + return new MerkleTreeOperationsFacade(await this.merkleTreeDb.fork(), includeUncommitted); + } + public async start() { if (this.currentState === WorldStateRunningState.STOPPED) { throw new Error('Synchronizer already stopped'); @@ -102,6 +114,9 @@ export class ServerWorldStateSynchronizer implements WorldStateSynchronizer { const blockProcess = async () => { while (!this.stopping) { await this.jobQueue.put(() => this.collectAndProcessBlocks()); + if (this.pausedPromise) { + await this.pausedPromise; + } } }; this.jobQueue.start(); @@ -129,6 +144,23 @@ export class ServerWorldStateSynchronizer implements WorldStateSynchronizer { return this.blockNumber.get() ?? 
0; } + private async pause() { + this.log.debug('Pausing world state synchronizer'); + ({ promise: this.pausedPromise, resolve: this.pausedResolve } = promiseWithResolvers()); + await this.jobQueue.syncPoint(); + this.log.debug('Paused world state synchronizer'); + } + + private resume() { + if (this.pausedResolve) { + this.log.debug('Resuming world state synchronizer'); + this.pausedResolve(); + this.pausedResolve = undefined; + this.pausedPromise = undefined; + this.log.debug('Resumed world state synchronizer'); + } + } + public status(): Promise { const status = { syncedToL2Block: this.currentL2BlockNum, @@ -138,30 +170,29 @@ export class ServerWorldStateSynchronizer implements WorldStateSynchronizer { } /** - * Forces an immediate sync - * @param minBlockNumber - The minimum block number that we must sync to + * Forces an immediate sync. + * @param targetBlockNumber - The target block number that we must sync to. Will download unproven blocks if needed to reach it. Throws if cannot be reached. * @returns A promise that resolves with the block number the world state was synced to */ - public async syncImmediate(minBlockNumber?: number): Promise { + public async syncImmediate(targetBlockNumber?: number): Promise { if (this.currentState !== WorldStateRunningState.RUNNING) { throw new Error(`World State is not running, unable to perform sync`); } - // If we have been given a block number to sync to and we have reached that number - // then return. - if (minBlockNumber !== undefined && minBlockNumber <= this.currentL2BlockNum) { + // If we have been given a block number to sync to and we have reached that number then return. + if (targetBlockNumber !== undefined && targetBlockNumber <= this.currentL2BlockNum) { return this.currentL2BlockNum; } - const blockToSyncTo = minBlockNumber === undefined ? 
'latest' : `${minBlockNumber}`; - this.log.debug(`World State at block ${this.currentL2BlockNum}, told to sync to block ${blockToSyncTo}...`); - // ensure any outstanding block updates are completed first. + this.log.debug(`World State at ${this.currentL2BlockNum} told to sync to ${targetBlockNumber ?? 'latest'}`); + // ensure any outstanding block updates are completed first await this.jobQueue.syncPoint(); + while (true) { // Check the block number again - if (minBlockNumber !== undefined && minBlockNumber <= this.currentL2BlockNum) { + if (targetBlockNumber !== undefined && targetBlockNumber <= this.currentL2BlockNum) { return this.currentL2BlockNum; } - // Poll for more blocks - const numBlocks = await this.l2BlockDownloader.pollImmediate(); + // Poll for more blocks, requesting even unproven blocks. + const numBlocks = await this.l2BlockDownloader.pollImmediate(targetBlockNumber, false); this.log.debug(`Block download immediate poll yielded ${numBlocks} blocks`); if (numBlocks) { // More blocks were received, process them and go round again @@ -169,15 +200,28 @@ export class ServerWorldStateSynchronizer implements WorldStateSynchronizer { continue; } // No blocks are available, if we have been given a block number then we can't achieve it - if (minBlockNumber !== undefined) { + if (targetBlockNumber !== undefined) { throw new Error( - `Unable to sync to block number ${minBlockNumber}, currently synced to block ${this.currentL2BlockNum}`, + `Unable to sync to block number ${targetBlockNumber}, currently synced to block ${this.currentL2BlockNum}`, ); } return this.currentL2BlockNum; } } + public async syncImmediateAndFork( + targetBlockNumber: number, + forkIncludeUncommitted: boolean, + ): Promise { + try { + await this.pause(); + await this.syncImmediate(targetBlockNumber); + return await this.getFork(forkIncludeUncommitted); + } finally { + this.resume(); + } + } + /** * Checks for the availability of new blocks and processes them. 
*/ diff --git a/yarn-project/world-state/src/synchronizer/world_state_synchronizer.ts b/yarn-project/world-state/src/synchronizer/world_state_synchronizer.ts index 24dfa22226c..0411827e10d 100644 --- a/yarn-project/world-state/src/synchronizer/world_state_synchronizer.ts +++ b/yarn-project/world-state/src/synchronizer/world_state_synchronizer.ts @@ -52,6 +52,14 @@ export interface WorldStateSynchronizer { */ syncImmediate(minBlockNumber?: number): Promise; + /** + * Pauses the synchronizer, syncs to the target block number, forks world state, and resumes. + * @param targetBlockNumber - The block number to sync to. + * @param forkIncludeUncommitted - Whether to include uncommitted data in the fork. + * @returns The db forked at the requested target block number. + */ + syncImmediateAndFork(targetBlockNumber: number, forkIncludeUncommitted: boolean): Promise; + /** * Returns an instance of MerkleTreeOperations that will include uncommitted data. * @returns An instance of MerkleTreeOperations that will include uncommitted data. 
diff --git a/yarn-project/world-state/src/world-state-db/index.ts b/yarn-project/world-state/src/world-state-db/index.ts index f4c20a567f9..63ec2e7ba65 100644 --- a/yarn-project/world-state/src/world-state-db/index.ts +++ b/yarn-project/world-state/src/world-state-db/index.ts @@ -1,5 +1,6 @@ export * from './merkle_trees.js'; export * from './merkle_tree_db.js'; -export * from './merkle_tree_operations.js'; export * from './merkle_tree_operations_facade.js'; export * from './merkle_tree_snapshot_operations_facade.js'; + +export { MerkleTreeOperations } from '@aztec/circuit-types/interfaces'; diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts index 4cee2af4522..d31ba02339c 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts @@ -1,9 +1,8 @@ import { type MerkleTreeId } from '@aztec/circuit-types'; +import { type MerkleTreeOperations } from '@aztec/circuit-types/interfaces'; import { type Fr, MAX_NULLIFIERS_PER_TX, MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX } from '@aztec/circuits.js'; import { type IndexedTreeSnapshot, type TreeSnapshot } from '@aztec/merkle-tree'; -import { type MerkleTreeOperations } from './merkle_tree_operations.js'; - /** * * @remarks Short explanation: @@ -62,4 +61,9 @@ export type MerkleTreeDb = { * @param block - The block number to take the snapshot at. */ getSnapshot(block: number): Promise; + + /** + * Forks the database at its current state. 
+ */ + fork(): Promise; }; diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_map.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_map.ts new file mode 100644 index 00000000000..61c6a91c198 --- /dev/null +++ b/yarn-project/world-state/src/world-state-db/merkle_tree_map.ts @@ -0,0 +1,11 @@ +import { type MerkleTreeId } from '@aztec/circuit-types'; +import { type Fr } from '@aztec/circuits.js'; +import { type AppendOnlyTree, type IndexedTree } from '@aztec/merkle-tree'; + +export type MerkleTreeMap = { + [MerkleTreeId.NULLIFIER_TREE]: IndexedTree; + [MerkleTreeId.NOTE_HASH_TREE]: AppendOnlyTree; + [MerkleTreeId.PUBLIC_DATA_TREE]: IndexedTree; + [MerkleTreeId.L1_TO_L2_MESSAGE_TREE]: AppendOnlyTree; + [MerkleTreeId.ARCHIVE]: AppendOnlyTree; +}; diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts index b67636866db..ae014d7d122 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts @@ -1,16 +1,15 @@ -import { type L2Block, type MerkleTreeId, type SiblingPath } from '@aztec/circuit-types'; -import { type Fr, type Header, type NullifierLeafPreimage, type StateReference } from '@aztec/circuits.js'; -import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; -import { type BatchInsertionResult } from '@aztec/merkle-tree'; - -import { type MerkleTreeDb } from './merkle_tree_db.js'; +import { type BatchInsertionResult, type L2Block, type MerkleTreeId, type SiblingPath } from '@aztec/circuit-types'; import { type HandleL2BlockAndMessagesResult, type IndexedTreeId, type MerkleTreeLeafType, type MerkleTreeOperations, type TreeInfo, -} from './merkle_tree_operations.js'; +} from '@aztec/circuit-types/interfaces'; +import { type Fr, type Header, type NullifierLeafPreimage, type StateReference } from 
'@aztec/circuits.js'; +import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; + +import { type MerkleTreeDb } from './merkle_tree_db.js'; /** * Wraps a MerkleTreeDbOperations to call all functions with a preset includeUncommitted flag. diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts index 3f72c680732..a25037b3d68 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts @@ -1,16 +1,17 @@ import { MerkleTreeId, type SiblingPath } from '@aztec/circuit-types'; -import { AppendOnlyTreeSnapshot, Fr, type Header, PartialStateReference, StateReference } from '@aztec/circuits.js'; -import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; -import { type BatchInsertionResult, type IndexedTreeSnapshot } from '@aztec/merkle-tree'; - -import { type MerkleTreeDb, type TreeSnapshots } from './merkle_tree_db.js'; import { + type BatchInsertionResult, type HandleL2BlockAndMessagesResult, type IndexedTreeId, type MerkleTreeLeafType, type MerkleTreeOperations, type TreeInfo, -} from './merkle_tree_operations.js'; +} from '@aztec/circuit-types/interfaces'; +import { AppendOnlyTreeSnapshot, Fr, type Header, PartialStateReference, StateReference } from '@aztec/circuits.js'; +import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; +import { type IndexedTreeSnapshot } from '@aztec/merkle-tree'; + +import { type MerkleTreeDb, type TreeSnapshots } from './merkle_tree_db.js'; /** * Merkle tree operations on readonly tree snapshots. 
diff --git a/yarn-project/world-state/src/world-state-db/merkle_trees.ts b/yarn-project/world-state/src/world-state-db/merkle_trees.ts index 881141caae0..0ebd9937bca 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_trees.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_trees.ts @@ -1,4 +1,12 @@ import { type L2Block, MerkleTreeId, PublicDataWrite, type SiblingPath, TxEffect } from '@aztec/circuit-types'; +import { + type BatchInsertionResult, + type HandleL2BlockAndMessagesResult, + type IndexedTreeId, + type MerkleTreeLeafType, + type MerkleTreeOperations, + type TreeInfo, +} from '@aztec/circuit-types/interfaces'; import { ARCHIVE_HEIGHT, AppendOnlyTreeSnapshot, @@ -28,7 +36,6 @@ import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { type AztecKVStore, type AztecSingleton } from '@aztec/kv-store'; import { type AppendOnlyTree, - type BatchInsertionResult, type IndexedTree, Poseidon, StandardIndexedTree, @@ -46,14 +53,7 @@ import { type MerkleTreeDb, type TreeSnapshots, } from './merkle_tree_db.js'; -import { - type HandleL2BlockAndMessagesResult, - type IndexedTreeId, - type MerkleTreeLeafType, - type MerkleTreeMap, - type MerkleTreeOperations, - type TreeInfo, -} from './merkle_tree_operations.js'; +import { type MerkleTreeMap } from './merkle_tree_map.js'; import { MerkleTreeOperationsFacade } from './merkle_tree_operations_facade.js'; /** @@ -180,6 +180,15 @@ export class MerkleTrees implements MerkleTreeDb { await this.#commit(); } + public async fork(): Promise { + // TODO(palla/prover-node): If the underlying store is being shared with other components, we're unnecessarily + // copying a lot of data unrelated to merkle trees. This may be fine for now, and we may be able to ditch backup-based + // forking in favor of a more elegant proposal. But if we see this operation starts taking a lot of time, we may want + // to open separate stores for merkle trees and other components. 
+ const forked = await this.store.fork(); + return MerkleTrees.new(forked, this.log); + } + public getInitialHeader(): Header { return Header.empty({ state: this.#loadInitialStateReference() }); } From ef30c8ff036f8d05c4f9727029801918c2522ebc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Wed, 7 Aug 2024 14:59:15 -0300 Subject: [PATCH 08/61] [chore] fix bootstrap clean for people who are too nice (#7813) I was doing `boostrap.sh clean` and typing `yes`, and the process exited cleanly without me realizing nothing had happened. --- bootstrap.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bootstrap.sh b/bootstrap.sh index a1c1a4b623d..9b6cc7d073b 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -103,7 +103,8 @@ if [ "$CMD" = "clean" ]; then echo "WARNING: This will erase *all* untracked files, including hooks and submodules." echo -n "Continue? [y/n] " read user_input - if [ "$user_input" != "y" ] && [ "$user_input" != "Y" ]; then + if [ "$user_input" != "y" ] && [ "$user_input" != "yes" ] && [ "$user_input" != "Y" ] && [ "$user_input" != "YES" ]; then + echo "Exiting without cleaning" exit 1 fi @@ -117,6 +118,7 @@ if [ "$CMD" = "clean" ]; then # Remove all untracked files, directories, nested repos, and .gitignore files. git clean -ffdx + echo "Cleaning complete" exit 0 elif [ "$CMD" = "full" ]; then if can_use_ci_cache; then From e2feaf8c0613b2b5adfd496a94e1bd58296768f4 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Wed, 7 Aug 2024 18:42:39 -0300 Subject: [PATCH 09/61] fix: Create proving job queue when prover node started with no agents (#7828) If the prover node is started with no in-proc agents, then we should create a proving job source so agents can attach to it. Unrelated, also tweaks some logging statements on the agent. 
--- yarn-project/aztec/src/cli/cmds/start_prover_node.ts | 2 +- yarn-project/prover-client/src/prover-agent/prover-agent.ts | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts index 43058a54fed..9a95b0bb947 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts @@ -70,7 +70,7 @@ export const startProverNode = async ( services.push({ node: createProverNodeRpcServer(proverNode) }); - if (options.prover) { + if (!options.prover) { const provingJobSource = createProvingJobSourceServer(proverNode.getProver().getProvingJobSource()); services.push({ provingJobSource }); } diff --git a/yarn-project/prover-client/src/prover-agent/prover-agent.ts b/yarn-project/prover-client/src/prover-agent/prover-agent.ts index a1d0fd7013c..a7f0fd57a1f 100644 --- a/yarn-project/prover-client/src/prover-agent/prover-agent.ts +++ b/yarn-project/prover-client/src/prover-agent/prover-agent.ts @@ -71,7 +71,7 @@ export class ProverAgent { ); } } catch (err) { - // no-op + this.log.error(`Error fetching job`, err); } } }, this.pollIntervalMs); @@ -96,7 +96,7 @@ export class ProverAgent { this.log.debug(`Picked up proving job id=${job.id} type=${ProvingRequestType[job.request.type]}`); const [time, result] = await elapsed(this.getProof(job.request)); if (this.isRunning()) { - this.log.debug( + this.log.verbose( `Processed proving job id=${job.id} type=${ProvingRequestType[job.request.type]} duration=${time}ms`, ); await jobSource.resolveProvingJob(job.id, result); From 5ce3e0940818e5ed21096a70d5a62442fef27cfd Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 8 Aug 2024 02:16:35 +0000 Subject: [PATCH 10/61] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "4f525cc32f" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: "master" commit: 
"4f525cc32f" git-subrepo: version: "0.4.6" origin: "???" commit: "???" [skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 553a8fd44fa..cb9e2f7211c 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = d4aee60047d9b92f44c2d7d7cc7f3d6b4453f725 - parent = 7b90699fdbebcb00a06f396e8263a9ffe156fbc2 + commit = 4f525cc32f01cbdf4598cfae38107de2907809f7 + parent = e2feaf8c0613b2b5adfd496a94e1bd58296768f4 method = merge cmdver = 0.4.6 From 54774504fbd8cb1de53de5064c5d59495f8fc7c0 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 8 Aug 2024 02:17:11 +0000 Subject: [PATCH 11/61] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af5863..fb16331b042 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.47.1", directory="noir-projects/noir-protocol-circuits/crates/types" } From 154fb95f70dcaa821d44d8bcb94e11ec21b1399a Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 8 Aug 2024 02:17:11 +0000 Subject: [PATCH 12/61] git_subrepo.sh: Fix parent in .gitrepo file. 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 8674671f2d4..d2ea11ed192 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ remote = https://github.com/AztecProtocol/aztec-nr commit = 13b2a6ab0a435b7cfb09b8862b7ffc17bda16034 method = merge cmdver = 0.4.6 - parent = af9acb9698eaae764435299e7b5e563b0623c3f2 + parent = 4c57ce9e5dc22f404ad4c3c97aaa5aaa11d26966 From e43684cfd243e5083fc53718d8a62f1fb02ddafd Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 8 Aug 2024 02:17:14 +0000 Subject: [PATCH 13/61] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "51449b1705" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "51449b1705" git-subrepo: version: "0.4.6" origin: "???" commit: "???" [skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index d2ea11ed192..719edd14aaa 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = 13b2a6ab0a435b7cfb09b8862b7ffc17bda16034 + commit = 51449b1705a49763ca1b22354e02561ee2d04368 method = merge cmdver = 0.4.6 - parent = 4c57ce9e5dc22f404ad4c3c97aaa5aaa11d26966 + parent = 05b13ddbd426dd13a151bd54525f16f9eea9a154 diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index fb16331b042..7a1f1af5863 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { 
git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.47.1", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } From 5ff3554ace81831d0a561b6a4e186b48edb12e5e Mon Sep 17 00:00:00 2001 From: Facundo Date: Thu, 8 Aug 2024 08:34:44 +0100 Subject: [PATCH 14/61] refactor(avm): no fake rows in main trace (#7823) Needed for gas accounting. There are some extra changes but it's the best I could split the PR. PS: We do have repeated SLOADS/SSTORES which are kind of fake, but all else should be good (modulo the stubbed gadgets that dont have a selector). --- barretenberg/cpp/format.sh | 6 + barretenberg/cpp/pil/avm/main.pil | 9 +- .../vm/avm/generated/circuit_builder.cpp | 1 - .../vm/avm/generated/circuit_builder.hpp | 4 +- .../barretenberg/vm/avm/generated/flavor.cpp | 1193 ++++++++--------- .../barretenberg/vm/avm/generated/flavor.hpp | 6 +- .../vm/avm/generated/full_row.cpp | 2 - .../vm/avm/generated/full_row.hpp | 3 +- .../vm/avm/generated/relations/main.hpp | 303 ++--- .../barretenberg/vm/avm/trace/gas_trace.cpp | 10 +- .../barretenberg/vm/avm/trace/gas_trace.hpp | 2 +- .../src/barretenberg/vm/avm/trace/helper.cpp | 1 - .../barretenberg/vm/avm/trace/mem_trace.hpp | 3 + .../src/barretenberg/vm/avm/trace/trace.cpp | 893 +++--------- .../src/barretenberg/vm/avm/trace/trace.hpp | 36 +- .../bb-pil-backend/src/relation_builder.rs | 2 + 16 files changed, 986 insertions(+), 1488 deletions(-) diff --git a/barretenberg/cpp/format.sh b/barretenberg/cpp/format.sh index 3bbfd4b59fc..0b1deb05615 100755 --- a/barretenberg/cpp/format.sh +++ b/barretenberg/cpp/format.sh @@ -13,6 +13,12 @@ if [ "$1" == "staged" ]; then sed -i.bak 's/\r$//' $FILE && rm ${FILE}.bak git add $FILE done +elif [ "$1" == "changed" ]; then + echo Formatting barretenberg changed files... 
+ for FILE in $(git diff-index --diff-filter=d --relative --name-only HEAD | grep -e '\.\(cpp\|hpp\|tcc\)$'); do + clang-format-16 -i $FILE + sed -i.bak 's/\r$//' $FILE && rm ${FILE}.bak + done elif [ "$1" == "check" ]; then for FILE in $(find ./src -iname *.hpp -o -iname *.cpp -o -iname *.tcc | grep -v src/msgpack-c); do clang-format-16 --dry-run --Werror $FILE diff --git a/barretenberg/cpp/pil/avm/main.pil b/barretenberg/cpp/pil/avm/main.pil index 0f6b67549b7..7757cc5e99d 100644 --- a/barretenberg/cpp/pil/avm/main.pil +++ b/barretenberg/cpp/pil/avm/main.pil @@ -100,10 +100,11 @@ namespace main(256); da_out_of_gas * (1 - da_out_of_gas) = 0; // Constrain that the gas decrements correctly per instruction + // TODO: Special-case for external call. #[L2_GAS_REMAINING_DECREMENT] - sel_gas_accounting_active * (l2_gas_remaining' - l2_gas_remaining + l2_gas_op_cost) = 0; + sel_gas_accounting_active * (1 - sel_op_external_call) * (l2_gas_remaining' - l2_gas_remaining + l2_gas_op_cost) = 0; #[DA_GAS_REMAINING_DECREMENT] - sel_gas_accounting_active * (da_gas_remaining' - da_gas_remaining + da_gas_op_cost) = 0; + sel_gas_accounting_active * (1 - sel_op_external_call) * (da_gas_remaining' - da_gas_remaining + da_gas_op_cost) = 0; // Constrain that the remaining gas is unchanged otherwise (multi-line operations) #[L2_GAS_INACTIVE] @@ -497,11 +498,11 @@ namespace main(256); // Alternatively, we introduce a boolean selector for the three opcodes mentioned above. // Note: External call gas cost is not constrained pol commit sel_gas_accounting_active; - pol commit sel_mem_op_activate_gas; // TODO: remove this one // TODO: remove sload and sstore from here // This temporarily disables gas tracking for sload and sstore because our gas // tracking doesn't work properly for instructions that span multiple rows - sel_gas_accounting_active - OPCODE_SELECTORS - SEL_ALL_CTRL_FLOW - sel_op_sload - sel_op_sstore - sel_mem_op_activate_gas = 0; + // TODO: disabling this until PR in stack. 
+ // sel_gas_accounting_active - OPCODE_SELECTORS - SEL_ALL_CTRL_FLOW - sel_op_sload - sel_op_sstore - sel_mem_op_activate_gas = 0; // Program counter must increment if not jumping or returning // TODO: support for muli-rows opcode in execution trace such as diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.cpp index f2cb4c04cc8..58a8a8079d2 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.cpp @@ -206,7 +206,6 @@ AvmCircuitBuilder::ProverPolynomials AvmCircuitBuilder::compute_polynomials() co polys.main_sel_gas_accounting_active[i] = rows[i].main_sel_gas_accounting_active; polys.main_sel_last[i] = rows[i].main_sel_last; polys.main_sel_mem_op_a[i] = rows[i].main_sel_mem_op_a; - polys.main_sel_mem_op_activate_gas[i] = rows[i].main_sel_mem_op_activate_gas; polys.main_sel_mem_op_b[i] = rows[i].main_sel_mem_op_b; polys.main_sel_mem_op_c[i] = rows[i].main_sel_mem_op_c; polys.main_sel_mem_op_d[i] = rows[i].main_sel_mem_op_d; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.hpp index 0f5cf53fb5d..bb20eb74617 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.hpp @@ -20,8 +20,8 @@ class AvmCircuitBuilder { using Polynomial = Flavor::Polynomial; using ProverPolynomials = Flavor::ProverPolynomials; - static constexpr size_t num_fixed_columns = 704; - static constexpr size_t num_polys = 704 + 74; + static constexpr size_t num_fixed_columns = 703; + static constexpr size_t num_polys = 703 + 74; std::vector rows; void set_trace(std::vector&& trace) { rows = std::move(trace); } diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.cpp 
b/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.cpp index 481de9fb1b2..418314f4fd1 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.cpp @@ -187,602 +187,601 @@ AvmFlavor::AllConstRefValues::AllConstRefValues( , main_sel_gas_accounting_active(il[179]) , main_sel_last(il[180]) , main_sel_mem_op_a(il[181]) - , main_sel_mem_op_activate_gas(il[182]) - , main_sel_mem_op_b(il[183]) - , main_sel_mem_op_c(il[184]) - , main_sel_mem_op_d(il[185]) - , main_sel_mov_ia_to_ic(il[186]) - , main_sel_mov_ib_to_ic(il[187]) - , main_sel_op_add(il[188]) - , main_sel_op_address(il[189]) - , main_sel_op_and(il[190]) - , main_sel_op_block_number(il[191]) - , main_sel_op_calldata_copy(il[192]) - , main_sel_op_cast(il[193]) - , main_sel_op_chain_id(il[194]) - , main_sel_op_cmov(il[195]) - , main_sel_op_coinbase(il[196]) - , main_sel_op_dagasleft(il[197]) - , main_sel_op_div(il[198]) - , main_sel_op_emit_l2_to_l1_msg(il[199]) - , main_sel_op_emit_note_hash(il[200]) - , main_sel_op_emit_nullifier(il[201]) - , main_sel_op_emit_unencrypted_log(il[202]) - , main_sel_op_eq(il[203]) - , main_sel_op_external_call(il[204]) - , main_sel_op_external_return(il[205]) - , main_sel_op_fdiv(il[206]) - , main_sel_op_fee_per_da_gas(il[207]) - , main_sel_op_fee_per_l2_gas(il[208]) - , main_sel_op_function_selector(il[209]) - , main_sel_op_get_contract_instance(il[210]) - , main_sel_op_halt(il[211]) - , main_sel_op_internal_call(il[212]) - , main_sel_op_internal_return(il[213]) - , main_sel_op_jump(il[214]) - , main_sel_op_jumpi(il[215]) - , main_sel_op_keccak(il[216]) - , main_sel_op_l1_to_l2_msg_exists(il[217]) - , main_sel_op_l2gasleft(il[218]) - , main_sel_op_lt(il[219]) - , main_sel_op_lte(il[220]) - , main_sel_op_mov(il[221]) - , main_sel_op_mul(il[222]) - , main_sel_op_not(il[223]) - , main_sel_op_note_hash_exists(il[224]) - , main_sel_op_nullifier_exists(il[225]) - , main_sel_op_or(il[226]) - , 
main_sel_op_pedersen(il[227]) - , main_sel_op_poseidon2(il[228]) - , main_sel_op_radix_le(il[229]) - , main_sel_op_sender(il[230]) - , main_sel_op_sha256(il[231]) - , main_sel_op_shl(il[232]) - , main_sel_op_shr(il[233]) - , main_sel_op_sload(il[234]) - , main_sel_op_sstore(il[235]) - , main_sel_op_storage_address(il[236]) - , main_sel_op_sub(il[237]) - , main_sel_op_timestamp(il[238]) - , main_sel_op_transaction_fee(il[239]) - , main_sel_op_version(il[240]) - , main_sel_op_xor(il[241]) - , main_sel_q_kernel_lookup(il[242]) - , main_sel_q_kernel_output_lookup(il[243]) - , main_sel_resolve_ind_addr_a(il[244]) - , main_sel_resolve_ind_addr_b(il[245]) - , main_sel_resolve_ind_addr_c(il[246]) - , main_sel_resolve_ind_addr_d(il[247]) - , main_sel_returndata(il[248]) - , main_sel_rng_16(il[249]) - , main_sel_rng_8(il[250]) - , main_sel_slice_gadget(il[251]) - , main_space_id(il[252]) - , main_tag_err(il[253]) - , main_w_in_tag(il[254]) - , mem_addr(il[255]) - , mem_clk(il[256]) - , mem_diff_hi(il[257]) - , mem_diff_lo(il[258]) - , mem_diff_mid(il[259]) - , mem_glob_addr(il[260]) - , mem_last(il[261]) - , mem_lastAccess(il[262]) - , mem_one_min_inv(il[263]) - , mem_r_in_tag(il[264]) - , mem_rw(il[265]) - , mem_sel_mem(il[266]) - , mem_sel_mov_ia_to_ic(il[267]) - , mem_sel_mov_ib_to_ic(il[268]) - , mem_sel_op_a(il[269]) - , mem_sel_op_b(il[270]) - , mem_sel_op_c(il[271]) - , mem_sel_op_cmov(il[272]) - , mem_sel_op_d(il[273]) - , mem_sel_op_poseidon_read_a(il[274]) - , mem_sel_op_poseidon_read_b(il[275]) - , mem_sel_op_poseidon_read_c(il[276]) - , mem_sel_op_poseidon_read_d(il[277]) - , mem_sel_op_poseidon_write_a(il[278]) - , mem_sel_op_poseidon_write_b(il[279]) - , mem_sel_op_poseidon_write_c(il[280]) - , mem_sel_op_poseidon_write_d(il[281]) - , mem_sel_op_slice(il[282]) - , mem_sel_resolve_ind_addr_a(il[283]) - , mem_sel_resolve_ind_addr_b(il[284]) - , mem_sel_resolve_ind_addr_c(il[285]) - , mem_sel_resolve_ind_addr_d(il[286]) - , mem_sel_rng_chk(il[287]) - , 
mem_skip_check_tag(il[288]) - , mem_space_id(il[289]) - , mem_tag(il[290]) - , mem_tag_err(il[291]) - , mem_tsp(il[292]) - , mem_val(il[293]) - , mem_w_in_tag(il[294]) - , pedersen_clk(il[295]) - , pedersen_input(il[296]) - , pedersen_output(il[297]) - , pedersen_sel_pedersen(il[298]) - , poseidon2_B_10_0(il[299]) - , poseidon2_B_10_1(il[300]) - , poseidon2_B_10_2(il[301]) - , poseidon2_B_10_3(il[302]) - , poseidon2_B_11_0(il[303]) - , poseidon2_B_11_1(il[304]) - , poseidon2_B_11_2(il[305]) - , poseidon2_B_11_3(il[306]) - , poseidon2_B_12_0(il[307]) - , poseidon2_B_12_1(il[308]) - , poseidon2_B_12_2(il[309]) - , poseidon2_B_12_3(il[310]) - , poseidon2_B_13_0(il[311]) - , poseidon2_B_13_1(il[312]) - , poseidon2_B_13_2(il[313]) - , poseidon2_B_13_3(il[314]) - , poseidon2_B_14_0(il[315]) - , poseidon2_B_14_1(il[316]) - , poseidon2_B_14_2(il[317]) - , poseidon2_B_14_3(il[318]) - , poseidon2_B_15_0(il[319]) - , poseidon2_B_15_1(il[320]) - , poseidon2_B_15_2(il[321]) - , poseidon2_B_15_3(il[322]) - , poseidon2_B_16_0(il[323]) - , poseidon2_B_16_1(il[324]) - , poseidon2_B_16_2(il[325]) - , poseidon2_B_16_3(il[326]) - , poseidon2_B_17_0(il[327]) - , poseidon2_B_17_1(il[328]) - , poseidon2_B_17_2(il[329]) - , poseidon2_B_17_3(il[330]) - , poseidon2_B_18_0(il[331]) - , poseidon2_B_18_1(il[332]) - , poseidon2_B_18_2(il[333]) - , poseidon2_B_18_3(il[334]) - , poseidon2_B_19_0(il[335]) - , poseidon2_B_19_1(il[336]) - , poseidon2_B_19_2(il[337]) - , poseidon2_B_19_3(il[338]) - , poseidon2_B_20_0(il[339]) - , poseidon2_B_20_1(il[340]) - , poseidon2_B_20_2(il[341]) - , poseidon2_B_20_3(il[342]) - , poseidon2_B_21_0(il[343]) - , poseidon2_B_21_1(il[344]) - , poseidon2_B_21_2(il[345]) - , poseidon2_B_21_3(il[346]) - , poseidon2_B_22_0(il[347]) - , poseidon2_B_22_1(il[348]) - , poseidon2_B_22_2(il[349]) - , poseidon2_B_22_3(il[350]) - , poseidon2_B_23_0(il[351]) - , poseidon2_B_23_1(il[352]) - , poseidon2_B_23_2(il[353]) - , poseidon2_B_23_3(il[354]) - , poseidon2_B_24_0(il[355]) - , 
poseidon2_B_24_1(il[356]) - , poseidon2_B_24_2(il[357]) - , poseidon2_B_24_3(il[358]) - , poseidon2_B_25_0(il[359]) - , poseidon2_B_25_1(il[360]) - , poseidon2_B_25_2(il[361]) - , poseidon2_B_25_3(il[362]) - , poseidon2_B_26_0(il[363]) - , poseidon2_B_26_1(il[364]) - , poseidon2_B_26_2(il[365]) - , poseidon2_B_26_3(il[366]) - , poseidon2_B_27_0(il[367]) - , poseidon2_B_27_1(il[368]) - , poseidon2_B_27_2(il[369]) - , poseidon2_B_27_3(il[370]) - , poseidon2_B_28_0(il[371]) - , poseidon2_B_28_1(il[372]) - , poseidon2_B_28_2(il[373]) - , poseidon2_B_28_3(il[374]) - , poseidon2_B_29_0(il[375]) - , poseidon2_B_29_1(il[376]) - , poseidon2_B_29_2(il[377]) - , poseidon2_B_29_3(il[378]) - , poseidon2_B_30_0(il[379]) - , poseidon2_B_30_1(il[380]) - , poseidon2_B_30_2(il[381]) - , poseidon2_B_30_3(il[382]) - , poseidon2_B_31_0(il[383]) - , poseidon2_B_31_1(il[384]) - , poseidon2_B_31_2(il[385]) - , poseidon2_B_31_3(il[386]) - , poseidon2_B_32_0(il[387]) - , poseidon2_B_32_1(il[388]) - , poseidon2_B_32_2(il[389]) - , poseidon2_B_32_3(il[390]) - , poseidon2_B_33_0(il[391]) - , poseidon2_B_33_1(il[392]) - , poseidon2_B_33_2(il[393]) - , poseidon2_B_33_3(il[394]) - , poseidon2_B_34_0(il[395]) - , poseidon2_B_34_1(il[396]) - , poseidon2_B_34_2(il[397]) - , poseidon2_B_34_3(il[398]) - , poseidon2_B_35_0(il[399]) - , poseidon2_B_35_1(il[400]) - , poseidon2_B_35_2(il[401]) - , poseidon2_B_35_3(il[402]) - , poseidon2_B_36_0(il[403]) - , poseidon2_B_36_1(il[404]) - , poseidon2_B_36_2(il[405]) - , poseidon2_B_36_3(il[406]) - , poseidon2_B_37_0(il[407]) - , poseidon2_B_37_1(il[408]) - , poseidon2_B_37_2(il[409]) - , poseidon2_B_37_3(il[410]) - , poseidon2_B_38_0(il[411]) - , poseidon2_B_38_1(il[412]) - , poseidon2_B_38_2(il[413]) - , poseidon2_B_38_3(il[414]) - , poseidon2_B_39_0(il[415]) - , poseidon2_B_39_1(il[416]) - , poseidon2_B_39_2(il[417]) - , poseidon2_B_39_3(il[418]) - , poseidon2_B_40_0(il[419]) - , poseidon2_B_40_1(il[420]) - , poseidon2_B_40_2(il[421]) - , 
poseidon2_B_40_3(il[422]) - , poseidon2_B_41_0(il[423]) - , poseidon2_B_41_1(il[424]) - , poseidon2_B_41_2(il[425]) - , poseidon2_B_41_3(il[426]) - , poseidon2_B_42_0(il[427]) - , poseidon2_B_42_1(il[428]) - , poseidon2_B_42_2(il[429]) - , poseidon2_B_42_3(il[430]) - , poseidon2_B_43_0(il[431]) - , poseidon2_B_43_1(il[432]) - , poseidon2_B_43_2(il[433]) - , poseidon2_B_43_3(il[434]) - , poseidon2_B_44_0(il[435]) - , poseidon2_B_44_1(il[436]) - , poseidon2_B_44_2(il[437]) - , poseidon2_B_44_3(il[438]) - , poseidon2_B_45_0(il[439]) - , poseidon2_B_45_1(il[440]) - , poseidon2_B_45_2(il[441]) - , poseidon2_B_45_3(il[442]) - , poseidon2_B_46_0(il[443]) - , poseidon2_B_46_1(il[444]) - , poseidon2_B_46_2(il[445]) - , poseidon2_B_46_3(il[446]) - , poseidon2_B_47_0(il[447]) - , poseidon2_B_47_1(il[448]) - , poseidon2_B_47_2(il[449]) - , poseidon2_B_47_3(il[450]) - , poseidon2_B_48_0(il[451]) - , poseidon2_B_48_1(il[452]) - , poseidon2_B_48_2(il[453]) - , poseidon2_B_48_3(il[454]) - , poseidon2_B_49_0(il[455]) - , poseidon2_B_49_1(il[456]) - , poseidon2_B_49_2(il[457]) - , poseidon2_B_49_3(il[458]) - , poseidon2_B_4_0(il[459]) - , poseidon2_B_4_1(il[460]) - , poseidon2_B_4_2(il[461]) - , poseidon2_B_4_3(il[462]) - , poseidon2_B_50_0(il[463]) - , poseidon2_B_50_1(il[464]) - , poseidon2_B_50_2(il[465]) - , poseidon2_B_50_3(il[466]) - , poseidon2_B_51_0(il[467]) - , poseidon2_B_51_1(il[468]) - , poseidon2_B_51_2(il[469]) - , poseidon2_B_51_3(il[470]) - , poseidon2_B_52_0(il[471]) - , poseidon2_B_52_1(il[472]) - , poseidon2_B_52_2(il[473]) - , poseidon2_B_52_3(il[474]) - , poseidon2_B_53_0(il[475]) - , poseidon2_B_53_1(il[476]) - , poseidon2_B_53_2(il[477]) - , poseidon2_B_53_3(il[478]) - , poseidon2_B_54_0(il[479]) - , poseidon2_B_54_1(il[480]) - , poseidon2_B_54_2(il[481]) - , poseidon2_B_54_3(il[482]) - , poseidon2_B_55_0(il[483]) - , poseidon2_B_55_1(il[484]) - , poseidon2_B_55_2(il[485]) - , poseidon2_B_55_3(il[486]) - , poseidon2_B_56_0(il[487]) - , 
poseidon2_B_56_1(il[488]) - , poseidon2_B_56_2(il[489]) - , poseidon2_B_56_3(il[490]) - , poseidon2_B_57_0(il[491]) - , poseidon2_B_57_1(il[492]) - , poseidon2_B_57_2(il[493]) - , poseidon2_B_57_3(il[494]) - , poseidon2_B_58_0(il[495]) - , poseidon2_B_58_1(il[496]) - , poseidon2_B_58_2(il[497]) - , poseidon2_B_58_3(il[498]) - , poseidon2_B_59_0(il[499]) - , poseidon2_B_59_1(il[500]) - , poseidon2_B_59_2(il[501]) - , poseidon2_B_59_3(il[502]) - , poseidon2_B_5_0(il[503]) - , poseidon2_B_5_1(il[504]) - , poseidon2_B_5_2(il[505]) - , poseidon2_B_5_3(il[506]) - , poseidon2_B_6_0(il[507]) - , poseidon2_B_6_1(il[508]) - , poseidon2_B_6_2(il[509]) - , poseidon2_B_6_3(il[510]) - , poseidon2_B_7_0(il[511]) - , poseidon2_B_7_1(il[512]) - , poseidon2_B_7_2(il[513]) - , poseidon2_B_7_3(il[514]) - , poseidon2_B_8_0(il[515]) - , poseidon2_B_8_1(il[516]) - , poseidon2_B_8_2(il[517]) - , poseidon2_B_8_3(il[518]) - , poseidon2_B_9_0(il[519]) - , poseidon2_B_9_1(il[520]) - , poseidon2_B_9_2(il[521]) - , poseidon2_B_9_3(il[522]) - , poseidon2_EXT_LAYER_4(il[523]) - , poseidon2_EXT_LAYER_5(il[524]) - , poseidon2_EXT_LAYER_6(il[525]) - , poseidon2_EXT_LAYER_7(il[526]) - , poseidon2_T_0_4(il[527]) - , poseidon2_T_0_5(il[528]) - , poseidon2_T_0_6(il[529]) - , poseidon2_T_0_7(il[530]) - , poseidon2_T_1_4(il[531]) - , poseidon2_T_1_5(il[532]) - , poseidon2_T_1_6(il[533]) - , poseidon2_T_1_7(il[534]) - , poseidon2_T_2_4(il[535]) - , poseidon2_T_2_5(il[536]) - , poseidon2_T_2_6(il[537]) - , poseidon2_T_2_7(il[538]) - , poseidon2_T_3_4(il[539]) - , poseidon2_T_3_5(il[540]) - , poseidon2_T_3_6(il[541]) - , poseidon2_T_3_7(il[542]) - , poseidon2_T_60_4(il[543]) - , poseidon2_T_60_5(il[544]) - , poseidon2_T_60_6(il[545]) - , poseidon2_T_60_7(il[546]) - , poseidon2_T_61_4(il[547]) - , poseidon2_T_61_5(il[548]) - , poseidon2_T_61_6(il[549]) - , poseidon2_T_61_7(il[550]) - , poseidon2_T_62_4(il[551]) - , poseidon2_T_62_5(il[552]) - , poseidon2_T_62_6(il[553]) - , poseidon2_T_62_7(il[554]) - , 
poseidon2_T_63_4(il[555]) - , poseidon2_T_63_5(il[556]) - , poseidon2_T_63_6(il[557]) - , poseidon2_T_63_7(il[558]) - , poseidon2_a_0(il[559]) - , poseidon2_a_1(il[560]) - , poseidon2_a_2(il[561]) - , poseidon2_a_3(il[562]) - , poseidon2_b_0(il[563]) - , poseidon2_b_1(il[564]) - , poseidon2_b_2(il[565]) - , poseidon2_b_3(il[566]) - , poseidon2_clk(il[567]) - , poseidon2_input_addr(il[568]) - , poseidon2_mem_addr_read_a(il[569]) - , poseidon2_mem_addr_read_b(il[570]) - , poseidon2_mem_addr_read_c(il[571]) - , poseidon2_mem_addr_read_d(il[572]) - , poseidon2_mem_addr_write_a(il[573]) - , poseidon2_mem_addr_write_b(il[574]) - , poseidon2_mem_addr_write_c(il[575]) - , poseidon2_mem_addr_write_d(il[576]) - , poseidon2_output_addr(il[577]) - , poseidon2_sel_poseidon_perm(il[578]) - , sha256_clk(il[579]) - , sha256_input(il[580]) - , sha256_output(il[581]) - , sha256_sel_sha256_compression(il[582]) - , sha256_state(il[583]) - , slice_addr(il[584]) - , slice_clk(il[585]) - , slice_cnt(il[586]) - , slice_col_offset(il[587]) - , slice_one_min_inv(il[588]) - , slice_sel_cd_cpy(il[589]) - , slice_sel_mem_active(il[590]) - , slice_sel_return(il[591]) - , slice_sel_start(il[592]) - , slice_space_id(il[593]) - , slice_val(il[594]) - , lookup_byte_lengths_counts(il[595]) - , lookup_byte_operations_counts(il[596]) - , lookup_cd_value_counts(il[597]) - , lookup_ret_value_counts(il[598]) - , lookup_opcode_gas_counts(il[599]) - , range_check_l2_gas_hi_counts(il[600]) - , range_check_l2_gas_lo_counts(il[601]) - , range_check_da_gas_hi_counts(il[602]) - , range_check_da_gas_lo_counts(il[603]) - , kernel_output_lookup_counts(il[604]) - , lookup_into_kernel_counts(il[605]) - , incl_main_tag_err_counts(il[606]) - , incl_mem_tag_err_counts(il[607]) - , lookup_mem_rng_chk_lo_counts(il[608]) - , lookup_mem_rng_chk_mid_counts(il[609]) - , lookup_mem_rng_chk_hi_counts(il[610]) - , lookup_pow_2_0_counts(il[611]) - , lookup_pow_2_1_counts(il[612]) - , lookup_u8_0_counts(il[613]) - , 
lookup_u8_1_counts(il[614]) - , lookup_u16_0_counts(il[615]) - , lookup_u16_1_counts(il[616]) - , lookup_u16_2_counts(il[617]) - , lookup_u16_3_counts(il[618]) - , lookup_u16_4_counts(il[619]) - , lookup_u16_5_counts(il[620]) - , lookup_u16_6_counts(il[621]) - , lookup_u16_7_counts(il[622]) - , lookup_u16_8_counts(il[623]) - , lookup_u16_9_counts(il[624]) - , lookup_u16_10_counts(il[625]) - , lookup_u16_11_counts(il[626]) - , lookup_u16_12_counts(il[627]) - , lookup_u16_13_counts(il[628]) - , lookup_u16_14_counts(il[629]) - , lookup_div_u16_0_counts(il[630]) - , lookup_div_u16_1_counts(il[631]) - , lookup_div_u16_2_counts(il[632]) - , lookup_div_u16_3_counts(il[633]) - , lookup_div_u16_4_counts(il[634]) - , lookup_div_u16_5_counts(il[635]) - , lookup_div_u16_6_counts(il[636]) - , lookup_div_u16_7_counts(il[637]) - , perm_pos_mem_read_a(il[638]) - , perm_pos_mem_read_b(il[639]) - , perm_pos_mem_read_c(il[640]) - , perm_pos_mem_read_d(il[641]) - , perm_pos_mem_write_a(il[642]) - , perm_pos_mem_write_b(il[643]) - , perm_pos_mem_write_c(il[644]) - , perm_pos_mem_write_d(il[645]) - , perm_slice_mem(il[646]) - , perm_main_alu(il[647]) - , perm_main_bin(il[648]) - , perm_main_conv(il[649]) - , perm_main_pos2_perm(il[650]) - , perm_main_pedersen(il[651]) - , perm_main_slice(il[652]) - , perm_main_mem_a(il[653]) - , perm_main_mem_b(il[654]) - , perm_main_mem_c(il[655]) - , perm_main_mem_d(il[656]) - , perm_main_mem_ind_addr_a(il[657]) - , perm_main_mem_ind_addr_b(il[658]) - , perm_main_mem_ind_addr_c(il[659]) - , perm_main_mem_ind_addr_d(il[660]) - , lookup_byte_lengths(il[661]) - , lookup_byte_operations(il[662]) - , lookup_cd_value(il[663]) - , lookup_ret_value(il[664]) - , lookup_opcode_gas(il[665]) - , range_check_l2_gas_hi(il[666]) - , range_check_l2_gas_lo(il[667]) - , range_check_da_gas_hi(il[668]) - , range_check_da_gas_lo(il[669]) - , kernel_output_lookup(il[670]) - , lookup_into_kernel(il[671]) - , incl_main_tag_err(il[672]) - , incl_mem_tag_err(il[673]) - , 
lookup_mem_rng_chk_lo(il[674]) - , lookup_mem_rng_chk_mid(il[675]) - , lookup_mem_rng_chk_hi(il[676]) - , lookup_pow_2_0(il[677]) - , lookup_pow_2_1(il[678]) - , lookup_u8_0(il[679]) - , lookup_u8_1(il[680]) - , lookup_u16_0(il[681]) - , lookup_u16_1(il[682]) - , lookup_u16_2(il[683]) - , lookup_u16_3(il[684]) - , lookup_u16_4(il[685]) - , lookup_u16_5(il[686]) - , lookup_u16_6(il[687]) - , lookup_u16_7(il[688]) - , lookup_u16_8(il[689]) - , lookup_u16_9(il[690]) - , lookup_u16_10(il[691]) - , lookup_u16_11(il[692]) - , lookup_u16_12(il[693]) - , lookup_u16_13(il[694]) - , lookup_u16_14(il[695]) - , lookup_div_u16_0(il[696]) - , lookup_div_u16_1(il[697]) - , lookup_div_u16_2(il[698]) - , lookup_div_u16_3(il[699]) - , lookup_div_u16_4(il[700]) - , lookup_div_u16_5(il[701]) - , lookup_div_u16_6(il[702]) - , lookup_div_u16_7(il[703]) - , alu_a_hi_shift(il[704]) - , alu_a_lo_shift(il[705]) - , alu_b_hi_shift(il[706]) - , alu_b_lo_shift(il[707]) - , alu_cmp_rng_ctr_shift(il[708]) - , alu_div_u16_r0_shift(il[709]) - , alu_div_u16_r1_shift(il[710]) - , alu_div_u16_r2_shift(il[711]) - , alu_div_u16_r3_shift(il[712]) - , alu_div_u16_r4_shift(il[713]) - , alu_div_u16_r5_shift(il[714]) - , alu_div_u16_r6_shift(il[715]) - , alu_div_u16_r7_shift(il[716]) - , alu_op_add_shift(il[717]) - , alu_op_cast_prev_shift(il[718]) - , alu_op_cast_shift(il[719]) - , alu_op_div_shift(il[720]) - , alu_op_mul_shift(il[721]) - , alu_op_shl_shift(il[722]) - , alu_op_shr_shift(il[723]) - , alu_op_sub_shift(il[724]) - , alu_p_sub_a_hi_shift(il[725]) - , alu_p_sub_a_lo_shift(il[726]) - , alu_p_sub_b_hi_shift(il[727]) - , alu_p_sub_b_lo_shift(il[728]) - , alu_sel_alu_shift(il[729]) - , alu_sel_cmp_shift(il[730]) - , alu_sel_div_rng_chk_shift(il[731]) - , alu_sel_rng_chk_lookup_shift(il[732]) - , alu_sel_rng_chk_shift(il[733]) - , alu_u16_r0_shift(il[734]) - , alu_u16_r1_shift(il[735]) - , alu_u16_r2_shift(il[736]) - , alu_u16_r3_shift(il[737]) - , alu_u16_r4_shift(il[738]) - , 
alu_u16_r5_shift(il[739]) - , alu_u16_r6_shift(il[740]) - , alu_u8_r0_shift(il[741]) - , alu_u8_r1_shift(il[742]) - , binary_acc_ia_shift(il[743]) - , binary_acc_ib_shift(il[744]) - , binary_acc_ic_shift(il[745]) - , binary_mem_tag_ctr_shift(il[746]) - , binary_op_id_shift(il[747]) - , kernel_emit_l2_to_l1_msg_write_offset_shift(il[748]) - , kernel_emit_note_hash_write_offset_shift(il[749]) - , kernel_emit_nullifier_write_offset_shift(il[750]) - , kernel_emit_unencrypted_log_write_offset_shift(il[751]) - , kernel_l1_to_l2_msg_exists_write_offset_shift(il[752]) - , kernel_note_hash_exist_write_offset_shift(il[753]) - , kernel_nullifier_exists_write_offset_shift(il[754]) - , kernel_nullifier_non_exists_write_offset_shift(il[755]) - , kernel_side_effect_counter_shift(il[756]) - , kernel_sload_write_offset_shift(il[757]) - , kernel_sstore_write_offset_shift(il[758]) - , main_da_gas_remaining_shift(il[759]) - , main_internal_return_ptr_shift(il[760]) - , main_l2_gas_remaining_shift(il[761]) - , main_pc_shift(il[762]) - , mem_glob_addr_shift(il[763]) - , mem_rw_shift(il[764]) - , mem_sel_mem_shift(il[765]) - , mem_tag_shift(il[766]) - , mem_tsp_shift(il[767]) - , mem_val_shift(il[768]) - , slice_addr_shift(il[769]) - , slice_clk_shift(il[770]) - , slice_cnt_shift(il[771]) - , slice_col_offset_shift(il[772]) - , slice_sel_cd_cpy_shift(il[773]) - , slice_sel_mem_active_shift(il[774]) - , slice_sel_return_shift(il[775]) - , slice_sel_start_shift(il[776]) - , slice_space_id_shift(il[777]) + , main_sel_mem_op_b(il[182]) + , main_sel_mem_op_c(il[183]) + , main_sel_mem_op_d(il[184]) + , main_sel_mov_ia_to_ic(il[185]) + , main_sel_mov_ib_to_ic(il[186]) + , main_sel_op_add(il[187]) + , main_sel_op_address(il[188]) + , main_sel_op_and(il[189]) + , main_sel_op_block_number(il[190]) + , main_sel_op_calldata_copy(il[191]) + , main_sel_op_cast(il[192]) + , main_sel_op_chain_id(il[193]) + , main_sel_op_cmov(il[194]) + , main_sel_op_coinbase(il[195]) + , main_sel_op_dagasleft(il[196]) + 
, main_sel_op_div(il[197]) + , main_sel_op_emit_l2_to_l1_msg(il[198]) + , main_sel_op_emit_note_hash(il[199]) + , main_sel_op_emit_nullifier(il[200]) + , main_sel_op_emit_unencrypted_log(il[201]) + , main_sel_op_eq(il[202]) + , main_sel_op_external_call(il[203]) + , main_sel_op_external_return(il[204]) + , main_sel_op_fdiv(il[205]) + , main_sel_op_fee_per_da_gas(il[206]) + , main_sel_op_fee_per_l2_gas(il[207]) + , main_sel_op_function_selector(il[208]) + , main_sel_op_get_contract_instance(il[209]) + , main_sel_op_halt(il[210]) + , main_sel_op_internal_call(il[211]) + , main_sel_op_internal_return(il[212]) + , main_sel_op_jump(il[213]) + , main_sel_op_jumpi(il[214]) + , main_sel_op_keccak(il[215]) + , main_sel_op_l1_to_l2_msg_exists(il[216]) + , main_sel_op_l2gasleft(il[217]) + , main_sel_op_lt(il[218]) + , main_sel_op_lte(il[219]) + , main_sel_op_mov(il[220]) + , main_sel_op_mul(il[221]) + , main_sel_op_not(il[222]) + , main_sel_op_note_hash_exists(il[223]) + , main_sel_op_nullifier_exists(il[224]) + , main_sel_op_or(il[225]) + , main_sel_op_pedersen(il[226]) + , main_sel_op_poseidon2(il[227]) + , main_sel_op_radix_le(il[228]) + , main_sel_op_sender(il[229]) + , main_sel_op_sha256(il[230]) + , main_sel_op_shl(il[231]) + , main_sel_op_shr(il[232]) + , main_sel_op_sload(il[233]) + , main_sel_op_sstore(il[234]) + , main_sel_op_storage_address(il[235]) + , main_sel_op_sub(il[236]) + , main_sel_op_timestamp(il[237]) + , main_sel_op_transaction_fee(il[238]) + , main_sel_op_version(il[239]) + , main_sel_op_xor(il[240]) + , main_sel_q_kernel_lookup(il[241]) + , main_sel_q_kernel_output_lookup(il[242]) + , main_sel_resolve_ind_addr_a(il[243]) + , main_sel_resolve_ind_addr_b(il[244]) + , main_sel_resolve_ind_addr_c(il[245]) + , main_sel_resolve_ind_addr_d(il[246]) + , main_sel_returndata(il[247]) + , main_sel_rng_16(il[248]) + , main_sel_rng_8(il[249]) + , main_sel_slice_gadget(il[250]) + , main_space_id(il[251]) + , main_tag_err(il[252]) + , main_w_in_tag(il[253]) + , 
mem_addr(il[254]) + , mem_clk(il[255]) + , mem_diff_hi(il[256]) + , mem_diff_lo(il[257]) + , mem_diff_mid(il[258]) + , mem_glob_addr(il[259]) + , mem_last(il[260]) + , mem_lastAccess(il[261]) + , mem_one_min_inv(il[262]) + , mem_r_in_tag(il[263]) + , mem_rw(il[264]) + , mem_sel_mem(il[265]) + , mem_sel_mov_ia_to_ic(il[266]) + , mem_sel_mov_ib_to_ic(il[267]) + , mem_sel_op_a(il[268]) + , mem_sel_op_b(il[269]) + , mem_sel_op_c(il[270]) + , mem_sel_op_cmov(il[271]) + , mem_sel_op_d(il[272]) + , mem_sel_op_poseidon_read_a(il[273]) + , mem_sel_op_poseidon_read_b(il[274]) + , mem_sel_op_poseidon_read_c(il[275]) + , mem_sel_op_poseidon_read_d(il[276]) + , mem_sel_op_poseidon_write_a(il[277]) + , mem_sel_op_poseidon_write_b(il[278]) + , mem_sel_op_poseidon_write_c(il[279]) + , mem_sel_op_poseidon_write_d(il[280]) + , mem_sel_op_slice(il[281]) + , mem_sel_resolve_ind_addr_a(il[282]) + , mem_sel_resolve_ind_addr_b(il[283]) + , mem_sel_resolve_ind_addr_c(il[284]) + , mem_sel_resolve_ind_addr_d(il[285]) + , mem_sel_rng_chk(il[286]) + , mem_skip_check_tag(il[287]) + , mem_space_id(il[288]) + , mem_tag(il[289]) + , mem_tag_err(il[290]) + , mem_tsp(il[291]) + , mem_val(il[292]) + , mem_w_in_tag(il[293]) + , pedersen_clk(il[294]) + , pedersen_input(il[295]) + , pedersen_output(il[296]) + , pedersen_sel_pedersen(il[297]) + , poseidon2_B_10_0(il[298]) + , poseidon2_B_10_1(il[299]) + , poseidon2_B_10_2(il[300]) + , poseidon2_B_10_3(il[301]) + , poseidon2_B_11_0(il[302]) + , poseidon2_B_11_1(il[303]) + , poseidon2_B_11_2(il[304]) + , poseidon2_B_11_3(il[305]) + , poseidon2_B_12_0(il[306]) + , poseidon2_B_12_1(il[307]) + , poseidon2_B_12_2(il[308]) + , poseidon2_B_12_3(il[309]) + , poseidon2_B_13_0(il[310]) + , poseidon2_B_13_1(il[311]) + , poseidon2_B_13_2(il[312]) + , poseidon2_B_13_3(il[313]) + , poseidon2_B_14_0(il[314]) + , poseidon2_B_14_1(il[315]) + , poseidon2_B_14_2(il[316]) + , poseidon2_B_14_3(il[317]) + , poseidon2_B_15_0(il[318]) + , poseidon2_B_15_1(il[319]) + , 
poseidon2_B_15_2(il[320]) + , poseidon2_B_15_3(il[321]) + , poseidon2_B_16_0(il[322]) + , poseidon2_B_16_1(il[323]) + , poseidon2_B_16_2(il[324]) + , poseidon2_B_16_3(il[325]) + , poseidon2_B_17_0(il[326]) + , poseidon2_B_17_1(il[327]) + , poseidon2_B_17_2(il[328]) + , poseidon2_B_17_3(il[329]) + , poseidon2_B_18_0(il[330]) + , poseidon2_B_18_1(il[331]) + , poseidon2_B_18_2(il[332]) + , poseidon2_B_18_3(il[333]) + , poseidon2_B_19_0(il[334]) + , poseidon2_B_19_1(il[335]) + , poseidon2_B_19_2(il[336]) + , poseidon2_B_19_3(il[337]) + , poseidon2_B_20_0(il[338]) + , poseidon2_B_20_1(il[339]) + , poseidon2_B_20_2(il[340]) + , poseidon2_B_20_3(il[341]) + , poseidon2_B_21_0(il[342]) + , poseidon2_B_21_1(il[343]) + , poseidon2_B_21_2(il[344]) + , poseidon2_B_21_3(il[345]) + , poseidon2_B_22_0(il[346]) + , poseidon2_B_22_1(il[347]) + , poseidon2_B_22_2(il[348]) + , poseidon2_B_22_3(il[349]) + , poseidon2_B_23_0(il[350]) + , poseidon2_B_23_1(il[351]) + , poseidon2_B_23_2(il[352]) + , poseidon2_B_23_3(il[353]) + , poseidon2_B_24_0(il[354]) + , poseidon2_B_24_1(il[355]) + , poseidon2_B_24_2(il[356]) + , poseidon2_B_24_3(il[357]) + , poseidon2_B_25_0(il[358]) + , poseidon2_B_25_1(il[359]) + , poseidon2_B_25_2(il[360]) + , poseidon2_B_25_3(il[361]) + , poseidon2_B_26_0(il[362]) + , poseidon2_B_26_1(il[363]) + , poseidon2_B_26_2(il[364]) + , poseidon2_B_26_3(il[365]) + , poseidon2_B_27_0(il[366]) + , poseidon2_B_27_1(il[367]) + , poseidon2_B_27_2(il[368]) + , poseidon2_B_27_3(il[369]) + , poseidon2_B_28_0(il[370]) + , poseidon2_B_28_1(il[371]) + , poseidon2_B_28_2(il[372]) + , poseidon2_B_28_3(il[373]) + , poseidon2_B_29_0(il[374]) + , poseidon2_B_29_1(il[375]) + , poseidon2_B_29_2(il[376]) + , poseidon2_B_29_3(il[377]) + , poseidon2_B_30_0(il[378]) + , poseidon2_B_30_1(il[379]) + , poseidon2_B_30_2(il[380]) + , poseidon2_B_30_3(il[381]) + , poseidon2_B_31_0(il[382]) + , poseidon2_B_31_1(il[383]) + , poseidon2_B_31_2(il[384]) + , poseidon2_B_31_3(il[385]) + , 
poseidon2_B_32_0(il[386]) + , poseidon2_B_32_1(il[387]) + , poseidon2_B_32_2(il[388]) + , poseidon2_B_32_3(il[389]) + , poseidon2_B_33_0(il[390]) + , poseidon2_B_33_1(il[391]) + , poseidon2_B_33_2(il[392]) + , poseidon2_B_33_3(il[393]) + , poseidon2_B_34_0(il[394]) + , poseidon2_B_34_1(il[395]) + , poseidon2_B_34_2(il[396]) + , poseidon2_B_34_3(il[397]) + , poseidon2_B_35_0(il[398]) + , poseidon2_B_35_1(il[399]) + , poseidon2_B_35_2(il[400]) + , poseidon2_B_35_3(il[401]) + , poseidon2_B_36_0(il[402]) + , poseidon2_B_36_1(il[403]) + , poseidon2_B_36_2(il[404]) + , poseidon2_B_36_3(il[405]) + , poseidon2_B_37_0(il[406]) + , poseidon2_B_37_1(il[407]) + , poseidon2_B_37_2(il[408]) + , poseidon2_B_37_3(il[409]) + , poseidon2_B_38_0(il[410]) + , poseidon2_B_38_1(il[411]) + , poseidon2_B_38_2(il[412]) + , poseidon2_B_38_3(il[413]) + , poseidon2_B_39_0(il[414]) + , poseidon2_B_39_1(il[415]) + , poseidon2_B_39_2(il[416]) + , poseidon2_B_39_3(il[417]) + , poseidon2_B_40_0(il[418]) + , poseidon2_B_40_1(il[419]) + , poseidon2_B_40_2(il[420]) + , poseidon2_B_40_3(il[421]) + , poseidon2_B_41_0(il[422]) + , poseidon2_B_41_1(il[423]) + , poseidon2_B_41_2(il[424]) + , poseidon2_B_41_3(il[425]) + , poseidon2_B_42_0(il[426]) + , poseidon2_B_42_1(il[427]) + , poseidon2_B_42_2(il[428]) + , poseidon2_B_42_3(il[429]) + , poseidon2_B_43_0(il[430]) + , poseidon2_B_43_1(il[431]) + , poseidon2_B_43_2(il[432]) + , poseidon2_B_43_3(il[433]) + , poseidon2_B_44_0(il[434]) + , poseidon2_B_44_1(il[435]) + , poseidon2_B_44_2(il[436]) + , poseidon2_B_44_3(il[437]) + , poseidon2_B_45_0(il[438]) + , poseidon2_B_45_1(il[439]) + , poseidon2_B_45_2(il[440]) + , poseidon2_B_45_3(il[441]) + , poseidon2_B_46_0(il[442]) + , poseidon2_B_46_1(il[443]) + , poseidon2_B_46_2(il[444]) + , poseidon2_B_46_3(il[445]) + , poseidon2_B_47_0(il[446]) + , poseidon2_B_47_1(il[447]) + , poseidon2_B_47_2(il[448]) + , poseidon2_B_47_3(il[449]) + , poseidon2_B_48_0(il[450]) + , poseidon2_B_48_1(il[451]) + , 
poseidon2_B_48_2(il[452]) + , poseidon2_B_48_3(il[453]) + , poseidon2_B_49_0(il[454]) + , poseidon2_B_49_1(il[455]) + , poseidon2_B_49_2(il[456]) + , poseidon2_B_49_3(il[457]) + , poseidon2_B_4_0(il[458]) + , poseidon2_B_4_1(il[459]) + , poseidon2_B_4_2(il[460]) + , poseidon2_B_4_3(il[461]) + , poseidon2_B_50_0(il[462]) + , poseidon2_B_50_1(il[463]) + , poseidon2_B_50_2(il[464]) + , poseidon2_B_50_3(il[465]) + , poseidon2_B_51_0(il[466]) + , poseidon2_B_51_1(il[467]) + , poseidon2_B_51_2(il[468]) + , poseidon2_B_51_3(il[469]) + , poseidon2_B_52_0(il[470]) + , poseidon2_B_52_1(il[471]) + , poseidon2_B_52_2(il[472]) + , poseidon2_B_52_3(il[473]) + , poseidon2_B_53_0(il[474]) + , poseidon2_B_53_1(il[475]) + , poseidon2_B_53_2(il[476]) + , poseidon2_B_53_3(il[477]) + , poseidon2_B_54_0(il[478]) + , poseidon2_B_54_1(il[479]) + , poseidon2_B_54_2(il[480]) + , poseidon2_B_54_3(il[481]) + , poseidon2_B_55_0(il[482]) + , poseidon2_B_55_1(il[483]) + , poseidon2_B_55_2(il[484]) + , poseidon2_B_55_3(il[485]) + , poseidon2_B_56_0(il[486]) + , poseidon2_B_56_1(il[487]) + , poseidon2_B_56_2(il[488]) + , poseidon2_B_56_3(il[489]) + , poseidon2_B_57_0(il[490]) + , poseidon2_B_57_1(il[491]) + , poseidon2_B_57_2(il[492]) + , poseidon2_B_57_3(il[493]) + , poseidon2_B_58_0(il[494]) + , poseidon2_B_58_1(il[495]) + , poseidon2_B_58_2(il[496]) + , poseidon2_B_58_3(il[497]) + , poseidon2_B_59_0(il[498]) + , poseidon2_B_59_1(il[499]) + , poseidon2_B_59_2(il[500]) + , poseidon2_B_59_3(il[501]) + , poseidon2_B_5_0(il[502]) + , poseidon2_B_5_1(il[503]) + , poseidon2_B_5_2(il[504]) + , poseidon2_B_5_3(il[505]) + , poseidon2_B_6_0(il[506]) + , poseidon2_B_6_1(il[507]) + , poseidon2_B_6_2(il[508]) + , poseidon2_B_6_3(il[509]) + , poseidon2_B_7_0(il[510]) + , poseidon2_B_7_1(il[511]) + , poseidon2_B_7_2(il[512]) + , poseidon2_B_7_3(il[513]) + , poseidon2_B_8_0(il[514]) + , poseidon2_B_8_1(il[515]) + , poseidon2_B_8_2(il[516]) + , poseidon2_B_8_3(il[517]) + , poseidon2_B_9_0(il[518]) + , 
poseidon2_B_9_1(il[519]) + , poseidon2_B_9_2(il[520]) + , poseidon2_B_9_3(il[521]) + , poseidon2_EXT_LAYER_4(il[522]) + , poseidon2_EXT_LAYER_5(il[523]) + , poseidon2_EXT_LAYER_6(il[524]) + , poseidon2_EXT_LAYER_7(il[525]) + , poseidon2_T_0_4(il[526]) + , poseidon2_T_0_5(il[527]) + , poseidon2_T_0_6(il[528]) + , poseidon2_T_0_7(il[529]) + , poseidon2_T_1_4(il[530]) + , poseidon2_T_1_5(il[531]) + , poseidon2_T_1_6(il[532]) + , poseidon2_T_1_7(il[533]) + , poseidon2_T_2_4(il[534]) + , poseidon2_T_2_5(il[535]) + , poseidon2_T_2_6(il[536]) + , poseidon2_T_2_7(il[537]) + , poseidon2_T_3_4(il[538]) + , poseidon2_T_3_5(il[539]) + , poseidon2_T_3_6(il[540]) + , poseidon2_T_3_7(il[541]) + , poseidon2_T_60_4(il[542]) + , poseidon2_T_60_5(il[543]) + , poseidon2_T_60_6(il[544]) + , poseidon2_T_60_7(il[545]) + , poseidon2_T_61_4(il[546]) + , poseidon2_T_61_5(il[547]) + , poseidon2_T_61_6(il[548]) + , poseidon2_T_61_7(il[549]) + , poseidon2_T_62_4(il[550]) + , poseidon2_T_62_5(il[551]) + , poseidon2_T_62_6(il[552]) + , poseidon2_T_62_7(il[553]) + , poseidon2_T_63_4(il[554]) + , poseidon2_T_63_5(il[555]) + , poseidon2_T_63_6(il[556]) + , poseidon2_T_63_7(il[557]) + , poseidon2_a_0(il[558]) + , poseidon2_a_1(il[559]) + , poseidon2_a_2(il[560]) + , poseidon2_a_3(il[561]) + , poseidon2_b_0(il[562]) + , poseidon2_b_1(il[563]) + , poseidon2_b_2(il[564]) + , poseidon2_b_3(il[565]) + , poseidon2_clk(il[566]) + , poseidon2_input_addr(il[567]) + , poseidon2_mem_addr_read_a(il[568]) + , poseidon2_mem_addr_read_b(il[569]) + , poseidon2_mem_addr_read_c(il[570]) + , poseidon2_mem_addr_read_d(il[571]) + , poseidon2_mem_addr_write_a(il[572]) + , poseidon2_mem_addr_write_b(il[573]) + , poseidon2_mem_addr_write_c(il[574]) + , poseidon2_mem_addr_write_d(il[575]) + , poseidon2_output_addr(il[576]) + , poseidon2_sel_poseidon_perm(il[577]) + , sha256_clk(il[578]) + , sha256_input(il[579]) + , sha256_output(il[580]) + , sha256_sel_sha256_compression(il[581]) + , sha256_state(il[582]) + , 
slice_addr(il[583]) + , slice_clk(il[584]) + , slice_cnt(il[585]) + , slice_col_offset(il[586]) + , slice_one_min_inv(il[587]) + , slice_sel_cd_cpy(il[588]) + , slice_sel_mem_active(il[589]) + , slice_sel_return(il[590]) + , slice_sel_start(il[591]) + , slice_space_id(il[592]) + , slice_val(il[593]) + , lookup_byte_lengths_counts(il[594]) + , lookup_byte_operations_counts(il[595]) + , lookup_cd_value_counts(il[596]) + , lookup_ret_value_counts(il[597]) + , lookup_opcode_gas_counts(il[598]) + , range_check_l2_gas_hi_counts(il[599]) + , range_check_l2_gas_lo_counts(il[600]) + , range_check_da_gas_hi_counts(il[601]) + , range_check_da_gas_lo_counts(il[602]) + , kernel_output_lookup_counts(il[603]) + , lookup_into_kernel_counts(il[604]) + , incl_main_tag_err_counts(il[605]) + , incl_mem_tag_err_counts(il[606]) + , lookup_mem_rng_chk_lo_counts(il[607]) + , lookup_mem_rng_chk_mid_counts(il[608]) + , lookup_mem_rng_chk_hi_counts(il[609]) + , lookup_pow_2_0_counts(il[610]) + , lookup_pow_2_1_counts(il[611]) + , lookup_u8_0_counts(il[612]) + , lookup_u8_1_counts(il[613]) + , lookup_u16_0_counts(il[614]) + , lookup_u16_1_counts(il[615]) + , lookup_u16_2_counts(il[616]) + , lookup_u16_3_counts(il[617]) + , lookup_u16_4_counts(il[618]) + , lookup_u16_5_counts(il[619]) + , lookup_u16_6_counts(il[620]) + , lookup_u16_7_counts(il[621]) + , lookup_u16_8_counts(il[622]) + , lookup_u16_9_counts(il[623]) + , lookup_u16_10_counts(il[624]) + , lookup_u16_11_counts(il[625]) + , lookup_u16_12_counts(il[626]) + , lookup_u16_13_counts(il[627]) + , lookup_u16_14_counts(il[628]) + , lookup_div_u16_0_counts(il[629]) + , lookup_div_u16_1_counts(il[630]) + , lookup_div_u16_2_counts(il[631]) + , lookup_div_u16_3_counts(il[632]) + , lookup_div_u16_4_counts(il[633]) + , lookup_div_u16_5_counts(il[634]) + , lookup_div_u16_6_counts(il[635]) + , lookup_div_u16_7_counts(il[636]) + , perm_pos_mem_read_a(il[637]) + , perm_pos_mem_read_b(il[638]) + , perm_pos_mem_read_c(il[639]) + , 
perm_pos_mem_read_d(il[640]) + , perm_pos_mem_write_a(il[641]) + , perm_pos_mem_write_b(il[642]) + , perm_pos_mem_write_c(il[643]) + , perm_pos_mem_write_d(il[644]) + , perm_slice_mem(il[645]) + , perm_main_alu(il[646]) + , perm_main_bin(il[647]) + , perm_main_conv(il[648]) + , perm_main_pos2_perm(il[649]) + , perm_main_pedersen(il[650]) + , perm_main_slice(il[651]) + , perm_main_mem_a(il[652]) + , perm_main_mem_b(il[653]) + , perm_main_mem_c(il[654]) + , perm_main_mem_d(il[655]) + , perm_main_mem_ind_addr_a(il[656]) + , perm_main_mem_ind_addr_b(il[657]) + , perm_main_mem_ind_addr_c(il[658]) + , perm_main_mem_ind_addr_d(il[659]) + , lookup_byte_lengths(il[660]) + , lookup_byte_operations(il[661]) + , lookup_cd_value(il[662]) + , lookup_ret_value(il[663]) + , lookup_opcode_gas(il[664]) + , range_check_l2_gas_hi(il[665]) + , range_check_l2_gas_lo(il[666]) + , range_check_da_gas_hi(il[667]) + , range_check_da_gas_lo(il[668]) + , kernel_output_lookup(il[669]) + , lookup_into_kernel(il[670]) + , incl_main_tag_err(il[671]) + , incl_mem_tag_err(il[672]) + , lookup_mem_rng_chk_lo(il[673]) + , lookup_mem_rng_chk_mid(il[674]) + , lookup_mem_rng_chk_hi(il[675]) + , lookup_pow_2_0(il[676]) + , lookup_pow_2_1(il[677]) + , lookup_u8_0(il[678]) + , lookup_u8_1(il[679]) + , lookup_u16_0(il[680]) + , lookup_u16_1(il[681]) + , lookup_u16_2(il[682]) + , lookup_u16_3(il[683]) + , lookup_u16_4(il[684]) + , lookup_u16_5(il[685]) + , lookup_u16_6(il[686]) + , lookup_u16_7(il[687]) + , lookup_u16_8(il[688]) + , lookup_u16_9(il[689]) + , lookup_u16_10(il[690]) + , lookup_u16_11(il[691]) + , lookup_u16_12(il[692]) + , lookup_u16_13(il[693]) + , lookup_u16_14(il[694]) + , lookup_div_u16_0(il[695]) + , lookup_div_u16_1(il[696]) + , lookup_div_u16_2(il[697]) + , lookup_div_u16_3(il[698]) + , lookup_div_u16_4(il[699]) + , lookup_div_u16_5(il[700]) + , lookup_div_u16_6(il[701]) + , lookup_div_u16_7(il[702]) + , alu_a_hi_shift(il[703]) + , alu_a_lo_shift(il[704]) + , alu_b_hi_shift(il[705]) + , 
alu_b_lo_shift(il[706]) + , alu_cmp_rng_ctr_shift(il[707]) + , alu_div_u16_r0_shift(il[708]) + , alu_div_u16_r1_shift(il[709]) + , alu_div_u16_r2_shift(il[710]) + , alu_div_u16_r3_shift(il[711]) + , alu_div_u16_r4_shift(il[712]) + , alu_div_u16_r5_shift(il[713]) + , alu_div_u16_r6_shift(il[714]) + , alu_div_u16_r7_shift(il[715]) + , alu_op_add_shift(il[716]) + , alu_op_cast_prev_shift(il[717]) + , alu_op_cast_shift(il[718]) + , alu_op_div_shift(il[719]) + , alu_op_mul_shift(il[720]) + , alu_op_shl_shift(il[721]) + , alu_op_shr_shift(il[722]) + , alu_op_sub_shift(il[723]) + , alu_p_sub_a_hi_shift(il[724]) + , alu_p_sub_a_lo_shift(il[725]) + , alu_p_sub_b_hi_shift(il[726]) + , alu_p_sub_b_lo_shift(il[727]) + , alu_sel_alu_shift(il[728]) + , alu_sel_cmp_shift(il[729]) + , alu_sel_div_rng_chk_shift(il[730]) + , alu_sel_rng_chk_lookup_shift(il[731]) + , alu_sel_rng_chk_shift(il[732]) + , alu_u16_r0_shift(il[733]) + , alu_u16_r1_shift(il[734]) + , alu_u16_r2_shift(il[735]) + , alu_u16_r3_shift(il[736]) + , alu_u16_r4_shift(il[737]) + , alu_u16_r5_shift(il[738]) + , alu_u16_r6_shift(il[739]) + , alu_u8_r0_shift(il[740]) + , alu_u8_r1_shift(il[741]) + , binary_acc_ia_shift(il[742]) + , binary_acc_ib_shift(il[743]) + , binary_acc_ic_shift(il[744]) + , binary_mem_tag_ctr_shift(il[745]) + , binary_op_id_shift(il[746]) + , kernel_emit_l2_to_l1_msg_write_offset_shift(il[747]) + , kernel_emit_note_hash_write_offset_shift(il[748]) + , kernel_emit_nullifier_write_offset_shift(il[749]) + , kernel_emit_unencrypted_log_write_offset_shift(il[750]) + , kernel_l1_to_l2_msg_exists_write_offset_shift(il[751]) + , kernel_note_hash_exist_write_offset_shift(il[752]) + , kernel_nullifier_exists_write_offset_shift(il[753]) + , kernel_nullifier_non_exists_write_offset_shift(il[754]) + , kernel_side_effect_counter_shift(il[755]) + , kernel_sload_write_offset_shift(il[756]) + , kernel_sstore_write_offset_shift(il[757]) + , main_da_gas_remaining_shift(il[758]) + , 
main_internal_return_ptr_shift(il[759]) + , main_l2_gas_remaining_shift(il[760]) + , main_pc_shift(il[761]) + , mem_glob_addr_shift(il[762]) + , mem_rw_shift(il[763]) + , mem_sel_mem_shift(il[764]) + , mem_tag_shift(il[765]) + , mem_tsp_shift(il[766]) + , mem_val_shift(il[767]) + , slice_addr_shift(il[768]) + , slice_clk_shift(il[769]) + , slice_cnt_shift(il[770]) + , slice_col_offset_shift(il[771]) + , slice_sel_cd_cpy_shift(il[772]) + , slice_sel_mem_active_shift(il[773]) + , slice_sel_return_shift(il[774]) + , slice_sel_start_shift(il[775]) + , slice_space_id_shift(il[776]) {} AvmFlavor::ProverPolynomials::ProverPolynomials(ProvingKey& proving_key) @@ -981,7 +980,6 @@ AvmFlavor::AllConstRefValues AvmFlavor::ProverPolynomials::get_row(size_t row_id main_sel_gas_accounting_active[row_idx], main_sel_last[row_idx], main_sel_mem_op_a[row_idx], - main_sel_mem_op_activate_gas[row_idx], main_sel_mem_op_b[row_idx], main_sel_mem_op_c[row_idx], main_sel_mem_op_d[row_idx], @@ -1763,7 +1761,6 @@ AvmFlavor::CommitmentLabels::CommitmentLabels() Base::main_sel_gas_accounting_active = "MAIN_SEL_GAS_ACCOUNTING_ACTIVE"; Base::main_sel_last = "MAIN_SEL_LAST"; Base::main_sel_mem_op_a = "MAIN_SEL_MEM_OP_A"; - Base::main_sel_mem_op_activate_gas = "MAIN_SEL_MEM_OP_ACTIVATE_GAS"; Base::main_sel_mem_op_b = "MAIN_SEL_MEM_OP_B"; Base::main_sel_mem_op_c = "MAIN_SEL_MEM_OP_C"; Base::main_sel_mem_op_d = "MAIN_SEL_MEM_OP_D"; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.hpp index da7999d51b3..7661951fbda 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.hpp @@ -102,7 +102,7 @@ template using tuple_cat_t = decltype(std::tuple_cat(std:: // The entities that will be used in the flavor. 
// clang-format off #define PRECOMPUTED_ENTITIES byte_lookup_sel_bin, byte_lookup_table_byte_lengths, byte_lookup_table_in_tags, byte_lookup_table_input_a, byte_lookup_table_input_b, byte_lookup_table_op_id, byte_lookup_table_output, gas_da_gas_fixed_table, gas_l2_gas_fixed_table, gas_sel_gas_cost, main_clk, main_sel_first, main_zeroes, powers_power_of_2 -#define WIRE_ENTITIES kernel_kernel_inputs, kernel_kernel_value_out, kernel_kernel_side_effect_out, kernel_kernel_metadata_out, main_calldata, main_returndata, alu_a_hi, alu_a_lo, alu_b_hi, alu_b_lo, alu_borrow, alu_cf, alu_clk, alu_cmp_rng_ctr, alu_div_u16_r0, alu_div_u16_r1, alu_div_u16_r2, alu_div_u16_r3, alu_div_u16_r4, alu_div_u16_r5, alu_div_u16_r6, alu_div_u16_r7, alu_divisor_hi, alu_divisor_lo, alu_ff_tag, alu_ia, alu_ib, alu_ic, alu_in_tag, alu_op_add, alu_op_cast, alu_op_cast_prev, alu_op_div, alu_op_div_a_lt_b, alu_op_div_std, alu_op_eq, alu_op_eq_diff_inv, alu_op_lt, alu_op_lte, alu_op_mul, alu_op_not, alu_op_shl, alu_op_shr, alu_op_sub, alu_p_a_borrow, alu_p_b_borrow, alu_p_sub_a_hi, alu_p_sub_a_lo, alu_p_sub_b_hi, alu_p_sub_b_lo, alu_partial_prod_hi, alu_partial_prod_lo, alu_quotient_hi, alu_quotient_lo, alu_remainder, alu_res_hi, alu_res_lo, alu_sel_alu, alu_sel_cmp, alu_sel_div_rng_chk, alu_sel_rng_chk, alu_sel_rng_chk_lookup, alu_sel_shift_which, alu_shift_lt_bit_len, alu_t_sub_s_bits, alu_two_pow_s, alu_two_pow_t_sub_s, alu_u128_tag, alu_u16_r0, alu_u16_r1, alu_u16_r10, alu_u16_r11, alu_u16_r12, alu_u16_r13, alu_u16_r14, alu_u16_r2, alu_u16_r3, alu_u16_r4, alu_u16_r5, alu_u16_r6, alu_u16_r7, alu_u16_r8, alu_u16_r9, alu_u16_tag, alu_u32_tag, alu_u64_tag, alu_u8_r0, alu_u8_r1, alu_u8_tag, binary_acc_ia, binary_acc_ib, binary_acc_ic, binary_clk, binary_ia_bytes, binary_ib_bytes, binary_ic_bytes, binary_in_tag, binary_mem_tag_ctr, binary_mem_tag_ctr_inv, binary_op_id, binary_sel_bin, binary_start, conversion_clk, conversion_input, conversion_num_limbs, conversion_radix, conversion_sel_to_radix_le, 
keccakf1600_clk, keccakf1600_input, keccakf1600_output, keccakf1600_sel_keccakf1600, kernel_emit_l2_to_l1_msg_write_offset, kernel_emit_note_hash_write_offset, kernel_emit_nullifier_write_offset, kernel_emit_unencrypted_log_write_offset, kernel_kernel_in_offset, kernel_kernel_out_offset, kernel_l1_to_l2_msg_exists_write_offset, kernel_note_hash_exist_write_offset, kernel_nullifier_exists_write_offset, kernel_nullifier_non_exists_write_offset, kernel_q_public_input_kernel_add_to_table, kernel_q_public_input_kernel_out_add_to_table, kernel_side_effect_counter, kernel_sload_write_offset, kernel_sstore_write_offset, main_abs_da_rem_gas_hi, main_abs_da_rem_gas_lo, main_abs_l2_rem_gas_hi, main_abs_l2_rem_gas_lo, main_alu_in_tag, main_bin_op_id, main_call_ptr, main_da_gas_op_cost, main_da_gas_remaining, main_da_out_of_gas, main_ia, main_ib, main_ic, main_id, main_id_zero, main_ind_addr_a, main_ind_addr_b, main_ind_addr_c, main_ind_addr_d, main_internal_return_ptr, main_inv, main_l2_gas_op_cost, main_l2_gas_remaining, main_l2_out_of_gas, main_mem_addr_a, main_mem_addr_b, main_mem_addr_c, main_mem_addr_d, main_op_err, main_opcode_val, main_pc, main_r_in_tag, main_rwa, main_rwb, main_rwc, main_rwd, main_sel_alu, main_sel_bin, main_sel_calldata, main_sel_gas_accounting_active, main_sel_last, main_sel_mem_op_a, main_sel_mem_op_activate_gas, main_sel_mem_op_b, main_sel_mem_op_c, main_sel_mem_op_d, main_sel_mov_ia_to_ic, main_sel_mov_ib_to_ic, main_sel_op_add, main_sel_op_address, main_sel_op_and, main_sel_op_block_number, main_sel_op_calldata_copy, main_sel_op_cast, main_sel_op_chain_id, main_sel_op_cmov, main_sel_op_coinbase, main_sel_op_dagasleft, main_sel_op_div, main_sel_op_emit_l2_to_l1_msg, main_sel_op_emit_note_hash, main_sel_op_emit_nullifier, main_sel_op_emit_unencrypted_log, main_sel_op_eq, main_sel_op_external_call, main_sel_op_external_return, main_sel_op_fdiv, main_sel_op_fee_per_da_gas, main_sel_op_fee_per_l2_gas, main_sel_op_function_selector, 
main_sel_op_get_contract_instance, main_sel_op_halt, main_sel_op_internal_call, main_sel_op_internal_return, main_sel_op_jump, main_sel_op_jumpi, main_sel_op_keccak, main_sel_op_l1_to_l2_msg_exists, main_sel_op_l2gasleft, main_sel_op_lt, main_sel_op_lte, main_sel_op_mov, main_sel_op_mul, main_sel_op_not, main_sel_op_note_hash_exists, main_sel_op_nullifier_exists, main_sel_op_or, main_sel_op_pedersen, main_sel_op_poseidon2, main_sel_op_radix_le, main_sel_op_sender, main_sel_op_sha256, main_sel_op_shl, main_sel_op_shr, main_sel_op_sload, main_sel_op_sstore, main_sel_op_storage_address, main_sel_op_sub, main_sel_op_timestamp, main_sel_op_transaction_fee, main_sel_op_version, main_sel_op_xor, main_sel_q_kernel_lookup, main_sel_q_kernel_output_lookup, main_sel_resolve_ind_addr_a, main_sel_resolve_ind_addr_b, main_sel_resolve_ind_addr_c, main_sel_resolve_ind_addr_d, main_sel_returndata, main_sel_rng_16, main_sel_rng_8, main_sel_slice_gadget, main_space_id, main_tag_err, main_w_in_tag, mem_addr, mem_clk, mem_diff_hi, mem_diff_lo, mem_diff_mid, mem_glob_addr, mem_last, mem_lastAccess, mem_one_min_inv, mem_r_in_tag, mem_rw, mem_sel_mem, mem_sel_mov_ia_to_ic, mem_sel_mov_ib_to_ic, mem_sel_op_a, mem_sel_op_b, mem_sel_op_c, mem_sel_op_cmov, mem_sel_op_d, mem_sel_op_poseidon_read_a, mem_sel_op_poseidon_read_b, mem_sel_op_poseidon_read_c, mem_sel_op_poseidon_read_d, mem_sel_op_poseidon_write_a, mem_sel_op_poseidon_write_b, mem_sel_op_poseidon_write_c, mem_sel_op_poseidon_write_d, mem_sel_op_slice, mem_sel_resolve_ind_addr_a, mem_sel_resolve_ind_addr_b, mem_sel_resolve_ind_addr_c, mem_sel_resolve_ind_addr_d, mem_sel_rng_chk, mem_skip_check_tag, mem_space_id, mem_tag, mem_tag_err, mem_tsp, mem_val, mem_w_in_tag, pedersen_clk, pedersen_input, pedersen_output, pedersen_sel_pedersen, poseidon2_B_10_0, poseidon2_B_10_1, poseidon2_B_10_2, poseidon2_B_10_3, poseidon2_B_11_0, poseidon2_B_11_1, poseidon2_B_11_2, poseidon2_B_11_3, poseidon2_B_12_0, poseidon2_B_12_1, poseidon2_B_12_2, 
poseidon2_B_12_3, poseidon2_B_13_0, poseidon2_B_13_1, poseidon2_B_13_2, poseidon2_B_13_3, poseidon2_B_14_0, poseidon2_B_14_1, poseidon2_B_14_2, poseidon2_B_14_3, poseidon2_B_15_0, poseidon2_B_15_1, poseidon2_B_15_2, poseidon2_B_15_3, poseidon2_B_16_0, poseidon2_B_16_1, poseidon2_B_16_2, poseidon2_B_16_3, poseidon2_B_17_0, poseidon2_B_17_1, poseidon2_B_17_2, poseidon2_B_17_3, poseidon2_B_18_0, poseidon2_B_18_1, poseidon2_B_18_2, poseidon2_B_18_3, poseidon2_B_19_0, poseidon2_B_19_1, poseidon2_B_19_2, poseidon2_B_19_3, poseidon2_B_20_0, poseidon2_B_20_1, poseidon2_B_20_2, poseidon2_B_20_3, poseidon2_B_21_0, poseidon2_B_21_1, poseidon2_B_21_2, poseidon2_B_21_3, poseidon2_B_22_0, poseidon2_B_22_1, poseidon2_B_22_2, poseidon2_B_22_3, poseidon2_B_23_0, poseidon2_B_23_1, poseidon2_B_23_2, poseidon2_B_23_3, poseidon2_B_24_0, poseidon2_B_24_1, poseidon2_B_24_2, poseidon2_B_24_3, poseidon2_B_25_0, poseidon2_B_25_1, poseidon2_B_25_2, poseidon2_B_25_3, poseidon2_B_26_0, poseidon2_B_26_1, poseidon2_B_26_2, poseidon2_B_26_3, poseidon2_B_27_0, poseidon2_B_27_1, poseidon2_B_27_2, poseidon2_B_27_3, poseidon2_B_28_0, poseidon2_B_28_1, poseidon2_B_28_2, poseidon2_B_28_3, poseidon2_B_29_0, poseidon2_B_29_1, poseidon2_B_29_2, poseidon2_B_29_3, poseidon2_B_30_0, poseidon2_B_30_1, poseidon2_B_30_2, poseidon2_B_30_3, poseidon2_B_31_0, poseidon2_B_31_1, poseidon2_B_31_2, poseidon2_B_31_3, poseidon2_B_32_0, poseidon2_B_32_1, poseidon2_B_32_2, poseidon2_B_32_3, poseidon2_B_33_0, poseidon2_B_33_1, poseidon2_B_33_2, poseidon2_B_33_3, poseidon2_B_34_0, poseidon2_B_34_1, poseidon2_B_34_2, poseidon2_B_34_3, poseidon2_B_35_0, poseidon2_B_35_1, poseidon2_B_35_2, poseidon2_B_35_3, poseidon2_B_36_0, poseidon2_B_36_1, poseidon2_B_36_2, poseidon2_B_36_3, poseidon2_B_37_0, poseidon2_B_37_1, poseidon2_B_37_2, poseidon2_B_37_3, poseidon2_B_38_0, poseidon2_B_38_1, poseidon2_B_38_2, poseidon2_B_38_3, poseidon2_B_39_0, poseidon2_B_39_1, poseidon2_B_39_2, poseidon2_B_39_3, poseidon2_B_40_0, poseidon2_B_40_1, 
poseidon2_B_40_2, poseidon2_B_40_3, poseidon2_B_41_0, poseidon2_B_41_1, poseidon2_B_41_2, poseidon2_B_41_3, poseidon2_B_42_0, poseidon2_B_42_1, poseidon2_B_42_2, poseidon2_B_42_3, poseidon2_B_43_0, poseidon2_B_43_1, poseidon2_B_43_2, poseidon2_B_43_3, poseidon2_B_44_0, poseidon2_B_44_1, poseidon2_B_44_2, poseidon2_B_44_3, poseidon2_B_45_0, poseidon2_B_45_1, poseidon2_B_45_2, poseidon2_B_45_3, poseidon2_B_46_0, poseidon2_B_46_1, poseidon2_B_46_2, poseidon2_B_46_3, poseidon2_B_47_0, poseidon2_B_47_1, poseidon2_B_47_2, poseidon2_B_47_3, poseidon2_B_48_0, poseidon2_B_48_1, poseidon2_B_48_2, poseidon2_B_48_3, poseidon2_B_49_0, poseidon2_B_49_1, poseidon2_B_49_2, poseidon2_B_49_3, poseidon2_B_4_0, poseidon2_B_4_1, poseidon2_B_4_2, poseidon2_B_4_3, poseidon2_B_50_0, poseidon2_B_50_1, poseidon2_B_50_2, poseidon2_B_50_3, poseidon2_B_51_0, poseidon2_B_51_1, poseidon2_B_51_2, poseidon2_B_51_3, poseidon2_B_52_0, poseidon2_B_52_1, poseidon2_B_52_2, poseidon2_B_52_3, poseidon2_B_53_0, poseidon2_B_53_1, poseidon2_B_53_2, poseidon2_B_53_3, poseidon2_B_54_0, poseidon2_B_54_1, poseidon2_B_54_2, poseidon2_B_54_3, poseidon2_B_55_0, poseidon2_B_55_1, poseidon2_B_55_2, poseidon2_B_55_3, poseidon2_B_56_0, poseidon2_B_56_1, poseidon2_B_56_2, poseidon2_B_56_3, poseidon2_B_57_0, poseidon2_B_57_1, poseidon2_B_57_2, poseidon2_B_57_3, poseidon2_B_58_0, poseidon2_B_58_1, poseidon2_B_58_2, poseidon2_B_58_3, poseidon2_B_59_0, poseidon2_B_59_1, poseidon2_B_59_2, poseidon2_B_59_3, poseidon2_B_5_0, poseidon2_B_5_1, poseidon2_B_5_2, poseidon2_B_5_3, poseidon2_B_6_0, poseidon2_B_6_1, poseidon2_B_6_2, poseidon2_B_6_3, poseidon2_B_7_0, poseidon2_B_7_1, poseidon2_B_7_2, poseidon2_B_7_3, poseidon2_B_8_0, poseidon2_B_8_1, poseidon2_B_8_2, poseidon2_B_8_3, poseidon2_B_9_0, poseidon2_B_9_1, poseidon2_B_9_2, poseidon2_B_9_3, poseidon2_EXT_LAYER_4, poseidon2_EXT_LAYER_5, poseidon2_EXT_LAYER_6, poseidon2_EXT_LAYER_7, poseidon2_T_0_4, poseidon2_T_0_5, poseidon2_T_0_6, poseidon2_T_0_7, poseidon2_T_1_4, 
poseidon2_T_1_5, poseidon2_T_1_6, poseidon2_T_1_7, poseidon2_T_2_4, poseidon2_T_2_5, poseidon2_T_2_6, poseidon2_T_2_7, poseidon2_T_3_4, poseidon2_T_3_5, poseidon2_T_3_6, poseidon2_T_3_7, poseidon2_T_60_4, poseidon2_T_60_5, poseidon2_T_60_6, poseidon2_T_60_7, poseidon2_T_61_4, poseidon2_T_61_5, poseidon2_T_61_6, poseidon2_T_61_7, poseidon2_T_62_4, poseidon2_T_62_5, poseidon2_T_62_6, poseidon2_T_62_7, poseidon2_T_63_4, poseidon2_T_63_5, poseidon2_T_63_6, poseidon2_T_63_7, poseidon2_a_0, poseidon2_a_1, poseidon2_a_2, poseidon2_a_3, poseidon2_b_0, poseidon2_b_1, poseidon2_b_2, poseidon2_b_3, poseidon2_clk, poseidon2_input_addr, poseidon2_mem_addr_read_a, poseidon2_mem_addr_read_b, poseidon2_mem_addr_read_c, poseidon2_mem_addr_read_d, poseidon2_mem_addr_write_a, poseidon2_mem_addr_write_b, poseidon2_mem_addr_write_c, poseidon2_mem_addr_write_d, poseidon2_output_addr, poseidon2_sel_poseidon_perm, sha256_clk, sha256_input, sha256_output, sha256_sel_sha256_compression, sha256_state, slice_addr, slice_clk, slice_cnt, slice_col_offset, slice_one_min_inv, slice_sel_cd_cpy, slice_sel_mem_active, slice_sel_return, slice_sel_start, slice_space_id, slice_val, lookup_byte_lengths_counts, lookup_byte_operations_counts, lookup_cd_value_counts, lookup_ret_value_counts, lookup_opcode_gas_counts, range_check_l2_gas_hi_counts, range_check_l2_gas_lo_counts, range_check_da_gas_hi_counts, range_check_da_gas_lo_counts, kernel_output_lookup_counts, lookup_into_kernel_counts, incl_main_tag_err_counts, incl_mem_tag_err_counts, lookup_mem_rng_chk_lo_counts, lookup_mem_rng_chk_mid_counts, lookup_mem_rng_chk_hi_counts, lookup_pow_2_0_counts, lookup_pow_2_1_counts, lookup_u8_0_counts, lookup_u8_1_counts, lookup_u16_0_counts, lookup_u16_1_counts, lookup_u16_2_counts, lookup_u16_3_counts, lookup_u16_4_counts, lookup_u16_5_counts, lookup_u16_6_counts, lookup_u16_7_counts, lookup_u16_8_counts, lookup_u16_9_counts, lookup_u16_10_counts, lookup_u16_11_counts, lookup_u16_12_counts, lookup_u16_13_counts, 
lookup_u16_14_counts, lookup_div_u16_0_counts, lookup_div_u16_1_counts, lookup_div_u16_2_counts, lookup_div_u16_3_counts, lookup_div_u16_4_counts, lookup_div_u16_5_counts, lookup_div_u16_6_counts, lookup_div_u16_7_counts +#define WIRE_ENTITIES kernel_kernel_inputs, kernel_kernel_value_out, kernel_kernel_side_effect_out, kernel_kernel_metadata_out, main_calldata, main_returndata, alu_a_hi, alu_a_lo, alu_b_hi, alu_b_lo, alu_borrow, alu_cf, alu_clk, alu_cmp_rng_ctr, alu_div_u16_r0, alu_div_u16_r1, alu_div_u16_r2, alu_div_u16_r3, alu_div_u16_r4, alu_div_u16_r5, alu_div_u16_r6, alu_div_u16_r7, alu_divisor_hi, alu_divisor_lo, alu_ff_tag, alu_ia, alu_ib, alu_ic, alu_in_tag, alu_op_add, alu_op_cast, alu_op_cast_prev, alu_op_div, alu_op_div_a_lt_b, alu_op_div_std, alu_op_eq, alu_op_eq_diff_inv, alu_op_lt, alu_op_lte, alu_op_mul, alu_op_not, alu_op_shl, alu_op_shr, alu_op_sub, alu_p_a_borrow, alu_p_b_borrow, alu_p_sub_a_hi, alu_p_sub_a_lo, alu_p_sub_b_hi, alu_p_sub_b_lo, alu_partial_prod_hi, alu_partial_prod_lo, alu_quotient_hi, alu_quotient_lo, alu_remainder, alu_res_hi, alu_res_lo, alu_sel_alu, alu_sel_cmp, alu_sel_div_rng_chk, alu_sel_rng_chk, alu_sel_rng_chk_lookup, alu_sel_shift_which, alu_shift_lt_bit_len, alu_t_sub_s_bits, alu_two_pow_s, alu_two_pow_t_sub_s, alu_u128_tag, alu_u16_r0, alu_u16_r1, alu_u16_r10, alu_u16_r11, alu_u16_r12, alu_u16_r13, alu_u16_r14, alu_u16_r2, alu_u16_r3, alu_u16_r4, alu_u16_r5, alu_u16_r6, alu_u16_r7, alu_u16_r8, alu_u16_r9, alu_u16_tag, alu_u32_tag, alu_u64_tag, alu_u8_r0, alu_u8_r1, alu_u8_tag, binary_acc_ia, binary_acc_ib, binary_acc_ic, binary_clk, binary_ia_bytes, binary_ib_bytes, binary_ic_bytes, binary_in_tag, binary_mem_tag_ctr, binary_mem_tag_ctr_inv, binary_op_id, binary_sel_bin, binary_start, conversion_clk, conversion_input, conversion_num_limbs, conversion_radix, conversion_sel_to_radix_le, keccakf1600_clk, keccakf1600_input, keccakf1600_output, keccakf1600_sel_keccakf1600, kernel_emit_l2_to_l1_msg_write_offset, 
kernel_emit_note_hash_write_offset, kernel_emit_nullifier_write_offset, kernel_emit_unencrypted_log_write_offset, kernel_kernel_in_offset, kernel_kernel_out_offset, kernel_l1_to_l2_msg_exists_write_offset, kernel_note_hash_exist_write_offset, kernel_nullifier_exists_write_offset, kernel_nullifier_non_exists_write_offset, kernel_q_public_input_kernel_add_to_table, kernel_q_public_input_kernel_out_add_to_table, kernel_side_effect_counter, kernel_sload_write_offset, kernel_sstore_write_offset, main_abs_da_rem_gas_hi, main_abs_da_rem_gas_lo, main_abs_l2_rem_gas_hi, main_abs_l2_rem_gas_lo, main_alu_in_tag, main_bin_op_id, main_call_ptr, main_da_gas_op_cost, main_da_gas_remaining, main_da_out_of_gas, main_ia, main_ib, main_ic, main_id, main_id_zero, main_ind_addr_a, main_ind_addr_b, main_ind_addr_c, main_ind_addr_d, main_internal_return_ptr, main_inv, main_l2_gas_op_cost, main_l2_gas_remaining, main_l2_out_of_gas, main_mem_addr_a, main_mem_addr_b, main_mem_addr_c, main_mem_addr_d, main_op_err, main_opcode_val, main_pc, main_r_in_tag, main_rwa, main_rwb, main_rwc, main_rwd, main_sel_alu, main_sel_bin, main_sel_calldata, main_sel_gas_accounting_active, main_sel_last, main_sel_mem_op_a, main_sel_mem_op_b, main_sel_mem_op_c, main_sel_mem_op_d, main_sel_mov_ia_to_ic, main_sel_mov_ib_to_ic, main_sel_op_add, main_sel_op_address, main_sel_op_and, main_sel_op_block_number, main_sel_op_calldata_copy, main_sel_op_cast, main_sel_op_chain_id, main_sel_op_cmov, main_sel_op_coinbase, main_sel_op_dagasleft, main_sel_op_div, main_sel_op_emit_l2_to_l1_msg, main_sel_op_emit_note_hash, main_sel_op_emit_nullifier, main_sel_op_emit_unencrypted_log, main_sel_op_eq, main_sel_op_external_call, main_sel_op_external_return, main_sel_op_fdiv, main_sel_op_fee_per_da_gas, main_sel_op_fee_per_l2_gas, main_sel_op_function_selector, main_sel_op_get_contract_instance, main_sel_op_halt, main_sel_op_internal_call, main_sel_op_internal_return, main_sel_op_jump, main_sel_op_jumpi, main_sel_op_keccak, 
main_sel_op_l1_to_l2_msg_exists, main_sel_op_l2gasleft, main_sel_op_lt, main_sel_op_lte, main_sel_op_mov, main_sel_op_mul, main_sel_op_not, main_sel_op_note_hash_exists, main_sel_op_nullifier_exists, main_sel_op_or, main_sel_op_pedersen, main_sel_op_poseidon2, main_sel_op_radix_le, main_sel_op_sender, main_sel_op_sha256, main_sel_op_shl, main_sel_op_shr, main_sel_op_sload, main_sel_op_sstore, main_sel_op_storage_address, main_sel_op_sub, main_sel_op_timestamp, main_sel_op_transaction_fee, main_sel_op_version, main_sel_op_xor, main_sel_q_kernel_lookup, main_sel_q_kernel_output_lookup, main_sel_resolve_ind_addr_a, main_sel_resolve_ind_addr_b, main_sel_resolve_ind_addr_c, main_sel_resolve_ind_addr_d, main_sel_returndata, main_sel_rng_16, main_sel_rng_8, main_sel_slice_gadget, main_space_id, main_tag_err, main_w_in_tag, mem_addr, mem_clk, mem_diff_hi, mem_diff_lo, mem_diff_mid, mem_glob_addr, mem_last, mem_lastAccess, mem_one_min_inv, mem_r_in_tag, mem_rw, mem_sel_mem, mem_sel_mov_ia_to_ic, mem_sel_mov_ib_to_ic, mem_sel_op_a, mem_sel_op_b, mem_sel_op_c, mem_sel_op_cmov, mem_sel_op_d, mem_sel_op_poseidon_read_a, mem_sel_op_poseidon_read_b, mem_sel_op_poseidon_read_c, mem_sel_op_poseidon_read_d, mem_sel_op_poseidon_write_a, mem_sel_op_poseidon_write_b, mem_sel_op_poseidon_write_c, mem_sel_op_poseidon_write_d, mem_sel_op_slice, mem_sel_resolve_ind_addr_a, mem_sel_resolve_ind_addr_b, mem_sel_resolve_ind_addr_c, mem_sel_resolve_ind_addr_d, mem_sel_rng_chk, mem_skip_check_tag, mem_space_id, mem_tag, mem_tag_err, mem_tsp, mem_val, mem_w_in_tag, pedersen_clk, pedersen_input, pedersen_output, pedersen_sel_pedersen, poseidon2_B_10_0, poseidon2_B_10_1, poseidon2_B_10_2, poseidon2_B_10_3, poseidon2_B_11_0, poseidon2_B_11_1, poseidon2_B_11_2, poseidon2_B_11_3, poseidon2_B_12_0, poseidon2_B_12_1, poseidon2_B_12_2, poseidon2_B_12_3, poseidon2_B_13_0, poseidon2_B_13_1, poseidon2_B_13_2, poseidon2_B_13_3, poseidon2_B_14_0, poseidon2_B_14_1, poseidon2_B_14_2, poseidon2_B_14_3, 
poseidon2_B_15_0, poseidon2_B_15_1, poseidon2_B_15_2, poseidon2_B_15_3, poseidon2_B_16_0, poseidon2_B_16_1, poseidon2_B_16_2, poseidon2_B_16_3, poseidon2_B_17_0, poseidon2_B_17_1, poseidon2_B_17_2, poseidon2_B_17_3, poseidon2_B_18_0, poseidon2_B_18_1, poseidon2_B_18_2, poseidon2_B_18_3, poseidon2_B_19_0, poseidon2_B_19_1, poseidon2_B_19_2, poseidon2_B_19_3, poseidon2_B_20_0, poseidon2_B_20_1, poseidon2_B_20_2, poseidon2_B_20_3, poseidon2_B_21_0, poseidon2_B_21_1, poseidon2_B_21_2, poseidon2_B_21_3, poseidon2_B_22_0, poseidon2_B_22_1, poseidon2_B_22_2, poseidon2_B_22_3, poseidon2_B_23_0, poseidon2_B_23_1, poseidon2_B_23_2, poseidon2_B_23_3, poseidon2_B_24_0, poseidon2_B_24_1, poseidon2_B_24_2, poseidon2_B_24_3, poseidon2_B_25_0, poseidon2_B_25_1, poseidon2_B_25_2, poseidon2_B_25_3, poseidon2_B_26_0, poseidon2_B_26_1, poseidon2_B_26_2, poseidon2_B_26_3, poseidon2_B_27_0, poseidon2_B_27_1, poseidon2_B_27_2, poseidon2_B_27_3, poseidon2_B_28_0, poseidon2_B_28_1, poseidon2_B_28_2, poseidon2_B_28_3, poseidon2_B_29_0, poseidon2_B_29_1, poseidon2_B_29_2, poseidon2_B_29_3, poseidon2_B_30_0, poseidon2_B_30_1, poseidon2_B_30_2, poseidon2_B_30_3, poseidon2_B_31_0, poseidon2_B_31_1, poseidon2_B_31_2, poseidon2_B_31_3, poseidon2_B_32_0, poseidon2_B_32_1, poseidon2_B_32_2, poseidon2_B_32_3, poseidon2_B_33_0, poseidon2_B_33_1, poseidon2_B_33_2, poseidon2_B_33_3, poseidon2_B_34_0, poseidon2_B_34_1, poseidon2_B_34_2, poseidon2_B_34_3, poseidon2_B_35_0, poseidon2_B_35_1, poseidon2_B_35_2, poseidon2_B_35_3, poseidon2_B_36_0, poseidon2_B_36_1, poseidon2_B_36_2, poseidon2_B_36_3, poseidon2_B_37_0, poseidon2_B_37_1, poseidon2_B_37_2, poseidon2_B_37_3, poseidon2_B_38_0, poseidon2_B_38_1, poseidon2_B_38_2, poseidon2_B_38_3, poseidon2_B_39_0, poseidon2_B_39_1, poseidon2_B_39_2, poseidon2_B_39_3, poseidon2_B_40_0, poseidon2_B_40_1, poseidon2_B_40_2, poseidon2_B_40_3, poseidon2_B_41_0, poseidon2_B_41_1, poseidon2_B_41_2, poseidon2_B_41_3, poseidon2_B_42_0, poseidon2_B_42_1, poseidon2_B_42_2, 
poseidon2_B_42_3, poseidon2_B_43_0, poseidon2_B_43_1, poseidon2_B_43_2, poseidon2_B_43_3, poseidon2_B_44_0, poseidon2_B_44_1, poseidon2_B_44_2, poseidon2_B_44_3, poseidon2_B_45_0, poseidon2_B_45_1, poseidon2_B_45_2, poseidon2_B_45_3, poseidon2_B_46_0, poseidon2_B_46_1, poseidon2_B_46_2, poseidon2_B_46_3, poseidon2_B_47_0, poseidon2_B_47_1, poseidon2_B_47_2, poseidon2_B_47_3, poseidon2_B_48_0, poseidon2_B_48_1, poseidon2_B_48_2, poseidon2_B_48_3, poseidon2_B_49_0, poseidon2_B_49_1, poseidon2_B_49_2, poseidon2_B_49_3, poseidon2_B_4_0, poseidon2_B_4_1, poseidon2_B_4_2, poseidon2_B_4_3, poseidon2_B_50_0, poseidon2_B_50_1, poseidon2_B_50_2, poseidon2_B_50_3, poseidon2_B_51_0, poseidon2_B_51_1, poseidon2_B_51_2, poseidon2_B_51_3, poseidon2_B_52_0, poseidon2_B_52_1, poseidon2_B_52_2, poseidon2_B_52_3, poseidon2_B_53_0, poseidon2_B_53_1, poseidon2_B_53_2, poseidon2_B_53_3, poseidon2_B_54_0, poseidon2_B_54_1, poseidon2_B_54_2, poseidon2_B_54_3, poseidon2_B_55_0, poseidon2_B_55_1, poseidon2_B_55_2, poseidon2_B_55_3, poseidon2_B_56_0, poseidon2_B_56_1, poseidon2_B_56_2, poseidon2_B_56_3, poseidon2_B_57_0, poseidon2_B_57_1, poseidon2_B_57_2, poseidon2_B_57_3, poseidon2_B_58_0, poseidon2_B_58_1, poseidon2_B_58_2, poseidon2_B_58_3, poseidon2_B_59_0, poseidon2_B_59_1, poseidon2_B_59_2, poseidon2_B_59_3, poseidon2_B_5_0, poseidon2_B_5_1, poseidon2_B_5_2, poseidon2_B_5_3, poseidon2_B_6_0, poseidon2_B_6_1, poseidon2_B_6_2, poseidon2_B_6_3, poseidon2_B_7_0, poseidon2_B_7_1, poseidon2_B_7_2, poseidon2_B_7_3, poseidon2_B_8_0, poseidon2_B_8_1, poseidon2_B_8_2, poseidon2_B_8_3, poseidon2_B_9_0, poseidon2_B_9_1, poseidon2_B_9_2, poseidon2_B_9_3, poseidon2_EXT_LAYER_4, poseidon2_EXT_LAYER_5, poseidon2_EXT_LAYER_6, poseidon2_EXT_LAYER_7, poseidon2_T_0_4, poseidon2_T_0_5, poseidon2_T_0_6, poseidon2_T_0_7, poseidon2_T_1_4, poseidon2_T_1_5, poseidon2_T_1_6, poseidon2_T_1_7, poseidon2_T_2_4, poseidon2_T_2_5, poseidon2_T_2_6, poseidon2_T_2_7, poseidon2_T_3_4, poseidon2_T_3_5, poseidon2_T_3_6, 
poseidon2_T_3_7, poseidon2_T_60_4, poseidon2_T_60_5, poseidon2_T_60_6, poseidon2_T_60_7, poseidon2_T_61_4, poseidon2_T_61_5, poseidon2_T_61_6, poseidon2_T_61_7, poseidon2_T_62_4, poseidon2_T_62_5, poseidon2_T_62_6, poseidon2_T_62_7, poseidon2_T_63_4, poseidon2_T_63_5, poseidon2_T_63_6, poseidon2_T_63_7, poseidon2_a_0, poseidon2_a_1, poseidon2_a_2, poseidon2_a_3, poseidon2_b_0, poseidon2_b_1, poseidon2_b_2, poseidon2_b_3, poseidon2_clk, poseidon2_input_addr, poseidon2_mem_addr_read_a, poseidon2_mem_addr_read_b, poseidon2_mem_addr_read_c, poseidon2_mem_addr_read_d, poseidon2_mem_addr_write_a, poseidon2_mem_addr_write_b, poseidon2_mem_addr_write_c, poseidon2_mem_addr_write_d, poseidon2_output_addr, poseidon2_sel_poseidon_perm, sha256_clk, sha256_input, sha256_output, sha256_sel_sha256_compression, sha256_state, slice_addr, slice_clk, slice_cnt, slice_col_offset, slice_one_min_inv, slice_sel_cd_cpy, slice_sel_mem_active, slice_sel_return, slice_sel_start, slice_space_id, slice_val, lookup_byte_lengths_counts, lookup_byte_operations_counts, lookup_cd_value_counts, lookup_ret_value_counts, lookup_opcode_gas_counts, range_check_l2_gas_hi_counts, range_check_l2_gas_lo_counts, range_check_da_gas_hi_counts, range_check_da_gas_lo_counts, kernel_output_lookup_counts, lookup_into_kernel_counts, incl_main_tag_err_counts, incl_mem_tag_err_counts, lookup_mem_rng_chk_lo_counts, lookup_mem_rng_chk_mid_counts, lookup_mem_rng_chk_hi_counts, lookup_pow_2_0_counts, lookup_pow_2_1_counts, lookup_u8_0_counts, lookup_u8_1_counts, lookup_u16_0_counts, lookup_u16_1_counts, lookup_u16_2_counts, lookup_u16_3_counts, lookup_u16_4_counts, lookup_u16_5_counts, lookup_u16_6_counts, lookup_u16_7_counts, lookup_u16_8_counts, lookup_u16_9_counts, lookup_u16_10_counts, lookup_u16_11_counts, lookup_u16_12_counts, lookup_u16_13_counts, lookup_u16_14_counts, lookup_div_u16_0_counts, lookup_div_u16_1_counts, lookup_div_u16_2_counts, lookup_div_u16_3_counts, lookup_div_u16_4_counts, 
lookup_div_u16_5_counts, lookup_div_u16_6_counts, lookup_div_u16_7_counts #define DERIVED_WITNESS_ENTITIES perm_pos_mem_read_a, perm_pos_mem_read_b, perm_pos_mem_read_c, perm_pos_mem_read_d, perm_pos_mem_write_a, perm_pos_mem_write_b, perm_pos_mem_write_c, perm_pos_mem_write_d, perm_slice_mem, perm_main_alu, perm_main_bin, perm_main_conv, perm_main_pos2_perm, perm_main_pedersen, perm_main_slice, perm_main_mem_a, perm_main_mem_b, perm_main_mem_c, perm_main_mem_d, perm_main_mem_ind_addr_a, perm_main_mem_ind_addr_b, perm_main_mem_ind_addr_c, perm_main_mem_ind_addr_d, lookup_byte_lengths, lookup_byte_operations, lookup_cd_value, lookup_ret_value, lookup_opcode_gas, range_check_l2_gas_hi, range_check_l2_gas_lo, range_check_da_gas_hi, range_check_da_gas_lo, kernel_output_lookup, lookup_into_kernel, incl_main_tag_err, incl_mem_tag_err, lookup_mem_rng_chk_lo, lookup_mem_rng_chk_mid, lookup_mem_rng_chk_hi, lookup_pow_2_0, lookup_pow_2_1, lookup_u8_0, lookup_u8_1, lookup_u16_0, lookup_u16_1, lookup_u16_2, lookup_u16_3, lookup_u16_4, lookup_u16_5, lookup_u16_6, lookup_u16_7, lookup_u16_8, lookup_u16_9, lookup_u16_10, lookup_u16_11, lookup_u16_12, lookup_u16_13, lookup_u16_14, lookup_div_u16_0, lookup_div_u16_1, lookup_div_u16_2, lookup_div_u16_3, lookup_div_u16_4, lookup_div_u16_5, lookup_div_u16_6, lookup_div_u16_7 #define SHIFTED_ENTITIES alu_a_hi_shift, alu_a_lo_shift, alu_b_hi_shift, alu_b_lo_shift, alu_cmp_rng_ctr_shift, alu_div_u16_r0_shift, alu_div_u16_r1_shift, alu_div_u16_r2_shift, alu_div_u16_r3_shift, alu_div_u16_r4_shift, alu_div_u16_r5_shift, alu_div_u16_r6_shift, alu_div_u16_r7_shift, alu_op_add_shift, alu_op_cast_prev_shift, alu_op_cast_shift, alu_op_div_shift, alu_op_mul_shift, alu_op_shl_shift, alu_op_shr_shift, alu_op_sub_shift, alu_p_sub_a_hi_shift, alu_p_sub_a_lo_shift, alu_p_sub_b_hi_shift, alu_p_sub_b_lo_shift, alu_sel_alu_shift, alu_sel_cmp_shift, alu_sel_div_rng_chk_shift, alu_sel_rng_chk_lookup_shift, alu_sel_rng_chk_shift, alu_u16_r0_shift, 
alu_u16_r1_shift, alu_u16_r2_shift, alu_u16_r3_shift, alu_u16_r4_shift, alu_u16_r5_shift, alu_u16_r6_shift, alu_u8_r0_shift, alu_u8_r1_shift, binary_acc_ia_shift, binary_acc_ib_shift, binary_acc_ic_shift, binary_mem_tag_ctr_shift, binary_op_id_shift, kernel_emit_l2_to_l1_msg_write_offset_shift, kernel_emit_note_hash_write_offset_shift, kernel_emit_nullifier_write_offset_shift, kernel_emit_unencrypted_log_write_offset_shift, kernel_l1_to_l2_msg_exists_write_offset_shift, kernel_note_hash_exist_write_offset_shift, kernel_nullifier_exists_write_offset_shift, kernel_nullifier_non_exists_write_offset_shift, kernel_side_effect_counter_shift, kernel_sload_write_offset_shift, kernel_sstore_write_offset_shift, main_da_gas_remaining_shift, main_internal_return_ptr_shift, main_l2_gas_remaining_shift, main_pc_shift, mem_glob_addr_shift, mem_rw_shift, mem_sel_mem_shift, mem_tag_shift, mem_tsp_shift, mem_val_shift, slice_addr_shift, slice_clk_shift, slice_cnt_shift, slice_col_offset_shift, slice_sel_cd_cpy_shift, slice_sel_mem_active_shift, slice_sel_return_shift, slice_sel_start_shift, slice_space_id_shift #define TO_BE_SHIFTED(e) e.alu_a_hi, e.alu_a_lo, e.alu_b_hi, e.alu_b_lo, e.alu_cmp_rng_ctr, e.alu_div_u16_r0, e.alu_div_u16_r1, e.alu_div_u16_r2, e.alu_div_u16_r3, e.alu_div_u16_r4, e.alu_div_u16_r5, e.alu_div_u16_r6, e.alu_div_u16_r7, e.alu_op_add, e.alu_op_cast_prev, e.alu_op_cast, e.alu_op_div, e.alu_op_mul, e.alu_op_shl, e.alu_op_shr, e.alu_op_sub, e.alu_p_sub_a_hi, e.alu_p_sub_a_lo, e.alu_p_sub_b_hi, e.alu_p_sub_b_lo, e.alu_sel_alu, e.alu_sel_cmp, e.alu_sel_div_rng_chk, e.alu_sel_rng_chk_lookup, e.alu_sel_rng_chk, e.alu_u16_r0, e.alu_u16_r1, e.alu_u16_r2, e.alu_u16_r3, e.alu_u16_r4, e.alu_u16_r5, e.alu_u16_r6, e.alu_u8_r0, e.alu_u8_r1, e.binary_acc_ia, e.binary_acc_ib, e.binary_acc_ic, e.binary_mem_tag_ctr, e.binary_op_id, e.kernel_emit_l2_to_l1_msg_write_offset, e.kernel_emit_note_hash_write_offset, e.kernel_emit_nullifier_write_offset, 
e.kernel_emit_unencrypted_log_write_offset, e.kernel_l1_to_l2_msg_exists_write_offset, e.kernel_note_hash_exist_write_offset, e.kernel_nullifier_exists_write_offset, e.kernel_nullifier_non_exists_write_offset, e.kernel_side_effect_counter, e.kernel_sload_write_offset, e.kernel_sstore_write_offset, e.main_da_gas_remaining, e.main_internal_return_ptr, e.main_l2_gas_remaining, e.main_pc, e.mem_glob_addr, e.mem_rw, e.mem_sel_mem, e.mem_tag, e.mem_tsp, e.mem_val, e.slice_addr, e.slice_clk, e.slice_cnt, e.slice_col_offset, e.slice_sel_cd_cpy, e.slice_sel_mem_active, e.slice_sel_return, e.slice_sel_start, e.slice_space_id @@ -128,11 +128,11 @@ class AvmFlavor { using RelationSeparator = AvmFlavorSettings::RelationSeparator; static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 14; - static constexpr size_t NUM_WITNESS_ENTITIES = 690; + static constexpr size_t NUM_WITNESS_ENTITIES = 689; static constexpr size_t NUM_WIRES = NUM_WITNESS_ENTITIES + NUM_PRECOMPUTED_ENTITIES; // We have two copies of the witness entities, so we subtract the number of fixed ones (they have no shift), one for // the unshifted and one for the shifted - static constexpr size_t NUM_ALL_ENTITIES = 778; + static constexpr size_t NUM_ALL_ENTITIES = 777; using MainRelations = std::tuple< // Relations diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.cpp index eb57c0bd310..f4e669998ab 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.cpp @@ -201,7 +201,6 @@ template std::vector AvmFullRow::names() "main_sel_gas_accounting_active", "main_sel_last", "main_sel_mem_op_a", - "main_sel_mem_op_activate_gas", "main_sel_mem_op_b", "main_sel_mem_op_c", "main_sel_mem_op_d", @@ -910,7 +909,6 @@ template RefVector AvmFullRow::as_vector() const main_sel_gas_accounting_active, main_sel_last, main_sel_mem_op_a, - main_sel_mem_op_activate_gas, 
main_sel_mem_op_b, main_sel_mem_op_c, main_sel_mem_op_d, diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.hpp index 278ce258c3c..13e075cf7b9 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.hpp @@ -192,7 +192,6 @@ template struct AvmFullRow { FF main_sel_gas_accounting_active{}; FF main_sel_last{}; FF main_sel_mem_op_a{}; - FF main_sel_mem_op_activate_gas{}; FF main_sel_mem_op_b{}; FF main_sel_mem_op_c{}; FF main_sel_mem_op_d{}; @@ -718,7 +717,7 @@ template struct AvmFullRow { RefVector as_vector() const; static std::vector names(); - static constexpr size_t SIZE = 704; + static constexpr size_t SIZE = 703; }; template std::ostream& operator<<(std::ostream& os, AvmFullRow const& row); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/main.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/main.hpp index a1e07822a12..25fc0d01cc3 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/main.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/main.hpp @@ -59,7 +59,6 @@ template struct MainRow { FF main_sel_first{}; FF main_sel_gas_accounting_active{}; FF main_sel_mem_op_a{}; - FF main_sel_mem_op_activate_gas{}; FF main_sel_mem_op_b{}; FF main_sel_mem_op_c{}; FF main_sel_mem_op_d{}; @@ -135,12 +134,12 @@ template class mainImpl { public: using FF = FF_; - static constexpr std::array SUBRELATION_PARTIAL_LENGTHS = { - 3, 3, 3, 3, 3, 3, 5, 5, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 4, 4, 3, 3, 3, 3, 3, 3, 4, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 2, 5, 3, 3, 3, 4, 4, 3, 3, 3, 3, 3, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 
4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2 + static constexpr std::array SUBRELATION_PARTIAL_LENGTHS = { + 3, 3, 4, 4, 3, 3, 5, 5, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 4, 4, 3, 3, 3, 3, 3, 3, 4, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 3, 3, 4, 4, 3, 3, 3, 3, 3, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2 }; template @@ -163,7 +162,7 @@ template class mainImpl { } { using Accumulator = typename std::tuple_element_t<2, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_gas_accounting_active * + auto tmp = ((new_term.main_sel_gas_accounting_active * (-new_term.main_sel_op_external_call + FF(1))) * ((new_term.main_l2_gas_remaining_shift - new_term.main_l2_gas_remaining) + new_term.main_l2_gas_op_cost)); tmp *= scaling_factor; @@ -171,7 +170,7 @@ template class mainImpl { } { using Accumulator = typename std::tuple_element_t<3, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_gas_accounting_active * + auto tmp = ((new_term.main_sel_gas_accounting_active * (-new_term.main_sel_op_external_call + FF(1))) * ((new_term.main_da_gas_remaining_shift - new_term.main_da_gas_remaining) + new_term.main_da_gas_op_cost)); tmp *= scaling_factor; @@ -784,54 +783,6 @@ template class mainImpl { } { using Accumulator = typename std::tuple_element_t<99, ContainerOverSubrelations>; - auto tmp = - (((((new_term.main_sel_gas_accounting_active - - ((((((((new_term.main_sel_op_fdiv + - ((((((((((new_term.main_sel_op_add + new_term.main_sel_op_sub) + - new_term.main_sel_op_mul) + - new_term.main_sel_op_div) + - new_term.main_sel_op_not) + - new_term.main_sel_op_eq) + - new_term.main_sel_op_lt) + - new_term.main_sel_op_lte) + - new_term.main_sel_op_shr) + - new_term.main_sel_op_shl) + - 
new_term.main_sel_op_cast)) + - ((new_term.main_sel_op_and + new_term.main_sel_op_or) + new_term.main_sel_op_xor)) + - (new_term.main_sel_op_cmov + new_term.main_sel_op_mov)) + - ((((new_term.main_sel_op_radix_le + new_term.main_sel_op_sha256) + - new_term.main_sel_op_poseidon2) + - new_term.main_sel_op_keccak) + - new_term.main_sel_op_pedersen)) + - (((((((((((new_term.main_sel_op_address + new_term.main_sel_op_storage_address) + - new_term.main_sel_op_sender) + - new_term.main_sel_op_function_selector) + - new_term.main_sel_op_transaction_fee) + - new_term.main_sel_op_chain_id) + - new_term.main_sel_op_version) + - new_term.main_sel_op_block_number) + - new_term.main_sel_op_coinbase) + - new_term.main_sel_op_timestamp) + - new_term.main_sel_op_fee_per_l2_gas) + - new_term.main_sel_op_fee_per_da_gas)) + - ((((((new_term.main_sel_op_note_hash_exists + new_term.main_sel_op_emit_note_hash) + - new_term.main_sel_op_nullifier_exists) + - new_term.main_sel_op_emit_nullifier) + - new_term.main_sel_op_l1_to_l2_msg_exists) + - new_term.main_sel_op_emit_unencrypted_log) + - new_term.main_sel_op_emit_l2_to_l1_msg)) + - (new_term.main_sel_op_dagasleft + new_term.main_sel_op_l2gasleft)) + - (new_term.main_sel_op_calldata_copy + new_term.main_sel_op_external_return))) - - (((new_term.main_sel_op_jump + new_term.main_sel_op_jumpi) + new_term.main_sel_op_internal_call) + - new_term.main_sel_op_internal_return)) - - new_term.main_sel_op_sload) - - new_term.main_sel_op_sstore) - - new_term.main_sel_mem_op_activate_gas); - tmp *= scaling_factor; - std::get<99>(evals) += typename Accumulator::View(tmp); - } - { - using Accumulator = typename std::tuple_element_t<100, ContainerOverSubrelations>; auto tmp = ((((-new_term.main_sel_first + FF(1)) * (-new_term.main_sel_op_halt + FF(1))) * ((((((((new_term.main_sel_op_fdiv + @@ -871,27 +822,27 @@ template class mainImpl { (new_term.main_sel_op_calldata_copy + new_term.main_sel_op_external_return))) * (new_term.main_pc_shift - 
(new_term.main_pc + FF(1)))); tmp *= scaling_factor; - std::get<100>(evals) += typename Accumulator::View(tmp); + std::get<99>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<101, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<100, ContainerOverSubrelations>; auto tmp = ((-(((new_term.main_sel_first + new_term.main_sel_op_internal_call) + new_term.main_sel_op_internal_return) + new_term.main_sel_op_halt) + FF(1)) * (new_term.main_internal_return_ptr_shift - new_term.main_internal_return_ptr)); tmp *= scaling_factor; - std::get<101>(evals) += typename Accumulator::View(tmp); + std::get<100>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<102, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<101, ContainerOverSubrelations>; auto tmp = ((new_term.main_sel_op_internal_call + new_term.main_sel_op_internal_return) * (new_term.main_space_id - FF(255))); tmp *= scaling_factor; - std::get<102>(evals) += typename Accumulator::View(tmp); + std::get<101>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<103, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<102, ContainerOverSubrelations>; auto tmp = (((((((((new_term.main_sel_op_fdiv + ((((((((((new_term.main_sel_op_add + new_term.main_sel_op_sub) + new_term.main_sel_op_mul) + @@ -930,56 +881,56 @@ template class mainImpl { (new_term.main_sel_op_calldata_copy + new_term.main_sel_op_external_return)) * (new_term.main_call_ptr - new_term.main_space_id)); tmp *= scaling_factor; - std::get<103>(evals) += typename Accumulator::View(tmp); + std::get<102>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<104, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<103, ContainerOverSubrelations>; auto tmp = 
((new_term.main_sel_op_cmov + new_term.main_sel_op_jumpi) * (((new_term.main_id * new_term.main_inv) - FF(1)) + new_term.main_id_zero)); tmp *= scaling_factor; - std::get<104>(evals) += typename Accumulator::View(tmp); + std::get<103>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<105, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<104, ContainerOverSubrelations>; auto tmp = (((new_term.main_sel_op_cmov + new_term.main_sel_op_jumpi) * new_term.main_id_zero) * (-new_term.main_inv + FF(1))); tmp *= scaling_factor; - std::get<105>(evals) += typename Accumulator::View(tmp); + std::get<104>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<106, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<105, ContainerOverSubrelations>; auto tmp = (new_term.main_sel_mov_ia_to_ic - (new_term.main_sel_op_mov + (new_term.main_sel_op_cmov * (-new_term.main_id_zero + FF(1))))); tmp *= scaling_factor; - std::get<106>(evals) += typename Accumulator::View(tmp); + std::get<105>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<107, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<106, ContainerOverSubrelations>; auto tmp = (new_term.main_sel_mov_ib_to_ic - (new_term.main_sel_op_cmov * new_term.main_id_zero)); tmp *= scaling_factor; - std::get<107>(evals) += typename Accumulator::View(tmp); + std::get<106>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<108, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<107, ContainerOverSubrelations>; auto tmp = (new_term.main_sel_mov_ia_to_ic * (new_term.main_ia - new_term.main_ic)); tmp *= scaling_factor; - std::get<108>(evals) += typename Accumulator::View(tmp); + std::get<107>(evals) += typename Accumulator::View(tmp); } { - 
using Accumulator = typename std::tuple_element_t<109, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<108, ContainerOverSubrelations>; auto tmp = (new_term.main_sel_mov_ib_to_ic * (new_term.main_ib - new_term.main_ic)); tmp *= scaling_factor; - std::get<109>(evals) += typename Accumulator::View(tmp); + std::get<108>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<110, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<109, ContainerOverSubrelations>; auto tmp = ((new_term.main_sel_op_mov + new_term.main_sel_op_cmov) * (new_term.main_r_in_tag - new_term.main_w_in_tag)); tmp *= scaling_factor; - std::get<110>(evals) += typename Accumulator::View(tmp); + std::get<109>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<111, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<110, ContainerOverSubrelations>; auto tmp = (new_term.main_sel_alu - ((((((((((((new_term.main_sel_op_add + new_term.main_sel_op_sub) + new_term.main_sel_op_mul) + new_term.main_sel_op_div) + @@ -993,10 +944,10 @@ template class mainImpl { (-new_term.main_tag_err + FF(1))) * (-new_term.main_op_err + FF(1)))); tmp *= scaling_factor; - std::get<111>(evals) += typename Accumulator::View(tmp); + std::get<110>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<112, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<111, ContainerOverSubrelations>; auto tmp = ((((((((((new_term.main_sel_op_add + new_term.main_sel_op_sub) + new_term.main_sel_op_mul) + new_term.main_sel_op_div) + new_term.main_sel_op_not) + @@ -1007,229 +958,229 @@ template class mainImpl { new_term.main_sel_op_shl) * (new_term.main_alu_in_tag - new_term.main_r_in_tag)); tmp *= scaling_factor; + std::get<111>(evals) += typename Accumulator::View(tmp); + } + { + using Accumulator 
= typename std::tuple_element_t<112, ContainerOverSubrelations>; + auto tmp = (new_term.main_sel_op_cast * (new_term.main_alu_in_tag - new_term.main_w_in_tag)); + tmp *= scaling_factor; std::get<112>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<113, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_cast * (new_term.main_alu_in_tag - new_term.main_w_in_tag)); + auto tmp = (new_term.main_sel_op_l2gasleft * (new_term.main_ia - new_term.main_l2_gas_remaining_shift)); tmp *= scaling_factor; std::get<113>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<114, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_l2gasleft * (new_term.main_ia - new_term.main_l2_gas_remaining_shift)); + auto tmp = (new_term.main_sel_op_dagasleft * (new_term.main_ia - new_term.main_da_gas_remaining_shift)); tmp *= scaling_factor; std::get<114>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<115, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_dagasleft * (new_term.main_ia - new_term.main_da_gas_remaining_shift)); + auto tmp = (new_term.main_sel_op_address * (new_term.kernel_kernel_in_offset - FF(1))); tmp *= scaling_factor; std::get<115>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<116, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_address * (new_term.kernel_kernel_in_offset - FF(1))); + auto tmp = (new_term.main_sel_op_storage_address * (new_term.kernel_kernel_in_offset - FF(1))); tmp *= scaling_factor; std::get<116>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<117, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_storage_address * (new_term.kernel_kernel_in_offset - FF(1))); + auto tmp = (new_term.main_sel_op_sender * new_term.kernel_kernel_in_offset); tmp *= scaling_factor; 
std::get<117>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<118, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_sender * new_term.kernel_kernel_in_offset); + auto tmp = (new_term.main_sel_op_function_selector * (new_term.kernel_kernel_in_offset - FF(2))); tmp *= scaling_factor; std::get<118>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<119, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_function_selector * (new_term.kernel_kernel_in_offset - FF(2))); + auto tmp = (new_term.main_sel_op_transaction_fee * (new_term.kernel_kernel_in_offset - FF(41))); tmp *= scaling_factor; std::get<119>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<120, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_transaction_fee * (new_term.kernel_kernel_in_offset - FF(41))); + auto tmp = (new_term.main_sel_op_chain_id * (new_term.kernel_kernel_in_offset - FF(29))); tmp *= scaling_factor; std::get<120>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<121, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_chain_id * (new_term.kernel_kernel_in_offset - FF(29))); + auto tmp = (new_term.main_sel_op_version * (new_term.kernel_kernel_in_offset - FF(30))); tmp *= scaling_factor; std::get<121>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<122, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_version * (new_term.kernel_kernel_in_offset - FF(30))); + auto tmp = (new_term.main_sel_op_block_number * (new_term.kernel_kernel_in_offset - FF(31))); tmp *= scaling_factor; std::get<122>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<123, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_block_number * 
(new_term.kernel_kernel_in_offset - FF(31))); + auto tmp = (new_term.main_sel_op_timestamp * (new_term.kernel_kernel_in_offset - FF(33))); tmp *= scaling_factor; std::get<123>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<124, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_timestamp * (new_term.kernel_kernel_in_offset - FF(33))); + auto tmp = (new_term.main_sel_op_coinbase * (new_term.kernel_kernel_in_offset - FF(34))); tmp *= scaling_factor; std::get<124>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<125, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_coinbase * (new_term.kernel_kernel_in_offset - FF(34))); + auto tmp = (new_term.main_sel_op_fee_per_da_gas * (new_term.kernel_kernel_in_offset - FF(36))); tmp *= scaling_factor; std::get<125>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<126, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_fee_per_da_gas * (new_term.kernel_kernel_in_offset - FF(36))); + auto tmp = (new_term.main_sel_op_fee_per_l2_gas * (new_term.kernel_kernel_in_offset - FF(37))); tmp *= scaling_factor; std::get<126>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<127, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_fee_per_l2_gas * (new_term.kernel_kernel_in_offset - FF(37))); + auto tmp = (new_term.main_sel_op_note_hash_exists * + (new_term.kernel_kernel_out_offset - (new_term.kernel_note_hash_exist_write_offset + FF(0)))); tmp *= scaling_factor; std::get<127>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<128, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_note_hash_exists * - (new_term.kernel_kernel_out_offset - (new_term.kernel_note_hash_exist_write_offset + FF(0)))); + auto tmp = (new_term.main_sel_first * 
new_term.kernel_note_hash_exist_write_offset); tmp *= scaling_factor; std::get<128>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<129, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_first * new_term.kernel_note_hash_exist_write_offset); + auto tmp = (new_term.main_sel_op_emit_note_hash * + (new_term.kernel_kernel_out_offset - (new_term.kernel_emit_note_hash_write_offset + FF(128)))); tmp *= scaling_factor; std::get<129>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<130, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_emit_note_hash * - (new_term.kernel_kernel_out_offset - (new_term.kernel_emit_note_hash_write_offset + FF(128)))); + auto tmp = (new_term.main_sel_first * new_term.kernel_emit_note_hash_write_offset); tmp *= scaling_factor; std::get<130>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<131, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_first * new_term.kernel_emit_note_hash_write_offset); - tmp *= scaling_factor; - std::get<131>(evals) += typename Accumulator::View(tmp); - } - { - using Accumulator = typename std::tuple_element_t<132, ContainerOverSubrelations>; auto tmp = (new_term.main_sel_op_nullifier_exists * (new_term.kernel_kernel_out_offset - ((new_term.main_ib * (new_term.kernel_nullifier_exists_write_offset + FF(16))) + ((-new_term.main_ib + FF(1)) * (new_term.kernel_nullifier_non_exists_write_offset + FF(32)))))); tmp *= scaling_factor; + std::get<131>(evals) += typename Accumulator::View(tmp); + } + { + using Accumulator = typename std::tuple_element_t<132, ContainerOverSubrelations>; + auto tmp = (new_term.main_sel_first * new_term.kernel_nullifier_exists_write_offset); + tmp *= scaling_factor; std::get<132>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<133, ContainerOverSubrelations>; - auto tmp = 
(new_term.main_sel_first * new_term.kernel_nullifier_exists_write_offset); + auto tmp = (new_term.main_sel_first * new_term.kernel_nullifier_non_exists_write_offset); tmp *= scaling_factor; std::get<133>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<134, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_first * new_term.kernel_nullifier_non_exists_write_offset); + auto tmp = (new_term.main_sel_op_emit_nullifier * + (new_term.kernel_kernel_out_offset - (new_term.kernel_emit_nullifier_write_offset + FF(144)))); tmp *= scaling_factor; std::get<134>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<135, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_emit_nullifier * - (new_term.kernel_kernel_out_offset - (new_term.kernel_emit_nullifier_write_offset + FF(144)))); + auto tmp = (new_term.main_sel_first * new_term.kernel_emit_nullifier_write_offset); tmp *= scaling_factor; std::get<135>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<136, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_first * new_term.kernel_emit_nullifier_write_offset); + auto tmp = + (new_term.main_sel_op_l1_to_l2_msg_exists * + (new_term.kernel_kernel_out_offset - (new_term.kernel_l1_to_l2_msg_exists_write_offset + FF(48)))); tmp *= scaling_factor; std::get<136>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<137, ContainerOverSubrelations>; - auto tmp = - (new_term.main_sel_op_l1_to_l2_msg_exists * - (new_term.kernel_kernel_out_offset - (new_term.kernel_l1_to_l2_msg_exists_write_offset + FF(48)))); + auto tmp = (new_term.main_sel_first * new_term.kernel_l1_to_l2_msg_exists_write_offset); tmp *= scaling_factor; std::get<137>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<138, ContainerOverSubrelations>; - auto tmp = 
(new_term.main_sel_first * new_term.kernel_l1_to_l2_msg_exists_write_offset); + auto tmp = + (new_term.main_sel_op_emit_unencrypted_log * + (new_term.kernel_kernel_out_offset - (new_term.kernel_emit_unencrypted_log_write_offset + FF(162)))); tmp *= scaling_factor; std::get<138>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<139, ContainerOverSubrelations>; - auto tmp = - (new_term.main_sel_op_emit_unencrypted_log * - (new_term.kernel_kernel_out_offset - (new_term.kernel_emit_unencrypted_log_write_offset + FF(162)))); + auto tmp = (new_term.main_sel_first * new_term.kernel_emit_unencrypted_log_write_offset); tmp *= scaling_factor; std::get<139>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<140, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_first * new_term.kernel_emit_unencrypted_log_write_offset); + auto tmp = + (new_term.main_sel_op_emit_l2_to_l1_msg * + (new_term.kernel_kernel_out_offset - (new_term.kernel_emit_l2_to_l1_msg_write_offset + FF(160)))); tmp *= scaling_factor; std::get<140>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<141, ContainerOverSubrelations>; - auto tmp = - (new_term.main_sel_op_emit_l2_to_l1_msg * - (new_term.kernel_kernel_out_offset - (new_term.kernel_emit_l2_to_l1_msg_write_offset + FF(160)))); + auto tmp = (new_term.main_sel_first * new_term.kernel_emit_l2_to_l1_msg_write_offset); tmp *= scaling_factor; std::get<141>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<142, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_first * new_term.kernel_emit_l2_to_l1_msg_write_offset); + auto tmp = (new_term.main_sel_op_sload * + (new_term.kernel_kernel_out_offset - (new_term.kernel_sload_write_offset + FF(96)))); tmp *= scaling_factor; std::get<142>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename 
std::tuple_element_t<143, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_sload * - (new_term.kernel_kernel_out_offset - (new_term.kernel_sload_write_offset + FF(96)))); + auto tmp = (new_term.main_sel_first * new_term.kernel_sload_write_offset); tmp *= scaling_factor; std::get<143>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<144, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_first * new_term.kernel_sload_write_offset); + auto tmp = (new_term.main_sel_op_sstore * + (new_term.kernel_kernel_out_offset - (new_term.kernel_sstore_write_offset + FF(64)))); tmp *= scaling_factor; std::get<144>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<145, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_sstore * - (new_term.kernel_kernel_out_offset - (new_term.kernel_sstore_write_offset + FF(64)))); + auto tmp = (new_term.main_sel_first * new_term.kernel_sstore_write_offset); tmp *= scaling_factor; std::get<145>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<146, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_first * new_term.kernel_sstore_write_offset); - tmp *= scaling_factor; - std::get<146>(evals) += typename Accumulator::View(tmp); - } - { - using Accumulator = typename std::tuple_element_t<147, ContainerOverSubrelations>; auto tmp = (((((((new_term.main_sel_op_note_hash_exists + new_term.main_sel_op_emit_note_hash) + new_term.main_sel_op_nullifier_exists) + new_term.main_sel_op_emit_nullifier) + @@ -1238,28 +1189,28 @@ template class mainImpl { new_term.main_sel_op_emit_l2_to_l1_msg) * (new_term.kernel_side_effect_counter_shift - (new_term.kernel_side_effect_counter + FF(1)))); tmp *= scaling_factor; - std::get<147>(evals) += typename Accumulator::View(tmp); + std::get<146>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename 
std::tuple_element_t<148, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<147, ContainerOverSubrelations>; auto tmp = (new_term.main_sel_slice_gadget - ((new_term.main_sel_op_calldata_copy + new_term.main_sel_op_external_return) * (-new_term.main_tag_err + FF(1)))); tmp *= scaling_factor; - std::get<148>(evals) += typename Accumulator::View(tmp); + std::get<147>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<149, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<148, ContainerOverSubrelations>; auto tmp = (new_term.main_bin_op_id - (new_term.main_sel_op_or + (new_term.main_sel_op_xor * FF(2)))); tmp *= scaling_factor; - std::get<149>(evals) += typename Accumulator::View(tmp); + std::get<148>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<150, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<149, ContainerOverSubrelations>; auto tmp = (new_term.main_sel_bin - ((new_term.main_sel_op_and + new_term.main_sel_op_or) + new_term.main_sel_op_xor)); tmp *= scaling_factor; - std::get<150>(evals) += typename Accumulator::View(tmp); + std::get<149>(evals) += typename Accumulator::View(tmp); } } }; @@ -1305,73 +1256,73 @@ template class main : public Relation> { return "RETURN_POINTER_INCREMENT"; case 94: return "RETURN_POINTER_DECREMENT"; - case 100: + case 99: return "PC_INCREMENT"; - case 101: + case 100: return "INTERNAL_RETURN_POINTER_CONSISTENCY"; - case 102: + case 101: return "SPACE_ID_INTERNAL"; - case 103: + case 102: return "SPACE_ID_STANDARD_OPCODES"; - case 104: + case 103: return "CMOV_CONDITION_RES_1"; - case 105: + case 104: return "CMOV_CONDITION_RES_2"; - case 108: + case 107: return "MOV_SAME_VALUE_A"; - case 109: + case 108: return "MOV_SAME_VALUE_B"; - case 110: + case 109: return "MOV_MAIN_SAME_TAG"; - case 114: + case 113: return "L2GASLEFT"; - case 115: + case 
114: return "DAGASLEFT"; - case 116: + case 115: return "ADDRESS_KERNEL"; - case 117: + case 116: return "STORAGE_ADDRESS_KERNEL"; - case 118: + case 117: return "SENDER_KERNEL"; - case 119: + case 118: return "FUNCTION_SELECTOR_KERNEL"; - case 120: + case 119: return "FEE_TRANSACTION_FEE_KERNEL"; - case 121: + case 120: return "CHAIN_ID_KERNEL"; - case 122: + case 121: return "VERSION_KERNEL"; - case 123: + case 122: return "BLOCK_NUMBER_KERNEL"; - case 124: + case 123: return "TIMESTAMP_KERNEL"; - case 125: + case 124: return "COINBASE_KERNEL"; - case 126: + case 125: return "FEE_DA_GAS_KERNEL"; - case 127: + case 126: return "FEE_L2_GAS_KERNEL"; - case 128: + case 127: return "NOTE_HASH_KERNEL_OUTPUT"; - case 130: + case 129: return "EMIT_NOTE_HASH_KERNEL_OUTPUT"; - case 132: + case 131: return "NULLIFIER_EXISTS_KERNEL_OUTPUT"; - case 135: + case 134: return "EMIT_NULLIFIER_KERNEL_OUTPUT"; - case 137: + case 136: return "L1_TO_L2_MSG_EXISTS_KERNEL_OUTPUT"; - case 139: + case 138: return "EMIT_UNENCRYPTED_LOG_KERNEL_OUTPUT"; - case 141: + case 140: return "EMIT_L2_TO_L1_MSGS_KERNEL_OUTPUT"; - case 143: + case 142: return "SLOAD_KERNEL_OUTPUT"; - case 145: + case 144: return "SSTORE_KERNEL_OUTPUT"; - case 149: + case 148: return "BIN_SEL_1"; - case 150: + case 149: return "BIN_SEL_2"; } return std::to_string(index); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/gas_trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/gas_trace.cpp index 55c04d355c9..fe0b1040c83 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/gas_trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/gas_trace.cpp @@ -38,7 +38,7 @@ uint32_t AvmGasTraceBuilder::get_da_gas_left() return gas_trace.back().remaining_da_gas; } -void AvmGasTraceBuilder::constrain_gas_lookup(uint32_t clk, OpCode opcode) +void AvmGasTraceBuilder::constrain_gas(uint32_t clk, OpCode opcode, [[maybe_unused]] uint32_t dyn_gas_multiplier) { // TODO: increase lookup counter for the opcode 
we are looking up into gas_opcode_lookup_counter[opcode]++; @@ -72,7 +72,7 @@ void AvmGasTraceBuilder::constrain_gas_for_external_call(uint32_t clk, const OpCode opcode = OpCode::CALL; // TODO: increase lookup counter for the opcode we are looking up into - // gas_opcode_lookup_counter[opcode]++; + gas_opcode_lookup_counter[opcode]++; // Get the gas prices for this opcode const auto& GAS_COST_TABLE = FixedGasTable::get(); @@ -87,10 +87,8 @@ void AvmGasTraceBuilder::constrain_gas_for_external_call(uint32_t clk, GasTraceEntry entry = { .clk = clk, .opcode = OpCode::CALL, - .l2_gas_cost = 0, // We need 0 in this case because we do not activate the gas_cost_active selector to satisfy - // #[L2_GAS_INACTIVE]. - .da_gas_cost = 0, // We need 0 in this case because we do not activate the gas_cost_active selector to satisfy - // #[DA_GAS_INACTIVE]. + .l2_gas_cost = opcode_l2_gas_cost, + .da_gas_cost = opcode_da_gas_cost, .remaining_l2_gas = remaining_l2_gas, .remaining_da_gas = remaining_da_gas, }; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/gas_trace.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/gas_trace.hpp index 8653b93b18d..88033a75416 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/gas_trace.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/gas_trace.hpp @@ -27,7 +27,7 @@ class AvmGasTraceBuilder { void reset(); std::vector finalize(); - void constrain_gas_lookup(uint32_t clk, OpCode opcode); + void constrain_gas(uint32_t clk, OpCode opcode, uint32_t dyn_gas_multiplier = 0); void constrain_gas_for_external_call(uint32_t clk, uint32_t nested_l2_gas_cost, uint32_t nested_da_gas_cost); void set_initial_gas(uint32_t l2_gas, uint32_t da_gas); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.cpp index 13ab95d3b9c..5b2a496f33d 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.cpp +++ 
b/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.cpp @@ -85,7 +85,6 @@ void log_avm_trace([[maybe_unused]] std::vector const& trace, info("diff_lo: ", trace.at(i).mem_diff_lo); info("=======GAS ACCOUNTING================================================================"); - info("opcode active: ", trace.at(i).main_sel_mem_op_activate_gas); info("l2_gas_remaining: ", trace.at(i).main_l2_gas_remaining); info("da_gas_remaining: ", trace.at(i).main_da_gas_remaining); info("l2_gas_op_cost: ", trace.at(i).main_l2_gas_op_cost); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/mem_trace.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/mem_trace.hpp index 10124892b7c..0bdd8cc9ed4 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/mem_trace.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/mem_trace.hpp @@ -108,6 +108,9 @@ class AvmMemTraceBuilder { uint32_t direct_dst_offset); std::vector read_return_opcode(uint32_t clk, uint8_t space_id, uint32_t direct_ret_offset, uint32_t ret_size); + // DO NOT USE FOR REAL OPERATIONS + FF unconstrained_read(uint8_t space_id, uint32_t addr) { return memory[space_id][addr].val; } + private: std::vector mem_trace; // Entries will be sorted by m_clk, m_sub_clk after finalize(). 
diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp index 4d83635b8a8..ab3b4d2d550 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp @@ -13,6 +13,7 @@ #include #include +#include "barretenberg/common/assert.hpp" #include "barretenberg/common/throw_or_abort.hpp" #include "barretenberg/crypto/pedersen_commitment/pedersen.hpp" #include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" @@ -289,167 +290,46 @@ AvmTraceBuilder::MemOp AvmTraceBuilder::constrained_write_to_memory(uint8_t spac .val = value }; } -// TODO(ilyas: #6383): Temporary way to bulk read slices -template -uint32_t AvmTraceBuilder::read_slice_to_memory(uint8_t space_id, - uint32_t clk, - AddressWithMode addr, - AvmMemoryTag r_tag, - AvmMemoryTag w_tag, - FF internal_return_ptr, - size_t slice_len, - std::vector& slice) +FF AvmTraceBuilder::unconstrained_read_from_memory(AddressWithMode addr) { - // If the mem_op is indirect, it goes into register A - bool is_indirect = addr.mode == AddressingMode::INDIRECT; - auto src_offset = addr.offset; - // We have 4 registers that we are able to use to read from memory within a single main trace row - auto register_order = std::array{ IntermRegister::IA, IntermRegister::IB, IntermRegister::IC, IntermRegister::ID }; - // If the slice size isnt a multiple of 4, we still need an extra row to write the remainder - uint32_t const num_main_rows = static_cast(slice_len) / 4 + static_cast(slice_len % 4 != 0); - for (uint32_t i = 0; i < num_main_rows; i++) { - Row main_row{ - .main_clk = clk + i, - .main_internal_return_ptr = FF(internal_return_ptr), - .main_pc = FF(pc), - .main_r_in_tag = FF(static_cast(r_tag)), - .main_w_in_tag = FF(static_cast(w_tag)), - }; - // Write 4 values to memory in each_row - for (uint32_t j = 0; j < 4; j++) { - auto offset = i * 4 + j; - // If we exceed the slice size, we break - if 
(offset >= slice_len) { - break; - } - MemOp mem_read; - if (is_indirect) { - // If the first address is indirect we read it into register A, this can only happen once per slice read - mem_read = constrained_read_from_memory(space_id, clk + i, addr, r_tag, w_tag, IntermRegister::IA); - // Set this to false for the rest of the reads - is_indirect = false; - src_offset = mem_read.direct_address; - } else { - auto mem_load = mem_trace_builder.read_and_load_from_memory( - space_id, clk + i, register_order[j], src_offset + offset, r_tag, w_tag); - mem_read = MemOp{ - .is_indirect = false, - .indirect_address = 0, - .direct_address = src_offset + offset, - .tag = r_tag, - .tag_match = mem_load.tag_match, - .val = MEM(mem_load.val), - }; - } - slice.emplace_back(MEM(mem_read.val)); - // This looks a bit gross, but it is fine for now. - if (j == 0) { - main_row.main_ia = slice.at(offset); - main_row.main_ind_addr_a = FF(mem_read.indirect_address); - main_row.main_sel_resolve_ind_addr_a = FF(static_cast(mem_read.is_indirect)); - main_row.main_mem_addr_a = FF(mem_read.direct_address); - main_row.main_sel_mem_op_a = FF(1); - main_row.main_tag_err = FF(static_cast(!mem_read.tag_match)); - } else if (j == 1) { - main_row.main_ib = slice.at(offset); - main_row.main_mem_addr_b = FF(mem_read.direct_address); - main_row.main_sel_mem_op_b = FF(1); - main_row.main_tag_err = FF(static_cast(!mem_read.tag_match)); - } else if (j == 2) { - main_row.main_ic = slice.at(offset); - main_row.main_mem_addr_c = FF(mem_read.direct_address); - main_row.main_sel_mem_op_c = FF(1); - main_row.main_tag_err = FF(static_cast(!mem_read.tag_match)); - } else { - main_row.main_id = slice.at(offset); - main_row.main_mem_addr_d = FF(mem_read.direct_address); - main_row.main_sel_mem_op_d = FF(1); - main_row.main_tag_err = FF(static_cast(!mem_read.tag_match)); - } - } - main_trace.emplace_back(main_row); + auto offset = addr.offset; + if (addr.mode == AddressingMode::INDIRECT) { + offset = 
static_cast(mem_trace_builder.unconstrained_read(call_ptr, offset)); } - return num_main_rows; + return mem_trace_builder.unconstrained_read(call_ptr, offset); } -// TODO(ilyas: #6383): Temporary way to bulk write slices -uint32_t AvmTraceBuilder::write_slice_to_memory(uint8_t space_id, - uint32_t clk, - AddressWithMode addr, - AvmMemoryTag r_tag, - AvmMemoryTag w_tag, - FF internal_return_ptr, - std::vector const& slice) +void AvmTraceBuilder::write_to_memory(AddressWithMode addr, FF val, AvmMemoryTag w_tag) { - bool is_indirect = addr.mode == AddressingMode::INDIRECT; - auto dst_offset = addr.offset; - // We have 4 registers that we are able to use to write to memory within a single main trace row - auto register_order = std::array{ IntermRegister::IA, IntermRegister::IB, IntermRegister::IC, IntermRegister::ID }; - // If the slice size isnt a multiple of 4, we still need an extra row to write the remainder - uint32_t const num_main_rows = - static_cast(slice.size()) / 4 + static_cast(slice.size() % 4 != 0); - for (uint32_t i = 0; i < num_main_rows; i++) { - Row main_row{ - .main_clk = clk + i, - .main_internal_return_ptr = FF(internal_return_ptr), - .main_pc = FF(pc), - .main_r_in_tag = FF(static_cast(r_tag)), - .main_w_in_tag = FF(static_cast(w_tag)), - }; - // Write 4 values to memory in each_row - for (uint32_t j = 0; j < 4; j++) { - auto offset = i * 4 + j; - // If we exceed the slice size, we break - if (offset >= slice.size()) { - break; - } - MemOp mem_write; - if (is_indirect) { - mem_write = constrained_write_to_memory( - space_id, clk + i, addr, slice.at(offset), r_tag, w_tag, IntermRegister::IA); - // Ensure futures calls are direct - is_indirect = false; - dst_offset = mem_write.direct_address; - } else { - mem_trace_builder.write_into_memory( - space_id, clk + i, register_order[j], dst_offset + offset, slice.at(offset), r_tag, w_tag); - mem_write = MemOp{ - .is_indirect = false, - .indirect_address = 0, - .direct_address = dst_offset + offset, - .tag 
= w_tag, - .tag_match = true, - .val = slice.at(offset), - }; - } - // This looks a bit gross, but it is fine for now. - if (j == 0) { - main_row.main_ia = slice.at(offset); - main_row.main_ind_addr_a = FF(mem_write.indirect_address); - main_row.main_sel_resolve_ind_addr_a = FF(static_cast(mem_write.is_indirect)); - main_row.main_mem_addr_a = FF(mem_write.direct_address); - main_row.main_sel_mem_op_a = FF(1); - main_row.main_rwa = FF(1); - } else if (j == 1) { - main_row.main_ib = slice.at(offset); - main_row.main_mem_addr_b = FF(mem_write.direct_address); - main_row.main_sel_mem_op_b = FF(1); - main_row.main_rwb = FF(1); - } else if (j == 2) { - main_row.main_ic = slice.at(offset); - main_row.main_mem_addr_c = FF(mem_write.direct_address); - main_row.main_sel_mem_op_c = FF(1); - main_row.main_rwc = FF(1); - } else { - main_row.main_id = slice.at(offset); - main_row.main_mem_addr_d = FF(mem_write.direct_address); - main_row.main_sel_mem_op_d = FF(1); - main_row.main_rwd = FF(1); - } - } - main_trace.emplace_back(main_row); + // op_set_internal increments the pc, so we need to store the current pc and then jump back to it + // to legaly reset the pc. + auto current_pc = pc; + op_set_internal(static_cast(addr.mode), val, addr.offset, w_tag); + op_jump(current_pc); +} + +template +void AvmTraceBuilder::read_slice_from_memory(AddressWithMode addr, size_t slice_len, std::vector& slice) +{ + uint32_t base_addr = addr.offset; + if (addr.mode == AddressingMode::INDIRECT) { + base_addr = static_cast(mem_trace_builder.unconstrained_read(call_ptr, base_addr)); + } + + for (uint32_t i = 0; i < slice_len; i++) { + slice.push_back(static_cast(mem_trace_builder.unconstrained_read(call_ptr, base_addr + i))); + } +} + +template +void AvmTraceBuilder::write_slice_to_memory(AddressWithMode addr, AvmMemoryTag w_tag, const T& slice) +{ + auto base_addr = addr.mode == AddressingMode::INDIRECT + ? 
static_cast(mem_trace_builder.unconstrained_read(call_ptr, addr.offset)) + : addr.offset; + for (uint32_t i = 0; i < slice.size(); i++) { + write_to_memory(base_addr + i, slice[i], w_tag); } - return num_main_rows; } // Finalise Lookup Counts @@ -530,7 +410,7 @@ void AvmTraceBuilder::op_add( auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::ADD); + gas_trace_builder.constrain_gas(clk, OpCode::ADD); main_trace.push_back(Row{ .main_clk = clk, @@ -597,7 +477,7 @@ void AvmTraceBuilder::op_sub( auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::SUB); + gas_trace_builder.constrain_gas(clk, OpCode::SUB); main_trace.push_back(Row{ .main_clk = clk, @@ -664,7 +544,7 @@ void AvmTraceBuilder::op_mul( auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::MUL); + gas_trace_builder.constrain_gas(clk, OpCode::MUL); main_trace.push_back(Row{ .main_clk = clk, @@ -742,7 +622,7 @@ void AvmTraceBuilder::op_div( auto write_dst = constrained_write_to_memory(call_ptr, clk, resolved_dst, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::DIV); + gas_trace_builder.constrain_gas(clk, OpCode::DIV); main_trace.push_back(Row{ .main_clk = clk, @@ -822,7 +702,7 @@ void AvmTraceBuilder::op_fdiv(uint8_t indirect, uint32_t a_offset, uint32_t b_of call_ptr, clk, resolved_c, c, AvmMemoryTag::FF, AvmMemoryTag::FF, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::FDIV); + gas_trace_builder.constrain_gas(clk, OpCode::FDIV); main_trace.push_back(Row{ .main_clk = clk, @@ -892,7 +772,7 @@ void 
AvmTraceBuilder::op_eq( constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, AvmMemoryTag::U8, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::EQ); + gas_trace_builder.constrain_gas(clk, OpCode::EQ); main_trace.push_back(Row{ .main_clk = clk, @@ -944,7 +824,7 @@ void AvmTraceBuilder::op_lt( constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, AvmMemoryTag::U8, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::LT); + gas_trace_builder.constrain_gas(clk, OpCode::LT); main_trace.push_back(Row{ .main_clk = clk, @@ -997,7 +877,7 @@ void AvmTraceBuilder::op_lte( constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, AvmMemoryTag::U8, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::LTE); + gas_trace_builder.constrain_gas(clk, OpCode::LTE); main_trace.push_back(Row{ .main_clk = clk, @@ -1053,7 +933,7 @@ void AvmTraceBuilder::op_and( auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::AND); + gas_trace_builder.constrain_gas(clk, OpCode::AND); main_trace.push_back(Row{ .main_clk = clk, @@ -1105,7 +985,7 @@ void AvmTraceBuilder::op_or( auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::OR); + gas_trace_builder.constrain_gas(clk, OpCode::OR); main_trace.push_back(Row{ .main_clk = clk, @@ -1158,7 +1038,7 @@ void AvmTraceBuilder::op_xor( auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::XOR); + gas_trace_builder.constrain_gas(clk, OpCode::XOR); main_trace.push_back(Row{ .main_clk = clk, @@ -1221,7 
+1101,7 @@ void AvmTraceBuilder::op_not(uint8_t indirect, uint32_t a_offset, uint32_t dst_o auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::NOT); + gas_trace_builder.constrain_gas(clk, OpCode::NOT); main_trace.push_back(Row{ .main_clk = clk, @@ -1267,7 +1147,7 @@ void AvmTraceBuilder::op_shl( // Write into memory value c from intermediate register ic. auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::SHL); + gas_trace_builder.constrain_gas(clk, OpCode::SHL); main_trace.push_back(Row{ .main_clk = clk, @@ -1319,7 +1199,7 @@ void AvmTraceBuilder::op_shr( // Write into memory value c from intermediate register ic. auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, c, in_tag, in_tag, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::SHR); + gas_trace_builder.constrain_gas(clk, OpCode::SHR); main_trace.push_back(Row{ .main_clk = clk, @@ -1400,7 +1280,7 @@ void AvmTraceBuilder::op_cast(uint8_t indirect, uint32_t a_offset, uint32_t dst_ mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IC, direct_dst_offset, c, memEntry.tag, dst_tag); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::CAST); + gas_trace_builder.constrain_gas(clk, OpCode::CAST); main_trace.push_back(Row{ .main_clk = clk, @@ -1478,7 +1358,7 @@ void AvmTraceBuilder::op_address(uint8_t indirect, uint32_t dst_offset) row.main_sel_op_address = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(static_cast(row.main_clk), OpCode::ADDRESS); + gas_trace_builder.constrain_gas(static_cast(row.main_clk), OpCode::ADDRESS); main_trace.push_back(row); } @@ -1490,7 +1370,7 @@ void AvmTraceBuilder::op_storage_address(uint8_t 
indirect, uint32_t dst_offset) row.main_sel_op_storage_address = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(static_cast(row.main_clk), OpCode::STORAGEADDRESS); + gas_trace_builder.constrain_gas(static_cast(row.main_clk), OpCode::STORAGEADDRESS); main_trace.push_back(row); } @@ -1502,7 +1382,7 @@ void AvmTraceBuilder::op_sender(uint8_t indirect, uint32_t dst_offset) row.main_sel_op_sender = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(static_cast(row.main_clk), OpCode::SENDER); + gas_trace_builder.constrain_gas(static_cast(row.main_clk), OpCode::SENDER); main_trace.push_back(row); } @@ -1515,7 +1395,7 @@ void AvmTraceBuilder::op_function_selector(uint8_t indirect, uint32_t dst_offset row.main_sel_op_function_selector = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(static_cast(row.main_clk), OpCode::FUNCTIONSELECTOR); + gas_trace_builder.constrain_gas(static_cast(row.main_clk), OpCode::FUNCTIONSELECTOR); main_trace.push_back(row); } @@ -1527,7 +1407,7 @@ void AvmTraceBuilder::op_transaction_fee(uint8_t indirect, uint32_t dst_offset) row.main_sel_op_transaction_fee = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(static_cast(row.main_clk), OpCode::TRANSACTIONFEE); + gas_trace_builder.constrain_gas(static_cast(row.main_clk), OpCode::TRANSACTIONFEE); main_trace.push_back(row); } @@ -1543,7 +1423,7 @@ void AvmTraceBuilder::op_chain_id(uint8_t indirect, uint32_t dst_offset) row.main_sel_op_chain_id = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(static_cast(row.main_clk), OpCode::CHAINID); + gas_trace_builder.constrain_gas(static_cast(row.main_clk), OpCode::CHAINID); main_trace.push_back(row); } @@ -1555,7 +1435,7 @@ void AvmTraceBuilder::op_version(uint8_t indirect, uint32_t dst_offset) row.main_sel_op_version = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(static_cast(row.main_clk), OpCode::VERSION); + 
gas_trace_builder.constrain_gas(static_cast(row.main_clk), OpCode::VERSION); main_trace.push_back(row); } @@ -1567,7 +1447,7 @@ void AvmTraceBuilder::op_block_number(uint8_t indirect, uint32_t dst_offset) row.main_sel_op_block_number = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(static_cast(row.main_clk), OpCode::BLOCKNUMBER); + gas_trace_builder.constrain_gas(static_cast(row.main_clk), OpCode::BLOCKNUMBER); main_trace.push_back(row); } @@ -1579,7 +1459,7 @@ void AvmTraceBuilder::op_timestamp(uint8_t indirect, uint32_t dst_offset) row.main_sel_op_timestamp = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(static_cast(row.main_clk), OpCode::TIMESTAMP); + gas_trace_builder.constrain_gas(static_cast(row.main_clk), OpCode::TIMESTAMP); main_trace.push_back(row); } @@ -1591,7 +1471,7 @@ void AvmTraceBuilder::op_coinbase(uint8_t indirect, uint32_t dst_offset) row.main_sel_op_coinbase = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(static_cast(row.main_clk), OpCode::COINBASE); + gas_trace_builder.constrain_gas(static_cast(row.main_clk), OpCode::COINBASE); main_trace.push_back(row); } @@ -1603,7 +1483,7 @@ void AvmTraceBuilder::op_fee_per_l2_gas(uint8_t indirect, uint32_t dst_offset) row.main_sel_op_fee_per_l2_gas = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(static_cast(row.main_clk), OpCode::FEEPERL2GAS); + gas_trace_builder.constrain_gas(static_cast(row.main_clk), OpCode::FEEPERL2GAS); main_trace.push_back(row); } @@ -1615,7 +1495,7 @@ void AvmTraceBuilder::op_fee_per_da_gas(uint8_t indirect, uint32_t dst_offset) row.main_sel_op_fee_per_da_gas = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(static_cast(row.main_clk), OpCode::FEEPERDAGAS); + gas_trace_builder.constrain_gas(static_cast(row.main_clk), OpCode::FEEPERDAGAS); main_trace.push_back(row); } @@ -1670,7 +1550,7 @@ void AvmTraceBuilder::op_calldata_copy(uint8_t indirect, uint32_t cd_offset, uin } 
// Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::CALLDATACOPY); + gas_trace_builder.constrain_gas(clk, OpCode::CALLDATACOPY, copy_size); main_trace.push_back(Row{ .main_clk = clk, @@ -1704,7 +1584,7 @@ void AvmTraceBuilder::execute_gasleft(OpCode opcode, uint8_t indirect, uint32_t auto [resolved_dst] = unpack_indirects<1>(indirect, { dst_offset }); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, opcode); + gas_trace_builder.constrain_gas(clk, opcode); uint32_t gas_remaining = 0; @@ -1766,7 +1646,7 @@ void AvmTraceBuilder::op_jump(uint32_t jmp_dest) auto clk = static_cast(main_trace.size()) + 1; // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::JUMP); + gas_trace_builder.constrain_gas(clk, OpCode::JUMP); main_trace.push_back(Row{ .main_clk = clk, @@ -1815,7 +1695,7 @@ void AvmTraceBuilder::op_jumpi(uint8_t indirect, uint32_t jmp_dest, uint32_t con uint32_t next_pc = !id_zero ? jmp_dest : pc + 1; // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::JUMPI); + gas_trace_builder.constrain_gas(clk, OpCode::JUMPI); main_trace.push_back(Row{ .main_clk = clk, @@ -1867,7 +1747,7 @@ void AvmTraceBuilder::op_internal_call(uint32_t jmp_dest) AvmMemoryTag::U32); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::INTERNALCALL); + gas_trace_builder.constrain_gas(clk, OpCode::INTERNALCALL); main_trace.push_back(Row{ .main_clk = clk, @@ -1909,7 +1789,7 @@ void AvmTraceBuilder::op_internal_return() INTERNAL_CALL_SPACE_ID, clk, IntermRegister::IA, internal_return_ptr - 1, AvmMemoryTag::U32, AvmMemoryTag::U0); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::INTERNALRETURN); + gas_trace_builder.constrain_gas(clk, OpCode::INTERNALRETURN); main_trace.push_back(Row{ .main_clk = clk, @@ -1949,15 +1829,20 @@ void AvmTraceBuilder::op_internal_return() */ void AvmTraceBuilder::op_set(uint8_t indirect, uint128_t val, uint32_t dst_offset, 
AvmMemoryTag in_tag) { - auto const clk = static_cast(main_trace.size()) + 1; auto const val_ff = FF{ uint256_t::from_uint128(val) }; + op_set_internal(indirect, val_ff, dst_offset, in_tag); +} + +void AvmTraceBuilder::op_set_internal(uint8_t indirect, FF val_ff, uint32_t dst_offset, AvmMemoryTag in_tag) +{ + auto const clk = static_cast(main_trace.size()) + 1; auto [resolved_c] = unpack_indirects<1>(indirect, { dst_offset }); auto write_c = constrained_write_to_memory(call_ptr, clk, resolved_c, val_ff, AvmMemoryTag::U0, in_tag, IntermRegister::IC); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::SET); + gas_trace_builder.constrain_gas(clk, OpCode::SET); main_trace.push_back(Row{ .main_clk = clk, @@ -1968,7 +1853,6 @@ void AvmTraceBuilder::op_set(uint8_t indirect, uint128_t val, uint32_t dst_offse .main_mem_addr_c = FF(write_c.direct_address), .main_pc = pc++, .main_rwc = 1, - .main_sel_mem_op_activate_gas = 1, // TODO: remove in the long term .main_sel_mem_op_c = 1, .main_sel_resolve_ind_addr_c = FF(static_cast(write_c.is_indirect)), .main_tag_err = static_cast(!write_c.tag_match), @@ -2015,7 +1899,7 @@ void AvmTraceBuilder::op_mov(uint8_t indirect, uint32_t src_offset, uint32_t dst mem_trace_builder.write_into_memory(call_ptr, clk, IntermRegister::IC, direct_dst_offset, val, tag, tag); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::MOV); + gas_trace_builder.constrain_gas(clk, OpCode::MOV); main_trace.push_back(Row{ .main_clk = clk, @@ -2115,7 +1999,7 @@ void AvmTraceBuilder::op_cmov( FF const inv = !id_zero ? 
cond_mem_entry.val.invert() : 1; // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::CMOV); + gas_trace_builder.constrain_gas(clk, OpCode::CMOV); main_trace.push_back(Row{ .main_clk = clk, @@ -2367,6 +2251,7 @@ void AvmTraceBuilder::op_sload(uint8_t indirect, uint32_t slot_offset, uint32_t .main_sel_resolve_ind_addr_a = FF(static_cast(read_slot.is_indirect)), .main_tag_err = FF(static_cast(!read_slot.tag_match)), }); + gas_trace_builder.constrain_gas(clk, OpCode::SLOAD); clk++; AddressWithMode write_dst = resolved_dest; @@ -2399,7 +2284,9 @@ void AvmTraceBuilder::op_sload(uint8_t indirect, uint32_t slot_offset, uint32_t kernel_trace_builder.op_sload(clk, side_effect_counter, row.main_ib, row.main_ia); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::SLOAD); + // TODO: when/if we move this to its own gadget, and we have 1 row only, we should pass the size as + // n_multiplier here. + gas_trace_builder.constrain_gas(clk, OpCode::SLOAD); main_trace.push_back(row); @@ -2435,6 +2322,7 @@ void AvmTraceBuilder::op_sstore(uint8_t indirect, uint32_t src_offset, uint32_t .main_tag_err = FF(static_cast(!read_slot.tag_match)), .main_w_in_tag = FF(static_cast(AvmMemoryTag::FF)), }); + gas_trace_builder.constrain_gas(clk, OpCode::SSTORE); clk++; AddressWithMode read_src = resolved_src; @@ -2463,7 +2351,9 @@ void AvmTraceBuilder::op_sstore(uint8_t indirect, uint32_t src_offset, uint32_t kernel_trace_builder.op_sstore(clk, side_effect_counter, row.main_ib, row.main_ia); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::SSTORE); + // TODO: when/if we move this to its own gadget, and we have 1 row only, we should pass the size as + // n_multiplier here. 
+ gas_trace_builder.constrain_gas(clk, OpCode::SSTORE); main_trace.push_back(row); @@ -2487,7 +2377,7 @@ void AvmTraceBuilder::op_note_hash_exists(uint8_t indirect, uint32_t note_hash_o row.main_sel_op_note_hash_exists = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::NOTEHASHEXISTS); + gas_trace_builder.constrain_gas(clk, OpCode::NOTEHASHEXISTS); main_trace.push_back(row); @@ -2504,7 +2394,7 @@ void AvmTraceBuilder::op_emit_note_hash(uint8_t indirect, uint32_t note_hash_off row.main_sel_op_emit_note_hash = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::EMITNOTEHASH); + gas_trace_builder.constrain_gas(clk, OpCode::EMITNOTEHASH); main_trace.push_back(row); @@ -2523,7 +2413,7 @@ void AvmTraceBuilder::op_nullifier_exists(uint8_t indirect, uint32_t nullifier_o row.main_sel_op_nullifier_exists = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::NULLIFIEREXISTS); + gas_trace_builder.constrain_gas(clk, OpCode::NULLIFIEREXISTS); main_trace.push_back(row); @@ -2540,7 +2430,7 @@ void AvmTraceBuilder::op_emit_nullifier(uint8_t indirect, uint32_t nullifier_off row.main_sel_op_emit_nullifier = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::EMITNULLIFIER); + gas_trace_builder.constrain_gas(clk, OpCode::EMITNULLIFIER); main_trace.push_back(row); @@ -2558,7 +2448,7 @@ void AvmTraceBuilder::op_l1_to_l2_msg_exists(uint8_t indirect, uint32_t log_offs row.main_sel_op_l1_to_l2_msg_exists = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::L1TOL2MSGEXISTS); + gas_trace_builder.constrain_gas(clk, OpCode::L1TOL2MSGEXISTS); main_trace.push_back(row); @@ -2576,7 +2466,7 @@ void AvmTraceBuilder::op_get_contract_instance(uint8_t indirect, uint32_t addres bool tag_match = read_address.tag_match; // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::GETCONTRACTINSTANCE); + 
gas_trace_builder.constrain_gas(clk, OpCode::GETCONTRACTINSTANCE); main_trace.push_back(Row{ .main_clk = clk, @@ -2587,12 +2477,11 @@ void AvmTraceBuilder::op_get_contract_instance(uint8_t indirect, uint32_t addres .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_activate_gas = FF(1), // TODO: remove in the long term .main_sel_op_get_contract_instance = FF(1), .main_sel_resolve_ind_addr_a = FF(static_cast(read_address.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), }); - clk++; + // Read the contract instance ContractInstanceHint contract_instance = execution_hints.contract_instance_hints.at(read_address.val); @@ -2603,13 +2492,7 @@ void AvmTraceBuilder::op_get_contract_instance(uint8_t indirect, uint32_t addres contract_instance.contract_class_id, contract_instance.initialisation_hash, contract_instance.public_key_hash }; - write_slice_to_memory(call_ptr, - clk, - resolved_dst_offset, - AvmMemoryTag::U0, - AvmMemoryTag::FF, - internal_return_ptr, - contract_instance_vec); + write_slice_to_memory(resolved_dst_offset, AvmMemoryTag::FF, contract_instance_vec); debug("contract_instance cnt: ", side_effect_counter); side_effect_counter++; @@ -2626,13 +2509,19 @@ void AvmTraceBuilder::op_emit_unencrypted_log(uint8_t indirect, auto const clk = static_cast(main_trace.size()) + 1; // FIXME: read (and constrain) log_size_offset + auto [resolved_log_offset, resolved_log_size_offset] = + unpack_indirects<2>(indirect, { log_offset, log_size_offset }); + auto log_size = unconstrained_read_from_memory(resolved_log_size_offset); + // FIXME: we need to constrain the log_size_offset mem read (and tag check), not just one field! - Row row = create_kernel_output_opcode(indirect, clk, log_offset); + // FIXME: we shouldn't pass resolved_log_offset.offset; we should modify create_kernel_output_opcode to take an + // addresswithmode. 
+ Row row = create_kernel_output_opcode(indirect, clk, resolved_log_offset.offset); kernel_trace_builder.op_emit_unencrypted_log(clk, side_effect_counter, row.main_ia); row.main_sel_op_emit_unencrypted_log = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::EMITUNENCRYPTEDLOG); + gas_trace_builder.constrain_gas(clk, OpCode::EMITUNENCRYPTEDLOG, static_cast(log_size)); main_trace.push_back(row); @@ -2644,14 +2533,14 @@ void AvmTraceBuilder::op_emit_l2_to_l1_msg(uint8_t indirect, uint32_t recipient_ { auto const clk = static_cast(main_trace.size()) + 1; - // Note: unorthadox order - as seen in L2ToL1Message struct in TS + // Note: unorthodox order - as seen in L2ToL1Message struct in TS Row row = create_kernel_output_opcode_with_metadata( indirect, clk, content_offset, AvmMemoryTag::FF, recipient_offset, AvmMemoryTag::FF); kernel_trace_builder.op_emit_l2_to_l1_msg(clk, side_effect_counter, row.main_ia, row.main_ib); row.main_sel_op_emit_l2_to_l1_msg = FF(1); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::SENDL2TOL1MSG); + gas_trace_builder.constrain_gas(clk, OpCode::SENDL2TOL1MSG); main_trace.push_back(row); @@ -2731,7 +2620,7 @@ void AvmTraceBuilder::op_call(uint8_t indirect, .main_mem_addr_b = FF(read_gas_l2.direct_address + 1), .main_mem_addr_c = FF(read_addr.direct_address), .main_mem_addr_d = FF(read_args.direct_address), - .main_pc = FF(pc), + .main_pc = FF(pc++), .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), .main_sel_mem_op_a = FF(1), .main_sel_mem_op_b = FF(1), @@ -2743,23 +2632,15 @@ void AvmTraceBuilder::op_call(uint8_t indirect, .main_sel_resolve_ind_addr_d = FF(static_cast(read_args.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), }); - clk++; + // The return data hint is used for now, we check it has the same length as the ret_size ASSERT(hint.return_data.size() == ret_size); // Write the return data to memory - uint32_t num_rows = write_slice_to_memory( - call_ptr, clk, 
resolved_ret_offset, AvmMemoryTag::U0, AvmMemoryTag::FF, internal_return_ptr, hint.return_data); - clk += num_rows; + write_slice_to_memory(resolved_ret_offset, AvmMemoryTag::FF, hint.return_data); // Write the success flag to memory - write_slice_to_memory(call_ptr, - clk, - resolved_success_offset, - AvmMemoryTag::U0, - AvmMemoryTag::U8, - internal_return_ptr, - { hint.success }); + write_slice_to_memory(resolved_success_offset, AvmMemoryTag::U8, std::vector{ hint.success }); external_call_counter++; - pc++; + // Adjust the side_effect_counter to the value at the end of the external call. side_effect_counter = static_cast(hint.end_side_effect_counter); } @@ -2781,6 +2662,7 @@ void AvmTraceBuilder::op_call(uint8_t indirect, */ std::vector AvmTraceBuilder::op_return(uint8_t indirect, uint32_t ret_offset, uint32_t ret_size) { + // FIXME: this is wrong. E.g., we wouldn't be charging gas. if (ret_size == 0) { halt(); return {}; @@ -2796,7 +2678,6 @@ std::vector AvmTraceBuilder::op_return(uint8_t indirect, uint32_t ret_offset // The only memory operation performed from the main trace is a possible indirect load for resolving the // direct destination offset stored in main_mem_addr_c. // All the other memory operations are triggered by the slice gadget. 
- if (is_operand_indirect(indirect, 0)) { indirect_flag = true; auto ind_read = @@ -2811,7 +2692,7 @@ std::vector AvmTraceBuilder::op_return(uint8_t indirect, uint32_t ret_offset } // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::RETURN); + gas_trace_builder.constrain_gas(clk, OpCode::RETURN, ret_size); main_trace.push_back(Row{ .main_clk = clk, @@ -2862,7 +2743,7 @@ void AvmTraceBuilder::op_keccak(uint8_t indirect, unpack_indirects<3>(indirect, { output_offset, input_offset, input_size_offset }); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::KECCAK); + gas_trace_builder.constrain_gas(clk, OpCode::KECCAK); // Read the input length first auto input_length_read = constrained_read_from_memory( @@ -2888,16 +2769,7 @@ void AvmTraceBuilder::op_keccak(uint8_t indirect, std::vector input; input.reserve(uint32_t(input_length_read.val)); // Read the slice length from memory - uint32_t num_main_rows = read_slice_to_memory(call_ptr, - clk, - resolved_input_offset, - AvmMemoryTag::U8, - AvmMemoryTag::U8, - FF(internal_return_ptr), - uint32_t(input_length_read.val), - input); - - clk += num_main_rows; + read_slice_from_memory(resolved_input_offset, uint32_t(input_length_read.val), input); std::array result = keccak_trace_builder.keccak(keccak_op_clk, input, uint32_t(input_length_read.val)); // We convert the results to field elements here @@ -2906,8 +2778,7 @@ void AvmTraceBuilder::op_keccak(uint8_t indirect, ff_result.emplace_back(result[i]); } // Write the result to memory after - write_slice_to_memory( - call_ptr, clk, resolved_output_offset, AvmMemoryTag::U8, AvmMemoryTag::U8, FF(internal_return_ptr), ff_result); + write_slice_to_memory(resolved_output_offset, AvmMemoryTag::U8, ff_result); } /** @@ -2948,7 +2819,7 @@ void AvmTraceBuilder::op_poseidon2_permutation(uint8_t indirect, uint32_t input_ } // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::POSEIDON2); + 
gas_trace_builder.constrain_gas(clk, OpCode::POSEIDON2); // Main trace contains on operand values from the bytecode and resolved indirects main_trace.push_back(Row{ @@ -3063,7 +2934,7 @@ void AvmTraceBuilder::op_sha256(uint8_t indirect, auto [resolved_output_offset, resolved_input_offset, resolved_input_size_offset] = unpack_indirects<3>(indirect, { output_offset, input_offset, input_size_offset }); - gas_trace_builder.constrain_gas_lookup(clk, OpCode::SHA256); + gas_trace_builder.constrain_gas(clk, OpCode::SHA256); auto input_length_read = constrained_read_from_memory( call_ptr, clk, resolved_input_size_offset, AvmMemoryTag::U32, AvmMemoryTag::U0, IntermRegister::IB); @@ -3087,16 +2958,8 @@ void AvmTraceBuilder::op_sha256(uint8_t indirect, std::vector input; input.reserve(uint32_t(input_length_read.val)); - uint32_t num_main_rows = read_slice_to_memory(call_ptr, - clk, - resolved_input_offset, - AvmMemoryTag::U8, - AvmMemoryTag::U0, - FF(internal_return_ptr), - uint32_t(input_length_read.val), - input); - clk += num_main_rows; - // + read_slice_from_memory(resolved_input_offset, uint32_t(input_length_read.val), input); + std::array result = sha256_trace_builder.sha256(input, sha256_op_clk); std::vector ff_result; @@ -3104,12 +2967,11 @@ void AvmTraceBuilder::op_sha256(uint8_t indirect, ff_result.emplace_back(result[i]); } // Write the result to memory after - write_slice_to_memory( - call_ptr, clk, resolved_output_offset, AvmMemoryTag::U0, AvmMemoryTag::U8, FF(internal_return_ptr), ff_result); + write_slice_to_memory(resolved_output_offset, AvmMemoryTag::U8, ff_result); } /** - * @brief Pedersen Hash with direct or indirect memory access. + * @brief Pedersen Hash with direct or indirect memory access. * @param indirect byte encoding information about indirect/direct memory access. * @param gen_ctx_offset An index in memory pointing to where the u32 offset for the pedersen hash generators. 
* @param input_offset An index in memory pointing to the first FF value of the input array to be used in the next @@ -3124,13 +2986,19 @@ void AvmTraceBuilder::op_pedersen_hash(uint8_t indirect, auto clk = static_cast(main_trace.size()) + 1; auto [resolved_gen_ctx_offset, resolved_output_offset, resolved_input_offset, resolved_input_size_offset] = unpack_indirects<4>(indirect, { gen_ctx_offset, output_offset, input_offset, input_size_offset }); + auto input_read = constrained_read_from_memory( call_ptr, clk, resolved_input_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IA); + // auto input_size_read = constrained_read_from_memory( + // call_ptr, clk, resolved_input_size_offset, AvmMemoryTag::U32, AvmMemoryTag::U0, IntermRegister::IB); + // auto gen_ctx_read = constrained_read_from_memory( + // call_ptr, clk, resolved_gen_ctx_offset, AvmMemoryTag::U32, AvmMemoryTag::U0, IntermRegister::IC); + auto input_size_read = unconstrained_read_from_memory(resolved_input_size_offset); + auto gen_ctx_read = unconstrained_read_from_memory(resolved_gen_ctx_offset); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::PEDERSEN); + gas_trace_builder.constrain_gas(clk, OpCode::PEDERSEN); - uint32_t pedersen_clk = clk; // We read the input and output addresses in one row as they should contain FF elements main_trace.push_back(Row{ .main_clk = clk, @@ -3145,43 +3013,11 @@ void AvmTraceBuilder::op_pedersen_hash(uint8_t indirect, .main_sel_resolve_ind_addr_a = FF(static_cast(input_read.is_indirect)), .main_tag_err = FF(static_cast(!input_read.tag_match)), }); - clk++; - // We read the input size and gen_ctx addresses in one row as they should contain U32 elements - auto input_size_read = constrained_read_from_memory( - call_ptr, clk, resolved_input_size_offset, AvmMemoryTag::U32, AvmMemoryTag::U0, IntermRegister::IA); - auto gen_ctx_read = constrained_read_from_memory( - call_ptr, clk, resolved_gen_ctx_offset, AvmMemoryTag::U32, AvmMemoryTag::U0, 
IntermRegister::IB); - main_trace.push_back(Row{ - .main_clk = clk, - .main_ia = input_size_read.val, - .main_ib = gen_ctx_read.val, - .main_ind_addr_a = FF(input_size_read.indirect_address), - .main_ind_addr_b = FF(gen_ctx_read.indirect_address), - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(input_size_read.direct_address), - .main_mem_addr_b = FF(gen_ctx_read.direct_address), - .main_pc = FF(pc), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::U32)), - .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_b = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(input_size_read.is_indirect)), - .main_sel_resolve_ind_addr_b = FF(static_cast(gen_ctx_read.is_indirect)), - }); - clk++; std::vector inputs; - uint32_t num_main_rows = read_slice_to_memory(call_ptr, - clk, - resolved_input_offset, - AvmMemoryTag::FF, - AvmMemoryTag::FF, - FF(internal_return_ptr), - uint32_t(input_size_read.val), - inputs); - clk += num_main_rows; - FF output = pedersen_trace_builder.pedersen_hash(inputs, uint32_t(gen_ctx_read.val), pedersen_clk); - write_slice_to_memory( - call_ptr, clk, resolved_output_offset, AvmMemoryTag::FF, AvmMemoryTag::FF, FF(internal_return_ptr), { output }); + read_slice_from_memory(resolved_input_offset, uint32_t(input_size_read), inputs); + FF output = pedersen_trace_builder.pedersen_hash(inputs, uint32_t(gen_ctx_read), clk); + write_slice_to_memory(resolved_output_offset, AvmMemoryTag::FF, std::vector{ output }); } void AvmTraceBuilder::op_ec_add(uint8_t indirect, @@ -3209,117 +3045,40 @@ void AvmTraceBuilder::op_ec_add(uint8_t indirect, rhs_is_inf_offset, output_offset }); // Load lhs point - auto lhs_x_read = constrained_read_from_memory( - call_ptr, clk, resolved_lhs_x_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IA); - auto lhs_y_read = constrained_read_from_memory( - call_ptr, clk, resolved_lhs_y_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IB); + auto lhs_x_read = 
unconstrained_read_from_memory(resolved_lhs_x_offset); + auto lhs_y_read = unconstrained_read_from_memory(resolved_lhs_y_offset); // Load rhs point - auto rhs_x_read = constrained_read_from_memory( - call_ptr, clk, resolved_rhs_x_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IC); - auto rhs_y_read = constrained_read_from_memory( - call_ptr, clk, resolved_rhs_y_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::ID); - bool tag_match = lhs_x_read.tag_match && lhs_y_read.tag_match && rhs_x_read.tag_match && rhs_y_read.tag_match; - - // Save this clk time to line up with the gadget op. - auto ecc_clk = clk; - main_trace.push_back(Row{ - .main_clk = clk, - .main_ia = lhs_x_read.val, - .main_ib = lhs_y_read.val, - .main_ic = rhs_x_read.val, - .main_id = rhs_y_read.val, - .main_ind_addr_a = FF(lhs_x_read.indirect_address), - .main_ind_addr_b = FF(lhs_y_read.indirect_address), - .main_ind_addr_c = FF(rhs_x_read.indirect_address), - .main_ind_addr_d = FF(rhs_y_read.indirect_address), - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(lhs_x_read.direct_address), - .main_mem_addr_b = FF(lhs_y_read.direct_address), - .main_mem_addr_c = FF(rhs_x_read.direct_address), - .main_mem_addr_d = FF(rhs_y_read.direct_address), - .main_pc = FF(pc++), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), - .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_b = FF(1), - .main_sel_mem_op_c = FF(1), - .main_sel_mem_op_d = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(lhs_x_read.is_indirect)), - .main_sel_resolve_ind_addr_b = FF(static_cast(lhs_y_read.is_indirect)), - .main_sel_resolve_ind_addr_c = FF(static_cast(rhs_x_read.is_indirect)), - .main_sel_resolve_ind_addr_d = FF(static_cast(rhs_y_read.is_indirect)), - .main_tag_err = FF(static_cast(!tag_match)), - }); - clk++; + auto rhs_x_read = unconstrained_read_from_memory(resolved_rhs_x_offset); + auto rhs_y_read = unconstrained_read_from_memory(resolved_rhs_y_offset); // Load the 
infinite bools separately since they have a different memory tag - auto lhs_is_inf_read = constrained_read_from_memory( - call_ptr, clk, resolved_lhs_is_inf_offset, AvmMemoryTag::U8, AvmMemoryTag::U0, IntermRegister::IA); - auto rhs_is_inf_read = constrained_read_from_memory( - call_ptr, clk, resolved_rhs_is_inf_offset, AvmMemoryTag::U8, AvmMemoryTag::U0, IntermRegister::IB); - bool tag_match_inf = lhs_is_inf_read.tag_match && rhs_is_inf_read.tag_match; + auto lhs_is_inf_read = unconstrained_read_from_memory(resolved_lhs_is_inf_offset); + auto rhs_is_inf_read = unconstrained_read_from_memory(resolved_rhs_is_inf_offset); - main_trace.push_back(Row{ - .main_clk = clk, - .main_ia = lhs_is_inf_read.val, - .main_ib = rhs_is_inf_read.val, - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(lhs_is_inf_offset), - .main_mem_addr_b = FF(rhs_is_inf_offset), - .main_pc = FF(pc), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::U8)), - .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_b = FF(1), - .main_tag_err = FF(static_cast(!tag_match_inf)), - }); - clk++; - grumpkin::g1::affine_element lhs = uint8_t(lhs_is_inf_read.val) == 1 + grumpkin::g1::affine_element lhs = uint8_t(lhs_is_inf_read) == 1 ? grumpkin::g1::affine_element::infinity() - : grumpkin::g1::affine_element{ lhs_x_read.val, lhs_y_read.val }; - grumpkin::g1::affine_element rhs = uint8_t(rhs_is_inf_read.val) == 1 + : grumpkin::g1::affine_element{ lhs_x_read, lhs_y_read }; + grumpkin::g1::affine_element rhs = uint8_t(rhs_is_inf_read) == 1 ? 
grumpkin::g1::affine_element::infinity() - : grumpkin::g1::affine_element{ rhs_x_read.val, rhs_y_read.val }; - auto result = ecc_trace_builder.embedded_curve_add(lhs, rhs, ecc_clk); + : grumpkin::g1::affine_element{ rhs_x_read, rhs_y_read }; + auto result = ecc_trace_builder.embedded_curve_add(lhs, rhs, clk); - // Write point coordinates - auto write_x = constrained_write_to_memory( - call_ptr, clk, resolved_output_offset, result.x, AvmMemoryTag::U0, AvmMemoryTag::FF, IntermRegister::IA); - // Write y (directly) using the write_x.direct_address + 1 - mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IB, write_x.direct_address + 1, result.y, AvmMemoryTag::U0, AvmMemoryTag::FF); main_trace.push_back(Row{ .main_clk = clk, - .main_ia = result.x, - .main_ib = result.y, - .main_ind_addr_a = FF(write_x.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(write_x.direct_address), - .main_mem_addr_b = FF(write_x.direct_address + 1), - .main_pc = FF(pc), - .main_rwa = FF(1), - .main_rwb = FF(1), - .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_b = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(write_x.is_indirect)), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::FF)), + .main_pc = FF(pc++), + .main_tag_err = FF(0), }); - clk++; - mem_trace_builder.write_into_memory(call_ptr, - clk, - IntermRegister::IA, - write_x.direct_address + 2, - result.is_point_at_infinity(), - AvmMemoryTag::U0, - AvmMemoryTag::U8); - main_trace.push_back(Row{ - .main_clk = clk, - .main_ia = result.is_point_at_infinity(), - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(write_x.direct_address + 2), - .main_pc = FF(pc), - .main_rwa = FF(1), - .main_sel_mem_op_a = FF(1), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::U8)), - }); + gas_trace_builder.constrain_gas(clk, OpCode::ECADD); + + // Write point coordinates + auto out_addr_direct = + resolved_output_offset.mode == AddressingMode::DIRECT + ? 
resolved_output_offset.offset + : static_cast(mem_trace_builder.unconstrained_read(call_ptr, resolved_output_offset.offset)); + write_to_memory(out_addr_direct, result.x, AvmMemoryTag::FF); + write_to_memory(out_addr_direct + 1, result.y, AvmMemoryTag::FF); + write_to_memory(out_addr_direct + 2, result.is_point_at_infinity(), AvmMemoryTag::U8); } void AvmTraceBuilder::op_variable_msm(uint8_t indirect, @@ -3332,28 +3091,20 @@ void AvmTraceBuilder::op_variable_msm(uint8_t indirect, auto [resolved_points_offset, resolved_scalars_offset, resolved_output_offset] = unpack_indirects<3>(indirect, { points_offset, scalars_offset, output_offset }); - auto points_length_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, point_length_offset, AvmMemoryTag::U32, AvmMemoryTag::U0); - - main_trace.push_back(Row{ - .main_clk = clk, - .main_ia = points_length_read.val, - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(point_length_offset), - .main_pc = FF(pc), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::U32)), - .main_sel_mem_op_a = FF(1), - .main_tag_err = FF(static_cast(!points_length_read.tag_match)), - }); - clk++; + auto points_length = unconstrained_read_from_memory(point_length_offset); // Points are stored as [x1, y1, inf1, x2, y2, inf2, ...] with the types [FF, FF, U8, FF, FF, U8, ...] - uint32_t num_points = uint32_t(points_length_read.val) / 3; // 3 elements per point + uint32_t num_points = uint32_t(points_length) / 3; // 3 elements per point // We need to split up the reads due to the memory tags, std::vector points_coords_vec; std::vector points_inf_vec; std::vector scalars_vec; - AddressWithMode coords_offset = resolved_points_offset; + + AddressWithMode coords_offset_direct = + resolved_points_offset.mode == AddressingMode::DIRECT + ? 
resolved_points_offset + : static_cast(mem_trace_builder.unconstrained_read(call_ptr, resolved_points_offset.offset)); + // Loading the points is a bit more complex since we need to read the coordinates and the infinity flags // separately The current circuit constraints does not allow for multiple memory tags to be loaded from within // the same row. If we could we would be able to replace the following loops with a single read_slice_to_memory @@ -3362,67 +3113,18 @@ void AvmTraceBuilder::op_variable_msm(uint8_t indirect, // Read the coordinates first, +2 since we read 2 points per row, the first load could be indirect for (uint32_t i = 0; i < num_points; i++) { - auto point_x1_read = constrained_read_from_memory( - call_ptr, clk, coords_offset, AvmMemoryTag::FF, AvmMemoryTag::U0, IntermRegister::IA); - auto point_y1_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, point_x1_read.direct_address + 1, AvmMemoryTag::FF, AvmMemoryTag::U0); - - bool tag_match = point_x1_read.tag_match && point_y1_read.tag_match; - points_coords_vec.insert(points_coords_vec.end(), { point_x1_read.val, point_y1_read.val }); - main_trace.push_back(Row{ - .main_clk = clk, - .main_ia = point_x1_read.val, - .main_ib = point_y1_read.val, - .main_ind_addr_a = FF(point_x1_read.indirect_address), - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(point_x1_read.direct_address), - .main_mem_addr_b = FF(point_x1_read.direct_address + 1), - .main_pc = FF(pc), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::FF)), - .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_b = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(point_x1_read.is_indirect)), - .main_tag_err = FF(static_cast(!tag_match)), - }); - clk++; - // Update the coords offset to read the next point (subsequent points are always direct and separated by 3 - // addresses) - coords_offset = { AddressingMode::DIRECT, point_x1_read.direct_address + 3 }; - } - uint32_t 
inf_direct_address = resolved_points_offset.offset + 2; - // Read the Infinities flags - for (uint32_t i = 0; i < num_points; i++) { - auto point_inf_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IA, inf_direct_address, AvmMemoryTag::U8, AvmMemoryTag::U0); - points_inf_vec.emplace_back(point_inf_read.val); - - main_trace.push_back(Row{ - .main_clk = clk, - .main_ia = point_inf_read.val, - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(inf_direct_address), - .main_pc = FF(pc), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::U8)), - .main_sel_mem_op_a = FF(1), - .main_tag_err = FF(static_cast(!point_inf_read.tag_match)), - }); - clk++; - // Update the inf offset to read the next point (subsequent points are always direct and separated by 3 - inf_direct_address += 3; + auto point_x1 = unconstrained_read_from_memory(coords_offset_direct + 3 * i); + auto point_y1 = unconstrained_read_from_memory(coords_offset_direct + 3 * i + 1); + auto infty = unconstrained_read_from_memory(coords_offset_direct + 3 * i + 2); + points_coords_vec.insert(points_coords_vec.end(), { point_x1, point_y1 }); + points_inf_vec.emplace_back(infty); } // Scalar read length is num_points* 2 since scalars are stored as lo and hi limbs uint32_t scalar_read_length = num_points * 2; // Scalars are easy to read since they are stored as [lo1, hi1, lo2, hi2, ...] with the types [FF, FF, FF,FF, // ...] 
- auto num_scalar_rows = read_slice_to_memory(call_ptr, - clk, - resolved_scalars_offset, - AvmMemoryTag::FF, - AvmMemoryTag::U0, - FF(internal_return_ptr), - scalar_read_length, - scalars_vec); - clk += num_scalar_rows; + read_slice_from_memory(resolved_scalars_offset, scalar_read_length, scalars_vec); + // Reconstruct Grumpkin points std::vector points; for (size_t i = 0; i < num_points; i++) { @@ -3447,49 +3149,24 @@ void AvmTraceBuilder::op_variable_msm(uint8_t indirect, } // Perform the variable MSM - could just put the logic in here since there are no constraints. auto result = ecc_trace_builder.variable_msm(points, scalars, clk); - // Write the result back to memory [x, y, inf] with tags [FF, FF, U8] - auto write_x = constrained_write_to_memory( - call_ptr, clk, resolved_output_offset, result.x, AvmMemoryTag::U0, AvmMemoryTag::FF, IntermRegister::IA); - mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IB, write_x.direct_address + 1, result.y, AvmMemoryTag::U0, AvmMemoryTag::FF); main_trace.push_back(Row{ .main_clk = clk, - .main_ia = result.x, - .main_ib = result.y, - .main_ind_addr_a = FF(write_x.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(write_x.direct_address), - .main_mem_addr_b = FF(write_x.direct_address + 1), - .main_pc = FF(pc), - .main_rwa = FF(1), - .main_rwb = FF(1), - .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_b = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(write_x.is_indirect)), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::FF)), - }); - clk++; - // Write the infinity - mem_trace_builder.write_into_memory(call_ptr, - clk, - IntermRegister::IA, - write_x.direct_address + 2, - result.is_point_at_infinity(), - AvmMemoryTag::U0, - AvmMemoryTag::U8); - main_trace.push_back(Row{ - .main_clk = clk, - .main_ia = static_cast(result.is_point_at_infinity()), - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(write_x.direct_address + 2), - 
.main_pc = FF(pc), - .main_rwa = FF(1), - .main_sel_mem_op_a = FF(1), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::U8)), + .main_pc = FF(pc++), + .main_tag_err = FF(0), }); - pc++; + gas_trace_builder.constrain_gas(clk, OpCode::MSM); + + // Write the result back to memory [x, y, inf] with tags [FF, FF, U8] + AddressWithMode output_offset_direct = + resolved_output_offset.mode == AddressingMode::DIRECT + ? resolved_output_offset + : static_cast(mem_trace_builder.unconstrained_read(call_ptr, resolved_output_offset.offset)); + write_to_memory(output_offset_direct, result.x, AvmMemoryTag::FF); + write_to_memory(output_offset_direct + 1, result.y, AvmMemoryTag::FF); + write_to_memory(output_offset_direct + 2, result.is_point_at_infinity(), AvmMemoryTag::U8); } void AvmTraceBuilder::op_pedersen_commit(uint8_t indirect, @@ -3502,85 +3179,31 @@ void AvmTraceBuilder::op_pedersen_commit(uint8_t indirect, auto [resolved_input_offset, resolved_output_offset, resolved_input_size_offset, resolved_gen_ctx_offset] = unpack_indirects<4>(indirect, { input_offset, output_offset, input_size_offset, gen_ctx_offset }); - auto input_length_read = constrained_read_from_memory( - call_ptr, clk, resolved_input_size_offset, AvmMemoryTag::U32, AvmMemoryTag::U0, IntermRegister::IA); - auto gen_ctx_read = constrained_read_from_memory( - call_ptr, clk, resolved_gen_ctx_offset, AvmMemoryTag::U32, AvmMemoryTag::U0, IntermRegister::IB); - - main_trace.push_back(Row{ - .main_clk = clk, - .main_ia = input_length_read.val, - .main_ib = gen_ctx_read.val, - .main_ind_addr_a = FF(input_length_read.indirect_address), - .main_ind_addr_b = FF(gen_ctx_read.indirect_address), - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(input_length_read.direct_address), - .main_mem_addr_b = FF(gen_ctx_read.direct_address), - .main_pc = FF(pc), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::U32)), - .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_b = FF(1), - .main_sel_resolve_ind_addr_a = 
FF(static_cast(input_length_read.is_indirect)), - .main_sel_resolve_ind_addr_b = FF(static_cast(gen_ctx_read.is_indirect)), - }); - clk++; + auto input_length_read = unconstrained_read_from_memory(resolved_input_size_offset); + auto gen_ctx_read = unconstrained_read_from_memory(resolved_gen_ctx_offset); std::vector inputs; - auto num_rows = read_slice_to_memory(call_ptr, - clk, - resolved_input_offset, - AvmMemoryTag::FF, - AvmMemoryTag::U0, - FF(internal_return_ptr), - uint32_t(input_length_read.val), - inputs); - clk += num_rows; - - grumpkin::g1::affine_element result = - crypto::pedersen_commitment::commit_native(inputs, uint32_t(gen_ctx_read.val)); + read_slice_from_memory(resolved_input_offset, uint32_t(input_length_read), inputs); - auto write_x = constrained_write_to_memory( - call_ptr, clk, resolved_output_offset, result.x, AvmMemoryTag::U0, AvmMemoryTag::FF, IntermRegister::IA); - - mem_trace_builder.write_into_memory( - call_ptr, clk, IntermRegister::IB, write_x.direct_address + 1, result.y, AvmMemoryTag::U0, AvmMemoryTag::FF); + grumpkin::g1::affine_element result = crypto::pedersen_commitment::commit_native(inputs, uint32_t(gen_ctx_read)); main_trace.push_back(Row{ .main_clk = clk, - .main_ia = result.x, - .main_ib = result.y, - .main_ind_addr_a = FF(write_x.indirect_address), .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(write_x.direct_address), - .main_mem_addr_b = FF(write_x.direct_address + 1), - .main_pc = FF(pc), - .main_rwa = FF(1), - .main_rwb = FF(1), - .main_sel_mem_op_a = FF(1), - .main_sel_mem_op_b = FF(1), - .main_sel_resolve_ind_addr_a = FF(static_cast(write_x.is_indirect)), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::FF)), + .main_pc = FF(pc++), + .main_tag_err = FF(0), }); - clk++; - mem_trace_builder.write_into_memory(call_ptr, - clk, - IntermRegister::IA, - write_x.direct_address + 2, - result.is_point_at_infinity(), - AvmMemoryTag::U0, - AvmMemoryTag::U8); - main_trace.push_back(Row{ - .main_clk = 
clk, - .main_ia = static_cast(result.is_point_at_infinity()), - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_a = FF(write_x.direct_address + 2), - .main_pc = FF(pc), - .main_rwa = FF(1), - .main_sel_mem_op_a = FF(1), - .main_w_in_tag = FF(static_cast(AvmMemoryTag::U8)), - }); - pc++; + gas_trace_builder.constrain_gas(clk, OpCode::PEDERSENCOMMITMENT); + + // Write the result back to memory [x, y, inf] with tags [FF, FF, U8] + AddressWithMode output_offset_direct = + resolved_output_offset.mode == AddressingMode::DIRECT + ? resolved_output_offset + : static_cast(mem_trace_builder.unconstrained_read(call_ptr, resolved_output_offset.offset)); + write_to_memory(output_offset_direct, result.x, AvmMemoryTag::FF); + write_to_memory(output_offset_direct + 1, result.y, AvmMemoryTag::FF); + write_to_memory(output_offset_direct + 2, result.is_point_at_infinity(), AvmMemoryTag::U8); } /************************************************************************************************** @@ -3617,7 +3240,7 @@ void AvmTraceBuilder::op_to_radix_le( : std::vector(num_limbs, 0); // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::TORADIXLE); + gas_trace_builder.constrain_gas(clk, OpCode::TORADIXLE, num_limbs); // This is the row that contains the selector to trigger the sel_op_radix_le // In this row, we read the input value and the destination address into register A and B respectively @@ -3642,18 +3265,8 @@ void AvmTraceBuilder::op_to_radix_le( .main_sel_resolve_ind_addr_b = FF(static_cast(read_dst.is_indirect)), .main_w_in_tag = FF(static_cast(AvmMemoryTag::U8)), }); - // Increment the clock so we dont write at the same clock cycle - // Instead we temporarily encode the writes into the subsequent rows of the main trace - clk++; - // MemTrace, write into memory value b from intermediate register ib. 
- std::vector ff_res = {}; - ff_res.reserve(res.size()); - for (auto const& limb : res) { - ff_res.emplace_back(limb); - } - write_slice_to_memory( - call_ptr, clk, resolved_dst_offset, AvmMemoryTag::FF, AvmMemoryTag::U8, FF(internal_return_ptr), ff_res); + write_slice_to_memory(resolved_dst_offset, AvmMemoryTag::U8, res); } /************************************************************************************************** @@ -3691,7 +3304,7 @@ void AvmTraceBuilder::op_sha256_compression(uint8_t indirect, bool tag_match = read_a.tag_match && read_b.tag_match; // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::SHA256COMPRESSION); + gas_trace_builder.constrain_gas(clk, OpCode::SHA256COMPRESSION); // Since the above adds mem_reads in the mem_trace_builder at clk, we need to follow up resolving the reads in // the main trace at the same clk cycle to preserve the cross-table permutation @@ -3729,28 +3342,9 @@ void AvmTraceBuilder::op_sha256_compression(uint8_t indirect, // Input for hash is expanded to 512 bits std::vector input_vec; // Read results are written to h_init array. - read_slice_to_memory(call_ptr, - clk, - resolved_h_init_offset, - AvmMemoryTag::U32, - AvmMemoryTag::U32, - FF(internal_return_ptr), - 8, - h_init_vec); - - // Increment the clock by 2 since (8 reads / 4 reads per row = 2) - clk += 2; + read_slice_from_memory(resolved_h_init_offset, 8, h_init_vec); // Read results are written to input array - read_slice_to_memory(call_ptr, - clk, - resolved_input_offset, - AvmMemoryTag::U32, - AvmMemoryTag::U32, - FF(internal_return_ptr), - 16, - input_vec); - // Increment the clock by 4 since (16 / 4 = 4) - clk += 4; + read_slice_from_memory(resolved_input_offset, 16, input_vec); // Now that we have read all the values, we can perform the operation to get the resulting witness. 
// Note: We use the sha_op_clk to ensure that the sha256 operation is performed at the same clock cycle as the @@ -3766,13 +3360,7 @@ void AvmTraceBuilder::op_sha256_compression(uint8_t indirect, } // Write the result to memory after - write_slice_to_memory(call_ptr, - clk, - resolved_output_offset, - AvmMemoryTag::U32, - AvmMemoryTag::U32, - FF(internal_return_ptr), - ff_result); + write_slice_to_memory(resolved_output_offset, AvmMemoryTag::U32, ff_result); } /** @@ -3791,7 +3379,7 @@ void AvmTraceBuilder::op_sha256_compression(uint8_t indirect, void AvmTraceBuilder::op_keccakf1600(uint8_t indirect, uint32_t output_offset, uint32_t input_offset, - uint32_t input_size_offset) + [[maybe_unused]] uint32_t input_size_offset) { // What happens if the input_size_offset is > 25 when the state is more that that? auto clk = static_cast(main_trace.size()) + 1; @@ -3804,7 +3392,7 @@ void AvmTraceBuilder::op_keccakf1600(uint8_t indirect, bool tag_match = input_read.tag_match && output_read.tag_match; // Constrain gas cost - gas_trace_builder.constrain_gas_lookup(clk, OpCode::KECCAKF1600); + gas_trace_builder.constrain_gas(clk, OpCode::KECCAKF1600); main_trace.push_back(Row{ .main_clk = clk, @@ -3824,53 +3412,19 @@ void AvmTraceBuilder::op_keccakf1600(uint8_t indirect, .main_sel_resolve_ind_addr_c = FF(static_cast(output_read.is_indirect)), .main_tag_err = FF(static_cast(!tag_match)), }); - // We store the current clk this main trace row occurred so that we can line up the keccak gadget operation - // at the same clk later. 
- auto keccak_op_clk = clk; - // We need to increment the clk - clk++; - auto input_length_read = mem_trace_builder.read_and_load_from_memory( - call_ptr, clk, IntermRegister::IB, input_size_offset, AvmMemoryTag::U32, AvmMemoryTag::U0); - main_trace.push_back(Row{ - .main_clk = clk, - .main_ib = input_length_read.val, // Message Length - .main_internal_return_ptr = FF(internal_return_ptr), - .main_mem_addr_b = FF(input_size_offset), // length - .main_pc = FF(pc), - .main_r_in_tag = FF(static_cast(AvmMemoryTag::U32)), - .main_sel_mem_op_b = FF(1), - .main_tag_err = FF(static_cast(!input_length_read.tag_match)), - }); - clk++; + // Array input is fixed to 1600 bits std::vector input_vec; // Read results are written to input array - uint32_t num_main_rows = read_slice_to_memory(call_ptr, - clk, - resolved_input_offset, - AvmMemoryTag::U64, - AvmMemoryTag::U0, - FF(internal_return_ptr), - 25, - input_vec); - + read_slice_from_memory(resolved_input_offset, 25, input_vec); std::array input = vec_to_arr(input_vec); - // Increment the clock by 7 since (25 reads / 4 reads per row = 7) - clk += num_main_rows; // Now that we have read all the values, we can perform the operation to get the resulting witness. 
// Note: We use the keccak_op_clk to ensure that the keccakf1600 operation is performed at the same clock cycle // as the main trace that has the selector - std::array result = keccak_trace_builder.keccakf1600(keccak_op_clk, input); - // We convert the results to field elements here - std::vector ff_result; - for (uint32_t i = 0; i < 25; i++) { - ff_result.emplace_back(result[i]); - } - + std::array result = keccak_trace_builder.keccakf1600(clk, input); // Write the result to memory after - write_slice_to_memory( - call_ptr, clk, resolved_output_offset, AvmMemoryTag::U0, AvmMemoryTag::U64, FF(internal_return_ptr), ff_result); + write_slice_to_memory(resolved_output_offset, AvmMemoryTag::U64, result); } /************************************************************************************************** @@ -3919,8 +3473,8 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c // If the bin_trace_size has entries, we need the main_trace to be as big as our byte lookup table (3 * // 2**16 long) size_t const lookup_table_size = (bin_trace_size > 0 && range_check_required) ? 3 * (1 << 16) : 0; - // Range check size is 1 less than it needs to be since we insert a "first row" at the top of the trace at the end, - // with clk 0 (this doubles as our range check) + // Range check size is 1 less than it needs to be since we insert a "first row" at the top of the trace at the + // end, with clk 0 (this doubles as our range check) size_t const range_check_size = range_check_required ? UINT16_MAX : 0; std::vector trace_sizes = { mem_trace_size, main_trace_size, alu_trace_size, range_check_size, conv_trace_size, lookup_table_size, @@ -4398,14 +3952,9 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c // Assume that gas_trace entries are ordered by a strictly increasing clk sequence. for (auto const& gas_entry : gas_trace) { - // Filling potential gap between two gas_trace entries - // Remaining gas values remain unchanged. 
- while (gas_entry.clk > current_clk) { - auto& next = main_trace.at(current_clk); - next.main_l2_gas_remaining = current_l2_gas_remaining; - next.main_da_gas_remaining = current_da_gas_remaining; - current_clk++; - } + // There should be no gaps in the gas_trace. + ASSERT(gas_entry.clk == current_clk); + // << "No gas entry for opcode" << next.main_opcode_val << "at clk" << current_clk; auto& dest = main_trace.at(gas_entry.clk - 1); auto& next = main_trace.at(gas_entry.clk); @@ -4426,17 +3975,13 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c dest.main_abs_da_rem_gas_hi = abs_da_gas_remaining >> 16; dest.main_abs_l2_rem_gas_lo = static_cast(abs_l2_gas_remaining); dest.main_abs_da_rem_gas_lo = static_cast(abs_da_gas_remaining); + dest.main_sel_gas_accounting_active = FF(1); - // TODO: gas is not constrained for external call at this time - if (gas_entry.opcode != OpCode::CALL) { - dest.main_sel_gas_accounting_active = FF(1); - - // lookups counting - rem_gas_rng_check_counts[L2_HI_GAS_COUNTS_IDX][static_cast(dest.main_abs_l2_rem_gas_hi)]++; - rem_gas_rng_check_counts[L2_LO_GAS_COUNTS_IDX][static_cast(dest.main_abs_l2_rem_gas_lo)]++; - rem_gas_rng_check_counts[DA_HI_GAS_COUNTS_IDX][static_cast(dest.main_abs_da_rem_gas_hi)]++; - rem_gas_rng_check_counts[DA_LO_GAS_COUNTS_IDX][static_cast(dest.main_abs_da_rem_gas_lo)]++; - } + // lookups counting + rem_gas_rng_check_counts[L2_HI_GAS_COUNTS_IDX][static_cast(dest.main_abs_l2_rem_gas_hi)]++; + rem_gas_rng_check_counts[L2_LO_GAS_COUNTS_IDX][static_cast(dest.main_abs_l2_rem_gas_lo)]++; + rem_gas_rng_check_counts[DA_HI_GAS_COUNTS_IDX][static_cast(dest.main_abs_da_rem_gas_hi)]++; + rem_gas_rng_check_counts[DA_LO_GAS_COUNTS_IDX][static_cast(dest.main_abs_da_rem_gas_lo)]++; dest.main_l2_out_of_gas = static_cast(l2_out_of_gas); dest.main_da_out_of_gas = static_cast(da_out_of_gas); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.hpp 
b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.hpp index ff06295cc39..c5404ec1a03 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.hpp @@ -29,8 +29,18 @@ struct AddressWithMode { AddressingMode mode; uint32_t offset; + AddressWithMode() = default; + AddressWithMode(uint32_t offset) + : mode(AddressingMode::DIRECT) + , offset(offset) + {} + AddressWithMode(AddressingMode mode, uint32_t offset) + : mode(mode) + , offset(offset) + {} + // Dont mutate - AddressWithMode operator+(uint val) { return { mode, offset + val }; } + AddressWithMode operator+(uint val) const noexcept { return { mode, offset + val }; } }; // This is the internal context that we keep along the lifecycle of bytecode execution @@ -102,6 +112,8 @@ class AvmTraceBuilder { // Machine State - Memory void op_set(uint8_t indirect, uint128_t val, uint32_t dst_offset, AvmMemoryTag in_tag); + // TODO: only used for write_slice_to_memory. Remove. + void op_set_internal(uint8_t indirect, FF val_ff, uint32_t dst_offset, AvmMemoryTag in_tag); void op_mov(uint8_t indirect, uint32_t src_offset, uint32_t dst_offset); void op_cmov(uint8_t indirect, uint32_t a_offset, uint32_t b_offset, uint32_t cond_offset, uint32_t dst_offset); @@ -256,23 +268,11 @@ class AvmTraceBuilder { IntermRegister reg, AvmMemTraceBuilder::MemOpOwner mem_op_owner = AvmMemTraceBuilder::MAIN); - // TODO(ilyas: #6383): Temporary way to bulk read slices - template - uint32_t read_slice_to_memory(uint8_t space_id, - uint32_t clk, - AddressWithMode addr, - AvmMemoryTag r_tag, - AvmMemoryTag w_tag, - FF internal_return_ptr, - size_t slice_len, - std::vector& slice); - uint32_t write_slice_to_memory(uint8_t space_id, - uint32_t clk, - AddressWithMode addr, - AvmMemoryTag r_tag, - AvmMemoryTag w_tag, - FF internal_return_ptr, - std::vector const& slice); + // TODO: remove these once everything is constrained. 
+ FF unconstrained_read_from_memory(AddressWithMode addr); + template void read_slice_from_memory(AddressWithMode addr, size_t slice_len, std::vector& slice); + void write_to_memory(AddressWithMode addr, FF val, AvmMemoryTag w_tag); + template void write_slice_to_memory(AddressWithMode addr, AvmMemoryTag w_tag, const T& slice); }; } // namespace bb::avm_trace diff --git a/bb-pilcom/bb-pil-backend/src/relation_builder.rs b/bb-pilcom/bb-pil-backend/src/relation_builder.rs index f1be69f0ba8..1d5f069dd67 100644 --- a/bb-pilcom/bb-pil-backend/src/relation_builder.rs +++ b/bb-pilcom/bb-pil-backend/src/relation_builder.rs @@ -131,6 +131,8 @@ impl RelationBuilder for BBFiles { .enumerate() .filter(|(_, id)| id.label.is_some()) .map(|(idx, id)| (idx, id.label.clone().unwrap())) + // Useful for debugging + // .map(|(idx, id)| (idx, id.label.as_ref().unwrap_or(&id.identity).clone())) .collect_vec(); let data = &json!({ From 8021eda6b5c6e6c518ff38bacdc828fcfab09465 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Thu, 8 Aug 2024 10:43:23 +0100 Subject: [PATCH 15/61] feat: merge devnet chagnes to master (#7822) Brings the latest set of devnet fixes into the master branch --------- Co-authored-by: PhilWindle Co-authored-by: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Co-authored-by: sirasistant Co-authored-by: Maxim Vezenov Co-authored-by: Facundo Co-authored-by: AztecBot Co-authored-by: PhilWindle <60546371+PhilWindle@users.noreply.github.com> Co-authored-by: Santiago Palladino Co-authored-by: spypsy Co-authored-by: spypsy --- .github/workflows/devnet-deploys.yml | 62 ++-- aztec-up/bin/.aztec-run | 37 ++- aztec-up/bin/aztec | 38 ++- aztec-up/bin/aztec-install | 31 +- cspell.json | 1 + docker-compose.yml | 9 +- iac/mainnet-fork/redeploy | 2 +- .../archiver/src/archiver/archiver.ts | 9 +- yarn-project/archiver/src/archiver/config.ts | 70 ++--- .../archiver/src/archiver/instrumentation.ts | 27 +- yarn-project/archiver/src/index.ts | 2 +- 
.../aztec-node/src/aztec-node/config.ts | 47 +-- .../aztec-node/src/aztec-node/server.ts | 2 +- yarn-project/aztec/package.json | 2 + .../aztec/src/cli/aztec_start_options.ts | 294 ++++++++++++++++++ yarn-project/aztec/src/cli/cli.ts | 190 +++++------ .../aztec/src/cli/cmds/start_archiver.ts | 13 +- yarn-project/aztec/src/cli/cmds/start_bot.ts | 9 +- yarn-project/aztec/src/cli/cmds/start_node.ts | 94 +++--- .../aztec/src/cli/cmds/start_p2p_bootstrap.ts | 8 +- .../aztec/src/cli/cmds/start_prover_agent.ts | 52 ++-- .../aztec/src/cli/cmds/start_prover_node.ts | 26 +- yarn-project/aztec/src/cli/cmds/start_pxe.ts | 46 +-- yarn-project/aztec/src/cli/cmds/start_txe.ts | 4 +- yarn-project/aztec/src/cli/texts.ts | 107 ------- yarn-project/aztec/src/cli/util.ts | 232 +++++++++----- yarn-project/aztec/src/sandbox.ts | 14 +- yarn-project/aztec/terraform/node/main.tf | 12 +- yarn-project/aztec/terraform/prover/main.tf | 192 ++++++------ .../bb-prover/src/prover/bb_prover.ts | 2 +- .../bb-prover/src/test/test_circuit_prover.ts | 1 - yarn-project/bot/src/config.ts | 119 ++++--- yarn-project/bot/src/index.ts | 2 +- .../circuit-types/src/interfaces/configs.ts | 4 +- .../src/interfaces/prover-client.ts | 51 ++- .../cli/src/cmds/devnet/bootstrap_network.ts | 1 - yarn-project/cli/src/cmds/pxe/call.ts | 20 +- yarn-project/cli/src/cmds/pxe/index.ts | 5 +- .../composed/integration_l1_publisher.test.ts | 6 +- .../uniswap_trade_on_l1_from_l2.test.ts | 2 +- yarn-project/end-to-end/src/e2e_bot.test.ts | 8 +- .../end-to-end/src/e2e_fees/fees_test.ts | 6 +- .../end-to-end/src/e2e_p2p_network.test.ts | 9 +- ...lic_cross_chain_messaging_contract_test.ts | 4 +- .../src/fixtures/snapshot_manager.ts | 12 +- yarn-project/end-to-end/src/fixtures/utils.ts | 16 +- yarn-project/ethereum/src/index.ts | 1 + .../ethereum/src/l1_contract_addresses.ts | 65 ++-- yarn-project/ethereum/src/l1_reader.ts | 40 +++ yarn-project/foundation/package.json | 1 + yarn-project/foundation/src/config/env_var.ts | 97 ++++++ 
yarn-project/foundation/src/config/index.ts | 115 +++++++ yarn-project/foundation/src/index.ts | 1 + yarn-project/kv-store/src/utils.ts | 11 +- yarn-project/p2p-bootstrap/src/index.ts | 4 +- yarn-project/p2p/src/bootstrap/bootstrap.ts | 11 +- yarn-project/p2p/src/client/p2p_client.ts | 2 +- yarn-project/p2p/src/config.ts | 162 +++++++--- .../p2p/src/service/discv5_service.test.ts | 6 +- .../p2p/src/service/libp2p_service.ts | 16 +- .../p2p/src/tx_pool/aztec_kv_tx_pool.ts | 8 +- yarn-project/prover-client/src/config.ts | 84 ++--- .../src/prover-agent/prover-agent.ts | 37 ++- yarn-project/prover-node/src/config.ts | 30 +- .../prover-node/src/tx-provider/config.ts | 14 +- yarn-project/pxe/src/config/index.ts | 101 ++++-- yarn-project/sequencer-client/src/config.ts | 156 ++++++---- .../src/global_variable_builder/config.ts | 20 -- .../src/global_variable_builder/index.ts | 6 +- .../global_variable_builder/viem-reader.ts | 9 +- .../sequencer-client/src/publisher/config.ts | 74 +++-- .../sequencer-client/src/publisher/index.ts | 2 +- .../src/publisher/viem-tx-sender.ts | 2 +- .../sequencer-client/src/sequencer/metrics.ts | 57 +++- .../src/sequencer/sequencer.ts | 8 +- yarn-project/telemetry-client/package.json | 1 + .../telemetry-client/src/attributes.ts | 2 +- .../src/aztec_resource_detector.ts | 19 ++ yarn-project/telemetry-client/src/config.ts | 29 ++ yarn-project/telemetry-client/src/metrics.ts | 10 +- yarn-project/telemetry-client/src/otel.ts | 45 ++- yarn-project/telemetry-client/src/start.ts | 32 +- .../telemetry-client/src/telemetry.ts | 18 ++ yarn-project/types/src/network/index.ts | 7 + .../world-state/src/synchronizer/config.ts | 31 +- yarn-project/yarn.lock | 62 ++++ 86 files changed, 2164 insertions(+), 1132 deletions(-) create mode 100644 yarn-project/aztec/src/cli/aztec_start_options.ts delete mode 100644 yarn-project/aztec/src/cli/texts.ts create mode 100644 yarn-project/ethereum/src/l1_reader.ts create mode 100644 
yarn-project/foundation/src/config/env_var.ts create mode 100644 yarn-project/foundation/src/config/index.ts delete mode 100644 yarn-project/sequencer-client/src/global_variable_builder/config.ts create mode 100644 yarn-project/telemetry-client/src/aztec_resource_detector.ts create mode 100644 yarn-project/telemetry-client/src/config.ts diff --git a/.github/workflows/devnet-deploys.yml b/.github/workflows/devnet-deploys.yml index cb297afb6b2..b6d4509538f 100644 --- a/.github/workflows/devnet-deploys.yml +++ b/.github/workflows/devnet-deploys.yml @@ -77,7 +77,7 @@ jobs: setup: uses: ./.github/workflows/setup-runner.yml with: - username: master + username: ${{ github.actor }} runner_type: builder-x86 secrets: inherit @@ -99,7 +99,6 @@ jobs: pxe_lb_priority_range_start: ${{ steps.set_network_vars.outputs.pxe_lb_priority_range_start }} faucet_lb_priority: ${{ steps.set_network_vars.outputs.faucet_lb_priority }} bot_no_wait: ${{ steps.set_network_vars.outputs.bot_no_wait }} - max_txs_per_block: ${{ steps.set_network_vars.outputs.max_txs_per_block }} steps: - name: Set network vars shell: bash @@ -119,13 +118,12 @@ jobs: echo "pxe_lb_priority_range_start=5100" >> $GITHUB_OUTPUT echo "faucet_lb_priority=601" >> $GITHUB_OUTPUT echo "bot_no_wait=false" >> $GITHUB_OUTPUT - echo "max_txs_per_block=64" >> $GITHUB_OUTPUT elif [ "$BRANCH_NAME" = "provernet" ] then echo "deploy_tag=provernet" >> $GITHUB_OUTPUT echo "branch_name=provernet" >> $GITHUB_OUTPUT echo "network_api_key=PROVERNET_API_KEY" >> $GITHUB_OUTPUT - echo "agents_per_prover=2" >> $GITHUB_OUTPUT + echo "agents_per_prover=4" >> $GITHUB_OUTPUT echo "bot_interval=300" >> $GITHUB_OUTPUT echo "node_tcp_range_start=40200" >> $GITHUB_OUTPUT echo "node_udp_range_start=45200" >> $GITHUB_OUTPUT @@ -133,7 +131,6 @@ jobs: echo "pxe_lb_priority_range_start=5200" >> $GITHUB_OUTPUT echo "faucet_lb_priority=602" >> $GITHUB_OUTPUT echo "bot_no_wait=true" >> $GITHUB_OUTPUT - echo "max_txs_per_block=4" >> $GITHUB_OUTPUT elif [ 
"$BRANCH_NAME" = "alphanet" ] then echo "deploy_tag=alphanet" >> $GITHUB_OUTPUT @@ -147,7 +144,6 @@ jobs: echo "pxe_lb_priority_range_start=5000" >> $GITHUB_OUTPUT echo "faucet_lb_priority=600" >> $GITHUB_OUTPUT echo "bot_no_wait=false" >> $GITHUB_OUTPUT - echo "max_txs_per_block=64" >> $GITHUB_OUTPUT else echo "Unrecognized Branch!!" exit 1 @@ -190,31 +186,6 @@ jobs: earthly-ci \ --no-output --push ./iac/mainnet-fork+export-mainnet-fork --DIST_TAG=${{ env.DEPLOY_TAG }} - build-aztec-nargo: - needs: set-network - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - API_KEY_NAME: ${{ needs.set-network.outputs.network_api_key }} - runs-on: ${{ github.actor }}-x86 - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - fetch-depth: 0 - - uses: ./.github/ci-setup-action - with: - concurrency_key: build-aztec-nargo-${{ github.actor }} - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - - name: Build & push aztec nargo image - run: | - earthly-ci --no-output --push ./aztec-nargo+export-aztec-nargo --DIST_TAG=${{ env.DEPLOY_TAG }} - earthly-ci --no-output --push ./aztec-nargo+export-aztec-nargo --DIST_TAG=${{ github.sha }} - build-aztec: needs: set-network env: @@ -268,8 +239,33 @@ jobs: docker push aztecprotocol/aztec:${{ github.sha }} + build-aztec-nargo: + needs: [set-network, build-aztec] + env: + BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} + DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} + TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} + API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} + TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} + API_KEY_NAME: ${{ 
needs.set-network.outputs.network_api_key }} + runs-on: ${{ github.actor }}-x86 + steps: + - uses: actions/checkout@v4 + with: + ref: "${{ env.GIT_COMMIT }}" + fetch-depth: 0 + - uses: ./.github/ci-setup-action + with: + concurrency_key: build-aztec-nargo-${{ github.actor }} + dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" + + - name: Build & push aztec nargo image + run: | + earthly-ci --no-output --push ./aztec-nargo+export-aztec-nargo --DIST_TAG=${{ env.DEPLOY_TAG }} + earthly-ci --no-output --push ./aztec-nargo+export-aztec-nargo --DIST_TAG=${{ github.sha }} + build-faucet: - needs: set-network + needs: [set-network, build-aztec] env: BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} @@ -415,7 +411,6 @@ jobs: TF_VAR_NODE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.node_lb_priority_range_start }} TF_VAR_PXE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.pxe_lb_priority_range_start }} TF_VAR_BOT_NO_WAIT_FOR_TRANSFERS: ${{ needs.set-network.outputs.bot_no_wait }} - TF_VAR_SEQ_MAX_TX_PER_BLOCK: ${{ needs.set-network.outputs.max_txs_per_block }} steps: - uses: actions/checkout@v4 with: @@ -616,7 +611,6 @@ jobs: TF_VAR_NODE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.node_lb_priority_range_start }} TF_VAR_PXE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.pxe_lb_priority_range_start }} TF_VAR_BOT_NO_WAIT_FOR_TRANSFERS: ${{ needs.set-network.outputs.bot_no_wait }} - TF_VAR_SEQ_MAX_TX_PER_BLOCK: ${{ needs.set-network.outputs.max_txs_per_block }} TF_VAR_PROVING_ENABLED: true TF_VAR_BOT_NO_START: false steps: diff --git a/aztec-up/bin/.aztec-run b/aztec-up/bin/.aztec-run index 8e318fc4c63..1ba0673cf08 100755 --- a/aztec-up/bin/.aztec-run +++ b/aztec-up/bin/.aztec-run @@ -10,6 +10,13 @@ shift DEFAULT_PORT=8080 VERSION=${VERSION:-"latest"} AZTEC_PORT=${AZTEC_PORT:-$DEFAULT_PORT} +INHERIT_USER=${INHERIT_USER:-1} + +if [[ -n "${NETWORK:-}" ]]; then + VERSION=$NETWORK + # enable proving if 
connecting to a network by default + export PXE_PROVER_ENABLED=${PXE_PROVER_ENABLED:-1} +fi # preserve arguments to pass to docker run declare -a preserved_args @@ -39,7 +46,7 @@ fi # If running in rootless docker, don't set uid/gid in container, as it breaks the write perms to the home mount. # Otherwise we set the uid/gid to be that of user running the container to ensure write perms to the home mount. -if [ -z "${ROOTLESS:-}" ]; then +if [[ -z "${ROOTLESS:-}" && "${INHERIT_USER:-}" == "1" ]]; then DOCKER_USER="--user $(id -u):$(id -g)" fi @@ -73,16 +80,6 @@ for i in "${!args[@]}"; do fi done -DOCKER_ENV="-e HOME=$HOME" -for env in ${ENV_VARS_TO_INJECT:-}; do - if [ -n "${!env:-}" ]; then - # First substitute any reference to localhost with our host gateway. - env=${env//localhost/host.docker.internal} - # Inject into container. - DOCKER_ENV+=" -e $env=${!env:-}" - fi -done - # Parse command-line arguments while [[ "$#" -gt 0 ]]; do case $1 in @@ -91,6 +88,14 @@ while [[ "$#" -gt 0 ]]; do preserved_args+=("$1" "$2") # Store both argument and value shift 2 # Move past argument and value ;; + --pxe.network) + VERSION="$2" # Set version to user-specified network (e.g. 'devnet') + echo "Using aztecprotocol/aztec: $VERSION" + preserved_args+=("$1" "$2") # Store both argument and value + # turn on proving if connecting to a network by default + export PXE_PROVER_ENABLED=${PXE_PROVER_ENABLED:-1} + shift 2 + ;; # Move past argument and value *) preserved_args+=("$1") # Store unrecognized/other arguments shift # Move to next argument @@ -98,6 +103,16 @@ while [[ "$#" -gt 0 ]]; do esac done +DOCKER_ENV="-e HOME=$HOME" +for env in ${ENV_VARS_TO_INJECT:-}; do + if [ -n "${!env:-}" ]; then + # First substitute any reference to localhost with our host gateway. + env=${env//localhost/host.docker.internal} + # Inject into container. 
+ DOCKER_ENV+=" -e $env=${!env:-}" + fi +done + # Dynamic port assignment port_assignment="" if [[ -z "${SKIP_PORT_ASSIGNMENT:-}" ]]; then diff --git a/aztec-up/bin/aztec b/aztec-up/bin/aztec index 7268142f8ae..eff925be279 100755 --- a/aztec-up/bin/aztec +++ b/aztec-up/bin/aztec @@ -1,6 +1,13 @@ #!/usr/bin/env bash set -euo pipefail +# Directory of env_var file +TS_ENV_VAR_FILE=/usr/src/yarn-project/foundation/src/config/env_var.ts +LOCAL_TS_FILE=./env_var.ts +NETWORK=${NETWORK:-} +VERSION=${VERSION:-${NETWORK:-"latest"}} +LOCAL_ENV_VAR_FILE="./.${VERSION}_aztec_cli_vars" + function get_compose { # Favour 'docker compose', falling back on docker-compose. CMD="docker compose" @@ -8,6 +15,11 @@ function get_compose { $CMD $@ } +# Function to parse the TypeScript file +function parse_ts_file { + grep -oE "\| '[^']+'" "$LOCAL_TS_FILE" | sed "s/| '//; s/'//g" >"$LOCAL_ENV_VAR_FILE" +} + CALLED_FROM=$PWD if [ "${1:-}" == "test" ]; then @@ -24,7 +36,7 @@ elif [ $# == 2 ] && [ "$1" == "start" ] && [ "$2" == "--sandbox" ]; then # Compose file to use FILE_ARG="-f $HOME/.aztec/docker-compose.sandbox.yml" - # Function to be executed when SIGINT is received. + # Function to be executed when SIGINT is received. cleanup() { get_compose $FILE_ARG down } @@ -32,9 +44,29 @@ elif [ $# == 2 ] && [ "$1" == "start" ] && [ "$2" == "--sandbox" ]; then # Set trap to catch SIGINT and call the cleanup function. trap cleanup SIGINT - get_compose -p sandbox $FILE_ARG up --force-recreate --remove-orphans + get_compose -p sandbox $FILE_ARG up --force-recreate --remove-orphans elif [ "${1:-}" == "start" ]; then - $(dirname $0)/.aztec-run aztecprotocol/aztec "$@" + # export ENV_VARS_TO_INJECT=$(<.aztec_cli_vars) + + # Grab env vars from aztec project + if [[ ! -f "$LOCAL_ENV_VAR_FILE" ]]; then + echo "No env vars file found, creating one..." 
+ # Create temp container + temp_container_id=$(docker create aztecprotocol/aztec:$VERSION) + # Copy env vars file from container + docker cp ${temp_container_id}:$TS_ENV_VAR_FILE $LOCAL_TS_FILE &> /dev/null + # Remove temp container + docker rm $temp_container_id &> /dev/null + + # Parse the TypeScript file + parse_ts_file + rm $LOCAL_TS_FILE + fi + + # Read env vars from file + readarray -t ENV_VARS_TO_INJECT <"$LOCAL_ENV_VAR_FILE" + export ENV_VARS_TO_INJECT="${ENV_VARS_TO_INJECT[*]}" + ENV_VARS_TO_INJECT="${ENV_VARS_TO_INJECT[*]}" INHERIT_USER=0 $(dirname $0)/.aztec-run aztecprotocol/aztec "$@" else SKIP_PORT_ASSIGNMENT=1 $(dirname $0)/.aztec-run aztecprotocol/aztec "$@" fi diff --git a/aztec-up/bin/aztec-install b/aztec-up/bin/aztec-install index 4d158894149..028d798beab 100755 --- a/aztec-up/bin/aztec-install +++ b/aztec-up/bin/aztec-install @@ -2,19 +2,19 @@ set -euo pipefail # Colors -g="\033[32m" # Green -y="\033[33m" # Yellow -b="\033[34m" # Blue -p="\033[35m" # Purple -r="\033[0m" # Reset +g="\033[32m" # Green +y="\033[33m" # Yellow +b="\033[34m" # Blue +p="\033[35m" # Purple +r="\033[0m" # Reset bold="\033[1m" # Function to replace characters and add color function print_colored() { - local b=$'\033[34m' # Blue - local y=$'\033[33m' # Yellow - local r=$'\033[0m' # Reset - echo "$1" | sed -E "s/(█+)/${b}\1${y}/g" + local b=$'\033[34m' # Blue + local y=$'\033[33m' # Yellow + local r=$'\033[0m' # Reset + echo "$1" | sed -E "s/(█+)/${b}\1${y}/g" } function title() { @@ -32,11 +32,11 @@ function title() { echo if [ "$(uname -s)" == "Darwin" ]; then echo -e "${y}WARNING: For best performance we recommend adjusting your default docker settings:" - echo -e " - Under general, enable VirtioFS." - echo -e " - Under resources, set CPUs to ~80% your maximum." - echo -e " - Under resources, set Memory to ~80% your maximum." - echo -e "You may receive a warning about your home directory being mounted into a container." 
- echo -e "This is requested so we can read and write project files, that is all." + echo -e " - Under general, enable VirtioFS." + echo -e " - Under resources, set CPUs to ~80% your maximum." + echo -e " - Under resources, set Memory to ~80% your maximum." + echo -e "You may receive a warning about your home directory being mounted into a container." + echo -e "This is requested so we can read and write project files, that is all." echo -e "${r}" fi echo -e "This will install the following scripts and update your PATH if necessary:" @@ -126,6 +126,7 @@ install_bin aztec install_bin aztec-up install_bin aztec-nargo install_bin aztec-wallet +install_bin .aztec_cli_vars function update_path_env_var { TARGET_DIR="${1}" @@ -159,7 +160,7 @@ function update_path_env_var { fi # Add the target directory to the user's PATH in their profile. - echo "export PATH=\"\$PATH:$TARGET_DIR\"" >> "$SHELL_PROFILE" + echo "export PATH=\"\$PATH:$TARGET_DIR\"" >>"$SHELL_PROFILE" if [ -z "${NON_INTERACTIVE:-}" ]; then info "Done! Starting fresh shell..." 
diff --git a/cspell.json b/cspell.json index dda4954485e..c4ae01a32e5 100644 --- a/cspell.json +++ b/cspell.json @@ -5,6 +5,7 @@ "acir", "acvm", "addrs", + "alphanet", "archiver", "assignement", "asyncify", diff --git a/docker-compose.yml b/docker-compose.yml index 8cc9bda4c66..82a33648b5a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -14,6 +14,7 @@ services: PXE_DATA_DIRECTORY: /var/lib/aztec/pxe NODE_NO_WARNINGS: 1 AZTEC_PORT: 8080 + MNEMONIC: "test test test test test test test test test test test junk" entrypoint: > sh -c ' export AZTEC_NODE_URL=$$(cat /var/run/secrets/aztec-node-url); @@ -61,7 +62,7 @@ services: P2P_ENABLED: true PEER_ID_PRIVATE_KEY: AZTEC_PORT: 8999 - TEL_COLLECTOR_BASE_URL: ${TEL_COLLECTOR_BASE_URL:-http://otel-collector:4318} + OTEL_EXPORTER_OTLP_ENDPOINT: ${OTEL_EXPORTER_OTLP_ENDPOINT:-http://otel-collector:4318} secrets: - ethereum-host - p2p-boot-node @@ -76,13 +77,13 @@ services: # if the stack is started with --profile metrics --profile node, give the collector a chance to start before the node i=0 max=3 - while ! curl --head --silent $$TEL_COLLECTOR_BASE_URL > /dev/null; do + while ! curl --head --silent $$OTEL_EXPORTER_OTLP_ENDPOINT > /dev/null; do echo "OpenTelemetry collector not up. Retrying after 1s"; sleep 1; i=$$((i+1)); if [ $$i -eq $$max ]; then - echo "OpenTelemetry collector at $$TEL_COLLECTOR_BASE_URL not up after $${max}s. Running without metrics"; - unset TEL_COLLECTOR_BASE_URL; + echo "OpenTelemetry collector at $$OTEL_EXPORTER_OTLP_ENDPOINT not up after $${max}s. 
Running without metrics"; + unset OTEL_EXPORTER_OTLP_ENDPOINT; break fi; done; diff --git a/iac/mainnet-fork/redeploy b/iac/mainnet-fork/redeploy index e440e5c8425..00750edc07d 100644 --- a/iac/mainnet-fork/redeploy +++ b/iac/mainnet-fork/redeploy @@ -1 +1 @@ -3 \ No newline at end of file +3 diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 006bccdfd4d..a276f7cc3a1 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -28,6 +28,7 @@ import { type EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; +import { Timer } from '@aztec/foundation/timer'; import { ClassRegistererAddress } from '@aztec/protocol-contracts/class-registerer'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { @@ -109,7 +110,7 @@ export class Archiver implements ArchiveSource { telemetry: TelemetryClient, blockUntilSynced = true, ): Promise { - const chain = createEthereumChain(config.rpcUrl, config.l1ChainId); + const chain = createEthereumChain(config.l1RpcUrl, config.l1ChainId); const publicClient = createPublicClient({ chain: chain.chainInfo, transport: http(chain.rpcUrl), @@ -291,8 +292,12 @@ export class Archiver implements ArchiveSource { ); if (retrievedBlocks.retrievedData.length > 0) { + const timer = new Timer(); await this.store.addBlocks(retrievedBlocks); - this.instrumentation.processNewBlocks(retrievedBlocks.retrievedData); + this.instrumentation.processNewBlocks( + timer.ms() / retrievedBlocks.retrievedData.length, + retrievedBlocks.retrievedData, + ); const lastL2BlockNumber = retrievedBlocks.retrievedData[retrievedBlocks.retrievedData.length - 1].number; this.log.verbose(`Processed ${retrievedBlocks.retrievedData.length} new L2 blocks up to 
${lastL2BlockNumber}`); } diff --git a/yarn-project/archiver/src/archiver/config.ts b/yarn-project/archiver/src/archiver/config.ts index c59e6d1789c..f4ec61d3106 100644 --- a/yarn-project/archiver/src/archiver/config.ts +++ b/yarn-project/archiver/src/archiver/config.ts @@ -1,4 +1,5 @@ -import { type L1ContractAddresses, getL1ContractAddressesFromEnv } from '@aztec/ethereum'; +import { type L1ContractAddresses, type L1ReaderConfig, l1ReaderConfigMappings } from '@aztec/ethereum'; +import { type ConfigMappingsType, getConfigFromMappings, numberConfigHelper } from '@aztec/foundation/config'; /** * There are 2 polling intervals used in this configuration. The first is the archiver polling interval, archiverPollingIntervalMS. @@ -10,27 +11,12 @@ import { type L1ContractAddresses, getL1ContractAddressesFromEnv } from '@aztec/ /** * The archiver configuration. */ -export interface ArchiverConfig { +export type ArchiverConfig = { /** * URL for an archiver service. If set, will return an archiver client as opposed to starting a new one. */ archiverUrl?: string; - /** - * The url of the Ethereum RPC node. - */ - rpcUrl: string; - - /** - * The key for the ethereum node. - */ - apiKey?: string; - - /** - * The L1 chain's ID - */ - l1ChainId: number; - /** * The polling interval in ms for retrieving new L2 blocks and encrypted logs. */ @@ -53,7 +39,35 @@ export interface ArchiverConfig { /** The max number of logs that can be obtained in 1 "getUnencryptedLogs" call. */ maxLogs?: number; -} +} & L1ReaderConfig; + +export const archiverConfigMappings: ConfigMappingsType = { + archiverUrl: { + env: 'ARCHIVER_URL', + description: + 'URL for an archiver service. 
If set, will return an archiver client as opposed to starting a new one.', + }, + archiverPollingIntervalMS: { + env: 'ARCHIVER_POLLING_INTERVAL_MS', + description: 'The polling interval in ms for retrieving new L2 blocks and encrypted logs.', + ...numberConfigHelper(1000), + }, + viemPollingIntervalMS: { + env: 'ARCHIVER_VIEM_POLLING_INTERVAL_MS', + description: 'The polling interval viem uses in ms', + ...numberConfigHelper(1000), + }, + dataDirectory: { + env: 'DATA_DIRECTORY', + description: 'Optional dir to store data. If omitted will store in memory.', + }, + maxLogs: { + env: 'ARCHIVER_MAX_LOGS', + description: 'The max number of logs that can be obtained in 1 "getUnencryptedLogs" call.', + ...numberConfigHelper(1_000), + }, + ...l1ReaderConfigMappings, +}; /** * Returns the archiver configuration from the environment variables. @@ -61,23 +75,5 @@ export interface ArchiverConfig { * @returns The archiver configuration. */ export function getArchiverConfigFromEnv(): ArchiverConfig { - const { - ETHEREUM_HOST, - L1_CHAIN_ID, - ARCHIVER_POLLING_INTERVAL_MS, - ARCHIVER_VIEM_POLLING_INTERVAL_MS, - API_KEY, - DATA_DIRECTORY, - ARCHIVER_URL, - } = process.env; - return { - rpcUrl: ETHEREUM_HOST || '', - l1ChainId: L1_CHAIN_ID ? +L1_CHAIN_ID : 31337, // 31337 is the default chain id for anvil - archiverPollingIntervalMS: ARCHIVER_POLLING_INTERVAL_MS ? +ARCHIVER_POLLING_INTERVAL_MS : 1_000, - viemPollingIntervalMS: ARCHIVER_VIEM_POLLING_INTERVAL_MS ? 
+ARCHIVER_VIEM_POLLING_INTERVAL_MS : 1_000, - apiKey: API_KEY, - l1Contracts: getL1ContractAddressesFromEnv(), - dataDirectory: DATA_DIRECTORY, - archiverUrl: ARCHIVER_URL, - }; + return getConfigFromMappings(archiverConfigMappings); } diff --git a/yarn-project/archiver/src/archiver/instrumentation.ts b/yarn-project/archiver/src/archiver/instrumentation.ts index 837b00af7f2..ccfaf063de8 100644 --- a/yarn-project/archiver/src/archiver/instrumentation.ts +++ b/yarn-project/archiver/src/archiver/instrumentation.ts @@ -1,9 +1,17 @@ import { type L2Block } from '@aztec/circuit-types'; -import { type Gauge, type Histogram, Metrics, type TelemetryClient, ValueType } from '@aztec/telemetry-client'; +import { + type Gauge, + type Histogram, + Metrics, + type TelemetryClient, + ValueType, + exponentialBuckets, +} from '@aztec/telemetry-client'; export class ArchiverInstrumentation { private blockHeight: Gauge; - private blockSize: Histogram; + private blockSize: Gauge; + private syncDuration: Histogram; constructor(telemetry: TelemetryClient) { const meter = telemetry.getMeter('Archiver'); @@ -12,16 +20,23 @@ export class ArchiverInstrumentation { valueType: ValueType.INT, }); - this.blockSize = meter.createHistogram(Metrics.ARCHIVER_BLOCK_SIZE, { - description: 'The number of transactions processed per block', + this.blockSize = meter.createGauge(Metrics.ARCHIVER_BLOCK_SIZE, { + description: 'The number of transactions in a block', + valueType: ValueType.INT, + }); + + this.syncDuration = meter.createHistogram(Metrics.ARCHIVER_SYNC_DURATION, { + unit: 'ms', + description: 'Duration to sync a block', valueType: ValueType.INT, advice: { - explicitBucketBoundaries: [2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192], + explicitBucketBoundaries: exponentialBuckets(1, 16), }, }); } - public processNewBlocks(blocks: L2Block[]) { + public processNewBlocks(syncTimePerBlock: number, blocks: L2Block[]) { + this.syncDuration.record(syncTimePerBlock); 
this.blockHeight.record(Math.max(...blocks.map(b => b.number))); for (const block of blocks) { this.blockSize.record(block.body.txEffects.length); diff --git a/yarn-project/archiver/src/index.ts b/yarn-project/archiver/src/index.ts index e47c3ebb5c5..7b8fa78a625 100644 --- a/yarn-project/archiver/src/index.ts +++ b/yarn-project/archiver/src/index.ts @@ -23,7 +23,7 @@ const log = createDebugLogger('aztec:archiver'); // eslint-disable-next-line require-await async function main() { const config = getArchiverConfigFromEnv(); - const { rpcUrl, l1Contracts } = config; + const { l1RpcUrl: rpcUrl, l1Contracts } = config; const publicClient = createPublicClient({ chain: localhost, diff --git a/yarn-project/aztec-node/src/aztec-node/config.ts b/yarn-project/aztec-node/src/aztec-node/config.ts index 8d2f883f1bf..79f705ab417 100644 --- a/yarn-project/aztec-node/src/aztec-node/config.ts +++ b/yarn-project/aztec-node/src/aztec-node/config.ts @@ -1,13 +1,16 @@ -import { type ArchiverConfig, getArchiverConfigFromEnv as getArchiverVars } from '@aztec/archiver'; -import { type P2PConfig, getP2PConfigEnvVars } from '@aztec/p2p'; -import { type ProverClientConfig, getProverEnvVars } from '@aztec/prover-client'; -import { type SequencerClientConfig, getConfigEnvVars as getSequencerVars } from '@aztec/sequencer-client'; -import { type WorldStateConfig, getWorldStateConfigFromEnv as getWorldStateVars } from '@aztec/world-state'; +import { type ArchiverConfig, archiverConfigMappings } from '@aztec/archiver'; +import { type ConfigMappingsType, getConfigFromMappings } from '@aztec/foundation/config'; +import { type P2PConfig, p2pConfigMappings } from '@aztec/p2p'; +import { type ProverClientConfig, proverClientConfigMappings } from '@aztec/prover-client'; +import { type SequencerClientConfig, sequencerClientConfigMappings } from '@aztec/sequencer-client'; +import { type WorldStateConfig, worldStateConfigMappings } from '@aztec/world-state'; import { readFileSync } from 'fs'; import { 
dirname, resolve } from 'path'; import { fileURLToPath } from 'url'; +export { sequencerClientConfigMappings, SequencerClientConfig } from '@aztec/sequencer-client'; + /** * The configuration the aztec node. */ @@ -23,24 +26,32 @@ export type AztecNodeConfig = ArchiverConfig & disableProver: boolean; }; +export const aztecNodeConfigMappings: ConfigMappingsType = { + ...archiverConfigMappings, + ...sequencerClientConfigMappings, + ...proverClientConfigMappings, + ...worldStateConfigMappings, + ...p2pConfigMappings, + disableSequencer: { + env: 'SEQ_DISABLED', + parseEnv: (val: string) => ['1', 'true'].includes(val), + default: false, + description: 'Whether the sequencer is disabled for this node.', + }, + disableProver: { + env: 'PROVER_DISABLED', + parseEnv: (val: string) => ['1', 'true'].includes(val), + default: false, + description: 'Whether the prover is disabled for this node.', + }, +}; + /** * Returns the config of the aztec node from environment variables with reasonable defaults. * @returns A valid aztec node config. 
*/ export function getConfigEnvVars(): AztecNodeConfig { - const { SEQ_DISABLED, PROVER_DISABLED = '' } = process.env; - - const allEnvVars: AztecNodeConfig = { - ...getSequencerVars(), - ...getArchiverVars(), - ...getP2PConfigEnvVars(), - ...getWorldStateVars(), - ...getProverEnvVars(), - disableSequencer: !!SEQ_DISABLED, - disableProver: ['1', 'true'].includes(PROVER_DISABLED), - }; - - return allEnvVars; + return getConfigFromMappings(aztecNodeConfigMappings); } /** diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 931e33dc032..cd5b8b8b097 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -135,7 +135,7 @@ export class AztecNodeService implements AztecNode { storeLog = createDebugLogger('aztec:node:lmdb'), ): Promise { telemetry ??= new NoopTelemetryClient(); - const ethereumChain = createEthereumChain(config.rpcUrl, config.l1ChainId); + const ethereumChain = createEthereumChain(config.l1RpcUrl, config.l1ChainId); //validate that the actual chain id matches that specified in configuration if (config.l1ChainId !== ethereumChain.chainInfo.id) { throw new Error( diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index 879963d1ca2..ccd6261e0a5 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -55,7 +55,9 @@ "@aztec/telemetry-client": "workspace:^", "@aztec/txe": "workspace:^", "@aztec/types": "workspace:^", + "@types/chalk": "^2.2.0", "abitype": "^0.8.11", + "chalk": "^5.3.0", "commander": "^12.1.0", "koa": "^2.14.2", "koa-router": "^12.0.0", diff --git a/yarn-project/aztec/src/cli/aztec_start_options.ts b/yarn-project/aztec/src/cli/aztec_start_options.ts new file mode 100644 index 00000000000..663734e3bbd --- /dev/null +++ b/yarn-project/aztec/src/cli/aztec_start_options.ts @@ -0,0 +1,294 @@ +import { type ArchiverConfig, archiverConfigMappings } from 
'@aztec/archiver'; +import { sequencerClientConfigMappings } from '@aztec/aztec-node'; +import { botConfigMappings } from '@aztec/bot'; +import { type ConfigMapping, filterConfigMappings, isBooleanConfigValue } from '@aztec/foundation/config'; +import { bootnodeConfigMappings, p2pConfigMappings } from '@aztec/p2p'; +import { proverClientConfigMappings } from '@aztec/prover-client'; +import { proverNodeConfigMappings } from '@aztec/prover-node'; +import { allPxeConfigMappings } from '@aztec/pxe'; +import { telemetryClientConfigMappings } from '@aztec/telemetry-client/start'; + +// Define an interface for options +export interface AztecStartOption { + flag: string; + description: string; + defaultValue: any | undefined; + printDefault?: (val: any) => string; + envVar: string | undefined; + parseVal?: (val: string) => any; +} + +export const getOptions = (namespace: string, configMappings: Record) => { + const options: AztecStartOption[] = []; + for (const [key, { env, default: def, parseEnv, description, printDefault }] of Object.entries(configMappings)) { + if (universalOptions.includes(key)) { + continue; + } + const isBoolean = isBooleanConfigValue(configMappings, key as keyof typeof configMappings); + options.push({ + flag: `--${namespace}.${key}${isBoolean ? 
'' : ' '}`, + description, + defaultValue: def, + printDefault, + envVar: env, + parseVal: parseEnv, + }); + } + return options; +}; + +// These are options used by multiple modules so should be inputted once +export const universalOptions = ['l1RpcUrl', 'l1ChainId', 'l1Contracts', 'p2pEnabled']; + +// Define categories and options +export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { + SANDBOX: [ + { + flag: '--sandbox', + description: 'Starts Aztec Sandbox', + defaultValue: undefined, + envVar: undefined, + }, + { + flag: '--sandbox.testAccounts', + description: 'Deploy test accounts on sandbox start', + defaultValue: true, + envVar: 'TEST_ACCOUNTS', + parseVal: val => ['1', true].includes(val), + }, + { + flag: '--sandbox.enableGas', + description: 'Enable gas on sandbox start', + defaultValue: false, + envVar: 'ENABLE_GAS', + parseVal: val => ['1', true].includes(val), + }, + ], + API: [ + { + flag: '--port', + description: 'Port to run the Aztec Services on on', + defaultValue: 8080, + envVar: 'AZTEC_PORT', + parseVal: val => parseInt(val, 10), + }, + { + flag: '--api-prefix', + description: 'Prefix for API routes on any service that is started', + defaultValue: '', + envVar: 'API_PREFIX', + }, + ], + ETHEREUM: [ + { + flag: '--l1-rpc-url ', + description: 'URL of the Ethereum RPC node that services will connect to', + defaultValue: 'http://localhost:8545', + envVar: 'ETHEREUM_HOST', + }, + { + flag: '--l1-chain-id ', + description: 'The L1 chain ID', + defaultValue: 1337, + envVar: 'L1_CHAIN_ID', + parseVal: val => parseInt(val, 10), + }, + { + flag: '--l1-mnemonic ', + description: 'Mnemonic for L1 accounts. 
Will be used if no publisher private keys are provided', + defaultValue: undefined, + envVar: 'MNEMONIC', + }, + ], + 'L1 CONTRACT ADDRESSES': [ + { + flag: '--rollup-address ', + description: 'The deployed L1 rollup contract address', + defaultValue: undefined, + envVar: 'ROLLUP_CONTRACT_ADDRESS', + }, + { + flag: '--registry-address ', + description: 'The deployed L1 registry contract address', + defaultValue: undefined, + envVar: 'REGISTRY_CONTRACT_ADDRESS', + }, + { + flag: '--inbox-address ', + description: 'The deployed L1 -> L2 inbox contract address', + defaultValue: undefined, + envVar: 'INBOX_CONTRACT_ADDRESS', + }, + { + flag: '--outbox-address ', + description: 'The deployed L2 -> L1 outbox contract address', + defaultValue: undefined, + envVar: 'OUTBOX_CONTRACT_ADDRESS', + }, + { + flag: '--availability-oracle-address ', + description: 'The deployed L1 availability oracle contract address', + defaultValue: undefined, + envVar: 'AVAILABILITY_ORACLE_CONTRACT_ADDRESS', + }, + { + flag: '--gas-token-address ', + description: 'The deployed L1 gas token contract address', + defaultValue: undefined, + envVar: 'GAS_TOKEN_CONTRACT_ADDRESS', + }, + { + flag: '--gas-portal-address ', + description: 'The deployed L1 gas portal contract address', + defaultValue: undefined, + envVar: 'GAS_PORTAL_CONTRACT_ADDRESS', + }, + ], + // We can't easily auto-generate node options as they're parts of modules defined below + 'AZTEC NODE': [ + { + flag: '--node', + description: 'Starts Aztec Node with options', + defaultValue: undefined, + envVar: undefined, + }, + { + flag: '--node.archiverUrl ', + description: 'URL for an archiver service', + defaultValue: undefined, + envVar: 'ARCHIVER_URL', + }, + { + flag: '--node.dataDirectory ', + description: 'Where to store node data. 
If not set, will store temporarily', + defaultValue: undefined, + envVar: 'NODE_DATA_DIRECTORY', + }, + { + flag: '--node.deployAztecContracts', + description: 'Deploys L1 Aztec contracts before starting the node. Needs mnemonic or private key to be set', + defaultValue: false, + envVar: 'DEPLOY_AZTEC_CONTRACTS', + }, + { + flag: '--node.publisherPrivateKey ', + description: 'Private key of account for publishing L1 contracts', + defaultValue: undefined, + envVar: 'L1_PRIVATE_KEY', + }, + { + flag: '--node.l2QueueSize ', + description: 'Size of queue of L2 blocks to store in world state', + defaultValue: 1000, + envVar: 'L2_QUEUE_SIZE', + parseVal: val => parseInt(val, 10), + }, + { + flag: '--node.worldStateBlockCheckIntervalMS ', + description: 'Frequency in which to check for blocks in ms', + defaultValue: 100, + envVar: 'WS_BLOCK_CHECK_INTERVAL_MS', + parseVal: val => parseInt(val, 10), + }, + ], + 'P2P SUBSYSTEM': [ + { + flag: '--p2p-enabled', + description: 'Enable P2P subsystem', + defaultValue: false, + envVar: 'P2P_ENABLED', + parseVal: val => ['1', true].includes(val), + }, + ...getOptions('p2p', p2pConfigMappings), + ], + TELEMETRY: [...getOptions('tel', telemetryClientConfigMappings)], + PXE: [ + { + flag: '--pxe', + description: 'Starts Aztec PXE with options', + defaultValue: undefined, + envVar: undefined, + }, + ...getOptions('pxe', allPxeConfigMappings), + ], + ARCHIVER: [ + { + flag: '--archiver', + description: 'Starts Aztec Archiver with options', + defaultValue: undefined, + envVar: undefined, + }, + // filter out archiverUrl as it's passed separately in --node & --prover-node + ...getOptions('archiver', archiverConfigMappings).filter(opt => !opt.flag.includes('archiverUrl')), + ], + SEQUENCER: [ + { + flag: '--sequencer', + description: 'Starts Aztec Sequencer with options', + defaultValue: undefined, + envVar: undefined, + }, + ...getOptions('sequencer', sequencerClientConfigMappings), + ], + 'PROVER AGENT': [ + { + flag: '--prover', + 
description: 'Starts Aztec Prover Agent with options', + defaultValue: undefined, + envVar: undefined, + }, + ...getOptions('prover', proverClientConfigMappings), + ], + 'PROVER NODE': [ + { + flag: '--prover-node', + description: 'Starts Aztec Prover Node with options', + defaultValue: undefined, + envVar: undefined, + }, + { + flag: '--proverNode.archiverUrl ', + description: 'URL for an archiver service', + defaultValue: undefined, + envVar: 'ARCHIVER_URL', + }, + ...getOptions( + 'proverNode', + // filter out archiver options from prover node options as they're passed separately in --archiver + filterConfigMappings(proverNodeConfigMappings, Object.keys(archiverConfigMappings) as (keyof ArchiverConfig)[]), + ), + ], + 'P2P BOOTSTRAP': [ + { + flag: '--p2p-bootstrap', + description: 'Starts Aztec P2P Bootstrap with options', + defaultValue: undefined, + envVar: undefined, + }, + ...getOptions('p2pBootstrap', bootnodeConfigMappings), + ], + BOT: [ + { + flag: '--bot', + description: 'Starts Aztec Bot with options', + defaultValue: undefined, + envVar: undefined, + }, + ...getOptions('bot', botConfigMappings), + ], + TXE: [ + { + flag: '--txe', + description: 'Starts Aztec TXE with options', + defaultValue: undefined, + envVar: undefined, + }, + { + flag: '--txe.port ', + description: 'Port to run TXE on', + defaultValue: 8081, + envVar: 'TXE_PORT', + parseVal: val => parseInt(val, 10), + }, + ], +}; diff --git a/yarn-project/aztec/src/cli/cli.ts b/yarn-project/aztec/src/cli/cli.ts index d5b92f2b566..4242e07c644 100644 --- a/yarn-project/aztec/src/cli/cli.ts +++ b/yarn-project/aztec/src/cli/cli.ts @@ -4,116 +4,118 @@ import { type ServerList, createNamespacedJsonRpcServer, createStatusRouter } fr import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; import { createPXERpcServer } from '@aztec/pxe'; -import { type Command } from 'commander'; +import { Command } from 'commander'; import http from 'http'; import { createSandbox } from '../sandbox.js'; 
import { github, splash } from '../splash.js'; -import { cliTexts } from './texts.js'; -import { createAccountLogs, installSignalHandlers } from './util.js'; - -const { AZTEC_PORT = '8080', API_PREFIX = '', TEST_ACCOUNTS = 'true', ENABLE_GAS = '' } = process.env; +import { aztecStartOptions } from './aztec_start_options.js'; +import { + addOptions, + createAccountLogs, + extractNamespacedOptions, + installSignalHandlers, + printAztecStartHelpText, +} from './util.js'; /** * Returns commander program that defines the 'aztec' command line interface. * @param userLog - log function for logging user output. * @param debugLogger - logger for logging debug messages. */ -export function injectAztecCommands(program: Command, userLog: LogFn, debugLogger: DebugLogger) { - // Start Aztec modules with options - program - .command('start') - .description( - 'Starts Aztec modules. Options for each module can be set as key-value pairs (e.g. "option1=value1,option2=value2") or as environment variables.', - ) - .option('-sb, --sandbox', 'Starts Aztec Sandbox.') - .option('-p, --port ', 'Port to run Aztec on.', AZTEC_PORT) - .option('-n, --node [options]', cliTexts.node) - .option('-px, --pxe [options]', cliTexts.pxe) - .option('-a, --archiver [options]', cliTexts.archiver) - .option('-s, --sequencer [options]', cliTexts.sequencer) - .option('-r, --prover [options]', cliTexts.proverAgent) - .option('-o, --prover-node [options]', cliTexts.proverNode) - .option('-p2p, --p2p-bootstrap [options]', cliTexts.p2pBootstrap) - .option('-t, --txe [options]', cliTexts.txe) - .option('--bot [options]', cliTexts.bot) - .action(async options => { - // list of 'stop' functions to call when process ends - const signalHandlers: Array<() => Promise> = []; - let services: ServerList = []; - - if (options.sandbox) { - userLog(`${splash}\n${github}\n\n`); - userLog(`Setting up Aztec Sandbox, please stand by...`); - const { aztecNodeConfig, node, pxe, stop } = await createSandbox({ - enableGas: ['true', 
'1'].includes(ENABLE_GAS), - }); - - // Deploy test accounts by default - if (TEST_ACCOUNTS === 'true') { - if (aztecNodeConfig.p2pEnabled) { - userLog(`Not setting up test accounts as we are connecting to a network`); - } else { - userLog('Setting up test accounts...'); - const accounts = await deployInitialTestAccounts(pxe); - const accLogs = await createAccountLogs(accounts, pxe); - userLog(accLogs.join('')); - } - } +export function injectAztecCommands(program: Command, userLog: LogFn, debugLogger: DebugLogger): Command { + const startCmd = new Command('start').description( + 'Starts Aztec modules. Options for each module can be set as key-value pairs (e.g. "option1=value1,option2=value2") or as environment variables.', + ); - // Start Node and PXE JSON-RPC server - const nodeServer = createAztecNodeRpcServer(node); - const pxeServer = createPXERpcServer(pxe); - signalHandlers.push(stop); - services = [{ node: nodeServer }, { pxe: pxeServer }]; - } else { - if (options.node) { - const { startNode } = await import('./cmds/start_node.js'); - services = await startNode(options, signalHandlers, userLog); - } else if (options.bot) { - const { startBot } = await import('./cmds/start_bot.js'); - services = await startBot(options, signalHandlers, userLog); - } else if (options.proverNode) { - const { startProverNode } = await import('./cmds/start_prover_node.js'); - services = await startProverNode(options, signalHandlers, userLog); - } else if (options.pxe) { - const { startPXE } = await import('./cmds/start_pxe.js'); - services = await startPXE(options, signalHandlers, userLog); - } else if (options.archiver) { - const { startArchiver } = await import('./cmds/start_archiver.js'); - services = await startArchiver(options, signalHandlers); - } else if (options.p2pBootstrap) { - const { startP2PBootstrap } = await import('./cmds/start_p2p_bootstrap.js'); - await startP2PBootstrap(options, userLog, debugLogger); - } else if (options.prover) { - const { startProverAgent } 
= await import('./cmds/start_prover_agent.js'); - services = await startProverAgent(options, signalHandlers, userLog); - } else if (options.txe) { - const { startTXE } = await import('./cmds/start_txe.js'); - startTXE(options, debugLogger); - } else if (options.sequencer) { - userLog(`Cannot run a standalone sequencer without a node`); - process.exit(1); + // Assuming commands are added elsewhere, here we just add options to the main program + Object.keys(aztecStartOptions).forEach(category => { + addOptions(startCmd, aztecStartOptions[category]); + }); + + startCmd.helpInformation = printAztecStartHelpText; + + startCmd.action(async options => { + // list of 'stop' functions to call when process ends + const signalHandlers: Array<() => Promise> = []; + let services: ServerList = []; + + if (options.sandbox) { + const sandboxOptions = extractNamespacedOptions(options, 'sandbox'); + userLog(`${splash}\n${github}\n\n`); + userLog(`Setting up Aztec Sandbox, please stand by...`); + const { aztecNodeConfig, node, pxe, stop } = await createSandbox({ + enableGas: sandboxOptions.enableGas, + l1Mnemonic: options.l1Mnemonic, + }); + + // Deploy test accounts by default + if (sandboxOptions.testAccounts) { + if (aztecNodeConfig.p2pEnabled) { + userLog(`Not setting up test accounts as we are connecting to a network`); } else { - userLog(`No module specified to start ${JSON.stringify(options, null, 2)}`); - process.exit(1); + userLog('Setting up test accounts...'); + const accounts = await deployInitialTestAccounts(pxe); + const accLogs = await createAccountLogs(accounts, pxe); + userLog(accLogs.join('')); } } - installSignalHandlers(debugLogger.info, signalHandlers); - if (services.length) { - const rpcServer = createNamespacedJsonRpcServer(services, debugLogger); + // Start Node and PXE JSON-RPC server + const nodeServer = createAztecNodeRpcServer(node); + const pxeServer = createPXERpcServer(pxe); + signalHandlers.push(stop); + services = [{ node: nodeServer }, { pxe: 
pxeServer }]; + } else { + if (options.node) { + const { startNode } = await import('./cmds/start_node.js'); + services = await startNode(options, signalHandlers, userLog); + } else if (options.bot) { + const { startBot } = await import('./cmds/start_bot.js'); + services = await startBot(options, signalHandlers, userLog); + } else if (options.proverNode) { + const { startProverNode } = await import('./cmds/start_prover_node.js'); + services = await startProverNode(options, signalHandlers, userLog); + } else if (options.pxe) { + const { startPXE } = await import('./cmds/start_pxe.js'); + services = await startPXE(options, signalHandlers, userLog); + } else if (options.archiver) { + const { startArchiver } = await import('./cmds/start_archiver.js'); + services = await startArchiver(options, signalHandlers); + } else if (options.p2pBootstrap) { + const { startP2PBootstrap } = await import('./cmds/start_p2p_bootstrap.js'); + await startP2PBootstrap(options, userLog, debugLogger); + } else if (options.prover) { + const { startProverAgent } = await import('./cmds/start_prover_agent.js'); + services = await startProverAgent(options, signalHandlers, userLog); + } else if (options.txe) { + const { startTXE } = await import('./cmds/start_txe.js'); + startTXE(options, debugLogger); + } else if (options.sequencer) { + userLog(`Cannot run a standalone sequencer without a node`); + process.exit(1); + } else { + userLog(`No module specified to start ${JSON.stringify(options, null, 2)}`); + process.exit(1); + } + } + installSignalHandlers(debugLogger.info, signalHandlers); + + if (services.length) { + const rpcServer = createNamespacedJsonRpcServer(services, debugLogger); - const app = rpcServer.getApp(API_PREFIX); - // add status route - const statusRouter = createStatusRouter(API_PREFIX); - app.use(statusRouter.routes()).use(statusRouter.allowedMethods()); + const app = rpcServer.getApp(options.apiPrefix); + // add status route + const statusRouter = 
createStatusRouter(options.apiPrefix); + app.use(statusRouter.routes()).use(statusRouter.allowedMethods()); - const httpServer = http.createServer(app.callback()); - httpServer.listen(options.port); - userLog(`Aztec Server listening on port ${options.port}`); - } - }); + const httpServer = http.createServer(app.callback()); + httpServer.listen(options.port); + userLog(`Aztec Server listening on port ${options.port}`); + } + }); + + program.addCommand(startCmd); program.configureHelp({ sortSubcommands: true }); @@ -125,7 +127,7 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge test [options]: starts a dockerized TXE node via $ aztec start --txe - then runs + then runs $ aztec-nargo test --silence-warnings --oracle-resolver= [options] `, ); diff --git a/yarn-project/aztec/src/cli/cmds/start_archiver.ts b/yarn-project/aztec/src/cli/cmds/start_archiver.ts index 4d6e487e532..c3464fdbad9 100644 --- a/yarn-project/aztec/src/cli/cmds/start_archiver.ts +++ b/yarn-project/aztec/src/cli/cmds/start_archiver.ts @@ -2,8 +2,8 @@ import { Archiver, type ArchiverConfig, KVArchiverDataStore, + archiverConfigMappings, createArchiverRpcServer, - getArchiverConfigFromEnv as getArchiverConfigEnvVars, } from '@aztec/archiver'; import { createDebugLogger } from '@aztec/aztec.js'; import { type ServerList } from '@aztec/foundation/json-rpc/server'; @@ -14,21 +14,16 @@ import { getConfigEnvVars as getTelemetryClientConfig, } from '@aztec/telemetry-client/start'; -import { mergeEnvVarsAndCliOptions, parseModuleOptions } from '../util.js'; +import { extractRelevantOptions } from '../util.js'; export const startArchiver = async (options: any, signalHandlers: (() => Promise)[]) => { const services: ServerList = []; // Start a standalone archiver. 
- // get env vars first - const archiverConfigEnvVars = getArchiverConfigEnvVars(); - // get config from options - const archiverCliOptions = parseModuleOptions(options.archiver); - // merge env vars and cli options - const archiverConfig = mergeEnvVarsAndCliOptions(archiverConfigEnvVars, archiverCliOptions, true); + const archiverConfig = extractRelevantOptions(options, archiverConfigMappings); const storeLog = createDebugLogger('aztec:archiver:lmdb'); const store = await initStoreForRollup( - AztecLmdbStore.open(archiverConfig.dataDirectory, false, storeLog), + AztecLmdbStore.open(archiverConfig.dataDirectory, false), archiverConfig.l1Contracts.rollupAddress, storeLog, ); diff --git a/yarn-project/aztec/src/cli/cmds/start_bot.ts b/yarn-project/aztec/src/cli/cmds/start_bot.ts index 0d33f0bf10c..0888b9dbf63 100644 --- a/yarn-project/aztec/src/cli/cmds/start_bot.ts +++ b/yarn-project/aztec/src/cli/cmds/start_bot.ts @@ -1,9 +1,9 @@ -import { type BotConfig, BotRunner, createBotRunnerRpcServer, getBotConfigFromEnv } from '@aztec/bot'; +import { type BotConfig, BotRunner, botConfigMappings, createBotRunnerRpcServer } from '@aztec/bot'; import { type PXE } from '@aztec/circuit-types'; import { type ServerList } from '@aztec/foundation/json-rpc/server'; import { type LogFn } from '@aztec/foundation/log'; -import { mergeEnvVarsAndCliOptions, parseModuleOptions } from '../util.js'; +import { extractRelevantOptions } from '../util.js'; export async function startBot( options: any, @@ -20,7 +20,6 @@ export async function startBot( ); process.exit(1); } - // Start a PXE client that is used by the bot if required let pxe: PXE | undefined; if (options.pxe) { @@ -38,9 +37,7 @@ export function addBot( signalHandlers: (() => Promise)[], deps: { pxe?: PXE } = {}, ) { - const envVars = getBotConfigFromEnv(); - const cliOptions = parseModuleOptions(options.bot); - const config = mergeEnvVarsAndCliOptions(envVars, cliOptions); + const config = extractRelevantOptions(options, 
botConfigMappings); const botRunner = new BotRunner(config, { pxe: deps.pxe }); const botServer = createBotRunnerRpcServer(botRunner); diff --git a/yarn-project/aztec/src/cli/cmds/start_node.ts b/yarn-project/aztec/src/cli/cmds/start_node.ts index 33dd2a2489b..a3666539abb 100644 --- a/yarn-project/aztec/src/cli/cmds/start_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_node.ts @@ -1,38 +1,35 @@ -import { - type AztecNodeConfig, - createAztecNodeRpcServer, - getConfigEnvVars as getNodeConfigEnvVars, -} from '@aztec/aztec-node'; +import { aztecNodeConfigMappings, createAztecNodeRpcServer } from '@aztec/aztec-node'; import { type PXE } from '@aztec/circuit-types'; -import { NULL_KEY } from '@aztec/ethereum'; import { type ServerList } from '@aztec/foundation/json-rpc/server'; import { type LogFn } from '@aztec/foundation/log'; import { createProvingJobSourceServer } from '@aztec/prover-client/prover-agent'; import { + type TelemetryClientConfig, createAndStartTelemetryClient, - getConfigEnvVars as getTelemetryClientConfig, + telemetryClientConfigMappings, } from '@aztec/telemetry-client/start'; import { mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; -import { MNEMONIC, createAztecNode, deployContractsToL1 } from '../../sandbox.js'; -import { mergeEnvVarsAndCliOptions, parseModuleOptions } from '../util.js'; - -const { DEPLOY_AZTEC_CONTRACTS } = process.env; +import { createAztecNode, deployContractsToL1 } from '../../sandbox.js'; +import { extractL1ContractAddresses, extractNamespacedOptions, extractRelevantOptions } from '../util.js'; export const startNode = async ( options: any, signalHandlers: (() => Promise)[], userLog: LogFn, -): Promise => { + // ): Promise => { +) => { // Services that will be started in a single multi-rpc server const services: ServerList = []; - // get env vars first - const aztecNodeConfigEnvVars = getNodeConfigEnvVars(); - // get config from options - const nodeCliOptions = parseModuleOptions(options.node); - // merge 
env vars and cli options - let nodeConfig = mergeEnvVarsAndCliOptions(aztecNodeConfigEnvVars, nodeCliOptions); + + // options specifically namespaced with --node.