From fe10c7049b3597a96f76a27a22e9233bc3b8ce82 Mon Sep 17 00:00:00 2001
From: Maddiaa <47148561+Maddiaa0@users.noreply.github.com>
Date: Wed, 14 Feb 2024 00:47:54 +0000
Subject: [PATCH] feat(avm): hashing opcodes (#4526)

Adds the [keccak, pedersen, sha256, poseidon] hashing opcodes to the AVM
transpiler and simulator.
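
A minimal usage sketch of the new Poseidon2 instruction, mirroring the added
hashing.test.ts (the initContext fixture and the memory API are the ones used
by the new tests; the wrapper function name is illustrative only):

    import { toBigIntBE } from '@aztec/foundation/bigint-buffer';
    import { poseidonHash } from '@aztec/foundation/crypto';

    import { Field } from '../avm_memory_types.js';
    import { initContext } from '../fixtures/index.js';
    import { Poseidon2 } from './hashing.js';

    async function poseidon2Example() {
      // Lay out three field elements in AVM memory at offset 0, hash them, and
      // write the single-field digest to offset 3.
      const context = initContext();
      const args = [new Field(1n), new Field(2n), new Field(3n)];
      context.machineState.memory.setSlice(/*offset=*/ 0, args);

      await new Poseidon2(/*dstOffset=*/ 3, /*hashOffset=*/ 0, /*hashSize=*/ args.length).execute(context);

      // The opcode result matches hashing the same field buffers directly.
      const expected = new Field(toBigIntBE(poseidonHash(args.map(f => f.toBuffer()))));
      const result = context.machineState.memory.get(3); // equal to `expected`
      return { expected, result };
    }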
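
Keccak and Sha256 return 32-byte digests, which do not fit in one field
element, so those opcodes write the digest across two consecutive memory
slots (high 16 bytes, then low 16 bytes). A sketch of reassembling the
digest, following the same pattern as the new tests (continuing from the
snippet above, after executing e.g. the new Sha256 opcode with dstOffset = 3):

    // Each slot holds one 16-byte half, right-aligned within the 32-byte
    // big-endian field encoding.
    const [high, low] = context.machineState.memory.getSliceAs<Field>(/*offset=*/ 3, /*size=*/ 2);
    const digest = Buffer.concat([
      high.toBuffer().subarray(16, 32),
      low.toBuffer().subarray(16, 32),
    ]);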
---
 avm-transpiler/src/opcodes.rs                 |   4 +
 .../barretenberg/vm/avm_trace/avm_opcode.cpp  |   2 +
 yarn-project/foundation/src/crypto/index.ts   |   1 +
 .../src/crypto/pedersen/pedersen.wasm.ts      |   2 +-
 .../foundation/src/crypto/poseidon/index.ts   |  13 ++
 .../simulator/src/avm/avm_memory_types.ts     |  14 +-
 .../simulator/src/avm/opcodes/hashing.test.ts | 138 ++++++++++++++++++
 .../simulator/src/avm/opcodes/hashing.ts      | 138 ++++++++++++++++++
 .../serialization/bytecode_serialization.ts   |   9 +-
 .../instruction_serialization.ts              |   2 +
 10 files changed, 318 insertions(+), 5 deletions(-)
 create mode 100644 yarn-project/foundation/src/crypto/poseidon/index.ts
 create mode 100644 yarn-project/simulator/src/avm/opcodes/hashing.test.ts
 create mode 100644 yarn-project/simulator/src/avm/opcodes/hashing.ts

diff --git a/avm-transpiler/src/opcodes.rs b/avm-transpiler/src/opcodes.rs
index 6948aca6239..031bee76d2e 100644
--- a/avm-transpiler/src/opcodes.rs
+++ b/avm-transpiler/src/opcodes.rs
@@ -82,6 +82,8 @@ pub enum AvmOpcode {
     // Gadgets
     KECCAK,
     POSEIDON,
+    SHA256,
+    PEDERSEN,
 }
 
 impl AvmOpcode {
@@ -167,6 +169,8 @@ impl AvmOpcode {
             // Gadgets
             AvmOpcode::KECCAK => "KECCAK",
             AvmOpcode::POSEIDON => "POSEIDON",
+            AvmOpcode::SHA256 => "SHA256",
+            AvmOpcode::PEDERSEN => "PEDERSEN",
         }
     }
 }
diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_opcode.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_opcode.cpp
index dbf9a76a741..d1202fd932c 100644
--- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_opcode.cpp
+++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_opcode.cpp
@@ -88,6 +88,8 @@ const std::unordered_map Bytecode::OPERANDS_NUM = {
     //// Gadgets
     //{ OpCode::KECCAK, },
     //{ OpCode::POSEIDON, },
+    //{ OpCode::SHA256, },
+    //{ OpCode::PEDERSEN, },
 };
 
 /**
diff --git a/yarn-project/foundation/src/crypto/index.ts b/yarn-project/foundation/src/crypto/index.ts
index f574fb4d2f5..96f4df584ef 100644
--- a/yarn-project/foundation/src/crypto/index.ts
+++ b/yarn-project/foundation/src/crypto/index.ts
@@ -4,6 +4,7 @@ export * from './keccak/index.js';
 export * from './random/index.js';
 export * from './sha256/index.js';
 export * from './pedersen/index.js';
+export * from './poseidon/index.js';
 
 /**
  * Init the bb singleton. This constructs (if not already) the barretenberg sync api within bb.js itself.
diff --git a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts
index 7cc23bd8503..6d38d88a110 100644
--- a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts
+++ b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts
@@ -19,7 +19,7 @@ export function pedersenCommit(input: Buffer[]) {
  * Create a pedersen hash (field) from an array of input fields.
  * Left pads any inputs less than 32 bytes.
  */
-export function pedersenHash(input: Buffer[], index = 0) {
+export function pedersenHash(input: Buffer[], index = 0): Buffer {
   if (!input.every(i => i.length <= 32)) {
     throw new Error('All Pedersen Hash input buffers must be <= 32 bytes.');
   }
diff --git a/yarn-project/foundation/src/crypto/poseidon/index.ts b/yarn-project/foundation/src/crypto/poseidon/index.ts
new file mode 100644
index 00000000000..7cc44a95b5e
--- /dev/null
+++ b/yarn-project/foundation/src/crypto/poseidon/index.ts
@@ -0,0 +1,13 @@
+import { BarretenbergSync, Fr } from '@aztec/bb.js';
+
+/**
+ * Create a poseidon hash (field) from an array of input fields.
+ * Left pads any inputs less than 32 bytes.
+ */
+export function poseidonHash(input: Buffer[]): Buffer {
+  return Buffer.from(
+    BarretenbergSync.getSingleton()
+      .poseidonHash(input.map(i => new Fr(i)))
+      .toBuffer(),
+  );
+}
diff --git a/yarn-project/simulator/src/avm/avm_memory_types.ts b/yarn-project/simulator/src/avm/avm_memory_types.ts
index 49546d41ec9..b8b709b2d32 100644
--- a/yarn-project/simulator/src/avm/avm_memory_types.ts
+++ b/yarn-project/simulator/src/avm/avm_memory_types.ts
@@ -1,3 +1,4 @@
+import { toBufferBE } from '@aztec/foundation/bigint-buffer';
 import { Fr } from '@aztec/foundation/fields';
 
 import { strict as assert } from 'assert';
@@ -19,6 +20,9 @@ export abstract class MemoryValue {
   // Use sparingly.
   public abstract toBigInt(): bigint;
 
+  // To Buffer
+  public abstract toBuffer(): Buffer;
+
   // To field
   public toFr(): Fr {
     return new Fr(this.toBigInt());
@@ -111,6 +115,10 @@ function UnsignedIntegerClassFactory(bits: number) {
     public toBigInt(): bigint {
       return this.n;
     }
+
+    public toBuffer(): Buffer {
+      return toBufferBE(this.n, bits / 8);
+    }
   };
 }
 
@@ -127,7 +135,7 @@ export class Field extends MemoryValue {
   public static readonly MODULUS: bigint = Fr.MODULUS;
   private readonly rep: Fr;
 
-  constructor(v: number | bigint | Fr) {
+  constructor(v: number | bigint | Fr | Buffer) {
     super();
     this.rep = new Fr(v);
   }
@@ -159,6 +167,10 @@ export class Field extends MemoryValue {
   public toBigInt(): bigint {
     return this.rep.toBigInt();
   }
+
+  public toBuffer(): Buffer {
+    return this.rep.toBuffer();
+  }
 }
 
 export enum TypeTag {
diff --git a/yarn-project/simulator/src/avm/opcodes/hashing.test.ts b/yarn-project/simulator/src/avm/opcodes/hashing.test.ts
new file mode 100644
index 00000000000..48e7206385a
--- /dev/null
+++ b/yarn-project/simulator/src/avm/opcodes/hashing.test.ts
@@ -0,0 +1,138 @@
+import { toBigIntBE } from '@aztec/foundation/bigint-buffer';
+import { keccak, pedersenHash, poseidonHash, sha256 } from '@aztec/foundation/crypto';
+
+import { AvmContext } from '../avm_context.js';
+import { Field } from '../avm_memory_types.js';
+import { initContext } from '../fixtures/index.js';
+import { Keccak, Pedersen, Poseidon2, Sha256 } from './hashing.js';
+
+describe('Hashing Opcodes', () => {
+  let context: AvmContext;
+
+  beforeEach(async () => {
+    context = initContext();
+  });
+
+  describe('Poseidon2', () => {
+    it('Should (de)serialize correctly', () => {
+      const buf = Buffer.from([
+        Poseidon2.opcode, // opcode
+        ...Buffer.from('12345678', 'hex'), // dstOffset
+        ...Buffer.from('23456789', 'hex'), // hashOffset
+        ...Buffer.from('3456789a', 'hex'), // hashSize
+      ]);
+      const inst = new Poseidon2(/*dstOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);
+
+      expect(Poseidon2.deserialize(buf)).toEqual(inst);
+      expect(inst.serialize()).toEqual(buf);
+    });
+
+    it('Should hash correctly', async () => {
+      const args = [new Field(1n), new Field(2n), new Field(3n)];
+      const hashOffset = 0;
+      context.machineState.memory.setSlice(hashOffset, args);
+
+      const dstOffset = 3;
+
+      const expectedHash = poseidonHash(args.map(field => field.toBuffer()));
+      await new Poseidon2(dstOffset, hashOffset, args.length).execute(context);
+
+      const result = context.machineState.memory.get(dstOffset);
+      expect(result).toEqual(new Field(toBigIntBE(expectedHash)));
+    });
+  });
+
+  describe('Keccak', () => {
+    it('Should (de)serialize correctly', () => {
+      const buf = Buffer.from([
+        Keccak.opcode, // opcode
+        ...Buffer.from('12345678', 'hex'), // dstOffset
+        ...Buffer.from('23456789', 'hex'), // hashOffset
+        ...Buffer.from('3456789a', 'hex'), // hashSize
+      ]);
+      const inst = new Keccak(/*dstOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);
+
+      expect(Keccak.deserialize(buf)).toEqual(inst);
+      expect(inst.serialize()).toEqual(buf);
+    });
+
+    it('Should hash correctly', async () => {
+      const args = [new Field(1n), new Field(2n), new Field(3n)];
+      const hashOffset = 0;
+      context.machineState.memory.setSlice(hashOffset, args);
+
+      const dstOffset = 3;
+
+      const inputBuffer = Buffer.concat(args.map(field => field.toBuffer()));
+      const expectedHash = keccak(inputBuffer);
+      await new Keccak(dstOffset, hashOffset, args.length).execute(context);
+
+      const result = context.machineState.memory.getSliceAs<Field>(dstOffset, 2);
+      const combined = Buffer.concat([result[0].toBuffer().subarray(16, 32), result[1].toBuffer().subarray(16, 32)]);
+
+      expect(combined).toEqual(expectedHash);
+    });
+  });
+
+  describe('Sha256', () => {
+    it('Should (de)serialize correctly', () => {
+      const buf = Buffer.from([
+        Sha256.opcode, // opcode
+        ...Buffer.from('12345678', 'hex'), // dstOffset
+        ...Buffer.from('23456789', 'hex'), // hashOffset
+        ...Buffer.from('3456789a', 'hex'), // hashSize
+      ]);
+      const inst = new Sha256(/*dstOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);
+
+      expect(Sha256.deserialize(buf)).toEqual(inst);
+      expect(inst.serialize()).toEqual(buf);
+    });
+
+    it('Should hash correctly', async () => {
+      const args = [new Field(1n), new Field(2n), new Field(3n)];
+      const hashOffset = 0;
+      context.machineState.memory.setSlice(hashOffset, args);
+
+      const dstOffset = 3;
+
+      const inputBuffer = Buffer.concat(args.map(field => field.toBuffer()));
+      const expectedHash = sha256(inputBuffer);
+      await new Sha256(dstOffset, hashOffset, args.length).execute(context);
+
+      const result = context.machineState.memory.getSliceAs<Field>(dstOffset, 2);
+      const combined = Buffer.concat([result[0].toBuffer().subarray(16, 32), result[1].toBuffer().subarray(16, 32)]);
+
+      expect(combined).toEqual(expectedHash);
+    });
+  });
+
+  describe('Pedersen', () => {
+    it('Should (de)serialize correctly', () => {
+      const buf = Buffer.from([
+        Pedersen.opcode, // opcode
+        ...Buffer.from('12345678', 'hex'), // dstOffset
+        ...Buffer.from('23456789', 'hex'), // hashOffset
+        ...Buffer.from('3456789a', 'hex'), // hashSize
+      ]);
+      const inst = new Pedersen(/*dstOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);
+
+      expect(Pedersen.deserialize(buf)).toEqual(inst);
+      expect(inst.serialize()).toEqual(buf);
+    });
+
+    it('Should hash correctly', async () => {
+      const args = [new Field(1n), new Field(2n), new Field(3n)];
+      const hashOffset = 0;
+      context.machineState.memory.setSlice(hashOffset, args);
+
+      const dstOffset = 3;
+
+      const inputBuffer = args.map(field => field.toBuffer());
+      const expectedHash = pedersenHash(inputBuffer);
+      await new Pedersen(dstOffset, hashOffset, args.length).execute(context);
+
+      const result = context.machineState.memory.get(dstOffset);
+      expect(result).toEqual(new Field(toBigIntBE(expectedHash)));
+    });
+  });
+});
diff --git a/yarn-project/simulator/src/avm/opcodes/hashing.ts b/yarn-project/simulator/src/avm/opcodes/hashing.ts
new file mode 100644
index 00000000000..5add968ac6e
--- /dev/null
+++ b/yarn-project/simulator/src/avm/opcodes/hashing.ts
@@ -0,0 +1,138 @@
+import { toBigIntBE } from '@aztec/foundation/bigint-buffer';
+import { keccak, pedersenHash, poseidonHash, sha256 } from '@aztec/foundation/crypto';
+
+import { AvmContext } from '../avm_context.js';
+import { Field } from '../avm_memory_types.js';
+import { Opcode, OperandType } from '../serialization/instruction_serialization.js';
+import { Instruction } from './instruction.js';
+
+export class Poseidon2 extends Instruction {
+  static type: string = 'POSEIDON2';
+  static readonly opcode: Opcode = Opcode.POSEIDON;
+
+  // Informs (de)serialization. See Instruction.deserialize.
+  static readonly wireFormat: OperandType[] = [
+    OperandType.UINT8,
+    OperandType.UINT32,
+    OperandType.UINT32,
+    OperandType.UINT32,
+  ];
+
+  constructor(private dstOffset: number, private hashOffset: number, private hashSize: number) {
+    super();
+  }
+
+  async execute(context: AvmContext): Promise<void> {
+    // We hash a set of field elements
+    const hashData = context.machineState.memory
+      .getSlice(this.hashOffset, this.hashOffset + this.hashSize)
+      .map(word => word.toBuffer());
+
+    const hash = poseidonHash(hashData);
+    context.machineState.memory.set(this.dstOffset, new Field(hash));
+
+    context.machineState.incrementPc();
+  }
+}
+
+export class Keccak extends Instruction {
+  static type: string = 'KECCAK';
+  static readonly opcode: Opcode = Opcode.KECCAK;
+
+  // Informs (de)serialization. See Instruction.deserialize.
+  static readonly wireFormat: OperandType[] = [
+    OperandType.UINT8,
+    OperandType.UINT32,
+    OperandType.UINT32,
+    OperandType.UINT32,
+  ];
+
+  constructor(private dstOffset: number, private hashOffset: number, private hashSize: number) {
+    super();
+  }
+
+  // Note hash output is 32 bytes, so takes up two fields
+  async execute(context: AvmContext): Promise<void> {
+    // We hash a set of field elements
+    const hashData = context.machineState.memory
+      .getSlice(this.hashOffset, this.hashOffset + this.hashSize)
+      .map(word => word.toBuffer());
+
+    const hash = keccak(Buffer.concat(hashData));
+
+    // Split output into two fields
+    const high = new Field(toBigIntBE(hash.subarray(0, 16)));
+    const low = new Field(toBigIntBE(hash.subarray(16, 32)));
+
+    context.machineState.memory.set(this.dstOffset, high);
+    context.machineState.memory.set(this.dstOffset + 1, low);
+
+    context.machineState.incrementPc();
+  }
+}
+
+export class Sha256 extends Instruction {
+  static type: string = 'SHA256';
+  static readonly opcode: Opcode = Opcode.SHA256;
+
+  // Informs (de)serialization. See Instruction.deserialize.
+  static readonly wireFormat: OperandType[] = [
+    OperandType.UINT8,
+    OperandType.UINT32,
+    OperandType.UINT32,
+    OperandType.UINT32,
+  ];
+
+  constructor(private dstOffset: number, private hashOffset: number, private hashSize: number) {
+    super();
+  }
+
+  // Note hash output is 32 bytes, so takes up two fields
+  async execute(context: AvmContext): Promise<void> {
+    // We hash a set of field elements
+    const hashData = context.machineState.memory
+      .getSlice(this.hashOffset, this.hashOffset + this.hashSize)
+      .map(word => word.toBuffer());
+
+    const hash = sha256(Buffer.concat(hashData));
+
+    // Split output into two fields
+    const high = new Field(toBigIntBE(hash.subarray(0, 16)));
+    const low = new Field(toBigIntBE(hash.subarray(16, 32)));
+
+    context.machineState.memory.set(this.dstOffset, high);
+    context.machineState.memory.set(this.dstOffset + 1, low);
+
+    context.machineState.incrementPc();
+  }
+}
+
+export class Pedersen extends Instruction {
+  static type: string = 'PEDERSEN';
+  static readonly opcode: Opcode = Opcode.PEDERSEN;
+
+  // Informs (de)serialization. See Instruction.deserialize.
+  static readonly wireFormat: OperandType[] = [
+    OperandType.UINT8,
+    OperandType.UINT32,
+    OperandType.UINT32,
+    OperandType.UINT32,
+  ];
+
+  constructor(private dstOffset: number, private hashOffset: number, private hashSize: number) {
+    super();
+  }
+
+  async execute(context: AvmContext): Promise<void> {
+    // We hash a set of field elements
+    const hashData = context.machineState.memory
+      .getSlice(this.hashOffset, this.hashOffset + this.hashSize)
+      .map(word => word.toBuffer());
+
+    // No domain sep for now
+    const hash = pedersenHash(hashData);
+    context.machineState.memory.set(this.dstOffset, new Field(hash));
+
+    context.machineState.incrementPc();
+  }
+}
diff --git a/yarn-project/simulator/src/avm/serialization/bytecode_serialization.ts b/yarn-project/simulator/src/avm/serialization/bytecode_serialization.ts
index 756d44e906b..dcacc5dbf01 100644
--- a/yarn-project/simulator/src/avm/serialization/bytecode_serialization.ts
+++ b/yarn-project/simulator/src/avm/serialization/bytecode_serialization.ts
@@ -1,3 +1,4 @@
+import { Keccak, Pedersen, Poseidon2, Sha256 } from '../opcodes/hashing.js';
 import {
   Add,
   Address,
@@ -127,9 +128,11 @@ const INSTRUCTION_SET = () =>
     [Return.opcode, Return],
     [Revert.opcode, Revert],
 
-    // Gadgets
-    //[Keccak.opcode, Keccak],
-    //[Poseidon.opcode, Poseidon],
+    //// Gadgets
+    [Keccak.opcode, Keccak],
+    [Poseidon2.opcode, Poseidon2],
+    [Sha256.opcode, Sha256],
+    [Pedersen.opcode, Pedersen],
   ]);
 
 interface Serializable {
diff --git a/yarn-project/simulator/src/avm/serialization/instruction_serialization.ts b/yarn-project/simulator/src/avm/serialization/instruction_serialization.ts
index d4d2a3a32c6..d9e07f50f04 100644
--- a/yarn-project/simulator/src/avm/serialization/instruction_serialization.ts
+++ b/yarn-project/simulator/src/avm/serialization/instruction_serialization.ts
@@ -66,6 +66,8 @@ export enum Opcode {
   KECCAK,
   POSEIDON,
   // Add new opcodes before this
+  SHA256, // temp - may be removed, but a lot of contracts rely on it
+  PEDERSEN, // temp - may be removed, but a lot of contracts rely on it
   TOTAL_OPCODES_NUMBER,
 }