Commit
Adds [keccak, pedersen, sha256, poseidon]
Showing 10 changed files with 318 additions and 5 deletions.
@@ -0,0 +1,13 @@
import { BarretenbergSync, Fr } from '@aztec/bb.js';

/**
 * Create a poseidon hash (field) from an array of input fields.
 * Left pads any inputs less than 32 bytes.
 */
export function poseidonHash(input: Buffer[]): Buffer {
  return Buffer.from(
    BarretenbergSync.getSingleton()
      .poseidonHash(input.map(i => new Fr(i)))
      .toBuffer(),
  );
}
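For orientation, a minimal usage sketch of the wrapper above (not part of the commit); the two 32-byte big-endian input buffers are illustrative values only:

// Illustrative only: hashing two field elements with the poseidonHash wrapper above.
const a = Buffer.alloc(32);
a.writeUInt8(1, 31); // big-endian encoding of the field element 1
const b = Buffer.alloc(32);
b.writeUInt8(2, 31); // big-endian encoding of the field element 2

const digest = poseidonHash([a, b]); // 32-byte Buffer holding a single output field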
@@ -0,0 +1,138 @@
import { toBigIntBE } from '@aztec/foundation/bigint-buffer';
import { keccak, pedersenHash, poseidonHash, sha256 } from '@aztec/foundation/crypto';

import { AvmContext } from '../avm_context.js';
import { Field } from '../avm_memory_types.js';
import { initContext } from '../fixtures/index.js';
import { Keccak, Pedersen, Poseidon2, Sha256 } from './hashing.js';

describe('Hashing Opcodes', () => {
  let context: AvmContext;

  beforeEach(async () => {
    context = initContext();
  });

  describe('Poseidon2', () => {
    it('Should (de)serialize correctly', () => {
      const buf = Buffer.from([
        Poseidon2.opcode, // opcode
        ...Buffer.from('12345678', 'hex'), // dstOffset
        ...Buffer.from('23456789', 'hex'), // hashOffset
        ...Buffer.from('3456789a', 'hex'), // hashSize
      ]);
      const inst = new Poseidon2(/*dstOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);

      expect(Poseidon2.deserialize(buf)).toEqual(inst);
      expect(inst.serialize()).toEqual(buf);
    });

    it('Should hash correctly', async () => {
      const args = [new Field(1n), new Field(2n), new Field(3n)];
      const hashOffset = 0;
      context.machineState.memory.setSlice(hashOffset, args);

      const dstOffset = 3;

      const expectedHash = poseidonHash(args.map(field => field.toBuffer()));
      await new Poseidon2(dstOffset, hashOffset, args.length).execute(context);

      const result = context.machineState.memory.get(dstOffset);
      expect(result).toEqual(new Field(toBigIntBE(expectedHash)));
    });
  });

  describe('Keccak', () => {
    it('Should (de)serialize correctly', () => {
      const buf = Buffer.from([
        Keccak.opcode, // opcode
        ...Buffer.from('12345678', 'hex'), // dstOffset
        ...Buffer.from('23456789', 'hex'), // hashOffset
        ...Buffer.from('3456789a', 'hex'), // hashSize
      ]);
      const inst = new Keccak(/*dstOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);

      expect(Keccak.deserialize(buf)).toEqual(inst);
      expect(inst.serialize()).toEqual(buf);
    });

    it('Should hash correctly', async () => {
      const args = [new Field(1n), new Field(2n), new Field(3n)];
      const hashOffset = 0;
      context.machineState.memory.setSlice(hashOffset, args);

      const dstOffset = 3;

      const inputBuffer = Buffer.concat(args.map(field => field.toBuffer()));
      const expectedHash = keccak(inputBuffer);
      await new Keccak(dstOffset, hashOffset, args.length).execute(context);

      const result = context.machineState.memory.getSliceAs<Field>(dstOffset, 2);
      const combined = Buffer.concat([result[0].toBuffer().subarray(16, 32), result[1].toBuffer().subarray(16, 32)]);

      expect(combined).toEqual(expectedHash);
    });
  });

  describe('Sha256', () => {
    it('Should (de)serialize correctly', () => {
      const buf = Buffer.from([
        Sha256.opcode, // opcode
        ...Buffer.from('12345678', 'hex'), // dstOffset
        ...Buffer.from('23456789', 'hex'), // hashOffset
        ...Buffer.from('3456789a', 'hex'), // hashSize
      ]);
      const inst = new Sha256(/*dstOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);

      expect(Sha256.deserialize(buf)).toEqual(inst);
      expect(inst.serialize()).toEqual(buf);
    });

    it('Should hash correctly', async () => {
      const args = [new Field(1n), new Field(2n), new Field(3n)];
      const hashOffset = 0;
      context.machineState.memory.setSlice(hashOffset, args);

      const dstOffset = 3;

      const inputBuffer = Buffer.concat(args.map(field => field.toBuffer()));
      const expectedHash = sha256(inputBuffer);
      await new Sha256(dstOffset, hashOffset, args.length).execute(context);

      const result = context.machineState.memory.getSliceAs<Field>(dstOffset, 2);
      const combined = Buffer.concat([result[0].toBuffer().subarray(16, 32), result[1].toBuffer().subarray(16, 32)]);

      expect(combined).toEqual(expectedHash);
    });
  });

  describe('Pedersen', () => {
    it('Should (de)serialize correctly', () => {
      const buf = Buffer.from([
        Pedersen.opcode, // opcode
        ...Buffer.from('12345678', 'hex'), // dstOffset
        ...Buffer.from('23456789', 'hex'), // hashOffset
        ...Buffer.from('3456789a', 'hex'), // hashSize
      ]);
      const inst = new Pedersen(/*dstOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);

      expect(Pedersen.deserialize(buf)).toEqual(inst);
      expect(inst.serialize()).toEqual(buf);
    });

    it('Should hash correctly', async () => {
      const args = [new Field(1n), new Field(2n), new Field(3n)];
      const hashOffset = 0;
      context.machineState.memory.setSlice(hashOffset, args);

      const dstOffset = 3;

      const inputBuffer = args.map(field => field.toBuffer());
      const expectedHash = pedersenHash(inputBuffer);
      await new Pedersen(dstOffset, hashOffset, args.length).execute(context);

      const result = context.machineState.memory.get(dstOffset);
      expect(result).toEqual(new Field(toBigIntBE(expectedHash)));
    });
  });
});
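As a side note on the Keccak and Sha256 checks above: the 32-byte digest comes back as two field elements, and each field carries its 16 digest bytes in the low-order half of its 32-byte encoding, so the tests rebuild the digest by concatenating those halves. A hedged sketch of that reassembly (illustrative helper, not part of the commit; the field encodings are assumed to be 32-byte big-endian buffers):

// Illustrative only: recombine a 32-byte digest stored as two field limbs,
// mirroring the subarray(16, 32) concatenation used in the tests above.
function recombineDigest(highField: Buffer, lowField: Buffer): Buffer {
  // Each 32-byte field encoding holds its 16 digest bytes in the low-order half.
  return Buffer.concat([highField.subarray(16, 32), lowField.subarray(16, 32)]);
}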
@@ -0,0 +1,138 @@
import { toBigIntBE } from '@aztec/foundation/bigint-buffer';
import { keccak, pedersenHash, poseidonHash, sha256 } from '@aztec/foundation/crypto';

import { AvmContext } from '../avm_context.js';
import { Field } from '../avm_memory_types.js';
import { Opcode, OperandType } from '../serialization/instruction_serialization.js';
import { Instruction } from './instruction.js';

export class Poseidon2 extends Instruction {
  static type: string = 'POSEIDON2';
  static readonly opcode: Opcode = Opcode.POSEIDON;

  // Informs (de)serialization. See Instruction.deserialize.
  static readonly wireFormat: OperandType[] = [
    OperandType.UINT8,
    OperandType.UINT32,
    OperandType.UINT32,
    OperandType.UINT32,
  ];

  constructor(private dstOffset: number, private hashOffset: number, private hashSize: number) {
    super();
  }

  async execute(context: AvmContext): Promise<void> {
    // We hash a set of field elements
    const hashData = context.machineState.memory
      .getSlice(this.hashOffset, this.hashOffset + this.hashSize)
      .map(word => word.toBuffer());

    const hash = poseidonHash(hashData);
    context.machineState.memory.set(this.dstOffset, new Field(hash));

    context.machineState.incrementPc();
  }
}

export class Keccak extends Instruction {
  static type: string = 'KECCAK';
  static readonly opcode: Opcode = Opcode.KECCAK;

  // Informs (de)serialization. See Instruction.deserialize.
  static readonly wireFormat: OperandType[] = [
    OperandType.UINT8,
    OperandType.UINT32,
    OperandType.UINT32,
    OperandType.UINT32,
  ];

  constructor(private dstOffset: number, private hashOffset: number, private hashSize: number) {
    super();
  }

  // Note hash output is 32 bytes, so takes up two fields
  async execute(context: AvmContext): Promise<void> {
    // We hash a set of field elements
    const hashData = context.machineState.memory
      .getSlice(this.hashOffset, this.hashOffset + this.hashSize)
      .map(word => word.toBuffer());

    const hash = keccak(Buffer.concat(hashData));

    // Split output into two fields
    const high = new Field(toBigIntBE(hash.subarray(0, 16)));
    const low = new Field(toBigIntBE(hash.subarray(16, 32)));

    context.machineState.memory.set(this.dstOffset, high);
    context.machineState.memory.set(this.dstOffset + 1, low);

    context.machineState.incrementPc();
  }
}

export class Sha256 extends Instruction {
  static type: string = 'SHA256';
  static readonly opcode: Opcode = Opcode.SHA256;

  // Informs (de)serialization. See Instruction.deserialize.
  static readonly wireFormat: OperandType[] = [
    OperandType.UINT8,
    OperandType.UINT32,
    OperandType.UINT32,
    OperandType.UINT32,
  ];

  constructor(private dstOffset: number, private hashOffset: number, private hashSize: number) {
    super();
  }

  // Note hash output is 32 bytes, so takes up two fields
  async execute(context: AvmContext): Promise<void> {
    // We hash a set of field elements
    const hashData = context.machineState.memory
      .getSlice(this.hashOffset, this.hashOffset + this.hashSize)
      .map(word => word.toBuffer());

    const hash = sha256(Buffer.concat(hashData));

    // Split output into two fields
    const high = new Field(toBigIntBE(hash.subarray(0, 16)));
    const low = new Field(toBigIntBE(hash.subarray(16, 32)));

    context.machineState.memory.set(this.dstOffset, high);
    context.machineState.memory.set(this.dstOffset + 1, low);

    context.machineState.incrementPc();
  }
}

export class Pedersen extends Instruction {
  static type: string = 'PEDERSEN';
  static readonly opcode: Opcode = Opcode.PEDERSEN;

  // Informs (de)serialization. See Instruction.deserialize.
  static readonly wireFormat: OperandType[] = [
    OperandType.UINT8,
    OperandType.UINT32,
    OperandType.UINT32,
    OperandType.UINT32,
  ];

  constructor(private dstOffset: number, private hashOffset: number, private hashSize: number) {
    super();
  }

  async execute(context: AvmContext): Promise<void> {
    // We hash a set of field elements
    const hashData = context.machineState.memory
      .getSlice(this.hashOffset, this.hashOffset + this.hashSize)
      .map(word => word.toBuffer());

    // No domain sep for now
    const hash = pedersenHash(hashData);
    context.machineState.memory.set(this.dstOffset, new Field(hash));

    context.machineState.incrementPc();
  }
}
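For reference, the four instructions above share the same wire layout, as implied by their wireFormat arrays and the (de)serialization tests: one opcode byte followed by three big-endian UINT32 operands (dstOffset, hashOffset, hashSize). A hedged sketch of building that encoding by hand (illustrative helper, not part of the commit):

// Illustrative only: build the 13-byte [UINT8 opcode, UINT32 dstOffset,
// UINT32 hashOffset, UINT32 hashSize] encoding shared by the hashing opcodes.
function encodeHashInstruction(opcode: number, dstOffset: number, hashOffset: number, hashSize: number): Buffer {
  const buf = Buffer.alloc(13); // 1 opcode byte + 3 * 4 operand bytes
  buf.writeUInt8(opcode, 0);
  buf.writeUInt32BE(dstOffset, 1);
  buf.writeUInt32BE(hashOffset, 5);
  buf.writeUInt32BE(hashSize, 9);
  return buf;
}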