Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: rename pedersen typescript methods to be called hash instead of compress #3047

Merged
merged 14 commits into from
Oct 26, 2023
28 changes: 28 additions & 0 deletions barretenberg/ts/src/barretenberg_api/pedersen.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,34 @@ describe('pedersen', () => {
expect(result).toEqual(new Fr(2152386650411553803409271316104075950536496387580531018130718456431861859990n));
});

it('pedersenCompressAndHashSame', async () => {
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

These are a bunch of "crutch" tests that show that a lot of the pedersen methods are effectively the same or some are just supersets.

Once I remove the c_binds for them, these will also disappear since they won't be available anymore

const resultCompress = await api.pedersenCompressWithHashIndex([new Fr(4n), new Fr(8n)], 7);
const resultHash = await api.pedersenHashWithHashIndex([new Fr(4n), new Fr(8n)], 7);
expect(resultCompress).toEqual(resultHash);
});

it('pedersenHashWith0IndexSameAsNoIndex', async () => {
const resultHashImplicit0 = await api.pedersenHash([new Fr(4n), new Fr(8n)]);
const resultCompressImplicit0 = await api.pedersenCompress([new Fr(4n), new Fr(8n)]);
const resultCompressFieldsImplicit0 = await api.pedersenCompressFields(new Fr(4n), new Fr(8n));
const resultHashExplicit0 = await api.pedersenHashWithHashIndex([new Fr(4n), new Fr(8n)], 0);
expect(resultHashImplicit0).toEqual(resultCompressImplicit0);
expect(resultHashImplicit0).toEqual(resultHashExplicit0);
expect(resultHashImplicit0).toEqual(resultCompressFieldsImplicit0);
});

it('pedersenHashPairSameAsWith0Index', async () => {
const resultHashPair = await api.pedersenHashPair(new Fr(4n), new Fr(8n));
const resultHashExplicit0 = await api.pedersenHashWithHashIndex([new Fr(4n), new Fr(8n)], 0);
expect(resultHashExplicit0).toEqual(resultHashPair);
});

it('pedersenHashMultipleSameAsWith0Index', async () => {
const resultHashPair = await api.pedersenHashMultiple([new Fr(4n), new Fr(8n)]);
const resultHashExplicit0 = await api.pedersenHashWithHashIndex([new Fr(4n), new Fr(8n)], 0);
expect(resultHashExplicit0).toEqual(resultHashPair);
});

it('pedersenCommit', async () => {
const result = await api.pedersenCommit([new Fr(4n), new Fr(8n), new Fr(12n)]);
expect(result).toEqual(new Fr(18374309251862457296563484909553154519357910650678202211610516068880120638872n));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ import {
computeVarArgsHash,
siloCommitment,
} from '@aztec/circuits.js/abis';
import { pedersenCompressInputs } from '@aztec/circuits.js/barretenberg';
import { pedersenHashInputs } from '@aztec/circuits.js/barretenberg';
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is the main change in the PR: renaming compress to hash.

import { makeContractDeploymentData } from '@aztec/circuits.js/factories';
import { FunctionArtifact, FunctionSelector, encodeArguments } from '@aztec/foundation/abi';
import { asyncMap } from '@aztec/foundation/async-map';
Expand Down Expand Up @@ -145,10 +145,10 @@ describe('Private Execution test suite', () => {
return trees[name];
};

const hash = (data: Buffer[]) => pedersenCompressInputs(circuitsWasm, data);
const hash = (data: Buffer[]) => pedersenHashInputs(circuitsWasm, data);
const hashFields = (data: Fr[]) =>
Fr.fromBuffer(
pedersenCompressInputs(
pedersenHashInputs(
circuitsWasm,
data.map(f => f.toBuffer()),
),
Expand Down
10 changes: 5 additions & 5 deletions yarn-project/acir-simulator/src/public/index.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import {
HistoricBlockData,
L1_TO_L2_MSG_TREE_HEIGHT,
} from '@aztec/circuits.js';
import { pedersenCompressInputs } from '@aztec/circuits.js/barretenberg';
import { pedersenHashInputs } from '@aztec/circuits.js/barretenberg';
import { FunctionArtifact, FunctionSelector, encodeArguments } from '@aztec/foundation/abi';
import { AztecAddress } from '@aztec/foundation/aztec-address';
import { EthAddress } from '@aztec/foundation/eth-address';
Expand Down Expand Up @@ -335,9 +335,9 @@ describe('ACIR public execution simulator', () => {
// Assert the commitment was created
expect(result.newCommitments.length).toEqual(1);

const expectedNoteHash = pedersenCompressInputs(wasm, [amount.toBuffer(), secretHash.toBuffer()]);
const expectedNoteHash = pedersenHashInputs(wasm, [amount.toBuffer(), secretHash.toBuffer()]);
const storageSlot = new Fr(5); // for pending_shields
const expectedInnerNoteHash = pedersenCompressInputs(wasm, [storageSlot.toBuffer(), expectedNoteHash]);
const expectedInnerNoteHash = pedersenHashInputs(wasm, [storageSlot.toBuffer(), expectedNoteHash]);
expect(result.newCommitments[0].toBuffer()).toEqual(expectedInnerNoteHash);
});

Expand Down Expand Up @@ -365,7 +365,7 @@ describe('ACIR public execution simulator', () => {
// Assert the l2 to l1 message was created
expect(result.newL2ToL1Messages.length).toEqual(1);

const expectedNewMessageValue = pedersenCompressInputs(
const expectedNewMessageValue = pedersenHashInputs(
wasm,
params.map(a => a.toBuffer()),
);
Expand Down Expand Up @@ -452,7 +452,7 @@ describe('ACIR public execution simulator', () => {
// Assert the l2 to l1 message was created
expect(result.newNullifiers.length).toEqual(1);

const expectedNewMessageValue = pedersenCompressInputs(
const expectedNewMessageValue = pedersenHashInputs(
wasm,
params.map(a => a.toBuffer()),
);
Expand Down
4 changes: 2 additions & 2 deletions yarn-project/acir-simulator/src/utils.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { CircuitsWasm, GrumpkinPrivateKey } from '@aztec/circuits.js';
import { Grumpkin, pedersenCompressInputs } from '@aztec/circuits.js/barretenberg';
import { Grumpkin, pedersenHashInputs } from '@aztec/circuits.js/barretenberg';
import { Fr } from '@aztec/foundation/fields';

/**
Expand All @@ -24,7 +24,7 @@ export function computeSlotForMapping(mappingSlot: Fr, owner: NoirPoint | Fr, bb
const ownerField = isFr(owner) ? owner : new Fr(owner.x);

return Fr.fromBuffer(
pedersenCompressInputs(
pedersenHashInputs(
bbWasm,
[mappingSlot, ownerField].map(f => f.toBuffer()),
),
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { CircuitsWasm, Fr, GeneratorIndex } from '@aztec/circuits.js';
import { pedersenCompressWithHashIndex } from '@aztec/circuits.js/barretenberg';
import { pedersenHashWithHashIndex } from '@aztec/circuits.js/barretenberg';
import { padArrayEnd } from '@aztec/foundation/collection';
import { FunctionCall, PackedArguments, emptyFunctionCall } from '@aztec/types';

Expand Down Expand Up @@ -69,7 +69,7 @@ export async function buildPayload(calls: FunctionCall[]): Promise<{

/** Compresses an entrypoint payload to a 32-byte buffer (useful for signing) */
export async function hashPayload(payload: EntrypointPayload) {
return pedersenCompressWithHashIndex(
return pedersenHashWithHashIndex(
await CircuitsWasm.get(),
flattenPayload(payload).map(fr => fr.toBuffer()),
GeneratorIndex.SIGNATURE_PAYLOAD,
Expand Down
4 changes: 2 additions & 2 deletions yarn-project/aztec.js/src/utils/authwit.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { AztecAddress, CircuitsWasm, GeneratorIndex } from '@aztec/circuits.js';
import { pedersenCompressWithHashIndex } from '@aztec/circuits.js/barretenberg';
import { pedersenHashWithHashIndex } from '@aztec/circuits.js/barretenberg';
import { FunctionCall, PackedArguments } from '@aztec/types';

// docs:start:authwit_computeAuthWitMessageHash
Expand All @@ -12,7 +12,7 @@ import { FunctionCall, PackedArguments } from '@aztec/types';
*/
export const computeAuthWitMessageHash = async (caller: AztecAddress, request: FunctionCall) => {
const wasm = await CircuitsWasm.get();
return pedersenCompressWithHashIndex(
return pedersenHashWithHashIndex(
wasm,
[
caller.toField(),
Expand Down
4 changes: 2 additions & 2 deletions yarn-project/aztec.js/src/utils/cheat_codes.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { AztecAddress, CircuitsWasm, EthAddress, Fr } from '@aztec/circuits.js';
import { pedersenCompressInputs } from '@aztec/circuits.js/barretenberg';
import { pedersenHashInputs } from '@aztec/circuits.js/barretenberg';
import { toBigIntBE, toHex } from '@aztec/foundation/bigint-buffer';
import { keccak } from '@aztec/foundation/crypto';
import { createDebugLogger } from '@aztec/foundation/log';
Expand Down Expand Up @@ -236,7 +236,7 @@ export class AztecCheatCodes {
// Based on `at` function in
// aztec3-packages/yarn-project/aztec-nr/aztec/src/state_vars/map.nr
return Fr.fromBuffer(
pedersenCompressInputs(
pedersenHashInputs(
this.wasm,
[new Fr(baseSlot), new Fr(key)].map(f => f.toBuffer()),
),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,53 +5,40 @@ import { Buffer } from 'buffer';
import { deserializeArrayFromVector, deserializeField, serializeBufferArrayToVector } from '../../serialize.js';

/**
* Compresses two 32-byte hashes.
* Hashes two arrays.
* @param wasm - The barretenberg module.
* @param lhs - The first hash.
* @param rhs - The second hash.
* @param lhs - The first array.
* @param rhs - The second array.
* @returns The new 32-byte hash.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export function pedersenCompress(wasm: IWasmModule, lhs: Uint8Array, rhs: Uint8Array): Buffer {
return pedersenCompressWithHashIndex(wasm, [Buffer.from(lhs), Buffer.from(rhs)], 0);
export function pedersenHash(wasm: IWasmModule, lhs: Uint8Array, rhs: Uint8Array): Buffer {
return pedersenHashWithHashIndex(wasm, [Buffer.from(lhs), Buffer.from(rhs)], 0);
}

/**
* Combine an array of hashes using pedersen hash.
* Computes the hash of an array of buffers.
* @param wasm - The barretenberg module.
* @param lhs - The first hash.
* @param rhs - The second hash.
* @param inputs - The array of buffers to hash.
* @returns The new 32-byte hash.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export function pedersenHashInputs(wasm: IWasmModule, inputs: Buffer[]): Buffer {
return pedersenCompressWithHashIndex(wasm, inputs, 0);
return pedersenHashWithHashIndex(wasm, inputs, 0);
}

/**
* Compresses an array of buffers.
* Hashes an array of buffers.
* @param wasm - The barretenberg module.
* @param inputs - The array of buffers to compress.
* @returns The resulting 32-byte hash.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export function pedersenCompressInputs(wasm: IWasmModule, inputs: Buffer[]): Buffer {
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Removed this as this is the same as pedersenHashInputs

return pedersenCompressWithHashIndex(wasm, inputs, 0);
}

/**
* Compresses an array of buffers.
* @param wasm - The barretenberg module.
* @param inputs - The array of buffers to compress.
* @param inputs - The array of buffers to hash.
* @param hashIndex - Hash index of the generator to use (See GeneratorIndex enum).
* @returns The resulting 32-byte hash.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export function pedersenCompressWithHashIndex(wasm: IWasmModule, inputs: Buffer[], hashIndex: number): Buffer {
export function pedersenHashWithHashIndex(wasm: IWasmModule, inputs: Buffer[], hashIndex: number): Buffer {
// If not done already, precompute constants.
wasm.call('pedersen__init');
const inputVectors = serializeBufferArrayToVector(inputs);
Expand All @@ -66,7 +53,7 @@ export function pedersenCompressWithHashIndex(wasm: IWasmModule, inputs: Buffer[
*
* E.g.
* Input: [1][2][3][4]
* Output: [1][2][3][4][compress(1,2)][compress(3,4)][compress(5,6)].
* Output: [1][2][3][4][hash(1,2)][hash(3,4)][hash(5,6)].
*
* @param wasm - The barretenberg module.
* @param values - The 32 byte pedersen leaves.
Expand Down
4 changes: 2 additions & 2 deletions yarn-project/end-to-end/src/e2e_block_building.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import {
isContractDeployed,
} from '@aztec/aztec.js';
import { CircuitsWasm } from '@aztec/circuits.js';
import { pedersenCompressInputs } from '@aztec/circuits.js/barretenberg';
import { pedersenHashInputs } from '@aztec/circuits.js/barretenberg';
import { DebugLogger } from '@aztec/foundation/log';
import { TestContractArtifact } from '@aztec/noir-contracts/artifacts';
import { TestContract, TokenContract } from '@aztec/noir-contracts/types';
Expand Down Expand Up @@ -135,7 +135,7 @@ describe('e2e_block_building', () => {
it('drops tx with private nullifier already emitted from public on the same block', async () => {
const secret = Fr.random();
// See yarn-project/acir-simulator/src/public/index.test.ts 'Should be able to create a nullifier from the public context'
const emittedPublicNullifier = pedersenCompressInputs(
const emittedPublicNullifier = pedersenHashInputs(
await CircuitsWasm.get(),
[new Fr(140), secret].map(a => a.toBuffer()),
);
Expand Down
4 changes: 2 additions & 2 deletions yarn-project/end-to-end/src/simulators/lending_simulator.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
// Convenience struct to hold an account's address and secret that can easily be passed around.
import { CheatCodes } from '@aztec/aztec.js';
import { AztecAddress, CircuitsWasm, Fr } from '@aztec/circuits.js';
import { pedersenCompressInputs } from '@aztec/circuits.js/barretenberg';
import { pedersenHashInputs } from '@aztec/circuits.js/barretenberg';
import { LendingContract } from '@aztec/noir-contracts/types';

import { TokenSimulator } from './token_simulator.js';
Expand All @@ -26,7 +26,7 @@ export class LendingAccount {
*/
public async key(): Promise<Fr> {
return Fr.fromBuffer(
pedersenCompressInputs(
pedersenHashInputs(
await CircuitsWasm.get(),
[this.address, this.secret].map(f => f.toBuffer()),
),
Expand Down
8 changes: 4 additions & 4 deletions yarn-project/merkle-tree/src/pedersen.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { pedersenCompress, pedersenGetHashTree, pedersenHashInputs } from '@aztec/circuits.js/barretenberg';
import { pedersenGetHashTree, pedersenHash, pedersenHashInputs } from '@aztec/circuits.js/barretenberg';
import { IWasmModule } from '@aztec/foundation/wasm';
import { Hasher } from '@aztec/types';

Expand All @@ -14,15 +14,15 @@ export class Pedersen implements Hasher {
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
public compress(lhs: Uint8Array, rhs: Uint8Array): Buffer {
return pedersenCompress(this.wasm, lhs, rhs);
public hash(lhs: Uint8Array, rhs: Uint8Array): Buffer {
return pedersenHash(this.wasm, lhs, rhs);
}

/*
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
public compressInputs(inputs: Buffer[]): Buffer {
public hashInputs(inputs: Buffer[]): Buffer {
return pedersenHashInputs(this.wasm, inputs);
}

Expand Down
30 changes: 15 additions & 15 deletions yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -95,21 +95,21 @@ describe('SparseTreeSpecific', () => {
const db = levelup(createMemDown());
const tree = await createDb(db, pedersen, 'test', 3);

const level2ZeroHash = pedersen.compress(INITIAL_LEAF, INITIAL_LEAF);
const level1ZeroHash = pedersen.compress(level2ZeroHash, level2ZeroHash);
const level2ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF);
const level1ZeroHash = pedersen.hash(level2ZeroHash, level2ZeroHash);

expect(tree.getNumLeaves(false)).toEqual(0n);
expect(tree.getRoot(false)).toEqual(pedersen.compress(level1ZeroHash, level1ZeroHash));
expect(tree.getRoot(false)).toEqual(pedersen.hash(level1ZeroHash, level1ZeroHash));

// Insert leaf at index 3
let level1LeftHash: Buffer;
const leafAtIndex3 = randomBytes(32);
{
await tree.updateLeaf(leafAtIndex3, 3n);
expect(tree.getNumLeaves(true)).toEqual(1n);
const level2Hash = pedersen.compress(INITIAL_LEAF, leafAtIndex3);
level1LeftHash = pedersen.compress(level2ZeroHash, level2Hash);
const root = pedersen.compress(level1LeftHash, level1ZeroHash);
const level2Hash = pedersen.hash(INITIAL_LEAF, leafAtIndex3);
level1LeftHash = pedersen.hash(level2ZeroHash, level2Hash);
const root = pedersen.hash(level1LeftHash, level1ZeroHash);
expect(tree.getRoot(true)).toEqual(root);
expect(await tree.getSiblingPath(3n, true)).toEqual(
new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level2ZeroHash, level1ZeroHash]),
Expand All @@ -122,9 +122,9 @@ describe('SparseTreeSpecific', () => {
const leafAtIndex6 = randomBytes(32);
await tree.updateLeaf(leafAtIndex6, 6n);
expect(tree.getNumLeaves(true)).toEqual(2n);
const level2Hash = pedersen.compress(leafAtIndex6, INITIAL_LEAF);
level1RightHash = pedersen.compress(level2ZeroHash, level2Hash);
const root = pedersen.compress(level1LeftHash, level1RightHash);
const level2Hash = pedersen.hash(leafAtIndex6, INITIAL_LEAF);
level1RightHash = pedersen.hash(level2ZeroHash, level2Hash);
const root = pedersen.hash(level1LeftHash, level1RightHash);
expect(tree.getRoot(true)).toEqual(root);
expect(await tree.getSiblingPath(6n, true)).toEqual(
new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level2ZeroHash, level1LeftHash]),
Expand All @@ -136,9 +136,9 @@ describe('SparseTreeSpecific', () => {
{
await tree.updateLeaf(leafAtIndex2, 2n);
expect(tree.getNumLeaves(true)).toEqual(3n);
const level2Hash = pedersen.compress(leafAtIndex2, leafAtIndex3);
level1LeftHash = pedersen.compress(level2ZeroHash, level2Hash);
const root = pedersen.compress(level1LeftHash, level1RightHash);
const level2Hash = pedersen.hash(leafAtIndex2, leafAtIndex3);
level1LeftHash = pedersen.hash(level2ZeroHash, level2Hash);
const root = pedersen.hash(level1LeftHash, level1RightHash);
expect(tree.getRoot(true)).toEqual(root);
expect(await tree.getSiblingPath(2n, true)).toEqual(
new SiblingPath(TEST_TREE_DEPTH, [leafAtIndex3, level2ZeroHash, level1RightHash]),
Expand All @@ -150,9 +150,9 @@ describe('SparseTreeSpecific', () => {
const updatedLeafAtIndex3 = randomBytes(32);
await tree.updateLeaf(updatedLeafAtIndex3, 3n);
expect(tree.getNumLeaves(true)).toEqual(3n);
const level2Hash = pedersen.compress(leafAtIndex2, updatedLeafAtIndex3);
level1LeftHash = pedersen.compress(level2ZeroHash, level2Hash);
const root = pedersen.compress(level1LeftHash, level1RightHash);
const level2Hash = pedersen.hash(leafAtIndex2, updatedLeafAtIndex3);
level1LeftHash = pedersen.hash(level2ZeroHash, level2Hash);
const root = pedersen.hash(level1LeftHash, level1RightHash);
expect(tree.getRoot(true)).toEqual(root);
expect(await tree.getSiblingPath(3n, true)).toEqual(
new SiblingPath(TEST_TREE_DEPTH, [leafAtIndex2, level2ZeroHash, level1RightHash]),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -573,7 +573,7 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree {
if (!hash0Leaf && leaf.value == 0n) {
encodedLeaf = toBufferBE(0n, 32);
} else {
encodedLeaf = this.hasher.compressInputs(
encodedLeaf = this.hasher.hashInputs(
[leaf.value, leaf.nextIndex, leaf.nextValue].map(val => toBufferBE(val, 32)),
);
}
Expand Down
Loading