-
Notifications
You must be signed in to change notification settings - Fork 598
refactor: TS hash wrappers cleanup #5691
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
8c8d078
38df5b3
f05a671
cf2466e
5a71417
d49d409
3712c5a
a70c29d
29fb9a7
47cc247
7bd9fcd
12587ac
20a0ede
409143f
fd561a5
b302b4e
353ce82
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -79,7 +79,7 @@ | |
| "erc", | ||
| "falsey", | ||
| "fargate", | ||
| "Fieldeable", | ||
| "Fieldable", | ||
| "filestat", | ||
| "finalise", | ||
| "finalised", | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -71,8 +71,8 @@ pub fn compute_note_hash_for_consumption<Note, N>(note: Note) -> Field where Not | |
| } else { | ||
| // When nonce is nonzero, that means we are reading a settled note (from tree) created in a | ||
| // previous TX. So we need the unique_siloed_note_hash which has already been hashed with | ||
| // contract address and then nonce. This hash will match the existing leaf in the private | ||
| // data tree, so the kernel can just perform a membership check directly on this hash/leaf. | ||
| // contract address and then nonce. This hash will match the existing leaf in the note hash | ||
| // tree, so the kernel can just perform a membership check directly on this hash/leaf. | ||
| compute_unique_siloed_note_hash(note) | ||
| // IMPORTANT NOTE ON REDUNDANT SILOING BY CONTRACT ADDRESS: The note hash computed above is | ||
| // "siloed" by contract address. When a note hash is computed solely for the purpose of | ||
|
|
@@ -81,7 +81,7 @@ pub fn compute_note_hash_for_consumption<Note, N>(note: Note) -> Field where Not | |
| // be computed from a siloed note hash. After all, persistable note hashes and nullifiers are | ||
| // siloed by the kernel circuit. That being said, the siloed note hash computed above CAN be | ||
| // used for nullifier computation, and this achieves the (arguably unnecessary) property that | ||
| // nullifiers are computed from a note hash's fully-computed private data tree leaf. | ||
| // nullifiers are computed from a note hash's fully-computed note hash tree leaf. | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. The naming here was stale, so I fixed it. |
||
| } | ||
| } | ||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,4 +1,4 @@ | ||
| import { randomBytes } from '@aztec/foundation/crypto'; | ||
| import { Fr } from '@aztec/foundation/fields'; | ||
| import { type FromBuffer } from '@aztec/foundation/serialize'; | ||
| import { type AztecKVStore } from '@aztec/kv-store'; | ||
| import { openTmpStore } from '@aztec/kv-store/utils'; | ||
|
|
@@ -24,7 +24,7 @@ describe('AppendOnlySnapshot', () => { | |
| () => tree, | ||
| () => snapshotBuilder, | ||
| async tree => { | ||
| const newLeaves = Array.from({ length: 2 }).map(() => randomBytes(32)); | ||
| const newLeaves = Array.from({ length: 2 }).map(() => Fr.random().toBuffer()); | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. After adding the modulus check in Barretenberg Fr, I had to update this ^. It's a bit suspicious that it didn't cause issues before. |
||
| await tree.appendLeaves(newLeaves); | ||
| }, | ||
| ); | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -5,6 +5,7 @@ import { AztecAddress } from '@aztec/foundation/aztec-address'; | |
| import { keccak, pedersenHash, poseidonHash, sha256 } from '@aztec/foundation/crypto'; | ||
| import { EthAddress } from '@aztec/foundation/eth-address'; | ||
| import { Fr } from '@aztec/foundation/fields'; | ||
| import { type Fieldable } from '@aztec/foundation/serialize'; | ||
| import { AvmNestedCallsTestContractArtifact, AvmTestContractArtifact } from '@aztec/noir-contracts.js'; | ||
|
|
||
| import { jest } from '@jest/globals'; | ||
|
|
@@ -140,11 +141,11 @@ describe('AVM simulator: transpiled Noir contracts', () => { | |
| describe.each([ | ||
| ['poseidon_hash', poseidonHash], | ||
| ['pedersen_hash', pedersenHash], | ||
| ['pedersen_hash_with_index', (m: Buffer[]) => pedersenHash(m, 20)], | ||
| ])('Hashes with field returned in noir contracts', (name: string, hashFunction: (data: Buffer[]) => Fr) => { | ||
| ['pedersen_hash_with_index', (m: Fieldable[]) => pedersenHash(m, 20)], | ||
| ])('Hashes with field returned in noir contracts', (name: string, hashFunction: (data: Fieldable[]) => Fr) => { | ||
| it(`Should execute contract function that performs ${name} hash`, async () => { | ||
| const calldata = [new Fr(1), new Fr(2), new Fr(3)]; | ||
| const hash = hashFunction(calldata.map(f => f.toBuffer())); | ||
| const hash = hashFunction(calldata); | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. These manual conversions are no longer necessary because the hash functions serialize the input as they need it. |
||
|
|
||
| const context = initContext({ env: initExecutionEnvironment({ calldata }) }); | ||
| const bytecode = getAvmTestContractBytecode(name); | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The code here was not performing a modulus check when a buffer was passed as input, and we were passing in random 32 bytes in plenty of places. I'm not sure why it hasn't caused issues, but I assume it's because we only did that in a few unit tests (in e2e we always get "real" fields).