Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

refactor: random fixes and readability improvements #11129

Draft
wants to merge 5 commits into
base: 01-07-chore_fixing_aztec-nr_warnings
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion cspell.json
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@
"asyncify",
"auditability",
"authwit",
"authwits",
"authwitness",
"authwits",
"Automine",
"autonat",
"autorun",
Expand Down Expand Up @@ -230,6 +230,7 @@
"Reserialize",
"retag",
"rethrown",
"revertibles",
"rollup",
"rollups",
"rushstack",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -269,6 +269,7 @@ impl PrivateContext {
// for the correct public key has been received.
get_key_validation_request(pk_m_hash, key_index)
};
assert_eq(request.pk_m.hash(), pk_m_hash, "Obtained invalid key validation request");
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I find assert_eq nicer to read than assert(left == right) so I sneaked this change in here and in other places.

I think we didn't use it before as it was not yet implemented.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This assert is not present in the original code though?


self.key_validation_requests_and_generators.push(
KeyValidationRequestAndGenerator {
Expand Down
19 changes: 3 additions & 16 deletions noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr
Original file line number Diff line number Diff line change
@@ -1,15 +1,10 @@
use dep::protocol_types::{
address::AztecAddress,
constants::{GENERATOR_INDEX__SYMMETRIC_KEY, PRIVATE_LOG_SIZE_IN_FIELDS},
hash::poseidon2_hash,
point::Point,
public_keys::AddressPoint,
scalar::Scalar,
utils::arrays::array_concat,
address::AztecAddress, constants::PRIVATE_LOG_SIZE_IN_FIELDS, hash::poseidon2_hash,
point::Point, public_keys::AddressPoint, scalar::Scalar, utils::arrays::array_concat,
};
use std::{
aes128::aes128_encrypt, embedded_curve_ops::fixed_base_scalar_mul as derive_public_key,
field::bn254::decompose, hash::from_field_unsafe as fr_to_fq_unsafe,
hash::from_field_unsafe as fr_to_fq_unsafe,
};

use crate::{
Expand Down Expand Up @@ -187,14 +182,6 @@ unconstrained fn get_random_bytes<let N: u32>() -> [u8; N] {
bytes
}

/// Converts a base field element to scalar field element.
/// This is fine because modulus of the base field is smaller than the modulus of the scalar field.
fn fr_to_fq(r: Field) -> Scalar {
let (lo, hi) = decompose(r);

Scalar { lo, hi }
}
nventuro marked this conversation as resolved.
Show resolved Hide resolved

fn generate_ephemeral_key_pair() -> (Scalar, Point) {
// @todo Need to draw randomness from the full domain of Fq not only Fr
let randomness = unsafe {
Expand Down
30 changes: 15 additions & 15 deletions noir-projects/aztec-nr/aztec/src/history/nullifier_inclusion.nr
Original file line number Diff line number Diff line change
Expand Up @@ -21,21 +21,21 @@ impl ProveNullifierInclusion for BlockHeader {

// 2) First we prove that the tree leaf in the witness is present in the nullifier tree. This is expected to be
// the leaf that contains the nullifier we're proving inclusion for.
assert(
self.state.partial.nullifier_tree.root
== root_from_sibling_path(witness.leaf_preimage.hash(), witness.index, witness.path)
,
"Proving nullifier inclusion failed",
);

// 3) Then we simply check that the value in the leaf is the expected one. Note that we don't need to perform
// any checks on the rest of the values in the leaf preimage (the next index or next nullifier), since all we
// care about is showing that the tree contains an entry with the expected nullifier.
assert(
witness.leaf_preimage.nullifier == nullifier,
"Nullifier does not match value in witness",
);
}
assert_eq(
self.state.partial.nullifier_tree.root,
root_from_sibling_path(witness.leaf_preimage.hash(), witness.index, witness.path),
"Proving nullifier inclusion failed",
);

// 3) Then we simply check that the value in the leaf is the expected one. Note that we don't need to perform
// any checks on the rest of the values in the leaf preimage (the next index or next nullifier), since all we
// care about is showing that the tree contains an entry with the expected nullifier.
assert_eq(
witness.leaf_preimage.nullifier,
nullifier,
"Nullifier does not match value in witness",
);
}
nventuro marked this conversation as resolved.
Show resolved Hide resolved
}

trait ProveNoteIsNullified {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,9 @@ impl ProveNullifierNonInclusion for BlockHeader {
// the 'low leaf', i.e. the leaf that would come immediately before the nullifier's leaf, if the nullifier were
// to be in the tree.
let low_nullifier_leaf = witness.leaf_preimage;
assert(
self.state.partial.nullifier_tree.root
== root_from_sibling_path(low_nullifier_leaf.hash(), witness.index, witness.path),
assert_eq(
self.state.partial.nullifier_tree.root,
root_from_sibling_path(low_nullifier_leaf.hash(), witness.index, witness.path),
"Proving nullifier non-inclusion failed: Could not prove low nullifier inclusion",
);

Expand Down
56 changes: 27 additions & 29 deletions noir-projects/aztec-nr/aztec/src/history/public_storage.nr
Original file line number Diff line number Diff line change
Expand Up @@ -39,37 +39,35 @@ impl PublicStorageHistoricalRead for BlockHeader {
// We first prove that the witness is indeed valid for the public data tree, i.e. that the preimage is of a
// value present in the tree. Note that `hash` returns not just the hash of the value but also the metadata
// (slot, next index and next slot).
assert(
nventuro marked this conversation as resolved.
Show resolved Hide resolved
self.state.partial.public_data_tree.root
== root_from_sibling_path(witness.leaf_preimage.hash(), witness.index, witness.path)
,
"Proving public value inclusion failed",
);
assert_eq(
self.state.partial.public_data_tree.root,
root_from_sibling_path(witness.leaf_preimage.hash(), witness.index, witness.path),
"Proving public value inclusion failed",
);

// 4) Now that we know the preimage is valid, we determine the value that's represented by this tree entry. Here
// we have two scenarios:
// 1. The tree entry is initialized, and the value is the same as the one in the witness
// 2. The entry was never initialized, and the value is default zero (the default)
// The code below is based on the same checks in `validate_public_data_reads` in `base_rollup_inputs`.
let preimage = witness.leaf_preimage;
// 4) Now that we know the preimage is valid, we determine the value that's represented by this tree entry. Here
// we have two scenarios:
// 1. The tree entry is initialized, and the value is the same as the one in the witness
// 2. The entry was never initialized, and the value is default zero (the default)
// The code below is based on the same checks in `validate_public_data_reads` in `base_rollup_inputs`.
let preimage = witness.leaf_preimage;

let is_less_than_slot = full_field_less_than(preimage.slot, public_data_tree_index);
let is_next_greater_than =
full_field_less_than(public_data_tree_index, preimage.next_slot);
let is_max = ((preimage.next_index == 0) & (preimage.next_slot == 0));
let is_in_range = is_less_than_slot & (is_next_greater_than | is_max);
let is_less_than_slot = full_field_less_than(preimage.slot, public_data_tree_index);
let is_next_greater_than = full_field_less_than(public_data_tree_index, preimage.next_slot);
let is_max = ((preimage.next_index == 0) & (preimage.next_slot == 0));
let is_in_range = is_less_than_slot & (is_next_greater_than | is_max);

let value = if is_in_range {
0
} else {
assert_eq(
preimage.slot,
public_data_tree_index,
"Public data tree index doesn't match witness",
);
preimage.value
};
let value = if is_in_range {
0
} else {
assert_eq(
preimage.slot,
public_data_tree_index,
"Public data tree index doesn't match witness",
);
preimage.value
};

value
}
value
}
}
1 change: 0 additions & 1 deletion noir-projects/aztec-nr/aztec/src/lib.nr
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ mod deploy;
mod generators;
mod hash;
mod history;
mod initializer;
nventuro marked this conversation as resolved.
Show resolved Hide resolved
mod keys;
mod messaging;
mod note;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,10 +60,7 @@ pub fn assert_initialization_matches_address_preimage_private(context: PrivateCo
);
}

pub fn compute_initialization_hash(
init_selector: FunctionSelector,
init_args_hash: Field,
) -> Field {
fn compute_initialization_hash(init_selector: FunctionSelector, init_args_hash: Field) -> Field {
nventuro marked this conversation as resolved.
Show resolved Hide resolved
poseidon2_hash_with_separator(
[init_selector.to_field(), init_args_hash],
GENERATOR_INDEX__CONSTRUCTOR,
Expand Down
8 changes: 5 additions & 3 deletions noir-projects/aztec-nr/aztec/src/macros/functions/mod.nr
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
pub mod interfaces;
pub mod initialization_utils;

use super::utils::{
add_to_hasher, fn_has_noinitcheck, get_fn_visibility, is_fn_initializer, is_fn_internal,
Expand Down Expand Up @@ -64,18 +65,19 @@ pub comptime fn noinitcheck(_f: FunctionDefinition) {

comptime fn create_assert_correct_initializer_args(f: FunctionDefinition) -> Quoted {
let fn_visibility = get_fn_visibility(f);
f"dep::aztec::initializer::assert_initialization_matches_address_preimage_{fn_visibility}(context);"
f"dep::aztec::macros::functions::initialization_utils::assert_initialization_matches_address_preimage_{fn_visibility}(context);"
.quoted_contents()
}

comptime fn create_mark_as_initialized(f: FunctionDefinition) -> Quoted {
let fn_visibility = get_fn_visibility(f);
f"dep::aztec::initializer::mark_as_initialized_{fn_visibility}(&mut context);".quoted_contents()
f"dep::aztec::macros::functions::initialization_utils::mark_as_initialized_{fn_visibility}(&mut context);"
.quoted_contents()
}

comptime fn create_init_check(f: FunctionDefinition) -> Quoted {
let fn_visibility = get_fn_visibility(f);
f"dep::aztec::initializer::assert_is_initialized_{fn_visibility}(&mut context);"
f"dep::aztec::macros::functions::initialization_utils::assert_is_initialized_{fn_visibility}(&mut context);"
.quoted_contents()
}

Expand Down
2 changes: 1 addition & 1 deletion noir-projects/aztec-nr/aztec/src/messaging.nr
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ pub fn process_l1_to_l2_message(
};

let root = root_from_sibling_path(message_hash, leaf_index, sibling_path);
assert(root == l1_to_l2_root, "Message not in state");
assert_eq(root, l1_to_l2_root, "Message not in state");

compute_l1_to_l2_message_nullifier(message_hash, secret)
}
6 changes: 3 additions & 3 deletions noir-projects/aztec-nr/aztec/src/note/note_getter_options.nr
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,9 @@ use dep::protocol_types::{constants::MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, trait
use std::option::Option;

pub struct PropertySelector {
pub(crate) index: u8, // index of the field in the serialized note array
pub(crate) offset: u8, // offset in the byte representation of the field (selected with index above) from which to reading
pub(crate) length: u8, // number of bytes to read after the offset
pub index: u8, // index of the field in the serialized note array
pub offset: u8, // offset in the byte representation of the field (selected with index above) from which to reading
pub length: u8, // number of bytes to read after the offset
nventuro marked this conversation as resolved.
Show resolved Hide resolved
}

pub struct Select {
Expand Down
10 changes: 6 additions & 4 deletions noir-projects/aztec-nr/aztec/src/oracle/block_header.nr
Original file line number Diff line number Diff line change
Expand Up @@ -70,14 +70,16 @@ fn constrain_get_block_header_at_internal(
};

// 3) Check that the block is in the archive (i.e. the witness is valid)
assert(
last_archive_root == root_from_sibling_path(block_hash, witness.index, witness.path),
assert_eq(
last_archive_root,
root_from_sibling_path(block_hash, witness.index, witness.path),
"Proving membership of a block in archive failed",
);

// 4) Check that the header hint has the same block number as the block number we are looking for, ensuring we are actually grabbing the header we specify
assert(
header_hint.global_variables.block_number as u32 == block_number,
assert_eq(
header_hint.global_variables.block_number as u32,
block_number,
"Block number provided is not the same as the block number from the header hint",
);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ mod test;

// This data structure is used by SharedMutable to store the minimum delay with which a ScheduledValueChange object can
// schedule a change.
// This delay is initally equal to INITIAL_DELAY, and can be safely mutated to any other value over time. This mutation
// This delay is initially equal to INITIAL_DELAY, and can be safely mutated to any other value over time. This mutation
// is performed via `schedule_change` in order to satisfy ScheduleValueChange constraints: if e.g. we allowed for the
// delay to be decreased immediately then it'd be possible for the state variable to schedule a value change with a
// reduced delay, invalidating prior private reads.
Expand Down Expand Up @@ -58,7 +58,7 @@ impl<let INITIAL_DELAY: u32> ScheduledDelayChange<INITIAL_DELAY> {
// When changing the delay value we must ensure that it is not possible to produce a value change with a delay
// shorter than the current one.
let blocks_until_change = if new > current {
// Increasing the delay value can therefore be done immediately: this does not invalidate prior contraints
// Increasing the delay value can therefore be done immediately: this does not invalidate prior constraints
// about how quickly a value might be changed (indeed it strengthens them).
0
} else {
Expand Down Expand Up @@ -127,34 +127,28 @@ impl<let INITIAL_DELAY: u32> ScheduledDelayChange<INITIAL_DELAY> {

impl<let INITIAL_DELAY: u32> Serialize<1> for ScheduledDelayChange<INITIAL_DELAY> {
fn serialize(self) -> [Field; 1] {
// We pack all three u32 values into a single U128, which is made up of two u64 limbs.
// Low limb: [ pre_inner: u32 | post_inner: u32 ]
// High limb: [ empty | pre_is_some: u8 | post_is_some: u8 | block_of_change: u32 ]
let lo = ((self.pre.unwrap_unchecked() as u64) * (1 << 32))
+ (self.post.unwrap_unchecked() as u64);

let hi = (self.pre.is_some() as u64) * (1 << 33)
+ (self.post.is_some() as u64 * (1 << 32))
+ self.block_of_change as u64;

let packed = U128::from_u64s_le(lo, hi);
// Pack all values directly into a single Field:
// [pre_inner: 32 bits | post_inner: 32 bits | block_of_change: 32 bits | post_is_some: 1 bit | pre_is_some: 1 bit]
let packed = U128::from_integer(self.pre.unwrap_unchecked())
+ (U128::from_integer(self.post.unwrap_unchecked()) << 32)
+ (U128::from_integer(self.block_of_change) << 64)
+ (U128::from_integer(self.post.is_some()) << 96)
+ (U128::from_integer(self.pre.is_some()) << 97);
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Warnings fix. As discussed in a PR down the stack now I don't work with u64 limbs but I just pack directly. I needed to use U128 here to avoid overflows.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can't we use Field directly? If Field doesn't support shifts then lets just multiply by the corresponding power of two. U128 will make this unnecessarily expensive.


[packed.to_integer()]
}
}

impl<let INITIAL_DELAY: u32> Deserialize<1> for ScheduledDelayChange<INITIAL_DELAY> {
fn deserialize(input: [Field; 1]) -> Self {
let packed = U128::from_integer(input[0]);

// We use division and modulo to clear the bits that correspond to other values when unpacking.
let pre_is_some = ((packed.hi as u64) / (1 << 33)) as bool;
let pre_inner = ((packed.lo as u64) / (1 << 32)) as u32;

let post_is_some = (((packed.hi as u64) / (1 << 32)) % (1 << 1)) as bool;
let post_inner = ((packed.lo as u64) % (1 << 32)) as u32;

let block_of_change = ((packed.hi as u64) % (1 << 32)) as u32;
let packed = U128::from_field(input[0]);

// We unpack the values from the Field
let pre_inner = packed.to_integer() as u32;
let post_inner = (packed >> 32).to_integer() as u32;
let block_of_change = (packed >> 64).to_integer() as u32;
let post_is_some = (packed >> 96).to_integer() & 1 == 1;
let pre_is_some = (packed >> 97).to_integer() & 1 == 1;

Self {
pre: if pre_is_some {
Expand Down
2 changes: 1 addition & 1 deletion noir-projects/aztec-nr/aztec/src/utils/array/collapse.nr
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ pub fn collapse<T, let N: u32>(input: [Option<T>; N]) -> BoundedVec<T, N>
where
T: Eq,
{
// Computing the collpased BoundedVec would result in a very large number of constraints, since we'd need to loop
// Computing the collapsed BoundedVec would result in a very large number of constraints, since we'd need to loop
// over the input array and conditionally write to a dynamic vec index, which is a very unfriendly pattern to the
// proving backend.
// Instead, we use an unconstrained function to produce the final collapsed array, along with some hints, and then
Expand Down
2 changes: 1 addition & 1 deletion noir-projects/aztec-nr/value-note/src/value_note.nr
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ use dep::aztec::{
},
};

global VALUE_NOTE_LEN: u32 = 3; // 3 plus a header.
pub global VALUE_NOTE_LEN: u32 = 3; // 3 plus a header.
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Warnings fix --> value was used in a test contract.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Which test contract? Accessing this is an antipattern.


// docs:start:value-note-def
// ValueNote is used as fn parameter in the Claim contract, so it has to implement the Serialize trait.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,12 @@ use dep::aztec::macros::aztec;

#[aztec]
contract StatefulTest {
use dep::aztec::{
initializer::assert_is_initialized_private,
macros::{functions::{initializer, noinitcheck, private, public, view}, storage::storage},
use dep::aztec::macros::{
functions::{
initialization_utils::assert_is_initialized_private, initializer, noinitcheck, private,
public, view,
},
storage::storage,
};
use dep::aztec::prelude::{AztecAddress, FunctionSelector, Map, PrivateSet, PublicMutable};
use dep::value_note::{balance_utils, utils::{decrement, increment}, value_note::ValueNote};
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use crate::{
constants::{BLOBS_PER_BLOCK, FIELDS_PER_BLOB, SPONGE_BLOB_LENGTH},
constants::{BLOBS_PER_BLOCK, FIELDS_PER_BLOB, POW64, SPONGE_BLOB_LENGTH},
nventuro marked this conversation as resolved.
Show resolved Hide resolved
hash::poseidon2_absorb_chunks_existing_sponge,
traits::{Deserialize, Empty, Serialize},
};
Expand All @@ -19,7 +19,7 @@ use std::hash::poseidon2::Poseidon2;
// The hash is used as part of the blob challenge, as we've proven it encompasses all elts of the blob(s).

// Init is given by input len * 2^64 (see noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr -> hash_internal)
global IV: Field = (FIELDS_PER_BLOB * BLOBS_PER_BLOCK) as Field * 18446744073709551616;
global IV: Field = (FIELDS_PER_BLOB * BLOBS_PER_BLOCK) as Field * POW64;

pub struct SpongeBlob {
pub sponge: Poseidon2,
Expand All @@ -34,7 +34,7 @@ impl SpongeBlob {

pub fn new(expected_fields_hint: u32) -> Self {
Self {
sponge: Poseidon2::new((expected_fields_hint as Field) * 18446744073709551616),
sponge: Poseidon2::new((expected_fields_hint as Field) * POW64),
fields: 0,
expected_fields: expected_fields_hint,
}
Expand Down
Loading
Loading