feat: tightly pack public logs inside blobs #11752

Open · wants to merge 12 commits into base: master
Showing changes from all commits.
@@ -175,8 +175,7 @@ mod tests {
MAX_L2_TO_L1_MSGS_PER_TX, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX,
MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, NOTES_PREFIX, NULLIFIERS_PREFIX,
PRIVATE_LOG_SIZE_IN_FIELDS, PRIVATE_LOGS_PREFIX, PUBLIC_DATA_UPDATE_REQUESTS_PREFIX,
PUBLIC_LOG_SIZE_IN_FIELDS, PUBLIC_LOGS_PREFIX, REVERT_CODE_PREFIX, TUBE_VK_INDEX,
TX_FEE_PREFIX, TX_START_PREFIX,
PUBLIC_LOGS_PREFIX, REVERT_CODE_PREFIX, TUBE_VK_INDEX, TX_FEE_PREFIX, TX_START_PREFIX,
},
data::{PublicDataTreeLeaf, PublicDataTreeLeafPreimage},
hash::silo_l2_to_l1_message,
@@ -188,8 +187,9 @@ mod tests {
merkle_tree_utils::NonEmptyMerkleTree,
},
traits::{Empty, Hash, Serialize},
utils::{arrays::array_concat, field::field_from_bytes},
utils::{arrays::{array_concat, find_index_hint_from_end}, field::field_from_bytes},
};
use types::constants::PUBLIC_LOG_DATA_SIZE_IN_FIELDS;

struct NullifierInsertion {
existing_index: u32,
@@ -411,21 +411,6 @@ mod tests {
let NUM_CC_LOGS = 1;
let PUB_DATA_SLOT = 25;
let PUB_DATA_VALUE = 60;
let TOTAL_BLOB_FIELDS = 3 // revert code, tx hash and tx fee
+ NUM_NOTES
+ 1 // notes and prefix
+ NUM_NULLIFIERS
+ 1 // nullifiers and prefix
+ NUM_MSGS
+ 1 // L2 to L1 msgs and prefix
+ NUM_PUB_LOGS * PUBLIC_LOG_SIZE_IN_FIELDS
+ 1 // public logs and prefix
+ NUM_CC_LOGS
+ 1 // contract class logs and prefix
+ 2
+ 1 // single public data update (2 fields) and prefix
+ NUM_PRIV_EVENT_LOGS * PRIVATE_LOG_SIZE_IN_FIELDS
+ 1; // private logs and prefix
let tx_fee = 100_000;
let mut builder = PublicBaseRollupInputsBuilder::new();
builder.transaction_fee = tx_fee;
@@ -448,31 +433,20 @@
0, PublicDataTreeLeaf { slot: PUB_DATA_SLOT, value: PUB_DATA_VALUE },
));
builder.tube_data.append_private_logs(NUM_PRIV_EVENT_LOGS);
builder.avm_data.append_public_logs(NUM_PUB_LOGS);
for i in 0..NUM_PUB_LOGS {
// Manually add logs (rather than using append_public_logs) to test packing of logs with trailing zeros
let fields = [(2 * i) as Field, (2 * i + 1) as Field, (2 * i + 2) as Field];
builder.avm_data.add_public_log(array_concat(
fields,
[0; PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 3 /* = fields.len() */],
));
}
// Below will only work with NUM_CC_LOGS=1
builder.tube_data.add_contract_class_log_hash(1, 2);
let inputs = builder.build_inputs();
let outputs = inputs.execute();

let mut reconstructed_tx_effects = [0; TX_EFFECTS_BLOB_HASH_INPUT_FIELDS];
// Initial field = TX_START_PREFIX | 0 | txlen[0] txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revert_code
// revert code = 0
let total_blob_fields_bytes = (TOTAL_BLOB_FIELDS as Field).to_be_bytes::<2>();
reconstructed_tx_effects[0] = field_from_bytes(
array_concat(
TX_START_PREFIX.to_be_bytes::<8>(),
[
0,
total_blob_fields_bytes[0],
total_blob_fields_bytes[1],
0,
REVERT_CODE_PREFIX,
0,
0,
],
),
true,
);
// tx hash
reconstructed_tx_effects[1] = inputs.tube_data.public_inputs.hash();
// tx fee
Expand Down Expand Up @@ -531,17 +505,27 @@ mod tests {
}
offset += total_private_logs_len;
// public logs
let public_logs_prefix =
encode_blob_prefix(PUBLIC_LOGS_PREFIX, NUM_PUB_LOGS * PUBLIC_LOG_SIZE_IN_FIELDS);
let total_public_logs_len = builder
.avm_data
.public_logs
.storage()
.map(|l| find_index_hint_from_end(l.serialize(), |f| f != 0))
.fold(0, |acc, len| acc + len + if len == 0 { 0 } else { 1 });
let public_logs_prefix = encode_blob_prefix(PUBLIC_LOGS_PREFIX, total_public_logs_len);
reconstructed_tx_effects[offset] = public_logs_prefix;
offset += 1;
for i in 0..NUM_PUB_LOGS {
let log = builder.avm_data.public_logs.storage()[i].serialize();
for j in 0..PUBLIC_LOG_SIZE_IN_FIELDS {
reconstructed_tx_effects[offset + i * PUBLIC_LOG_SIZE_IN_FIELDS + j] = log[j];
let log_len = find_index_hint_from_end(log, |f| f != 0);
if log_len != 0 {
reconstructed_tx_effects[offset] = log_len as Field;
offset += 1;
}
for j in 0..log_len {
reconstructed_tx_effects[offset] = log[j];
offset += 1;
}
}
offset += NUM_PUB_LOGS * PUBLIC_LOG_SIZE_IN_FIELDS;
// cc logs
let contract_class_logs_prefix =
encode_blob_prefix(CONTRACT_CLASS_LOGS_PREFIX, NUM_CC_LOGS);
@@ -554,8 +538,16 @@
}
offset += NUM_CC_LOGS;

// Sanity check
assert(offset == TOTAL_BLOB_FIELDS);
// Initial field = TX_START_PREFIX | 0 | txlen[0] txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revert_code
// revert code = 0
let length_bytes = (offset as Field).to_be_bytes::<2>();
reconstructed_tx_effects[0] = field_from_bytes(
array_concat(
TX_START_PREFIX.to_be_bytes::<8>(),
[0, length_bytes[0], length_bytes[1], 0, REVERT_CODE_PREFIX, 0, 0],
),
true,
);

let mut expected_sponge = outputs.start_sponge_blob;
expected_sponge.absorb(reconstructed_tx_effects, offset);
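One consequence of packing variable-length logs: the total blob-field count is only known after all effects are laid out, so the initial field (which embeds that count) is now assembled at the end, from `offset`. A minimal TypeScript sketch of the 15-byte layout it encodes (function name and types are illustrative, not from the codebase; the real prefix constants live in the protocol constants):

```ts
// Layout: TX_START_PREFIX (8 bytes, big-endian) | 0 | txlen[0] txlen[1] | 0 |
// REVERT_CODE_PREFIX | 0 | revert_code, read big-endian as one field element.
function encodeInitialField(txStartPrefix: bigint, numFields: number, revertCodePrefix: number, revertCode: number): Uint8Array {
  const bytes = new Uint8Array(15);
  for (let i = 7; i >= 0; i--) {
    bytes[i] = Number(txStartPrefix & 0xffn); // 8-byte big-endian prefix
    txStartPrefix >>= 8n;
  }
  bytes[9] = (numFields >> 8) & 0xff; // txlen[0]
  bytes[10] = numFields & 0xff; // txlen[1]
  bytes[12] = revertCodePrefix;
  bytes[14] = revertCode;
  return bytes; // bytes 8, 11 and 13 stay 0 as separators
}
```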
@@ -20,7 +20,7 @@ use dep::types::{
hash::{accumulate_sha256, silo_contract_class_log_hash},
merkle_tree::VariableMerkleTree,
traits::{Empty, Serialize},
utils::arrays::{array_length, array_merge},
utils::arrays::{array_length, array_merge, find_index_hint_from_end, validate_trailing_zeroes},
};
use blob::blob_public_inputs::BlockBlobPublicInputs;

@@ -149,9 +149,9 @@ pub fn encode_blob_prefix(input_type: u8, array_len: u32) -> Field {
// MAX_NULLIFIERS_PER_TX fields for nullifiers
// MAX_L2_TO_L1_MSGS_PER_TX for L2 to L1 messages
// MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX public data update requests -> MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * 2 fields
// TODO(#8954): When logs are refactored into fields, we will append the values here, for now appending the log hashes:
// MAX_PRIVATE_LOGS_PER_TX * PRIVATE_LOG_SIZE_IN_FIELDS fields for private logs
// MAX_PUBLIC_LOGS_PER_TX * PUBLIC_LOG_SIZE_IN_FIELDS fields for public logs
// MAX_PUBLIC_LOGS_PER_TX * (PUBLIC_LOG_SIZE_IN_FIELDS + 1) fields for public logs (+1 for length of each log)
// TODO(#8954): When logs are refactored into fields, we will append the values here, for now appending the log hashes:
// MAX_CONTRACT_CLASS_LOGS_PER_TX fields for contract class logs
// 7 fields for prefixes for each of the above categories
pub(crate) global TX_EFFECTS_BLOB_HASH_INPUT_FIELDS: u32 = 1
@@ -162,7 +162,7 @@ pub(crate) global TX_EFFECTS_BLOB_HASH_INPUT_FIELDS: u32 = 1
+ MAX_L2_TO_L1_MSGS_PER_TX
+ MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * 2
+ MAX_PRIVATE_LOGS_PER_TX * PRIVATE_LOG_SIZE_IN_FIELDS
+ MAX_PUBLIC_LOGS_PER_TX * PUBLIC_LOG_SIZE_IN_FIELDS
+ MAX_PUBLIC_LOGS_PER_TX * (PUBLIC_LOG_SIZE_IN_FIELDS + 1)
+ MAX_CONTRACT_CLASS_LOGS_PER_TX
+ 7;

@@ -315,7 +315,6 @@ fn get_tx_effects_hash_input(
offset += array_len * 2;
}

// TODO(Miranda): squash 0s in a nested loop and add len prefix?
// PRIVATE_LOGS
array_len = array_length(private_logs) * PRIVATE_LOG_SIZE_IN_FIELDS;
if array_len != 0 {
@@ -337,23 +336,32 @@
}

// PUBLIC LOGS
array_len = array_length(public_logs) * PUBLIC_LOG_SIZE_IN_FIELDS;
array_len = array_length(public_logs);
if array_len != 0 {
let mut check_elt = true;
let public_logs_prefix = encode_blob_prefix(PUBLIC_LOGS_PREFIX, array_len);
assert_eq(tx_effects_hash_input[offset], public_logs_prefix);
let prefix_index = offset;
offset += 1;
let mut total_public_log_len = 0;

for j in 0..MAX_PUBLIC_LOGS_PER_TX {
let log = public_logs[j].serialize();
let log_len = validate_trailing_zeroes(log);
if log_len != 0 {
assert_eq(tx_effects_hash_input[offset], log_len as Field);
offset += 1;
total_public_log_len += log_len + 1;
check_elt = true;
}
for k in 0..PUBLIC_LOG_SIZE_IN_FIELDS {
let index = offset + j * PUBLIC_LOG_SIZE_IN_FIELDS + k;
check_elt &= j * PUBLIC_LOG_SIZE_IN_FIELDS + k != array_len;
check_elt &= k != log_len;
if check_elt {
assert_eq(tx_effects_hash_input[index], log[k]);
assert_eq(tx_effects_hash_input[offset + k], log[k]);
}
}
offset += log_len;
}
offset += array_len;
let public_logs_prefix = encode_blob_prefix(PUBLIC_LOGS_PREFIX, total_public_log_len);
assert_eq(tx_effects_hash_input[prefix_index], public_logs_prefix);
}
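Worth noting for reviewers: the constrained loop above cannot use data-dependent bounds, so it always runs PUBLIC_LOG_SIZE_IN_FIELDS iterations per log and uses `check_elt` to mask the assertions off once `k` reaches the emitted length. A rough TypeScript analogue of the pattern (plain numbers instead of Fields, helper names illustrative):

```ts
// TS analogue of the constrained check: a fixed-bound inner loop whose
// assertions are masked off once `k` reaches the log's emitted length.
function checkPackedLogs(input: number[], logs: number[][], logSize: number, start: number): number {
  const trimmedLen = (a: number[]): number => {
    for (let i = a.length - 1; i >= 0; i--) if (a[i] !== 0) return i + 1;
    return 0;
  };
  let offset = start;
  let checkElt = true;
  for (const log of logs) {
    const logLen = trimmedLen(log);
    if (logLen !== 0) {
      if (input[offset] !== logLen) throw new Error('bad length prefix');
      offset += 1;
      checkElt = true;
    }
    // Always `logSize` iterations, mirroring `check_elt &= k != log_len`.
    for (let k = 0; k < logSize; k++) {
      checkElt = checkElt && k !== logLen;
      if (checkElt && input[offset + k] !== log[k]) throw new Error('field mismatch');
    }
    offset += logLen;
  }
  return offset;
}
```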

// TODO(#8954): When logs are refactored into fields, we will append the values here
@@ -475,7 +483,6 @@ unconstrained fn get_tx_effects_hash_input_helper(
offset += array_len * 2;
}

// TODO(Miranda): squash 0s in a nested loop and add len prefix?
// PRIVATE_LOGS
let num_private_logs = array_length(private_logs);
if num_private_logs != 0 {
@@ -496,18 +503,23 @@
// PUBLIC LOGS
let num_public_logs = array_length(public_logs);
if num_public_logs != 0 {
let array_len = num_public_logs * PUBLIC_LOG_SIZE_IN_FIELDS;
let public_logs_prefix = encode_blob_prefix(PUBLIC_LOGS_PREFIX, array_len);
tx_effects_hash_input[offset] = public_logs_prefix;
let prefix_index = offset;
offset += 1;
let mut array_len = 0;

for j in 0..num_public_logs {
let log = public_logs[j].serialize();
for k in 0..PUBLIC_LOG_SIZE_IN_FIELDS {
let index = offset + j * PUBLIC_LOG_SIZE_IN_FIELDS + k;
tx_effects_hash_input[index] = log[k];
let log_len = find_index_hint_from_end(log, |f| f != 0);
tx_effects_hash_input[offset] = log_len as Field;
offset += 1;
array_len += log_len + 1;
for k in 0..log_len {
tx_effects_hash_input[offset + k] = log[k];
}
offset += log_len;
}
offset += array_len;
let public_logs_prefix = encode_blob_prefix(PUBLIC_LOGS_PREFIX, array_len);
tx_effects_hash_input[prefix_index] = public_logs_prefix;
}

// TODO(#8954): When logs are refactored into fields, we will append the values here
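The encoding both the circuit and the TS side now agree on: each non-empty public log becomes a length field followed by its trimmed fields, and the section prefix counts all of those fields together. A hedged TypeScript sketch of the packer (function and parameter names are illustrative, not from the codebase):

```ts
// Packs logs as [prefix, len_0, ...fields_0, len_1, ...fields_1, ...], where
// each log is trimmed of trailing zeros and len_i counts the fields kept.
function packPublicLogs(logs: number[][], encodePrefix: (totalLen: number) => number): number[] {
  const body: number[] = [];
  for (const log of logs) {
    const lastNonZero = log.reduce((acc, f, i) => (f !== 0 ? i : acc), -1);
    const emitted = log.slice(0, lastNonZero + 1);
    if (emitted.length === 0) continue; // all-zero logs contribute nothing
    body.push(emitted.length, ...emitted);
  }
  // The section prefix encodes the total packed length (length fields included).
  return body.length === 0 ? [] : [encodePrefix(body.length), ...body];
}
```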
@@ -81,6 +81,25 @@ pub unconstrained fn find_index_hint<T, let N: u32, Env>(
index
}

// As above, but scans from the end of the array.
// Useful for trimming trailing zeroes from arrays that may contain legitimate zero values earlier on,
// e.g. removing trailing 0s from [1, 0, 2, 0, 0, 0] -> [1, 0, 2].
pub unconstrained fn find_index_hint_from_end<T, let N: u32, Env>(
array: [T; N],
find: fn[Env](T) -> bool,
) -> u32 {
let mut index = 0;
for i in 0..N {
let j = N - i - 1;
// We check `index == 0` to ensure that we only update the index if we haven't found a match yet.
if (index == 0) & find(array[j]) {
// `j` is the last index (scanning back from the end) at which find() succeeds, so the trimmed length is j + 1:
index = j + 1;
}
}
index
}
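Note the return value: despite the name, the helper returns one past the last index at which `find` succeeds, i.e. the length of the array once trailing non-matching elements are dropped. The TypeScript equivalent, for intuition:

```ts
// Returns (last matching index + 1), or 0 if nothing matches: the trimmed length.
function findIndexHintFromEnd<T>(array: T[], find: (t: T) => boolean): number {
  for (let i = array.length - 1; i >= 0; i--) {
    if (find(array[i])) return i + 1;
  }
  return 0;
}

// findIndexHintFromEnd([1, 0, 2, 0, 0, 0], f => f !== 0) === 3
```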

// Routine which validates that all zero values of an array form a contiguous region at the end, i.e.,
// of the form: [*,*,*...,0,0,0,0] where any * is non-zero. Note that a full array of non-zero values is
// valid.
@@ -101,6 +120,29 @@
length
}

// Routine which validates that, beyond the last non-zero element, all values of an array are zero,
// i.e., of the form: [*,*,*,...,0,0,0,0] where * is any value (interior zeroes allowed), and returns
// the length up to and including the last non-zero element.
pub fn validate_trailing_zeroes<T, let N: u32>(array: [T; N]) -> u32
where
T: Empty + Eq,
{
/// Safety: this value is constrained in the below loop.
let length = unsafe { find_index_hint_from_end(array, |elem: T| !is_empty(elem)) };
// Check the elt just before length is non-zero:
if length != 0 {
assert(!is_empty(array[length - 1]), "invalid array");
}
// Check all beyond length are zero:
let mut check_zero = false;
for i in 0..N {
check_zero |= i == length;
if check_zero {
assert(is_empty(array[i]), "invalid array");
}
}
length
}
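The second loop is the interesting part: rather than a data-dependent bound, a latch flag flips on at `i == length` and every element from then on must be empty. A TypeScript restatement of the whole check (illustrative; zero stands in for `Empty`):

```ts
// Restates validate_trailing_zeroes: find the trimmed length via the hint,
// then verify it with one fixed-bound pass using a latch flag.
function validateTrailingZeroes(array: number[]): number {
  let length = 0;
  for (let i = array.length - 1; i >= 0 && length === 0; i--) {
    if (array[i] !== 0) length = i + 1;
  }
  // The element just before `length` must be non-zero (unless the array is all zero).
  if (length !== 0 && array[length - 1] === 0) throw new Error('invalid array');
  // Latch flips on at i == length; everything from there on must be zero.
  let checkZero = false;
  for (let i = 0; i < array.length; i++) {
    checkZero = checkZero || i === length;
    if (checkZero && array[i] !== 0) throw new Error('invalid array');
  }
  return length;
}
```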

// Helper function to count the number of non-empty elements in a validated array.
// Important: Only use it for validated arrays where validate_array(array) returns true,
// which ensures that:
@@ -230,6 +272,30 @@
assert(validate_array(valid_array) == 3);
}

#[test]
fn smoke_validate_array_trailing() {
let valid_array: [Field; 0] = [];
assert(validate_trailing_zeroes(valid_array) == 0);

let valid_array = [0];
assert(validate_trailing_zeroes(valid_array) == 0);

let valid_array = [3];
assert(validate_trailing_zeroes(valid_array) == 1);

let valid_array = [1, 0, 3];
assert(validate_trailing_zeroes(valid_array) == 3);

let valid_array = [1, 0, 3, 0];
assert(validate_trailing_zeroes(valid_array) == 3);

let valid_array = [1, 2, 3, 0, 0];
assert(validate_trailing_zeroes(valid_array) == 3);

let valid_array = [0, 0, 3, 0, 0];
assert(validate_trailing_zeroes(valid_array) == 3);
}

#[test(should_fail_with = "invalid array")]
fn smoke_validate_array_invalid_case0() {
let invalid_array = [0, 1];
yarn-project/circuit-types/src/tx_effect.ts (13 additions, 5 deletions)
@@ -376,8 +376,12 @@ export class TxEffect {
flattened.push(...this.privateLogs.map(l => l.fields).flat());
}
if (this.publicLogs.length) {
flattened.push(this.toPrefix(PUBLIC_LOGS_PREFIX, this.publicLogs.length * PUBLIC_LOG_SIZE_IN_FIELDS));
flattened.push(...this.publicLogs.map(l => l.toFields()).flat());
const totalLogLen = this.publicLogs.reduce(
(total, log) => total + (log.getEmittedLength() == 0 ? 0 : log.getEmittedLength() + 1),
0,
);
flattened.push(this.toPrefix(PUBLIC_LOGS_PREFIX, totalLogLen));
flattened.push(...this.publicLogs.flatMap(l => [new Fr(l.getEmittedLength()), ...l.getEmittedFields()]));
}
// TODO(#8954): When logs are refactored into fields, we will append the values here
// Currently appending the single log hash as an interim solution
Expand Down Expand Up @@ -446,7 +450,6 @@ export class TxEffect {
break;
}
case PRIVATE_LOGS_PREFIX: {
// TODO(Miranda): squash log 0s in a nested loop and add len prefix?
ensureEmpty(effect.privateLogs);
const flatPrivateLogs = reader.readFieldArray(length);
for (let i = 0; i < length; i += PRIVATE_LOG_SIZE_IN_FIELDS) {
@@ -457,8 +460,13 @@
case PUBLIC_LOGS_PREFIX: {
ensureEmpty(effect.publicLogs);
const flatPublicLogs = reader.readFieldArray(length);
for (let i = 0; i < length; i += PUBLIC_LOG_SIZE_IN_FIELDS) {
effect.publicLogs.push(PublicLog.fromFields(flatPublicLogs.slice(i, i + PUBLIC_LOG_SIZE_IN_FIELDS)));
let i = 0;
while (i < length) {
const logLen = flatPublicLogs[i++].toNumber();
const logFields = flatPublicLogs.slice(i, (i += logLen));
effect.publicLogs.push(
PublicLog.fromFields(logFields.concat(new Array(PUBLIC_LOG_SIZE_IN_FIELDS - logLen).fill(Fr.ZERO))),
);
}
break;
}
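Decoding inverts the packing: read a length, take that many fields, and pad back to the fixed circuit width before reconstructing the log. A compact sketch of the unpacker (plain numbers instead of Fr, names illustrative):

```ts
// Unpacks [len_0, ...fields_0, len_1, ...fields_1, ...] into fixed-size logs.
function unpackPublicLogs(flat: number[], logSize: number): number[][] {
  const logs: number[][] = [];
  let i = 0;
  while (i < flat.length) {
    const logLen = flat[i++];
    const fields = flat.slice(i, i + logLen);
    i += logLen;
    // Pad the trimmed log back to its fixed circuit width.
    logs.push([...fields, ...new Array(logSize - logLen).fill(0)]);
  }
  return logs;
}
```

Round-tripping this against the packer sketched earlier reproduces the original fixed-size logs, since only trailing zeros are ever dropped.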
yarn-project/circuits.js/src/structs/public_log.ts (11 additions, 0 deletions)
@@ -54,6 +54,17 @@ export class PublicLog {
return new PublicLog(await AztecAddress.random(), makeTuple(PUBLIC_LOG_DATA_SIZE_IN_FIELDS, Fr.random));
}

getEmittedLength() {
// This assumes trailing zeroes have been cut from the end of the log; on the TS side they are always padded back when decoding.
return this.getEmittedFields().length;
}

getEmittedFields() {
const fields = this.toFields();
const lastNonZeroIndex = fields.findLastIndex(f => !f.isZero());
return fields.slice(0, lastNonZeroIndex + 1);
}
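Since getEmittedFields trims only trailing zeros, interior zeros survive the round trip, which is what makes the padding on decode safe. A small property check of that invariant (plain numbers, not Fr):

```ts
// Property: padding the emitted fields back to the original width reproduces
// the input exactly, because only trailing zeros are cut.
const roundTrips = (fields: number[]): boolean => {
  const emitted = fields.slice(0, fields.map(f => f !== 0).lastIndexOf(true) + 1);
  const padded = [...emitted, ...new Array(fields.length - emitted.length).fill(0)];
  return padded.every((f, i) => f === fields[i]);
};
```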

equals(other: this) {
return (
this.contractAddress.equals(other.contractAddress) &&