diff --git a/.noir-sync-commit b/.noir-sync-commit index 3222858403e..88f10e68a4d 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -bb6913ac53620fabd73e24ca1a2b1369225903ec +e59ff8c6a12978407be4f9f474d5208bdabb8c29 diff --git a/avm-transpiler/Cargo.lock b/avm-transpiler/Cargo.lock index 965e79489c1..890073034cc 100644 --- a/avm-transpiler/Cargo.lock +++ b/avm-transpiler/Cargo.lock @@ -694,6 +694,7 @@ name = "fm" version = "0.31.0" dependencies = [ "codespan-reporting", + "iter-extended", "serde", ] diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr index 7d660e11853..943a9636acb 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr @@ -32,15 +32,15 @@ impl EncryptedLogHeader { fn test_encrypted_log_header() { let address = AztecAddress::from_field(0xdeadbeef); let header = EncryptedLogHeader::new(address); - let secret = Scalar::new( - 0x00000000000000000000000000000000649e7ca01d9de27b21624098b897babd, - 0x0000000000000000000000000000000023b3127c127b1f29a7adff5cccf8fb06 - ); - let point = Point::new( - 0x2688431c705a5ff3e6c6f2573c9e3ba1c1026d2251d0dbbf2d810aa53fd1d186, - 0x1e96887b117afca01c00468264f4f80b5bb16d94c1808a448595f115556e5c8e, - false - ); + let secret = Scalar { + lo: 0x00000000000000000000000000000000649e7ca01d9de27b21624098b897babd, + hi: 0x0000000000000000000000000000000023b3127c127b1f29a7adff5cccf8fb06 + }; + let point = Point { + x: 0x2688431c705a5ff3e6c6f2573c9e3ba1c1026d2251d0dbbf2d810aa53fd1d186, + y: 0x1e96887b117afca01c00468264f4f80b5bb16d94c1808a448595f115556e5c8e, + is_infinite: false + }; let ciphertext = header.compute_ciphertext(secret, point); diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr index c7052bbbbe4..3caaaeb845f 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr @@ -118,15 +118,15 @@ mod test { let storage_slot = 2; - let eph_sk = Scalar::new( - 0x00000000000000000000000000000000649e7ca01d9de27b21624098b897babd, - 0x0000000000000000000000000000000023b3127c127b1f29a7adff5cccf8fb06 - ); - let ivpk_app = Point::new( - 0x2688431c705a5ff3e6c6f2573c9e3ba1c1026d2251d0dbbf2d810aa53fd1d186, - 0x1e96887b117afca01c00468264f4f80b5bb16d94c1808a448595f115556e5c8e, - false - ); + let eph_sk = Scalar { + lo: 0x00000000000000000000000000000000649e7ca01d9de27b21624098b897babd, + hi: 0x0000000000000000000000000000000023b3127c127b1f29a7adff5cccf8fb06 + }; + let ivpk_app = Point { + x: 0x2688431c705a5ff3e6c6f2573c9e3ba1c1026d2251d0dbbf2d810aa53fd1d186, + y: 0x1e96887b117afca01c00468264f4f80b5bb16d94c1808a448595f115556e5c8e, + is_infinite: false + }; let body = EncryptedLogIncomingBody::from_note(note, storage_slot); @@ -217,16 +217,16 @@ mod test { fn test_encrypted_log_event_incoming_body() { let test_event = TestEvent { value0: 1, value1: 2, value2: 3 }; - let eph_sk = Scalar::new( - 0x00000000000000000000000000000000649e7ca01d9de27b21624098b897babd, - 0x0000000000000000000000000000000023b3127c127b1f29a7adff5cccf8fb06 - ); + let eph_sk = Scalar { + lo: 0x00000000000000000000000000000000649e7ca01d9de27b21624098b897babd, + hi: 0x0000000000000000000000000000000023b3127c127b1f29a7adff5cccf8fb06 + }; - let ivpk_app = Point::new( - 0x2688431c705a5ff3e6c6f2573c9e3ba1c1026d2251d0dbbf2d810aa53fd1d186, - 
0x1e96887b117afca01c00468264f4f80b5bb16d94c1808a448595f115556e5c8e, - false - ); + let ivpk_app = Point { + x: 0x2688431c705a5ff3e6c6f2573c9e3ba1c1026d2251d0dbbf2d810aa53fd1d186, + y: 0x1e96887b117afca01c00468264f4f80b5bb16d94c1808a448595f115556e5c8e, + is_infinite: false + }; let randomness = 2; diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/outgoing_body.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/outgoing_body.nr index c63a54baa6c..d3cc68d1d4a 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/outgoing_body.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/outgoing_body.nr @@ -2,7 +2,6 @@ use dep::protocol_types::{ address::AztecAddress, scalar::Scalar, point::Point, constants::GENERATOR_INDEX__SYMMETRIC_KEY, hash::poseidon2_hash }; - use std::aes128::aes128_encrypt; use crate::keys::point_to_symmetric_key::point_to_symmetric_key; @@ -65,26 +64,27 @@ mod test { address::AztecAddress, traits::Empty, constants::GENERATOR_INDEX__NOTE_NULLIFIER, scalar::Scalar, point::Point, hash::poseidon2_hash }; + use std::embedded_curve_ops::fixed_base_scalar_mul as derive_public_key; use crate::context::PrivateContext; #[test] fn test_encrypted_log_outgoing_body() { - let eph_sk = Scalar::new( - 0x00000000000000000000000000000000d0d302ee245dfaf2807e604eec4715fe, - 0x000000000000000000000000000000000f096b423017226a18461115fa8d34bb - ); - let recipient_ivsk_app = Scalar::new( - 0x000000000000000000000000000000004828f8f95676ebb481df163f87fd4022, - 0x000000000000000000000000000000000f4d97c25d578f9348251a71ca17ae31 - ); - let sender_ovsk_app = Scalar::new( - 0x0000000000000000000000000000000074d2e28c6bc5176ac02cf7c7d36a444e, - 0x00000000000000000000000000000000089c6887cb1446d86c64e81afc78048b - ); - - let eph_pk = eph_sk.derive_public_key(); - let recipient_ivpk_app = recipient_ivsk_app.derive_public_key(); + let eph_sk = Scalar { + lo: 0x00000000000000000000000000000000d0d302ee245dfaf2807e604eec4715fe, + hi: 0x000000000000000000000000000000000f096b423017226a18461115fa8d34bb + }; + let recipient_ivsk_app = Scalar { + lo: 0x000000000000000000000000000000004828f8f95676ebb481df163f87fd4022, + hi: 0x000000000000000000000000000000000f4d97c25d578f9348251a71ca17ae31 + }; + let sender_ovsk_app = Scalar { + lo: 0x0000000000000000000000000000000074d2e28c6bc5176ac02cf7c7d36a444e, + hi: 0x00000000000000000000000000000000089c6887cb1446d86c64e81afc78048b + }; + + let eph_pk = derive_public_key(eph_sk); + let recipient_ivpk_app = derive_public_key(recipient_ivsk_app); let recipient = AztecAddress::from_field(0xdeadbeef); diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr index d0dbde5450c..9141fcbe4ae 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr @@ -2,7 +2,7 @@ use dep::protocol_types::{ address::AztecAddress, scalar::Scalar, point::{Point, pub_key_to_bytes}, constants::{GENERATOR_INDEX__IVSK_M, GENERATOR_INDEX__OVSK_M}, hash::poseidon2_hash }; - +use std::embedded_curve_ops::fixed_base_scalar_mul as derive_public_key; use std::field::bytes32_to_field; use crate::oracle::unsafe_rand::unsafe_rand; @@ -25,7 +25,7 @@ pub fn compute_encrypted_event_log( ) -> [u8; OB] where Event: EventInterface { // @todo Need to draw randomness from the full domain of Fq not only Fr let eph_sk: Scalar = fr_to_fq(unsafe_rand()); - let eph_pk = eph_sk.derive_public_key(); + let eph_pk = derive_public_key(eph_sk); // TODO: (#7177) 
This value needs to be populated! let recipient = AztecAddress::from_field(0); @@ -82,7 +82,7 @@ pub fn compute_encrypted_note_log( ) -> [u8; M] where Note: NoteInterface { // @todo Need to draw randomness from the full domain of Fq not only Fr let eph_sk: Scalar = fr_to_fq(unsafe_rand()); - let eph_pk = eph_sk.derive_public_key(); + let eph_pk = derive_public_key(eph_sk); // TODO: (#7177) This value needs to be populated! let recipient = AztecAddress::from_field(0); @@ -142,10 +142,10 @@ fn fr_to_fq(r: Field) -> Scalar { low_bytes[16 + i] = r_bytes[i + 16]; } - let low = bytes32_to_field(low_bytes); - let high = bytes32_to_field(high_bytes); + let lo = bytes32_to_field(low_bytes); + let hi = bytes32_to_field(high_bytes); - Scalar::new(low, high) + Scalar { lo, hi } } fn compute_ivpk_app(ivpk: Point, contract_address: AztecAddress) -> Point { @@ -158,12 +158,12 @@ fn compute_ivpk_app(ivpk: Point, contract_address: AztecAddress) -> Point { assert((ivpk.x != 0) & (ivpk.y != 0), "ivpk is infinite"); let i = fr_to_fq(poseidon2_hash([contract_address.to_field(), ivpk.x, ivpk.y, GENERATOR_INDEX__IVSK_M])); - let I = i.derive_public_key(); + let I = derive_public_key(i); let embed_I = Point { x: I.x, y: I.y, is_infinite: false }; let embed_ivpk = Point { x: ivpk.x, y: ivpk.y, is_infinite: false }; let embed_result = embedded_curve_add(embed_I, embed_ivpk); - Point::new(embed_result.x, embed_result.y)*/ + Point { x: embed_result.x, y: embed_result.y, is_infinite: false }*/ } diff --git a/noir-projects/aztec-nr/aztec/src/keys/getters.nr b/noir-projects/aztec-nr/aztec/src/keys/getters.nr index f966b104d0a..372e3a433a5 100644 --- a/noir-projects/aztec-nr/aztec/src/keys/getters.nr +++ b/noir-projects/aztec-nr/aztec/src/keys/getters.nr @@ -86,7 +86,7 @@ fn fetch_key_from_registry( let x_coordinate = x_coordinate_registry.get_value_in_private(header); let y_coordinate = y_coordinate_registry.get_value_in_private(header); - Point::new(x_coordinate, y_coordinate, false) + Point { x: x_coordinate, y: y_coordinate, is_infinite: false } } // Passes only when keys were not rotated - is expected to be called only when keys were not registered yet diff --git a/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr b/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr index caa3173443f..b3e1c544dda 100644 --- a/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr +++ b/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr @@ -19,15 +19,15 @@ pub fn point_to_symmetric_key(secret: Scalar, point: Point) -> [u8; 32] { #[test] fn check_point_to_symmetric_key() { // Value taken from "derive shared secret" test in encrypt_buffer.test.ts - let secret = Scalar::new( - 0x00000000000000000000000000000000649e7ca01d9de27b21624098b897babd, - 0x0000000000000000000000000000000023b3127c127b1f29a7adff5cccf8fb06 - ); - let point = Point::new( - 0x2688431c705a5ff3e6c6f2573c9e3ba1c1026d2251d0dbbf2d810aa53fd1d186, - 0x1e96887b117afca01c00468264f4f80b5bb16d94c1808a448595f115556e5c8e, - false - ); + let secret = Scalar { + lo: 0x00000000000000000000000000000000649e7ca01d9de27b21624098b897babd, + hi: 0x0000000000000000000000000000000023b3127c127b1f29a7adff5cccf8fb06 + }; + let point = Point { + x: 0x2688431c705a5ff3e6c6f2573c9e3ba1c1026d2251d0dbbf2d810aa53fd1d186, + y: 0x1e96887b117afca01c00468264f4f80b5bb16d94c1808a448595f115556e5c8e, + is_infinite: false + }; let key = point_to_symmetric_key(secret, point); // The following value gets updated when running encrypt_buffer.test.ts with AZTEC_GENERATE_TEST_DATA=1 diff 
--git a/noir-projects/aztec-nr/aztec/src/keys/public_keys.nr b/noir-projects/aztec-nr/aztec/src/keys/public_keys.nr index 9ad4c01e200..ac53b7601fc 100644 --- a/noir-projects/aztec-nr/aztec/src/keys/public_keys.nr +++ b/noir-projects/aztec-nr/aztec/src/keys/public_keys.nr @@ -96,10 +96,10 @@ impl Serialize<PUBLIC_KEYS_LENGTH> for PublicKeys { impl Deserialize<PUBLIC_KEYS_LENGTH> for PublicKeys { fn deserialize(serialized: [Field; PUBLIC_KEYS_LENGTH]) -> PublicKeys { PublicKeys { - npk_m: Point::new(serialized[0], serialized[1], serialized[2] as bool), - ivpk_m: Point::new(serialized[3], serialized[4], serialized[5] as bool), - ovpk_m: Point::new(serialized[6], serialized[7], serialized[8] as bool), - tpk_m: Point::new(serialized[9], serialized[10], serialized[11] as bool) + npk_m: Point { x: serialized[0], y: serialized[1], is_infinite: serialized[2] as bool }, + ivpk_m: Point { x: serialized[3], y: serialized[4], is_infinite: serialized[5] as bool }, + ovpk_m: Point { x: serialized[6], y: serialized[7], is_infinite: serialized[8] as bool }, + tpk_m: Point { x: serialized[9], y: serialized[10], is_infinite: serialized[11] as bool } } } } diff --git a/noir-projects/aztec-nr/aztec/src/oracle/keys.nr b/noir-projects/aztec-nr/aztec/src/oracle/keys.nr index d7bd87c0c03..92403bf9eae 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/keys.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/keys.nr @@ -12,10 +12,10 @@ fn get_public_keys_and_partial_address(address: AztecAddress) -> (PublicKeys, Pa let result = get_public_keys_and_partial_address_oracle_wrapper(address); let keys = PublicKeys { - npk_m: Point::new(result[0], result[1], result[2] as bool), - ivpk_m: Point::new(result[3], result[4], result[5] as bool), - ovpk_m: Point::new(result[6], result[7], result[8] as bool), - tpk_m: Point::new(result[9], result[10], result[11] as bool) + npk_m: Point { x: result[0], y: result[1], is_infinite: result[2] as bool }, + ivpk_m: Point { x: result[3], y: result[4], is_infinite: result[5] as bool }, + ovpk_m: Point { x: result[6], y: result[7], is_infinite: result[8] as bool }, + tpk_m: Point { x: result[9], y: result[10], is_infinite: result[11] as bool } }; let partial_address = PartialAddress::from_field(result[12]); diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index 97aaec35ace..f0d020e3c61 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -37,6 +37,7 @@ contract Test { }; use dep::token_portal_content_hash_lib::{get_mint_private_content_hash, get_mint_public_content_hash}; use dep::value_note::value_note::ValueNote; + use std::embedded_curve_ops::fixed_base_scalar_mul as derive_public_key; use crate::test_note::TestNote; @@ -427,7 +428,7 @@ contract Test { recipient_ivpk_app: Point, ovsk_app: Scalar ) -> [u8; 176] { - let eph_pk = eph_sk.derive_public_key(); + let eph_pk = derive_public_key(eph_sk); EncryptedLogOutgoingBody::new(eph_sk, recipient, recipient_ivpk_app).compute_ciphertext(ovsk_app, eph_pk) } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_reset.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_reset.nr index 8d8ef785f20..a57531c8d7b 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_reset.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_reset.nr @@ -290,7 +290,11 
@@ mod tests { let remaining_nullifier_rr_index = builder.previous_kernel.add_read_request_for_pending_nullifier(1); let nullifier_rr = builder.previous_kernel.nullifier_read_requests.storage[remaining_nullifier_rr_index]; - let key_validation_index = builder.previous_kernel.add_request_for_key_validation(Point::new(1, 2, false), 27, GENERATOR_INDEX__OVSK_M); + let key_validation_index = builder.previous_kernel.add_request_for_key_validation( + Point { x: 1, y: 2, is_infinite: false }, + 27, + GENERATOR_INDEX__OVSK_M + ); let key_validation = builder.previous_kernel.scoped_key_validation_requests_and_generators.storage[key_validation_index]; // Check that they have been propagated to the next kernel diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr index 40196869689..20433ada5cf 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr @@ -334,7 +334,11 @@ mod tests { #[test(should_fail_with="Non empty key validation requests")] fn non_empty_key_validations() { let mut builder = PrivateKernelTailInputsBuilder::new(); - let _void = builder.previous_kernel.add_request_for_key_validation(Point::new(1, 2, false), 27, GENERATOR_INDEX__IVSK_M); + let _void = builder.previous_kernel.add_request_for_key_validation( + Point { x: 1, y: 2, is_infinite: false }, + 27, + GENERATOR_INDEX__IVSK_M + ); builder.failed(); } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr index 6180ea6c315..f093f0bd07f 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr @@ -325,7 +325,11 @@ mod tests { #[test(should_fail_with="Non empty key validation requests")] fn non_empty_key_validations() { let mut builder = PrivateKernelTailToPublicInputsBuilder::new(); - let _void = builder.previous_kernel.add_request_for_key_validation(Point::new(1, 2, false), 27, GENERATOR_INDEX__TSK_M); + let _void = builder.previous_kernel.add_request_for_key_validation( + Point { x: 1, y: 2, is_infinite: false }, + 27, + GENERATOR_INDEX__TSK_M + ); builder.failed(); } diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/key_validation_hint.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/key_validation_hint.nr index a682550ccb4..dd5cdd193a2 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/key_validation_hint.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/key_validation_hint.nr @@ -3,6 +3,7 @@ use dep::types::{ constants::MAX_KEY_VALIDATION_REQUESTS_PER_TX, scalar::Scalar, hash::poseidon2_hash, utils::arrays::filter_array_to_bounded_vec }; +use std::embedded_curve_ops::fixed_base_scalar_mul as derive_public_key; struct KeyValidationHint { sk_m: Scalar, @@ -44,7 +45,7 @@ pub fn reset_key_validation_requests( let sk_app_generator = request_and_generator.sk_app_generator; // First we check that derived public key matches master public key from request - let pk_m = sk_m.derive_public_key(); + let pk_m = 
derive_public_key(sk_m); assert( pk_m.eq(request.pk_m), "Failed to derive matching master public key from the secret key." ); diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/key_validation_request.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/key_validation_request.nr index a6088696f7a..eec4479a872 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/key_validation_request.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/key_validation_request.nr @@ -35,7 +35,7 @@ impl Serialize<KEY_VALIDATION_REQUEST_LENGTH> for KeyValidationRequest { impl Deserialize<KEY_VALIDATION_REQUEST_LENGTH> for KeyValidationRequest { fn deserialize(fields: [Field; KEY_VALIDATION_REQUEST_LENGTH]) -> Self { Self { - pk_m: Point::new(fields[0], fields[1], fields[2] as bool), + pk_m: Point { x: fields[0], y: fields[1], is_infinite: fields[2] as bool }, sk_app: fields[3], } } diff --git a/noir/bb-version b/noir/bb-version index 8298bb08b2d..620104d8208 100644 --- a/noir/bb-version +++ b/noir/bb-version @@ -1 +1 @@ -0.43.0 +0.46.1 diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs index 115ecbf7992..5d749e709b3 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs @@ -360,7 +360,7 @@ mod tests { use std::collections::BTreeSet; use super::{ - opcodes::{BlackBoxFuncCall, ConstantOrWitnessEnum, FunctionInput}, + opcodes::{BlackBoxFuncCall, FunctionInput}, Circuit, Compression, Opcode, PublicInputs, }; use crate::{ diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index 7c560a0a346..6478f0c7a19 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -520,7 +520,7 @@ mod tests { use crate::{circuit::Opcode, native_types::Witness}; use acir_field::{AcirField, FieldElement}; - use super::{BlackBoxFuncCall, ConstantOrWitnessEnum, FunctionInput}; + use super::{BlackBoxFuncCall, FunctionInput}; fn keccakf1600_opcode() -> Opcode { let inputs: Box<[FunctionInput; 25]> = diff --git a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs index 3a42cc41d47..3047ac002e8 100644 --- a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs +++ b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs @@ -14,7 +14,7 @@ use std::collections::BTreeSet; use acir::{ circuit::{ brillig::{BrilligBytecode, BrilligInputs, BrilligOutputs}, - opcodes::{BlackBoxFuncCall, BlockId, ConstantOrWitnessEnum, FunctionInput, MemOp}, + opcodes::{BlackBoxFuncCall, BlockId, FunctionInput, MemOp}, Circuit, Opcode, Program, PublicInputs, }, native_types::{Expression, Witness}, diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs index 87a026148d7..b03b6715abe 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs @@ -150,7 +150,7 @@ mod tests { use crate::compiler::optimizers::redundant_range::RangeOptimizer; use acir::{ circuit::{ - opcodes::{BlackBoxFuncCall, ConstantOrWitnessEnum, FunctionInput}, 
+ opcodes::{BlackBoxFuncCall, FunctionInput}, Circuit, ExpressionWidth, Opcode, PublicInputs, }, native_types::{Expression, Witness}, diff --git a/noir/noir-repo/compiler/fm/Cargo.toml b/noir/noir-repo/compiler/fm/Cargo.toml index 1a356d93d89..b48f445be36 100644 --- a/noir/noir-repo/compiler/fm/Cargo.toml +++ b/noir/noir-repo/compiler/fm/Cargo.toml @@ -10,6 +10,7 @@ license.workspace = true [dependencies] codespan-reporting.workspace = true +iter-extended.workspace = true serde.workspace = true [dev-dependencies] diff --git a/noir/noir-repo/compiler/fm/src/lib.rs b/noir/noir-repo/compiler/fm/src/lib.rs index 2e52d802479..37da29fc982 100644 --- a/noir/noir-repo/compiler/fm/src/lib.rs +++ b/noir/noir-repo/compiler/fm/src/lib.rs @@ -7,6 +7,7 @@ mod file_map; pub use file_map::{File, FileId, FileMap, PathString}; +use iter_extended::vecmap; // Re-export for the lsp pub use codespan_reporting::files as codespan_files; @@ -103,6 +104,26 @@ impl FileManager { pub fn name_to_id(&self, file_name: PathBuf) -> Option<FileId> { self.file_map.get_file_id(&PathString::from_path(file_name)) } + + /// Find a file by its path suffix, e.g. "src/main.nr" is a suffix of + /// "some_dir/package_name/src/main.nr" + pub fn find_by_path_suffix(&self, suffix: &str) -> Result<Option<FileId>, Vec<PathBuf>> { + let suffix_path: Vec<_> = Path::new(suffix).components().rev().collect(); + let results: Vec<_> = self + .path_to_id + .iter() + .filter(|(path, _id)| { + path.components().rev().zip(suffix_path.iter()).all(|(x, y)| &x == y) + }) + .collect(); + if results.is_empty() { + Ok(None) + } else if results.len() == 1 { + Ok(Some(*results[0].1)) + } else { + Err(vecmap(results, |(path, _id)| path.clone())) + } + } } pub trait NormalizePath { diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index 4ba9b85f967..2b0769e30d4 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -103,6 +103,11 @@ pub struct CompileOptions { #[arg(long, hide = true)] pub use_legacy: bool, + /// Enable printing results of comptime evaluation: provide a path suffix + /// for the module to debug, e.g. 
"package_name/src/main.nr" + #[arg(long)] + pub debug_comptime_in_file: Option, + /// Outputs the paths to any modified artifacts #[arg(long, hide = true)] pub show_artifact_paths: bool, @@ -258,12 +263,14 @@ pub fn check_crate( deny_warnings: bool, disable_macros: bool, use_legacy: bool, + debug_comptime_in_file: Option<&str>, ) -> CompilationResult<()> { let macros: &[&dyn MacroProcessor] = if disable_macros { &[] } else { &[&aztec_macros::AztecMacro as &dyn MacroProcessor] }; let mut errors = vec![]; - let diagnostics = CrateDefMap::collect_defs(crate_id, context, use_legacy, macros); + let diagnostics = + CrateDefMap::collect_defs(crate_id, context, use_legacy, debug_comptime_in_file, macros); errors.extend(diagnostics.into_iter().map(|(error, file_id)| { let diagnostic = CustomDiagnostic::from(&error); diagnostic.in_file(file_id) @@ -301,6 +308,7 @@ pub fn compile_main( options.deny_warnings, options.disable_macros, options.use_legacy, + options.debug_comptime_in_file.as_deref(), )?; let main = context.get_main_function(&crate_id).ok_or_else(|| { @@ -342,6 +350,7 @@ pub fn compile_contract( options.deny_warnings, options.disable_macros, options.use_legacy, + options.debug_comptime_in_file.as_deref(), )?; // TODO: We probably want to error if contracts is empty diff --git a/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs b/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs index 47ce893d202..0e098d0d087 100644 --- a/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs +++ b/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs @@ -25,7 +25,7 @@ fn stdlib_does_not_produce_constant_warnings() -> Result<(), ErrorsAndWarnings> let root_crate_id = prepare_crate(&mut context, file_name); let ((), warnings) = - noirc_driver::check_crate(&mut context, root_crate_id, false, false, false)?; + noirc_driver::check_crate(&mut context, root_crate_id, false, false, false, None)?; assert_eq!(warnings, Vec::new(), "stdlib is producing {} warnings", warnings.len()); diff --git a/noir/noir-repo/compiler/noirc_errors/src/reporter.rs b/noir/noir-repo/compiler/noirc_errors/src/reporter.rs index ea7c97b1f25..3ce0f268715 100644 --- a/noir/noir-repo/compiler/noirc_errors/src/reporter.rs +++ b/noir/noir-repo/compiler/noirc_errors/src/reporter.rs @@ -19,6 +19,7 @@ pub enum DiagnosticKind { Error, Bug, Warning, + Info, } /// A count of errors that have been already reported to stderr @@ -37,30 +38,57 @@ impl CustomDiagnostic { } } - pub fn simple_error( + fn simple_with_kind( primary_message: String, secondary_message: String, secondary_span: Span, + kind: DiagnosticKind, ) -> CustomDiagnostic { CustomDiagnostic { message: primary_message, secondaries: vec![CustomLabel::new(secondary_message, secondary_span)], notes: Vec::new(), - kind: DiagnosticKind::Error, + kind, } } + pub fn simple_error( + primary_message: String, + secondary_message: String, + secondary_span: Span, + ) -> CustomDiagnostic { + Self::simple_with_kind( + primary_message, + secondary_message, + secondary_span, + DiagnosticKind::Error, + ) + } + pub fn simple_warning( primary_message: String, secondary_message: String, secondary_span: Span, ) -> CustomDiagnostic { - CustomDiagnostic { - message: primary_message, - secondaries: vec![CustomLabel::new(secondary_message, secondary_span)], - notes: Vec::new(), - kind: DiagnosticKind::Warning, - } + Self::simple_with_kind( + primary_message, + secondary_message, + secondary_span, + DiagnosticKind::Warning, + ) + } + + pub fn simple_info( + primary_message: 
String, + secondary_message: String, + secondary_span: Span, + ) -> CustomDiagnostic { + Self::simple_with_kind( + primary_message, + secondary_message, + secondary_span, + DiagnosticKind::Info, + ) } pub fn simple_bug( @@ -96,6 +124,10 @@ impl CustomDiagnostic { matches!(self.kind, DiagnosticKind::Warning) } + pub fn is_info(&self) -> bool { + matches!(self.kind, DiagnosticKind::Info) + } + pub fn is_bug(&self) -> bool { matches!(self.kind, DiagnosticKind::Bug) } @@ -191,6 +223,7 @@ fn convert_diagnostic( ) -> Diagnostic { let diagnostic = match (cd.kind, deny_warnings) { (DiagnosticKind::Warning, false) => Diagnostic::warning(), + (DiagnosticKind::Info, _) => Diagnostic::note(), (DiagnosticKind::Bug, ..) => Diagnostic::bug(), _ => Diagnostic::error(), }; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs b/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs index 2b328898257..2c7ec0f8e1a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs @@ -19,8 +19,6 @@ use serde::{Deserialize, Serialize}; pub enum RuntimeError { #[error(transparent)] InternalError(#[from] InternalError), - #[error("Index out of bounds, array has size {array_size}, but index was {index}")] - IndexOutOfBounds { index: usize, array_size: usize, call_stack: CallStack }, #[error("Range constraint of {num_bits} bits is too large for the Field size")] InvalidRangeConstraint { num_bits: u32, call_stack: CallStack }, #[error("The value `{value:?}` cannot fit into `{typ}` which has range `{range}`")] @@ -145,7 +143,6 @@ impl RuntimeError { | InternalError::UndeclaredAcirVar { call_stack } | InternalError::Unexpected { call_stack, .. }, ) - | RuntimeError::IndexOutOfBounds { call_stack, .. } | RuntimeError::InvalidRangeConstraint { call_stack, .. } | RuntimeError::TypeConversion { call_stack, .. } | RuntimeError::UnInitialized { call_stack, .. } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index cfcc7a9a997..1bdc9aaf4eb 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -975,6 +975,8 @@ impl<'a> Context<'a> { .into()) } }; + // Ensure that array id is fully resolved. + let array = dfg.resolve(array); let array_id = dfg.resolve(array); let array_typ = dfg.type_of_value(array_id); @@ -992,7 +994,6 @@ impl<'a> Context<'a> { // If we find one, we will use it when computing the index under the enable_side_effect predicate // If not, array_get(..) will use a fallback costing one multiplication in the worst case. // cf. https://github.com/noir-lang/noir/pull/4971 - // For simplicity we compute the offset only for simple arrays let is_simple_array = dfg.instruction_results(instruction).len() == 1 && can_omit_element_sizes_array(&array_typ); @@ -1085,45 +1086,30 @@ impl<'a> Context<'a> { } }; - let side_effects_always_enabled = - self.acir_context.is_constant_one(&self.current_side_effects_enabled_var); - let index_out_of_bounds = index >= array_size; - - // Note that the value of `side_effects_always_enabled` doesn't affect the value which we return here for valid - // indices, just whether we return an error for invalid indices at compile time or defer until execution. 
- match (side_effects_always_enabled, index_out_of_bounds) { - (true, false) => { - let value = match store_value { - Some(store_value) => AcirValue::Array(array.update(index, store_value)), - None => array[index].clone(), - }; + if index >= array_size { + return Ok(false); + } + + if let Some(store_value) = store_value { + let side_effects_always_enabled = + self.acir_context.is_constant_one(&self.current_side_effects_enabled_var); + if side_effects_always_enabled { + // If we know that this write will always occur then we can perform it at compile time. + let value = AcirValue::Array(array.update(index, store_value)); self.define_result(dfg, instruction, value); Ok(true) + } else { + // If a predicate is applied however we must wait until runtime. + Ok(false) } - (false, false) => { - if store_value.is_none() { - // If there is a predicate and the index is not out of range, we can optimistically perform the - // read at compile time as if the predicate is true. - // - // This is as if the predicate is false, any side-effects will be disabled so the value returned - // will not affect the rest of execution. - self.define_result(dfg, instruction, array[index].clone()); - Ok(true) - } else { - // We do not do this for a array writes however. - Ok(false) - } - } - - // Report the error if side effects are enabled. - (true, true) => { - let call_stack = self.acir_context.get_call_stack(); - Err(RuntimeError::IndexOutOfBounds { index, array_size, call_stack }) - } - // Index is out of bounds but predicate may result in this array operation being skipped - // so we don't return an error now. - (false, true) => Ok(false), + } else { + // If the index is not out of range, we can optimistically perform the read at compile time + // as if the predicate were true. This is because, if the predicate were to resolve to false, + // the result would not affect the rest of circuit execution. + let value = array[index].clone(); + self.define_result(dfg, instruction, value); + Ok(true) } } @@ -1138,13 +1124,14 @@ impl<'a> Context<'a> { /// It is a dummy value because in the case of a false predicate, the value stored at the requested index will be itself. fn convert_array_operation_inputs( &mut self, - array: ValueId, + array_id: ValueId, dfg: &DataFlowGraph, index: ValueId, store_value: Option<ValueId>, offset: usize, ) -> Result<(AcirVar, Option<AcirValue>), RuntimeError> { - let (array_id, array_typ, block_id) = self.check_array_is_initialized(array, dfg)?; + let array_typ = dfg.type_of_value(array_id); + let block_id = self.ensure_array_is_initialized(array_id, dfg)?; let index_var = self.convert_numeric_value(index, dfg)?; let index_var = self.get_flattened_index(&array_typ, array_id, index_var, dfg)?; @@ -1263,22 +1250,22 @@ impl<'a> Context<'a> { dfg: &DataFlowGraph, mut index_side_effect: bool, ) -> Result<AcirValue, RuntimeError> { - let (array_id, _, block_id) = self.check_array_is_initialized(array, dfg)?; + let block_id = self.ensure_array_is_initialized(array, dfg)?; let results = dfg.instruction_results(instruction); let res_typ = dfg.type_of_value(results[0]); // Get operations to call-data parameters are replaced by a get to the call-data-bus array if let Some(call_data) = self.data_bus.call_data { - if self.data_bus.call_data_map.contains_key(&array_id) { + if self.data_bus.call_data_map.contains_key(&array) { // TODO: the block_id of call-data must be notified to the backend // TODO: should we do the same for return-data? 
let type_size = res_typ.flattened_size(); let type_size = self.acir_context.add_constant(FieldElement::from(type_size as i128)); let offset = self.acir_context.mul_var(var_index, type_size)?; - let bus_index = self.acir_context.add_constant(FieldElement::from( - self.data_bus.call_data_map[&array_id] as i128, - )); + let bus_index = self + .acir_context + .add_constant(FieldElement::from(self.data_bus.call_data_map[&array] as i128)); let new_index = self.acir_context.add_var(offset, bus_index)?; return self.array_get(instruction, call_data, new_index, dfg, index_side_effect); } @@ -1292,8 +1279,7 @@ impl<'a> Context<'a> { let mut value = self.array_get_value(&res_typ, block_id, &mut var_index)?; if let AcirValue::Var(value_var, typ) = &value { - let array_id = dfg.resolve(array_id); - let array_typ = dfg.type_of_value(array_id); + let array_typ = dfg.type_of_value(array); if let (Type::Numeric(numeric_type), AcirType::NumericType(num)) = (array_typ.first(), typ) { @@ -1377,7 +1363,7 @@ impl<'a> Context<'a> { } }; - let (array_id, array_typ, block_id) = self.check_array_is_initialized(array, dfg)?; + let block_id = self.ensure_array_is_initialized(array, dfg)?; // Every array has a length in its type, so we fetch that from // the SSA IR. @@ -1386,10 +1372,11 @@ impl<'a> Context<'a> { // However, this size is simply the capacity of a slice. The capacity is dependent upon the witness // and may contain data for which we want to restrict access. The true slice length is tracked in a // a separate SSA value and restrictions on slice indices should be generated elsewhere in the SSA. + let array_typ = dfg.type_of_value(array); let array_len = if !array_typ.contains_slice_element() { array_typ.flattened_size() } else { - self.flattened_slice_size(array_id, dfg) + self.flattened_slice_size(array, dfg) }; // Since array_set creates a new array, we create a new block ID for this @@ -1412,18 +1399,13 @@ impl<'a> Context<'a> { self.array_set_value(&store_value, result_block_id, &mut var_index)?; let element_type_sizes = if !can_omit_element_sizes_array(&array_typ) { - let acir_value = self.convert_value(array_id, dfg); - Some(self.init_element_type_sizes_array( - &array_typ, - array_id, - Some(&acir_value), - dfg, - )?) + let acir_value = self.convert_value(array, dfg); + Some(self.init_element_type_sizes_array(&array_typ, array, Some(&acir_value), dfg)?) } else { None }; - let value_types = self.convert_value(array_id, dfg).flat_numeric_types(); + let value_types = self.convert_value(array, dfg).flat_numeric_types(); // Compiler sanity check assert_eq!(value_types.len(), array_len, "ICE: The length of the flattened type array should match the length of the dynamic array"); @@ -1469,37 +1451,33 @@ impl<'a> Context<'a> { Ok(()) } - fn check_array_is_initialized( + fn ensure_array_is_initialized( &mut self, array: ValueId, dfg: &DataFlowGraph, - ) -> Result<(ValueId, Type, BlockId), RuntimeError> { - // Fetch the internal SSA ID for the array - let array_id = dfg.resolve(array); - - let array_typ = dfg.type_of_value(array_id); - + ) -> Result<BlockId, RuntimeError> { // Use the SSA ID to get or create its block ID - let block_id = self.block_id(&array_id); + let block_id = self.block_id(&array); // Check if the array has already been initialized in ACIR gen // if not, we initialize it using the values from SSA let already_initialized = self.initialized_arrays.contains(&block_id); if !already_initialized { - let value = &dfg[array_id]; + let value = &dfg[array]; match value { Value::Array { .. } | Value::Instruction { .. 
} => { - let value = self.convert_value(array_id, dfg); + let value = self.convert_value(array, dfg); + let array_typ = dfg.type_of_value(array); let len = if !array_typ.contains_slice_element() { array_typ.flattened_size() } else { - self.flattened_slice_size(array_id, dfg) + self.flattened_slice_size(array, dfg) }; self.initialize_array(block_id, len, Some(value))?; } _ => { return Err(InternalError::General { - message: format!("Array {array_id} should be initialized"), + message: format!("Array {array} should be initialized"), call_stack: self.acir_context.get_call_stack(), } .into()); @@ -1507,7 +1485,7 @@ impl<'a> Context<'a> { } } - Ok((array_id, array_typ, block_id)) + Ok(block_id) } fn init_element_type_sizes_array( @@ -1761,7 +1739,7 @@ impl<'a> Context<'a> { /// Converts an SSA terminator's return values into their ACIR representations fn get_num_return_witnesses( - &mut self, + &self, terminator: &TerminatorInstruction, dfg: &DataFlowGraph, ) -> usize { @@ -1815,7 +1793,7 @@ impl<'a> Context<'a> { has_constant_return |= self.acir_context.is_constant(&acir_var); if is_databus { // We do not return value for the data bus. - self.check_array_is_initialized( + self.ensure_array_is_initialized( self.data_bus.return_data.expect( "`is_databus == true` implies `data_bus.return_data` is `Some`", ), @@ -2182,8 +2160,9 @@ impl<'a> Context<'a> { Ok(vec![AcirValue::Var(self.acir_context.add_constant(len), AcirType::field())]) } Intrinsic::AsSlice => { - let (slice_contents, slice_typ, block_id) = - self.check_array_is_initialized(arguments[0], dfg)?; + let slice_contents = arguments[0]; + let slice_typ = dfg.type_of_value(slice_contents); + let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?; assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let result_block_id = self.block_id(&result_ids[1]); @@ -2227,8 +2206,9 @@ impl<'a> Context<'a> { Intrinsic::SlicePushBack => { // arguments = [slice_length, slice_contents, ...elements_to_push] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let (slice_contents, slice_typ, _) = - self.check_array_is_initialized(arguments[1], dfg)?; + let slice_contents = arguments[1]; + let slice_typ = dfg.type_of_value(slice_contents); + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let slice = self.convert_value(slice_contents, dfg); @@ -2294,9 +2274,8 @@ impl<'a> Context<'a> { Intrinsic::SlicePushFront => { // arguments = [slice_length, slice_contents, ...elements_to_push] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - - let (slice_contents, slice_typ, _) = - self.check_array_is_initialized(arguments[1], dfg)?; + let slice_contents = arguments[1]; + let slice_typ = dfg.type_of_value(slice_contents); assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let slice: AcirValue = self.convert_value(slice_contents, dfg); @@ -2359,6 +2338,7 @@ impl<'a> Context<'a> { Intrinsic::SlicePopBack => { // arguments = [slice_length, slice_contents] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; + let slice_contents = arguments[1]; let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.sub_var(slice_length, one)?; @@ -2367,8 +2347,8 @@ impl<'a> Context<'a> { // the elements stored at that index will no longer be able to be accessed. 
let mut var_index = new_slice_length; - let (slice_contents, slice_typ, block_id) = - self.check_array_is_initialized(arguments[1], dfg)?; + let slice_typ = dfg.type_of_value(slice_contents); + let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?; assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let mut popped_elements = Vec::new(); @@ -2393,9 +2373,11 @@ impl<'a> Context<'a> { Intrinsic::SlicePopFront => { // arguments = [slice_length, slice_contents] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; + let slice_contents = arguments[1]; + + let slice_typ = dfg.type_of_value(slice_contents); + let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?; - let (slice_contents, slice_typ, block_id) = - self.check_array_is_initialized(arguments[1], dfg)?; assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let one = self.acir_context.add_constant(FieldElement::one()); @@ -2434,9 +2416,11 @@ impl<'a> Context<'a> { Intrinsic::SliceInsert => { // arguments = [slice_length, slice_contents, insert_index, ...elements_to_insert] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; + let slice_contents = arguments[1]; + + let slice_typ = dfg.type_of_value(slice_contents); + let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?; - let (slice_contents, slice_typ, block_id) = - self.check_array_is_initialized(arguments[1], dfg)?; assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let slice = self.convert_value(slice_contents, dfg); @@ -2573,9 +2557,11 @@ impl<'a> Context<'a> { Intrinsic::SliceRemove => { // arguments = [slice_length, slice_contents, remove_index] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; + let slice_contents = arguments[1]; + + let slice_typ = dfg.type_of_value(slice_contents); + let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?; - let (slice_contents, slice_typ, block_id) = - self.check_array_is_initialized(arguments[1], dfg)?; assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let slice = self.convert_value(slice_contents, dfg); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index b08283a9ceb..8cbae732ef9 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -288,25 +288,33 @@ impl Instruction { } /// Indicates if the instruction can be safely replaced with the results of another instruction with the same inputs. - pub(crate) fn can_be_deduplicated(&self, dfg: &DataFlowGraph) -> bool { + /// If `deduplicate_with_predicate` is set, we assume we're deduplicating with the instruction + /// and its predicate, rather than just the instruction. Setting this means instructions that + /// rely on predicates can be deduplicated as well. + pub(crate) fn can_be_deduplicated( + &self, + dfg: &DataFlowGraph, + deduplicate_with_predicate: bool, + ) -> bool { use Instruction::*; match self { // These either have side-effects or interact with memory - Constrain(..) - | EnableSideEffects { .. } + EnableSideEffects { .. } | Allocate | Load { .. } | Store { .. } | IncrementRc { .. } - | DecrementRc { .. } - | RangeCheck { .. } => false, + | DecrementRc { .. } => false, Call { func, .. 
} => match dfg[*func] { Value::Intrinsic(intrinsic) => !intrinsic.has_side_effects(), _ => false, }, + // We can deduplicate these instructions if we know the predicate is also the same. + Constrain(..) | RangeCheck { .. } => deduplicate_with_predicate, + // These can have different behavior depending on the EnableSideEffectsIf context. // Replacing them with a similar instruction potentially enables replacing an instruction // with one that was disabled. See @@ -317,7 +325,9 @@ impl Instruction { | Truncate { .. } | IfElse { .. } | ArrayGet { .. } - | ArraySet { .. } => !self.requires_acir_gen_predicate(dfg), + | ArraySet { .. } => { + deduplicate_with_predicate || !self.requires_acir_gen_predicate(dfg) + } } } @@ -372,7 +382,7 @@ impl Instruction { } /// If true the instruction will depend on enable_side_effects context during acir-gen - fn requires_acir_gen_predicate(&self, dfg: &DataFlowGraph) -> bool { + pub(crate) fn requires_acir_gen_predicate(&self, dfg: &DataFlowGraph) -> bool { match self { Instruction::Binary(binary) if matches!(binary.operator, BinaryOp::Div | BinaryOp::Mod) => diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index d7cd366e9af..160105d27e6 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -87,12 +87,16 @@ struct Context { block_queue: Vec<BasicBlockId>, } +/// HashMap from (Instruction, side_effects_enabled_var) to the results of the instruction. +/// Stored as a two-level map to avoid cloning Instructions during the `.get` call. +type InstructionResultCache = HashMap<Instruction, HashMap<Option<ValueId>, Vec<ValueId>>>; + impl Context { fn fold_constants_in_block(&mut self, function: &mut Function, block: BasicBlockId) { let instructions = function.dfg[block].take_instructions(); // Cache of instructions without any side-effects along with their outputs. - let mut cached_instruction_results: HashMap<Instruction, Vec<ValueId>> = HashMap::default(); + let mut cached_instruction_results = HashMap::default(); // Contains sets of values which are constrained to be equivalent to each other. // @@ -124,7 +128,7 @@ impl Context { dfg: &mut DataFlowGraph, block: BasicBlockId, id: InstructionId, - instruction_result_cache: &mut HashMap<Instruction, Vec<ValueId>>, + instruction_result_cache: &mut InstructionResultCache, constraint_simplification_mappings: &mut HashMap<ValueId, HashMap<ValueId, ValueId>>, side_effects_enabled_var: &mut ValueId, ) { @@ -134,7 +138,9 @@ impl Context { let old_results = dfg.instruction_results(id).to_vec(); // If a copy of this instruction exists earlier in the block, then reuse the previous results. 
- if let Some(cached_results) = instruction_result_cache.get(&instruction) { + if let Some(cached_results) = + Self::get_cached(dfg, instruction_result_cache, &instruction, *side_effects_enabled_var) + { Self::replace_result_ids(dfg, &old_results, cached_results); return; } @@ -150,6 +156,7 @@ impl Context { dfg, instruction_result_cache, constraint_simplification_mapping, + *side_effects_enabled_var, ); // If we just inserted an `Instruction::EnableSideEffects`, we need to update `side_effects_enabled_var` @@ -224,8 +231,9 @@ impl Context { instruction: Instruction, instruction_results: Vec<ValueId>, dfg: &DataFlowGraph, - instruction_result_cache: &mut HashMap<Instruction, Vec<ValueId>>, + instruction_result_cache: &mut InstructionResultCache, constraint_simplification_mapping: &mut HashMap<ValueId, ValueId>, + side_effects_enabled_var: ValueId, ) { if self.use_constraint_info { // If the instruction was a constraint, then create a link between the two `ValueId`s @@ -258,8 +266,15 @@ impl Context { // If the instruction doesn't have side-effects and if it won't interact with enable_side_effects during acir_gen, // we cache the results so we can reuse them if the same instruction appears again later in the block. - if instruction.can_be_deduplicated(dfg) { - instruction_result_cache.insert(instruction, instruction_results); + if instruction.can_be_deduplicated(dfg, self.use_constraint_info) { + let use_predicate = + self.use_constraint_info && instruction.requires_acir_gen_predicate(dfg); + let predicate = use_predicate.then_some(side_effects_enabled_var); + + instruction_result_cache + .entry(instruction) + .or_default() + .insert(predicate, instruction_results); } } @@ -273,6 +288,25 @@ impl Context { dfg.set_value_from_id(*old_result, *new_result); } } + + fn get_cached<'a>( + dfg: &DataFlowGraph, + instruction_result_cache: &'a mut InstructionResultCache, + instruction: &Instruction, + side_effects_enabled_var: ValueId, + ) -> Option<&'a Vec<ValueId>> { + let results_for_instruction = instruction_result_cache.get(instruction); + + // See if there's a cached version with no predicate first + if let Some(results) = results_for_instruction.and_then(|map| map.get(&None)) { + return Some(results); + } + + let predicate = + instruction.requires_acir_gen_predicate(dfg).then_some(side_effects_enabled_var); + + results_for_instruction.and_then(|map| map.get(&predicate)) + } } #[cfg(test)] @@ -725,4 +759,86 @@ mod test { let ending_instruction_count = instructions.len(); assert_eq!(starting_instruction_count, ending_instruction_count); } + + #[test] + fn deduplicate_instructions_with_predicates() { + // fn main f0 { + // b0(v0: bool, v1: bool, v2: [u32; 2]): + // enable_side_effects v0 + // v3 = array_get v2, index u32 0 + // v4 = array_set v2, index u32 1, value: u32 2 + // v5 = array_get v4, index u32 0 + // constrain_eq v3, v5 + // enable_side_effects v1 + // v6 = array_get v2, index u32 0 + // v7 = array_set v2, index u32 1, value: u32 2 + // v8 = array_get v7, index u32 0 + // constrain_eq v6, v8 + // enable_side_effects v0 + // v9 = array_get v2, index u32 0 + // v10 = array_set v2, index u32 1, value: u32 2 + // v11 = array_get v10, index u32 0 + // constrain_eq v9, v11 + // } + let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id); + + let v0 = builder.add_parameter(Type::bool()); + let v1 = builder.add_parameter(Type::bool()); + let v2 = builder.add_parameter(Type::Array(Rc::new(vec![Type::field()]), 2)); + + let zero = builder.numeric_constant(0u128, Type::length_type()); + let one 
= builder.numeric_constant(1u128, Type::length_type()); + let two = builder.numeric_constant(2u128, Type::length_type()); + + builder.insert_enable_side_effects_if(v0); + let v3 = builder.insert_array_get(v2, zero, Type::length_type()); + let v4 = builder.insert_array_set(v2, one, two); + let v5 = builder.insert_array_get(v4, zero, Type::length_type()); + builder.insert_constrain(v3, v5, None); + + builder.insert_enable_side_effects_if(v1); + let v6 = builder.insert_array_get(v2, zero, Type::length_type()); + let v7 = builder.insert_array_set(v2, one, two); + let v8 = builder.insert_array_get(v7, zero, Type::length_type()); + builder.insert_constrain(v6, v8, None); + + // We expect all these instructions after the 'enable side effects' instruction to be removed. + builder.insert_enable_side_effects_if(v0); + let v9 = builder.insert_array_get(v2, zero, Type::length_type()); + let v10 = builder.insert_array_set(v2, one, two); + let v11 = builder.insert_array_get(v10, zero, Type::length_type()); + builder.insert_constrain(v9, v11, None); + + let ssa = builder.finish(); + println!("{ssa}"); + + let main = ssa.main(); + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 15); + + // Expected output: + // + // fn main f0 { + // b0(v0: bool, v1: bool, v2: [Field; 2]): + // enable_side_effects v0 + // v3 = array_get v2, index Field 0 + // v4 = array_set v2, index Field 1, value: Field 2 + // v5 = array_get v4, index Field 0 + // constrain_eq v3, v5 + // enable_side_effects v1 + // v7 = array_set v2, index Field 1, value: Field 2 + // v8 = array_get v7, index Field 0 + // constrain_eq v3, v8 + // enable_side_effects v0 + // } + let ssa = ssa.fold_constants_using_constraints(); + println!("{ssa}"); + + let main = ssa.main(); + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 10); + } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs index ad79af4e63d..5cda8787241 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -9,25 +9,25 @@ use crate::{ UnresolvedTypeExpression, }, hir::{ - comptime::{self, Interpreter, InterpreterError}, + comptime::{self, InterpreterError}, resolution::{errors::ResolverError, resolver::LambdaContext}, type_check::TypeCheckError, }, hir_def::{ expr::{ HirArrayLiteral, HirBinaryOp, HirBlockExpression, HirCallExpression, HirCastExpression, - HirConstructorExpression, HirIfExpression, HirIndexExpression, HirInfixExpression, - HirLambda, HirMemberAccess, HirMethodCallExpression, HirMethodReference, - HirPrefixExpression, + HirConstructorExpression, HirExpression, HirIfExpression, HirIndexExpression, + HirInfixExpression, HirLambda, HirMemberAccess, HirMethodCallExpression, + HirMethodReference, HirPrefixExpression, }, traits::TraitConstraint, }, macros_api::{ - BlockExpression, CallExpression, CastExpression, Expression, ExpressionKind, HirExpression, - HirLiteral, HirStatement, Ident, IndexExpression, Literal, MemberAccessExpression, + BlockExpression, CallExpression, CastExpression, Expression, ExpressionKind, HirLiteral, + HirStatement, Ident, IndexExpression, Literal, MemberAccessExpression, MethodCallExpression, PrefixExpression, }, - node_interner::{DefinitionKind, ExprId, FuncId, ReferenceId}, + node_interner::{DefinitionKind, ExprId, FuncId, ReferenceId, TraitMethodId}, 
token::Tokens, QuotedType, Shared, StructType, Type, }; @@ -39,7 +39,7 @@ impl<'context> Elaborator<'context> { let (hir_expr, typ) = match expr.kind { ExpressionKind::Literal(literal) => self.elaborate_literal(literal, expr.span), ExpressionKind::Block(block) => self.elaborate_block(block), - ExpressionKind::Prefix(prefix) => self.elaborate_prefix(*prefix), + ExpressionKind::Prefix(prefix) => return self.elaborate_prefix(*prefix), ExpressionKind::Index(index) => self.elaborate_index(*index), ExpressionKind::Call(call) => self.elaborate_call(*call, expr.span), ExpressionKind::MethodCall(call) => self.elaborate_method_call(*call, expr.span), @@ -227,11 +227,22 @@ impl<'context> Elaborator<'context> { (HirExpression::Literal(HirLiteral::FmtStr(str, fmt_str_idents)), typ) } - fn elaborate_prefix(&mut self, prefix: PrefixExpression) -> (HirExpression, Type) { + fn elaborate_prefix(&mut self, prefix: PrefixExpression) -> (ExprId, Type) { let span = prefix.rhs.span; let (rhs, rhs_type) = self.elaborate_expression(prefix.rhs); - let ret_type = self.type_check_prefix_operand(&prefix.operator, &rhs_type, span); - (HirExpression::Prefix(HirPrefixExpression { operator: prefix.operator, rhs }), ret_type) + let trait_id = self.interner.get_prefix_operator_trait_method(&prefix.operator); + + let operator = prefix.operator; + let expr = + HirExpression::Prefix(HirPrefixExpression { operator, rhs, trait_method_id: trait_id }); + let expr_id = self.interner.push_expr(expr); + self.interner.push_expr_location(expr_id, span, self.file); + + let result = self.prefix_operand_type_rules(&operator, &rhs_type, span); + let typ = self.handle_operand_type_rules_result(result, &rhs_type, trait_id, expr_id, span); + + self.interner.push_expr_type(expr_id, typ.clone()); + (expr_id, typ) } fn elaborate_index(&mut self, index_expr: IndexExpression) -> (HirExpression, Type) { @@ -440,8 +451,13 @@ impl<'context> Elaborator<'context> { let mut unseen_fields = struct_type.borrow().field_names(); for (field_name, field) in fields { - let expected_type = field_types.iter().find(|(name, _)| name == &field_name.0.contents); - let expected_type = expected_type.map(|(_, typ)| typ).unwrap_or(&Type::Error); + let expected_field_with_index = field_types + .iter() + .enumerate() + .find(|(_, (name, _))| name == &field_name.0.contents); + let expected_index = expected_field_with_index.map(|(index, _)| index); + let expected_type = + expected_field_with_index.map(|(_, (_, typ))| typ).unwrap_or(&Type::Error); let field_span = field.span; let (resolved, field_type) = self.elaborate_expression(field); @@ -468,6 +484,14 @@ impl<'context> Elaborator<'context> { }); } + if let Some(expected_index) = expected_index { + let struct_id = struct_type.borrow().id; + let referenced = ReferenceId::StructMember(struct_id, expected_index); + let reference = + ReferenceId::Reference(Location::new(field_name.span(), self.file), false); + self.interner.add_reference(referenced, reference); + } + ret.push((field_name, resolved)); } @@ -489,10 +513,11 @@ impl<'context> Elaborator<'context> { ) -> (ExprId, Type) { let (lhs, lhs_type) = self.elaborate_expression(access.lhs); let rhs = access.rhs; + let rhs_span = rhs.span(); // `is_offset` is only used when lhs is a reference and we want to return a reference to rhs let access = HirMemberAccess { lhs, rhs, is_offset: false }; let expr_id = self.intern_expr(HirExpression::MemberAccess(access.clone()), span); - let typ = self.type_check_member_access(access, expr_id, lhs_type, span); + let typ = 
self.type_check_member_access(access, expr_id, lhs_type, rhs_span); self.interner.push_expr_type(expr_id, typ.clone()); (expr_id, typ) } @@ -527,19 +552,38 @@ impl<'context> Elaborator<'context> { let expr_id = self.interner.push_expr(expr); self.interner.push_expr_location(expr_id, span, self.file); - let typ = match self.infix_operand_type_rules(&lhs_type, &operator, &rhs_type, span) { + let result = self.infix_operand_type_rules(&lhs_type, &operator, &rhs_type, span); + let typ = + self.handle_operand_type_rules_result(result, &lhs_type, Some(trait_id), expr_id, span); + + self.interner.push_expr_type(expr_id, typ.clone()); + (expr_id, typ) + } + + fn handle_operand_type_rules_result( + &mut self, + result: Result<(Type, bool), TypeCheckError>, + operand_type: &Type, + trait_id: Option, + expr_id: ExprId, + span: Span, + ) -> Type { + match result { Ok((typ, use_impl)) => { if use_impl { + let trait_id = + trait_id.expect("ice: expected some trait_id when use_impl is true"); + // Delay checking the trait constraint until the end of the function. // Checking it now could bind an unbound type variable to any type // that implements the trait. let constraint = TraitConstraint { - typ: lhs_type.clone(), + typ: operand_type.clone(), trait_id: trait_id.trait_id, trait_generics: Vec::new(), }; self.push_trait_constraint(constraint, expr_id); - self.type_check_operator_method(expr_id, trait_id, &lhs_type, span); + self.type_check_operator_method(expr_id, trait_id, operand_type, span); } typ } @@ -547,10 +591,7 @@ impl<'context> Elaborator<'context> { self.push_err(error); Type::Error } - }; - - self.interner.push_expr_type(expr_id, typ.clone()); - (expr_id, typ) + } } fn elaborate_if(&mut self, if_expr: IfExpression) -> (HirExpression, Type) { @@ -660,12 +701,20 @@ impl<'context> Elaborator<'context> { // call is not yet solved for. 
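// Illustrative aside (not part of the patch): the hunks above make prefix expressions
// carry a trait method id so that `-x` and `!x` on user-defined types can later be
// lowered to a trait-impl method call instead of a primitive operation. Rust's own
// operator traits have the same shape; `Wrapped` and its impls below are invented for
// this sketch only and are not types from the diff.
use std::ops::{Neg, Not};

#[derive(Debug, Clone, Copy, PartialEq)]
struct Wrapped(i32);

impl Neg for Wrapped {
    type Output = Wrapped;
    // `-w` resolves to this trait method, analogous to the impl call the
    // monomorphizer emits for an overloaded prefix operator.
    fn neg(self) -> Wrapped {
        Wrapped(-self.0)
    }
}

impl Not for Wrapped {
    type Output = Wrapped;
    fn not(self) -> Wrapped {
        Wrapped(!self.0)
    }
}

fn main() {
    let w = Wrapped(5);
    // The operator sugar and the explicit trait-method form are the same call.
    assert_eq!(-w, Neg::neg(w));
    assert_eq!(!w, Not::not(w));
    // Primitive operands keep using the built-in operation (the `use_impl == false` path).
    assert_eq!(-5i32, Neg::neg(5i32));
    println!("prefix operator desugaring ok");
}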
self.function_context.push(Default::default()); let (block, _typ) = self.elaborate_block_expression(block); - self.check_and_pop_function_context(); - let mut interpreter = - Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); + self.check_and_pop_function_context(); + let mut interpreter_errors = vec![]; + let mut interpreter = self.setup_interpreter(&mut interpreter_errors); let value = interpreter.evaluate_block(block); - self.inline_comptime_value(value, span) + self.include_interpreter_errors(interpreter_errors); + let (id, typ) = self.inline_comptime_value(value, span); + + let location = self.interner.id_location(id); + self.debug_comptime(location, |interner| { + interner.expression(&id).to_display_ast(interner, location.span).kind + }); + + (id, typ) } pub(super) fn inline_comptime_value( @@ -736,9 +785,9 @@ impl<'context> Elaborator<'context> { } }; - let mut interpreter = - Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); - + let file = self.file; + let mut interpreter_errors = vec![]; + let mut interpreter = self.setup_interpreter(&mut interpreter_errors); let mut comptime_args = Vec::new(); let mut errors = Vec::new(); @@ -748,17 +797,19 @@ impl<'context> Elaborator<'context> { let location = interpreter.interner.expr_location(&argument); comptime_args.push((arg, location)); } - Err(error) => errors.push((error.into(), self.file)), + Err(error) => errors.push((error.into(), file)), } } + let bindings = interpreter.interner.get_instantiation_bindings(func).clone(); + let result = interpreter.call_function(function, comptime_args, bindings, location); + self.include_interpreter_errors(interpreter_errors); + if !errors.is_empty() { self.errors.append(&mut errors); return None; } - let bindings = interpreter.interner.get_instantiation_bindings(func).clone(); - let result = interpreter.call_function(function, comptime_args, bindings, location); let (expr_id, typ) = self.inline_comptime_value(result, location.span); Some((self.interner.expression(&expr_id), typ)) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs index 6104da582a7..e1d104c4971 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs @@ -1,5 +1,6 @@ use std::{ collections::{BTreeMap, BTreeSet}, + fmt::Display, rc::Rc, }; @@ -157,6 +158,9 @@ pub struct Elaborator<'context> { /// up all currently visible definitions. The first scope is always the global scope. comptime_scopes: Vec>, + /// The scope of --debug-comptime, or None if unset + debug_comptime_in_file: Option, + /// These are the globals that have yet to be elaborated. /// This map is used to lazily evaluate these globals if they're encountered before /// they are elaborated (e.g. in a function's type or another global's RHS). 
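// Illustrative aside (not part of the patch): the elaborator changes above gate debug
// output for `comptime` evaluation on an optional file id. The sketch below mirrors
// only that shape; `FileId`, `Location`, and `Diagnostic` here are toy stand-ins, not
// the compiler's real types.
#[derive(Clone, Copy, PartialEq, Debug)]
struct FileId(u32);

#[derive(Clone, Copy, Debug)]
struct Location {
    file: FileId,
}

#[derive(Debug)]
struct Diagnostic(String);

// The closure producing the display form only runs when the evaluation happened in
// the file selected by the (hypothetical) debug filter.
fn debug_comptime<T: std::fmt::Display>(
    debug_comptime_in_file: Option<FileId>,
    location: Location,
    mut expr_f: impl FnMut() -> T,
    errors: &mut Vec<Diagnostic>,
) {
    if Some(location.file) == debug_comptime_in_file {
        errors.push(Diagnostic(format!("`comptime` expression ran: {}", expr_f())));
    }
}

fn main() {
    let mut errors = Vec::new();
    let here = Location { file: FileId(7) };
    debug_comptime(Some(FileId(7)), here, || "1 + 2 => 3", &mut errors);
    debug_comptime(None, here, || "ignored when the flag is unset", &mut errors);
    assert_eq!(errors.len(), 1);
    println!("{:?}", errors);
}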
@@ -178,7 +182,11 @@ struct FunctionContext { } impl<'context> Elaborator<'context> { - pub fn new(context: &'context mut Context, crate_id: CrateId) -> Self { + pub fn new( + context: &'context mut Context, + crate_id: CrateId, + debug_comptime_in_file: Option, + ) -> Self { Self { scopes: ScopeForest::default(), errors: Vec::new(), @@ -198,6 +206,7 @@ impl<'context> Elaborator<'context> { function_context: vec![FunctionContext::default()], current_trait_impl: None, comptime_scopes: vec![HashMap::default()], + debug_comptime_in_file, unresolved_globals: BTreeMap::new(), } } @@ -206,8 +215,9 @@ impl<'context> Elaborator<'context> { context: &'context mut Context, crate_id: CrateId, items: CollectedItems, + debug_comptime_in_file: Option, ) -> Vec<(CompilationError, FileId)> { - let mut this = Self::new(context, crate_id); + let mut this = Self::new(context, crate_id, debug_comptime_in_file); // Filter out comptime items to execute their functions first if needed. // This step is why comptime items can only refer to other comptime items @@ -1191,6 +1201,7 @@ impl<'context> Elaborator<'context> { let span = typ.struct_def.span; let fields = self.resolve_struct_fields(typ.struct_def, type_id); + let fields_len = fields.len(); self.interner.update_struct(type_id, |struct_def| { struct_def.set_fields(fields); @@ -1217,6 +1228,11 @@ impl<'context> Elaborator<'context> { } }); + for field_index in 0..fields_len { + self.interner + .add_definition_location(ReferenceId::StructMember(type_id, field_index)); + } + self.run_comptime_attributes_on_struct(attributes, type_id, span, &mut generated_items); } @@ -1276,15 +1292,15 @@ impl<'context> Elaborator<'context> { let DefinitionKind::Function(function) = definition.kind else { return Err((ResolverError::NonFunctionInAnnotation { span }.into(), self.file)); }; - let mut interpreter = - Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); - let location = Location::new(span, self.file); + let mut interpreter_errors = vec![]; + let mut interpreter = self.setup_interpreter(&mut interpreter_errors); let arguments = vec![(Value::StructDefinition(struct_id), location)]; let value = interpreter .call_function(function, arguments, TypeBindings::new(), location) .map_err(|error| error.into_compilation_error_pair())?; + self.include_interpreter_errors(interpreter_errors); if value != Value::Unit { let items = value @@ -1365,9 +1381,8 @@ impl<'context> Elaborator<'context> { let global = self.interner.get_global(global_id); let definition_id = global.definition_id; let location = global.location; - - let mut interpreter = - Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); + let mut interpreter_errors = vec![]; + let mut interpreter = self.setup_interpreter(&mut interpreter_errors); if let Err(error) = interpreter.evaluate_let(let_statement) { self.errors.push(error.into_compilation_error_pair()); @@ -1376,8 +1391,13 @@ impl<'context> Elaborator<'context> { .lookup_id(definition_id, location) .expect("The global should be defined since evaluate_let did not error"); + self.debug_comptime(location, |interner| { + interner.get_global(global_id).let_statement.to_display_ast(interner).kind + }); + self.interner.get_global_mut(global_id).value = Some(value); } + self.include_interpreter_errors(interpreter_errors); } fn define_function_metas( @@ -1473,6 +1493,10 @@ impl<'context> Elaborator<'context> { } } + fn include_interpreter_errors(&mut self, errors: Vec) { + 
self.errors.extend(errors.into_iter().map(InterpreterError::into_compilation_error_pair)); + } + /// True if we're currently within a `comptime` block, function, or global fn in_comptime_context(&self) -> bool { // The first context is the global context, followed by the function-specific context. @@ -1623,4 +1647,31 @@ impl<'context> Elaborator<'context> { } } } + + fn setup_interpreter<'a>( + &'a mut self, + interpreter_errors: &'a mut Vec<InterpreterError>, + ) -> Interpreter<'a> { + Interpreter::new( + self.interner, + &mut self.comptime_scopes, + self.crate_id, + self.debug_comptime_in_file, + interpreter_errors, + ) + } + + fn debug_comptime<T: Display, F: FnMut(&mut NodeInterner) -> T>( + &mut self, + location: Location, + mut expr_f: F, + ) { + if Some(location.file) == self.debug_comptime_in_file { + let displayed_expr = expr_f(self.interner); + self.errors.push(( + InterpreterError::debug_evaluate_comptime(displayed_expr, location).into(), + location.file, + )); + } + } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs index b1c9b1b37cc..7c920230b9d 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -202,10 +202,18 @@ impl<'context> Elaborator<'context> { new_definitions, ); - let referenced = ReferenceId::Struct(struct_type.borrow().id); + let struct_id = struct_type.borrow().id; + + let referenced = ReferenceId::Struct(struct_id); let reference = ReferenceId::Reference(Location::new(name_span, self.file), is_self_type); self.interner.add_reference(referenced, reference); + for (field_index, field) in fields.iter().enumerate() { + let referenced = ReferenceId::StructMember(struct_id, field_index); + let reference = ReferenceId::Reference(Location::new(field.0.span(), self.file), false); + self.interner.add_reference(referenced, reference); + } + HirPattern::Struct(expected_type, fields, location) } @@ -456,9 +464,16 @@ impl<'context> Elaborator<'context> { // Comptime variables must be replaced with their values if let Some(definition) = self.interner.try_definition(definition_id) { if definition.comptime && !self.in_comptime_context() { - let mut interpreter = - Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); + let mut interpreter_errors = vec![]; + let mut interpreter = Interpreter::new( + self.interner, + &mut self.comptime_scopes, + self.crate_id, + self.debug_comptime_in_file, + &mut interpreter_errors, + ); let value = interpreter.evaluate(id); + self.include_interpreter_errors(interpreter_errors); return self.inline_comptime_value(value, span); } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs index 1fc92ad28ba..13c59e3494e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs @@ -3,7 +3,6 @@ use noirc_errors::{Location, Span}; use crate::{ ast::{AssignStatement, ConstrainStatement, LValue}, hir::{ - comptime::Interpreter, resolution::errors::ResolverError, type_check::{Source, TypeCheckError}, }, @@ -381,6 +380,10 @@ impl<'context> Elaborator<'context> { Type::Struct(s, args) => { let s = s.borrow(); if let Some((field, index)) = s.get_field(field_name, args) { + let referenced = ReferenceId::StructMember(s.id, index); + let reference = ReferenceId::Reference(Location::new(span, self.file), false); +
self.interner.add_reference(referenced, reference); + return Some((field, index)); } } @@ -444,11 +447,15 @@ impl<'context> Elaborator<'context> { let span = statement.span; let (hir_statement, _typ) = self.elaborate_statement(statement); self.check_and_pop_function_context(); - - let mut interpreter = - Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); + let mut interpreter_errors = vec![]; + let mut interpreter = self.setup_interpreter(&mut interpreter_errors); let value = interpreter.evaluate_statement(hir_statement); let (expr, typ) = self.inline_comptime_value(value, span); + self.include_interpreter_errors(interpreter_errors); + + let location = self.interner.id_location(hir_statement); + self.debug_comptime(location, |interner| expr.to_display_ast(interner).kind); + (HirStatement::Expression(expr), typ) } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs index 77ac8e476f8..4cd20820c56 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs @@ -47,7 +47,8 @@ impl<'context> Elaborator<'context> { // the interner may set `interner.ordering_type` based on the result type // of the Cmp trait, if this is it. if self.crate_id.is_stdlib() { - self.interner.try_add_operator_trait(trait_id); + self.interner.try_add_infix_operator_trait(trait_id); + self.interner.try_add_prefix_operator_trait(trait_id); } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs index 698114cfb5e..4e9b3620760 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs @@ -670,57 +670,6 @@ impl<'context> Elaborator<'context> { } } - pub(super) fn type_check_prefix_operand( - &mut self, - op: &crate::ast::UnaryOp, - rhs_type: &Type, - span: Span, - ) -> Type { - let unify = |this: &mut Self, expected| { - this.unify(rhs_type, &expected, || TypeCheckError::TypeMismatch { - expr_typ: rhs_type.to_string(), - expected_typ: expected.to_string(), - expr_span: span, - }); - expected - }; - - match op { - crate::ast::UnaryOp::Minus => { - if rhs_type.is_unsigned() { - self.push_err(TypeCheckError::InvalidUnaryOp { - kind: rhs_type.to_string(), - span, - }); - } - let expected = self.polymorphic_integer_or_field(); - self.unify(rhs_type, &expected, || TypeCheckError::InvalidUnaryOp { - kind: rhs_type.to_string(), - span, - }); - expected - } - crate::ast::UnaryOp::Not => { - let rhs_type = rhs_type.follow_bindings(); - - // `!` can work on booleans or integers - if matches!(rhs_type, Type::Integer(..)) { - return rhs_type; - } - - unify(self, Type::Bool) - } - crate::ast::UnaryOp::MutableReference => { - Type::MutableReference(Box::new(rhs_type.follow_bindings())) - } - crate::ast::UnaryOp::Dereference { implicitly_added: _ } => { - let element_type = self.interner.next_type_variable(); - unify(self, Type::MutableReference(Box::new(element_type.clone()))); - element_type - } - } - } - /// Insert as many dereference operations as necessary to automatically dereference a method /// call object to its base value type T. 
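// Illustrative aside (not part of the patch): the stdlib hook above now registers
// infix and prefix operator traits separately. The sketch below shows only the lookup
// shape of those two maps; the enums and `TraitId` are simplified stand-ins invented
// for this example, not the interner's real definitions.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum BinaryOpKind { Add, Equal }

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum UnaryOp { Minus, Not, MutableReference }

#[derive(Clone, Copy, Debug, PartialEq)]
struct TraitId(u32);

#[derive(Default)]
struct OperatorTraits {
    infix: HashMap<BinaryOpKind, TraitId>,
    prefix: HashMap<UnaryOp, TraitId>,
}

impl OperatorTraits {
    // Every binary operator corresponds to some trait, so the infix lookup is infallible...
    fn infix_trait(&self, op: BinaryOpKind) -> TraitId {
        self.infix[&op]
    }
    // ...but only `-` and `!` have prefix traits, so the prefix lookup returns Option.
    fn prefix_trait(&self, op: UnaryOp) -> Option<TraitId> {
        self.prefix.get(&op).copied()
    }
}

fn main() {
    let mut traits = OperatorTraits::default();
    traits.infix.insert(BinaryOpKind::Add, TraitId(0));
    traits.infix.insert(BinaryOpKind::Equal, TraitId(1));
    traits.prefix.insert(UnaryOp::Minus, TraitId(2));
    traits.prefix.insert(UnaryOp::Not, TraitId(3));

    assert_eq!(traits.infix_trait(BinaryOpKind::Add), TraitId(0));
    assert_eq!(traits.prefix_trait(UnaryOp::Minus), Some(TraitId(2)));
    // Reference-taking and dereference prefixes have no trait to overload.
    assert_eq!(traits.prefix_trait(UnaryOp::MutableReference), None);
}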
pub(super) fn insert_auto_dereferences(&mut self, object: ExprId, typ: Type) -> (ExprId, Type) { @@ -730,6 +679,7 @@ impl<'context> Elaborator<'context> { let object = self.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { operator: UnaryOp::Dereference { implicitly_added: true }, rhs: object, + trait_method_id: None, })); self.interner.push_expr_type(object, element.as_ref().clone()); self.interner.push_expr_location(object, location.span, location.file); @@ -1073,6 +1023,84 @@ impl<'context> Elaborator<'context> { } } + // Given a unary operator and a type, this method will produce the output type + // and a boolean indicating whether to use the trait impl corresponding to the operator + // or not. A value of false indicates the caller to use a primitive operation for this + // operator, while a true value indicates a user-provided trait impl is required. + pub(super) fn prefix_operand_type_rules( + &mut self, + op: &UnaryOp, + rhs_type: &Type, + span: Span, + ) -> Result<(Type, bool), TypeCheckError> { + use Type::*; + + match op { + crate::ast::UnaryOp::Minus | crate::ast::UnaryOp::Not => { + match rhs_type { + // An error type will always return an error + Error => Ok((Error, false)), + Alias(alias, args) => { + let alias = alias.borrow().get_type(args); + self.prefix_operand_type_rules(op, &alias, span) + } + + // Matches on TypeVariable must be first so that we follow any type + // bindings. + TypeVariable(int, _) => { + if let TypeBinding::Bound(binding) = &*int.borrow() { + return self.prefix_operand_type_rules(op, binding, span); + } + + // The `!` prefix operator is not valid for Field, so if this is a numeric + // type we constrain it to just (non-Field) integer types. + if matches!(op, crate::ast::UnaryOp::Not) && rhs_type.is_numeric() { + let integer_type = Type::polymorphic_integer(self.interner); + self.unify(rhs_type, &integer_type, || { + TypeCheckError::InvalidUnaryOp { kind: rhs_type.to_string(), span } + }); + } + + Ok((rhs_type.clone(), !rhs_type.is_numeric())) + } + Integer(sign_x, bit_width_x) => { + if *op == UnaryOp::Minus && *sign_x == Signedness::Unsigned { + return Err(TypeCheckError::InvalidUnaryOp { + kind: rhs_type.to_string(), + span, + }); + } + Ok((Integer(*sign_x, *bit_width_x), false)) + } + // The result of a Field is always a witness + FieldElement => { + if *op == UnaryOp::Not { + return Err(TypeCheckError::FieldNot { span }); + } + Ok((FieldElement, false)) + } + + Bool => Ok((Bool, false)), + + _ => Ok((rhs_type.clone(), true)), + } + } + crate::ast::UnaryOp::MutableReference => { + Ok((Type::MutableReference(Box::new(rhs_type.follow_bindings())), false)) + } + crate::ast::UnaryOp::Dereference { implicitly_added: _ } => { + let element_type = self.interner.next_type_variable(); + let expected = Type::MutableReference(Box::new(element_type.clone())); + self.unify(rhs_type, &expected, || TypeCheckError::TypeMismatch { + expr_typ: rhs_type.to_string(), + expected_typ: expected.to_string(), + expr_span: span, + }); + Ok((element_type, false)) + } + } + } + /// Prerequisite: verify_trait_constraint of the operator's trait constraint. 
/// /// Although by this point the operator is expected to already have a trait impl, @@ -1140,6 +1168,7 @@ impl<'context> Elaborator<'context> { *access_lhs = this.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { operator: crate::ast::UnaryOp::Dereference { implicitly_added: true }, rhs: old_lhs, + trait_method_id: None, })); this.interner.push_expr_type(old_lhs, lhs_type); this.interner.push_expr_type(*access_lhs, element); @@ -1362,6 +1391,7 @@ impl<'context> Elaborator<'context> { self.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { operator: UnaryOp::MutableReference, rhs: *object, + trait_method_id: None, })); self.interner.push_expr_type(new_object, new_type); self.interner.push_expr_location(new_object, location.span, location.file); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs index 25ff50085fe..0472b0040e5 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs @@ -1,3 +1,4 @@ +use std::fmt::Display; use std::rc::Rc; use crate::{ @@ -45,6 +46,7 @@ pub enum InterpreterError { NonStructInConstructor { typ: Type, location: Location }, CannotInlineMacro { value: Value, location: Location }, UnquoteFoundDuringEvaluation { location: Location }, + DebugEvaluateComptime { diagnostic: CustomDiagnostic, location: Location }, FailedToParseMacro { error: ParserError, tokens: Rc, rule: &'static str, file: FileId }, UnsupportedTopLevelItemUnquote { item: String, location: Location }, NonComptimeFnCallInSameCrate { function: String, location: Location }, @@ -117,7 +119,8 @@ impl InterpreterError { | InterpreterError::NoImpl { location, .. } | InterpreterError::ImplMethodTypeMismatch { location, .. } | InterpreterError::BreakNotInLoop { location, .. } - | InterpreterError::ContinueNotInLoop { location, .. } => *location, + | InterpreterError::DebugEvaluateComptime { location, .. } => *location, + InterpreterError::ContinueNotInLoop { location, .. } => *location, InterpreterError::FailedToParseMacro { error, file, .. } => { Location::new(error.span(), *file) } @@ -129,6 +132,20 @@ impl InterpreterError { } } } + + pub(crate) fn debug_evaluate_comptime(expr: impl Display, location: Location) -> Self { + let mut formatted_result = format!("{}", expr); + // if multi-line, display on a separate line from the message + if formatted_result.contains('\n') { + formatted_result.insert(0, '\n'); + } + let diagnostic = CustomDiagnostic::simple_info( + "`comptime` expression ran:".to_string(), + format!("After evaluation: {}", formatted_result), + location.span, + ); + InterpreterError::DebugEvaluateComptime { diagnostic, location } + } } impl<'a> From<&'a InterpreterError> for CustomDiagnostic { @@ -291,6 +308,7 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { let secondary = "This is a bug".into(); CustomDiagnostic::simple_error(msg, secondary, location.span) } + InterpreterError::DebugEvaluateComptime { diagnostic, .. 
} => diagnostic.clone(), InterpreterError::FailedToParseMacro { error, tokens, rule, file: _ } => { let message = format!("Failed to parse macro's token stream into {rule}"); let tokens = vecmap(&tokens.0, ToString::to_string).join(" "); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs new file mode 100644 index 00000000000..22763c9cb64 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs @@ -0,0 +1,428 @@ +use iter_extended::vecmap; +use noirc_errors::{Span, Spanned}; + +use crate::ast::{ + ArrayLiteral, AssignStatement, BlockExpression, CallExpression, CastExpression, ConstrainKind, + ConstructorExpression, ExpressionKind, ForLoopStatement, ForRange, Ident, IfExpression, + IndexExpression, InfixExpression, LValue, Lambda, LetStatement, Literal, + MemberAccessExpression, MethodCallExpression, Path, Pattern, PrefixExpression, UnresolvedType, + UnresolvedTypeData, UnresolvedTypeExpression, +}; +use crate::ast::{ConstrainStatement, Expression, Statement, StatementKind}; +use crate::hir_def::expr::{HirArrayLiteral, HirBlockExpression, HirExpression, HirIdent}; +use crate::hir_def::stmt::{HirLValue, HirPattern, HirStatement}; +use crate::hir_def::types::{Type, TypeBinding, TypeVariableKind}; +use crate::macros_api::HirLiteral; +use crate::node_interner::{ExprId, NodeInterner, StmtId}; + +// TODO: +// - Full path for idents & types +// - Assert/AssertEq information lost +// - The type name span is lost in constructor patterns & expressions +// - All type spans are lost +// - Type::TypeVariable has no equivalent in the Ast + +impl HirStatement { + pub fn to_display_ast(&self, interner: &NodeInterner, span: Span) -> Statement { + let kind = match self { + HirStatement::Let(let_stmt) => { + let pattern = let_stmt.pattern.to_display_ast(interner); + let r#type = interner.id_type(let_stmt.expression).to_display_ast(); + let expression = let_stmt.expression.to_display_ast(interner); + StatementKind::Let(LetStatement { + pattern, + r#type, + expression, + comptime: false, + attributes: Vec::new(), + }) + } + HirStatement::Constrain(constrain) => { + let expr = constrain.0.to_display_ast(interner); + let message = constrain.2.map(|message| message.to_display_ast(interner)); + + // TODO: Find difference in usage between Assert & AssertEq + StatementKind::Constrain(ConstrainStatement(expr, message, ConstrainKind::Assert)) + } + HirStatement::Assign(assign) => StatementKind::Assign(AssignStatement { + lvalue: assign.lvalue.to_display_ast(interner), + expression: assign.expression.to_display_ast(interner), + }), + HirStatement::For(for_stmt) => StatementKind::For(ForLoopStatement { + identifier: for_stmt.identifier.to_display_ast(interner), + range: ForRange::Range( + for_stmt.start_range.to_display_ast(interner), + for_stmt.end_range.to_display_ast(interner), + ), + block: for_stmt.block.to_display_ast(interner), + span, + }), + HirStatement::Break => StatementKind::Break, + HirStatement::Continue => StatementKind::Continue, + HirStatement::Expression(expr) => { + StatementKind::Expression(expr.to_display_ast(interner)) + } + HirStatement::Semi(expr) => StatementKind::Semi(expr.to_display_ast(interner)), + HirStatement::Error => StatementKind::Error, + HirStatement::Comptime(statement) => { + StatementKind::Comptime(Box::new(statement.to_display_ast(interner))) + } + }; + + Statement { kind, span } + } +} + +impl StmtId { + /// Convert to 
AST for display (some details lost) + pub fn to_display_ast(self, interner: &NodeInterner) -> Statement { + let statement = interner.statement(&self); + let span = interner.statement_span(self); + + statement.to_display_ast(interner, span) + } +} + +impl HirExpression { + /// Convert to AST for display (some details lost) + pub fn to_display_ast(&self, interner: &NodeInterner, span: Span) -> Expression { + let kind = match self { + HirExpression::Ident(ident, generics) => { + let path = Path::from_ident(ident.to_display_ast(interner)); + ExpressionKind::Variable( + path, + generics.as_ref().map(|option| { + option.iter().map(|generic| generic.to_display_ast()).collect() + }), + ) + } + HirExpression::Literal(HirLiteral::Array(array)) => { + let array = array.to_display_ast(interner, span); + ExpressionKind::Literal(Literal::Array(array)) + } + HirExpression::Literal(HirLiteral::Slice(array)) => { + let array = array.to_display_ast(interner, span); + ExpressionKind::Literal(Literal::Slice(array)) + } + HirExpression::Literal(HirLiteral::Bool(value)) => { + ExpressionKind::Literal(Literal::Bool(*value)) + } + HirExpression::Literal(HirLiteral::Integer(value, sign)) => { + ExpressionKind::Literal(Literal::Integer(*value, *sign)) + } + HirExpression::Literal(HirLiteral::Str(string)) => { + ExpressionKind::Literal(Literal::Str(string.clone())) + } + HirExpression::Literal(HirLiteral::FmtStr(string, _exprs)) => { + // TODO: Is throwing away the exprs here valid? + ExpressionKind::Literal(Literal::FmtStr(string.clone())) + } + HirExpression::Literal(HirLiteral::Unit) => ExpressionKind::Literal(Literal::Unit), + HirExpression::Block(expr) => ExpressionKind::Block(expr.to_display_ast(interner)), + HirExpression::Prefix(prefix) => ExpressionKind::Prefix(Box::new(PrefixExpression { + operator: prefix.operator, + rhs: prefix.rhs.to_display_ast(interner), + })), + HirExpression::Infix(infix) => ExpressionKind::Infix(Box::new(InfixExpression { + lhs: infix.lhs.to_display_ast(interner), + operator: Spanned::from(infix.operator.location.span, infix.operator.kind), + rhs: infix.rhs.to_display_ast(interner), + })), + HirExpression::Index(index) => ExpressionKind::Index(Box::new(IndexExpression { + collection: index.collection.to_display_ast(interner), + index: index.index.to_display_ast(interner), + })), + HirExpression::Constructor(constructor) => { + let type_name = constructor.r#type.borrow().name.to_string(); + let type_name = Path::from_single(type_name, span); + let fields = vecmap(constructor.fields.clone(), |(name, expr): (Ident, ExprId)| { + (name, expr.to_display_ast(interner)) + }); + let struct_type = None; + + ExpressionKind::Constructor(Box::new(ConstructorExpression { + type_name, + fields, + struct_type, + })) + } + HirExpression::MemberAccess(access) => { + ExpressionKind::MemberAccess(Box::new(MemberAccessExpression { + lhs: access.lhs.to_display_ast(interner), + rhs: access.rhs.clone(), + })) + } + HirExpression::Call(call) => { + let func = Box::new(call.func.to_display_ast(interner)); + let arguments = vecmap(call.arguments.clone(), |arg| arg.to_display_ast(interner)); + let is_macro_call = false; + ExpressionKind::Call(Box::new(CallExpression { func, arguments, is_macro_call })) + } + HirExpression::MethodCall(method_call) => { + ExpressionKind::MethodCall(Box::new(MethodCallExpression { + object: method_call.object.to_display_ast(interner), + method_name: method_call.method.clone(), + arguments: vecmap(method_call.arguments.clone(), |arg| { + arg.to_display_ast(interner) + }), + 
generics: method_call.generics.clone().map(|option| { + option.iter().map(|generic| generic.to_display_ast()).collect() + }), + is_macro_call: false, + })) + } + HirExpression::Cast(cast) => { + let lhs = cast.lhs.to_display_ast(interner); + let r#type = cast.r#type.to_display_ast(); + ExpressionKind::Cast(Box::new(CastExpression { lhs, r#type })) + } + HirExpression::If(if_expr) => ExpressionKind::If(Box::new(IfExpression { + condition: if_expr.condition.to_display_ast(interner), + consequence: if_expr.consequence.to_display_ast(interner), + alternative: if_expr.alternative.map(|expr| expr.to_display_ast(interner)), + })), + HirExpression::Tuple(fields) => { + ExpressionKind::Tuple(vecmap(fields, |field| field.to_display_ast(interner))) + } + HirExpression::Lambda(lambda) => { + let parameters = vecmap(lambda.parameters.clone(), |(pattern, typ)| { + (pattern.to_display_ast(interner), typ.to_display_ast()) + }); + let return_type = lambda.return_type.to_display_ast(); + let body = lambda.body.to_display_ast(interner); + ExpressionKind::Lambda(Box::new(Lambda { parameters, return_type, body })) + } + HirExpression::Error => ExpressionKind::Error, + HirExpression::Comptime(block) => { + ExpressionKind::Comptime(block.to_display_ast(interner), span) + } + HirExpression::Quote(block) => ExpressionKind::Quote(block.clone()), + + // A macro was evaluated here: return the quoted result + HirExpression::Unquote(block) => ExpressionKind::Quote(block.clone()), + }; + + Expression::new(kind, span) + } +} + +impl ExprId { + /// Convert to AST for display (some details lost) + pub fn to_display_ast(self, interner: &NodeInterner) -> Expression { + let expression = interner.expression(&self); + // TODO: empty 0 span + let span = interner.try_expr_span(&self).unwrap_or_else(|| Span::empty(0)); + expression.to_display_ast(interner, span) + } +} + +impl HirPattern { + /// Convert to AST for display (some details lost) + fn to_display_ast(&self, interner: &NodeInterner) -> Pattern { + match self { + HirPattern::Identifier(ident) => Pattern::Identifier(ident.to_display_ast(interner)), + HirPattern::Mutable(pattern, location) => { + let pattern = Box::new(pattern.to_display_ast(interner)); + Pattern::Mutable(pattern, location.span, false) + } + HirPattern::Tuple(patterns, location) => { + let patterns = vecmap(patterns, |pattern| pattern.to_display_ast(interner)); + Pattern::Tuple(patterns, location.span) + } + HirPattern::Struct(typ, patterns, location) => { + let patterns = vecmap(patterns, |(name, pattern)| { + (name.clone(), pattern.to_display_ast(interner)) + }); + let name = match typ.follow_bindings() { + Type::Struct(struct_def, _) => { + let struct_def = struct_def.borrow(); + struct_def.name.0.contents.clone() + } + // This pass shouldn't error so if the type isn't a struct we just get a string + // representation of any other type and use that. We're relying on name + // resolution to fail later when this Ast is re-converted to Hir. 
+ other => other.to_string(), + }; + // The name span is lost here + let path = Path::from_single(name, location.span); + Pattern::Struct(path, patterns, location.span) + } + } + } +} + +impl HirIdent { + /// Convert to AST for display (some details lost) + fn to_display_ast(&self, interner: &NodeInterner) -> Ident { + let name = interner.definition_name(self.id).to_owned(); + Ident(Spanned::from(self.location.span, name)) + } +} + +impl Type { + /// Convert to AST for display (some details lost) + fn to_display_ast(&self) -> UnresolvedType { + let typ = match self { + Type::FieldElement => UnresolvedTypeData::FieldElement, + Type::Array(length, element) => { + let length = length.to_type_expression(); + let element = Box::new(element.to_display_ast()); + UnresolvedTypeData::Array(length, element) + } + Type::Slice(element) => { + let element = Box::new(element.to_display_ast()); + UnresolvedTypeData::Slice(element) + } + Type::Integer(sign, bit_size) => UnresolvedTypeData::Integer(*sign, *bit_size), + Type::Bool => UnresolvedTypeData::Bool, + Type::String(length) => { + let length = length.to_type_expression(); + UnresolvedTypeData::String(length) + } + Type::FmtString(length, element) => { + let length = length.to_type_expression(); + let element = Box::new(element.to_display_ast()); + UnresolvedTypeData::FormatString(length, element) + } + Type::Unit => UnresolvedTypeData::Unit, + Type::Tuple(fields) => { + let fields = vecmap(fields, |field| field.to_display_ast()); + UnresolvedTypeData::Tuple(fields) + } + Type::Struct(def, generics) => { + let struct_def = def.borrow(); + let generics = vecmap(generics, |generic| generic.to_display_ast()); + let name = Path::from_ident(struct_def.name.clone()); + UnresolvedTypeData::Named(name, generics, false) + } + Type::Alias(type_def, generics) => { + // Keep the alias name instead of expanding this in case the + // alias' definition was changed + let type_def = type_def.borrow(); + let generics = vecmap(generics, |generic| generic.to_display_ast()); + let name = Path::from_ident(type_def.name.clone()); + UnresolvedTypeData::Named(name, generics, false) + } + Type::TypeVariable(binding, kind) => { + match &*binding.borrow() { + TypeBinding::Bound(typ) => return typ.to_display_ast(), + TypeBinding::Unbound(id) => { + let expression = match kind { + // TODO: fix span or make Option + TypeVariableKind::Constant(value) => { + UnresolvedTypeExpression::Constant(*value, Span::empty(0)) + } + other_kind => { + let name = format!("var_{:?}_{}", other_kind, id); + + // TODO: fix span or make Option + let path = Path::from_single(name, Span::empty(0)); + UnresolvedTypeExpression::Variable(path) + } + }; + + UnresolvedTypeData::Expression(expression) + } + } + } + Type::TraitAsType(_, name, generics) => { + let generics = vecmap(generics, |generic| generic.to_display_ast()); + let name = Path::from_single(name.as_ref().clone(), Span::default()); + UnresolvedTypeData::TraitAsType(name, generics) + } + Type::NamedGeneric(_var, name, _kind) => { + let name = Path::from_single(name.as_ref().clone(), Span::default()); + UnresolvedTypeData::TraitAsType(name, Vec::new()) + } + Type::Function(args, ret, env) => { + let args = vecmap(args, |arg| arg.to_display_ast()); + let ret = Box::new(ret.to_display_ast()); + let env = Box::new(env.to_display_ast()); + UnresolvedTypeData::Function(args, ret, env) + } + Type::MutableReference(element) => { + let element = Box::new(element.to_display_ast()); + UnresolvedTypeData::MutableReference(element) + } + // Type::Forall is 
only for generic functions which don't store a type + // in their Ast so they don't need to call to_display_ast for their Forall type. + // Since there is no UnresolvedTypeData equivalent for Type::Forall, we use + // this to ignore this case since it shouldn't be needed anyway. + Type::Forall(_, typ) => return typ.to_display_ast(), + Type::Constant(_) => panic!("Type::Constant where a type was expected: {self:?}"), + Type::Quoted(quoted_type) => UnresolvedTypeData::Quoted(*quoted_type), + Type::Error => UnresolvedTypeData::Error, + }; + + UnresolvedType { typ, span: None } + } + + /// Convert to AST for display (some details lost) + fn to_type_expression(&self) -> UnresolvedTypeExpression { + let span = Span::default(); + + match self.follow_bindings() { + Type::Constant(length) => UnresolvedTypeExpression::Constant(length, span), + Type::NamedGeneric(_var, name, _kind) => { + let path = Path::from_single(name.as_ref().clone(), span); + UnresolvedTypeExpression::Variable(path) + } + // TODO: This should be turned into a proper error. + other => panic!("Cannot represent {other:?} as type expression"), + } + } +} + +impl HirLValue { + /// Convert to AST for display (some details lost) + fn to_display_ast(&self, interner: &NodeInterner) -> LValue { + match self { + HirLValue::Ident(ident, _) => LValue::Ident(ident.to_display_ast(interner)), + HirLValue::MemberAccess { object, field_name, field_index: _, typ: _, location } => { + let object = Box::new(object.to_display_ast(interner)); + LValue::MemberAccess { object, field_name: field_name.clone(), span: location.span } + } + HirLValue::Index { array, index, typ: _, location } => { + let array = Box::new(array.to_display_ast(interner)); + let index = index.to_display_ast(interner); + LValue::Index { array, index, span: location.span } + } + HirLValue::Dereference { lvalue, element_type: _, location } => { + let lvalue = Box::new(lvalue.to_display_ast(interner)); + LValue::Dereference(lvalue, location.span) + } + } + } +} + +impl HirArrayLiteral { + /// Convert to AST for display (some details lost) + fn to_display_ast(&self, interner: &NodeInterner, span: Span) -> ArrayLiteral { + match self { + HirArrayLiteral::Standard(elements) => { + ArrayLiteral::Standard(vecmap(elements, |element| element.to_display_ast(interner))) + } + HirArrayLiteral::Repeated { repeated_element, length } => { + let repeated_element = Box::new(repeated_element.to_display_ast(interner)); + let length = match length { + Type::Constant(length) => { + let literal = Literal::Integer((*length as u128).into(), false); + let kind = ExpressionKind::Literal(literal); + Box::new(Expression::new(kind, span)) + } + other => panic!("Cannot convert non-constant type for repeated array literal from Hir -> Ast: {other:?}"), + }; + ArrayLiteral::Repeated { repeated_element, length } + } + } + } +} + +impl HirBlockExpression { + /// Convert to AST for display (some details lost) + fn to_display_ast(&self, interner: &NodeInterner) -> BlockExpression { + let statements = + vecmap(self.statements.clone(), |statement| statement.to_display_ast(interner)); + BlockExpression { statements } + } +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 605a35780ed..02714f77605 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -1,6 +1,7 @@ use 
std::{collections::hash_map::Entry, rc::Rc}; use acvm::{acir::AcirField, FieldElement}; +use fm::FileId; use im::Vector; use iter_extended::try_vecmap; use noirc_errors::Location; @@ -19,7 +20,7 @@ use crate::{ hir_def::{ expr::{ HirArrayLiteral, HirBlockExpression, HirCallExpression, HirCastExpression, - HirConstructorExpression, HirIdent, HirIfExpression, HirIndexExpression, + HirConstructorExpression, HirExpression, HirIdent, HirIfExpression, HirIndexExpression, HirInfixExpression, HirLambda, HirMemberAccess, HirMethodCallExpression, HirPrefixExpression, }, @@ -28,7 +29,7 @@ use crate::{ HirPattern, }, }, - macros_api::{HirExpression, HirLiteral, HirStatement, NodeInterner}, + macros_api::{HirLiteral, HirStatement, NodeInterner}, node_interner::{DefinitionId, DefinitionKind, ExprId, FuncId, StmtId}, Shared, Type, TypeBinding, TypeBindings, TypeVariableKind, }; @@ -51,6 +52,10 @@ pub struct Interpreter<'interner> { crate_id: CrateId, + /// The scope of --debug-comptime, or None if unset + pub(super) debug_comptime_in_file: Option, + pub(super) debug_comptime_evaluations: &'interner mut Vec, + in_loop: bool, } @@ -60,8 +65,17 @@ impl<'a> Interpreter<'a> { interner: &'a mut NodeInterner, scopes: &'a mut Vec>, crate_id: CrateId, + debug_comptime_in_file: Option, + debug_comptime_evaluations: &'a mut Vec, ) -> Self { - Self { interner, scopes, crate_id, in_loop: false } + Self { + interner, + scopes, + crate_id, + debug_comptime_in_file, + debug_comptime_evaluations, + in_loop: false, + } } pub(crate) fn call_function( diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/mod.rs index ab984d2f2be..3cc7b5f7e98 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/mod.rs @@ -1,4 +1,5 @@ mod errors; +mod hir_to_display_ast; mod interpreter; mod scan; mod tests; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/scan.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/scan.rs index 770c523bd7f..f9cc54ef9e4 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/scan.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/scan.rs @@ -29,6 +29,8 @@ use super::{ Value, }; +use noirc_errors::Location; + #[allow(dead_code)] impl<'interner> Interpreter<'interner> { /// Scan through a function, evaluating any Comptime nodes found. @@ -80,9 +82,10 @@ impl<'interner> Interpreter<'interner> { HirExpression::Lambda(lambda) => self.scan_lambda(lambda), HirExpression::Comptime(block) => { let location = self.interner.expr_location(&expr); - let new_expr = + let new_expr_id = self.evaluate_block(block)?.into_hir_expression(self.interner, location)?; - let new_expr = self.interner.expression(&new_expr); + let new_expr = self.interner.expression(&new_expr_id); + self.debug_comptime(new_expr_id, location); self.interner.replace_expr(&expr, new_expr); Ok(()) } @@ -118,7 +121,9 @@ impl<'interner> Interpreter<'interner> { if let Ok(value) = self.evaluate_ident(ident, id) { // TODO(#4922): Inlining closures is currently unimplemented if !matches!(value, Value::Closure(..)) { - self.inline_expression(value, id)?; + let new_expr = self.inline_expression(value, id)?; + let location = self.interner.id_location(id); + self.debug_comptime(new_expr, location); } } Ok(()) @@ -231,6 +236,7 @@ impl<'interner> Interpreter<'interner> { let new_expr = self .evaluate_comptime(comptime)? 
.into_hir_expression(self.interner, location)?; + self.debug_comptime(new_expr, location); self.interner.replace_statement(statement, HirStatement::Expression(new_expr)); Ok(()) } @@ -247,11 +253,19 @@ impl<'interner> Interpreter<'interner> { Ok(()) } - fn inline_expression(&mut self, value: Value, expr: ExprId) -> IResult<()> { + fn inline_expression(&mut self, value: Value, expr: ExprId) -> IResult { let location = self.interner.expr_location(&expr); - let new_expr = value.into_hir_expression(self.interner, location)?; - let new_expr = self.interner.expression(&new_expr); + let new_expr_id = value.into_hir_expression(self.interner, location)?; + let new_expr = self.interner.expression(&new_expr_id); self.interner.replace_expr(&expr, new_expr); - Ok(()) + Ok(new_expr_id) + } + + fn debug_comptime(&mut self, expr: ExprId, location: Location) { + if Some(location.file) == self.debug_comptime_in_file { + let expr = expr.to_display_ast(self.interner); + self.debug_comptime_evaluations + .push(InterpreterError::debug_evaluate_comptime(expr, location)); + } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs index 870f2bc458a..e8e05506c94 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs @@ -13,7 +13,15 @@ use crate::hir::type_check::test::type_check_src_code; fn interpret_helper(src: &str, func_namespace: Vec) -> Result { let (mut interner, main_id) = type_check_src_code(src, func_namespace); let mut scopes = vec![HashMap::default()]; - let mut interpreter = Interpreter::new(&mut interner, &mut scopes, CrateId::Root(0)); + let no_debug_evaluate_comptime = None; + let mut interpreter_errors = vec![]; + let mut interpreter = Interpreter::new( + &mut interner, + &mut scopes, + CrateId::Root(0), + no_debug_evaluate_comptime, + &mut interpreter_errors, + ); let no_location = Location::dummy(); interpreter.call_function(main_id, Vec::new(), HashMap::new(), no_location) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 43ab6224ea7..b474ccff0cc 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -29,12 +29,14 @@ use crate::ast::{ }; use crate::parser::{ParserError, SortedModule}; +use noirc_errors::{CustomDiagnostic, Location, Span}; + use fm::FileId; use iter_extended::vecmap; -use noirc_errors::{CustomDiagnostic, Location, Span}; use rustc_hash::FxHashMap as HashMap; use std::collections::BTreeMap; - +use std::fmt::Write; +use std::path::PathBuf; use std::vec; #[derive(Default)] @@ -42,6 +44,7 @@ pub struct ResolvedModule { pub globals: Vec<(FileId, GlobalId)>, pub functions: Vec<(FileId, FuncId)>, pub trait_impl_functions: Vec<(FileId, FuncId)>, + pub debug_comptime_in_file: Option, pub errors: Vec<(CompilationError, FileId)>, } @@ -195,6 +198,7 @@ pub enum CompilationError { ResolverError(ResolverError), TypeError(TypeCheckError), InterpreterError(InterpreterError), + DebugComptimeScopeNotFound(Vec), } impl CompilationError { @@ -212,6 +216,16 @@ impl<'a> From<&'a CompilationError> for CustomDiagnostic { CompilationError::ResolverError(error) => error.into(), CompilationError::TypeError(error) => error.into(), CompilationError::InterpreterError(error) => error.into(), + 
CompilationError::DebugComptimeScopeNotFound(error) => { + let msg = "multiple files found matching --debug-comptime path".into(); + let secondary = error.iter().fold(String::new(), |mut output, path| { + let _ = writeln!(output, " {}", path.display()); + output + }); + // NOTE: this span is empty as it is not expected to be displayed + let dummy_span = Span::default(); + CustomDiagnostic::simple_error(msg, secondary, dummy_span) + } } } } @@ -271,6 +285,7 @@ impl DefCollector { ast: SortedModule, root_file_id: FileId, use_legacy: bool, + debug_comptime_in_file: Option<&str>, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; @@ -288,6 +303,7 @@ impl DefCollector { dep.crate_id, context, use_legacy, + debug_comptime_in_file, macro_processors, )); @@ -396,13 +412,31 @@ impl DefCollector { } } + let handle_missing_file = |err| { + errors.push((CompilationError::DebugComptimeScopeNotFound(err), root_file_id)); + None + }; + let debug_comptime_in_file: Option = + debug_comptime_in_file.and_then(|debug_comptime_in_file| { + context + .file_manager + .find_by_path_suffix(debug_comptime_in_file) + .unwrap_or_else(handle_missing_file) + }); + if !use_legacy { - let mut more_errors = Elaborator::elaborate(context, crate_id, def_collector.items); + let mut more_errors = Elaborator::elaborate( + context, + crate_id, + def_collector.items, + debug_comptime_in_file, + ); errors.append(&mut more_errors); return errors; } - let mut resolved_module = ResolvedModule { errors, ..Default::default() }; + let mut resolved_module = + ResolvedModule { errors, debug_comptime_in_file, ..Default::default() }; // We must first resolve and intern the globals before we can resolve any stmts inside each function. 
// Each function uses its own resolver with a newly created ScopeForest, and must be resolved again to be within a function's scope @@ -621,7 +655,14 @@ impl ResolvedModule { fn evaluate_comptime(&mut self, interner: &mut NodeInterner, crate_id: CrateId) { if self.count_errors() == 0 { let mut scopes = vec![HashMap::default()]; - let mut interpreter = Interpreter::new(interner, &mut scopes, crate_id); + let mut interpreter_errors = vec![]; + let mut interpreter = Interpreter::new( + interner, + &mut scopes, + crate_id, + self.debug_comptime_in_file, + &mut interpreter_errors, + ); for (_file, global) in &self.globals { if let Err(error) = interpreter.scan_global(*global) { @@ -636,6 +677,9 @@ impl ResolvedModule { self.errors.push(error.into_compilation_error_pair()); } } + self.errors.extend( + interpreter_errors.into_iter().map(InterpreterError::into_compilation_error_pair), + ); } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs index 59205f74d89..43d1548dc29 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -74,6 +74,7 @@ impl CrateDefMap { crate_id: CrateId, context: &mut Context, use_legacy: bool, + debug_comptime_in_file: Option<&str>, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { // Check if this Crate has already been compiled @@ -123,6 +124,7 @@ impl CrateDefMap { ast, root_file_id, use_legacy, + debug_comptime_in_file, macro_processors, )); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs index 793362bb3d6..856a769c9dd 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -1548,6 +1548,7 @@ impl<'a> Resolver<'a> { ExpressionKind::Prefix(prefix) => { let operator = prefix.operator; let rhs = self.resolve_expression(prefix.rhs); + let trait_method_id = self.interner.get_prefix_operator_trait_method(&operator); if operator == UnaryOp::MutableReference { if let Err(error) = verify_mutable_reference(self.interner, rhs) { @@ -1555,7 +1556,7 @@ impl<'a> Resolver<'a> { } } - HirExpression::Prefix(HirPrefixExpression { operator, rhs }) + HirExpression::Prefix(HirPrefixExpression { operator, rhs, trait_method_id }) } ExpressionKind::Infix(infix) => { let lhs = self.resolve_expression(infix.lhs); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/traits.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/traits.rs index 6781c2833c4..28ee70393cd 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/traits.rs @@ -72,7 +72,8 @@ pub(crate) fn resolve_traits( // the interner may set `interner.ordering_type` based on the result type // of the Cmp trait, if this is it. 
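// Illustrative aside (not part of the patch): the def-collector hunks above resolve the
// `--debug-comptime` argument to a single file by path suffix and report every candidate
// when the suffix is ambiguous. The file table and lookup below are toy stand-ins for
// the file manager and its suffix search, invented only to show the behaviour.
use std::path::PathBuf;

#[derive(Clone, Copy, Debug, PartialEq)]
struct FileId(usize);

fn find_by_path_suffix(
    files: &[(FileId, PathBuf)],
    suffix: &str,
) -> Result<Option<FileId>, Vec<PathBuf>> {
    let matches: Vec<&(FileId, PathBuf)> =
        files.iter().filter(|(_, path)| path.ends_with(suffix)).collect();
    match matches.as_slice() {
        [] => Ok(None),
        [(id, _)] => Ok(Some(*id)),
        // Ambiguity surfaces all matching paths, which the diagnostic then prints one per line.
        many => Err(many.iter().map(|(_, path)| path.clone()).collect()),
    }
}

fn main() {
    let files = vec![
        (FileId(0), PathBuf::from("src/main.nr")),
        (FileId(1), PathBuf::from("src/util/helpers.nr")),
        (FileId(2), PathBuf::from("other/util/helpers.nr")),
    ];
    assert_eq!(find_by_path_suffix(&files, "main.nr"), Ok(Some(FileId(0))));
    assert_eq!(find_by_path_suffix(&files, "util/helpers.nr").unwrap_err().len(), 2);
}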
if crate_id.is_stdlib() { - context.def_interner.try_add_operator_trait(trait_id); + context.def_interner.try_add_infix_operator_trait(trait_id); + context.def_interner.try_add_prefix_operator_trait(trait_id); } } all_errors diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs index e5c52213056..af168a10df9 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs @@ -83,6 +83,8 @@ pub enum TypeCheckError { IntegerAndFieldBinaryOperation { span: Span }, #[error("Cannot do modulo on Fields, try casting to an integer first")] FieldModulo { span: Span }, + #[error("Cannot do not (`!`) on Fields, try casting to an integer first")] + FieldNot { span: Span }, #[error("Fields cannot be compared, try casting to an integer first")] FieldComparison { span: Span }, #[error("The bit count in a bit-shift operation must fit in a u8, try casting the right hand side into a u8 first")] @@ -256,6 +258,7 @@ impl<'a> From<&'a TypeCheckError> for Diagnostic { | TypeCheckError::IntegerAndFieldBinaryOperation { span } | TypeCheckError::OverflowingAssignment { span, .. } | TypeCheckError::FieldModulo { span } + | TypeCheckError::FieldNot { span } | TypeCheckError::ConstrainedReferenceToUnconstrained { span } | TypeCheckError::UnconstrainedReferenceToConstrained { span } | TypeCheckError::UnconstrainedSliceReturnToConstrained { span } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs index 1f3e9103cde..9dfe0901016 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -578,6 +578,7 @@ impl<'interner> TypeChecker<'interner> { self.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { operator: UnaryOp::MutableReference, rhs: method_call.object, + trait_method_id: None, })); self.interner.push_expr_type(new_object, new_type); self.interner.push_expr_location(new_object, location.span, location.file); @@ -604,6 +605,7 @@ impl<'interner> TypeChecker<'interner> { let object = self.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { operator: UnaryOp::Dereference { implicitly_added: true }, rhs: object, + trait_method_id: None, })); self.interner.push_expr_type(object, element.as_ref().clone()); self.interner.push_expr_location(object, location.span, location.file); @@ -799,9 +801,11 @@ impl<'interner> TypeChecker<'interner> { let dereference_lhs = |this: &mut Self, lhs_type, element| { let old_lhs = *access_lhs; + *access_lhs = this.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { operator: crate::ast::UnaryOp::Dereference { implicitly_added: true }, rhs: old_lhs, + trait_method_id: None, })); this.interner.push_expr_type(old_lhs, lhs_type); this.interner.push_expr_type(*access_lhs, element); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs index 8de4f118774..ab2344746d1 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs @@ -124,6 +124,10 @@ pub enum HirArrayLiteral { pub struct HirPrefixExpression { pub operator: UnaryOp, pub rhs: ExprId, + + /// The trait method id for the operator trait method that corresponds to this operator, + 
/// if such a trait exists (for example, there's no trait for the dereference operator). + pub trait_method_id: Option<TraitMethodId>, + } #[derive(Debug, Clone)] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs index 6777c0d1c79..8183911c845 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs @@ -342,6 +342,11 @@ impl StructType { vecmap(&self.fields, |(name, typ)| (name.0.contents.clone(), typ.clone())) } + /// Returns the field at the given index. Panics if no field exists at the given index. + pub fn field_at(&self, index: usize) -> &(Ident, Type) { + &self.fields[index] + } + pub fn field_names(&self) -> BTreeSet<Ident> { self.fields.iter().map(|(name, _)| name.clone()).collect() } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/locations.rs b/noir/noir-repo/compiler/noirc_frontend/src/locations.rs index fcaef0a8dd6..0efe385aa0a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/locations.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/locations.rs @@ -38,6 +38,11 @@ impl NodeInterner { let struct_type = struct_type.borrow(); Location::new(struct_type.name.span(), struct_type.location.file) } + ReferenceId::StructMember(id, field_index) => { + let struct_type = self.get_struct(id); + let struct_type = struct_type.borrow(); + Location::new(struct_type.field_at(field_index).0.span(), struct_type.location.file) + } ReferenceId::Trait(id) => { let trait_type = self.get_trait(id); Location::new(trait_type.name.span(), trait_type.location.file) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs index fabfc74b901..be222cc4e35 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -444,13 +444,26 @@ impl<'interner> Monomorphizer<'interner> { HirExpression::Block(block) => self.block(block.statements)?, HirExpression::Prefix(prefix) => { + let rhs = self.expr(prefix.rhs)?; let location = self.interner.expr_location(&expr); - ast::Expression::Unary(ast::Unary { - operator: prefix.operator, - rhs: Box::new(self.expr(prefix.rhs)?), - result_type: Self::convert_type(&self.interner.id_type(expr), location)?, - location, - }) + + if self.interner.get_selected_impl_for_expression(expr).is_some() { + // If an impl was selected for this prefix operator, replace it + // with a method call to the appropriate trait impl method. + let (function_type, ret) = + self.interner.get_prefix_operator_type(expr, prefix.rhs); + + let method = prefix + .trait_method_id + .expect("ice: missing trait method if when impl was found"); + let func = self.resolve_trait_method_expr(expr, function_type, method)?; + self.create_prefix_operator_impl_call(func, rhs, ret, location)? + } else { + let operator = prefix.operator; + let rhs = Box::new(rhs); + let result_type = Self::convert_type(&self.interner.id_type(expr), location)?; + ast::Expression::Unary(ast::Unary { operator, rhs, result_type, location }) + } } HirExpression::Infix(infix) => { @@ -462,11 +475,12 @@ impl<'interner> Monomorphizer<'interner> { // If an impl was selected for this infix operator, replace it // with a method call to the appropriate trait impl method.
let (function_type, ret) = - self.interner.get_operator_type(infix.lhs, operator, expr); + self.interner.get_infix_operator_type(infix.lhs, operator, expr); let method = infix.trait_method_id; let func = self.resolve_trait_method_expr(expr, function_type, method)?; - self.create_operator_impl_call(func, lhs, infix.operator, rhs, ret, location)? + let operator = infix.operator; + self.create_infix_operator_impl_call(func, lhs, operator, rhs, ret, location)? } else { let lhs = Box::new(lhs); let rhs = Box::new(rhs); @@ -1651,12 +1665,12 @@ impl<'interner> Monomorphizer<'interner> { }) } - /// Call an operator overloading method for the given operator. + /// Call an infix operator overloading method for the given operator. /// This function handles the special cases some operators have which don't map /// 1 to 1 onto their operator function. For example: != requires a negation on /// the result of its `eq` method, and the comparison operators each require a /// conversion from the `Ordering` result to a boolean. - fn create_operator_impl_call( + fn create_infix_operator_impl_call( &self, func: ast::Expression, lhs: ast::Expression, @@ -1715,6 +1729,21 @@ impl<'interner> Monomorphizer<'interner> { Ok(result) } + + /// Call an operator overloading method for the given prefix operator. + fn create_prefix_operator_impl_call( + &self, + func: ast::Expression, + rhs: ast::Expression, + ret: Type, + location: Location, + ) -> Result { + let arguments = vec![rhs]; + let func = Box::new(func); + let return_type = Self::convert_type(&ret, location)?; + + Ok(ast::Expression::Call(ast::Call { func, arguments, return_type, location })) + } } fn unwrap_tuple_type(typ: &HirType) -> Vec { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs index 588b56afa1a..a009a42df53 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs @@ -2,6 +2,7 @@ use std::borrow::Cow; use std::collections::HashMap; use std::fmt; use std::hash::Hash; +use std::marker::Copy; use std::ops::Deref; use fm::FileId; @@ -18,6 +19,7 @@ use crate::hir::comptime; use crate::hir::def_collector::dc_crate::CompilationError; use crate::hir::def_collector::dc_crate::{UnresolvedStruct, UnresolvedTrait, UnresolvedTypeAlias}; use crate::hir::def_map::{LocalModuleId, ModuleId}; +use crate::macros_api::UnaryOp; use crate::QuotedType; use crate::ast::{BinaryOpKind, FunctionDefinition, ItemVisibility}; @@ -139,8 +141,11 @@ pub struct NodeInterner { /// the context to get the concrete type of the object and select the correct impl itself. selected_trait_implementations: HashMap, - /// Holds the trait ids of the traits used for operator overloading - operator_traits: HashMap, + /// Holds the trait ids of the traits used for infix operator overloading + infix_operator_traits: HashMap, + + /// Holds the trait ids of the traits used for prefix operator overloading + prefix_operator_traits: HashMap, /// The `Ordering` type is a semi-builtin type that is the result of the comparison traits. ordering_type: Option, @@ -195,7 +200,7 @@ pub struct NodeInterner { /// Edges are directed from reference nodes to referenced nodes. 
/// For example: /// - /// ``` + /// ```text /// let foo = 3; /// // referenced /// // ^ @@ -241,6 +246,7 @@ pub enum DependencyId { pub enum ReferenceId { Module(ModuleId), Struct(StructId), + StructMember(StructId, usize), Trait(TraitId), Global(GlobalId), Function(FuncId), @@ -563,7 +569,8 @@ impl Default for NodeInterner { next_trait_implementation_id: 0, trait_implementation_map: HashMap::new(), selected_trait_implementations: HashMap::new(), - operator_traits: HashMap::new(), + infix_operator_traits: HashMap::new(), + prefix_operator_traits: HashMap::new(), ordering_type: None, instantiation_bindings: HashMap::new(), field_indices: HashMap::new(), @@ -1071,6 +1078,10 @@ impl NodeInterner { self.id_location(expr_id).span } + pub fn try_expr_span(&self, expr_id: &ExprId) -> Option { + self.try_id_location(expr_id).map(|location| location.span) + } + pub fn expr_location(&self, expr_id: &ExprId) -> Location { self.id_location(expr_id) } @@ -1175,8 +1186,14 @@ impl NodeInterner { } /// Returns the span of an item stored in the Interner - pub fn id_location(&self, index: impl Into) -> Location { - self.id_to_location.get(&index.into()).copied().unwrap() + pub fn id_location(&self, index: impl Into + Copy) -> Location { + self.try_id_location(index) + .unwrap_or_else(|| panic!("ID is missing a source location: {:?}", index.into())) + } + + /// Returns the span of an item stored in the Interner, if present + pub fn try_id_location(&self, index: impl Into) -> Option { + self.id_to_location.get(&index.into()).copied() } /// Replaces the HirExpression at the given ExprId with a new HirExpression @@ -1665,18 +1682,29 @@ impl NodeInterner { /// Retrieves the trait id for a given binary operator. /// All binary operators correspond to a trait - although multiple may correspond /// to the same trait (such as `==` and `!=`). - /// `self.operator_traits` is expected to be filled before name resolution, + /// `self.infix_operator_traits` is expected to be filled before name resolution, /// during definition collection. pub fn get_operator_trait_method(&self, operator: BinaryOpKind) -> TraitMethodId { - let trait_id = self.operator_traits[&operator]; + let trait_id = self.infix_operator_traits[&operator]; // Assume that the operator's method to be overloaded is the first method of the trait. TraitMethodId { trait_id, method_index: 0 } } + /// Retrieves the trait id for a given unary operator. + /// Only some unary operators correspond to a trait: `-` and `!`, but for example `*` does not. + /// `self.prefix_operator_traits` is expected to be filled before name resolution, + /// during definition collection. + pub fn get_prefix_operator_trait_method(&self, operator: &UnaryOp) -> Option { + let trait_id = self.prefix_operator_traits.get(operator)?; + + // Assume that the operator's method to be overloaded is the first method of the trait. + Some(TraitMethodId { trait_id: *trait_id, method_index: 0 }) + } + /// Add the given trait as an operator trait if its name matches one of the /// operator trait names (Add, Sub, ...). 
- pub fn try_add_operator_trait(&mut self, trait_id: TraitId) { + pub fn try_add_infix_operator_trait(&mut self, trait_id: TraitId) { let the_trait = self.get_trait(trait_id); let operator = match the_trait.name.0.contents.as_str() { @@ -1695,17 +1723,17 @@ impl NodeInterner { _ => return, }; - self.operator_traits.insert(operator, trait_id); + self.infix_operator_traits.insert(operator, trait_id); // Some operators also require we insert a matching entry for related operators match operator { BinaryOpKind::Equal => { - self.operator_traits.insert(BinaryOpKind::NotEqual, trait_id); + self.infix_operator_traits.insert(BinaryOpKind::NotEqual, trait_id); } BinaryOpKind::Less => { - self.operator_traits.insert(BinaryOpKind::LessEqual, trait_id); - self.operator_traits.insert(BinaryOpKind::Greater, trait_id); - self.operator_traits.insert(BinaryOpKind::GreaterEqual, trait_id); + self.infix_operator_traits.insert(BinaryOpKind::LessEqual, trait_id); + self.infix_operator_traits.insert(BinaryOpKind::Greater, trait_id); + self.infix_operator_traits.insert(BinaryOpKind::GreaterEqual, trait_id); let the_trait = self.get_trait(trait_id); self.ordering_type = match &the_trait.methods[0].typ { @@ -1720,27 +1748,43 @@ impl NodeInterner { } } + /// Add the given trait as an operator trait if its name matches one of the + /// prefix operator trait names (Not or Neg). + pub fn try_add_prefix_operator_trait(&mut self, trait_id: TraitId) { + let the_trait = self.get_trait(trait_id); + + let operator = match the_trait.name.0.contents.as_str() { + "Neg" => UnaryOp::Minus, + "Not" => UnaryOp::Not, + _ => return, + }; + + self.prefix_operator_traits.insert(operator, trait_id); + } + /// This function is needed when creating a NodeInterner for testing so that calls /// to `get_operator_trait` do not panic when the stdlib isn't present. 
#[cfg(test)] pub fn populate_dummy_operator_traits(&mut self) { let dummy_trait = TraitId(ModuleId::dummy_id()); - self.operator_traits.insert(BinaryOpKind::Add, dummy_trait); - self.operator_traits.insert(BinaryOpKind::Subtract, dummy_trait); - self.operator_traits.insert(BinaryOpKind::Multiply, dummy_trait); - self.operator_traits.insert(BinaryOpKind::Divide, dummy_trait); - self.operator_traits.insert(BinaryOpKind::Modulo, dummy_trait); - self.operator_traits.insert(BinaryOpKind::Equal, dummy_trait); - self.operator_traits.insert(BinaryOpKind::NotEqual, dummy_trait); - self.operator_traits.insert(BinaryOpKind::Less, dummy_trait); - self.operator_traits.insert(BinaryOpKind::LessEqual, dummy_trait); - self.operator_traits.insert(BinaryOpKind::Greater, dummy_trait); - self.operator_traits.insert(BinaryOpKind::GreaterEqual, dummy_trait); - self.operator_traits.insert(BinaryOpKind::And, dummy_trait); - self.operator_traits.insert(BinaryOpKind::Or, dummy_trait); - self.operator_traits.insert(BinaryOpKind::Xor, dummy_trait); - self.operator_traits.insert(BinaryOpKind::ShiftLeft, dummy_trait); - self.operator_traits.insert(BinaryOpKind::ShiftRight, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::Add, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::Subtract, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::Multiply, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::Divide, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::Modulo, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::Equal, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::NotEqual, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::Less, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::LessEqual, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::Greater, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::GreaterEqual, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::And, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::Or, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::Xor, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::ShiftLeft, dummy_trait); + self.infix_operator_traits.insert(BinaryOpKind::ShiftRight, dummy_trait); + self.prefix_operator_traits.insert(UnaryOp::Minus, dummy_trait); + self.prefix_operator_traits.insert(UnaryOp::Not, dummy_trait); } pub(crate) fn ordering_type(&self) -> Type { @@ -1873,7 +1917,7 @@ impl NodeInterner { } /// Returns the type of an operator (which is always a function), along with its return type. - pub fn get_operator_type( + pub fn get_infix_operator_type( &self, lhs: ExprId, operator: BinaryOpKind, @@ -1894,6 +1938,15 @@ impl NodeInterner { let env = Box::new(Type::Unit); (Type::Function(args, Box::new(ret.clone()), env), ret) } + + /// Returns the type of a prefix operator (which is always a function), along with its return type. 
+ pub fn get_prefix_operator_type(&self, operator_expr: ExprId, rhs: ExprId) -> (Type, Type) { + let rhs_type = self.id_type(rhs); + let args = vec![rhs_type]; + let ret = self.id_type(operator_expr); + let env = Box::new(Type::Unit); + (Type::Function(args, Box::new(ret.clone()), env), ret) + } } impl Methods { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index 70f8f785d68..8cedeeeff0d 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -86,7 +86,8 @@ pub(crate) fn get_program( program.clone().into_sorted(), root_file_id, use_legacy, - &[], // No macro processors + None, // No debug_comptime_in_file + &[], // No macro processors )); } (program, context, errors) diff --git a/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr b/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr index c218ecd2348..a56d6f60390 100644 --- a/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr @@ -14,7 +14,7 @@ impl BoundedVec { /// Get an element from the vector at the given index. /// Panics if the given index points beyond the end of the vector (`self.len()`). pub fn get(self, index: u32) -> T { - assert(index < self.len); + assert(index < self.len, "Attempted to read past end of BoundedVec"); self.get_unchecked(index) } @@ -152,25 +152,16 @@ impl From<[T; Len]> for BoundedVec } mod bounded_vec_tests { - // TODO: Allow imports from "super" - use crate::collections::bounded_vec::BoundedVec; - #[test] - fn empty_equality() { - let mut bounded_vec1: BoundedVec = BoundedVec::new(); - let mut bounded_vec2: BoundedVec = BoundedVec::new(); - - assert_eq(bounded_vec1, bounded_vec2); - } + mod get { + use crate::collections::bounded_vec::BoundedVec; - #[test] - fn inequality() { - let mut bounded_vec1: BoundedVec = BoundedVec::new(); - let mut bounded_vec2: BoundedVec = BoundedVec::new(); - bounded_vec1.push(1); - bounded_vec2.push(2); + #[test(should_fail_with = "Attempted to read past end of BoundedVec")] + fn panics_when_reading_elements_past_end_of_vec() { + let vec: BoundedVec = BoundedVec::new(); - assert(bounded_vec1 != bounded_vec2); + crate::println(vec.get(0)); + } } mod set { @@ -295,4 +286,26 @@ mod bounded_vec_tests { assert_eq(bounded_vec.storage()[1], 2); } } + + mod trait_eq { + use crate::collections::bounded_vec::BoundedVec; + + #[test] + fn empty_equality() { + let mut bounded_vec1: BoundedVec = BoundedVec::new(); + let mut bounded_vec2: BoundedVec = BoundedVec::new(); + + assert_eq(bounded_vec1, bounded_vec2); + } + + #[test] + fn inequality() { + let mut bounded_vec1: BoundedVec = BoundedVec::new(); + let mut bounded_vec2: BoundedVec = BoundedVec::new(); + bounded_vec1.push(1); + bounded_vec2.push(2); + + assert(bounded_vec1 != bounded_vec2); + } + } } diff --git a/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr b/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr index 916943b737c..f02d2702d6b 100644 --- a/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr +++ b/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr @@ -1,6 +1,9 @@ use crate::ops::arith::{Add, Sub, Neg}; use crate::cmp::Eq; +/// A point on the embedded elliptic curve +/// By definition, the base field of the embedded curve is the scalar field of the proof system curve, i.e the Noir Field. +/// x and y denotes the Weierstrass coordinates of the point, if is_infinite is false. 
struct EmbeddedCurvePoint { x: Field, y: Field, @@ -8,32 +11,35 @@ struct EmbeddedCurvePoint { } impl EmbeddedCurvePoint { - fn new(x: Field, y: Field, is_infinite: bool) -> Self { - Self { x, y, is_infinite } - } - + /// Elliptic curve point doubling operation + /// Returns the doubled point of a point P, i.e. P+P fn double(self) -> EmbeddedCurvePoint { embedded_curve_add(self, self) } + /// Returns the null element of the curve; 'the point at infinity' fn point_at_infinity() -> EmbeddedCurvePoint { EmbeddedCurvePoint { x: 0, y: 0, is_infinite: true } } } impl Add for EmbeddedCurvePoint { + /// Adds two points P+Q, using the curve addition formula, and also handles point at infinity fn add(self, other: EmbeddedCurvePoint) -> EmbeddedCurvePoint { embedded_curve_add(self, other) } } impl Sub for EmbeddedCurvePoint { + /// Point subtraction operation, using addition and negation fn sub(self, other: EmbeddedCurvePoint) -> EmbeddedCurvePoint { self + other.neg() } } impl Neg for EmbeddedCurvePoint { + /// Negates a point P, i.e. returns -P, by negating the y coordinate. + /// If the point is at infinity, then the result is also at infinity. fn neg(self) -> EmbeddedCurvePoint { EmbeddedCurvePoint { x: self.x, @@ -44,12 +50,15 @@ impl Neg for EmbeddedCurvePoint { } impl Eq for EmbeddedCurvePoint { + /// Checks whether two points are equal fn eq(self: Self, b: EmbeddedCurvePoint) -> bool { (self.is_infinite & b.is_infinite) | ((self.is_infinite == b.is_infinite) & (self.x == b.x) & (self.y == b.y)) } } -// Scalar represented as low and high limbs +/// Scalar for the embedded curve represented as low and high limbs +/// By definition, the scalar field of the embedded curve is the base field of the proving system curve. +/// It may not fit into a Field element, so it is represented with two Field elements; its low and high limbs.
struct EmbeddedCurveScalar { lo: Field, hi: Field, @@ -60,10 +69,6 @@ impl EmbeddedCurveScalar { EmbeddedCurveScalar { lo, hi } } - pub fn derive_public_key(self) -> EmbeddedCurvePoint { - fixed_base_scalar_mul(self) - } - #[field(bn254)] fn from_field(scalar: Field) -> EmbeddedCurveScalar { let (a,b) = crate::field::bn254::decompose(scalar); @@ -72,8 +77,8 @@ impl EmbeddedCurveScalar { } impl Eq for EmbeddedCurveScalar { - fn eq(self, key: EmbeddedCurveScalar) -> bool { - (key.hi == self.hi) & (key.lo == self.lo) + fn eq(self, other: Self) -> bool { + (other.hi == self.hi) & (other.lo == self.lo) } } diff --git a/noir/noir-repo/scripts/bump-bb.sh b/noir/noir-repo/scripts/bump-bb.sh new file mode 100755 index 00000000000..36c1f78ca05 --- /dev/null +++ b/noir/noir-repo/scripts/bump-bb.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +BB_VERSION=$1 + +sed -i.bak "s/^VERSION=.*/VERSION=\"$BB_VERSION\"/" ./scripts/install_bb.sh && rm ./scripts/install_bb.sh.bak + +tmp=$(mktemp) +BACKEND_BARRETENBERG_PACKAGE_JSON=./tooling/noir_js_backend_barretenberg/package.json +jq --arg v $BB_VERSION '.dependencies."@aztec/bb.js" = $v' $BACKEND_BARRETENBERG_PACKAGE_JSON > $tmp && mv $tmp $BACKEND_BARRETENBERG_PACKAGE_JSON + +YARN_ENABLE_IMMUTABLE_INSTALLS=false yarn install diff --git a/noir/noir-repo/scripts/install_bb.sh b/noir/noir-repo/scripts/install_bb.sh index b0d55b6ff1d..95dcfdda880 100755 --- a/noir/noir-repo/scripts/install_bb.sh +++ b/noir/noir-repo/scripts/install_bb.sh @@ -1,6 +1,6 @@ #!/bin/bash -VERSION="0.43.0" +VERSION="0.46.1" BBUP_PATH=~/.bb/bbup diff --git a/noir/noir-repo/test_programs/compile_failure/unary_not_on_field_type_variable/Nargo.toml b/noir/noir-repo/test_programs/compile_failure/unary_not_on_field_type_variable/Nargo.toml new file mode 100644 index 00000000000..ae5c9ecb9c9 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_failure/unary_not_on_field_type_variable/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "unary_not_on_field_type_variable" +type = "bin" +authors = [""] +compiler_version = "" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_failure/unary_not_on_field_type_variable/src/main.nr b/noir/noir-repo/test_programs/compile_failure/unary_not_on_field_type_variable/src/main.nr new file mode 100644 index 00000000000..1023f753c32 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_failure/unary_not_on_field_type_variable/src/main.nr @@ -0,0 +1,4 @@ +fn main() { + let num: Field = 0; + assert_eq(!0, num); +} diff --git a/noir/noir-repo/test_programs/execution_success/binary_operator_overloading/Nargo.toml b/noir/noir-repo/test_programs/execution_success/binary_operator_overloading/Nargo.toml new file mode 100644 index 00000000000..a43f38bdf30 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/binary_operator_overloading/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "binary_operator_overloading" +type = "bin" +authors = [""] +compiler_version = ">=0.20.0" + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/operator_overloading/Prover.toml b/noir/noir-repo/test_programs/execution_success/binary_operator_overloading/Prover.toml similarity index 100% rename from noir/noir-repo/test_programs/execution_success/operator_overloading/Prover.toml rename to noir/noir-repo/test_programs/execution_success/binary_operator_overloading/Prover.toml diff --git a/noir/noir-repo/test_programs/execution_success/operator_overloading/src/main.nr 
b/noir/noir-repo/test_programs/execution_success/binary_operator_overloading/src/main.nr similarity index 100% rename from noir/noir-repo/test_programs/execution_success/operator_overloading/src/main.nr rename to noir/noir-repo/test_programs/execution_success/binary_operator_overloading/src/main.nr diff --git a/noir/noir-repo/test_programs/execution_success/simple_shield/src/main.nr b/noir/noir-repo/test_programs/execution_success/simple_shield/src/main.nr index 66ac0a2b313..fd2fc20d08f 100644 --- a/noir/noir-repo/test_programs/execution_success/simple_shield/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/simple_shield/src/main.nr @@ -10,7 +10,7 @@ fn main( to_pubkey_x: Field, to_pubkey_y: Field ) -> pub [Field; 2] { - let priv_key_as_scalar = std::embedded_curve_ops::EmbeddedCurveScalar::new(priv_key, 0); + let priv_key_as_scalar = std::embedded_curve_ops::EmbeddedCurveScalar { lo: priv_key, hi: 0 }; // Compute public key from private key to show ownership let pubkey = std::embedded_curve_ops::fixed_base_scalar_mul(priv_key_as_scalar); // Compute input note commitment diff --git a/noir/noir-repo/test_programs/execution_success/operator_overloading/Nargo.toml b/noir/noir-repo/test_programs/execution_success/unary_operator_overloading/Nargo.toml similarity index 70% rename from noir/noir-repo/test_programs/execution_success/operator_overloading/Nargo.toml rename to noir/noir-repo/test_programs/execution_success/unary_operator_overloading/Nargo.toml index 7f9f18ff567..ebc88faaaf4 100644 --- a/noir/noir-repo/test_programs/execution_success/operator_overloading/Nargo.toml +++ b/noir/noir-repo/test_programs/execution_success/unary_operator_overloading/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "operator_overloading" +name = "unary_operator_overloading" type = "bin" authors = [""] compiler_version = ">=0.20.0" diff --git a/noir/noir-repo/test_programs/execution_success/unary_operator_overloading/Prover.toml b/noir/noir-repo/test_programs/execution_success/unary_operator_overloading/Prover.toml new file mode 100644 index 00000000000..b95c23a4d31 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/unary_operator_overloading/Prover.toml @@ -0,0 +1 @@ +x = 3 diff --git a/noir/noir-repo/test_programs/execution_success/unary_operator_overloading/src/main.nr b/noir/noir-repo/test_programs/execution_success/unary_operator_overloading/src/main.nr new file mode 100644 index 00000000000..20cdac51434 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/unary_operator_overloading/src/main.nr @@ -0,0 +1,36 @@ +use std::ops::{Neg, Not}; + +// x = 3 +fn main(x: u32) { + let wx = Wrapper::new(x as i32); + let ex: i32 = 3; + + assert((-wx).inner == -ex); + assert((!wx).inner == !ex); + + // Check that it works with type variables (x's type isn't immediately known) + let x = 3; + assert(-3 == -x); +} + +struct Wrapper { + inner: i32 +} + +impl Wrapper { + fn new(inner: i32) -> Self { + Wrapper { inner } + } +} + +impl Neg for Wrapper { + fn neg(self) -> Wrapper { + Wrapper::new(-self.inner) + } +} + +impl Not for Wrapper { + fn not(self) -> Wrapper { + Wrapper::new(!self.inner) + } +} diff --git a/noir/noir-repo/test_programs/gates_report.sh b/noir/noir-repo/test_programs/gates_report.sh index b33e81b037c..6c901ff24bc 100755 --- a/noir/noir-repo/test_programs/gates_report.sh +++ b/noir/noir-repo/test_programs/gates_report.sh @@ -4,7 +4,7 @@ set -e BACKEND=${BACKEND:-bb} # These tests are incompatible with gas reporting -excluded_dirs=("workspace" 
"workspace_default_member" "double_verify_nested_proof") +excluded_dirs=("workspace" "workspace_default_member" "databus") current_dir=$(pwd) artifacts_path="$current_dir/acir_artifacts" @@ -18,6 +18,10 @@ NUM_ARTIFACTS=$(ls -1q "$artifacts_path" | wc -l) ITER="1" for pathname in $test_dirs; do ARTIFACT_NAME=$(basename "$pathname") + if [[ " ${excluded_dirs[@]} " =~ "$ARTIFACT_NAME" ]]; then + ITER=$(( $ITER + 1 )) + continue + fi GATES_INFO=$($BACKEND gates -b "$artifacts_path/$ARTIFACT_NAME/target/program.json") MAIN_FUNCTION_INFO=$(echo $GATES_INFO | jq -r '.functions[0] | .name = "main"') diff --git a/noir/noir-repo/tooling/lsp/src/lib.rs b/noir/noir-repo/tooling/lsp/src/lib.rs index e47dfff714a..b62f97a4918 100644 --- a/noir/noir-repo/tooling/lsp/src/lib.rs +++ b/noir/noir-repo/tooling/lsp/src/lib.rs @@ -373,7 +373,8 @@ fn prepare_package_from_source_string() { let mut state = LspState::new(&client, acvm::blackbox_solver::StubbedBlackBoxSolver); let (mut context, crate_id) = crate::prepare_source(source.to_string(), &mut state); - let _check_result = noirc_driver::check_crate(&mut context, crate_id, false, false, false); + let _check_result = + noirc_driver::check_crate(&mut context, crate_id, false, false, false, None); let main_func_id = context.get_main_function(&crate_id); assert!(main_func_id.is_some()); } diff --git a/noir/noir-repo/tooling/lsp/src/notifications/mod.rs b/noir/noir-repo/tooling/lsp/src/notifications/mod.rs index ed0ac13fae5..46a7b1cf866 100644 --- a/noir/noir-repo/tooling/lsp/src/notifications/mod.rs +++ b/noir/noir-repo/tooling/lsp/src/notifications/mod.rs @@ -56,7 +56,7 @@ pub(super) fn on_did_change_text_document( state.input_files.insert(params.text_document.uri.to_string(), text.clone()); let (mut context, crate_id) = prepare_source(text, state); - let _ = check_crate(&mut context, crate_id, false, false, false); + let _ = check_crate(&mut context, crate_id, false, false, false, None); let workspace = match resolve_workspace_for_source_path( params.text_document.uri.to_file_path().unwrap().as_path(), @@ -139,10 +139,11 @@ fn process_noir_document( let (mut context, crate_id) = crate::prepare_package(&workspace_file_manager, &parsed_files, package); - let file_diagnostics = match check_crate(&mut context, crate_id, false, false, false) { - Ok(((), warnings)) => warnings, - Err(errors_and_warnings) => errors_and_warnings, - }; + let file_diagnostics = + match check_crate(&mut context, crate_id, false, false, false, None) { + Ok(((), warnings)) => warnings, + Err(errors_and_warnings) => errors_and_warnings, + }; let package_root_dir: String = package.root_dir.as_os_str().to_string_lossy().into(); @@ -190,6 +191,7 @@ fn process_noir_document( let severity = match diagnostic.kind { DiagnosticKind::Error => DiagnosticSeverity::ERROR, DiagnosticKind::Warning => DiagnosticSeverity::WARNING, + DiagnosticKind::Info => DiagnosticSeverity::INFORMATION, DiagnosticKind::Bug => DiagnosticSeverity::WARNING, }; Some(Diagnostic { diff --git a/noir/noir-repo/tooling/lsp/src/requests/code_lens_request.rs b/noir/noir-repo/tooling/lsp/src/requests/code_lens_request.rs index 744bddedd9d..0b8803edc6f 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/code_lens_request.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/code_lens_request.rs @@ -67,7 +67,7 @@ fn on_code_lens_request_inner( let (mut context, crate_id) = prepare_source(source_string, state); // We ignore the warnings and errors produced by compilation for producing code lenses // because we can still get the test 
functions even if compilation fails - let _ = check_crate(&mut context, crate_id, false, false, false); + let _ = check_crate(&mut context, crate_id, false, false, false, None); let collected_lenses = collect_lenses_for_package(&context, crate_id, &workspace, package, None); diff --git a/noir/noir-repo/tooling/lsp/src/requests/mod.rs b/noir/noir-repo/tooling/lsp/src/requests/mod.rs index 266b5b124ac..48299ff7459 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/mod.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/mod.rs @@ -294,7 +294,7 @@ where interner = def_interner; } else { // We ignore the warnings and errors produced by compilation while resolving the definition - let _ = noirc_driver::check_crate(&mut context, crate_id, false, false, false); + let _ = noirc_driver::check_crate(&mut context, crate_id, false, false, false, None); interner = &context.def_interner; } diff --git a/noir/noir-repo/tooling/lsp/src/requests/rename.rs b/noir/noir-repo/tooling/lsp/src/requests/rename.rs index ac6c6792e15..906a5cbcaab 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/rename.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/rename.rs @@ -199,4 +199,9 @@ mod rename_tests { async fn test_rename_local_variable() { check_rename_succeeds("local_variable", "some_var").await; } + + #[test] + async fn test_rename_struct_member() { + check_rename_succeeds("struct_member", "some_member").await; + } } diff --git a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs index acd4f5800f3..b4b9b62d6b6 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs @@ -59,7 +59,7 @@ fn on_test_run_request_inner( Some(package) => { let (mut context, crate_id) = crate::prepare_package(&workspace_file_manager, &parsed_files, package); - if check_crate(&mut context, crate_id, false, false, false).is_err() { + if check_crate(&mut context, crate_id, false, false, false, None).is_err() { let result = NargoTestRunResult { id: params.id.clone(), result: "error".to_string(), diff --git a/noir/noir-repo/tooling/lsp/src/requests/tests.rs b/noir/noir-repo/tooling/lsp/src/requests/tests.rs index a2aa3ebc0bf..fb8b845df04 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/tests.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/tests.rs @@ -61,7 +61,7 @@ fn on_tests_request_inner( crate::prepare_package(&workspace_file_manager, &parsed_files, package); // We ignore the warnings and errors produced by compilation for producing tests // because we can still get the test functions even if compilation fails - let _ = check_crate(&mut context, crate_id, false, false, false); + let _ = check_crate(&mut context, crate_id, false, false, false, None); // We don't add test headings for a package if it contains no `#[test]` functions get_package_tests_in_crate(&context, &crate_id, &package.name) diff --git a/noir/noir-repo/tooling/lsp/test_programs/struct_member/Nargo.toml b/noir/noir-repo/tooling/lsp/test_programs/struct_member/Nargo.toml new file mode 100644 index 00000000000..5272b9abb68 --- /dev/null +++ b/noir/noir-repo/tooling/lsp/test_programs/struct_member/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "struct_member" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/tooling/lsp/test_programs/struct_member/src/main.nr b/noir/noir-repo/tooling/lsp/test_programs/struct_member/src/main.nr new file mode 100644 index 00000000000..3f1bac9df66 --- /dev/null +++ 
b/noir/noir-repo/tooling/lsp/test_programs/struct_member/src/main.nr @@ -0,0 +1,12 @@ +struct Foo { + some_member: Field +} + +fn main() { + let mut foo = Foo { some_member: 1 }; + foo.some_member = 2; + let _ = foo.some_member; + + let Foo { some_member } = foo; + let Foo { some_member: some_var } = foo; +} diff --git a/noir/noir-repo/tooling/nargo_cli/build.rs b/noir/noir-repo/tooling/nargo_cli/build.rs index 3789595aa26..9dfa0dfe861 100644 --- a/noir/noir-repo/tooling/nargo_cli/build.rs +++ b/noir/noir-repo/tooling/nargo_cli/build.rs @@ -61,7 +61,7 @@ const IGNORED_BRILLIG_TESTS: [&str; 11] = [ /// Certain features are only available in the elaborator. /// We skip these tests for non-elaborator code since they are not /// expected to work there. This can be removed once the old code is removed. -const IGNORED_NEW_FEATURE_TESTS: [&str; 8] = [ +const IGNORED_NEW_FEATURE_TESTS: [&str; 9] = [ "macros", "wildcard_type", "type_definition_annotation", @@ -69,6 +69,7 @@ const IGNORED_NEW_FEATURE_TESTS: [&str; 8] = [ "derive_impl", "comptime_traits", "comptime_slice_methods", + "unary_operator_overloading", "unquote_multiple_items_from_annotation", ]; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs index 2db3b10429a..95726492418 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs @@ -88,6 +88,7 @@ fn check_package( compile_options.disable_macros, compile_options.silence_warnings, compile_options.use_legacy, + compile_options.debug_comptime_in_file.as_deref(), )?; if package.is_library() || package.is_contract() { @@ -161,8 +162,16 @@ pub(crate) fn check_crate_and_report_errors( disable_macros: bool, silence_warnings: bool, use_legacy: bool, + debug_comptime_in_file: Option<&str>, ) -> Result<(), CompileError> { - let result = check_crate(context, crate_id, deny_warnings, disable_macros, use_legacy); + let result = check_crate( + context, + crate_id, + deny_warnings, + disable_macros, + use_legacy, + debug_comptime_in_file, + ); report_errors(result, &context.file_manager, deny_warnings, silence_warnings) } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs index ee30b29b0f0..105190c653f 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs @@ -90,6 +90,7 @@ fn compile_exported_functions( compile_options.disable_macros, compile_options.silence_warnings, compile_options.use_legacy, + compile_options.debug_comptime_in_file.as_deref(), )?; let exported_functions = context.get_all_exported_functions_in_crate(&crate_id); diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs index de9e8dc5d7c..27c66c956d9 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs @@ -177,6 +177,7 @@ fn run_test + Default>( compile_options.deny_warnings, compile_options.disable_macros, compile_options.use_legacy, + compile_options.debug_comptime_in_file.as_deref(), ) .expect("Any errors should have occurred when collecting test functions"); @@ -244,6 +245,7 @@ fn get_tests_in_package( compile_options.disable_macros, compile_options.silence_warnings, compile_options.use_legacy, + compile_options.debug_comptime_in_file.as_deref(), )?; Ok(context diff --git 
a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs index bf6614860e2..10711b6d011 100644 --- a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs +++ b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs @@ -32,7 +32,7 @@ fn run_stdlib_tests() { let (mut context, dummy_crate_id) = prepare_package(&file_manager, &parsed_files, &dummy_package); - let result = check_crate(&mut context, dummy_crate_id, false, false, false); + let result = check_crate(&mut context, dummy_crate_id, false, false, false, None); report_errors(result, &context.file_manager, true, false) .expect("Error encountered while compiling standard library");
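Taken together, the front-end changes above (the new `trait_method_id` field on `HirPrefixExpression`, the `prefix_operator_traits` map in the `NodeInterner`, and `create_prefix_operator_impl_call` in the monomorphizer) make the prefix operators `-` and `!` dispatch to user-provided `Neg` and `Not` impls, just as infix operators already dispatch to the `Add`/`Sub`/`Eq` traits. A minimal Noir sketch of the resulting user-facing behaviour; the `Flags` type and its `bits` field are illustrative names, not part of this diff:

```noir
use std::ops::Not;

// Illustrative type; any struct with a `Not` impl behaves the same way.
struct Flags {
    bits: u8,
}

impl Not for Flags {
    fn not(self) -> Flags {
        // Reuse the built-in `!` on u8 to flip every bit.
        Flags { bits: !self.bits }
    }
}

fn main() {
    let f = Flags { bits: 10 };
    // With this change, `!f` is lowered during monomorphization to a call of the
    // selected `Not::not` impl, so the two expressions below are equivalent.
    assert((!f).bits == f.not().bits);
}
```

The `unary_operator_overloading` execution test added in this diff exercises the same lowering for both `Neg` and `Not` on an `i32` wrapper type.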