From e349f30b60a473e2068afafb6fae4a4ea50d185b Mon Sep 17 00:00:00 2001 From: Ary Borenszweig Date: Mon, 2 Sep 2024 18:07:31 -0300 Subject: [PATCH 01/26] feat: use visibility (#5856) # Description ## Problem Part of #4515 ## Summary We recently added a warning for unused imports... but only for bin and contract packages. We didn't enable it for lib packages because a `use` could also be used as `pub use`, something we don't have yet. I thought it would be really nice if we had `pub use` and could warn on unused imports in libs too. I checked the code and we already track visibility for every item; it's just that for items that don't allow a visibility modifier we simply consider them public. So I tried to see how difficult it would be to implement this, and it turned out it wasn't that hard or time-consuming. That said, visibility for `use` involves some extra logic, particularly for autocompletion, because now `pub use` should be suggested, but the "parent" module of such an item isn't its actual parent (it's the module where the `pub use` is defined); that was relatively straightforward to implement too. ## Additional Context If we decide to go forward with this, any existing `use` that was effectively acting as a `pub use` will likely start producing a warning in libs (there are a lot of them in Aztec-Packages), but those warnings can now be silenced by changing the imports to `pub use`. Where should this new feature be documented? I'm not sure if it should go in `dependencies.md` or `modules.md`. ## Documentation\* Check one: - [ ] No documentation needed. - [x] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings.
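For illustration, here is a minimal sketch of what this enables, mirroring the example added to `modules.md` in this PR (module and function names are just placeholders):

```rust
mod some_module {
    // Without `pub`, this import would now produce an unused-import warning in a lib
    // if nothing inside `some_module` uses it; marking it `pub` re-exports the names.
    pub use foo::{bar, baz};

    mod foo {
        pub fn bar() {}
        pub fn baz() {}
    }
}

fn main() {
    // The re-exported names are reachable through `some_module`,
    // even though the `foo` module itself is private.
    some_module::bar();
    some_module::baz();
}
```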
--------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- aztec_macros/src/utils/parse_utils.rs | 6 +- compiler/noirc_driver/src/lib.rs | 28 +---- compiler/noirc_frontend/src/ast/mod.rs | 14 ++- compiler/noirc_frontend/src/ast/statement.rs | 10 +- compiler/noirc_frontend/src/ast/visitor.rs | 11 +- .../noirc_frontend/src/elaborator/comptime.rs | 2 +- .../noirc_frontend/src/elaborator/scope.rs | 67 ++++++----- .../src/hir/def_collector/dc_crate.rs | 109 +++++++++++++---- .../src/hir/def_collector/dc_mod.rs | 1 + .../src/hir/def_collector/errors.rs | 10 +- .../src/hir/def_map/module_data.rs | 25 +--- .../src/hir/resolution/import.rs | 79 +++++++++--- .../src/hir/resolution/path_resolver.rs | 20 +++- compiler/noirc_frontend/src/lib.rs | 1 + compiler/noirc_frontend/src/locations.rs | 31 +++-- compiler/noirc_frontend/src/node_interner.rs | 8 +- .../noirc_frontend/src/noir_parser.lalrpop | 2 +- compiler/noirc_frontend/src/parser/mod.rs | 25 ++-- compiler/noirc_frontend/src/parser/parser.rs | 15 ++- .../src/parser/parser/function.rs | 19 +-- .../src/parser/parser/visibility.rs | 27 +++++ compiler/noirc_frontend/src/tests.rs | 112 +++++++++++++++++- compiler/noirc_frontend/src/usage_tracker.rs | 26 ++++ .../noir/modules_packages_crates/modules.md | 28 ++++- noir_stdlib/src/array.nr | 1 - noir_stdlib/src/collections/map.nr | 1 - noir_stdlib/src/collections/umap.nr | 5 +- noir_stdlib/src/ec/consts/te.nr | 1 - noir_stdlib/src/eddsa.nr | 3 +- noir_stdlib/src/field/bn254.nr | 2 +- noir_stdlib/src/hash/mod.nr | 2 +- noir_stdlib/src/hash/poseidon/bn254.nr | 2 +- noir_stdlib/src/hash/poseidon/bn254/perm.nr | 1 - noir_stdlib/src/ops/mod.nr | 4 +- noir_stdlib/src/prelude.nr | 18 +-- noir_stdlib/src/sha256.nr | 2 +- noir_stdlib/src/sha512.nr | 2 +- noir_stdlib/src/uint128.nr | 1 - tooling/lsp/src/notifications/mod.rs | 8 +- tooling/lsp/src/requests/code_action.rs | 38 ++++-- tooling/lsp/src/requests/code_action/tests.rs | 35 ++++++ tooling/lsp/src/requests/completion.rs | 54 ++------- .../src/requests/completion/auto_import.rs | 38 ++++-- .../requests/completion/completion_items.rs | 47 +++----- tooling/lsp/src/requests/completion/kinds.rs | 12 -- tooling/lsp/src/requests/completion/tests.rs | 86 ++++++++++++-- tooling/nargo/src/package.rs | 7 -- tooling/nargo_cli/src/cli/check_cmd.rs | 11 +- tooling/nargo_cli/src/cli/export_cmd.rs | 6 +- tooling/nargo_cli/src/cli/test_cmd.rs | 11 +- tooling/nargo_fmt/src/rewrite/imports.rs | 16 ++- tooling/nargo_fmt/src/visitor/item.rs | 6 +- 52 files changed, 735 insertions(+), 361 deletions(-) create mode 100644 compiler/noirc_frontend/src/parser/parser/visibility.rs create mode 100644 compiler/noirc_frontend/src/usage_tracker.rs diff --git a/aztec_macros/src/utils/parse_utils.rs b/aztec_macros/src/utils/parse_utils.rs index f2998fbaafc..6a2a876e682 100644 --- a/aztec_macros/src/utils/parse_utils.rs +++ b/aztec_macros/src/utils/parse_utils.rs @@ -50,7 +50,7 @@ fn empty_item(item: &mut Item) { empty_parsed_submodule(parsed_submodule); } ItemKind::ModuleDecl(module_declaration) => empty_module_declaration(module_declaration), - ItemKind::Import(use_tree) => empty_use_tree(use_tree), + ItemKind::Import(use_tree, _) => empty_use_tree(use_tree), ItemKind::Struct(noir_struct) => empty_noir_struct(noir_struct), ItemKind::TypeAlias(noir_type_alias) => empty_noir_type_alias(noir_type_alias), } @@ -404,9 +404,9 @@ fn empty_pattern(pattern: &mut Pattern) { } fn empty_unresolved_trait_constraints( - unresolved_trait_constriants: &mut 
[UnresolvedTraitConstraint], + unresolved_trait_constraints: &mut [UnresolvedTraitConstraint], ) { - for trait_constraint in unresolved_trait_constriants.iter_mut() { + for trait_constraint in unresolved_trait_constraints.iter_mut() { empty_unresolved_trait_constraint(trait_constraint); } } diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index b7bb07ad64a..88918151366 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -131,18 +131,6 @@ pub struct CompileOptions { pub skip_underconstrained_check: bool, } -#[derive(Clone, Debug, Default)] -pub struct CheckOptions { - pub compile_options: CompileOptions, - pub error_on_unused_imports: bool, -} - -impl CheckOptions { - pub fn new(compile_options: &CompileOptions, error_on_unused_imports: bool) -> Self { - Self { compile_options: compile_options.clone(), error_on_unused_imports } - } -} - pub fn parse_expression_width(input: &str) -> Result { use std::io::{Error, ErrorKind}; let width = input @@ -290,20 +278,19 @@ pub fn add_dep( pub fn check_crate( context: &mut Context, crate_id: CrateId, - check_options: &CheckOptions, + options: &CompileOptions, ) -> CompilationResult<()> { - let options = &check_options.compile_options; - let macros: &[&dyn MacroProcessor] = if options.disable_macros { &[] } else { &[&aztec_macros::AztecMacro] }; let mut errors = vec![]; + let error_on_unused_imports = true; let diagnostics = CrateDefMap::collect_defs( crate_id, context, options.debug_comptime_in_file.as_deref(), options.arithmetic_generics, - check_options.error_on_unused_imports, + error_on_unused_imports, macros, ); errors.extend(diagnostics.into_iter().map(|(error, file_id)| { @@ -337,10 +324,7 @@ pub fn compile_main( options: &CompileOptions, cached_program: Option, ) -> CompilationResult { - let error_on_unused_imports = true; - let check_options = CheckOptions::new(options, error_on_unused_imports); - - let (_, mut warnings) = check_crate(context, crate_id, &check_options)?; + let (_, mut warnings) = check_crate(context, crate_id, options)?; let main = context.get_main_function(&crate_id).ok_or_else(|| { // TODO(#2155): This error might be a better to exist in Nargo @@ -375,9 +359,7 @@ pub fn compile_contract( crate_id: CrateId, options: &CompileOptions, ) -> CompilationResult { - let error_on_unused_imports = true; - let check_options = CheckOptions::new(options, error_on_unused_imports); - let (_, warnings) = check_crate(context, crate_id, &check_options)?; + let (_, warnings) = check_crate(context, crate_id, options)?; // TODO: We probably want to error if contracts is empty let contracts = context.get_all_contracts(&crate_id); diff --git a/compiler/noirc_frontend/src/ast/mod.rs b/compiler/noirc_frontend/src/ast/mod.rs index e63222bfc87..3fd63249201 100644 --- a/compiler/noirc_frontend/src/ast/mod.rs +++ b/compiler/noirc_frontend/src/ast/mod.rs @@ -460,12 +460,22 @@ impl UnresolvedTypeExpression { } } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] /// Represents whether the definition can be referenced outside its module/crate pub enum ItemVisibility { - Public, Private, PublicCrate, + Public, +} + +impl std::fmt::Display for ItemVisibility { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ItemVisibility::Public => write!(f, "pub"), + ItemVisibility::Private => Ok(()), + ItemVisibility::PublicCrate => write!(f, "pub(crate)"), + } + } } #[derive(Clone, Copy, Debug, 
PartialEq, Eq, Hash, Serialize, Deserialize)] diff --git a/compiler/noirc_frontend/src/ast/statement.rs b/compiler/noirc_frontend/src/ast/statement.rs index 2fc08e1aea1..2e14761a1cc 100644 --- a/compiler/noirc_frontend/src/ast/statement.rs +++ b/compiler/noirc_frontend/src/ast/statement.rs @@ -7,7 +7,7 @@ use iter_extended::vecmap; use noirc_errors::{Span, Spanned}; use super::{ - BlockExpression, Expression, ExpressionKind, GenericTypeArgs, IndexExpression, + BlockExpression, Expression, ExpressionKind, GenericTypeArgs, IndexExpression, ItemVisibility, MemberAccessExpression, MethodCallExpression, UnresolvedType, }; use crate::elaborator::types::SELF_TYPE_NAME; @@ -302,6 +302,7 @@ impl std::fmt::Display for ModuleDeclaration { #[derive(Debug, PartialEq, Eq, Clone)] pub struct ImportStatement { + pub visibility: ItemVisibility, pub path: Path, pub alias: Option, } @@ -350,7 +351,7 @@ pub enum UseTreeKind { } impl UseTree { - pub fn desugar(self, root: Option) -> Vec { + pub fn desugar(self, root: Option, visibility: ItemVisibility) -> Vec { let prefix = if let Some(mut root) = root { root.segments.extend(self.prefix.segments); root @@ -360,10 +361,11 @@ impl UseTree { match self.kind { UseTreeKind::Path(name, alias) => { - vec![ImportStatement { path: prefix.join(name), alias }] + vec![ImportStatement { visibility, path: prefix.join(name), alias }] } UseTreeKind::List(trees) => { - trees.into_iter().flat_map(|tree| tree.desugar(Some(prefix.clone()))).collect() + let trees = trees.into_iter(); + trees.flat_map(|tree| tree.desugar(Some(prefix.clone()), visibility)).collect() } } } diff --git a/compiler/noirc_frontend/src/ast/visitor.rs b/compiler/noirc_frontend/src/ast/visitor.rs index 96183d3322f..3955e50b03e 100644 --- a/compiler/noirc_frontend/src/ast/visitor.rs +++ b/compiler/noirc_frontend/src/ast/visitor.rs @@ -21,8 +21,9 @@ use crate::{ }; use super::{ - FunctionReturnType, GenericTypeArgs, IntegerBitSize, Pattern, Signedness, UnresolvedGenerics, - UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, + FunctionReturnType, GenericTypeArgs, IntegerBitSize, ItemVisibility, Pattern, Signedness, + UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, + UnresolvedTypeExpression, }; /// Implements the [Visitor pattern](https://en.wikipedia.org/wiki/Visitor_pattern) for Noir's AST. @@ -252,7 +253,7 @@ pub trait Visitor { true } - fn visit_import(&mut self, _: &UseTree) -> bool { + fn visit_import(&mut self, _: &UseTree, _visibility: ItemVisibility) -> bool { true } @@ -470,8 +471,8 @@ impl Item { } } ItemKind::Trait(noir_trait) => noir_trait.accept(self.span, visitor), - ItemKind::Import(use_tree) => { - if visitor.visit_import(use_tree) { + ItemKind::Import(use_tree, visibility) => { + if visitor.visit_import(use_tree, *visibility) { use_tree.accept(visitor); } } diff --git a/compiler/noirc_frontend/src/elaborator/comptime.rs b/compiler/noirc_frontend/src/elaborator/comptime.rs index 3e71f167802..baa9c0ab371 100644 --- a/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -333,7 +333,7 @@ impl<'context> Elaborator<'context> { TopLevelStatement::Error => (), TopLevelStatement::Module(_) - | TopLevelStatement::Import(_) + | TopLevelStatement::Import(..) 
| TopLevelStatement::Struct(_) | TopLevelStatement::Trait(_) | TopLevelStatement::Impl(_) diff --git a/compiler/noirc_frontend/src/elaborator/scope.rs b/compiler/noirc_frontend/src/elaborator/scope.rs index a51fd737f74..7a98e1856b3 100644 --- a/compiler/noirc_frontend/src/elaborator/scope.rs +++ b/compiler/noirc_frontend/src/elaborator/scope.rs @@ -60,12 +60,6 @@ impl<'context> Elaborator<'context> { let mut module_id = self.module_id(); let mut path = path; - if path.kind == PathKind::Plain { - let def_map = self.def_maps.get_mut(&self.crate_id).unwrap(); - let module_data = &mut def_map.modules[module_id.local_id.0]; - module_data.use_import(&path.segments[0].ident); - } - if path.kind == PathKind::Plain && path.first_name() == SELF_TYPE_NAME { if let Some(Type::Struct(struct_type, _)) = &self.self_type { let struct_type = struct_type.borrow(); @@ -90,34 +84,47 @@ impl<'context> Elaborator<'context> { fn resolve_path_in_module(&mut self, path: Path, module_id: ModuleId) -> PathResolutionResult { let resolver = StandardPathResolver::new(module_id); - let path_resolution; - - if self.interner.lsp_mode { - let last_segment = path.last_ident(); - let location = Location::new(last_segment.span(), self.file); - let is_self_type_name = last_segment.is_self_type_name(); - - let mut references: Vec<_> = Vec::new(); - path_resolution = - resolver.resolve(self.def_maps, path.clone(), &mut Some(&mut references))?; - - for (referenced, segment) in references.iter().zip(path.segments) { - self.interner.add_reference( - *referenced, - Location::new(segment.ident.span(), self.file), - segment.ident.is_self_type_name(), - ); - } - self.interner.add_module_def_id_reference( - path_resolution.module_def_id, - location, - is_self_type_name, + if !self.interner.lsp_mode { + return resolver.resolve( + self.def_maps, + path, + &mut self.interner.usage_tracker, + &mut None, + ); + } + + let last_segment = path.last_ident(); + let location = Location::new(last_segment.span(), self.file); + let is_self_type_name = last_segment.is_self_type_name(); + + let mut references: Vec<_> = Vec::new(); + let path_resolution = resolver.resolve( + self.def_maps, + path.clone(), + &mut self.interner.usage_tracker, + &mut Some(&mut references), + ); + + for (referenced, segment) in references.iter().zip(path.segments) { + self.interner.add_reference( + *referenced, + Location::new(segment.ident.span(), self.file), + segment.ident.is_self_type_name(), ); - } else { - path_resolution = resolver.resolve(self.def_maps, path, &mut None)?; } + let path_resolution = match path_resolution { + Ok(path_resolution) => path_resolution, + Err(err) => return Err(err), + }; + + self.interner.add_module_def_id_reference( + path_resolution.module_def_id, + location, + is_self_type_name, + ); + Ok(path_resolution) } diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 30c91b42b2e..6a6cabe593d 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -19,8 +19,8 @@ use crate::node_interner::{ }; use crate::ast::{ - ExpressionKind, GenericTypeArgs, Ident, LetStatement, Literal, NoirFunction, NoirStruct, - NoirTrait, NoirTypeAlias, Path, PathKind, PathSegment, UnresolvedGenerics, + ExpressionKind, GenericTypeArgs, Ident, ItemVisibility, LetStatement, Literal, NoirFunction, + NoirStruct, NoirTrait, NoirTypeAlias, Path, PathKind, PathSegment, UnresolvedGenerics, UnresolvedTraitConstraint, 
UnresolvedType, }; @@ -253,7 +253,7 @@ impl DefCollector { root_file_id: FileId, debug_comptime_in_file: Option<&str>, enable_arithmetic_generics: bool, - error_on_unused_imports: bool, + error_on_usage_tracker: bool, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; @@ -267,13 +267,13 @@ impl DefCollector { let crate_graph = &context.crate_graph[crate_id]; for dep in crate_graph.dependencies.clone() { - let error_on_unused_imports = false; + let error_on_usage_tracker = false; errors.extend(CrateDefMap::collect_defs( dep.crate_id, context, debug_comptime_in_file, enable_arithmetic_generics, - error_on_unused_imports, + error_on_usage_tracker, macro_processors, )); @@ -286,8 +286,8 @@ impl DefCollector { def_map.extern_prelude.insert(dep.as_name(), module_id); let location = dep_def_map[dep_def_root].location; - let attriutes = ModuleAttributes { name: dep.as_name(), location, parent: None }; - context.def_interner.add_module_attributes(module_id, attriutes); + let attributes = ModuleAttributes { name: dep.as_name(), location, parent: None }; + context.def_interner.add_module_attributes(module_id, attributes); } // At this point, all dependencies are resolved and type checked. @@ -328,6 +328,7 @@ impl DefCollector { crate_id, &collected_import, &context.def_maps, + &mut context.def_interner.usage_tracker, &mut Some(&mut references), ); @@ -345,31 +346,89 @@ impl DefCollector { resolved_import } else { - resolve_import(crate_id, &collected_import, &context.def_maps, &mut None) + resolve_import( + crate_id, + &collected_import, + &context.def_maps, + &mut context.def_interner.usage_tracker, + &mut None, + ) }; match resolved_import { Ok(resolved_import) => { + let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); + let file_id = current_def_map.file_id(module_id); + if let Some(error) = resolved_import.error { errors.push(( DefCollectorErrorKind::PathResolutionError(error).into(), - root_file_id, + file_id, )); } // Populate module namespaces according to the imports used - let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); - let name = resolved_import.name; - for ns in resolved_import.resolved_namespace.iter_defs() { + let visibility = collected_import.visibility; + let is_prelude = resolved_import.is_prelude; + for (module_def_id, item_visibility, _) in + resolved_import.resolved_namespace.iter_items() + { + if item_visibility < visibility { + errors.push(( + DefCollectorErrorKind::CannotReexportItemWithLessVisibility { + item_name: name.clone(), + desired_visibility: visibility, + } + .into(), + file_id, + )); + } + let visibility = visibility.min(item_visibility); + let result = current_def_map.modules[resolved_import.module_scope.0] - .import(name.clone(), ns, resolved_import.is_prelude); + .import(name.clone(), visibility, module_def_id, is_prelude); + + // Empty spans could come from implicitly injected imports, and we don't want to track those + if visibility != ItemVisibility::Public + && name.span().start() < name.span().end() + { + let module_id = ModuleId { + krate: crate_id, + local_id: resolved_import.module_scope, + }; + + context + .def_interner + .usage_tracker + .add_unused_import(module_id, name.clone()); + } + + if visibility != ItemVisibility::Private { + let local_id = resolved_import.module_scope; + let defining_module = ModuleId { krate: crate_id, local_id }; + context.def_interner.register_name_for_auto_import( + name.to_string(), + module_def_id, + 
visibility, + Some(defining_module), + ); + } - let file_id = current_def_map.file_id(module_id); let last_segment = collected_import.path.last_ident(); - add_import_reference(ns, &last_segment, &mut context.def_interner, file_id); + add_import_reference( + module_def_id, + &last_segment, + &mut context.def_interner, + file_id, + ); if let Some(ref alias) = collected_import.alias { - add_import_reference(ns, alias, &mut context.def_interner, file_id); + add_import_reference( + module_def_id, + alias, + &mut context.def_interner, + file_id, + ); } if let Err((first_def, second_def)) = result { @@ -417,20 +476,24 @@ impl DefCollector { ); } - if error_on_unused_imports { - Self::check_unused_imports(context, crate_id, &mut errors); + if error_on_usage_tracker { + Self::check_usage_tracker(context, crate_id, &mut errors); } errors } - fn check_unused_imports( + fn check_usage_tracker( context: &Context, crate_id: CrateId, errors: &mut Vec<(CompilationError, FileId)>, ) { - errors.extend(context.def_maps[&crate_id].modules().iter().flat_map(|(_, module)| { - module.unused_imports().iter().map(|ident| { + let unused_imports = context.def_interner.usage_tracker.unused_imports().iter(); + let unused_imports = unused_imports.filter(|(module_id, _)| module_id.krate == crate_id); + + errors.extend(unused_imports.flat_map(|(module_id, usage_tracker)| { + let module = &context.def_maps[&crate_id].modules()[module_id.local_id.0]; + usage_tracker.iter().map(|ident| { let ident = ident.clone(); let error = CompilationError::ResolverError(ResolverError::UnusedImport { ident }); (error, module.location.file) @@ -456,7 +519,7 @@ fn add_import_reference( fn inject_prelude( crate_id: CrateId, - context: &Context, + context: &mut Context, crate_root: LocalModuleId, collected_imports: &mut Vec, ) { @@ -481,6 +544,7 @@ fn inject_prelude( &context.def_maps, ModuleId { krate: crate_id, local_id: crate_root }, path, + &mut context.def_interner.usage_tracker, &mut None, ) { assert!(error.is_none(), "Tried to add private item to prelude"); @@ -494,6 +558,7 @@ fn inject_prelude( collected_imports.insert( 0, ImportDirective { + visibility: ItemVisibility::Private, module_id: crate_root, path: Path { segments, kind: PathKind::Plain, span: Span::default() }, alias: None, diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 459c4869379..590cdc541ce 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -80,6 +80,7 @@ pub fn collect_defs( // Then add the imports to defCollector to resolve once all modules in the hierarchy have been resolved for import in ast.imports { collector.def_collector.imports.push(ImportDirective { + visibility: import.visibility, module_id: collector.module_id, path: import.path, alias: import.alias, diff --git a/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/compiler/noirc_frontend/src/hir/def_collector/errors.rs index e705d7b6fad..f931a7cdf41 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -1,4 +1,4 @@ -use crate::ast::{Ident, Path, UnresolvedTypeData}; +use crate::ast::{Ident, ItemVisibility, Path, UnresolvedTypeData}; use crate::hir::resolution::import::PathResolutionError; use crate::hir::type_check::generics::TraitGenerics; @@ -35,6 +35,8 @@ pub enum DefCollectorErrorKind { OverlappingModuleDecls { mod_name: Ident, expected_path: String, 
alternative_path: String }, #[error("path resolution error")] PathResolutionError(PathResolutionError), + #[error("cannot re-export {item_name} because it has less visibility than this use statement")] + CannotReexportItemWithLessVisibility { item_name: Ident, desired_visibility: ItemVisibility }, #[error("Non-struct type used in impl")] NonStructTypeInImpl { span: Span }, #[error("Cannot implement trait on a mutable reference type")] @@ -173,6 +175,12 @@ impl<'a> From<&'a DefCollectorErrorKind> for Diagnostic { ) } DefCollectorErrorKind::PathResolutionError(error) => error.into(), + DefCollectorErrorKind::CannotReexportItemWithLessVisibility{item_name, desired_visibility} => { + Diagnostic::simple_warning( + format!("cannot re-export {item_name} because it has less visibility than this use statement"), + format!("consider marking {item_name} as {desired_visibility}"), + item_name.span()) + } DefCollectorErrorKind::NonStructTypeInImpl { span } => Diagnostic::simple_error( "Non-struct type used in impl".into(), "Only struct types may have implementation methods".into(), diff --git a/compiler/noirc_frontend/src/hir/def_map/module_data.rs b/compiler/noirc_frontend/src/hir/def_map/module_data.rs index 7b14db8be77..f9542094be7 100644 --- a/compiler/noirc_frontend/src/hir/def_map/module_data.rs +++ b/compiler/noirc_frontend/src/hir/def_map/module_data.rs @@ -1,4 +1,4 @@ -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use noirc_errors::Location; @@ -24,10 +24,6 @@ pub struct ModuleData { /// True if this module is a `contract Foo { ... }` module containing contract functions pub is_contract: bool, - - /// List of all unused imports. Each time something is imported into this module it's added - /// to this set. When it's used, it's removed. At the end of the program only unused imports remain. - unused_imports: HashSet, } impl ModuleData { @@ -39,7 +35,6 @@ impl ModuleData { definitions: ItemScope::default(), location, is_contract, - unused_imports: HashSet::new(), } } @@ -123,15 +118,11 @@ impl ModuleData { pub fn import( &mut self, name: Ident, + visibility: ItemVisibility, id: ModuleDefId, is_prelude: bool, ) -> Result<(), (Ident, Ident)> { - // Empty spans could come from implicitly injected imports, and we don't want to track those - if name.span().start() < name.span().end() { - self.unused_imports.insert(name.clone()); - } - - self.scope.add_item_to_namespace(name, ItemVisibility::Public, id, None, is_prelude) + self.scope.add_item_to_namespace(name, visibility, id, None, is_prelude) } pub fn find_name(&self, name: &Ident) -> PerNs { @@ -147,14 +138,4 @@ impl ModuleData { pub fn value_definitions(&self) -> impl Iterator + '_ { self.definitions.values().values().flat_map(|a| a.values().map(|(id, _, _)| *id)) } - - /// Marks an ident as being used by an import. - pub fn use_import(&mut self, ident: &Ident) { - self.unused_imports.remove(ident); - } - - /// Returns the list of all unused imports at this moment. 
- pub fn unused_imports(&self) -> &HashSet { - &self.unused_imports - } } diff --git a/compiler/noirc_frontend/src/hir/resolution/import.rs b/compiler/noirc_frontend/src/hir/resolution/import.rs index b820e4664e3..938da0a879f 100644 --- a/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -4,6 +4,7 @@ use thiserror::Error; use crate::graph::CrateId; use crate::hir::def_collector::dc_crate::CompilationError; use crate::node_interner::ReferenceId; +use crate::usage_tracker::UsageTracker; use std::collections::BTreeMap; use crate::ast::{Ident, ItemVisibility, Path, PathKind, PathSegment}; @@ -13,6 +14,7 @@ use super::errors::ResolverError; #[derive(Debug, Clone)] pub struct ImportDirective { + pub visibility: ItemVisibility, pub module_id: LocalModuleId, pub path: Path, pub alias: Option, @@ -86,6 +88,7 @@ pub fn resolve_import( crate_id: CrateId, import_directive: &ImportDirective, def_maps: &BTreeMap, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> Result { let module_scope = import_directive.module_id; @@ -93,7 +96,14 @@ pub fn resolve_import( module_id: resolved_module, namespace: resolved_namespace, mut error, - } = resolve_path_to_ns(import_directive, crate_id, crate_id, def_maps, path_references)?; + } = resolve_path_to_ns( + import_directive, + crate_id, + crate_id, + def_maps, + usage_tracker, + path_references, + )?; let name = resolve_path_name(import_directive); @@ -131,10 +141,10 @@ fn resolve_path_to_ns( crate_id: CrateId, importing_crate: CrateId, def_maps: &BTreeMap, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> NamespaceResolutionResult { let import_path = &import_directive.path.segments; - let def_map = &def_maps[&crate_id]; match import_directive.path.kind { crate::ast::PathKind::Crate => { @@ -144,6 +154,7 @@ fn resolve_path_to_ns( importing_crate, import_path, def_maps, + usage_tracker, path_references, ) } @@ -157,10 +168,13 @@ fn resolve_path_to_ns( import_path, import_directive.module_id, def_maps, + true, + usage_tracker, path_references, ); } + let def_map = &def_maps[&crate_id]; let current_mod_id = ModuleId { krate: crate_id, local_id: import_directive.module_id }; let current_mod = &def_map.modules[current_mod_id.local_id.0]; let first_segment = @@ -168,9 +182,11 @@ fn resolve_path_to_ns( if current_mod.find_name(first_segment).is_none() { // Resolve externally when first segment is unresolved return resolve_external_dep( - def_map, + crate_id, + // def_map, import_directive, def_maps, + usage_tracker, path_references, importing_crate, ); @@ -182,14 +198,17 @@ fn resolve_path_to_ns( import_path, import_directive.module_id, def_maps, + true, + usage_tracker, path_references, ) } crate::ast::PathKind::Dep => resolve_external_dep( - def_map, + crate_id, import_directive, def_maps, + usage_tracker, path_references, importing_crate, ), @@ -204,6 +223,8 @@ fn resolve_path_to_ns( import_path, parent_module_id, def_maps, + false, + usage_tracker, path_references, ) } else { @@ -221,24 +242,31 @@ fn resolve_path_from_crate_root( import_path: &[PathSegment], def_maps: &BTreeMap, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> NamespaceResolutionResult { + let starting_mod = def_maps[&crate_id].root; resolve_name_in_module( crate_id, importing_crate, import_path, - def_maps[&crate_id].root, + starting_mod, def_maps, + false, + usage_tracker, path_references, ) } +#[allow(clippy::too_many_arguments)] fn 
resolve_name_in_module( krate: CrateId, importing_crate: CrateId, import_path: &[PathSegment], starting_mod: LocalModuleId, def_maps: &BTreeMap, + plain: bool, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> NamespaceResolutionResult { let def_map = &def_maps[&krate]; @@ -261,8 +289,12 @@ fn resolve_name_in_module( return Err(PathResolutionError::Unresolved(first_segment.clone())); } + usage_tracker.mark_as_used(current_mod_id, first_segment); + let mut warning: Option = None; - for (last_segment, current_segment) in import_path.iter().zip(import_path.iter().skip(1)) { + for (index, (last_segment, current_segment)) in + import_path.iter().zip(import_path.iter().skip(1)).enumerate() + { let last_segment = &last_segment.ident; let current_segment = ¤t_segment.ident; @@ -298,13 +330,17 @@ fn resolve_name_in_module( }; warning = warning.or_else(|| { - if can_reference_module_id( - def_maps, - importing_crate, - starting_mod, - current_mod_id, - visibility, - ) { + // If the path is plain, the first segment will always refer to + // something that's visible from the current module. + if (plain && index == 0) + || can_reference_module_id( + def_maps, + importing_crate, + starting_mod, + current_mod_id, + visibility, + ) + { None } else { Some(PathResolutionError::Private(last_segment.clone())) @@ -320,6 +356,8 @@ fn resolve_name_in_module( return Err(PathResolutionError::Unresolved(current_segment.clone())); } + usage_tracker.mark_as_used(current_mod_id, current_segment); + current_ns = found_ns; } @@ -334,15 +372,18 @@ fn resolve_path_name(import_directive: &ImportDirective) -> Ident { } fn resolve_external_dep( - current_def_map: &CrateDefMap, + crate_id: CrateId, directive: &ImportDirective, def_maps: &BTreeMap, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, importing_crate: CrateId, ) -> NamespaceResolutionResult { // Use extern_prelude to get the dep let path = &directive.path.segments; + let current_def_map = &def_maps[&crate_id]; + // Fetch the root module from the prelude let crate_name = &path.first().unwrap().ident; let dep_module = current_def_map @@ -365,13 +406,21 @@ fn resolve_external_dep( span: Span::default(), }; let dep_directive = ImportDirective { + visibility: ItemVisibility::Private, module_id: dep_module.local_id, path, alias: directive.alias.clone(), is_prelude: false, }; - resolve_path_to_ns(&dep_directive, dep_module.krate, importing_crate, def_maps, path_references) + resolve_path_to_ns( + &dep_directive, + dep_module.krate, + importing_crate, + def_maps, + usage_tracker, + path_references, + ) } // Returns false if the given private function is being called from a non-child module, or diff --git a/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs b/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs index 712951ad6cb..50089d849ae 100644 --- a/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs @@ -1,6 +1,7 @@ use super::import::{resolve_import, ImportDirective, PathResolution, PathResolutionResult}; -use crate::ast::Path; +use crate::ast::{ItemVisibility, Path}; use crate::node_interner::ReferenceId; +use crate::usage_tracker::UsageTracker; use std::collections::BTreeMap; use crate::graph::CrateId; @@ -15,6 +16,7 @@ pub trait PathResolver { &self, def_maps: &BTreeMap, path: Path, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> PathResolutionResult; @@ -39,9 +41,10 @@ impl PathResolver 
for StandardPathResolver { &self, def_maps: &BTreeMap, path: Path, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> PathResolutionResult { - resolve_path(def_maps, self.module_id, path, path_references) + resolve_path(def_maps, self.module_id, path, usage_tracker, path_references) } fn local_module_id(&self) -> LocalModuleId { @@ -59,12 +62,19 @@ pub fn resolve_path( def_maps: &BTreeMap, module_id: ModuleId, path: Path, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> PathResolutionResult { // lets package up the path into an ImportDirective and resolve it using that - let import = - ImportDirective { module_id: module_id.local_id, path, alias: None, is_prelude: false }; - let resolved_import = resolve_import(module_id.krate, &import, def_maps, path_references)?; + let import = ImportDirective { + visibility: ItemVisibility::Private, + module_id: module_id.local_id, + path, + alias: None, + is_prelude: false, + }; + let resolved_import = + resolve_import(module_id.krate, &import, def_maps, usage_tracker, path_references)?; let namespace = resolved_import.resolved_namespace; let id = diff --git a/compiler/noirc_frontend/src/lib.rs b/compiler/noirc_frontend/src/lib.rs index b14f65a3e35..ec09f680bc2 100644 --- a/compiler/noirc_frontend/src/lib.rs +++ b/compiler/noirc_frontend/src/lib.rs @@ -20,6 +20,7 @@ pub mod monomorphization; pub mod node_interner; pub mod parser; pub mod resolve_locations; +pub mod usage_tracker; pub mod hir; pub mod hir_def; diff --git a/compiler/noirc_frontend/src/locations.rs b/compiler/noirc_frontend/src/locations.rs index 0ac13a58ecf..58de235455c 100644 --- a/compiler/noirc_frontend/src/locations.rs +++ b/compiler/noirc_frontend/src/locations.rs @@ -278,7 +278,8 @@ impl NodeInterner { } pub(crate) fn register_module(&mut self, id: ModuleId, name: String) { - self.register_name_for_auto_import(name, ModuleDefId::ModuleId(id), ItemVisibility::Public); + let visibility = ItemVisibility::Public; + self.register_name_for_auto_import(name, ModuleDefId::ModuleId(id), visibility, None); } pub(crate) fn register_global( @@ -290,7 +291,7 @@ impl NodeInterner { self.add_definition_location(ReferenceId::Global(id), Some(parent_module_id)); let visibility = ItemVisibility::Public; - self.register_name_for_auto_import(name, ModuleDefId::GlobalId(id), visibility); + self.register_name_for_auto_import(name, ModuleDefId::GlobalId(id), visibility, None); } pub(crate) fn register_struct( @@ -302,13 +303,14 @@ impl NodeInterner { self.add_definition_location(ReferenceId::Struct(id), Some(parent_module_id)); let visibility = ItemVisibility::Public; - self.register_name_for_auto_import(name, ModuleDefId::TypeId(id), visibility); + self.register_name_for_auto_import(name, ModuleDefId::TypeId(id), visibility, None); } pub(crate) fn register_trait(&mut self, id: TraitId, name: String, parent_module_id: ModuleId) { self.add_definition_location(ReferenceId::Trait(id), Some(parent_module_id)); - self.register_name_for_auto_import(name, ModuleDefId::TraitId(id), ItemVisibility::Public); + let visibility = ItemVisibility::Public; + self.register_name_for_auto_import(name, ModuleDefId::TraitId(id), visibility, None); } pub(crate) fn register_type_alias( @@ -320,31 +322,34 @@ impl NodeInterner { self.add_definition_location(ReferenceId::Alias(id), Some(parent_module_id)); let visibility = ItemVisibility::Public; - self.register_name_for_auto_import(name, ModuleDefId::TypeAliasId(id), visibility); + 
self.register_name_for_auto_import(name, ModuleDefId::TypeAliasId(id), visibility, None); } pub(crate) fn register_function(&mut self, id: FuncId, func_def: &FunctionDefinition) { - self.register_name_for_auto_import( - func_def.name.0.contents.clone(), - ModuleDefId::FunctionId(id), - func_def.visibility, - ); + let name = func_def.name.0.contents.clone(); + let id = ModuleDefId::FunctionId(id); + self.register_name_for_auto_import(name, id, func_def.visibility, None); } - fn register_name_for_auto_import( + pub fn register_name_for_auto_import( &mut self, name: String, module_def_id: ModuleDefId, visibility: ItemVisibility, + defining_module: Option, ) { if !self.lsp_mode { return; } - self.auto_import_names.entry(name).or_default().push((module_def_id, visibility)); + let entry = self.auto_import_names.entry(name).or_default(); + entry.push((module_def_id, visibility, defining_module)); } - pub fn get_auto_import_names(&self) -> &HashMap> { + #[allow(clippy::type_complexity)] + pub fn get_auto_import_names( + &self, + ) -> &HashMap)>> { &self.auto_import_names } } diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 32f25790e12..4a73df6a15f 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -27,6 +27,7 @@ use crate::hir::type_check::generics::TraitGenerics; use crate::hir_def::traits::NamedType; use crate::macros_api::ModuleDefId; use crate::macros_api::UnaryOp; +use crate::usage_tracker::UsageTracker; use crate::QuotedType; use crate::ast::{BinaryOpKind, FunctionDefinition, ItemVisibility}; @@ -253,9 +254,11 @@ pub struct NodeInterner { pub(crate) reference_modules: HashMap, // All names (and their definitions) that can be offered for auto_import. + // The third value in the tuple is the module where the definition is (only for pub use). // These include top-level functions, global variables and types, but excludes // impl and trait-impl methods. - pub(crate) auto_import_names: HashMap>, + pub(crate) auto_import_names: + HashMap)>>, /// Each value currently in scope in the comptime interpreter. /// Each element of the Vec represents a scope with every scope together making @@ -264,6 +267,8 @@ pub struct NodeInterner { /// This is stored in the NodeInterner so that the Elaborator from each crate can /// share the same global values. pub(crate) comptime_scopes: Vec>, + + pub(crate) usage_tracker: UsageTracker, } /// A dependency in the dependency graph may be a type or a definition. 
@@ -650,6 +655,7 @@ impl Default for NodeInterner { auto_import_names: HashMap::default(), comptime_scopes: vec![HashMap::default()], trait_impl_associated_types: HashMap::default(), + usage_tracker: UsageTracker::default(), } } } diff --git a/compiler/noirc_frontend/src/noir_parser.lalrpop b/compiler/noirc_frontend/src/noir_parser.lalrpop index 1488a53183e..01b8be8f721 100644 --- a/compiler/noirc_frontend/src/noir_parser.lalrpop +++ b/compiler/noirc_frontend/src/noir_parser.lalrpop @@ -103,7 +103,7 @@ extern { pub(crate) TopLevelStatement: TopLevelStatement = { "use" r"[\t\r\n ]+" ";" EOF => { - TopLevelStatement::Import(use_tree) + TopLevelStatement::Import(use_tree, crate::ast::ItemVisibility::Private) } } diff --git a/compiler/noirc_frontend/src/parser/mod.rs b/compiler/noirc_frontend/src/parser/mod.rs index 11944cd3304..c82906b69a2 100644 --- a/compiler/noirc_frontend/src/parser/mod.rs +++ b/compiler/noirc_frontend/src/parser/mod.rs @@ -12,8 +12,9 @@ mod labels; mod parser; use crate::ast::{ - Expression, Ident, ImportStatement, LetStatement, ModuleDeclaration, NoirFunction, NoirStruct, - NoirTrait, NoirTraitImpl, NoirTypeAlias, Recoverable, StatementKind, TypeImpl, UseTree, + Expression, Ident, ImportStatement, ItemVisibility, LetStatement, ModuleDeclaration, + NoirFunction, NoirStruct, NoirTrait, NoirTraitImpl, NoirTypeAlias, Recoverable, StatementKind, + TypeImpl, UseTree, }; use crate::token::{Keyword, Token}; @@ -32,7 +33,7 @@ pub use parser::{ pub enum TopLevelStatement { Function(NoirFunction), Module(ModuleDeclaration), - Import(UseTree), + Import(UseTree, ItemVisibility), Struct(NoirStruct), Trait(NoirTrait), TraitImpl(NoirTraitImpl), @@ -48,7 +49,7 @@ impl TopLevelStatement { match self { TopLevelStatement::Function(f) => Some(ItemKind::Function(f)), TopLevelStatement::Module(m) => Some(ItemKind::ModuleDecl(m)), - TopLevelStatement::Import(i) => Some(ItemKind::Import(i)), + TopLevelStatement::Import(i, visibility) => Some(ItemKind::Import(i, visibility)), TopLevelStatement::Struct(s) => Some(ItemKind::Struct(s)), TopLevelStatement::Trait(t) => Some(ItemKind::Trait(t)), TopLevelStatement::TraitImpl(t) => Some(ItemKind::TraitImpl(t)), @@ -298,7 +299,7 @@ impl ParsedModule { for item in self.items { match item.kind { - ItemKind::Import(import) => module.push_import(import), + ItemKind::Import(import, visibility) => module.push_import(import, visibility), ItemKind::Function(func) => module.push_function(func), ItemKind::Struct(typ) => module.push_type(typ), ItemKind::Trait(noir_trait) => module.push_trait(noir_trait), @@ -323,7 +324,7 @@ pub struct Item { #[derive(Clone, Debug)] pub enum ItemKind { - Import(UseTree), + Import(UseTree, ItemVisibility), Function(NoirFunction), Struct(NoirStruct), Trait(NoirTrait), @@ -398,8 +399,8 @@ impl SortedModule { self.type_aliases.push(type_alias); } - fn push_import(&mut self, import_stmt: UseTree) { - self.imports.extend(import_stmt.desugar(None)); + fn push_import(&mut self, import_stmt: UseTree, visibility: ItemVisibility) { + self.imports.extend(import_stmt.desugar(None, visibility)); } fn push_module_decl(&mut self, mod_decl: ModuleDeclaration) { @@ -497,7 +498,13 @@ impl std::fmt::Display for TopLevelStatement { match self { TopLevelStatement::Function(fun) => fun.fmt(f), TopLevelStatement::Module(m) => m.fmt(f), - TopLevelStatement::Import(tree) => write!(f, "use {tree}"), + TopLevelStatement::Import(tree, visibility) => { + if visibility == &ItemVisibility::Private { + write!(f, "use {tree}") + } else { + write!(f, "{visibility} 
use {tree}") + } + } TopLevelStatement::Trait(t) => t.fmt(f), TopLevelStatement::TraitImpl(i) => i.fmt(f), TopLevelStatement::Struct(s) => s.fmt(f), diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index 5f0ef8909e7..bead1e69006 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -27,6 +27,7 @@ use self::path::as_trait_path; use self::primitives::{keyword, macro_quote_marker, mutable_reference, variable}; use self::types::{generic_type_args, maybe_comp_time}; pub use types::parse_type; +use visibility::visibility_modifier; use super::{ foldl_with_span, labels::ParsingRuleLabel, parameter_name_recovery, parameter_recovery, @@ -64,6 +65,7 @@ mod primitives; mod structs; pub(super) mod traits; mod types; +mod visibility; // synthesized by LALRPOP lalrpop_mod!(pub noir_parser); @@ -95,7 +97,7 @@ pub fn parse_program(source_program: &str) -> (ParsedModule, Vec) { for parsed_item in &parsed_module.items { if lalrpop_parser_supports_kind(&parsed_item.kind) { match &parsed_item.kind { - ItemKind::Import(parsed_use_tree) => { + ItemKind::Import(parsed_use_tree, _visibility) => { prototype_parse_use_tree(Some(parsed_use_tree), source_program); } // other kinds prevented by lalrpop_parser_supports_kind @@ -139,7 +141,7 @@ fn prototype_parse_use_tree(expected_use_tree_opt: Option<&UseTree>, input: &str ); match calculated.unwrap() { - TopLevelStatement::Import(parsed_use_tree) => { + TopLevelStatement::Import(parsed_use_tree, _visibility) => { assert_eq!(expected_use_tree, &parsed_use_tree); } unexpected_calculated => { @@ -161,7 +163,7 @@ fn prototype_parse_use_tree(expected_use_tree_opt: Option<&UseTree>, input: &str } fn lalrpop_parser_supports_kind(kind: &ItemKind) -> bool { - matches!(kind, ItemKind::Import(_)) + matches!(kind, ItemKind::Import(..)) } /// program: module EOF @@ -438,7 +440,10 @@ fn module_declaration() -> impl NoirParser { } fn use_statement() -> impl NoirParser { - keyword(Keyword::Use).ignore_then(use_tree()).map(TopLevelStatement::Import) + visibility_modifier() + .then_ignore(keyword(Keyword::Use)) + .then(use_tree()) + .map(|(visibility, use_tree)| TopLevelStatement::Import(use_tree, visibility)) } fn rename() -> impl NoirParser> { @@ -1556,7 +1561,7 @@ mod test { parse_recover(&use_statement(), &use_statement_str); use_statement_str.push(';'); match result_opt.unwrap() { - TopLevelStatement::Import(expected_use_statement) => { + TopLevelStatement::Import(expected_use_statement, _visibility) => { Some(expected_use_statement) } _ => unreachable!(), diff --git a/compiler/noirc_frontend/src/parser/parser/function.rs b/compiler/noirc_frontend/src/parser/parser/function.rs index 56760898374..9328c882e54 100644 --- a/compiler/noirc_frontend/src/parser/parser/function.rs +++ b/compiler/noirc_frontend/src/parser/parser/function.rs @@ -3,7 +3,9 @@ use super::{ block, fresh_statement, ident, keyword, maybe_comp_time, nothing, optional_visibility, parameter_name_recovery, parameter_recovery, parenthesized, parse_type, pattern, primitives::token_kind, - self_parameter, where_clause, NoirParser, + self_parameter, + visibility::visibility_modifier, + where_clause, NoirParser, }; use crate::token::{Keyword, Token, TokenKind}; use crate::{ @@ -73,21 +75,6 @@ pub(super) fn function_definition(allow_self: bool) -> impl NoirParser impl NoirParser { - let is_pub_crate = (keyword(Keyword::Pub) - .then_ignore(just(Token::LeftParen)) - .then_ignore(keyword(Keyword::Crate)) - 
.then_ignore(just(Token::RightParen))) - .map(|_| ItemVisibility::PublicCrate); - - let is_pub = keyword(Keyword::Pub).map(|_| ItemVisibility::Public); - - let is_private = empty().map(|_| ItemVisibility::Private); - - choice((is_pub_crate, is_pub, is_private)) -} - /// function_modifiers: 'unconstrained'? (visibility)? /// /// returns (is_unconstrained, visibility) for whether each keyword was present diff --git a/compiler/noirc_frontend/src/parser/parser/visibility.rs b/compiler/noirc_frontend/src/parser/parser/visibility.rs new file mode 100644 index 00000000000..d9c1abf2123 --- /dev/null +++ b/compiler/noirc_frontend/src/parser/parser/visibility.rs @@ -0,0 +1,27 @@ +use chumsky::{ + prelude::{choice, empty, just}, + Parser, +}; + +use crate::{ + ast::ItemVisibility, + parser::NoirParser, + token::{Keyword, Token}, +}; + +use super::primitives::keyword; + +/// visibility_modifier: 'pub(crate)'? 'pub'? '' +pub(crate) fn visibility_modifier() -> impl NoirParser { + let is_pub_crate = (keyword(Keyword::Pub) + .then_ignore(just(Token::LeftParen)) + .then_ignore(keyword(Keyword::Crate)) + .then_ignore(just(Token::RightParen))) + .map(|_| ItemVisibility::PublicCrate); + + let is_pub = keyword(Keyword::Pub).map(|_| ItemVisibility::Public); + + let is_private = empty().map(|_| ItemVisibility::Private); + + choice((is_pub_crate, is_pub, is_private)) +} diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index 870c781b89d..a30907211a3 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -3199,7 +3199,7 @@ fn as_trait_path_syntax_no_impl() { } #[test] -fn errors_on_unused_import() { +fn errors_on_unused_private_import() { let src = r#" mod foo { pub fn bar() {} @@ -3231,3 +3231,113 @@ fn errors_on_unused_import() { assert_eq!(ident.to_string(), "bar"); } + +#[test] +fn errors_on_unused_pub_crate_import() { + let src = r#" + mod foo { + pub fn bar() {} + pub fn baz() {} + + trait Foo { + } + } + + pub(crate) use foo::bar; + use foo::baz; + use foo::Foo; + + impl Foo for Field { + } + + fn main() { + baz(); + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::UnusedImport { ident }) = &errors[0].0 + else { + panic!("Expected an unused import error"); + }; + + assert_eq!(ident.to_string(), "bar"); +} + +#[test] +fn warns_on_use_of_private_exported_item() { + let src = r#" + mod foo { + mod bar { + pub fn baz() {} + } + + use bar::baz; + + fn qux() { + baz(); + } + } + + fn main() { + foo::baz(); + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 2); // An existing bug causes this error to be duplicated + + assert!(matches!( + &errors[0].0, + CompilationError::ResolverError(ResolverError::PathResolutionError( + PathResolutionError::Private(..), + )) + )); +} + +#[test] +fn can_use_pub_use_item() { + let src = r#" + mod foo { + mod bar { + pub fn baz() {} + } + + pub use bar::baz; + } + + fn main() { + foo::baz(); + } + "#; + assert_no_errors(src); +} + +#[test] +fn warns_on_re_export_of_item_with_less_visibility() { + let src = r#" + mod foo { + mod bar { + pub(crate) fn baz() {} + } + + pub use bar::baz; + } + + fn main() { + foo::baz(); + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + assert!(matches!( + &errors[0].0, + CompilationError::DefinitionError( + DefCollectorErrorKind::CannotReexportItemWithLessVisibility { .. 
} + ) + )); +} diff --git a/compiler/noirc_frontend/src/usage_tracker.rs b/compiler/noirc_frontend/src/usage_tracker.rs new file mode 100644 index 00000000000..d8b7b271734 --- /dev/null +++ b/compiler/noirc_frontend/src/usage_tracker.rs @@ -0,0 +1,26 @@ +use std::collections::HashSet; + +use rustc_hash::FxHashMap as HashMap; + +use crate::{ast::Ident, hir::def_map::ModuleId}; + +#[derive(Debug, Default)] +pub struct UsageTracker { + /// List of all unused imports in each module. Each time something is imported it's added + /// to the module's set. When it's used, it's removed. At the end of the program only unused imports remain. + unused_imports: HashMap>, +} + +impl UsageTracker { + pub(crate) fn add_unused_import(&mut self, module_id: ModuleId, name: Ident) { + self.unused_imports.entry(module_id).or_default().insert(name); + } + + pub(crate) fn mark_as_used(&mut self, current_mod_id: ModuleId, name: &Ident) { + self.unused_imports.entry(current_mod_id).or_default().remove(name); + } + + pub(crate) fn unused_imports(&self) -> &HashMap> { + &self.unused_imports + } +} diff --git a/docs/docs/noir/modules_packages_crates/modules.md b/docs/docs/noir/modules_packages_crates/modules.md index 16b6307d2fd..d21b009be3b 100644 --- a/docs/docs/noir/modules_packages_crates/modules.md +++ b/docs/docs/noir/modules_packages_crates/modules.md @@ -182,4 +182,30 @@ fn from_bar() { from_foo(); // invokes super::from_foo(), which is bar::from_foo() super::from_foo(); // also invokes bar::from_foo() } -``` \ No newline at end of file +``` + +### `use` visibility + +`use` declarations are private to the containing module, by default. However, like functions, +they can be marked as `pub` or `pub(crate)`. Such a use declaration serves to _re-export_ a name. +A public `use` declaration can therefore redirect some public name to a different target definition: +even a definition with a private canonical path, inside a different module. + +An example of re-exporting: + +```rust +mod some_module { + pub use foo::{bar, baz}; + mod foo { + pub fn bar() {} + pub fn baz() {} + } +} + +fn main() { + some_module::bar(); + some_module::baz(); +} +``` + +In this example, the module `some_module` re-exports two public names defined in `foo`. \ No newline at end of file diff --git a/noir_stdlib/src/array.nr b/noir_stdlib/src/array.nr index 23683a54e45..68e134b56fa 100644 --- a/noir_stdlib/src/array.nr +++ b/noir_stdlib/src/array.nr @@ -1,5 +1,4 @@ use crate::cmp::Ord; -use crate::option::Option; use crate::convert::From; impl [T; N] { diff --git a/noir_stdlib/src/collections/map.nr b/noir_stdlib/src/collections/map.nr index 4607b06d667..27a7d0d3550 100644 --- a/noir_stdlib/src/collections/map.nr +++ b/noir_stdlib/src/collections/map.nr @@ -1,5 +1,4 @@ use crate::cmp::Eq; -use crate::collections::vec::Vec; use crate::option::Option; use crate::default::Default; use crate::hash::{Hash, Hasher, BuildHasher}; diff --git a/noir_stdlib/src/collections/umap.nr b/noir_stdlib/src/collections/umap.nr index c552c053a92..c71905e63b3 100644 --- a/noir_stdlib/src/collections/umap.nr +++ b/noir_stdlib/src/collections/umap.nr @@ -1,10 +1,7 @@ use crate::cmp::Eq; -use crate::collections::vec::Vec; use crate::option::Option; use crate::default::Default; -use crate::hash::{Hash, Hasher, BuildHasher, BuildHasherDefault}; -use crate::hash::poseidon2::Poseidon2; -use crate::collections::bounded_vec::BoundedVec; +use crate::hash::{Hash, Hasher, BuildHasher}; // An unconstrained hash table with open addressing and quadratic probing. 
// Note that "unconstrained" here means that almost all operations on this diff --git a/noir_stdlib/src/ec/consts/te.nr b/noir_stdlib/src/ec/consts/te.nr index e25f373593a..8cea7654e39 100644 --- a/noir_stdlib/src/ec/consts/te.nr +++ b/noir_stdlib/src/ec/consts/te.nr @@ -1,4 +1,3 @@ -use crate::compat; use crate::ec::tecurve::affine::Point as TEPoint; use crate::ec::tecurve::affine::Curve as TECurve; diff --git a/noir_stdlib/src/eddsa.nr b/noir_stdlib/src/eddsa.nr index 337969be90e..cfdbbe9c3d0 100644 --- a/noir_stdlib/src/eddsa.nr +++ b/noir_stdlib/src/eddsa.nr @@ -1,7 +1,6 @@ -use crate::hash::poseidon; use crate::ec::consts::te::baby_jubjub; use crate::ec::tecurve::affine::Point as TEPoint; -use crate::hash::{Hash, Hasher, BuildHasher, BuildHasherDefault}; +use crate::hash::Hasher; use crate::hash::poseidon::PoseidonHasher; use crate::default::Default; diff --git a/noir_stdlib/src/field/bn254.nr b/noir_stdlib/src/field/bn254.nr index 19a49402642..0aa5ca0717b 100644 --- a/noir_stdlib/src/field/bn254.nr +++ b/noir_stdlib/src/field/bn254.nr @@ -142,7 +142,7 @@ pub fn lt(a: Field, b: Field) -> bool { mod tests { // TODO: Allow imports from "super" - use crate::field::bn254::{decompose_hint, decompose, compute_lt, assert_gt, gt, lt, TWO_POW_128, compute_lte, PLO, PHI}; + use crate::field::bn254::{decompose, compute_lt, assert_gt, gt, TWO_POW_128, compute_lte, PLO, PHI}; #[test] fn check_decompose() { diff --git a/noir_stdlib/src/hash/mod.nr b/noir_stdlib/src/hash/mod.nr index 33be56cdc3d..0e15595ff40 100644 --- a/noir_stdlib/src/hash/mod.nr +++ b/noir_stdlib/src/hash/mod.nr @@ -12,7 +12,7 @@ use crate::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar, multi_s use crate::meta::derive_via; // Kept for backwards compatibility -use sha256::{digest, sha256, sha256_compression, sha256_var}; +pub use sha256::{digest, sha256, sha256_compression, sha256_var}; #[foreign(blake2s)] // docs:start:blake2s diff --git a/noir_stdlib/src/hash/poseidon/bn254.nr b/noir_stdlib/src/hash/poseidon/bn254.nr index 103ab3d166a..848d561f755 100644 --- a/noir_stdlib/src/hash/poseidon/bn254.nr +++ b/noir_stdlib/src/hash/poseidon/bn254.nr @@ -2,7 +2,7 @@ mod perm; mod consts; -use crate::hash::poseidon::{PoseidonConfig, absorb}; +use crate::hash::poseidon::absorb; // Variable-length Poseidon-128 sponge as suggested in second bullet point of §3 of https://eprint.iacr.org/2019/458.pdf #[field(bn254)] diff --git a/noir_stdlib/src/hash/poseidon/bn254/perm.nr b/noir_stdlib/src/hash/poseidon/bn254/perm.nr index b7c022c5a00..b7dc05ef9de 100644 --- a/noir_stdlib/src/hash/poseidon/bn254/perm.nr +++ b/noir_stdlib/src/hash/poseidon/bn254/perm.nr @@ -1,7 +1,6 @@ // Instantiations of Poseidon permutation for the prime field of the same order as BN254 use crate::hash::poseidon::bn254::consts; use crate::hash::poseidon::permute; -use crate::hash::poseidon::PoseidonConfig; #[field(bn254)] pub fn x5_2(mut state: [Field; 2]) -> [Field; 2] { diff --git a/noir_stdlib/src/ops/mod.nr b/noir_stdlib/src/ops/mod.nr index 8b1903cff0b..6cf20432468 100644 --- a/noir_stdlib/src/ops/mod.nr +++ b/noir_stdlib/src/ops/mod.nr @@ -1,5 +1,5 @@ mod arith; mod bit; -use arith::{Add, Sub, Mul, Div, Rem, Neg}; -use bit::{Not, BitOr, BitAnd, BitXor, Shl, Shr}; +pub use arith::{Add, Sub, Mul, Div, Rem, Neg}; +pub use bit::{Not, BitOr, BitAnd, BitXor, Shl, Shr}; diff --git a/noir_stdlib/src/prelude.nr b/noir_stdlib/src/prelude.nr index 0d423e3556d..b14f38bdf55 100644 --- a/noir_stdlib/src/prelude.nr +++ b/noir_stdlib/src/prelude.nr @@ -1,9 +1,9 @@ -use 
crate::collections::vec::Vec; -use crate::collections::bounded_vec::BoundedVec; -use crate::option::Option; -use crate::{print, println, assert_constant}; -use crate::uint128::U128; -use crate::cmp::{Eq, Ord}; -use crate::default::Default; -use crate::convert::{From, Into}; -use crate::meta::{derive, derive_via}; +pub use crate::collections::vec::Vec; +pub use crate::collections::bounded_vec::BoundedVec; +pub use crate::option::Option; +pub use crate::{print, println, assert_constant}; +pub use crate::uint128::U128; +pub use crate::cmp::{Eq, Ord}; +pub use crate::default::Default; +pub use crate::convert::{From, Into}; +pub use crate::meta::{derive, derive_via}; diff --git a/noir_stdlib/src/sha256.nr b/noir_stdlib/src/sha256.nr index c3e18b13e91..ce217f7a689 100644 --- a/noir_stdlib/src/sha256.nr +++ b/noir_stdlib/src/sha256.nr @@ -1,2 +1,2 @@ // This file is kept for backwards compatibility. -use crate::hash::sha256::{digest, sha256_var}; +pub use crate::hash::sha256::{digest, sha256_var}; diff --git a/noir_stdlib/src/sha512.nr b/noir_stdlib/src/sha512.nr index 1ddbf6e0ae1..b474e27b416 100644 --- a/noir_stdlib/src/sha512.nr +++ b/noir_stdlib/src/sha512.nr @@ -1,2 +1,2 @@ // This file is kept for backwards compatibility. -use crate::hash::sha512::digest; +pub use crate::hash::sha512::digest; diff --git a/noir_stdlib/src/uint128.nr b/noir_stdlib/src/uint128.nr index 6b2e78f33d6..ac7f744cb3b 100644 --- a/noir_stdlib/src/uint128.nr +++ b/noir_stdlib/src/uint128.nr @@ -1,6 +1,5 @@ use crate::ops::{Add, Sub, Mul, Div, Rem, Not, BitOr, BitAnd, BitXor, Shl, Shr}; use crate::cmp::{Eq, Ord, Ordering}; -use crate::println; global pow64 : Field = 18446744073709551616; //2^64; global pow63 : Field = 9223372036854775808; // 2^63; diff --git a/tooling/lsp/src/notifications/mod.rs b/tooling/lsp/src/notifications/mod.rs index f44f8e2e4d5..d1ffdb55066 100644 --- a/tooling/lsp/src/notifications/mod.rs +++ b/tooling/lsp/src/notifications/mod.rs @@ -3,7 +3,7 @@ use std::ops::ControlFlow; use crate::insert_all_files_for_workspace_into_file_manager; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; use lsp_types::DiagnosticTag; -use noirc_driver::{check_crate, file_manager_with_stdlib, CheckOptions}; +use noirc_driver::{check_crate, file_manager_with_stdlib}; use noirc_errors::{DiagnosticKind, FileDiagnostic}; use crate::types::{ @@ -133,11 +133,7 @@ pub(crate) fn process_workspace_for_noir_document( let (mut context, crate_id) = crate::prepare_package(&workspace_file_manager, &parsed_files, package); - let options = CheckOptions { - error_on_unused_imports: package.error_on_unused_imports(), - ..Default::default() - }; - let file_diagnostics = match check_crate(&mut context, crate_id, &options) { + let file_diagnostics = match check_crate(&mut context, crate_id, &Default::default()) { Ok(((), warnings)) => warnings, Err(errors_and_warnings) => errors_and_warnings, }; diff --git a/tooling/lsp/src/requests/code_action.rs b/tooling/lsp/src/requests/code_action.rs index fd8c42a3b87..8e153bb0b46 100644 --- a/tooling/lsp/src/requests/code_action.rs +++ b/tooling/lsp/src/requests/code_action.rs @@ -21,7 +21,7 @@ use noirc_frontend::{ use crate::{ byte_span_to_range, - modules::{get_parent_module_id, module_full_path}, + modules::{get_parent_module_id, module_full_path, module_id_path}, utils, LspState, }; @@ -267,21 +267,33 @@ impl<'a> Visitor for CodeActionFinder<'a> { continue; } - for (module_def_id, visibility) in entries { - let Some(module_full_path) = module_full_path( - *module_def_id, - *visibility, 
- self.module_id, - current_module_parent_id, - self.interner, - ) else { - continue; + for (module_def_id, visibility, defining_module) in entries { + let module_full_path = if let Some(defining_module) = defining_module { + module_id_path( + *defining_module, + &self.module_id, + current_module_parent_id, + self.interner, + ) + } else { + let Some(module_full_path) = module_full_path( + *module_def_id, + *visibility, + self.module_id, + current_module_parent_id, + self.interner, + ) else { + continue; + }; + module_full_path }; - let full_path = if let ModuleDefId::ModuleId(..) = module_def_id { - module_full_path.clone() - } else { + let full_path = if defining_module.is_some() + || !matches!(module_def_id, ModuleDefId::ModuleId(..)) + { format!("{}::{}", module_full_path, name) + } else { + module_full_path.clone() }; let qualify_prefix = if let ModuleDefId::ModuleId(..) = module_def_id { diff --git a/tooling/lsp/src/requests/code_action/tests.rs b/tooling/lsp/src/requests/code_action/tests.rs index 21b5fab96c6..a5a19049141 100644 --- a/tooling/lsp/src/requests/code_action/tests.rs +++ b/tooling/lsp/src/requests/code_action/tests.rs @@ -199,3 +199,38 @@ fn main() { assert_code_action(title, src, expected).await; } + +#[test] +async fn test_qualify_code_action_for_pub_use_import() { + let title = "Qualify as bar::foobar"; + + let src = r#" + mod bar { + mod baz { + pub fn qux() {} + } + + pub use baz::qux as foobar; + } + + fn main() { + foob>| { lines: Vec<&'a str>, byte_index: usize, byte: Option, - /// The module ID of the current file. - root_module_id: ModuleId, /// The module ID in scope. This might change as we traverse the AST /// if we are analyzing something inside an inline module declaration. module_id: ModuleId, @@ -131,7 +126,6 @@ impl<'a> NodeFinder<'a> { ) -> Self { // Find the module the current file belongs to let def_map = &def_maps[&krate]; - let root_module_id = ModuleId { krate, local_id: def_map.root() }; let local_id = if let Some((module_index, _)) = def_map.modules().iter().find(|(_, module_data)| module_data.location.file == file) { @@ -146,7 +140,6 @@ impl<'a> NodeFinder<'a> { lines: source.lines().collect(), byte_index, byte, - root_module_id, module_id, def_maps, dependencies, @@ -278,12 +271,6 @@ impl<'a> NodeFinder<'a> { let is_single_segment = !after_colons && idents.is_empty() && path.kind == PathKind::Plain; let module_id; - let module_completion_kind = if after_colons || !idents.is_empty() { - ModuleCompletionKind::DirectChildren - } else { - ModuleCompletionKind::AllVisibleItems - }; - // When completing in the middle of an ident, we don't want to complete // with function parameters because there might already be function parameters, // and in the middle of a path it leads to code that won't compile @@ -346,7 +333,6 @@ impl<'a> NodeFinder<'a> { &prefix, path.kind, at_root, - module_completion_kind, function_completion_kind, requested_items, ); @@ -447,7 +433,6 @@ impl<'a> NodeFinder<'a> { } } - let module_completion_kind = ModuleCompletionKind::DirectChildren; let function_completion_kind = FunctionCompletionKind::Name; let requested_items = RequestedItems::AnyItems; @@ -463,7 +448,6 @@ impl<'a> NodeFinder<'a> { prefix, path_kind, at_root, - module_completion_kind, function_completion_kind, requested_items, ); @@ -478,7 +462,6 @@ impl<'a> NodeFinder<'a> { &prefix, path_kind, at_root, - module_completion_kind, function_completion_kind, requested_items, ); @@ -489,7 +472,6 @@ impl<'a> NodeFinder<'a> { &prefix, path_kind, at_root, - 
module_completion_kind, function_completion_kind, requested_items, ); @@ -604,6 +586,7 @@ impl<'a> NodeFinder<'a> { for func_id in methods.iter() { if name_matches(name, prefix) { if let Some(completion_item) = self.function_completion_item( + name, func_id, function_completion_kind, function_kind, @@ -625,9 +608,12 @@ impl<'a> NodeFinder<'a> { ) { for (name, func_id) in &trait_.method_ids { if name_matches(name, prefix) { - if let Some(completion_item) = - self.function_completion_item(*func_id, function_completion_kind, function_kind) - { + if let Some(completion_item) = self.function_completion_item( + name, + *func_id, + function_completion_kind, + function_kind, + ) { self.completion_items.push(completion_item); self.suggested_module_def_ids.insert(ModuleDefId::FunctionId(*func_id)); } @@ -661,7 +647,6 @@ impl<'a> NodeFinder<'a> { prefix: &str, path_kind: PathKind, at_root: bool, - module_completion_kind: ModuleCompletionKind, function_completion_kind: FunctionCompletionKind, requested_items: RequestedItems, ) { @@ -694,12 +679,7 @@ impl<'a> NodeFinder<'a> { let function_kind = FunctionKind::Any; - let items = match module_completion_kind { - ModuleCompletionKind::DirectChildren => module_data.definitions(), - ModuleCompletionKind::AllVisibleItems => module_data.scope(), - }; - - for ident in items.names() { + for ident in module_data.scope().names() { let name = &ident.0.contents; if name_matches(name, prefix) { @@ -773,14 +753,6 @@ impl<'a> NodeFinder<'a> { fn resolve_path(&self, segments: Vec) -> Option { let last_segment = segments.last().unwrap().clone(); - let path_segments = segments.into_iter().map(PathSegment::from).collect(); - let path = Path { segments: path_segments, kind: PathKind::Plain, span: Span::default() }; - - let path_resolver = StandardPathResolver::new(self.root_module_id); - if let Ok(path_resolution) = path_resolver.resolve(self.def_maps, path, &mut None) { - return Some(path_resolution.module_def_id); - } - // If we can't resolve a path trough lookup, let's see if the last segment is bound to a type let location = Location::new(last_segment.span(), self.file); if let Some(reference_id) = self.interner.find_referenced(location) { @@ -808,7 +780,7 @@ impl<'a> Visitor for NodeFinder<'a> { self.includes_span(item.span) } - fn visit_import(&mut self, use_tree: &UseTree) -> bool { + fn visit_import(&mut self, use_tree: &UseTree, _visibility: ItemVisibility) -> bool { let mut prefixes = Vec::new(); self.find_in_use_tree(use_tree, &mut prefixes); false diff --git a/tooling/lsp/src/requests/completion/auto_import.rs b/tooling/lsp/src/requests/completion/auto_import.rs index bf3ff7f0291..bbd471dfea1 100644 --- a/tooling/lsp/src/requests/completion/auto_import.rs +++ b/tooling/lsp/src/requests/completion/auto_import.rs @@ -1,7 +1,7 @@ use lsp_types::{Position, Range, TextEdit}; use noirc_frontend::macros_api::ModuleDefId; -use crate::modules::{get_parent_module_id, module_full_path}; +use crate::modules::{get_parent_module_id, module_full_path, module_id_path}; use super::{ kinds::{FunctionCompletionKind, FunctionKind, RequestedItems}, @@ -24,7 +24,7 @@ impl<'a> NodeFinder<'a> { continue; } - for (module_def_id, visibility) in entries { + for (module_def_id, visibility, defining_module) in entries { if self.suggested_module_def_ids.contains(module_def_id) { continue; } @@ -39,20 +39,32 @@ impl<'a> NodeFinder<'a> { continue; }; - let Some(module_full_path) = module_full_path( - *module_def_id, - *visibility, - self.module_id, - current_module_parent_id, - self.interner, 
- ) else { - continue; + let module_full_path = if let Some(defining_module) = defining_module { + module_id_path( + *defining_module, + &self.module_id, + current_module_parent_id, + self.interner, + ) + } else { + let Some(module_full_path) = module_full_path( + *module_def_id, + *visibility, + self.module_id, + current_module_parent_id, + self.interner, + ) else { + continue; + }; + module_full_path }; - let full_path = if let ModuleDefId::ModuleId(..) = module_def_id { - module_full_path - } else { + let full_path = if defining_module.is_some() + || !matches!(module_def_id, ModuleDefId::ModuleId(..)) + { format!("{}::{}", module_full_path, name) + } else { + module_full_path }; let mut label_details = completion_item.label_details.unwrap(); diff --git a/tooling/lsp/src/requests/completion/completion_items.rs b/tooling/lsp/src/requests/completion/completion_items.rs index d4190f5241c..21c3a607b18 100644 --- a/tooling/lsp/src/requests/completion/completion_items.rs +++ b/tooling/lsp/src/requests/completion/completion_items.rs @@ -3,8 +3,8 @@ use lsp_types::{ }; use noirc_frontend::{ hir_def::{function::FuncMeta, stmt::HirPattern}, - macros_api::{ModuleDefId, StructId}, - node_interner::{FuncId, GlobalId, TraitId, TypeAliasId}, + macros_api::ModuleDefId, + node_interner::{FuncId, GlobalId}, Type, }; @@ -38,45 +38,32 @@ impl<'a> NodeFinder<'a> { match module_def_id { ModuleDefId::ModuleId(_) => Some(module_completion_item(name)), - ModuleDefId::FunctionId(func_id) => { - self.function_completion_item(func_id, function_completion_kind, function_kind) - } - ModuleDefId::TypeId(struct_id) => Some(self.struct_completion_item(struct_id)), - ModuleDefId::TypeAliasId(type_alias_id) => { - Some(self.type_alias_completion_item(type_alias_id)) - } - ModuleDefId::TraitId(trait_id) => Some(self.trait_completion_item(trait_id)), - ModuleDefId::GlobalId(global_id) => Some(self.global_completion_item(global_id)), + ModuleDefId::FunctionId(func_id) => self.function_completion_item( + &name, + func_id, + function_completion_kind, + function_kind, + ), + ModuleDefId::TypeId(..) => Some(self.struct_completion_item(name)), + ModuleDefId::TypeAliasId(..) => Some(self.type_alias_completion_item(name)), + ModuleDefId::TraitId(..) 
=> Some(self.trait_completion_item(name)), + ModuleDefId::GlobalId(global_id) => Some(self.global_completion_item(name, global_id)), } } - fn struct_completion_item(&self, struct_id: StructId) -> CompletionItem { - let struct_type = self.interner.get_struct(struct_id); - let struct_type = struct_type.borrow(); - let name = struct_type.name.to_string(); - + fn struct_completion_item(&self, name: String) -> CompletionItem { simple_completion_item(name.clone(), CompletionItemKind::STRUCT, Some(name)) } - fn type_alias_completion_item(&self, type_alias_id: TypeAliasId) -> CompletionItem { - let type_alias = self.interner.get_type_alias(type_alias_id); - let type_alias = type_alias.borrow(); - let name = type_alias.name.to_string(); - + fn type_alias_completion_item(&self, name: String) -> CompletionItem { simple_completion_item(name.clone(), CompletionItemKind::STRUCT, Some(name)) } - fn trait_completion_item(&self, trait_id: TraitId) -> CompletionItem { - let trait_ = self.interner.get_trait(trait_id); - let name = trait_.name.to_string(); - + fn trait_completion_item(&self, name: String) -> CompletionItem { simple_completion_item(name.clone(), CompletionItemKind::INTERFACE, Some(name)) } - fn global_completion_item(&self, global_id: GlobalId) -> CompletionItem { - let global_definition = self.interner.get_global_definition(global_id); - let name = global_definition.name.clone(); - + fn global_completion_item(&self, name: String, global_id: GlobalId) -> CompletionItem { let global = self.interner.get_global(global_id); let typ = self.interner.definition_type(global.definition_id); let description = typ.to_string(); @@ -86,12 +73,12 @@ impl<'a> NodeFinder<'a> { pub(super) fn function_completion_item( &self, + name: &String, func_id: FuncId, function_completion_kind: FunctionCompletionKind, function_kind: FunctionKind, ) -> Option { let func_meta = self.interner.function_meta(&func_id); - let name = &self.interner.function_name(&func_id).to_string(); let func_self_type = if let Some((pattern, typ, _)) = func_meta.parameters.0.first() { if self.hir_pattern_is_self_type(pattern) { diff --git a/tooling/lsp/src/requests/completion/kinds.rs b/tooling/lsp/src/requests/completion/kinds.rs index e01fcfc8c56..2fe039ba331 100644 --- a/tooling/lsp/src/requests/completion/kinds.rs +++ b/tooling/lsp/src/requests/completion/kinds.rs @@ -1,17 +1,5 @@ use noirc_frontend::Type; -/// When finding items in a module, whether to show only direct children or all visible items. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub(super) enum ModuleCompletionKind { - // Only show a module's direct children. This is used when completing a use statement - // or a path after the first segment. - DirectChildren, - // Show all of a module's visible items. This is used when completing a path outside - // of a use statement (in regular code) when the path is just a single segment: - // we want to find items exposed in the current module. - AllVisibleItems, -} - /// When suggest a function as a result of completion, whether to autocomplete its name or its name and parameters. 
#[derive(Clone, Copy, PartialEq, Eq, Debug)] pub(super) enum FunctionCompletionKind { diff --git a/tooling/lsp/src/requests/completion/tests.rs b/tooling/lsp/src/requests/completion/tests.rs index 2d667ead6bf..d621ca21bb8 100644 --- a/tooling/lsp/src/requests/completion/tests.rs +++ b/tooling/lsp/src/requests/completion/tests.rs @@ -119,14 +119,14 @@ mod completion_tests { #[test] async fn test_use_first_segment() { let src = r#" - mod foo {} + mod foobaz {} mod foobar {} - use f>|< + use foob>|< "#; assert_completion( src, - vec![module_completion_item("foo"), module_completion_item("foobar")], + vec![module_completion_item("foobaz"), module_completion_item("foobar")], ) .await; } @@ -218,7 +218,7 @@ mod completion_tests { #[test] async fn test_use_suggests_hardcoded_crate() { let src = r#" - use c>|< + use cr>|< "#; assert_completion( @@ -291,16 +291,16 @@ mod completion_tests { #[test] async fn test_use_after_super() { let src = r#" - mod foo {} + mod foobar {} mod bar { mod something {} - use super::f>|< + use super::foob>|< } "#; - assert_completion(src, vec![module_completion_item("foo")]).await; + assert_completion(src, vec![module_completion_item("foobar")]).await; } #[test] @@ -1791,4 +1791,76 @@ mod completion_tests { ) .await; } + + #[test] + async fn test_suggests_pub_use() { + let src = r#" + mod bar { + mod baz { + mod coco {} + } + + pub use baz::coco; + } + + fn main() { + bar::c>|< + } + "#; + assert_completion(src, vec![module_completion_item("coco")]).await; + } + + #[test] + async fn test_auto_import_suggests_pub_use_for_module() { + let src = r#" + mod bar { + mod baz { + mod coco {} + } + + pub use baz::coco as foobar; + } + + fn main() { + foob>|< + } + "#; + + let items = get_completions(src).await; + assert_eq!(items.len(), 1); + + let item = &items[0]; + assert_eq!(item.label, "foobar"); + assert_eq!( + item.label_details.as_ref().unwrap().detail, + Some("(use bar::foobar)".to_string()), + ); + } + + #[test] + async fn test_auto_import_suggests_pub_use_for_function() { + let src = r#" + mod bar { + mod baz { + pub fn coco() {} + } + + pub use baz::coco as foobar; + } + + fn main() { + foob>|< + } + "#; + + let items = get_completions(src).await; + assert_eq!(items.len(), 1); + + let item = &items[0]; + assert_eq!(item.label, "foobar()"); + assert_eq!( + item.label_details.as_ref().unwrap().detail, + Some("(use bar::foobar)".to_string()), + ); + } } diff --git a/tooling/nargo/src/package.rs b/tooling/nargo/src/package.rs index cde616a9e32..f55ca5550a3 100644 --- a/tooling/nargo/src/package.rs +++ b/tooling/nargo/src/package.rs @@ -73,11 +73,4 @@ impl Package { pub fn is_library(&self) -> bool { self.package_type == PackageType::Library } - - pub fn error_on_unused_imports(&self) -> bool { - match self.package_type { - PackageType::Library => false, - PackageType::Binary | PackageType::Contract => true, - } - } } diff --git a/tooling/nargo_cli/src/cli/check_cmd.rs b/tooling/nargo_cli/src/cli/check_cmd.rs index 1130a82fdfc..5239070b4d2 100644 --- a/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/tooling/nargo_cli/src/cli/check_cmd.rs @@ -10,7 +10,7 @@ use nargo::{ use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::{AbiParameter, AbiType, MAIN_RETURN_NAME}; use noirc_driver::{ - check_crate, compute_function_abi, file_manager_with_stdlib, CheckOptions, CompileOptions, + check_crate, compute_function_abi, file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, }; use noirc_frontend::{ @@ -81,9 +81,7 @@ fn 
check_package( allow_overwrite: bool, ) -> Result { let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); - let error_on_unused_imports = package.error_on_unused_imports(); - let check_options = CheckOptions::new(compile_options, error_on_unused_imports); - check_crate_and_report_errors(&mut context, crate_id, &check_options)?; + check_crate_and_report_errors(&mut context, crate_id, compile_options)?; if package.is_library() || package.is_contract() { // Libraries do not have ABIs while contracts have many, so we cannot generate a `Prover.toml` file. @@ -152,10 +150,9 @@ fn create_input_toml_template( pub(crate) fn check_crate_and_report_errors( context: &mut Context, crate_id: CrateId, - check_options: &CheckOptions, + options: &CompileOptions, ) -> Result<(), CompileError> { - let options = &check_options.compile_options; - let result = check_crate(context, crate_id, check_options); + let result = check_crate(context, crate_id, options); report_errors(result, &context.file_manager, options.deny_warnings, options.silence_warnings) } diff --git a/tooling/nargo_cli/src/cli/export_cmd.rs b/tooling/nargo_cli/src/cli/export_cmd.rs index 5721dd33e27..19add7f30dc 100644 --- a/tooling/nargo_cli/src/cli/export_cmd.rs +++ b/tooling/nargo_cli/src/cli/export_cmd.rs @@ -12,7 +12,7 @@ use nargo::workspace::Workspace; use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{ - compile_no_check, file_manager_with_stdlib, CheckOptions, CompileOptions, CompiledProgram, + compile_no_check, file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, }; @@ -83,9 +83,7 @@ fn compile_exported_functions( compile_options: &CompileOptions, ) -> Result<(), CliError> { let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); - let error_on_unused_imports = package.error_on_unused_imports(); - let check_options = CheckOptions::new(compile_options, error_on_unused_imports); - check_crate_and_report_errors(&mut context, crate_id, &check_options)?; + check_crate_and_report_errors(&mut context, crate_id, compile_options)?; let exported_functions = context.get_all_exported_functions_in_crate(&crate_id); diff --git a/tooling/nargo_cli/src/cli/test_cmd.rs b/tooling/nargo_cli/src/cli/test_cmd.rs index 751d49e6427..2f9390d72e0 100644 --- a/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/tooling/nargo_cli/src/cli/test_cmd.rs @@ -10,8 +10,7 @@ use nargo::{ }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{ - check_crate, file_manager_with_stdlib, CheckOptions, CompileOptions, - NOIR_ARTIFACT_VERSION_STRING, + check_crate, file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, }; use noirc_frontend::{ graph::CrateName, @@ -186,9 +185,7 @@ fn run_test + Default>( // We then need to construct a separate copy for each test. 
let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); - let error_on_unused_imports = package.error_on_unused_imports(); - let check_options = CheckOptions::new(compile_options, error_on_unused_imports); - check_crate(&mut context, crate_id, &check_options) + check_crate(&mut context, crate_id, compile_options) .expect("Any errors should have occurred when collecting test functions"); let test_functions = context @@ -217,9 +214,7 @@ fn get_tests_in_package( options: &CompileOptions, ) -> Result, CliError> { let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); - let error_on_unused_imports = package.error_on_unused_imports(); - let check_options = CheckOptions::new(options, error_on_unused_imports); - check_crate_and_report_errors(&mut context, crate_id, &check_options)?; + check_crate_and_report_errors(&mut context, crate_id, options)?; Ok(context .get_all_test_functions_in_crate_matching(&crate_id, fn_name) diff --git a/tooling/nargo_fmt/src/rewrite/imports.rs b/tooling/nargo_fmt/src/rewrite/imports.rs index 025d354259e..6c63c551f7d 100644 --- a/tooling/nargo_fmt/src/rewrite/imports.rs +++ b/tooling/nargo_fmt/src/rewrite/imports.rs @@ -1,4 +1,4 @@ -use noirc_frontend::ast; +use noirc_frontend::ast::{self, ItemVisibility}; use crate::{ items::Item, @@ -96,8 +96,18 @@ impl UseTree { result } - pub(crate) fn rewrite_top_level(&self, visitor: &FmtVisitor, shape: Shape) -> String { - format!("use {};", self.rewrite(visitor, shape)) + pub(crate) fn rewrite_top_level( + &self, + visitor: &FmtVisitor, + shape: Shape, + visibility: ItemVisibility, + ) -> String { + let rewrite = self.rewrite(visitor, shape); + if visibility == ItemVisibility::Private { + format!("use {};", rewrite) + } else { + format!("{} use {};", visibility, rewrite) + } } fn rewrite(&self, visitor: &FmtVisitor, shape: Shape) -> String { diff --git a/tooling/nargo_fmt/src/visitor/item.rs b/tooling/nargo_fmt/src/visitor/item.rs index 94a32449ebe..0e2d07f13d0 100644 --- a/tooling/nargo_fmt/src/visitor/item.rs +++ b/tooling/nargo_fmt/src/visitor/item.rs @@ -216,9 +216,9 @@ impl super::FmtVisitor<'_> { self.last_position = span.end(); } } - ItemKind::Import(use_tree) => { - let use_tree = - UseTree::from_ast(use_tree).rewrite_top_level(self, self.shape()); + ItemKind::Import(use_tree, visibility) => { + let use_tree = UseTree::from_ast(use_tree); + let use_tree = use_tree.rewrite_top_level(self, self.shape(), visibility); self.push_rewrite(use_tree, span); self.last_position = span.end(); } From 45344bfe1148a2f592c2e432744d3fb3d46340cc Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 3 Sep 2024 14:07:56 +0100 Subject: [PATCH 02/26] feat: implement `str_as_bytes` in the `comptime` interpreter (#5887) # Description ## Problem\* Resolves ## Summary\* This PR implements converting a string to a byte array in comptime, something very useful for function signatures in aztec-nr ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
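As a quick illustration, here is a minimal sketch of the feature in use (modeled on the test program added below; it assumes the stdlib `as_bytes` method on strings is what resolves to this new builtin inside a `comptime` block):

```rust
fn main() {
    comptime
    {
        // "hello" as UTF-8 bytes, now computable at compile time.
        let bytes: [u8; 5] = "hello".as_bytes();
        assert_eq(bytes[0], 0x68); // 'h'
        assert_eq(bytes[4], 0x6f); // 'o'
    }
}
```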
--- .../src/hir/comptime/interpreter/builtin.rs | 27 +++++++++++++++++++ .../comptime_str_as_bytes/Nargo.toml | 7 +++++ .../comptime_str_as_bytes/src/main.nr | 9 +++++++ 3 files changed, 43 insertions(+) create mode 100644 test_programs/compile_success_empty/comptime_str_as_bytes/Nargo.toml create mode 100644 test_programs/compile_success_empty/comptime_str_as_bytes/src/main.nr diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 7a196d98f52..e5b098b41ed 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -126,6 +126,7 @@ impl<'local, 'context> Interpreter<'local, 'context> { "slice_push_back" => slice_push_back(interner, arguments, location), "slice_push_front" => slice_push_front(interner, arguments, location), "slice_remove" => slice_remove(interner, arguments, location, call_stack), + "str_as_bytes" => str_as_bytes(interner, arguments, location), "struct_def_as_type" => struct_def_as_type(interner, arguments, location), "struct_def_fields" => struct_def_fields(interner, arguments, location), "struct_def_generics" => struct_def_generics(interner, arguments, location), @@ -242,6 +243,32 @@ fn slice_push_back( Ok(Value::Slice(values, typ)) } +fn str_as_bytes( + interner: &NodeInterner, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + let (string, string_location) = check_one_argument(arguments, location)?; + + match string { + Value::String(string) => { + let string_as_bytes = string.as_bytes(); + let bytes_vector: Vec = string_as_bytes.iter().cloned().map(Value::U8).collect(); + let byte_array_type = Type::Array( + Box::new(Type::Constant(string_as_bytes.len() as u32)), + Box::new(Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight)), + ); + Ok(Value::Array(bytes_vector.into(), byte_array_type)) + } + value => { + let type_var = Box::new(interner.next_type_variable()); + let expected = Type::Array(type_var.clone(), type_var); + let actual = value.get_type().into_owned(); + Err(InterpreterError::TypeMismatch { expected, actual, location: string_location }) + } + } +} + /// fn as_type(self) -> Type fn struct_def_as_type( interner: &NodeInterner, diff --git a/test_programs/compile_success_empty/comptime_str_as_bytes/Nargo.toml b/test_programs/compile_success_empty/comptime_str_as_bytes/Nargo.toml new file mode 100644 index 00000000000..f387e0e393a --- /dev/null +++ b/test_programs/compile_success_empty/comptime_str_as_bytes/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "comptime_str_as_bytes" +type = "bin" +authors = [""] +compiler_version = ">=0.24.0" + +[dependencies] diff --git a/test_programs/compile_success_empty/comptime_str_as_bytes/src/main.nr b/test_programs/compile_success_empty/comptime_str_as_bytes/src/main.nr new file mode 100644 index 00000000000..eefea67100f --- /dev/null +++ b/test_programs/compile_success_empty/comptime_str_as_bytes/src/main.nr @@ -0,0 +1,9 @@ +fn main() { + comptime + { + let hello_world_string = "hello world"; + let hello_world_bytes: [u8; 11] = [0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x77, 0x6f, 0x72, 0x6c, 0x64]; + + assert_eq(hello_world_string.as_bytes(), hello_world_bytes); + } +} From 869bc0f060b39d7b543d3e7673679ecf07d50d16 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 3 Sep 2024 15:51:55 +0100 Subject: [PATCH 03/26] chore: update git user for release PRs (#5894) # 
Description ## Problem\* Resolves ## Summary\* I noticed that the commits for the release PR is still set up to be authored by Kev. I've replaced this with the noirwhal account. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .github/workflows/release.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d27fac0e039..5124592a3fe 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -54,8 +54,8 @@ jobs: - name: Configure git run: | - git config user.name kevaundray - git config user.email kevtheappdev@gmail.com + git config user.name noirwhal + git config user.email tomfrench@aztecprotocol.com - name: Commit updates run: | @@ -100,8 +100,8 @@ jobs: - name: Configure git run: | - git config --local user.name 'kevaundray' - git config --local user.email 'kevtheappdev@gmail.com' + git config --local user.name noirwhal + git config --local user.email tomfrench@aztecprotocol.com - name: Commit new documentation version run: | From d1d93c764ba7f932dba3121257e8b90d00a6b20e Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 3 Sep 2024 11:44:51 -0500 Subject: [PATCH 04/26] chore: Cleanup str_as_bytes (#5900) # Description ## Problem\* Resolves ## Summary\* Applies some suggestions from the PR review for `str_as_bytes` in the comptime interpreter ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- .../src/hir/comptime/interpreter/builtin.rs | 28 ++++++------------- .../interpreter/builtin/builtin_helpers.rs | 13 +++++++++ 2 files changed, 22 insertions(+), 19 deletions(-) diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index e5b098b41ed..070749e45ba 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -39,7 +39,7 @@ use crate::{ QuotedType, Shared, Type, }; -use self::builtin_helpers::{get_array, get_u8}; +use self::builtin_helpers::{get_array, get_str, get_u8}; use super::Interpreter; pub(crate) mod builtin_helpers; @@ -248,25 +248,15 @@ fn str_as_bytes( arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - let (string, string_location) = check_one_argument(arguments, location)?; + let string = check_one_argument(arguments, location)?; + let string = get_str(interner, string)?; - match string { - Value::String(string) => { - let string_as_bytes = string.as_bytes(); - let bytes_vector: Vec = string_as_bytes.iter().cloned().map(Value::U8).collect(); - let byte_array_type = Type::Array( - Box::new(Type::Constant(string_as_bytes.len() as u32)), - Box::new(Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight)), - ); - Ok(Value::Array(bytes_vector.into(), byte_array_type)) - } - value => { - let type_var = Box::new(interner.next_type_variable()); - let expected = Type::Array(type_var.clone(), type_var); - let actual = value.get_type().into_owned(); - Err(InterpreterError::TypeMismatch { expected, actual, location: string_location }) - } - } + let bytes: im::Vector = string.bytes().map(Value::U8).collect(); + let byte_array_type = Type::Array( + Box::new(Type::Constant(bytes.len() as u32)), + Box::new(Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight)), + ); + Ok(Value::Array(bytes, byte_array_type)) } /// fn as_type(self) -> Type diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs index dd9ea51961e..14a0e177544 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs @@ -104,6 +104,19 @@ pub(crate) fn get_slice( } } +pub(crate) fn get_str( + interner: &NodeInterner, + (value, location): (Value, Location), +) -> IResult> { + match value { + Value::String(string) => Ok(string), + value => { + let expected = Type::String(Box::new(interner.next_type_variable())); + type_mismatch(value, expected, location) + } + } +} + pub(crate) fn get_tuple( interner: &NodeInterner, (value, location): (Value, Location), From 1e6e4f4f53c7d331c054dd84f3fe6064d2e844e3 Mon Sep 17 00:00:00 2001 From: Ary Borenszweig Date: Tue, 3 Sep 2024 14:18:17 -0300 Subject: [PATCH 05/26] feat: LSP code action "Fill struct fields" (#5885) # Description ## Problem Part of https://github.com/noir-lang/noir/issues/1579 ## Summary My second mostly-used code action in Rust is "Fill struct fields" (and "Fill match arms", but we don't have match in Noir yet). This PR implements that. ![lsp-fill-struct-fields](https://github.com/user-attachments/assets/cd8bc4bd-c06e-4270-bfb3-7e703ee3899c) ## Additional Context We don't have `todo!()` in Noir, so I used `()` instead. 
I think the most helpful thing about this code action is filling out the field names, so using `()` or `todo!()` is almost the same as you'll have to replace either with something else. ## Documentation Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- tooling/lsp/src/requests/code_action.rs | 136 ++------ .../code_action/fill_struct_fields.rs | 307 ++++++++++++++++++ .../requests/code_action/import_or_qualify.rs | 240 ++++++++++++++ tooling/lsp/src/requests/code_action/tests.rs | 150 +-------- 4 files changed, 571 insertions(+), 262 deletions(-) create mode 100644 tooling/lsp/src/requests/code_action/fill_struct_fields.rs create mode 100644 tooling/lsp/src/requests/code_action/import_or_qualify.rs diff --git a/tooling/lsp/src/requests/code_action.rs b/tooling/lsp/src/requests/code_action.rs index 8e153bb0b46..95cdc0b88b4 100644 --- a/tooling/lsp/src/requests/code_action.rs +++ b/tooling/lsp/src/requests/code_action.rs @@ -7,26 +7,26 @@ use async_lsp::ResponseError; use fm::{FileId, FileMap, PathString}; use lsp_types::{ CodeAction, CodeActionKind, CodeActionOrCommand, CodeActionParams, CodeActionResponse, - Position, Range, TextDocumentPositionParams, TextEdit, Url, WorkspaceEdit, + TextDocumentPositionParams, TextEdit, Url, WorkspaceEdit, }; -use noirc_errors::{Location, Span}; +use noirc_errors::Span; use noirc_frontend::{ - ast::{Ident, Path, Visitor}, + ast::{ConstructorExpression, Path, Visitor}, graph::CrateId, hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}, - macros_api::{ModuleDefId, NodeInterner}, + macros_api::NodeInterner, +}; +use noirc_frontend::{ parser::{Item, ItemKind, ParsedSubModule}, ParsedModule, }; -use crate::{ - byte_span_to_range, - modules::{get_parent_module_id, module_full_path, module_id_path}, - utils, LspState, -}; +use crate::{utils, LspState}; use super::{process_request, to_lsp_location}; +mod fill_struct_fields; +mod import_or_qualify; #[cfg(test)] mod tests; @@ -68,6 +68,7 @@ struct CodeActionFinder<'a> { uri: Url, files: &'a FileMap, file: FileId, + source: &'a str, lines: Vec<&'a str>, byte_index: usize, /// The module ID in scope. 
This might change as we traverse the AST @@ -108,6 +109,7 @@ impl<'a> CodeActionFinder<'a> { uri, files, file, + source, lines: source.lines().collect(), byte_index, module_id, @@ -137,46 +139,7 @@ impl<'a> CodeActionFinder<'a> { Some(code_actions) } - fn push_import_code_action(&mut self, full_path: &str) { - let line = self.auto_import_line as u32; - let character = (self.nesting * 4) as u32; - let indent = " ".repeat(self.nesting * 4); - let mut newlines = "\n"; - - // If the line we are inserting into is not an empty line, insert an extra line to make some room - if let Some(line_text) = self.lines.get(line as usize) { - if !line_text.trim().is_empty() { - newlines = "\n\n"; - } - } - - let title = format!("Import {}", full_path); - let text_edit = TextEdit { - range: Range { start: Position { line, character }, end: Position { line, character } }, - new_text: format!("use {};{}{}", full_path, newlines, indent), - }; - - let code_action = self.new_quick_fix(title, text_edit); - self.code_actions.push(CodeActionOrCommand::CodeAction(code_action)); - } - - fn push_qualify_code_action(&mut self, ident: &Ident, prefix: &str, full_path: &str) { - let Some(range) = byte_span_to_range( - self.files, - self.file, - ident.span().start() as usize..ident.span().start() as usize, - ) else { - return; - }; - - let title = format!("Qualify as {}", full_path); - let text_edit = TextEdit { range, new_text: format!("{}::", prefix) }; - - let code_action = self.new_quick_fix(title, text_edit); - self.code_actions.push(CodeActionOrCommand::CodeAction(code_action)); - } - - fn new_quick_fix(&self, title: String, text_edit: TextEdit) -> CodeAction { + fn new_quick_fix(&self, title: String, text_edit: TextEdit) -> CodeActionOrCommand { let mut changes = HashMap::new(); changes.insert(self.uri.clone(), vec![text_edit]); @@ -186,7 +149,7 @@ impl<'a> CodeActionFinder<'a> { change_annotations: None, }; - CodeAction { + CodeActionOrCommand::CodeAction(CodeAction { title, kind: Some(CodeActionKind::QUICKFIX), diagnostics: None, @@ -195,7 +158,7 @@ impl<'a> CodeActionFinder<'a> { is_preferred: None, disabled: None, data: None, - } + }) } fn includes_span(&self, span: Span) -> bool { @@ -244,69 +207,16 @@ impl<'a> Visitor for CodeActionFinder<'a> { } fn visit_path(&mut self, path: &Path) { - if path.segments.len() != 1 { - return; - } - - let ident = &path.segments[0].ident; - if !self.includes_span(ident.span()) { - return; - } - - let location = Location::new(ident.span(), self.file); - if self.interner.find_referenced(location).is_some() { - return; - } - - let current_module_parent_id = get_parent_module_id(self.def_maps, self.module_id); - - // The Path doesn't resolve to anything so it means it's an error and maybe we - // can suggest an import or to fully-qualify the path. 
- for (name, entries) in self.interner.get_auto_import_names() { - if name != &ident.0.contents { - continue; - } - - for (module_def_id, visibility, defining_module) in entries { - let module_full_path = if let Some(defining_module) = defining_module { - module_id_path( - *defining_module, - &self.module_id, - current_module_parent_id, - self.interner, - ) - } else { - let Some(module_full_path) = module_full_path( - *module_def_id, - *visibility, - self.module_id, - current_module_parent_id, - self.interner, - ) else { - continue; - }; - module_full_path - }; - - let full_path = if defining_module.is_some() - || !matches!(module_def_id, ModuleDefId::ModuleId(..)) - { - format!("{}::{}", module_full_path, name) - } else { - module_full_path.clone() - }; + self.import_or_qualify(path); + } - let qualify_prefix = if let ModuleDefId::ModuleId(..) = module_def_id { - let mut segments: Vec<_> = module_full_path.split("::").collect(); - segments.pop(); - segments.join("::") - } else { - module_full_path - }; + fn visit_constructor_expression( + &mut self, + constructor: &ConstructorExpression, + span: Span, + ) -> bool { + self.fill_struct_fields(constructor, span); - self.push_import_code_action(&full_path); - self.push_qualify_code_action(ident, &qualify_prefix, &full_path); - } - } + true } } diff --git a/tooling/lsp/src/requests/code_action/fill_struct_fields.rs b/tooling/lsp/src/requests/code_action/fill_struct_fields.rs new file mode 100644 index 00000000000..f57fbc652ad --- /dev/null +++ b/tooling/lsp/src/requests/code_action/fill_struct_fields.rs @@ -0,0 +1,307 @@ +use lsp_types::TextEdit; +use noirc_errors::{Location, Span}; +use noirc_frontend::{ast::ConstructorExpression, node_interner::ReferenceId}; + +use crate::byte_span_to_range; + +use super::CodeActionFinder; + +impl<'a> CodeActionFinder<'a> { + pub(super) fn fill_struct_fields(&mut self, constructor: &ConstructorExpression, span: Span) { + if !self.includes_span(span) { + return; + } + + // Find out which struct this is + let location = Location::new(constructor.type_name.last_ident().span(), self.file); + let Some(ReferenceId::Struct(struct_id)) = self.interner.find_referenced(location) else { + return; + }; + + let struct_type = self.interner.get_struct(struct_id); + let struct_type = struct_type.borrow(); + + // First get all of the struct's fields + let mut fields = struct_type.get_fields_as_written(); + + // Remove the ones that already exists in the constructor + for (field, _) in &constructor.fields { + fields.retain(|(name, _)| name != &field.0.contents); + } + + if fields.is_empty() { + return; + } + + // Some fields are missing. Let's suggest a quick fix that adds them. + let bytes = self.source.as_bytes(); + let right_brace_index = span.end() as usize - 1; + let mut index = right_brace_index - 1; + while bytes[index].is_ascii_whitespace() { + index -= 1; + } + + let char_before_right_brace = bytes[index] as char; + + index += 1; + + let Some(range) = byte_span_to_range(self.files, self.file, index..index) else { + return; + }; + + // If the constructor spans multiple lines, we'll add the new fields in new lines too. + // Otherwise we'll add all the fields in a single line. + let constructor_range = + byte_span_to_range(self.files, self.file, span.start() as usize..span.end() as usize); + + // If it's multiline, find out the indent of the beginning line: we'll add new fields + // with that indent "plus one" (4 more spaces). 
+ let line_indent = if let Some(constructor_range) = constructor_range { + if constructor_range.start.line == constructor_range.end.line { + None + } else { + let line = self.lines[constructor_range.start.line as usize]; + let whitespace_bytes = + line.bytes().take_while(|byte| byte.is_ascii_whitespace()).count(); + Some(whitespace_bytes) + } + } else { + None + }; + let line_indent = line_indent.map(|indent| " ".repeat(indent + 4)); + + let on_whitespace = bytes[index].is_ascii_whitespace(); + + let mut new_text = String::new(); + + // Add a comma if there's not a trailing one (if there are existing fields) + if !constructor.fields.is_empty() && char_before_right_brace != ',' { + new_text.push(','); + } + + // Add space or newline depending on whether it's multiline or not + if let Some(line_indent) = &line_indent { + new_text.push('\n'); + new_text.push_str(line_indent); + } else if !on_whitespace || constructor.fields.is_empty() { + new_text.push(' '); + } + + for (index, (name, _)) in fields.iter().enumerate() { + if index > 0 { + new_text.push(','); + if let Some(line_indent) = &line_indent { + new_text.push('\n'); + new_text.push_str(line_indent); + } else { + new_text.push(' '); + } + } + new_text.push_str(name); + new_text.push_str(": ()"); + } + + if !bytes[right_brace_index - 1].is_ascii_whitespace() { + new_text.push(' '); + } + + let title = "Fill struct fields".to_string(); + let text_edit = TextEdit { range, new_text }; + let code_action = self.new_quick_fix(title, text_edit); + self.code_actions.push(code_action); + } +} + +#[cfg(test)] +mod tests { + use tokio::test; + + use crate::requests::code_action::tests::assert_code_action; + + #[test] + async fn test_fill_struct_fields_code_action_no_space() { + let title = "Fill struct fields"; + + let src = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo {>|<} + } + "#; + + let expected = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo { one: (), two: () } + } + "#; + + assert_code_action(title, src, expected).await; + } + + #[test] + async fn test_fill_struct_fields_code_action_space() { + let title = "Fill struct fields"; + + let src = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo { >|<} + } + "#; + + let expected = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo { one: (), two: () } + } + "#; + + assert_code_action(title, src, expected).await; + } + + #[test] + async fn test_fill_struct_fields_code_action_some_fields() { + let title = "Fill struct fields"; + + let src = r#" + struct Foo { + one: Field, + two: Field, + three: Field, + } + + fn main() { + Foo { two: 1>|<} + } + "#; + + let expected = r#" + struct Foo { + one: Field, + two: Field, + three: Field, + } + + fn main() { + Foo { two: 1, one: (), three: () } + } + "#; + + assert_code_action(title, src, expected).await; + } + + #[test] + async fn test_fill_struct_fields_code_action_some_fields_trailing_comma() { + let title = "Fill struct fields"; + + let src = r#" + struct Foo { + one: Field, + two: Field, + three: Field, + } + + fn main() { + Foo { two: 1,>|<} + } + "#; + + let expected = r#" + struct Foo { + one: Field, + two: Field, + three: Field, + } + + fn main() { + Foo { two: 1, one: (), three: () } + } + "#; + + assert_code_action(title, src, expected).await; + } + + #[test] + async fn test_fill_struct_fields_code_action_multiline_empty() { + let title = "Fill struct fields"; + + let src = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo 
{>|< + } + } + "#; + + let expected = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo { + one: (), + two: () + } + } + "#; + + assert_code_action(title, src, expected).await; + } + + #[test] + async fn test_fill_struct_fields_code_action_multiline_some_fields() { + let title = "Fill struct fields"; + + let src = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo {>|< + one: 1, + } + } + "#; + + let expected = r#" + struct Foo { + one: Field, + two: Field, + } + + fn main() { + Foo { + one: 1, + two: () + } + } + "#; + + assert_code_action(title, src, expected).await; + } +} diff --git a/tooling/lsp/src/requests/code_action/import_or_qualify.rs b/tooling/lsp/src/requests/code_action/import_or_qualify.rs new file mode 100644 index 00000000000..d07d117a317 --- /dev/null +++ b/tooling/lsp/src/requests/code_action/import_or_qualify.rs @@ -0,0 +1,240 @@ +use lsp_types::{Position, Range, TextEdit}; +use noirc_errors::Location; +use noirc_frontend::{ + ast::{Ident, Path}, + macros_api::ModuleDefId, +}; + +use crate::{ + byte_span_to_range, + modules::{get_parent_module_id, module_full_path, module_id_path}, +}; + +use super::CodeActionFinder; + +impl<'a> CodeActionFinder<'a> { + pub(super) fn import_or_qualify(&mut self, path: &Path) { + if path.segments.len() != 1 { + return; + } + + let ident = &path.segments[0].ident; + if !self.includes_span(ident.span()) { + return; + } + + let location = Location::new(ident.span(), self.file); + if self.interner.find_referenced(location).is_some() { + return; + } + + let current_module_parent_id = get_parent_module_id(self.def_maps, self.module_id); + + // The Path doesn't resolve to anything so it means it's an error and maybe we + // can suggest an import or to fully-qualify the path. + for (name, entries) in self.interner.get_auto_import_names() { + if name != &ident.0.contents { + continue; + } + + for (module_def_id, visibility, defining_module) in entries { + let module_full_path = if let Some(defining_module) = defining_module { + module_id_path( + *defining_module, + &self.module_id, + current_module_parent_id, + self.interner, + ) + } else { + let Some(module_full_path) = module_full_path( + *module_def_id, + *visibility, + self.module_id, + current_module_parent_id, + self.interner, + ) else { + continue; + }; + module_full_path + }; + + let full_path = if defining_module.is_some() + || !matches!(module_def_id, ModuleDefId::ModuleId(..)) + { + format!("{}::{}", module_full_path, name) + } else { + module_full_path.clone() + }; + + let qualify_prefix = if let ModuleDefId::ModuleId(..) 
= module_def_id { + let mut segments: Vec<_> = module_full_path.split("::").collect(); + segments.pop(); + segments.join("::") + } else { + module_full_path + }; + + self.push_import_code_action(&full_path); + self.push_qualify_code_action(ident, &qualify_prefix, &full_path); + } + } + } + + fn push_import_code_action(&mut self, full_path: &str) { + let line = self.auto_import_line as u32; + let character = (self.nesting * 4) as u32; + let indent = " ".repeat(self.nesting * 4); + let mut newlines = "\n"; + + // If the line we are inserting into is not an empty line, insert an extra line to make some room + if let Some(line_text) = self.lines.get(line as usize) { + if !line_text.trim().is_empty() { + newlines = "\n\n"; + } + } + + let title = format!("Import {}", full_path); + let text_edit = TextEdit { + range: Range { start: Position { line, character }, end: Position { line, character } }, + new_text: format!("use {};{}{}", full_path, newlines, indent), + }; + + let code_action = self.new_quick_fix(title, text_edit); + self.code_actions.push(code_action); + } + + fn push_qualify_code_action(&mut self, ident: &Ident, prefix: &str, full_path: &str) { + let Some(range) = byte_span_to_range( + self.files, + self.file, + ident.span().start() as usize..ident.span().start() as usize, + ) else { + return; + }; + + let title = format!("Qualify as {}", full_path); + let text_edit = TextEdit { range, new_text: format!("{}::", prefix) }; + + let code_action = self.new_quick_fix(title, text_edit); + self.code_actions.push(code_action); + } +} + +#[cfg(test)] +mod tests { + use tokio::test; + + use crate::requests::code_action::tests::assert_code_action; + + #[test] + async fn test_qualify_code_action_for_struct() { + let title = "Qualify as foo::bar::SomeTypeInBar"; + + let src = r#" + mod foo { + mod bar { + struct SomeTypeInBar {} + } + } + + fn foo(x: SomeType>|||| CodeActionResponse { .unwrap() } -async fn assert_code_action(title: &str, src: &str, expected: &str) { +pub(crate) async fn assert_code_action(title: &str, src: &str, expected: &str) { let actions = get_code_action(src).await; let action = actions .iter() @@ -87,150 +86,3 @@ fn apply_text_edit(src: &str, text_edit: &TextEdit) -> String { lines[text_edit.range.start.line as usize] = &line; lines.join("\n") } - -#[test] -async fn test_qualify_code_action_for_struct() { - let title = "Qualify as foo::bar::SomeTypeInBar"; - - let src = r#" - mod foo { - mod bar { - struct SomeTypeInBar {} - } - } - - fn foo(x: SomeType>||||| Date: Tue, 3 Sep 2024 14:37:09 -0300 Subject: [PATCH 06/26] feat: LSP diagnostics for all package files (#5895) # Description ## Problem I noticed that currently LSP will only produce diagnostics for the file you save. That's not very convenient, as if for example you make a change to a function signature that breaks someone using it, you won't find out unless you go to that other file and save it (but how do you know which file is it?) My guess is that you'd use `nargo` on the command line to find out. ## Summary Now when a package is type-checked, all errors are reported, regardless of whether they match the file that was saved. I believe this will speed up developing with Nargo! 
(it was also slowing me down when trying out new warnings or errors) ![lsp-error-on-all-files](https://github.com/user-attachments/assets/2441e1dd-15fc-4f23-826c-7ba833012c0e) ## Additional Context I thought implementing this was going to be straight-forward (just start reporting errors in all files) but there's a catch: if a file had errors but doesn't have errors anymore, we still have to notify that the file now **doesn't** have errors. This requires us tracking which files currently have errors and making a diff with the new errors. That said: maybe there was a reason errors were only reported in the file that was saved? Final comment: ideally when you save a file in a package, all packages that depend on this package would also get type-checked... but that's currently slow (or noticeably slow), but I hope we could eventually make it work (like in Rust Analyzer). ## Documentation Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- tooling/lsp/src/lib.rs | 11 +- tooling/lsp/src/notifications/mod.rs | 207 +++++++++++++++------------ tooling/lsp/src/requests/mod.rs | 10 +- 3 files changed, 129 insertions(+), 99 deletions(-) diff --git a/tooling/lsp/src/lib.rs b/tooling/lsp/src/lib.rs index 4a764f4268b..6557975743c 100644 --- a/tooling/lsp/src/lib.rs +++ b/tooling/lsp/src/lib.rs @@ -4,7 +4,7 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] use std::{ - collections::{BTreeMap, HashMap}, + collections::{BTreeMap, HashMap, HashSet}, future::Future, ops::{self, ControlFlow}, path::{Path, PathBuf}, @@ -91,10 +91,13 @@ pub struct LspState { open_documents_count: usize, input_files: HashMap, cached_lenses: HashMap>, - cached_definitions: HashMap, + cached_definitions: HashMap, cached_parsed_files: HashMap))>, - cached_def_maps: HashMap>, + cached_def_maps: HashMap>, options: LspInitializationOptions, + + // Tracks files that currently have errors, by package root. + files_with_errors: HashMap>, } impl LspState { @@ -113,6 +116,8 @@ impl LspState { cached_parsed_files: HashMap::new(), cached_def_maps: HashMap::new(), options: Default::default(), + + files_with_errors: HashMap::new(), } } } diff --git a/tooling/lsp/src/notifications/mod.rs b/tooling/lsp/src/notifications/mod.rs index d1ffdb55066..87e7bea8c3b 100644 --- a/tooling/lsp/src/notifications/mod.rs +++ b/tooling/lsp/src/notifications/mod.rs @@ -1,8 +1,12 @@ +use std::collections::HashSet; use std::ops::ControlFlow; +use std::path::PathBuf; use crate::insert_all_files_for_workspace_into_file_manager; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; -use lsp_types::DiagnosticTag; +use fm::{FileManager, FileMap}; +use fxhash::FxHashMap as HashMap; +use lsp_types::{DiagnosticTag, Url}; use noirc_driver::{check_crate, file_manager_with_stdlib}; use noirc_errors::{DiagnosticKind, FileDiagnostic}; @@ -105,7 +109,7 @@ pub(super) fn on_did_save_text_document( // caching code lenses and type definitions, and notifying about compilation errors. 
pub(crate) fn process_workspace_for_noir_document( state: &mut LspState, - document_uri: lsp_types::Url, + document_uri: Url, output_diagnostics: bool, ) -> Result<(), async_lsp::Error> { let file_path = document_uri.to_file_path().map_err(|_| { @@ -125,100 +129,123 @@ pub(crate) fn process_workspace_for_noir_document( let parsed_files = parse_diff(&workspace_file_manager, state); - let diagnostics: Vec<_> = workspace - .into_iter() - .flat_map(|package| -> Vec { - let package_root_dir: String = package.root_dir.as_os_str().to_string_lossy().into(); - - let (mut context, crate_id) = - crate::prepare_package(&workspace_file_manager, &parsed_files, package); - - let file_diagnostics = match check_crate(&mut context, crate_id, &Default::default()) { - Ok(((), warnings)) => warnings, - Err(errors_and_warnings) => errors_and_warnings, - }; - - // We don't add test headings for a package if it contains no `#[test]` functions - if let Some(tests) = get_package_tests_in_crate(&context, &crate_id, &package.name) { - let _ = state.client.notify::(NargoPackageTests { - package: package.name.to_string(), - tests, - }); - } - - let collected_lenses = crate::requests::collect_lenses_for_package( - &context, - crate_id, - &workspace, - package, - Some(&file_path), - ); - state.cached_lenses.insert(document_uri.to_string(), collected_lenses); - state.cached_definitions.insert(package_root_dir.clone(), context.def_interner); - state.cached_def_maps.insert(package_root_dir.clone(), context.def_maps); - - let fm = &context.file_manager; - let files = fm.as_file_map(); - - if output_diagnostics { - file_diagnostics - .into_iter() - .filter_map(|FileDiagnostic { file_id, diagnostic, call_stack: _ }| { - // Ignore diagnostics for any file that wasn't the file we saved - // TODO: In the future, we could create "related" diagnostics for these files - if fm.path(file_id).expect("file must exist to have emitted diagnostic") - != file_path - { - return None; - } - - // TODO: Should this be the first item in secondaries? Should we bail when we find a range? 
- let range = diagnostic - .secondaries - .into_iter() - .filter_map(|sec| byte_span_to_range(files, file_id, sec.span.into())) - .last() - .unwrap_or_default(); - - let severity = match diagnostic.kind { - DiagnosticKind::Error => DiagnosticSeverity::ERROR, - DiagnosticKind::Warning => DiagnosticSeverity::WARNING, - DiagnosticKind::Info => DiagnosticSeverity::INFORMATION, - DiagnosticKind::Bug => DiagnosticSeverity::WARNING, - }; - - let mut tags = Vec::new(); - if diagnostic.unnecessary { - tags.push(DiagnosticTag::UNNECESSARY); - } - if diagnostic.deprecated { - tags.push(DiagnosticTag::DEPRECATED); - } - - Some(Diagnostic { - range, - severity: Some(severity), - message: diagnostic.message, - tags: if tags.is_empty() { None } else { Some(tags) }, - ..Default::default() - }) - }) - .collect() - } else { - vec![] - } - }) - .collect(); - - if output_diagnostics { + for package in workspace.into_iter() { + let (mut context, crate_id) = + crate::prepare_package(&workspace_file_manager, &parsed_files, package); + + let file_diagnostics = match check_crate(&mut context, crate_id, &Default::default()) { + Ok(((), warnings)) => warnings, + Err(errors_and_warnings) => errors_and_warnings, + }; + + // We don't add test headings for a package if it contains no `#[test]` functions + if let Some(tests) = get_package_tests_in_crate(&context, &crate_id, &package.name) { + let _ = state.client.notify::(NargoPackageTests { + package: package.name.to_string(), + tests, + }); + } + + let collected_lenses = crate::requests::collect_lenses_for_package( + &context, + crate_id, + &workspace, + package, + Some(&file_path), + ); + state.cached_lenses.insert(document_uri.to_string(), collected_lenses); + state.cached_definitions.insert(package.root_dir.clone(), context.def_interner); + state.cached_def_maps.insert(package.root_dir.clone(), context.def_maps); + + let fm = &context.file_manager; + let files = fm.as_file_map(); + + if output_diagnostics { + publish_diagnostics(state, &package.root_dir, files, fm, file_diagnostics); + } + } + + Ok(()) +} + +fn publish_diagnostics( + state: &mut LspState, + package_root_dir: &PathBuf, + files: &FileMap, + fm: &FileManager, + file_diagnostics: Vec, +) { + let mut diagnostics_per_url: HashMap> = HashMap::default(); + + for file_diagnostic in file_diagnostics.into_iter() { + let file_id = file_diagnostic.file_id; + let diagnostic = file_diagnostic_to_diagnostic(file_diagnostic, files); + + let path = fm.path(file_id).expect("file must exist to have emitted diagnostic"); + if let Ok(uri) = Url::from_file_path(path) { + diagnostics_per_url.entry(uri).or_default().push(diagnostic); + } + } + + let new_files_with_errors: HashSet<_> = diagnostics_per_url.keys().cloned().collect(); + + for (uri, diagnostics) in diagnostics_per_url { let _ = state.client.publish_diagnostics(PublishDiagnosticsParams { - uri: document_uri, + uri, version: None, diagnostics, }); } - Ok(()) + // For files that previously had errors but no longer have errors we still need to publish empty diagnostics + if let Some(old_files_with_errors) = state.files_with_errors.get(package_root_dir) { + for uri in old_files_with_errors.difference(&new_files_with_errors) { + let _ = state.client.publish_diagnostics(PublishDiagnosticsParams { + uri: uri.clone(), + version: None, + diagnostics: vec![], + }); + } + } + + // Remember which files currently have errors, for next time + state.files_with_errors.insert(package_root_dir.clone(), new_files_with_errors); +} + +fn file_diagnostic_to_diagnostic(file_diagnostic: 
FileDiagnostic, files: &FileMap) -> Diagnostic { + let file_id = file_diagnostic.file_id; + let diagnostic = file_diagnostic.diagnostic; + + // TODO: Should this be the first item in secondaries? Should we bail when we find a range? + let range = diagnostic + .secondaries + .into_iter() + .filter_map(|sec| byte_span_to_range(files, file_id, sec.span.into())) + .last() + .unwrap_or_default(); + + let severity = match diagnostic.kind { + DiagnosticKind::Error => DiagnosticSeverity::ERROR, + DiagnosticKind::Warning => DiagnosticSeverity::WARNING, + DiagnosticKind::Info => DiagnosticSeverity::INFORMATION, + DiagnosticKind::Bug => DiagnosticSeverity::WARNING, + }; + + let mut tags = Vec::new(); + if diagnostic.unnecessary { + tags.push(DiagnosticTag::UNNECESSARY); + } + if diagnostic.deprecated { + tags.push(DiagnosticTag::DEPRECATED); + } + + Diagnostic { + range, + severity: Some(severity), + message: diagnostic.message, + tags: if tags.is_empty() { None } else { Some(tags) }, + ..Default::default() + } } pub(super) fn on_exit( diff --git a/tooling/lsp/src/requests/mod.rs b/tooling/lsp/src/requests/mod.rs index 5bd9959fd63..af58396550d 100644 --- a/tooling/lsp/src/requests/mod.rs +++ b/tooling/lsp/src/requests/mod.rs @@ -407,7 +407,7 @@ pub(crate) struct ProcessRequestCallbackArgs<'a> { location: noirc_errors::Location, files: &'a FileMap, interner: &'a NodeInterner, - interners: &'a HashMap, + interners: &'a HashMap, crate_id: CrateId, crate_name: String, dependencies: &'a Vec, @@ -432,8 +432,6 @@ where ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find package for file") })?; - let package_root_path: String = package.root_dir.as_os_str().to_string_lossy().into(); - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager( state, @@ -447,9 +445,9 @@ where let interner; let def_maps; - if let Some(def_interner) = state.cached_definitions.get(&package_root_path) { + if let Some(def_interner) = state.cached_definitions.get(&package.root_dir) { interner = def_interner; - def_maps = state.cached_def_maps.get(&package_root_path).unwrap(); + def_maps = state.cached_def_maps.get(&package.root_dir).unwrap(); } else { // We ignore the warnings and errors produced by compilation while resolving the definition let _ = noirc_driver::check_crate(&mut context, crate_id, &Default::default()); @@ -479,7 +477,7 @@ where pub(crate) fn find_all_references_in_workspace( location: noirc_errors::Location, interner: &NodeInterner, - cached_interners: &HashMap, + cached_interners: &HashMap, files: &FileMap, include_declaration: bool, include_self_type_name: bool, From 6f30e42f8a895c7813e770d6ee9ffbc9977c335b Mon Sep 17 00:00:00 2001 From: Ary Borenszweig Date: Tue, 3 Sep 2024 14:41:42 -0300 Subject: [PATCH 07/26] feat: better println for Quoted (#5896) # Description ## Problem Resolves #5899 For debugging purposes, when you `println` a quoted value, some token values aren't expanded, making it a bit harder to understand what's going on. ## Summary Now interned expressions are shown in `Quoted` values. This program: ```rust fn main() { comptime { let n = quote { [1, 2, 3] }.as_expr().unwrap(); let q = quote { $n }; println(q); } } ``` Used to print: ``` quote { (expr) } ``` Now it prints: ``` quote { [1, 2, 3] } ``` ## Additional Context None. ## Documentation Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. 
# PR Checklist - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .../noirc_frontend/src/hir/comptime/value.rs | 37 +++++++++++++++++-- 1 file changed, 34 insertions(+), 3 deletions(-) diff --git a/compiler/noirc_frontend/src/hir/comptime/value.rs b/compiler/noirc_frontend/src/hir/comptime/value.rs index b96c4852931..c5818c20c57 100644 --- a/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -567,11 +567,33 @@ impl<'value, 'interner> Display for ValuePrinter<'value, 'interner> { Value::Quoted(tokens) => { write!(f, "quote {{")?; for token in tokens.iter() { + write!(f, " ")?; + match token { Token::QuotedType(id) => { - write!(f, " {}", self.interner.get_quoted_type(*id))?; + write!(f, "{}", self.interner.get_quoted_type(*id))?; + } + Token::InternedExpr(id) => { + let value = Value::expression(ExpressionKind::Interned(*id)); + value.display(self.interner).fmt(f)?; + } + Token::InternedStatement(id) => { + let value = Value::statement(StatementKind::Interned(*id)); + value.display(self.interner).fmt(f)?; + } + Token::InternedLValue(id) => { + let value = Value::lvalue(LValue::Interned(*id, Span::default())); + value.display(self.interner).fmt(f)?; } - other => write!(f, " {other}")?, + Token::InternedUnresolvedTypeData(id) => { + let value = Value::UnresolvedType(UnresolvedTypeData::Interned(*id)); + value.display(self.interner).fmt(f)?; + } + Token::UnquoteMarker(id) => { + let value = Value::TypedExpr(TypedExpr::ExprId(*id)); + value.display(self.interner).fmt(f)?; + } + other => write!(f, "{other}")?, } } write!(f, " }}") @@ -632,7 +654,16 @@ impl<'value, 'interner> Display for ValuePrinter<'value, 'interner> { Value::Expr(ExprValue::LValue(lvalue)) => { write!(f, "{}", remove_interned_in_lvalue(self.interner, lvalue.clone())) } - Value::TypedExpr(_) => write!(f, "(typed expr)"), + Value::TypedExpr(TypedExpr::ExprId(id)) => { + let hir_expr = self.interner.expression(id); + let expr = hir_expr.to_display_ast(self.interner, Span::default()); + write!(f, "{}", expr.kind) + } + Value::TypedExpr(TypedExpr::StmtId(id)) => { + let hir_statement = self.interner.statement(id); + let stmt = hir_statement.to_display_ast(self.interner, Span::default()); + write!(f, "{}", stmt.kind) + } Value::UnresolvedType(typ) => { if let UnresolvedTypeData::Interned(id) = typ { let typ = self.interner.get_unresolved_type_data(*id); From 712468a08dd95eaed42345320e8c3e6751e88c47 Mon Sep 17 00:00:00 2001 From: Michael J Klein Date: Tue, 3 Sep 2024 15:41:30 -0400 Subject: [PATCH 08/26] chore: make nested slice error more clear for `[[T]; N]` case (#5906) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit # Description ## Problem\* The `NestedSlice` error is unclear when there is only one slice: ```bash error: Nested slices are not supported ┌─ /Users/michaelklein/Coding/noir/two_tag/src/main.nr:31:12 │ 31 │ rules: [[Field]; N], │ ------------ Try to use a constant sized array instead ``` ## Summary\* Rephrases the `NestedSlice` error message to make it more clear that slices within arrays or other slices are disallowed. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. 
- [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- compiler/noirc_evaluator/src/errors.rs | 2 +- compiler/noirc_frontend/src/hir/resolution/errors.rs | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index 2c7ec0f8e1a..c4f56d032f9 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -44,7 +44,7 @@ pub enum RuntimeError { StaticAssertDynamicPredicate { call_stack: CallStack }, #[error("Argument is false")] StaticAssertFailed { call_stack: CallStack }, - #[error("Nested slices are not supported")] + #[error("Nested slices, i.e. slices within an array or slice, are not supported")] NestedSlice { call_stack: CallStack }, #[error("Big Integer modulus do no match")] BigIntModulus { call_stack: CallStack }, diff --git a/compiler/noirc_frontend/src/hir/resolution/errors.rs b/compiler/noirc_frontend/src/hir/resolution/errors.rs index cede04dd582..c2038c646b5 100644 --- a/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -72,7 +72,7 @@ pub enum ResolverError { NumericConstantInFormatString { name: String, span: Span }, #[error("Closure environment must be a tuple or unit type")] InvalidClosureEnvironment { typ: Type, span: Span }, - #[error("Nested slices are not supported")] + #[error("Nested slices, i.e. slices within an array or slice, are not supported")] NestedSlices { span: Span }, #[error("#[recursive] attribute is only allowed on entry points to a program")] MisplacedRecursiveAttribute { ident: Ident }, @@ -323,8 +323,8 @@ impl<'a> From<&'a ResolverError> for Diagnostic { format!("{typ} is not a valid closure environment type"), "Closure environment must be a tuple or unit type".to_string(), *span), ResolverError::NestedSlices { span } => Diagnostic::simple_error( - "Nested slices are not supported".into(), - "Try to use a constant sized array instead".into(), + "Nested slices, i.e. slices within an array or slice, are not supported".into(), + "Try to use a constant sized array or BoundedVec instead".into(), *span, ), ResolverError::MisplacedRecursiveAttribute { ident } => { From 34cb23fc645cbc2baa81dcc664324db340fa76f9 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 4 Sep 2024 07:43:30 +0100 Subject: [PATCH 09/26] chore: bump some dependencies (#5893) # Description ## Problem\* Resolves ## Summary\* Bumping some dependencies ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- Cargo.lock | 189 +++++++++++++++++++++++++++-------------------------- 1 file changed, 98 insertions(+), 91 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cd936e4bca2..3f56f5b6965 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7,7 +7,7 @@ name = "acir" version = "0.49.0" dependencies = [ "acir_field", - "base64 0.21.2", + "base64 0.21.7", "bincode", "brillig", "criterion", @@ -488,9 +488,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" -version = "0.21.2" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64ct" @@ -665,7 +665,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" dependencies = [ "memchr", - "regex-automata 0.3.3", + "regex-automata 0.3.9", "serde", ] @@ -771,9 +771,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.37" +version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a0d04d43504c61aa6c7531f1871dd0d418d91130162063b789da00fd7057a5e" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", @@ -781,7 +781,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.52.4", + "windows-targets 0.52.6", ] [[package]] @@ -1123,7 +1123,7 @@ dependencies = [ "autocfg", "cfg-if 1.0.0", "crossbeam-utils", - "memoffset 0.9.0", + "memoffset 0.9.1", "scopeguard", ] @@ -1454,12 +1454,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.5" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -2428,7 +2428,7 @@ version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" dependencies = [ - "regex-automata 0.4.5", + "regex-automata 0.4.7", ] [[package]] @@ -2471,9 +2471,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.3" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "lock_api" @@ -2567,9 +2567,9 @@ dependencies = [ [[package]] name = "memoffset" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" dependencies = [ "autocfg", ] @@ -2582,9 +2582,9 @@ checksum = "2145869435ace5ea6ea3d35f59be559317ec9a0d04e1812d5f185a87b6d36f1a" [[package]] name = "miniz_oxide" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" dependencies = [ "adler", ] @@ -2986,7 +2986,7 @@ name = "noirc_errors" version = "0.33.0" dependencies = [ "acvm", - "base64 0.21.2", + "base64 0.21.7", "chumsky", "codespan", "codespan-reporting", @@ -3027,7 +3027,7 @@ name = "noirc_frontend" version = "0.33.0" dependencies = [ "acvm", - "base64 0.21.2", + "base64 0.21.7", "bn254_blackbox_solver", "cfg-if 1.0.0", "chumsky", @@ -3272,7 +3272,7 @@ dependencies = [ "libc", "redox_syscall 0.3.5", "smallvec", - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -3743,9 +3743,9 @@ checksum = "977b1e897f9d764566891689e642653e5ed90c6895106acd005eb4c1d0203991" [[package]] name = "rayon" -version = "1.8.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ "either", "rayon-core", @@ -3753,9 +3753,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.12.0" +version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ "crossbeam-deque", "crossbeam-utils", @@ -3813,7 +3813,7 @@ checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.5", + "regex-automata 0.4.7", "regex-syntax 0.8.2", ] @@ -3828,15 +3828,15 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.3" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" +checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9" [[package]] name = "regex-automata" -version = "0.4.5" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", @@ -3945,15 +3945,15 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.4" +version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ "bitflags 2.5.0", "errno", "libc", "linux-raw-sys", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -4113,9 +4113,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.202" +version = "1.0.209" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "226b61a0d411b2ba5ff6d7f73a476ac4f8bb900373459cd00fab8512828ba395" +checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09" dependencies = [ "serde_derive", ] @@ -4156,9 +4156,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.202" +version = "1.0.209" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6048858004bcff69094cd972ed40a32500f153bd3be9f716b2eed2e8217c4838" +checksum = 
"a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170" dependencies = [ "proc-macro2", "quote", @@ -4202,7 +4202,7 @@ version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1402f54f9a3b9e2efe71c1cea24e648acce55887983553eeb858cf3115acfd49" dependencies = [ - "base64 0.21.2", + "base64 0.21.7", "chrono", "hex", "indexmap 1.9.3", @@ -4670,9 +4670,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.36.0" +version = "1.38.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" +checksum = "eb2caba9f80616f438e09748d5acda951967e1ea58508ef53d9c6402485a46df" dependencies = [ "backtrace", "bytes", @@ -4687,9 +4687,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" dependencies = [ "proc-macro2", "quote", @@ -5215,7 +5215,7 @@ version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af6041b3f84485c21b57acdc0fee4f4f0c93f426053dc05fa5d6fc262537bbff" dependencies = [ - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -5224,7 +5224,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -5233,122 +5233,129 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.4", + "windows-targets 0.52.6", ] [[package]] name = "windows-targets" -version = "0.48.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm 0.48.0", - "windows_aarch64_msvc 0.48.0", - "windows_i686_gnu 0.48.0", - "windows_i686_msvc 0.48.0", - "windows_x86_64_gnu 0.48.0", - "windows_x86_64_gnullvm 0.48.0", - "windows_x86_64_msvc 0.48.0", + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", ] [[package]] name = "windows-targets" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.4", - "windows_aarch64_msvc 0.52.4", - "windows_i686_gnu 0.52.4", - "windows_i686_msvc 0.52.4", - "windows_x86_64_gnu 0.52.4", - "windows_x86_64_gnullvm 0.52.4", - "windows_x86_64_msvc 0.52.4", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 
0.52.6", ] [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = 
"0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" From 33bd102d6021912b56fe880efab65346c3ea9228 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Wed, 4 Sep 2024 07:06:45 -0400 Subject: [PATCH 10/26] feat: Sync from aztec-packages (#5917) Automated pull of Noir development from [aztec-packages](https://github.com/AztecProtocol/aztec-packages). BEGIN_COMMIT_OVERRIDE fix: Temporary register leaks in brillig gen (https://github.com/AztecProtocol/aztec-packages/pull/8350) feat: calculate `FunctionSelector`s and `EventSelector`s during comptime (https://github.com/AztecProtocol/aztec-packages/pull/8354) feat: Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/8333) END_COMMIT_OVERRIDE Co-authored-by: TomAFrench --- .aztec-sync-commit | 2 +- aztec_macros/src/transforms/events.rs | 12 ++-- aztec_macros/src/transforms/note_interface.rs | 6 +- .../brillig/brillig_gen/brillig_black_box.rs | 67 ++++++++++++++---- .../src/brillig/brillig_gen/brillig_block.rs | 69 ++++++++++--------- tooling/nargo_fmt/tests/expected/contract.nr | 12 ++-- tooling/nargo_fmt/tests/input/contract.nr | 12 ++-- 7 files changed, 115 insertions(+), 65 deletions(-) diff --git a/.aztec-sync-commit b/.aztec-sync-commit index 49baefec2c3..58bcd84922c 100644 --- a/.aztec-sync-commit +++ b/.aztec-sync-commit @@ -1 +1 @@ -f5bbb89b489bc85f286bcc5ed45c30f38032810c +249e50efafd306fa8cd9005972636adbddbca81e diff --git a/aztec_macros/src/transforms/events.rs b/aztec_macros/src/transforms/events.rs index 8b71bd77ae6..ede8a350bf2 100644 --- a/aztec_macros/src/transforms/events.rs +++ b/aztec_macros/src/transforms/events.rs @@ -230,7 +230,7 @@ fn generate_fn_get_event_type_id( let function_source = format!( " fn get_event_type_id() -> dep::aztec::protocol_types::abis::event_selector::EventSelector {{ - dep::aztec::protocol_types::abis::event_selector::EventSelector::from_signature(\"{event_type}({from_signature_input})\") + comptime {{ dep::aztec::protocol_types::abis::event_selector::EventSelector::from_signature(\"{event_type}({from_signature_input})\") }} }} ", ) @@ -260,8 +260,8 @@ fn generate_fn_private_to_be_bytes( fn private_to_be_bytes(self: {event_type}, randomness: Field) -> [u8; {byte_length}] {{ let mut buffer: [u8; {byte_length}] = [0; {byte_length}]; - let randomness_bytes = randomness.to_be_bytes(32); - let event_type_id_bytes = {event_type}::get_event_type_id().to_field().to_be_bytes(32); + let randomness_bytes: [u8; 32] = randomness.to_be_bytes(); + let event_type_id_bytes: [u8; 32] = 
{event_type}::get_event_type_id().to_field().to_be_bytes(); for i in 0..32 {{ buffer[i] = randomness_bytes[i]; @@ -271,7 +271,7 @@ fn generate_fn_private_to_be_bytes( let serialized_event = self.serialize(); for i in 0..serialized_event.len() {{ - let bytes = serialized_event[i].to_be_bytes(32); + let bytes: [u8; 32] = serialized_event[i].to_be_bytes(); for j in 0..32 {{ buffer[64 + i * 32 + j] = bytes[j]; }} @@ -308,7 +308,7 @@ fn generate_fn_to_be_bytes( fn to_be_bytes(self: {event_type}) -> [u8; {byte_length_without_randomness}] {{ let mut buffer: [u8; {byte_length_without_randomness}] = [0; {byte_length_without_randomness}]; - let event_type_id_bytes = {event_type}::get_event_type_id().to_field().to_be_bytes(32); + let event_type_id_bytes: [u8; 32] = {event_type}::get_event_type_id().to_field().to_be_bytes(); for i in 0..32 {{ buffer[i] = event_type_id_bytes[i]; @@ -317,7 +317,7 @@ fn generate_fn_to_be_bytes( let serialized_event = self.serialize(); for i in 0..serialized_event.len() {{ - let bytes = serialized_event[i].to_be_bytes(32); + let bytes: [u8; 32] = serialized_event[i].to_be_bytes(); for j in 0..32 {{ buffer[32 + i * 32 + j] = bytes[j]; }} diff --git a/aztec_macros/src/transforms/note_interface.rs b/aztec_macros/src/transforms/note_interface.rs index 8df1d128c6f..df237926486 100644 --- a/aztec_macros/src/transforms/note_interface.rs +++ b/aztec_macros/src/transforms/note_interface.rs @@ -244,8 +244,8 @@ fn generate_note_to_be_bytes( let mut buffer: [u8; {0}] = [0; {0}]; - let storage_slot_bytes = storage_slot.to_be_bytes(32); - let note_type_id_bytes = {1}::get_note_type_id().to_be_bytes(32); + let storage_slot_bytes: [u8; 32] = storage_slot.to_be_bytes(); + let note_type_id_bytes: [u8; 32] = {1}::get_note_type_id().to_be_bytes(); for i in 0..32 {{ buffer[i] = storage_slot_bytes[i]; @@ -253,7 +253,7 @@ fn generate_note_to_be_bytes( }} for i in 0..serialized_note.len() {{ - let bytes = serialized_note[i].to_be_bytes(32); + let bytes: [u8; 32] = serialized_note[i].to_be_bytes(); for j in 0..32 {{ buffer[64 + i * 32 + j] = bytes[j]; }} diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index 802d442885f..bd9190c1cfe 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -29,6 +29,7 @@ pub(crate) fn convert_black_box_call( + brillig_context: &mut BrilligContext, + original_array_or_vector: &BrilligVariable, + converted_vector: BrilligVector, + bb_func: &BlackBoxFunc, +) { + match original_array_or_vector { + BrilligVariable::BrilligArray(_) => { + brillig_context.deallocate_register(converted_vector.size); + } + BrilligVariable::BrilligVector(_) => {} + _ => unreachable!( + "ICE: {} expected an array or a vector, but got {:?}", + bb_func.name(), + original_array_or_vector + ), + } +} diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index d3d0e2231ad..26abafe177f 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -767,6 +767,12 @@ impl<'block> BrilligBlock<'block> { // puts the returns into the returned_registers and restores saved_registers self.brillig_context .codegen_post_call_prep_returns_load_registers(&returned_registers, &saved_registers); + + // Reset the register 
state to the one needed to hold the current available variables + let variables = self.variables.get_available_variables(self.function_context); + let registers = + variables.into_iter().flat_map(|variable| variable.extract_registers()).collect(); + self.brillig_context.set_allocated_registers(registers); } fn validate_array_index( @@ -1751,7 +1757,7 @@ impl<'block> BrilligBlock<'block> { dfg, ); let array = variable.extract_array(); - self.allocate_nested_array(typ, Some(array)); + self.allocate_foreign_call_result_array(typ, array); variable } @@ -1778,40 +1784,39 @@ impl<'block> BrilligBlock<'block> { } } - fn allocate_nested_array( - &mut self, - typ: &Type, - array: Option, - ) -> BrilligVariable { - match typ { - Type::Array(types, size) => { - let array = array.unwrap_or(BrilligArray { - pointer: self.brillig_context.allocate_register(), - size: *size, - rc: self.brillig_context.allocate_register(), - }); - self.brillig_context.codegen_allocate_fixed_length_array(array.pointer, array.size); - self.brillig_context.usize_const_instruction(array.rc, 1_usize.into()); - - let mut index = 0_usize; - for _ in 0..*size { - for element_type in types.iter() { - match element_type { - Type::Array(_, _) => { - let inner_array = self.allocate_nested_array(element_type, None); - let idx = - self.brillig_context.make_usize_constant_instruction(index.into()); - self.brillig_context.codegen_store_variable_in_array(array.pointer, idx, inner_array); - } - Type::Slice(_) => unreachable!("ICE: unsupported slice type in allocate_nested_array(), expects an array or a numeric type"), - _ => (), - } - index += 1; + fn allocate_foreign_call_result_array(&mut self, typ: &Type, array: BrilligArray) { + let Type::Array(types, size) = typ else { + unreachable!("ICE: allocate_foreign_call_array() expects an array, got {typ:?}") + }; + + self.brillig_context.codegen_allocate_fixed_length_array(array.pointer, array.size); + self.brillig_context.usize_const_instruction(array.rc, 1_usize.into()); + + let mut index = 0_usize; + for _ in 0..*size { + for element_type in types.iter() { + match element_type { + Type::Array(_, nested_size) => { + let inner_array = BrilligArray { + pointer: self.brillig_context.allocate_register(), + rc: self.brillig_context.allocate_register(), + size: *nested_size, + }; + self.allocate_foreign_call_result_array(element_type, inner_array); + + let idx = + self.brillig_context.make_usize_constant_instruction(index.into()); + self.brillig_context.codegen_store_variable_in_array(array.pointer, idx, BrilligVariable::BrilligArray(inner_array)); + + self.brillig_context.deallocate_single_addr(idx); + self.brillig_context.deallocate_register(inner_array.pointer); + self.brillig_context.deallocate_register(inner_array.rc); } + Type::Slice(_) => unreachable!("ICE: unsupported slice type in allocate_nested_array(), expects an array or a numeric type"), + _ => (), } - BrilligVariable::BrilligArray(array) + index += 1; } - _ => unreachable!("ICE: allocate_nested_array() expects an array, got {typ:?}"), } } diff --git a/tooling/nargo_fmt/tests/expected/contract.nr b/tooling/nargo_fmt/tests/expected/contract.nr index e3a5877725a..cb7505f845c 100644 --- a/tooling/nargo_fmt/tests/expected/contract.nr +++ b/tooling/nargo_fmt/tests/expected/contract.nr @@ -1,6 +1,6 @@ -// Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. -// Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. 
-// Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. +// Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. +// Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. +// Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. // Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. contract Benchmarking { use aztec::protocol_types::abis::function_selector::FunctionSelector; @@ -63,7 +63,9 @@ contract Benchmarking { storage.balances.at(owner).write(current + value); let _callStackItem1 = context.call_public_function( context.this_address(), - FunctionSelector::from_signature("broadcast(Field)"), + comptime { + FunctionSelector::from_signature("broadcast(Field)") + }, [owner] ); } @@ -75,5 +77,5 @@ contract Benchmarking { } } -// Uses the token bridge contract, which tells which input token we need to talk to and handles the exit funds to L1 +// Uses the token bridge contract, which tells which input token we need to talk to and handles the exit funds to L1 contract Uniswap {} diff --git a/tooling/nargo_fmt/tests/input/contract.nr b/tooling/nargo_fmt/tests/input/contract.nr index e3a5877725a..cb7505f845c 100644 --- a/tooling/nargo_fmt/tests/input/contract.nr +++ b/tooling/nargo_fmt/tests/input/contract.nr @@ -1,6 +1,6 @@ -// Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. -// Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. -// Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. +// Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. +// Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. +// Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. // Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. contract Benchmarking { use aztec::protocol_types::abis::function_selector::FunctionSelector; @@ -63,7 +63,9 @@ contract Benchmarking { storage.balances.at(owner).write(current + value); let _callStackItem1 = context.call_public_function( context.this_address(), - FunctionSelector::from_signature("broadcast(Field)"), + comptime { + FunctionSelector::from_signature("broadcast(Field)") + }, [owner] ); } @@ -75,5 +77,5 @@ contract Benchmarking { } } -// Uses the token bridge contract, which tells which input token we need to talk to and handles the exit funds to L1 +// Uses the token bridge contract, which tells which input token we need to talk to and handles the exit funds to L1 contract Uniswap {} From 79df6a36901d50d8cc8d5b59e51078fe74ad1ac2 Mon Sep 17 00:00:00 2001 From: jfecher Date: Wed, 4 Sep 2024 06:30:51 -0500 Subject: [PATCH 11/26] chore: Add pass to normalize Ids in SSA (#5909) # Description ## Problem\* SSA can be difficult to debug manually, particularly for very large files. Even when comparing two sources side by side it can be hard to find exactly where they differ since one optimization difference earlier on can affect where ValueIds start in every function afterward. 
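To make the problem concrete, here is a small illustrative sketch (not the pass added in this PR, whose real implementation is in the diff below): if ids are handed out again in program order, two otherwise-identical programs whose ids merely start at different offsets normalize to the same text, and a diff then only shows real differences.

```rust
// Minimal, hypothetical sketch: remap ids to fresh consecutive ones in
// first-seen order. The actual pass works on DFG values, blocks and function
// ids; the bare u32 ids here are purely illustrative.
use std::collections::HashMap;

fn normalize(ids_in_program_order: &[u32]) -> Vec<u32> {
    let mut mapping: HashMap<u32, u32> = HashMap::new();
    ids_in_program_order
        .iter()
        .map(|&old| {
            let next = mapping.len() as u32;
            *mapping.entry(old).or_insert(next)
        })
        .collect()
}

fn main() {
    // The same "program" with ids starting at different offsets...
    let a = [7, 8, 7, 9];
    let b = [20, 21, 20, 22];
    // ...normalizes to identical id sequences.
    assert_eq!(normalize(&a), vec![0, 1, 0, 2]);
    assert_eq!(normalize(&b), vec![0, 1, 0, 2]);
}
```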
## Summary\* This PR adds a pass to normalize ids in an SSA program - restarting from v0 after every SSA pass instead of continuing from the previous end. The goal of this is to be able to take two SSA programs and easily diff them to find out where they start differing. E.g. using this on two files containing the final SSA from https://github.com/noir-lang/noir/issues/5771 in both failing and passing versions, it is clear that they differ in exactly one ValueId in one instruction. ## Additional Context This new pass is only run when `--show-ssa` is specified, and is run before each printout. ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- compiler/noirc_evaluator/src/ssa.rs | 4 +- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 10 +- .../check_for_underconstrained_values.rs | 2 +- .../noirc_evaluator/src/ssa/ir/function.rs | 7 + compiler/noirc_evaluator/src/ssa/ir/map.rs | 10 +- .../noirc_evaluator/src/ssa/opt/inlining.rs | 13 +- compiler/noirc_evaluator/src/ssa/opt/mod.rs | 1 + .../src/ssa/opt/normalize_value_ids.rs | 194 ++++++++++++++++++ 8 files changed, 227 insertions(+), 14 deletions(-) create mode 100644 compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs index 57bd76d4f78..ad6645df228 100644 --- a/compiler/noirc_evaluator/src/ssa.rs +++ b/compiler/noirc_evaluator/src/ssa.rs @@ -82,6 +82,7 @@ pub(crate) fn optimize_into_acir( ) -> Result { let ssa_gen_span = span!(Level::TRACE, "ssa_generation"); let ssa_gen_span_guard = ssa_gen_span.enter(); + let mut ssa = SsaBuilder::new( program, options.enable_ssa_logging, @@ -418,8 +419,9 @@ impl SsaBuilder { Ok(self.print(msg)) } - fn print(self, msg: &str) -> Self { + fn print(mut self, msg: &str) -> Self { if self.print_ssa_passes { + self.ssa.normalize_ids(); println!("{msg}\n{}", self.ssa); } self diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index a2b9e46a15a..0360b15d950 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -770,10 +770,12 @@ impl<'a> Context<'a> { .map(|result_id| dfg.type_of_value(*result_id).flattened_size()) .sum(); - let acir_function_id = ssa - .entry_point_to_generated_index - .get(id) - .expect("ICE: should have an associated final index"); + let Some(acir_function_id) = + ssa.entry_point_to_generated_index.get(id) + else { + unreachable!("Expected an associated final index for call to acir function {id} with args {arguments:?}"); + }; + let output_vars = self.acir_context.call_acir_function( AcirFunctionId(*acir_function_id), inputs, diff --git a/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs b/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs index 26eab290d4b..aa5f4c8df95 100644 --- a/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs +++ b/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs @@ -244,7 +244,7 @@ impl Context { } }, Value::ForeignFunction(..) 
=> { - panic!("Should not be able to reach foreign function from non-brillig functions"); + panic!("Should not be able to reach foreign function from non-brillig functions, {func_id} in function {}", function.name()); } Value::Array { .. } | Value::Instruction { .. } diff --git a/compiler/noirc_evaluator/src/ssa/ir/function.rs b/compiler/noirc_evaluator/src/ssa/ir/function.rs index bae9f82e4f1..65a616ef612 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/function.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/function.rs @@ -72,6 +72,13 @@ impl Function { Self { name: another.name.clone(), id, entry_block, dfg, runtime: another.runtime } } + /// Takes the signature (function name & runtime) from a function but does not copy the body. + pub(crate) fn clone_signature(id: FunctionId, another: &Function) -> Self { + let mut new_function = Function::new(another.name.clone(), id); + new_function.runtime = another.runtime; + new_function + } + /// The name of the function. /// Used exclusively for debugging purposes. pub(crate) fn name(&self) -> &str { diff --git a/compiler/noirc_evaluator/src/ssa/ir/map.rs b/compiler/noirc_evaluator/src/ssa/ir/map.rs index 769d52e6e65..23f5380f030 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/map.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/map.rs @@ -1,6 +1,7 @@ use fxhash::FxHashMap as HashMap; use serde::{Deserialize, Serialize}; use std::{ + collections::BTreeMap, hash::Hash, str::FromStr, sync::atomic::{AtomicUsize, Ordering}, @@ -240,7 +241,7 @@ impl std::ops::IndexMut> for DenseMap { /// call to .remove(). #[derive(Debug)] pub(crate) struct SparseMap { - storage: HashMap, T>, + storage: BTreeMap, T>, } impl SparseMap { @@ -271,11 +272,16 @@ impl SparseMap { pub(crate) fn remove(&mut self, id: Id) -> Option { self.storage.remove(&id) } + + /// Unwraps the inner storage of this map + pub(crate) fn into_btree(self) -> BTreeMap, T> { + self.storage + } } impl Default for SparseMap { fn default() -> Self { - Self { storage: HashMap::default() } + Self { storage: Default::default() } } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index 1ff593a1531..7843c55da65 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -2,7 +2,7 @@ //! The purpose of this pass is to inline the instructions of each function call //! within the function caller. If all function calls are known, there will only //! be a single function remaining when the pass finishes. -use std::collections::{BTreeSet, HashSet}; +use std::collections::{BTreeSet, HashSet, VecDeque}; use acvm::acir::AcirField; use iter_extended::{btree_map, vecmap}; @@ -372,14 +372,14 @@ impl<'function> PerFunctionContext<'function> { fn translate_block( &mut self, source_block: BasicBlockId, - block_queue: &mut Vec, + block_queue: &mut VecDeque, ) -> BasicBlockId { if let Some(block) = self.blocks.get(&source_block) { return *block; } // The block is not yet inlined, queue it - block_queue.push(source_block); + block_queue.push_back(source_block); // The block is not already present in the function being inlined into so we must create it. // The block's instructions are not copied over as they will be copied later in inlining. @@ -415,13 +415,14 @@ impl<'function> PerFunctionContext<'function> { /// Inline all reachable blocks within the source_function into the destination function. 
fn inline_blocks(&mut self, ssa: &Ssa) -> Vec { let mut seen_blocks = HashSet::new(); - let mut block_queue = vec![self.source_function.entry_block()]; + let mut block_queue = VecDeque::new(); + block_queue.push_back(self.source_function.entry_block()); // This Vec will contain each block with a Return instruction along with the // returned values of that block. let mut function_returns = vec![]; - while let Some(source_block_id) = block_queue.pop() { + while let Some(source_block_id) = block_queue.pop_front() { if seen_blocks.contains(&source_block_id) { continue; } @@ -609,7 +610,7 @@ impl<'function> PerFunctionContext<'function> { fn handle_terminator_instruction( &mut self, block_id: BasicBlockId, - block_queue: &mut Vec, + block_queue: &mut VecDeque, ) -> Option<(BasicBlockId, Vec)> { match self.source_function.dfg[block_id].unwrap_terminator() { TerminatorInstruction::Jmp { destination, arguments, call_stack } => { diff --git a/compiler/noirc_evaluator/src/ssa/opt/mod.rs b/compiler/noirc_evaluator/src/ssa/opt/mod.rs index 4e5fa262696..bd9d0baff97 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mod.rs @@ -13,6 +13,7 @@ mod die; pub(crate) mod flatten_cfg; mod inlining; mod mem2reg; +mod normalize_value_ids; mod rc; mod remove_bit_shifts; mod remove_enable_side_effects; diff --git a/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs b/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs new file mode 100644 index 00000000000..f11b310494b --- /dev/null +++ b/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs @@ -0,0 +1,194 @@ +use std::collections::BTreeMap; + +use crate::ssa::{ + ir::{ + basic_block::BasicBlockId, + function::{Function, FunctionId}, + map::SparseMap, + post_order::PostOrder, + value::{Value, ValueId}, + }, + ssa_gen::Ssa, +}; +use fxhash::FxHashMap as HashMap; +use iter_extended::vecmap; + +impl Ssa { + /// This is a debugging pass which re-inserts each instruction + /// and block in a fresh DFG context for each function so that ValueIds, + /// BasicBlockIds, and FunctionIds are always identical for the same SSA code. + /// + /// During normal compilation this is often not the case since prior passes + /// may increase the ID counter so that later passes start at different offsets, + /// even if they contain the same SSA code. + pub(crate) fn normalize_ids(&mut self) { + let mut context = Context::default(); + context.populate_functions(&self.functions); + for function in self.functions.values_mut() { + context.normalize_ids(function); + } + self.functions = context.functions.into_btree(); + } +} + +#[derive(Default)] +struct Context { + functions: SparseMap, + + new_ids: IdMaps, +} + +/// Maps from old ids to new ones. +/// Separate from the rest of Context so we can call mutable methods on it +/// while Context gives out mutable references to functions within. +#[derive(Default)] +struct IdMaps { + // Maps old function id -> new function id + function_ids: HashMap, + + // Maps old block id -> new block id + // Cleared in between each function. + blocks: HashMap, + + // Maps old value id -> new value id + // Cleared in between each function. 
+ values: HashMap, +} + +impl Context { + fn populate_functions(&mut self, functions: &BTreeMap) { + for (id, function) in functions { + self.functions.insert_with_id(|new_id| { + self.new_ids.function_ids.insert(*id, new_id); + Function::clone_signature(new_id, function) + }); + } + } + + fn normalize_ids(&mut self, old_function: &mut Function) { + self.new_ids.blocks.clear(); + self.new_ids.values.clear(); + + let new_function_id = self.new_ids.function_ids[&old_function.id()]; + let new_function = &mut self.functions[new_function_id]; + + let mut reachable_blocks = PostOrder::with_function(old_function).into_vec(); + reachable_blocks.reverse(); + + self.new_ids.populate_blocks(&reachable_blocks, old_function, new_function); + + // Map each parameter, instruction, and terminator + for old_block_id in reachable_blocks { + let new_block_id = self.new_ids.blocks[&old_block_id]; + + let old_block = &mut old_function.dfg[old_block_id]; + for old_instruction_id in old_block.take_instructions() { + let instruction = old_function.dfg[old_instruction_id] + .map_values(|value| self.new_ids.map_value(new_function, old_function, value)); + + let call_stack = old_function.dfg.get_call_stack(old_instruction_id); + let old_results = old_function.dfg.instruction_results(old_instruction_id); + + let ctrl_typevars = instruction + .requires_ctrl_typevars() + .then(|| vecmap(old_results, |result| old_function.dfg.type_of_value(*result))); + + let new_results = new_function.dfg.insert_instruction_and_results( + instruction, + new_block_id, + ctrl_typevars, + call_stack, + ); + + assert_eq!(old_results.len(), new_results.len()); + for (old_result, new_result) in old_results.iter().zip(new_results.results().iter()) + { + let old_result = old_function.dfg.resolve(*old_result); + self.new_ids.values.insert(old_result, *new_result); + } + } + + let old_block = &mut old_function.dfg[old_block_id]; + let mut terminator = old_block + .take_terminator() + .map_values(|value| self.new_ids.map_value(new_function, old_function, value)); + terminator.mutate_blocks(|old_block| self.new_ids.blocks[&old_block]); + new_function.dfg.set_block_terminator(new_block_id, terminator); + } + } +} + +impl IdMaps { + fn populate_blocks( + &mut self, + reachable_blocks: &[BasicBlockId], + old_function: &mut Function, + new_function: &mut Function, + ) { + let old_entry = old_function.entry_block(); + self.blocks.insert(old_entry, new_function.entry_block()); + + for old_id in reachable_blocks { + if *old_id != old_entry { + let new_id = new_function.dfg.make_block(); + self.blocks.insert(*old_id, new_id); + } + + let new_id = self.blocks[old_id]; + let old_block = &mut old_function.dfg[*old_id]; + for old_parameter in old_block.take_parameters() { + let old_parameter = old_function.dfg.resolve(old_parameter); + let typ = old_function.dfg.type_of_value(old_parameter); + let new_parameter = new_function.dfg.add_block_parameter(new_id, typ); + self.values.insert(old_parameter, new_parameter); + } + } + } + + fn map_value( + &mut self, + new_function: &mut Function, + old_function: &Function, + old_value: ValueId, + ) -> ValueId { + let old_value = old_function.dfg.resolve(old_value); + match &old_function.dfg[old_value] { + value @ Value::Instruction { instruction, .. } => { + *self.values.get(&old_value).unwrap_or_else(|| { + let instruction = &old_function.dfg[*instruction]; + unreachable!("Unmapped value with id {old_value}: {value:?}\n from instruction: {instruction:?}, SSA: {old_function}") + }) + } + + value @ Value::Param { .. 
} => { + *self.values.get(&old_value).unwrap_or_else(|| { + unreachable!("Unmapped value with id {old_value}: {value:?}") + }) + } + + Value::Function(id) => { + let new_id = self.function_ids[id]; + new_function.dfg.import_function(new_id) + } + + Value::NumericConstant { constant, typ } => { + new_function.dfg.make_constant(*constant, typ.clone()) + } + Value::Array { array, typ } => { + if let Some(value) = self.values.get(&old_value) { + return *value; + } + + let array = array + .iter() + .map(|value| self.map_value(new_function, old_function, *value)) + .collect(); + let new_value = new_function.dfg.make_array(array, typ.clone()); + self.values.insert(old_value, new_value); + new_value + } + Value::Intrinsic(intrinsic) => new_function.dfg.import_intrinsic(*intrinsic), + Value::ForeignFunction(name) => new_function.dfg.import_foreign_function(name), + } + } +} From 1737b656c861706c38b59bd5ef6cd095687a2898 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Wed, 4 Sep 2024 08:37:31 -0400 Subject: [PATCH 12/26] feat(perf): Remove last store in return block if last load is before that store (#5910) # Description ## Problem\* Working towards #4535 ## Summary\* Small optimization found while working in mem2reg. Now that we have a small per function state we can see which stores/loads that are leftover can be removed. A place we are potentially missing stores that can be removed is in return blocks. If we have a store inside of a return block that is not loaded from inside that block, we can safely remove that store (given it is not a reference param). ## Additional Context ## Documentation\* Check one: - [X] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [X] I have tested the changes locally. - [X] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> Co-authored-by: TomAFrench --- .../noirc_evaluator/src/ssa/opt/mem2reg.rs | 32 ++++++++++++++++--- 1 file changed, 27 insertions(+), 5 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index 9d6582c0db7..3d98f4126cf 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -116,7 +116,7 @@ struct PerFunctionContext<'f> { /// Track a value's last load across all blocks. /// If a value is not used in anymore loads we can remove the last store to that value. - last_loads: HashMap, + last_loads: HashMap, } impl<'f> PerFunctionContext<'f> { @@ -152,9 +152,31 @@ impl<'f> PerFunctionContext<'f> { // This rule does not apply to reference parameters, which we must also check for before removing these stores. for (block_id, block) in self.blocks.iter() { let block_params = self.inserter.function.dfg.block_parameters(*block_id); - for (value, store_instruction) in block.last_stores.iter() { - let is_reference_param = block_params.contains(value); - if self.last_loads.get(value).is_none() && !is_reference_param { + for (store_address, store_instruction) in block.last_stores.iter() { + let is_reference_param = block_params.contains(store_address); + let terminator = self.inserter.function.dfg[*block_id].unwrap_terminator(); + + let is_return = matches!(terminator, TerminatorInstruction::Return { .. 
}); + let remove_load = if is_return { + // Determine whether the last store is used in the return value + let mut is_return_value = false; + terminator.for_each_value(|return_value| { + is_return_value = return_value == *store_address || is_return_value; + }); + + // If the last load of a store is not part of the block with a return terminator, + // we can safely remove this store. + let last_load_not_in_return = self + .last_loads + .get(store_address) + .map(|(_, last_load_block)| *last_load_block != *block_id) + .unwrap_or(true); + !is_return_value && last_load_not_in_return + } else { + self.last_loads.get(store_address).is_none() + }; + + if remove_load && !is_reference_param { self.instructions_to_remove.insert(*store_instruction); } } @@ -259,7 +281,7 @@ impl<'f> PerFunctionContext<'f> { } else { references.mark_value_used(address, self.inserter.function); - self.last_loads.insert(address, instruction); + self.last_loads.insert(address, (instruction, block_id)); } } Instruction::Store { address, value } => { From 779e013147cecc6c6bd13492cd60c4e26b7a7113 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 4 Sep 2024 14:17:34 +0100 Subject: [PATCH 13/26] chore: remove equality operation on boolean constraints against constants (#5919) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit … # Description ## Problem\* Resolves ## Summary\* We're currently performing a `x == 1` equality and then constraining the result to be `1` when dealing with boolean values in brillig. This PR updates the codegen to just act on `x` directly. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .../src/brillig/brillig_gen/brillig_block.rs | 44 +++++++++++++++---- 1 file changed, 35 insertions(+), 9 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 26abafe177f..ef5fbce83d4 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -255,16 +255,40 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_binary(binary, dfg, result_var); } Instruction::Constrain(lhs, rhs, assert_message) => { - let condition = SingleAddrVariable { - address: self.brillig_context.allocate_register(), - bit_size: 1, + let (condition, deallocate) = match ( + dfg.get_numeric_constant_with_type(*lhs), + dfg.get_numeric_constant_with_type(*rhs), + ) { + // If the constraint is of the form `x == u1 1` then we can simply constrain `x` directly + ( + Some((constant, Type::Numeric(NumericType::Unsigned { bit_size: 1 }))), + None, + ) if constant == FieldElement::one() => { + (self.convert_ssa_single_addr_value(*rhs, dfg), false) + } + ( + None, + Some((constant, Type::Numeric(NumericType::Unsigned { bit_size: 1 }))), + ) if constant == FieldElement::one() => { + (self.convert_ssa_single_addr_value(*lhs, dfg), false) + } + + // Otherwise we need to perform the equality explicitly. 
+ _ => { + let condition = SingleAddrVariable { + address: self.brillig_context.allocate_register(), + bit_size: 1, + }; + self.convert_ssa_binary( + &Binary { lhs: *lhs, rhs: *rhs, operator: BinaryOp::Eq }, + dfg, + condition, + ); + + (condition, true) + } }; - self.convert_ssa_binary( - &Binary { lhs: *lhs, rhs: *rhs, operator: BinaryOp::Eq }, - dfg, - condition, - ); match assert_message { Some(ConstrainError::Dynamic(selector, values)) => { let payload_values = @@ -287,7 +311,9 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.codegen_constrain(condition, None); } } - self.brillig_context.deallocate_single_addr(condition); + if deallocate { + self.brillig_context.deallocate_single_addr(condition); + } } Instruction::Allocate => { let result_value = dfg.instruction_results(instruction_id)[0]; From 44cf9a2140bc06b550d4b46966f1637598ac11a7 Mon Sep 17 00:00:00 2001 From: Ary Borenszweig Date: Wed, 4 Sep 2024 12:15:10 -0300 Subject: [PATCH 14/26] fix: prevent comptime println from crashing LSP (#5918) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit # Description ## Problem Resolves #5904 ## Summary I found out that writing to the error stream when connected to the LSP client actually sends that output to the LSP output window ("Noir Language Server") in our case so I chose to do that for comptime println/print. Advantages: - No more crashing of the LSP server on comptime println - If you know it goes to that output you can actually see the output without having to go to a terminal to run that code, so debugging comptime will be pretty fast now - (if you don't know the output goes there, it's fine too: at least the LSP server doesn't crash 😄) ![lsp-comptime-println](https://github.com/user-attachments/assets/7d3ecaca-00fb-4c15-90a0-9868260ce7f5) ## Additional Context Though the original issue was about using sockets to communicate with the LSP client, I think we won't need that after all if comptime println was the only issue (and I think most LSP servers use stdin/stdout for communication because it's simpler). Also: it would be better if this output showed up in a dedicated output window, say "Noir Language comptime output" or something like that, but only the LSP client can create those windows and write to them... so implementing that would mean somehow communicating this data from the server to the client (maybe a socket?), which is much harder to do... so for now the output will have to show up like regular logs. And a question: when the LSP server starts we have this code: ```rust eprintln!("LSP starting..."); ``` that shows up on the output window... should we remove it? It feel like it was used to debug the server to know that it started, but I don't know how useful it is now. ## Documentation Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- .../src/hir/comptime/interpreter.rs | 16 +++++++++++++--- tooling/nargo_cli/src/cli/lsp_cmd.rs | 2 -- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 4980045c68d..d8e62b66eca 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -1653,10 +1653,20 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { assert_eq!(arguments.len(), 2); let print_newline = arguments[0].0 == Value::Bool(true); - if print_newline { - println!("{}", arguments[1].0.display(self.elaborator.interner)); + let contents = arguments[1].0.display(self.elaborator.interner); + if self.elaborator.interner.is_in_lsp_mode() { + // If we `println!` in LSP it gets mixed with the protocol stream and leads to crashing + // the connection. If we use `eprintln!` not only it doesn't crash, but the output + // appears in the "Noir Language Server" output window in case you want to see it. + if print_newline { + eprintln!("{}", contents); + } else { + eprint!("{}", contents); + } + } else if print_newline { + println!("{}", contents); } else { - print!("{}", arguments[1].0.display(self.elaborator.interner)); + print!("{}", contents); } Ok(Value::Unit) diff --git a/tooling/nargo_cli/src/cli/lsp_cmd.rs b/tooling/nargo_cli/src/cli/lsp_cmd.rs index 9ff7a42e5f5..bfaa913b33a 100644 --- a/tooling/nargo_cli/src/cli/lsp_cmd.rs +++ b/tooling/nargo_cli/src/cli/lsp_cmd.rs @@ -35,8 +35,6 @@ pub(crate) fn run(_args: LspCommand, _config: NargoConfig) -> Result<(), CliErro .service(router) }); - eprintln!("LSP starting..."); - // Prefer truly asynchronous piped stdin/stdout without blocking tasks. #[cfg(unix)] let (stdin, stdout) = ( From d2caa5bb86f944d6d09182482bef6e35ca2213d6 Mon Sep 17 00:00:00 2001 From: Ary Borenszweig Date: Wed, 4 Sep 2024 14:09:54 -0300 Subject: [PATCH 15/26] feat: LSP will now suggest private items if they are visible (#5923) # Description ## Problem Resolves #5879 ## Summary Uses the existing visibility check instead of just considering private items to never be visible. ## Additional Context None. ## Documentation Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
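As a rough illustration of the new behaviour (hypothetical names, in the spirit of the test added below): a function that is private but defined in an ancestor module is visible from a child module, so completion can now offer it as an auto-import instead of filtering it out for not being `pub`.

```rust
mod child {
    use super::helper; // the import that completion can now suggest,
                       // even though `helper` is not `pub`

    fn call_it() {
        helper();
    }
}

fn helper() {}

fn main() {}
```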
--- .../noirc_frontend/src/elaborator/comptime.rs | 5 +--- .../src/hir/def_collector/dc_mod.rs | 5 +--- tooling/lsp/src/modules.rs | 5 ++-- .../requests/code_action/import_or_qualify.rs | 1 + tooling/lsp/src/requests/completion.rs | 22 ++------------- .../src/requests/completion/auto_import.rs | 1 + tooling/lsp/src/requests/completion/tests.rs | 25 +++++++++++++++++ tooling/lsp/src/visibility.rs | 28 +++++++++++++------ 8 files changed, 54 insertions(+), 38 deletions(-) diff --git a/compiler/noirc_frontend/src/elaborator/comptime.rs b/compiler/noirc_frontend/src/elaborator/comptime.rs index baa9c0ab371..5678fa8ddee 100644 --- a/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -269,10 +269,7 @@ impl<'context> Elaborator<'context> { let module = self.module_id(); self.interner.push_function(id, &function.def, module, location); - if self.interner.is_in_lsp_mode() - && !function.def.is_test() - && !function.def.is_private() - { + if self.interner.is_in_lsp_mode() && !function.def.is_test() { self.interner.register_function(id, &function.def); } diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 590cdc541ce..1dbd5a1383b 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -239,10 +239,7 @@ impl<'a> ModCollector<'a> { let location = Location::new(function.span(), self.file_id); context.def_interner.push_function(func_id, &function.def, module, location); - if context.def_interner.is_in_lsp_mode() - && !function.def.is_test() - && !function.def.is_private() - { + if context.def_interner.is_in_lsp_mode() && !function.def.is_test() { context.def_interner.register_function(func_id, &function.def); } diff --git a/tooling/lsp/src/modules.rs b/tooling/lsp/src/modules.rs index 54074dbd94c..d78da15a8ff 100644 --- a/tooling/lsp/src/modules.rs +++ b/tooling/lsp/src/modules.rs @@ -47,10 +47,11 @@ pub(crate) fn module_full_path( current_module_id: ModuleId, current_module_parent_id: Option, interner: &NodeInterner, + def_maps: &BTreeMap, ) -> Option { let full_path; if let ModuleDefId::ModuleId(module_id) = module_def_id { - if !is_visible(visibility, current_module_id, module_id) { + if !is_visible(module_id, current_module_id, visibility, def_maps) { return None; } @@ -61,7 +62,7 @@ pub(crate) fn module_full_path( return None; }; - if !is_visible(visibility, current_module_id, parent_module) { + if !is_visible(parent_module, current_module_id, visibility, def_maps) { return None; } diff --git a/tooling/lsp/src/requests/code_action/import_or_qualify.rs b/tooling/lsp/src/requests/code_action/import_or_qualify.rs index d07d117a317..25accc8a008 100644 --- a/tooling/lsp/src/requests/code_action/import_or_qualify.rs +++ b/tooling/lsp/src/requests/code_action/import_or_qualify.rs @@ -52,6 +52,7 @@ impl<'a> CodeActionFinder<'a> { self.module_id, current_module_parent_id, self.interner, + self.def_maps, ) else { continue; }; diff --git a/tooling/lsp/src/requests/completion.rs b/tooling/lsp/src/requests/completion.rs index 987746d37ce..59758f4b972 100644 --- a/tooling/lsp/src/requests/completion.rs +++ b/tooling/lsp/src/requests/completion.rs @@ -22,10 +22,7 @@ use noirc_frontend::{ UnresolvedGenerics, UnresolvedType, UseTree, UseTreeKind, Visitor, }, graph::{CrateId, Dependency}, - hir::{ - def_map::{CrateDefMap, LocalModuleId, ModuleId}, - resolution::import::can_reference_module_id, - }, + 
hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}, hir_def::traits::Trait, macros_api::{ModuleDefId, NodeInterner}, node_interner::ReferenceId, @@ -34,7 +31,7 @@ use noirc_frontend::{ }; use sort_text::underscore_sort_text; -use crate::{requests::to_lsp_location, utils, LspState}; +use crate::{requests::to_lsp_location, utils, visibility::is_visible, LspState}; use super::process_request; @@ -1263,21 +1260,6 @@ fn module_def_id_from_reference_id(reference_id: ReferenceId) -> Option, -) -> bool { - can_reference_module_id( - def_maps, - current_module_id.krate, - current_module_id.local_id, - target_module_id, - visibility, - ) -} - #[cfg(test)] mod completion_name_matches_tests { use crate::requests::completion::name_matches; diff --git a/tooling/lsp/src/requests/completion/auto_import.rs b/tooling/lsp/src/requests/completion/auto_import.rs index bbd471dfea1..d8823794999 100644 --- a/tooling/lsp/src/requests/completion/auto_import.rs +++ b/tooling/lsp/src/requests/completion/auto_import.rs @@ -53,6 +53,7 @@ impl<'a> NodeFinder<'a> { self.module_id, current_module_parent_id, self.interner, + self.def_maps, ) else { continue; }; diff --git a/tooling/lsp/src/requests/completion/tests.rs b/tooling/lsp/src/requests/completion/tests.rs index d621ca21bb8..ca959f5d5ca 100644 --- a/tooling/lsp/src/requests/completion/tests.rs +++ b/tooling/lsp/src/requests/completion/tests.rs @@ -1863,4 +1863,29 @@ mod completion_tests { Some("(use bar::foobar)".to_string()), ); } + + #[test] + async fn test_auto_import_suggests_private_function_if_visibile() { + let src = r#" + mod foo { + fn qux() { + barba>|< + } + } + + fn barbaz() {} + + fn main() {} + "#; + + let items = get_completions(src).await; + assert_eq!(items.len(), 1); + + let item = &items[0]; + assert_eq!(item.label, "barbaz()"); + assert_eq!( + item.label_details.as_ref().unwrap().detail, + Some("(use super::barbaz)".to_string()), + ); + } } diff --git a/tooling/lsp/src/visibility.rs b/tooling/lsp/src/visibility.rs index aad8b47fbbe..d6e26f7bc48 100644 --- a/tooling/lsp/src/visibility.rs +++ b/tooling/lsp/src/visibility.rs @@ -1,13 +1,25 @@ -use noirc_frontend::{ast::ItemVisibility, hir::def_map::ModuleId}; +use std::collections::BTreeMap; + +use noirc_frontend::{ + ast::ItemVisibility, + graph::CrateId, + hir::{ + def_map::{CrateDefMap, ModuleId}, + resolution::import::can_reference_module_id, + }, +}; pub(super) fn is_visible( + target_module_id: ModuleId, + current_module_id: ModuleId, visibility: ItemVisibility, - current_module: ModuleId, - target_module: ModuleId, + def_maps: &BTreeMap, ) -> bool { - match visibility { - ItemVisibility::Public => true, - ItemVisibility::Private => false, - ItemVisibility::PublicCrate => current_module.krate == target_module.krate, - } + can_reference_module_id( + def_maps, + current_module_id.krate, + current_module_id.local_id, + target_module_id, + visibility, + ) } From 91f693d81edb1913bf56d2c1038441cec5844646 Mon Sep 17 00:00:00 2001 From: Ary Borenszweig Date: Wed, 4 Sep 2024 14:46:57 -0300 Subject: [PATCH 16/26] feat: check argument count and types on attribute function callback (#5921) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit # Description ## Problem Resolves #5903 ## Summary Now the compiler will check that attribute function callbacks have at least one argument, that that argument's type matches the corresponding type, and that remaining arguments also match the types given. 
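As a hypothetical sketch of a callback that satisfies these checks (names invented here, mirroring the style of the examples below): the first parameter must accept the annotated item, and any arguments written in the attribute must line up with the remaining parameters.

```rust
#[my_attr(3)]
fn foo() {}

// `foo` is a function, so the implicit first argument is a `FunctionDefinition`;
// the explicit `3` must then unify with the second parameter's type.
fn my_attr(_f: FunctionDefinition, _n: Field) {}

fn main() {}
```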
Also previously errors on these callbacks were shown on the function that had the attribute, instead of on the attribute, likely because attributes didn't have a Span attached to them: this PR adds that too. ## Additional Context The error message is still a bit strange because if you have code like this: ```rust #[attr] fn foo() {} fn attr() {} fn main() {} ``` You get this: ``` error: Expected 0 arguments, but 1 was provided ┌─ src/main.nr:1:3 │ 1 │ #[attr] │ ---- Too many arguments ``` which kind of makes sense, because 1 implicit argument was provided but 0 are expected in the callback, but maybe the error should point out that the callback actually needs one argument. Let me know if you think we should improve the error message here... but at least it doesn't error anymore. Oh, I remember why I didn't improve that error message: the error should likely be on the callback function, but it should point out that the error happens because of a given attribute, so we need two different locations for the error, which I think we currently doesn't support. ## Documentation Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- aztec_macros/src/utils/ast_utils.rs | 4 +- compiler/noirc_driver/src/lib.rs | 10 +- compiler/noirc_errors/src/position.rs | 4 + .../noirc_frontend/src/elaborator/comptime.rs | 94 ++++++++++++++----- compiler/noirc_frontend/src/elaborator/mod.rs | 3 +- .../noirc_frontend/src/elaborator/types.rs | 8 +- .../src/hir/comptime/interpreter/builtin.rs | 2 +- .../noirc_frontend/src/hir_def/function.rs | 3 +- compiler/noirc_frontend/src/hir_def/types.rs | 18 +--- compiler/noirc_frontend/src/lexer/lexer.rs | 19 +++- compiler/noirc_frontend/src/lexer/token.rs | 36 +++++-- 11 files changed, 138 insertions(+), 63 deletions(-) diff --git a/aztec_macros/src/utils/ast_utils.rs b/aztec_macros/src/utils/ast_utils.rs index 19372fa5cb5..316aa60da62 100644 --- a/aztec_macros/src/utils/ast_utils.rs +++ b/aztec_macros/src/utils/ast_utils.rs @@ -187,8 +187,8 @@ pub fn check_trait_method_implemented(trait_impl: &NoirTraitImpl, method_name: & /// Checks if an attribute is a custom attribute with a specific name pub fn is_custom_attribute(attr: &SecondaryAttribute, attribute_name: &str) -> bool { - if let SecondaryAttribute::Custom(custom_attr) = attr { - custom_attr.as_str() == attribute_name + if let SecondaryAttribute::Custom(custom_attribute) = attr { + custom_attribute.contents.as_str() == attribute_name } else { false } diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index 88918151366..a315e7ed397 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -452,9 +452,13 @@ fn compile_contract_inner( .attributes .secondary .iter() - .filter_map( - |attr| if let SecondaryAttribute::Custom(tag) = attr { Some(tag) } else { None }, - ) + .filter_map(|attr| { + if let SecondaryAttribute::Custom(attribute) = attr { + Some(&attribute.contents) + } else { + None + } + }) .cloned() .collect(); diff --git a/compiler/noirc_errors/src/position.rs b/compiler/noirc_errors/src/position.rs index 1792197eab7..9b031f56ae2 100644 --- a/compiler/noirc_errors/src/position.rs +++ b/compiler/noirc_errors/src/position.rs @@ -103,6 +103,10 @@ impl Span { let other_distance = 
other.end() - other.start(); self_distance < other_distance } + + pub fn shift_by(&self, offset: u32) -> Span { + Self::from(self.start() + offset..self.end() + offset) + } } impl From for Range { diff --git a/compiler/noirc_frontend/src/elaborator/comptime.rs b/compiler/noirc_frontend/src/elaborator/comptime.rs index 5678fa8ddee..0c1bc82e1ce 100644 --- a/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -24,7 +24,7 @@ use crate::{ }, node_interner::{DefinitionKind, DependencyId, FuncId, TraitId}, parser::{self, TopLevelStatement}, - Type, TypeBindings, + Type, TypeBindings, UnificationError, }; use super::{Elaborator, FunctionContext, ResolverMeta}; @@ -96,10 +96,14 @@ impl<'context> Elaborator<'context> { generated_items: &mut CollectedItems, ) { for attribute in attributes { - if let SecondaryAttribute::Custom(name) = attribute { - if let Err(error) = - self.run_comptime_attribute_on_item(name, item.clone(), span, generated_items) - { + if let SecondaryAttribute::Custom(attribute) = attribute { + if let Err(error) = self.run_comptime_attribute_on_item( + &attribute.contents, + item.clone(), + span, + attribute.contents_span, + generated_items, + ) { self.errors.push(error); } } @@ -111,10 +115,11 @@ impl<'context> Elaborator<'context> { attribute: &str, item: Value, span: Span, + attribute_span: Span, generated_items: &mut CollectedItems, ) -> Result<(), (CompilationError, FileId)> { - let location = Location::new(span, self.file); - let Some((function, arguments)) = Self::parse_attribute(attribute, self.file)? else { + let location = Location::new(attribute_span, self.file); + let Some((function, arguments)) = Self::parse_attribute(attribute, location)? else { // Do not issue an error if the attribute is unknown return Ok(()); }; @@ -141,12 +146,17 @@ impl<'context> Elaborator<'context> { }; let mut interpreter = self.setup_interpreter(); - let mut arguments = - Self::handle_attribute_arguments(&mut interpreter, function, arguments, location) - .map_err(|error| { - let file = error.get_location().file; - (error.into(), file) - })?; + let mut arguments = Self::handle_attribute_arguments( + &mut interpreter, + &item, + function, + arguments, + location, + ) + .map_err(|error| { + let file = error.get_location().file; + (error.into(), file) + })?; arguments.insert(0, (item, location)); @@ -170,33 +180,62 @@ impl<'context> Elaborator<'context> { #[allow(clippy::type_complexity)] pub(crate) fn parse_attribute( annotation: &str, - file: FileId, + location: Location, ) -> Result)>, (CompilationError, FileId)> { let (tokens, mut lexing_errors) = Lexer::lex(annotation); if !lexing_errors.is_empty() { - return Err((lexing_errors.swap_remove(0).into(), file)); + return Err((lexing_errors.swap_remove(0).into(), location.file)); } let expression = parser::expression() .parse(tokens) - .map_err(|mut errors| (errors.swap_remove(0).into(), file))?; + .map_err(|mut errors| (errors.swap_remove(0).into(), location.file))?; - Ok(match expression.kind { - ExpressionKind::Call(call) => Some((*call.func, call.arguments)), - ExpressionKind::Variable(_) => Some((expression, Vec::new())), - _ => None, - }) + let (mut func, mut arguments) = match expression.kind { + ExpressionKind::Call(call) => (*call.func, call.arguments), + ExpressionKind::Variable(_) => (expression, Vec::new()), + _ => return Ok(None), + }; + + func.span = func.span.shift_by(location.span.start()); + + for argument in &mut arguments { + argument.span = 
argument.span.shift_by(location.span.start()); + } + + Ok(Some((func, arguments))) } fn handle_attribute_arguments( interpreter: &mut Interpreter, + item: &Value, function: FuncId, arguments: Vec, location: Location, ) -> Result, InterpreterError> { let meta = interpreter.elaborator.interner.function_meta(&function); + let mut parameters = vecmap(&meta.parameters.0, |(_, typ, _)| typ.clone()); + if parameters.is_empty() { + return Err(InterpreterError::ArgumentCountMismatch { + expected: 0, + actual: arguments.len() + 1, + location, + }); + } + + let expected_type = item.get_type(); + let expected_type = expected_type.as_ref(); + + if ¶meters[0] != expected_type { + return Err(InterpreterError::TypeMismatch { + expected: parameters[0].clone(), + actual: expected_type.clone(), + location, + }); + } + // Remove the initial parameter for the comptime item since that is not included // in `arguments` at this point. parameters.remove(0); @@ -213,6 +252,7 @@ impl<'context> Elaborator<'context> { let mut varargs = im::Vector::new(); for (i, arg) in arguments.into_iter().enumerate() { + let arg_location = Location::new(arg.span, location.file); let param_type = parameters.get(i).or(varargs_elem_type).unwrap_or(&Type::Error); let mut push_arg = |arg| { @@ -233,9 +273,17 @@ impl<'context> Elaborator<'context> { }?; push_arg(Value::TraitDefinition(trait_id)); } else { - let expr_id = interpreter.elaborator.elaborate_expression(arg).0; + let (expr_id, expr_type) = interpreter.elaborator.elaborate_expression(arg); push_arg(interpreter.evaluate(expr_id)?); - } + + if let Err(UnificationError) = expr_type.unify(param_type) { + return Err(InterpreterError::TypeMismatch { + expected: param_type.clone(), + actual: expr_type, + location: arg_location, + }); + } + }; } if is_varargs { diff --git a/compiler/noirc_frontend/src/elaborator/mod.rs b/compiler/noirc_frontend/src/elaborator/mod.rs index e84ed76050d..44240b72af0 100644 --- a/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/compiler/noirc_frontend/src/elaborator/mod.rs @@ -28,6 +28,7 @@ use crate::{ node_interner::{ DefinitionKind, DependencyId, ExprId, FuncId, GlobalId, ReferenceId, TraitId, TypeAliasId, }, + token::CustomAtrribute, Shared, Type, TypeVariable, }; use crate::{ @@ -819,7 +820,7 @@ impl<'context> Elaborator<'context> { let attributes = func.secondary_attributes().iter(); let attributes = attributes.filter_map(|secondary_attribute| secondary_attribute.as_custom()); - let attributes = attributes.map(|str| str.to_string()).collect(); + let attributes: Vec = attributes.cloned().collect(); let meta = FuncMeta { name: name_ident, diff --git a/compiler/noirc_frontend/src/elaborator/types.rs b/compiler/noirc_frontend/src/elaborator/types.rs index e41234a5be5..8dccd5f0344 100644 --- a/compiler/noirc_frontend/src/elaborator/types.rs +++ b/compiler/noirc_frontend/src/elaborator/types.rs @@ -36,7 +36,7 @@ use crate::{ TraitImplKind, TraitMethodId, }, Generics, Kind, ResolvedGeneric, Type, TypeBinding, TypeBindings, TypeVariable, - TypeVariableKind, + TypeVariableKind, UnificationError, }; use super::{lints, Elaborator}; @@ -713,9 +713,9 @@ impl<'context> Elaborator<'context> { expected: &Type, make_error: impl FnOnce() -> TypeCheckError, ) { - let mut errors = Vec::new(); - actual.unify(expected, &mut errors, make_error); - self.errors.extend(errors.into_iter().map(|error| (error.into(), self.file))); + if let Err(UnificationError) = actual.unify(expected) { + self.errors.push((make_error().into(), self.file)); + } } /// Wrapper of 
Type::unify_with_coercions using self.errors diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 070749e45ba..36e6fd014d5 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -1609,7 +1609,7 @@ fn function_def_has_named_attribute( let name = name.iter().map(|token| token.to_string()).collect::>().join(""); for attribute in attributes { - let parse_result = Elaborator::parse_attribute(attribute, location.file); + let parse_result = Elaborator::parse_attribute(&attribute.contents, location); let Ok(Some((function, _arguments))) = parse_result else { continue; }; diff --git a/compiler/noirc_frontend/src/hir_def/function.rs b/compiler/noirc_frontend/src/hir_def/function.rs index 7fa33746f31..8e3baef1d00 100644 --- a/compiler/noirc_frontend/src/hir_def/function.rs +++ b/compiler/noirc_frontend/src/hir_def/function.rs @@ -10,6 +10,7 @@ use crate::graph::CrateId; use crate::hir::def_map::LocalModuleId; use crate::macros_api::{BlockExpression, StructId}; use crate::node_interner::{ExprId, NodeInterner, TraitId, TraitImplId}; +use crate::token::CustomAtrribute; use crate::{ResolvedGeneric, Type}; /// A Hir function is a block expression with a list of statements. @@ -166,7 +167,7 @@ pub struct FuncMeta { pub self_type: Option, /// Custom attributes attached to this function. - pub custom_attributes: Vec, + pub custom_attributes: Vec, } #[derive(Debug, Clone)] diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index 638003d3fcd..7b3d0d7a205 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -1467,21 +1467,13 @@ impl Type { /// equal to the other type in the process. When comparing types, unification /// (including try_unify) are almost always preferred over Type::eq as unification /// will correctly handle generic types. 
- pub fn unify( - &self, - expected: &Type, - errors: &mut Vec, - make_error: impl FnOnce() -> TypeCheckError, - ) { + pub fn unify(&self, expected: &Type) -> Result<(), UnificationError> { let mut bindings = TypeBindings::new(); - match self.try_unify(expected, &mut bindings) { - Ok(()) => { - // Commit any type bindings on success - Self::apply_type_bindings(bindings); - } - Err(UnificationError) => errors.push(make_error()), - } + self.try_unify(expected, &mut bindings).map(|()| { + // Commit any type bindings on success + Self::apply_type_bindings(bindings); + }) } /// `try_unify` is a bit of a misnomer since although errors are not committed, diff --git a/compiler/noirc_frontend/src/lexer/lexer.rs b/compiler/noirc_frontend/src/lexer/lexer.rs index 0afcb02caac..7265593238d 100644 --- a/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/compiler/noirc_frontend/src/lexer/lexer.rs @@ -295,8 +295,12 @@ impl<'a> Lexer<'a> { } self.next_char(); + let contents_start = self.position + 1; + let word = self.eat_while(None, |ch| ch != ']'); + let contents_end = self.position; + if !self.peek_char_is(']') { return Err(LexerErrorKind::UnexpectedCharacter { span: Span::single_char(self.position), @@ -308,7 +312,10 @@ impl<'a> Lexer<'a> { let end = self.position; - let attribute = Attribute::lookup_attribute(&word, Span::inclusive(start, end))?; + let span = Span::inclusive(start, end); + let contents_span = Span::inclusive(contents_start, contents_end); + + let attribute = Attribute::lookup_attribute(&word, span, contents_span)?; Ok(attribute.into_span(start, end)) } @@ -682,7 +689,7 @@ mod tests { use iter_extended::vecmap; use super::*; - use crate::token::{FunctionAttribute, SecondaryAttribute, TestScope}; + use crate::token::{CustomAtrribute, FunctionAttribute, SecondaryAttribute, TestScope}; #[test] fn test_single_double_char() { @@ -810,9 +817,11 @@ mod tests { let token = lexer.next_token().unwrap(); assert_eq!( token.token(), - &Token::Attribute(Attribute::Secondary(SecondaryAttribute::Custom( - "custom(hello)".to_string() - ))) + &Token::Attribute(Attribute::Secondary(SecondaryAttribute::Custom(CustomAtrribute { + contents: "custom(hello)".to_string(), + span: Span::from(0..16), + contents_span: Span::from(2..15) + }))) ); } diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index 585e22ce92b..1692908187e 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -697,7 +697,11 @@ impl fmt::Display for Attribute { impl Attribute { /// If the string is a fixed attribute return that, else /// return the custom attribute - pub(crate) fn lookup_attribute(word: &str, span: Span) -> Result { + pub(crate) fn lookup_attribute( + word: &str, + span: Span, + contents_span: Span, + ) -> Result { let word_segments: Vec<&str> = word .split(|c| c == '(' || c == ')') .filter(|string_segment| !string_segment.is_empty()) @@ -770,7 +774,11 @@ impl Attribute { ["varargs"] => Attribute::Secondary(SecondaryAttribute::Varargs), tokens => { tokens.iter().try_for_each(|token| validate(token))?; - Attribute::Secondary(SecondaryAttribute::Custom(word.to_owned())) + Attribute::Secondary(SecondaryAttribute::Custom(CustomAtrribute { + contents: word.to_owned(), + span, + contents_span, + })) } }; @@ -863,17 +871,26 @@ pub enum SecondaryAttribute { ContractLibraryMethod, Export, Field(String), - Custom(String), + Custom(CustomAtrribute), Abi(String), /// A variable-argument comptime function. 
Varargs, } +#[derive(PartialEq, Eq, Hash, Debug, Clone, PartialOrd, Ord)] +pub struct CustomAtrribute { + pub contents: String, + // The span of the entire attribute, including leading `#[` and trailing `]` + pub span: Span, + // The span for the attribute contents (what's inside `#[...]`) + pub contents_span: Span, +} + impl SecondaryAttribute { - pub(crate) fn as_custom(&self) -> Option<&str> { - if let Self::Custom(str) = self { - Some(str) + pub(crate) fn as_custom(&self) -> Option<&CustomAtrribute> { + if let Self::Custom(attribute) = self { + Some(attribute) } else { None } @@ -887,7 +904,7 @@ impl fmt::Display for SecondaryAttribute { SecondaryAttribute::Deprecated(Some(ref note)) => { write!(f, r#"#[deprecated("{note}")]"#) } - SecondaryAttribute::Custom(ref k) => write!(f, "#[{k}]"), + SecondaryAttribute::Custom(ref attribute) => write!(f, "#[{}]", attribute.contents), SecondaryAttribute::ContractLibraryMethod => write!(f, "#[contract_library_method]"), SecondaryAttribute::Export => write!(f, "#[export]"), SecondaryAttribute::Field(ref k) => write!(f, "#[field({k})]"), @@ -916,9 +933,8 @@ impl AsRef for SecondaryAttribute { match self { SecondaryAttribute::Deprecated(Some(string)) => string, SecondaryAttribute::Deprecated(None) => "", - SecondaryAttribute::Custom(string) - | SecondaryAttribute::Field(string) - | SecondaryAttribute::Abi(string) => string, + SecondaryAttribute::Custom(attribute) => &attribute.contents, + SecondaryAttribute::Field(string) | SecondaryAttribute::Abi(string) => string, SecondaryAttribute::ContractLibraryMethod => "", SecondaryAttribute::Export => "", SecondaryAttribute::Varargs => "", From 70ebb905da23a0541915a8f6883d6f530934be4e Mon Sep 17 00:00:00 2001 From: Ary Borenszweig Date: Wed, 4 Sep 2024 14:52:20 -0300 Subject: [PATCH 17/26] feat: unquote some value as tokens, not as unquote markers (#5924) # Description ## Problem Resolves #5916 ## Summary ## Additional Context I think there might be other `Value`s that we could convert into tokens, like `String`, but I didn't want to handle those in this PR (should it be `Str` or `RawStr`? etc.). And it's possible that we only need this special logic for integers anyway. ## Documentation Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
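In practice the change means an unquoted integer is spliced back in as a plain integer token, so it parses anywhere a literal is expected, for example as a numeric generic argument. A minimal sketch (hypothetical names, assuming a trait declared with a numeric generic):

```rust
trait Serialize<let N: u32> {
    fn serialize() {}
}

#[make_impl]
fn foobar() {}

fn make_impl(_f: FunctionDefinition) -> Quoted {
    let len = 1;
    // `$len` is spliced as the token `1`, not as an opaque unquote marker,
    // so it is valid in the generic-argument position below.
    quote {
        impl Serialize<$len> for Field {
            fn serialize() {}
        }
    }
}

fn main() {}
```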
--- .../noirc_frontend/src/hir/comptime/value.rs | 37 +++++++++++++++++++ compiler/noirc_frontend/src/tests.rs | 28 ++++++++++++++ 2 files changed, 65 insertions(+) diff --git a/compiler/noirc_frontend/src/hir/comptime/value.rs b/compiler/noirc_frontend/src/hir/comptime/value.rs index c5818c20c57..fdac95a07fe 100644 --- a/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -429,6 +429,9 @@ impl Value { location: Location, ) -> IResult> { let token = match self { + Value::Unit => { + return Ok(vec![Token::LeftParen, Token::RightParen]); + } Value::Quoted(tokens) => return Ok(unwrap_rc(tokens)), Value::Type(typ) => Token::QuotedType(interner.push_quoted_type(typ)), Value::Expr(ExprValue::Expression(expr)) => { @@ -443,6 +446,40 @@ impl Value { Value::UnresolvedType(typ) => { Token::InternedUnresolvedTypeData(interner.push_unresolved_type_data(typ)) } + Value::U1(bool) => Token::Bool(bool), + Value::U8(value) => Token::Int((value as u128).into()), + Value::U16(value) => Token::Int((value as u128).into()), + Value::U32(value) => Token::Int((value as u128).into()), + Value::U64(value) => Token::Int((value as u128).into()), + Value::I8(value) => { + if value < 0 { + return Ok(vec![Token::Minus, Token::Int((-value as u128).into())]); + } else { + Token::Int((value as u128).into()) + } + } + Value::I16(value) => { + if value < 0 { + return Ok(vec![Token::Minus, Token::Int((-value as u128).into())]); + } else { + Token::Int((value as u128).into()) + } + } + Value::I32(value) => { + if value < 0 { + return Ok(vec![Token::Minus, Token::Int((-value as u128).into())]); + } else { + Token::Int((value as u128).into()) + } + } + Value::I64(value) => { + if value < 0 { + return Ok(vec![Token::Minus, Token::Int((-value as u128).into())]); + } else { + Token::Int((value as u128).into()) + } + } + Value::Field(value) => Token::Int(value), other => Token::UnquoteMarker(other.into_hir_expression(interner, location)?), }; Ok(vec![token]) diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index a30907211a3..e7dbe11f0d1 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -3341,3 +3341,31 @@ fn warns_on_re_export_of_item_with_less_visibility() { ) )); } + +#[test] +fn unoquted_integer_as_integer_token() { + let src = r#" + trait Serialize { + fn serialize() {} + } + + #[attr] + fn foobar() {} + + fn attr(_f: FunctionDefinition) -> Quoted { + let serialized_len = 1; + // We are testing that when we unoqute $serialized_len, it's unquoted + // as the token `1` and not as something else that later won't be parsed correctly + // in the context of a generic argument. + quote { + impl Serialize<$serialized_len> for Field { + fn serialize() { } + } + } + } + + fn main() {} + "#; + + assert_no_errors(src); +} From 2ca2e5cf207a2a1f41ca86d877f0288bcbbfd212 Mon Sep 17 00:00:00 2001 From: Ary Borenszweig Date: Wed, 4 Sep 2024 15:33:17 -0300 Subject: [PATCH 18/26] feat: module attributes (#5888) # Description ## Problem Resolves #5495 ## Summary Pending: - [ ] Decide whether to keep attributes as String or SecondaryAttribute in ModuleData - [ ] Parsing of module attributes is not ideal (it errors on non-secondary attributes, but I think not all secondary attributes are valid for modules... but maybe it's fine because struct attributes work the same way) ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. 
- [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- aztec_macros/src/utils/parse_utils.rs | 1 + compiler/noirc_frontend/src/ast/statement.rs | 1 + compiler/noirc_frontend/src/ast/visitor.rs | 13 +++- .../noirc_frontend/src/elaborator/comptime.rs | 63 ++++++++++++---- compiler/noirc_frontend/src/elaborator/mod.rs | 7 +- .../src/hir/comptime/interpreter/builtin.rs | 33 +++++++++ .../noirc_frontend/src/hir/comptime/tests.rs | 8 +- .../src/hir/def_collector/dc_crate.rs | 18 +++++ .../src/hir/def_collector/dc_mod.rs | 74 ++++++++++++++++++- .../noirc_frontend/src/hir/def_map/mod.rs | 8 +- .../src/hir/def_map/module_data.rs | 20 ++++- compiler/noirc_frontend/src/lexer/errors.rs | 8 ++ compiler/noirc_frontend/src/lexer/lexer.rs | 38 +++++++++- compiler/noirc_frontend/src/lexer/token.rs | 11 ++- compiler/noirc_frontend/src/parser/mod.rs | 12 ++- compiler/noirc_frontend/src/parser/parser.rs | 56 +++++++++++--- .../src/parser/parser/attributes.rs | 9 +++ compiler/noirc_frontend/src/tests.rs | 23 +++++- .../docs/noir/standard_library/meta/module.md | 6 ++ noir_stdlib/src/meta/module.nr | 5 ++ .../comptime_module/src/main.nr | 45 +++++++++++ .../comptime_module/src/separate_module.nr | 5 ++ tooling/nargo_fmt/src/visitor/item.rs | 8 +- tooling/nargo_fmt/tests/expected/module.nr | 12 +++ tooling/nargo_fmt/tests/input/module.nr | 12 +++ 25 files changed, 454 insertions(+), 42 deletions(-) create mode 100644 test_programs/compile_success_empty/comptime_module/src/separate_module.nr diff --git a/aztec_macros/src/utils/parse_utils.rs b/aztec_macros/src/utils/parse_utils.rs index 6a2a876e682..e7b3e347a96 100644 --- a/aztec_macros/src/utils/parse_utils.rs +++ b/aztec_macros/src/utils/parse_utils.rs @@ -53,6 +53,7 @@ fn empty_item(item: &mut Item) { ItemKind::Import(use_tree, _) => empty_use_tree(use_tree), ItemKind::Struct(noir_struct) => empty_noir_struct(noir_struct), ItemKind::TypeAlias(noir_type_alias) => empty_noir_type_alias(noir_type_alias), + ItemKind::InnerAttribute(_) => (), } } diff --git a/compiler/noirc_frontend/src/ast/statement.rs b/compiler/noirc_frontend/src/ast/statement.rs index 2e14761a1cc..30db8ad63fd 100644 --- a/compiler/noirc_frontend/src/ast/statement.rs +++ b/compiler/noirc_frontend/src/ast/statement.rs @@ -292,6 +292,7 @@ pub trait Recoverable { #[derive(Debug, PartialEq, Eq, Clone)] pub struct ModuleDeclaration { pub ident: Ident, + pub outer_attributes: Vec, } impl std::fmt::Display for ModuleDeclaration { diff --git a/compiler/noirc_frontend/src/ast/visitor.rs b/compiler/noirc_frontend/src/ast/visitor.rs index 3955e50b03e..0aeeed39dd0 100644 --- a/compiler/noirc_frontend/src/ast/visitor.rs +++ b/compiler/noirc_frontend/src/ast/visitor.rs @@ -16,7 +16,7 @@ use crate::{ QuotedTypeId, }, parser::{Item, ItemKind, ParsedSubModule}, - token::Tokens, + token::{SecondaryAttribute, Tokens}, ParsedModule, QuotedType, }; @@ -432,6 +432,8 @@ pub trait Visitor { fn visit_struct_pattern(&mut self, _: &Path, _: &[(Ident, Pattern)], _: Span) -> bool { true } + + fn visit_secondary_attribute(&mut self, _: &SecondaryAttribute, _: Span) {} } impl ParsedModule { @@ -481,6 +483,9 @@ impl Item { ItemKind::ModuleDecl(module_declaration) => { module_declaration.accept(self.span, visitor); } + ItemKind::InnerAttribute(attribute) => { + attribute.accept(self.span, visitor); + } } } } @@ -1289,6 
+1294,12 @@ impl Pattern { } } +impl SecondaryAttribute { + pub fn accept(&self, span: Span, visitor: &mut impl Visitor) { + visitor.visit_secondary_attribute(self, span); + } +} + fn visit_expressions(expressions: &[Expression], visitor: &mut impl Visitor) { for expression in expressions { expression.accept(visitor); diff --git a/compiler/noirc_frontend/src/elaborator/comptime.rs b/compiler/noirc_frontend/src/elaborator/comptime.rs index 0c1bc82e1ce..cfc2e34c520 100644 --- a/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -10,11 +10,12 @@ use crate::{ comptime::{Interpreter, InterpreterError, Value}, def_collector::{ dc_crate::{ - CollectedItems, CompilationError, UnresolvedFunctions, UnresolvedStruct, - UnresolvedTrait, UnresolvedTraitImpl, + CollectedItems, CompilationError, ModuleAttribute, UnresolvedFunctions, + UnresolvedStruct, UnresolvedTrait, UnresolvedTraitImpl, }, dc_mod, }, + def_map::ModuleId, resolution::errors::ResolverError, }, hir_def::expr::HirIdent, @@ -96,21 +97,31 @@ impl<'context> Elaborator<'context> { generated_items: &mut CollectedItems, ) { for attribute in attributes { - if let SecondaryAttribute::Custom(attribute) = attribute { - if let Err(error) = self.run_comptime_attribute_on_item( - &attribute.contents, - item.clone(), - span, - attribute.contents_span, - generated_items, - ) { - self.errors.push(error); - } - } + self.run_comptime_attribute_on_item(attribute, &item, span, generated_items); } } fn run_comptime_attribute_on_item( + &mut self, + attribute: &SecondaryAttribute, + item: &Value, + span: Span, + generated_items: &mut CollectedItems, + ) { + if let SecondaryAttribute::Custom(attribute) = attribute { + if let Err(error) = self.run_comptime_attribute_name_on_item( + &attribute.contents, + item.clone(), + span, + attribute.contents_span, + generated_items, + ) { + self.errors.push(error); + } + } + } + + fn run_comptime_attribute_name_on_item( &mut self, attribute: &str, item: Value, @@ -383,7 +394,8 @@ impl<'context> Elaborator<'context> { | TopLevelStatement::Trait(_) | TopLevelStatement::Impl(_) | TopLevelStatement::TypeAlias(_) - | TopLevelStatement::SubModule(_) => { + | TopLevelStatement::SubModule(_) + | TopLevelStatement::InnerAttribute(_) => { let item = item.to_string(); let error = InterpreterError::UnsupportedTopLevelItemUnquote { item, location }; self.errors.push(error.into_compilation_error_pair()); @@ -422,6 +434,7 @@ impl<'context> Elaborator<'context> { traits: &BTreeMap, types: &BTreeMap, functions: &[UnresolvedFunctions], + module_attributes: &[ModuleAttribute], ) -> CollectedItems { let mut generated_items = CollectedItems::default(); @@ -444,9 +457,31 @@ impl<'context> Elaborator<'context> { } self.run_attributes_on_functions(functions, &mut generated_items); + + self.run_attributes_on_modules(module_attributes, &mut generated_items); + generated_items } + fn run_attributes_on_modules( + &mut self, + module_attributes: &[ModuleAttribute], + generated_items: &mut CollectedItems, + ) { + for module_attribute in module_attributes { + let local_id = module_attribute.module_id; + let module_id = ModuleId { krate: self.crate_id, local_id }; + let item = Value::ModuleDefinition(module_id); + let attribute = &module_attribute.attribute; + let span = Span::default(); + + self.local_module = module_attribute.attribute_module_id; + self.file = module_attribute.attribute_file_id; + + self.run_comptime_attribute_on_item(attribute, &item, span, generated_items); + } + } + fn 
run_attributes_on_functions( &mut self, function_sets: &[UnresolvedFunctions], diff --git a/compiler/noirc_frontend/src/elaborator/mod.rs b/compiler/noirc_frontend/src/elaborator/mod.rs index 44240b72af0..161742029f6 100644 --- a/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/compiler/noirc_frontend/src/elaborator/mod.rs @@ -321,7 +321,12 @@ impl<'context> Elaborator<'context> { // We have to run any comptime attributes on functions before the function is elaborated // since the generated items are checked beforehand as well. - let generated_items = self.run_attributes(&items.traits, &items.types, &items.functions); + let generated_items = self.run_attributes( + &items.traits, + &items.types, + &items.functions, + &items.module_attributes, + ); // After everything is collected, we can elaborate our generated items. // It may be better to inline these within `items` entirely since elaborating them diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 36e6fd014d5..d1fcc76c55b 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -108,6 +108,7 @@ impl<'local, 'context> Interpreter<'local, 'context> { function_def_set_return_type(self, arguments, location) } "module_functions" => module_functions(self, arguments, location), + "module_has_named_attribute" => module_has_named_attribute(self, arguments, location), "module_is_contract" => module_is_contract(self, arguments, location), "module_name" => module_name(interner, arguments, location), "modulus_be_bits" => modulus_be_bits(interner, arguments, location), @@ -1816,6 +1817,38 @@ fn module_functions( Ok(Value::Slice(func_ids, slice_type)) } +// fn has_named_attribute(self, name: Quoted) -> bool +fn module_has_named_attribute( + interpreter: &Interpreter, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + let (self_argument, name) = check_two_arguments(arguments, location)?; + let module_id = get_module(self_argument)?; + let module_data = interpreter.elaborator.get_module(module_id); + let name = get_quoted(name)?; + + let name = name.iter().map(|token| token.to_string()).collect::>().join(""); + + let attributes = module_data.outer_attributes.iter().chain(&module_data.inner_attributes); + for attribute in attributes { + let parse_result = Elaborator::parse_attribute(attribute, location); + let Ok(Some((function, _arguments))) = parse_result else { + continue; + }; + + let ExpressionKind::Variable(path) = function.kind else { + continue; + }; + + if path.last_name() == name { + return Ok(Value::Bool(true)); + } + } + + Ok(Value::Bool(false)) +} + // fn is_contract(self) -> bool fn module_is_contract( interpreter: &Interpreter, diff --git a/compiler/noirc_frontend/src/hir/comptime/tests.rs b/compiler/noirc_frontend/src/hir/comptime/tests.rs index 4c1adf9fca0..64b489422a0 100644 --- a/compiler/noirc_frontend/src/hir/comptime/tests.rs +++ b/compiler/noirc_frontend/src/hir/comptime/tests.rs @@ -23,7 +23,13 @@ fn interpret_helper(src: &str) -> Result { let module_id = LocalModuleId(Index::unsafe_zeroed()); let mut modules = noirc_arena::Arena::default(); let location = Location::new(Default::default(), file); - let root = LocalModuleId(modules.insert(ModuleData::new(None, location, false))); + let root = LocalModuleId(modules.insert(ModuleData::new( + None, + location, + Vec::new(), + Vec::new(), + false, + ))); assert_eq!(root, 
module_id); let file_manager = FileManager::new(&PathBuf::new()); diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 6a6cabe593d..98555375790 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -7,6 +7,7 @@ use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; use crate::hir::resolution::errors::ResolverError; use crate::hir::resolution::path_resolver; use crate::hir::type_check::TypeCheckError; +use crate::token::SecondaryAttribute; use crate::{Generics, Type}; use crate::hir::resolution::import::{resolve_import, ImportDirective, PathResolution}; @@ -111,6 +112,21 @@ pub struct UnresolvedGlobal { pub stmt_def: LetStatement, } +pub struct ModuleAttribute { + // The file in which the module is defined + pub file_id: FileId, + // The module this attribute is attached to + pub module_id: LocalModuleId, + // The file where the attribute exists (it could be the same as `file_id` + // or a different one if it's an inner attribute in a different file) + pub attribute_file_id: FileId, + // The module where the attribute is defined (similar to `attribute_file_id`, + // it could be different than `module_id` for inner attributes) + pub attribute_module_id: LocalModuleId, + pub attribute: SecondaryAttribute, + pub is_inner: bool, +} + /// Given a Crate root, collect all definitions in that crate pub struct DefCollector { pub(crate) def_map: CrateDefMap, @@ -127,6 +143,7 @@ pub struct CollectedItems { pub globals: Vec, pub(crate) impls: ImplMap, pub(crate) trait_impls: Vec, + pub(crate) module_attributes: Vec, } impl CollectedItems { @@ -238,6 +255,7 @@ impl DefCollector { impls: HashMap::default(), globals: vec![], trait_impls: vec![], + module_attributes: vec![], }, } } diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 1dbd5a1383b..520cccf7580 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -17,6 +17,7 @@ use crate::ast::{ use crate::hir::resolution::errors::ResolverError; use crate::macros_api::{Expression, NodeInterner, UnresolvedType, UnresolvedTypeData}; use crate::node_interner::ModuleAttributes; +use crate::token::SecondaryAttribute; use crate::{ graph::CrateId, hir::def_collector::dc_crate::{UnresolvedStruct, UnresolvedTrait}, @@ -26,6 +27,7 @@ use crate::{ }; use crate::{Generics, Kind, ResolvedGeneric, Type, TypeVariable}; +use super::dc_crate::ModuleAttribute; use super::{ dc_crate::{ CompilationError, DefCollector, UnresolvedFunctions, UnresolvedGlobal, UnresolvedTraitImpl, @@ -63,8 +65,10 @@ pub fn collect_defs( for decl in ast.module_decls { errors.extend(collector.parse_module_declaration( context, - &decl, + decl, crate_id, + file_id, + module_id, macro_processors, )); } @@ -72,6 +76,7 @@ pub fn collect_defs( errors.extend(collector.collect_submodules( context, crate_id, + module_id, ast.submodules, file_id, macro_processors, @@ -102,10 +107,40 @@ pub fn collect_defs( collector.collect_impls(context, ast.impls, crate_id); + collector.collect_attributes( + ast.inner_attributes, + file_id, + module_id, + file_id, + module_id, + true, + ); + errors } impl<'a> ModCollector<'a> { + fn collect_attributes( + &mut self, + attributes: Vec, + file_id: FileId, + module_id: LocalModuleId, + attribute_file_id: FileId, + attribute_module_id: 
LocalModuleId, + is_inner: bool, + ) { + for attribute in attributes { + self.def_collector.items.module_attributes.push(ModuleAttribute { + file_id, + module_id, + attribute_file_id, + attribute_module_id, + attribute, + is_inner, + }); + } + } + fn collect_globals( &mut self, context: &mut Context, @@ -301,6 +336,8 @@ impl<'a> ModCollector<'a> { context, &name, Location::new(name.span(), self.file_id), + Vec::new(), + Vec::new(), false, false, ) { @@ -433,6 +470,8 @@ impl<'a> ModCollector<'a> { context, &name, Location::new(name.span(), self.file_id), + Vec::new(), + Vec::new(), false, false, ) { @@ -616,6 +655,7 @@ impl<'a> ModCollector<'a> { &mut self, context: &mut Context, crate_id: CrateId, + parent_module_id: LocalModuleId, submodules: Vec, file_id: FileId, macro_processors: &[&dyn MacroProcessor], @@ -626,10 +666,21 @@ impl<'a> ModCollector<'a> { context, &submodule.name, Location::new(submodule.name.span(), file_id), + submodule.outer_attributes.clone(), + submodule.contents.inner_attributes.clone(), true, submodule.is_contract, ) { Ok(child) => { + self.collect_attributes( + submodule.outer_attributes, + file_id, + child.local_id, + file_id, + parent_module_id, + false, + ); + errors.extend(collect_defs( self.def_collector, submodule.contents, @@ -654,8 +705,10 @@ impl<'a> ModCollector<'a> { fn parse_module_declaration( &mut self, context: &mut Context, - mod_decl: &ModuleDeclaration, + mod_decl: ModuleDeclaration, crate_id: CrateId, + parent_file_id: FileId, + parent_module_id: LocalModuleId, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; @@ -717,10 +770,21 @@ impl<'a> ModCollector<'a> { context, &mod_decl.ident, Location::new(Span::empty(0), child_file_id), + mod_decl.outer_attributes.clone(), + ast.inner_attributes.clone(), true, false, ) { Ok(child_mod_id) => { + self.collect_attributes( + mod_decl.outer_attributes, + child_file_id, + child_mod_id.local_id, + parent_file_id, + parent_module_id, + false, + ); + // Track that the "foo" in `mod foo;` points to the module "foo" context.def_interner.add_module_reference(child_mod_id, location); @@ -743,11 +807,14 @@ impl<'a> ModCollector<'a> { /// Add a child module to the current def_map. /// On error this returns None and pushes to `errors` + #[allow(clippy::too_many_arguments)] fn push_child_module( &mut self, context: &mut Context, mod_name: &Ident, mod_location: Location, + outer_attributes: Vec, + inner_attributes: Vec, add_to_parent_scope: bool, is_contract: bool, ) -> Result { @@ -761,7 +828,8 @@ impl<'a> ModCollector<'a> { // Eventually the location put in `ModuleData` is used for codelenses about `contract`s, // so we keep using `location` so that it continues to work as usual. 
let location = Location::new(mod_name.span(), mod_location.file); - let new_module = ModuleData::new(parent, location, is_contract); + let new_module = + ModuleData::new(parent, location, outer_attributes, inner_attributes, is_contract); let module_id = self.def_collector.def_map.modules.insert(new_module); let modules = &mut self.def_collector.def_map.modules; diff --git a/compiler/noirc_frontend/src/hir/def_map/mod.rs b/compiler/noirc_frontend/src/hir/def_map/mod.rs index 758b4cf6e5c..a1c4d04cb30 100644 --- a/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -111,7 +111,13 @@ impl CrateDefMap { // Allocate a default Module for the root, giving it a ModuleId let mut modules: Arena = Arena::default(); let location = Location::new(Default::default(), root_file_id); - let root = modules.insert(ModuleData::new(None, location, false)); + let root = modules.insert(ModuleData::new( + None, + location, + Vec::new(), + ast.inner_attributes.clone(), + false, + )); let def_map = CrateDefMap { root: LocalModuleId(root), diff --git a/compiler/noirc_frontend/src/hir/def_map/module_data.rs b/compiler/noirc_frontend/src/hir/def_map/module_data.rs index f9542094be7..e829df3572c 100644 --- a/compiler/noirc_frontend/src/hir/def_map/module_data.rs +++ b/compiler/noirc_frontend/src/hir/def_map/module_data.rs @@ -5,6 +5,7 @@ use noirc_errors::Location; use super::{ItemScope, LocalModuleId, ModuleDefId, ModuleId, PerNs}; use crate::ast::{Ident, ItemVisibility}; use crate::node_interner::{FuncId, GlobalId, StructId, TraitId, TypeAliasId}; +use crate::token::SecondaryAttribute; /// Contains the actual contents of a module: its parent (if one exists), /// children, and scope with all definitions defined within the scope. @@ -24,10 +25,25 @@ pub struct ModuleData { /// True if this module is a `contract Foo { ... }` module containing contract functions pub is_contract: bool, + + pub outer_attributes: Vec, + pub inner_attributes: Vec, } impl ModuleData { - pub fn new(parent: Option, location: Location, is_contract: bool) -> ModuleData { + pub fn new( + parent: Option, + location: Location, + outer_attributes: Vec, + inner_attributes: Vec, + is_contract: bool, + ) -> ModuleData { + let outer_attributes = outer_attributes.iter().filter_map(|attr| attr.as_custom()); + let outer_attributes = outer_attributes.map(|attr| attr.contents.to_string()).collect(); + + let inner_attributes = inner_attributes.iter().filter_map(|attr| attr.as_custom()); + let inner_attributes = inner_attributes.map(|attr| attr.contents.to_string()).collect(); + ModuleData { parent, children: HashMap::new(), @@ -35,6 +51,8 @@ impl ModuleData { definitions: ItemScope::default(), location, is_contract, + outer_attributes, + inner_attributes, } } diff --git a/compiler/noirc_frontend/src/lexer/errors.rs b/compiler/noirc_frontend/src/lexer/errors.rs index be5180a777b..2440109af15 100644 --- a/compiler/noirc_frontend/src/lexer/errors.rs +++ b/compiler/noirc_frontend/src/lexer/errors.rs @@ -20,6 +20,8 @@ pub enum LexerErrorKind { IntegerLiteralTooLarge { span: Span, limit: String }, #[error("{:?} is not a valid attribute", found)] MalformedFuncAttribute { span: Span, found: String }, + #[error("{:?} is not a valid inner attribute", found)] + InvalidInnerAttribute { span: Span, found: String }, #[error("Logical and used instead of bitwise and")] LogicalAnd { span: Span }, #[error("Unterminated block comment")] @@ -57,6 +59,7 @@ impl LexerErrorKind { LexerErrorKind::InvalidIntegerLiteral { span, .. 
} => *span, LexerErrorKind::IntegerLiteralTooLarge { span, .. } => *span, LexerErrorKind::MalformedFuncAttribute { span, .. } => *span, + LexerErrorKind::InvalidInnerAttribute { span, .. } => *span, LexerErrorKind::LogicalAnd { span } => *span, LexerErrorKind::UnterminatedBlockComment { span } => *span, LexerErrorKind::UnterminatedStringLiteral { span } => *span, @@ -103,6 +106,11 @@ impl LexerErrorKind { format!(" {found} is not a valid attribute"), *span, ), + LexerErrorKind::InvalidInnerAttribute { span, found } => ( + "Invalid inner attribute".to_string(), + format!(" {found} is not a valid inner attribute"), + *span, + ), LexerErrorKind::LogicalAnd { span } => ( "Noir has no logical-and (&&) operator since short-circuiting is much less efficient when compiling to circuits".to_string(), "Try `&` instead, or use `if` only if you require short-circuiting".to_string(), diff --git a/compiler/noirc_frontend/src/lexer/lexer.rs b/compiler/noirc_frontend/src/lexer/lexer.rs index 7265593238d..b7492396c90 100644 --- a/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/compiler/noirc_frontend/src/lexer/lexer.rs @@ -286,6 +286,13 @@ impl<'a> Lexer<'a> { fn eat_attribute(&mut self) -> SpannedTokenResult { let start = self.position; + let is_inner = if self.peek_char_is('!') { + self.next_char(); + true + } else { + false + }; + if !self.peek_char_is('[') { return Err(LexerErrorKind::UnexpectedCharacter { span: Span::single_char(self.position), @@ -316,8 +323,19 @@ impl<'a> Lexer<'a> { let contents_span = Span::inclusive(contents_start, contents_end); let attribute = Attribute::lookup_attribute(&word, span, contents_span)?; - - Ok(attribute.into_span(start, end)) + if is_inner { + match attribute { + Attribute::Function(attribute) => Err(LexerErrorKind::InvalidInnerAttribute { + span: Span::from(start..end), + found: attribute.to_string(), + }), + Attribute::Secondary(attribute) => { + Ok(Token::InnerAttribute(attribute).into_span(start, end)) + } + } + } else { + Ok(Token::Attribute(attribute).into_span(start, end)) + } } //XXX(low): Can increase performance if we use iterator semantic and utilize some of the methods on String. See below @@ -907,6 +925,22 @@ mod tests { assert_eq!(sub_string, "test(invalid_scope)"); } + #[test] + fn test_inner_attribute() { + let input = r#"#![something]"#; + let mut lexer = Lexer::new(input); + + let token = lexer.next_token().unwrap(); + assert_eq!( + token.token(), + &Token::InnerAttribute(SecondaryAttribute::Custom(CustomAtrribute { + contents: "something".to_string(), + span: Span::from(0..13), + contents_span: Span::from(3..12), + })) + ); + } + #[test] fn test_int_type() { let input = "u16 i16 i108 u104.5"; diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index 1692908187e..7b805b5fd8d 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -27,6 +27,7 @@ pub enum BorrowedToken<'input> { Keyword(Keyword), IntType(IntType), Attribute(Attribute), + InnerAttribute(SecondaryAttribute), LineComment(&'input str, Option), BlockComment(&'input str, Option), Quote(&'input Tokens), @@ -132,6 +133,7 @@ pub enum Token { Keyword(Keyword), IntType(IntType), Attribute(Attribute), + InnerAttribute(SecondaryAttribute), LineComment(String, Option), BlockComment(String, Option), // A `quote { ... }` along with the tokens in its token stream. 
@@ -244,6 +246,7 @@ pub fn token_to_borrowed_token(token: &Token) -> BorrowedToken<'_> { Token::RawStr(ref b, hashes) => BorrowedToken::RawStr(b, *hashes), Token::Keyword(k) => BorrowedToken::Keyword(*k), Token::Attribute(ref a) => BorrowedToken::Attribute(a.clone()), + Token::InnerAttribute(ref a) => BorrowedToken::InnerAttribute(a.clone()), Token::LineComment(ref s, _style) => BorrowedToken::LineComment(s, *_style), Token::BlockComment(ref s, _style) => BorrowedToken::BlockComment(s, *_style), Token::Quote(stream) => BorrowedToken::Quote(stream), @@ -363,6 +366,7 @@ impl fmt::Display for Token { } Token::Keyword(k) => write!(f, "{k}"), Token::Attribute(ref a) => write!(f, "{a}"), + Token::InnerAttribute(ref a) => write!(f, "#![{a}]"), Token::LineComment(ref s, _style) => write!(f, "//{s}"), Token::BlockComment(ref s, _style) => write!(f, "/*{s}*/"), Token::Quote(ref stream) => { @@ -428,6 +432,7 @@ pub enum TokenKind { Literal, Keyword, Attribute, + InnerAttribute, Quote, QuotedType, InternedExpr, @@ -445,6 +450,7 @@ impl fmt::Display for TokenKind { TokenKind::Literal => write!(f, "literal"), TokenKind::Keyword => write!(f, "keyword"), TokenKind::Attribute => write!(f, "attribute"), + TokenKind::InnerAttribute => write!(f, "inner attribute"), TokenKind::Quote => write!(f, "quote"), TokenKind::QuotedType => write!(f, "quoted type"), TokenKind::InternedExpr => write!(f, "interned expr"), @@ -467,6 +473,7 @@ impl Token { | Token::FmtStr(_) => TokenKind::Literal, Token::Keyword(_) => TokenKind::Keyword, Token::Attribute(_) => TokenKind::Attribute, + Token::InnerAttribute(_) => TokenKind::InnerAttribute, Token::UnquoteMarker(_) => TokenKind::UnquoteMarker, Token::Quote(_) => TokenKind::Quote, Token::QuotedType(_) => TokenKind::QuotedType, @@ -701,7 +708,7 @@ impl Attribute { word: &str, span: Span, contents_span: Span, - ) -> Result { + ) -> Result { let word_segments: Vec<&str> = word .split(|c| c == '(' || c == ')') .filter(|string_segment| !string_segment.is_empty()) @@ -782,7 +789,7 @@ impl Attribute { } }; - Ok(Token::Attribute(attribute)) + Ok(attribute) } } diff --git a/compiler/noirc_frontend/src/parser/mod.rs b/compiler/noirc_frontend/src/parser/mod.rs index c82906b69a2..596d15176bc 100644 --- a/compiler/noirc_frontend/src/parser/mod.rs +++ b/compiler/noirc_frontend/src/parser/mod.rs @@ -16,7 +16,7 @@ use crate::ast::{ NoirFunction, NoirStruct, NoirTrait, NoirTraitImpl, NoirTypeAlias, Recoverable, StatementKind, TypeImpl, UseTree, }; -use crate::token::{Keyword, Token}; +use crate::token::{Keyword, SecondaryAttribute, Token}; use chumsky::prelude::*; use chumsky::primitive::Container; @@ -41,6 +41,7 @@ pub enum TopLevelStatement { TypeAlias(NoirTypeAlias), SubModule(ParsedSubModule), Global(LetStatement), + InnerAttribute(SecondaryAttribute), Error, } @@ -57,6 +58,7 @@ impl TopLevelStatement { TopLevelStatement::TypeAlias(t) => Some(ItemKind::TypeAlias(t)), TopLevelStatement::SubModule(s) => Some(ItemKind::Submodules(s)), TopLevelStatement::Global(c) => Some(ItemKind::Global(c)), + TopLevelStatement::InnerAttribute(a) => Some(ItemKind::InnerAttribute(a)), TopLevelStatement::Error => None, } } @@ -247,6 +249,8 @@ pub struct SortedModule { /// Full submodules as in `mod foo { ... definitions ... 
}` pub submodules: Vec, + + pub inner_attributes: Vec, } impl std::fmt::Display for SortedModule { @@ -309,6 +313,7 @@ impl ParsedModule { ItemKind::Global(global) => module.push_global(global), ItemKind::ModuleDecl(mod_name) => module.push_module_decl(mod_name), ItemKind::Submodules(submodule) => module.push_submodule(submodule.into_sorted()), + ItemKind::InnerAttribute(attribute) => module.inner_attributes.push(attribute), } } @@ -334,6 +339,7 @@ pub enum ItemKind { Global(LetStatement), ModuleDecl(ModuleDeclaration), Submodules(ParsedSubModule), + InnerAttribute(SecondaryAttribute), } /// A submodule defined via `mod name { contents }` in some larger file. @@ -342,6 +348,7 @@ pub enum ItemKind { pub struct ParsedSubModule { pub name: Ident, pub contents: ParsedModule, + pub outer_attributes: Vec, pub is_contract: bool, } @@ -350,6 +357,7 @@ impl ParsedSubModule { SortedSubModule { name: self.name, contents: self.contents.into_sorted(), + outer_attributes: self.outer_attributes, is_contract: self.is_contract, } } @@ -371,6 +379,7 @@ impl std::fmt::Display for SortedSubModule { pub struct SortedSubModule { pub name: Ident, pub contents: SortedModule, + pub outer_attributes: Vec, pub is_contract: bool, } @@ -512,6 +521,7 @@ impl std::fmt::Display for TopLevelStatement { TopLevelStatement::TypeAlias(t) => t.fmt(f), TopLevelStatement::SubModule(s) => s.fmt(f), TopLevelStatement::Global(c) => c.fmt(f), + TopLevelStatement::InnerAttribute(a) => write!(f, "#![{}]", a), TopLevelStatement::Error => write!(f, "error"), } } diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index bead1e69006..2bc7a88c6c5 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -26,6 +26,7 @@ use self::path::as_trait_path; use self::primitives::{keyword, macro_quote_marker, mutable_reference, variable}; use self::types::{generic_type_args, maybe_comp_time}; +use attributes::{attributes, inner_attribute, validate_secondary_attributes}; pub use types::parse_type; use visibility::visibility_modifier; @@ -91,7 +92,7 @@ pub fn parse_program(source_program: &str) -> (ParsedModule, Vec) { let (module, mut parsing_errors) = program().parse_recovery_verbose(tokens); parsing_errors.extend(lexing_errors.into_iter().map(Into::into)); - let parsed_module = module.unwrap_or(ParsedModule { items: vec![] }); + let parsed_module = module.unwrap_or_default(); if cfg!(feature = "experimental_parser") { for parsed_item in &parsed_module.items { @@ -215,6 +216,7 @@ fn top_level_statement<'a>( module_declaration().then_ignore(force(just(Token::Semicolon))), use_statement().then_ignore(force(just(Token::Semicolon))), global_declaration().then_ignore(force(just(Token::Semicolon))), + inner_attribute().map(TopLevelStatement::InnerAttribute), )) .recover_via(top_level_statement_recovery()) } @@ -287,25 +289,39 @@ fn global_declaration() -> impl NoirParser { /// submodule: 'mod' ident '{' module '}' fn submodule(module_parser: impl NoirParser) -> impl NoirParser { - keyword(Keyword::Mod) - .ignore_then(ident()) + attributes() + .then_ignore(keyword(Keyword::Mod)) + .then(ident()) .then_ignore(just(Token::LeftBrace)) .then(module_parser) .then_ignore(just(Token::RightBrace)) - .map(|(name, contents)| { - TopLevelStatement::SubModule(ParsedSubModule { name, contents, is_contract: false }) + .validate(|((attributes, name), contents), span, emit| { + let attributes = validate_secondary_attributes(attributes, span, emit); + 
TopLevelStatement::SubModule(ParsedSubModule { + name, + contents, + outer_attributes: attributes, + is_contract: false, + }) }) } /// contract: 'contract' ident '{' module '}' fn contract(module_parser: impl NoirParser) -> impl NoirParser { - keyword(Keyword::Contract) - .ignore_then(ident()) + attributes() + .then_ignore(keyword(Keyword::Contract)) + .then(ident()) .then_ignore(just(Token::LeftBrace)) .then(module_parser) .then_ignore(just(Token::RightBrace)) - .map(|(name, contents)| { - TopLevelStatement::SubModule(ParsedSubModule { name, contents, is_contract: true }) + .validate(|((attributes, name), contents), span, emit| { + let attributes = validate_secondary_attributes(attributes, span, emit); + TopLevelStatement::SubModule(ParsedSubModule { + name, + contents, + outer_attributes: attributes, + is_contract: true, + }) }) } @@ -434,9 +450,12 @@ fn optional_type_annotation<'a>() -> impl NoirParser + 'a { } fn module_declaration() -> impl NoirParser { - keyword(Keyword::Mod) - .ignore_then(ident()) - .map(|ident| TopLevelStatement::Module(ModuleDeclaration { ident })) + attributes().then_ignore(keyword(Keyword::Mod)).then(ident()).validate( + |(attributes, ident), span, emit| { + let attributes = validate_secondary_attributes(attributes, span, emit); + TopLevelStatement::Module(ModuleDeclaration { ident, outer_attributes: attributes }) + }, + ) } fn use_statement() -> impl NoirParser { @@ -1522,9 +1541,22 @@ mod test { #[test] fn parse_module_declaration() { parse_with(module_declaration(), "mod foo").unwrap(); + parse_with(module_declaration(), "#[attr] mod foo").unwrap(); parse_with(module_declaration(), "mod 1").unwrap_err(); } + #[test] + fn parse_submodule_declaration() { + parse_with(submodule(module()), "mod foo {}").unwrap(); + parse_with(submodule(module()), "#[attr] mod foo {}").unwrap(); + } + + #[test] + fn parse_contract() { + parse_with(contract(module()), "contract foo {}").unwrap(); + parse_with(contract(module()), "#[attr] contract foo {}").unwrap(); + } + #[test] fn parse_use() { let valid_use_statements = [ diff --git a/compiler/noirc_frontend/src/parser/parser/attributes.rs b/compiler/noirc_frontend/src/parser/parser/attributes.rs index 47add6f82e0..66d0ca29ca6 100644 --- a/compiler/noirc_frontend/src/parser/parser/attributes.rs +++ b/compiler/noirc_frontend/src/parser/parser/attributes.rs @@ -67,3 +67,12 @@ pub(super) fn validate_secondary_attributes( struct_attributes } + +pub(super) fn inner_attribute() -> impl NoirParser { + token_kind(TokenKind::InnerAttribute).map(|token| match token { + Token::InnerAttribute(attribute) => attribute, + _ => unreachable!( + "Parser should have already errored due to token not being an inner attribute" + ), + }) +} diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index e7dbe11f0d1..c6c8c5d4b4b 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -28,7 +28,8 @@ use crate::hir::def_collector::dc_crate::DefCollector; use crate::hir_def::expr::HirExpression; use crate::hir_def::stmt::HirStatement; use crate::monomorphization::monomorphize; -use crate::parser::ParserErrorReason; +use crate::parser::{ItemKind, ParserErrorReason}; +use crate::token::SecondaryAttribute; use crate::ParsedModule; use crate::{ hir::def_map::{CrateDefMap, LocalModuleId}, @@ -64,10 +65,28 @@ pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(Compilation remove_experimental_warnings(&mut errors); if !has_parser_error(&errors) { + let inner_attributes: Vec = 
program + .items + .iter() + .filter_map(|item| { + if let ItemKind::InnerAttribute(attribute) = &item.kind { + Some(attribute.clone()) + } else { + None + } + }) + .collect(); + // Allocate a default Module for the root, giving it a ModuleId let mut modules: Arena = Arena::default(); let location = Location::new(Default::default(), root_file_id); - let root = modules.insert(ModuleData::new(None, location, false)); + let root = modules.insert(ModuleData::new( + None, + location, + Vec::new(), + inner_attributes.clone(), + false, + )); let def_map = CrateDefMap { root: LocalModuleId(root), diff --git a/docs/docs/noir/standard_library/meta/module.md b/docs/docs/noir/standard_library/meta/module.md index d283f2da8b2..870e366461c 100644 --- a/docs/docs/noir/standard_library/meta/module.md +++ b/docs/docs/noir/standard_library/meta/module.md @@ -20,6 +20,12 @@ Returns the name of the module. Returns each function in the module. +### has_named_attribute + +#include_code has_named_attribute noir_stdlib/src/meta/module.nr rust + +Returns true if this module has a custom attribute with the given name. + ### is_contract #include_code is_contract noir_stdlib/src/meta/module.nr rust diff --git a/noir_stdlib/src/meta/module.nr b/noir_stdlib/src/meta/module.nr index 6ea3ca55fb1..b3f76812b8a 100644 --- a/noir_stdlib/src/meta/module.nr +++ b/noir_stdlib/src/meta/module.nr @@ -1,4 +1,9 @@ impl Module { + #[builtin(module_has_named_attribute)] + // docs:start:has_named_attribute + fn has_named_attribute(self, name: Quoted) -> bool {} + // docs:end:has_named_attribute + #[builtin(module_is_contract)] // docs:start:is_contract fn is_contract(self) -> bool {} diff --git a/test_programs/compile_success_empty/comptime_module/src/main.nr b/test_programs/compile_success_empty/comptime_module/src/main.nr index 8d834381fea..5722d42ca26 100644 --- a/test_programs/compile_success_empty/comptime_module/src/main.nr +++ b/test_programs/compile_success_empty/comptime_module/src/main.nr @@ -1,10 +1,45 @@ +#[outer_attribute] mod foo { + #![some_attribute] fn x() {} fn y() {} } contract bar {} +#[some_attribute] +mod another_module {} + +#[outer_attribute_func] +mod yet_another_module { + #![super::inner_attribute_func] + fn foo() {} +} + +#[outer_attribute_separate_module] +mod separate_module; + +comptime mut global counter = 0; + +fn increment_counter() { + counter += 1; +} + +fn outer_attribute_func(m: Module) { + assert_eq(m.name(), quote { yet_another_module }); + increment_counter(); +} + +fn inner_attribute_func(m: Module) { + assert_eq(m.name(), quote { yet_another_module }); + increment_counter(); +} + +fn outer_attribute_separate_module(m: Module) { + assert_eq(m.name(), quote { separate_module }); + increment_counter(); +} + fn main() { comptime { @@ -15,6 +50,8 @@ fn main() { let bar = quote { bar }.as_module().unwrap(); assert(bar.is_contract()); + let another_module = quote { another_module }.as_module().unwrap(); + // Check Module::functions assert_eq(foo.functions().len(), 2); assert_eq(bar.functions().len(), 0); @@ -22,7 +59,15 @@ fn main() { // Check Module::name assert_eq(foo.name(), quote { foo }); assert_eq(bar.name(), quote { bar }); + + // Check Module::has_named_attribute + assert(foo.has_named_attribute(quote { some_attribute })); + assert(foo.has_named_attribute(quote { outer_attribute })); + assert(!bar.has_named_attribute(quote { some_attribute })); + assert(another_module.has_named_attribute(quote { some_attribute })); } + + assert_eq(counter, 4); } // docs:start:as_module_example diff --git 
a/test_programs/compile_success_empty/comptime_module/src/separate_module.nr b/test_programs/compile_success_empty/comptime_module/src/separate_module.nr new file mode 100644 index 00000000000..53784101507 --- /dev/null +++ b/test_programs/compile_success_empty/comptime_module/src/separate_module.nr @@ -0,0 +1,5 @@ +#![inner_attribute_separate_module] +fn inner_attribute_separate_module(m: Module) { + assert_eq(m.name(), quote { separate_module }); + super::increment_counter(); +} diff --git a/tooling/nargo_fmt/src/visitor/item.rs b/tooling/nargo_fmt/src/visitor/item.rs index 0e2d07f13d0..9e556e0fcbe 100644 --- a/tooling/nargo_fmt/src/visitor/item.rs +++ b/tooling/nargo_fmt/src/visitor/item.rs @@ -165,6 +165,11 @@ impl super::FmtVisitor<'_> { continue; } + for attribute in module.outer_attributes { + self.push_str(&format!("#[{}]\n", attribute.as_ref())); + self.push_str(&self.indent.to_string()); + } + let name = module.name; let after_brace = self.span_after(span, Token::LeftBrace).start(); self.last_position = after_brace; @@ -227,7 +232,8 @@ impl super::FmtVisitor<'_> { | ItemKind::TraitImpl(_) | ItemKind::TypeAlias(_) | ItemKind::Global(_) - | ItemKind::ModuleDecl(_) => { + | ItemKind::ModuleDecl(_) + | ItemKind::InnerAttribute(_) => { self.push_rewrite(self.slice(span).to_string(), span); self.last_position = span.end(); } diff --git a/tooling/nargo_fmt/tests/expected/module.nr b/tooling/nargo_fmt/tests/expected/module.nr index e419543dbc4..0a051a1b50f 100644 --- a/tooling/nargo_fmt/tests/expected/module.nr +++ b/tooling/nargo_fmt/tests/expected/module.nr @@ -1,3 +1,6 @@ +#![inner] +#![inner2] + mod a { mod b { struct Data { @@ -13,6 +16,8 @@ mod a { Data2 { a } } + #[custom] + #[another_custom] mod tests { #[test] fn test() { @@ -20,4 +25,11 @@ mod a { data2(1); } } + + #[attr] + mod baz; + + mod empty { + #![inner] + } } diff --git a/tooling/nargo_fmt/tests/input/module.nr b/tooling/nargo_fmt/tests/input/module.nr index e419543dbc4..0a051a1b50f 100644 --- a/tooling/nargo_fmt/tests/input/module.nr +++ b/tooling/nargo_fmt/tests/input/module.nr @@ -1,3 +1,6 @@ +#![inner] +#![inner2] + mod a { mod b { struct Data { @@ -13,6 +16,8 @@ mod a { Data2 { a } } + #[custom] + #[another_custom] mod tests { #[test] fn test() { @@ -20,4 +25,11 @@ mod a { data2(1); } } + + #[attr] + mod baz; + + mod empty { + #![inner] + } } From 94e661e7520d80496bdc9da39b9736bafacb96dc Mon Sep 17 00:00:00 2001 From: jfecher Date: Wed, 4 Sep 2024 14:31:29 -0500 Subject: [PATCH 19/26] feat: Allow inserting new structs and impls into programs from attributes (#5927) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/5913 ## Summary\* Allows inserting new structs and impls into the program from comptime attributes. ## Additional Context Inserting new structs wasn't allowed originally since functions wouldn't be able to refer to them in their signature. This use case isn't needed for aztec currently so I'm adding this feature now and we can look into expanding it later. See https://github.com/noir-lang/noir/issues/5926 ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings.
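For reference, a minimal sketch of what this enables, adapted from the `unquote_struct` test program added in this PR (the `Foo` struct and its `assert_equal` method come from that test and are only illustrative):

```rust
// An attribute that quotes a brand-new struct plus an impl into the program.
#[output_struct]
fn foo(x: Field, y: u32) -> u32 {
    x as u32 + y
}

// Given a function, wrap its parameters in a struct definition and attach an impl to it.
comptime fn output_struct(f: FunctionDefinition) -> Quoted {
    let fields = f.parameters().map(|param: (Quoted, Type)| {
        let name = param.0;
        let typ = param.1;
        quote { $name: $typ, }
    }).join(quote {});

    quote {
        struct Foo { $fields }

        impl Foo {
            fn assert_equal(self) {
                assert_eq(self.x as u32, self.y);
            }
        }
    }
}

fn main() {
    // Both the struct and its impl were inserted by the attribute above.
    let foo = Foo { x: 4, y: 4 };
    foo.assert_equal();
}
```

The quoted struct and impl are collected the same way as items written directly in the source, so `main` can refer to them as usual.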
--- .../noirc_frontend/src/elaborator/comptime.rs | 20 +- .../noirc_frontend/src/hir/comptime/errors.rs | 2 +- .../src/hir/def_collector/dc_mod.rs | 374 ++++++++++-------- compiler/noirc_frontend/src/hir/mod.rs | 4 +- docs/docs/noir/concepts/comptime.md | 2 +- .../unquote_struct/Nargo.toml | 7 + .../unquote_struct/src/main.nr | 28 ++ 7 files changed, 272 insertions(+), 165 deletions(-) create mode 100644 test_programs/compile_success_empty/unquote_struct/Nargo.toml create mode 100644 test_programs/compile_success_empty/unquote_struct/src/main.nr diff --git a/compiler/noirc_frontend/src/elaborator/comptime.rs b/compiler/noirc_frontend/src/elaborator/comptime.rs index cfc2e34c520..e8dbf2ec775 100644 --- a/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -385,14 +385,30 @@ impl<'context> Elaborator<'context> { self.errors.push(error); } } + TopLevelStatement::Struct(struct_def) => { + if let Some((type_id, the_struct)) = dc_mod::collect_struct( + self.interner, + self.def_maps.get_mut(&self.crate_id).unwrap(), + struct_def, + self.file, + self.local_module, + self.crate_id, + &mut self.errors, + ) { + generated_items.types.insert(type_id, the_struct); + } + } + TopLevelStatement::Impl(r#impl) => { + let module = self.module_id(); + dc_mod::collect_impl(self.interner, generated_items, r#impl, self.file, module); + } + // Assume that an error has already been issued TopLevelStatement::Error => (), TopLevelStatement::Module(_) | TopLevelStatement::Import(..) - | TopLevelStatement::Struct(_) | TopLevelStatement::Trait(_) - | TopLevelStatement::Impl(_) | TopLevelStatement::TypeAlias(_) | TopLevelStatement::SubModule(_) | TopLevelStatement::InnerAttribute(_) => { diff --git a/compiler/noirc_frontend/src/hir/comptime/errors.rs b/compiler/noirc_frontend/src/hir/comptime/errors.rs index cfee6bcedac..48efc08f463 100644 --- a/compiler/noirc_frontend/src/hir/comptime/errors.rs +++ b/compiler/noirc_frontend/src/hir/comptime/errors.rs @@ -488,7 +488,7 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { InterpreterError::UnsupportedTopLevelItemUnquote { item, location } => { let msg = "Unsupported statement type to unquote".into(); let secondary = - "Only functions, globals, and trait impls can be unquoted here".into(); + "Only functions, structs, globals, and impls can be unquoted here".into(); let mut error = CustomDiagnostic::simple_error(msg, secondary, location.span); error.add_note(format!("Unquoted item was:\n{item}")); error diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 520cccf7580..d6432b0ca56 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -15,7 +15,7 @@ use crate::ast::{ TypeImpl, }; use crate::hir::resolution::errors::ResolverError; -use crate::macros_api::{Expression, NodeInterner, UnresolvedType, UnresolvedTypeData}; +use crate::macros_api::{Expression, NodeInterner, StructId, UnresolvedType, UnresolvedTypeData}; use crate::node_interner::ModuleAttributes; use crate::token::SecondaryAttribute; use crate::{ @@ -27,6 +27,7 @@ use crate::{ }; use crate::{Generics, Kind, ResolvedGeneric, Type, TypeVariable}; +use super::dc_crate::CollectedItems; use super::dc_crate::ModuleAttribute; use super::{ dc_crate::{ @@ -171,24 +172,13 @@ impl<'a> ModCollector<'a> { let module_id = ModuleId { krate, local_id: self.module_id }; for r#impl in impls { - let mut 
unresolved_functions = UnresolvedFunctions { - file_id: self.file_id, - functions: Vec::new(), - trait_id: None, - self_type: None, - }; - - for (mut method, _) in r#impl.methods { - let func_id = context.def_interner.push_empty_fn(); - method.def.where_clause.extend(r#impl.where_clause.clone()); - let location = Location::new(method.span(), self.file_id); - context.def_interner.push_function(func_id, &method.def, module_id, location); - unresolved_functions.push_fn(self.module_id, func_id, method); - } - - let key = (r#impl.object_type, self.module_id); - let methods = self.def_collector.items.impls.entry(key).or_default(); - methods.push((r#impl.generics, r#impl.type_span, unresolved_functions)); + collect_impl( + &mut context.def_interner, + &mut self.def_collector.items, + r#impl, + self.file_id, + module_id, + ); } } @@ -315,92 +305,21 @@ impl<'a> ModCollector<'a> { ) -> Vec<(CompilationError, FileId)> { let mut definition_errors = vec![]; for struct_definition in types { - self.check_duplicate_field_names(&struct_definition, &mut definition_errors); - - let name = struct_definition.name.clone(); - - let unresolved = UnresolvedStruct { - file_id: self.file_id, - module_id: self.module_id, - struct_def: struct_definition, - }; - - let resolved_generics = context.resolve_generics( - &unresolved.struct_def.generics, - &mut definition_errors, + if let Some((id, the_struct)) = collect_struct( + &mut context.def_interner, + &mut self.def_collector.def_map, + struct_definition, self.file_id, - ); - - // Create the corresponding module for the struct namespace - let id = match self.push_child_module( - context, - &name, - Location::new(name.span(), self.file_id), - Vec::new(), - Vec::new(), - false, - false, + self.module_id, + krate, + &mut definition_errors, ) { - Ok(module_id) => context.def_interner.new_struct( - &unresolved, - resolved_generics, - krate, - module_id.local_id, - self.file_id, - ), - Err(error) => { - definition_errors.push((error.into(), self.file_id)); - continue; - } - }; - - // Add the struct to scope so its path can be looked up later - let result = self.def_collector.def_map.modules[self.module_id.0] - .declare_struct(name.clone(), id); - - if let Err((first_def, second_def)) = result { - let error = DefCollectorErrorKind::Duplicate { - typ: DuplicateType::TypeDefinition, - first_def, - second_def, - }; - definition_errors.push((error.into(), self.file_id)); - } - - // And store the TypeId -> StructType mapping somewhere it is reachable - self.def_collector.items.types.insert(id, unresolved); - - if context.def_interner.is_in_lsp_mode() { - let parent_module_id = ModuleId { krate, local_id: self.module_id }; - context.def_interner.register_struct(id, name.to_string(), parent_module_id); + self.def_collector.items.types.insert(id, the_struct); } } definition_errors } - fn check_duplicate_field_names( - &self, - struct_definition: &NoirStruct, - definition_errors: &mut Vec<(CompilationError, FileId)>, - ) { - let mut seen_field_names = std::collections::HashSet::new(); - for (field_name, _) in &struct_definition.fields { - if seen_field_names.insert(field_name) { - continue; - } - - let previous_field_name = *seen_field_names.get(field_name).unwrap(); - definition_errors.push(( - DefCollectorErrorKind::DuplicateField { - first_def: previous_field_name.clone(), - second_def: field_name.clone(), - } - .into(), - self.file_id, - )); - } - } - /// Collect any type aliases definitions declared within the ast. 
/// Returns a vector of errors if any type aliases were already defined. fn collect_type_aliases( @@ -420,7 +339,8 @@ impl<'a> ModCollector<'a> { type_alias_def: type_alias, }; - let resolved_generics = context.resolve_generics( + let resolved_generics = Context::resolve_generics( + &context.def_interner, &unresolved.type_alias_def.generics, &mut errors, self.file_id, @@ -623,8 +543,12 @@ impl<'a> ModCollector<'a> { } } - let resolved_generics = - context.resolve_generics(&trait_definition.generics, &mut errors, self.file_id); + let resolved_generics = Context::resolve_generics( + &context.def_interner, + &trait_definition.generics, + &mut errors, + self.file_id, + ); let unresolved = UnresolvedTrait { file_id: self.file_id, @@ -818,64 +742,17 @@ impl<'a> ModCollector<'a> { add_to_parent_scope: bool, is_contract: bool, ) -> Result { - let parent = Some(self.module_id); - - // Note: the difference between `location` and `mod_location` is: - // - `mod_location` will point to either the token "foo" in `mod foo { ... }` - // if it's an inline module, or the first char of a the file if it's an external module. - // - `location` will always point to the token "foo" in `mod foo` regardless of whether - // it's inline or external. - // Eventually the location put in `ModuleData` is used for codelenses about `contract`s, - // so we keep using `location` so that it continues to work as usual. - let location = Location::new(mod_name.span(), mod_location.file); - let new_module = - ModuleData::new(parent, location, outer_attributes, inner_attributes, is_contract); - let module_id = self.def_collector.def_map.modules.insert(new_module); - - let modules = &mut self.def_collector.def_map.modules; - - // Update the parent module to reference the child - modules[self.module_id.0].children.insert(mod_name.clone(), LocalModuleId(module_id)); - - let mod_id = ModuleId { - krate: self.def_collector.def_map.krate, - local_id: LocalModuleId(module_id), - }; - - // Add this child module into the scope of the parent module as a module definition - // module definitions are definitions which can only exist at the module level. - // ModuleDefinitionIds can be used across crates since they contain the CrateId - // - // We do not want to do this in the case of struct modules (each struct type corresponds - // to a child module containing its methods) since the module name should not shadow - // the struct name. - if add_to_parent_scope { - if let Err((first_def, second_def)) = - modules[self.module_id.0].declare_child_module(mod_name.to_owned(), mod_id) - { - let err = DefCollectorErrorKind::Duplicate { - typ: DuplicateType::Module, - first_def, - second_def, - }; - return Err(err); - } - - context.def_interner.add_module_attributes( - mod_id, - ModuleAttributes { - name: mod_name.0.contents.clone(), - location: mod_location, - parent: Some(self.module_id), - }, - ); - - if context.def_interner.is_in_lsp_mode() { - context.def_interner.register_module(mod_id, mod_name.0.contents.clone()); - } - } - - Ok(mod_id) + push_child_module( + &mut context.def_interner, + &mut self.def_collector.def_map, + self.module_id, + mod_name, + mod_location, + outer_attributes, + inner_attributes, + add_to_parent_scope, + is_contract, + ) } fn resolve_associated_constant_type( @@ -896,6 +773,162 @@ impl<'a> ModCollector<'a> { } } +/// Add a child module to the current def_map. 
+/// On error this returns None and pushes to `errors` +#[allow(clippy::too_many_arguments)] +fn push_child_module( + interner: &mut NodeInterner, + def_map: &mut CrateDefMap, + parent: LocalModuleId, + mod_name: &Ident, + mod_location: Location, + outer_attributes: Vec, + inner_attributes: Vec, + add_to_parent_scope: bool, + is_contract: bool, +) -> Result { + // Note: the difference between `location` and `mod_location` is: + // - `mod_location` will point to either the token "foo" in `mod foo { ... }` + // if it's an inline module, or the first char of a the file if it's an external module. + // - `location` will always point to the token "foo" in `mod foo` regardless of whether + // it's inline or external. + // Eventually the location put in `ModuleData` is used for codelenses about `contract`s, + // so we keep using `location` so that it continues to work as usual. + let location = Location::new(mod_name.span(), mod_location.file); + let new_module = + ModuleData::new(Some(parent), location, outer_attributes, inner_attributes, is_contract); + + let module_id = def_map.modules.insert(new_module); + let modules = &mut def_map.modules; + + // Update the parent module to reference the child + modules[parent.0].children.insert(mod_name.clone(), LocalModuleId(module_id)); + + let mod_id = ModuleId { krate: def_map.krate, local_id: LocalModuleId(module_id) }; + + // Add this child module into the scope of the parent module as a module definition + // module definitions are definitions which can only exist at the module level. + // ModuleDefinitionIds can be used across crates since they contain the CrateId + // + // We do not want to do this in the case of struct modules (each struct type corresponds + // to a child module containing its methods) since the module name should not shadow + // the struct name. 
+ if add_to_parent_scope { + if let Err((first_def, second_def)) = + modules[parent.0].declare_child_module(mod_name.to_owned(), mod_id) + { + let err = DefCollectorErrorKind::Duplicate { + typ: DuplicateType::Module, + first_def, + second_def, + }; + return Err(err); + } + + interner.add_module_attributes( + mod_id, + ModuleAttributes { + name: mod_name.0.contents.clone(), + location: mod_location, + parent: Some(parent), + }, + ); + + if interner.is_in_lsp_mode() { + interner.register_module(mod_id, mod_name.0.contents.clone()); + } + } + + Ok(mod_id) +} + +pub fn collect_struct( + interner: &mut NodeInterner, + def_map: &mut CrateDefMap, + struct_definition: NoirStruct, + file_id: FileId, + module_id: LocalModuleId, + krate: CrateId, + definition_errors: &mut Vec<(CompilationError, FileId)>, +) -> Option<(StructId, UnresolvedStruct)> { + check_duplicate_field_names(&struct_definition, file_id, definition_errors); + + let name = struct_definition.name.clone(); + + let unresolved = UnresolvedStruct { file_id, module_id, struct_def: struct_definition }; + + let resolved_generics = Context::resolve_generics( + interner, + &unresolved.struct_def.generics, + definition_errors, + file_id, + ); + + // Create the corresponding module for the struct namespace + let location = Location::new(name.span(), file_id); + let id = match push_child_module( + interner, + def_map, + module_id, + &name, + location, + Vec::new(), + Vec::new(), + false, + false, + ) { + Ok(module_id) => { + interner.new_struct(&unresolved, resolved_generics, krate, module_id.local_id, file_id) + } + Err(error) => { + definition_errors.push((error.into(), file_id)); + return None; + } + }; + + // Add the struct to scope so its path can be looked up later + let result = def_map.modules[module_id.0].declare_struct(name.clone(), id); + + if let Err((first_def, second_def)) = result { + let error = DefCollectorErrorKind::Duplicate { + typ: DuplicateType::TypeDefinition, + first_def, + second_def, + }; + definition_errors.push((error.into(), file_id)); + } + + if interner.is_in_lsp_mode() { + let parent_module_id = ModuleId { krate, local_id: module_id }; + interner.register_struct(id, name.to_string(), parent_module_id); + } + + Some((id, unresolved)) +} + +pub fn collect_impl( + interner: &mut NodeInterner, + items: &mut CollectedItems, + r#impl: TypeImpl, + file_id: FileId, + module_id: ModuleId, +) { + let mut unresolved_functions = + UnresolvedFunctions { file_id, functions: Vec::new(), trait_id: None, self_type: None }; + + for (mut method, _) in r#impl.methods { + let func_id = interner.push_empty_fn(); + method.def.where_clause.extend(r#impl.where_clause.clone()); + let location = Location::new(method.span(), file_id); + interner.push_function(func_id, &method.def, module_id, location); + unresolved_functions.push_fn(module_id.local_id, func_id, method); + } + + let key = (r#impl.object_type, module_id.local_id); + let methods = items.impls.entry(key).or_default(); + methods.push((r#impl.generics, r#impl.type_span, unresolved_functions)); +} + fn find_module( file_manager: &FileManager, anchor: FileId, @@ -1054,6 +1087,29 @@ pub(crate) fn collect_global( (global, error) } +fn check_duplicate_field_names( + struct_definition: &NoirStruct, + file: FileId, + definition_errors: &mut Vec<(CompilationError, FileId)>, +) { + let mut seen_field_names = std::collections::HashSet::new(); + for (field_name, _) in &struct_definition.fields { + if seen_field_names.insert(field_name) { + continue; + } + + let previous_field_name = 
*seen_field_names.get(field_name).unwrap(); + definition_errors.push(( + DefCollectorErrorKind::DuplicateField { + first_def: previous_field_name.clone(), + second_def: field_name.clone(), + } + .into(), + file, + )); + } +} + #[cfg(test)] mod find_module_tests { use super::*; diff --git a/compiler/noirc_frontend/src/hir/mod.rs b/compiler/noirc_frontend/src/hir/mod.rs index e4f000778d1..c631edfa889 100644 --- a/compiler/noirc_frontend/src/hir/mod.rs +++ b/compiler/noirc_frontend/src/hir/mod.rs @@ -272,14 +272,14 @@ impl Context<'_, '_> { /// Each result is returned in a list rather than returned as a single result as to allow /// definition collection to provide an error for each ill-formed numeric generic. pub(crate) fn resolve_generics( - &mut self, + interner: &NodeInterner, generics: &UnresolvedGenerics, errors: &mut Vec<(CompilationError, FileId)>, file_id: FileId, ) -> Generics { vecmap(generics, |generic| { // Map the generic to a fresh type variable - let id = self.def_interner.next_type_variable_id(); + let id = interner.next_type_variable_id(); let type_var = TypeVariable::unbound(id); let ident = generic.ident(); let span = ident.0.span(); diff --git a/docs/docs/noir/concepts/comptime.md b/docs/docs/noir/concepts/comptime.md index ed55a541fbd..ba078c763d0 100644 --- a/docs/docs/noir/concepts/comptime.md +++ b/docs/docs/noir/concepts/comptime.md @@ -183,7 +183,7 @@ comptime fn my_function_annotation(f: FunctionDefinition) { Anything returned from one of these functions will be inserted at top-level along with the original item. Note that expressions are not valid at top-level so you'll get an error trying to return `3` or similar just as if you tried to write a program containing `3; struct Foo {}`. -You can insert other top-level items such as traits, structs, or functions this way though. +You can insert other top-level items such as trait impls, structs, or functions this way though. 
For example, this is the mechanism used to insert additional trait implementations into the program when deriving a trait impl from a struct: #include_code derive-field-count-example noir_stdlib/src/meta/mod.nr rust diff --git a/test_programs/compile_success_empty/unquote_struct/Nargo.toml b/test_programs/compile_success_empty/unquote_struct/Nargo.toml new file mode 100644 index 00000000000..c40d6a07093 --- /dev/null +++ b/test_programs/compile_success_empty/unquote_struct/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "unquote_struct" +type = "bin" +authors = [""] +compiler_version = ">=0.33.0" + +[dependencies] diff --git a/test_programs/compile_success_empty/unquote_struct/src/main.nr b/test_programs/compile_success_empty/unquote_struct/src/main.nr new file mode 100644 index 00000000000..e90711dd710 --- /dev/null +++ b/test_programs/compile_success_empty/unquote_struct/src/main.nr @@ -0,0 +1,28 @@ +fn main() { + let foo = Foo { x: 4, y: 4 }; + foo.assert_equal(); +} + +#[output_struct] +fn foo(x: Field, y: u32) -> u32 { + x as u32 + y +} + +// Given a function, wrap its parameters in a struct definition +comptime fn output_struct(f: FunctionDefinition) -> Quoted { + let fields = f.parameters().map(|param: (Quoted, Type)| { + let name = param.0; + let typ = param.1; + quote { $name: $typ, } + }).join(quote {}); + + quote { + struct Foo { $fields } + + impl Foo { + fn assert_equal(self) { + assert_eq(self.x as u32, self.y); + } + } + } +} From 34f21c0eadfc8a03f5177d72de7958903de8ac98 Mon Sep 17 00:00:00 2001 From: jfecher Date: Wed, 4 Sep 2024 14:38:06 -0500 Subject: [PATCH 20/26] fix: Support debug comptime flag for attributes (#5929) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/5915 ## Summary\* We were just missing a quick check after running attributes. Now their output will be shown when the `--debug-comptime-in-file` flag is used ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- compiler/noirc_frontend/src/elaborator/comptime.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/compiler/noirc_frontend/src/elaborator/comptime.rs b/compiler/noirc_frontend/src/elaborator/comptime.rs index e8dbf2ec775..cf6679af8e9 100644 --- a/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -175,6 +175,8 @@ impl<'context> Elaborator<'context> { .call_function(function, arguments, TypeBindings::new(), location) .map_err(|error| error.into_compilation_error_pair())?; + self.debug_comptime(location, |interner| value.display(interner).to_string()); + if value != Value::Unit { let items = value .into_top_level_items(location, self.interner) From 2c22fe555dc41fffc623026b4b8c57d44b869cd2 Mon Sep 17 00:00:00 2001 From: Ary Borenszweig Date: Wed, 4 Sep 2024 17:39:58 -0300 Subject: [PATCH 21/26] fix: collect functions generated by attributes (#5930) # Description ## Problem Resolves #5901 ## Summary ## Additional Context ## Documentation Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist - [x] I have tested the changes locally. 
- [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: jfecher --- .../noirc_frontend/src/elaborator/comptime.rs | 29 +++---- .../src/hir/def_collector/dc_mod.rs | 75 +++++++++++-------- .../unquote_function/Nargo.toml | 7 ++ .../unquote_function/src/main.nr | 12 +++ .../unquote_struct/src/main.nr | 6 +- 5 files changed, 82 insertions(+), 47 deletions(-) create mode 100644 test_programs/compile_success_empty/unquote_function/Nargo.toml create mode 100644 test_programs/compile_success_empty/unquote_function/src/main.nr diff --git a/compiler/noirc_frontend/src/elaborator/comptime.rs b/compiler/noirc_frontend/src/elaborator/comptime.rs index cf6679af8e9..7da5efd0b5a 100644 --- a/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -326,21 +326,24 @@ impl<'context> Elaborator<'context> { ) { match item { TopLevelStatement::Function(function) => { - let id = self.interner.push_empty_fn(); - let module = self.module_id(); - self.interner.push_function(id, &function.def, module, location); + let module_id = self.module_id(); - if self.interner.is_in_lsp_mode() && !function.def.is_test() { - self.interner.register_function(id, &function.def); + if let Some(id) = dc_mod::collect_function( + self.interner, + self.def_maps.get_mut(&self.crate_id).unwrap(), + &function, + module_id, + self.file, + &mut self.errors, + ) { + let functions = vec![(self.local_module, id, function)]; + generated_items.functions.push(UnresolvedFunctions { + file_id: self.file, + functions, + trait_id: None, + self_type: None, + }); } - - let functions = vec![(self.local_module, id, function)]; - generated_items.functions.push(UnresolvedFunctions { - file_id: self.file, - functions, - trait_id: None, - self_type: None, - }); } TopLevelStatement::TraitImpl(mut trait_impl) => { let (methods, associated_types, associated_constants) = diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index d6432b0ca56..79b55f60b76 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -248,25 +248,16 @@ impl<'a> ModCollector<'a> { let module = ModuleId { krate, local_id: self.module_id }; for function in functions { - // check if optional field attribute is compatible with native field - if let Some(field) = function.attributes().get_field_attribute() { - if !is_native_field(&field) { - continue; - } - } - - let name = function.name_ident().clone(); - let func_id = context.def_interner.push_empty_fn(); - let visibility = function.def.visibility; - - // First create dummy function in the DefInterner - // So that we can get a FuncId - let location = Location::new(function.span(), self.file_id); - context.def_interner.push_function(func_id, &function.def, module, location); - - if context.def_interner.is_in_lsp_mode() && !function.def.is_test() { - context.def_interner.register_function(func_id, &function.def); - } + let Some(func_id) = collect_function( + &mut context.def_interner, + &mut self.def_collector.def_map, + &function, + module, + self.file_id, + &mut errors, + ) else { + continue; + }; // Now link this func_id to a crate level map with the noir function and the module id // Encountering a NoirFunction, we retrieve it's module_data to get the namespace @@ -275,19 +266,6 @@ impl<'a> ModCollector<'a> { // With this method we iterate each function 
in the Crate and not each module // This may not be great because we have to pull the module_data for each function unresolved_functions.push_fn(self.module_id, func_id, function); - - // Add function to scope/ns of the module - let result = self.def_collector.def_map.modules[self.module_id.0] - .declare_function(name, visibility, func_id); - - if let Err((first_def, second_def)) = result { - let error = DefCollectorErrorKind::Duplicate { - typ: DuplicateType::Function, - first_def, - second_def, - }; - errors.push((error.into(), self.file_id)); - } } self.def_collector.items.functions.push(unresolved_functions); @@ -842,6 +820,39 @@ fn push_child_module( Ok(mod_id) } +pub fn collect_function( + interner: &mut NodeInterner, + def_map: &mut CrateDefMap, + function: &NoirFunction, + module: ModuleId, + file: FileId, + errors: &mut Vec<(CompilationError, FileId)>, +) -> Option { + if let Some(field) = function.attributes().get_field_attribute() { + if !is_native_field(&field) { + return None; + } + } + let name = function.name_ident().clone(); + let func_id = interner.push_empty_fn(); + let visibility = function.def.visibility; + let location = Location::new(function.span(), file); + interner.push_function(func_id, &function.def, module, location); + if interner.is_in_lsp_mode() && !function.def.is_test() { + interner.register_function(func_id, &function.def); + } + let result = def_map.modules[module.local_id.0].declare_function(name, visibility, func_id); + if let Err((first_def, second_def)) = result { + let error = DefCollectorErrorKind::Duplicate { + typ: DuplicateType::Function, + first_def, + second_def, + }; + errors.push((error.into(), file)); + } + Some(func_id) +} + pub fn collect_struct( interner: &mut NodeInterner, def_map: &mut CrateDefMap, diff --git a/test_programs/compile_success_empty/unquote_function/Nargo.toml b/test_programs/compile_success_empty/unquote_function/Nargo.toml new file mode 100644 index 00000000000..aa56a5798df --- /dev/null +++ b/test_programs/compile_success_empty/unquote_function/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "unquote_function" +type = "bin" +authors = [""] +compiler_version = ">=0.33.0" + +[dependencies] diff --git a/test_programs/compile_success_empty/unquote_function/src/main.nr b/test_programs/compile_success_empty/unquote_function/src/main.nr new file mode 100644 index 00000000000..273a091b26d --- /dev/null +++ b/test_programs/compile_success_empty/unquote_function/src/main.nr @@ -0,0 +1,12 @@ +fn main() { + bar(); +} + +#[output_function] +fn foo() {} + +comptime fn output_function(_f: FunctionDefinition) -> Quoted { + quote { + fn bar() {} + } +} diff --git a/test_programs/compile_success_empty/unquote_struct/src/main.nr b/test_programs/compile_success_empty/unquote_struct/src/main.nr index e90711dd710..603440b5c76 100644 --- a/test_programs/compile_success_empty/unquote_struct/src/main.nr +++ b/test_programs/compile_success_empty/unquote_struct/src/main.nr @@ -10,11 +10,13 @@ fn foo(x: Field, y: u32) -> u32 { // Given a function, wrap its parameters in a struct definition comptime fn output_struct(f: FunctionDefinition) -> Quoted { - let fields = f.parameters().map(|param: (Quoted, Type)| { + let fields = f.parameters().map( + |param: (Quoted, Type)| { let name = param.0; let typ = param.1; quote { $name: $typ, } - }).join(quote {}); + } + ).join(quote {}); quote { struct Foo { $fields } From f18e9ca86c025f736af6e515f812e36fbb622930 Mon Sep 17 00:00:00 2001 From: Ary Borenszweig Date: Wed, 4 Sep 2024 18:06:14 -0300 Subject: [PATCH 
22/26] feat: add `fmtstr::contents` (#5928) # Description ## Problem Resolves #5899 Resolves #5914 ## Summary Two things here: 1. When interpolating quoted values inside a format string, we do that without producing the `quote {` and `}` parts, which is likely what a user would expect (similar to unquoting those values). 2. In order to create identifiers (or any piece of code in general) by joining several quoted values you can use format strings together with the new `fmtstr::contents` method, which returns a `Quoted` value with the string contents (that is, without the leading and trailing double quotes). ## Additional Context I originally thought about a method like `fmtstr::as_identifier` that would try to parse the string contents as an identifier (maybe an `Ident`, or maybe a `Path`), returning `Option` and `None` in case it couldn't be parsed to that. But I think in general it could be more useful to just get the string contents as a `Quoted` value. After all, if it isn't an identifier you'll learn it later on once the value is unquoted or interpolated. ## Documentation Check one: - [ ] No documentation needed. - [x] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .../src/hir/comptime/interpreter.rs | 14 +++- .../src/hir/comptime/interpreter/builtin.rs | 25 ++++++- .../interpreter/builtin/builtin_helpers.rs | 14 ++++ .../noirc_frontend/src/hir/comptime/value.rs | 73 ++++++++++++------- docs/docs/noir/standard_library/fmtstr.md | 13 ++++ noir_stdlib/src/meta/format_string.nr | 6 ++ noir_stdlib/src/meta/mod.nr | 1 + .../comptime_fmt_strings/src/main.nr | 14 ++++ tooling/lsp/src/requests/completion/tests.rs | 2 +- 9 files changed, 130 insertions(+), 32 deletions(-) create mode 100644 docs/docs/noir/standard_library/fmtstr.md create mode 100644 noir_stdlib/src/meta/format_string.nr diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index d8e62b66eca..9f559b7c5e6 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -586,7 +586,19 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { consuming = false; if let Some(value) = values.pop_front() { - result.push_str(&value.display(self.elaborator.interner).to_string()); + // When interpolating a quoted value inside a format string, we don't include the + // surrounding `quote {` ... `}` as if we are unquoting the quoted value inside the string.
+ if let Value::Quoted(tokens) = value { + for (index, token) in tokens.iter().enumerate() { + if index > 0 { + result.push(' '); + } + result + .push_str(&token.display(self.elaborator.interner).to_string()); + } + } else { + result.push_str(&value.display(self.elaborator.interner).to_string()); + } } } other if !consuming => { diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index d1fcc76c55b..d2c9e4ffc0c 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -7,9 +7,9 @@ use acvm::{AcirField, FieldElement}; use builtin_helpers::{ block_expression_to_value, check_argument_count, check_function_not_yet_resolved, check_one_argument, check_three_arguments, check_two_arguments, get_expr, get_field, - get_function_def, get_module, get_quoted, get_slice, get_struct, get_trait_constraint, - get_trait_def, get_trait_impl, get_tuple, get_type, get_typed_expr, get_u32, - get_unresolved_type, hir_pattern_to_tokens, mutate_func_meta_type, parse, + get_format_string, get_function_def, get_module, get_quoted, get_slice, get_struct, + get_trait_constraint, get_trait_def, get_trait_impl, get_tuple, get_type, get_typed_expr, + get_u32, get_unresolved_type, hir_pattern_to_tokens, mutate_func_meta_type, parse, replace_func_meta_parameters, replace_func_meta_return_type, }; use chumsky::{prelude::choice, Parser}; @@ -32,6 +32,7 @@ use crate::{ InterpreterError, Value, }, hir_def::function::FunctionBody, + lexer::Lexer, macros_api::{HirExpression, HirLiteral, ModuleDefId, NodeInterner, Signedness}, node_interner::{DefinitionKind, TraitImplKind}, parser::{self}, @@ -95,6 +96,7 @@ impl<'local, 'context> Interpreter<'local, 'context> { "expr_is_continue" => expr_is_continue(interner, arguments, location), "expr_resolve" => expr_resolve(self, arguments, location), "is_unconstrained" => Ok(Value::Bool(true)), + "fmtstr_quoted_contents" => fmtstr_quoted_contents(interner, arguments, location), "function_def_body" => function_def_body(interner, arguments, location), "function_def_has_named_attribute" => { function_def_has_named_attribute(interner, arguments, location) @@ -1576,6 +1578,23 @@ fn unwrap_expr_value(interner: &NodeInterner, mut expr_value: ExprValue) -> Expr expr_value } +// fn quoted_contents(self) -> Quoted +fn fmtstr_quoted_contents( + interner: &NodeInterner, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + let self_argument = check_one_argument(arguments, location)?; + let (string, _) = get_format_string(interner, self_argument)?; + let (tokens, _) = Lexer::lex(&string); + let mut tokens: Vec<_> = tokens.0.into_iter().map(|token| token.into_token()).collect(); + if let Some(Token::EOF) = tokens.last() { + tokens.pop(); + } + + Ok(Value::Quoted(Rc::new(tokens))) +} + // fn body(self) -> Expr fn function_def_body( interner: &NodeInterner, diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs index 14a0e177544..ff3da6d253f 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin/builtin_helpers.rs @@ -189,6 +189,20 @@ pub(crate) fn get_expr( } } +pub(crate) fn get_format_string( + interner: &NodeInterner, + (value, location): (Value, Location), +) -> IResult<(Rc, 
Type)> { + match value { + Value::FormatString(value, typ) => Ok((value, typ)), + value => { + let n = Box::new(interner.next_type_variable()); + let e = Box::new(interner.next_type_variable()); + type_mismatch(value, Type::FmtString(n, e), location) + } + } +} + pub(crate) fn get_function_def((value, location): (Value, Location)) -> IResult { match value { Value::FunctionDefinition(id) => Ok(id), diff --git a/compiler/noirc_frontend/src/hir/comptime/value.rs b/compiler/noirc_frontend/src/hir/comptime/value.rs index fdac95a07fe..7d6e4475c7b 100644 --- a/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -605,33 +605,7 @@ impl<'value, 'interner> Display for ValuePrinter<'value, 'interner> { write!(f, "quote {{")?; for token in tokens.iter() { write!(f, " ")?; - - match token { - Token::QuotedType(id) => { - write!(f, "{}", self.interner.get_quoted_type(*id))?; - } - Token::InternedExpr(id) => { - let value = Value::expression(ExpressionKind::Interned(*id)); - value.display(self.interner).fmt(f)?; - } - Token::InternedStatement(id) => { - let value = Value::statement(StatementKind::Interned(*id)); - value.display(self.interner).fmt(f)?; - } - Token::InternedLValue(id) => { - let value = Value::lvalue(LValue::Interned(*id, Span::default())); - value.display(self.interner).fmt(f)?; - } - Token::InternedUnresolvedTypeData(id) => { - let value = Value::UnresolvedType(UnresolvedTypeData::Interned(*id)); - value.display(self.interner).fmt(f)?; - } - Token::UnquoteMarker(id) => { - let value = Value::TypedExpr(TypedExpr::ExprId(*id)); - value.display(self.interner).fmt(f)?; - } - other => write!(f, "{other}")?, - } + token.display(self.interner).fmt(f)?; } write!(f, " }}") } @@ -713,6 +687,51 @@ impl<'value, 'interner> Display for ValuePrinter<'value, 'interner> { } } +impl Token { + pub fn display<'token, 'interner>( + &'token self, + interner: &'interner NodeInterner, + ) -> TokenPrinter<'token, 'interner> { + TokenPrinter { token: self, interner } + } +} + +pub struct TokenPrinter<'token, 'interner> { + token: &'token Token, + interner: &'interner NodeInterner, +} + +impl<'token, 'interner> Display for TokenPrinter<'token, 'interner> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self.token { + Token::QuotedType(id) => { + write!(f, "{}", self.interner.get_quoted_type(*id)) + } + Token::InternedExpr(id) => { + let value = Value::expression(ExpressionKind::Interned(*id)); + value.display(self.interner).fmt(f) + } + Token::InternedStatement(id) => { + let value = Value::statement(StatementKind::Interned(*id)); + value.display(self.interner).fmt(f) + } + Token::InternedLValue(id) => { + let value = Value::lvalue(LValue::Interned(*id, Span::default())); + value.display(self.interner).fmt(f) + } + Token::InternedUnresolvedTypeData(id) => { + let value = Value::UnresolvedType(UnresolvedTypeData::Interned(*id)); + value.display(self.interner).fmt(f) + } + Token::UnquoteMarker(id) => { + let value = Value::TypedExpr(TypedExpr::ExprId(*id)); + value.display(self.interner).fmt(f) + } + other => write!(f, "{other}"), + } + } +} + fn display_trait_constraint(interner: &NodeInterner, trait_constraint: &TraitConstraint) -> String { let trait_ = interner.get_trait(trait_constraint.trait_id); format!("{}: {}{}", trait_constraint.typ, trait_.name, trait_constraint.trait_generics) diff --git a/docs/docs/noir/standard_library/fmtstr.md b/docs/docs/noir/standard_library/fmtstr.md new file mode 100644 index 
00000000000..293793e23ff --- /dev/null +++ b/docs/docs/noir/standard_library/fmtstr.md @@ -0,0 +1,13 @@ +--- +title: fmtstr +--- + +`fmtstr` is the type resulting from using format string (`f"..."`). + +## Methods + +### quoted_contents + +#include_code quoted_contents noir_stdlib/src/meta/format_string.nr rust + +Returns the format string contents (that is, without the leading and trailing double quotes) as a `Quoted` value. \ No newline at end of file diff --git a/noir_stdlib/src/meta/format_string.nr b/noir_stdlib/src/meta/format_string.nr new file mode 100644 index 00000000000..44b69719efe --- /dev/null +++ b/noir_stdlib/src/meta/format_string.nr @@ -0,0 +1,6 @@ +impl fmtstr { + #[builtin(fmtstr_quoted_contents)] + // docs:start:quoted_contents + fn quoted_contents(self) -> Quoted {} + // docs:end:quoted_contents +} diff --git a/noir_stdlib/src/meta/mod.nr b/noir_stdlib/src/meta/mod.nr index 24398054467..9fc399ddbf9 100644 --- a/noir_stdlib/src/meta/mod.nr +++ b/noir_stdlib/src/meta/mod.nr @@ -1,4 +1,5 @@ mod expr; +mod format_string; mod function_def; mod module; mod op; diff --git a/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr b/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr index 705a1b2ab4e..0e2d459a00f 100644 --- a/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr +++ b/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr @@ -12,4 +12,18 @@ fn main() { }; assert_eq(s1, "x is 4, fake interpolation: {y}, y is 5"); assert_eq(s2, "\0\0\0\0"); + + // Mainly test fmtstr::quoted_contents + call!(glue(quote { hello }, quote { world })); +} + +fn glue(x: Quoted, y: Quoted) -> Quoted { + f"{x}_{y}".quoted_contents() } + +fn hello_world() {} + +comptime fn call(x: Quoted) -> Quoted { + quote { $x() } +} + diff --git a/tooling/lsp/src/requests/completion/tests.rs b/tooling/lsp/src/requests/completion/tests.rs index ca959f5d5ca..a7cfa77a73d 100644 --- a/tooling/lsp/src/requests/completion/tests.rs +++ b/tooling/lsp/src/requests/completion/tests.rs @@ -336,7 +336,7 @@ mod completion_tests { fo>|< } "#; - assert_completion(src, vec![module_completion_item("foobar")]).await; + assert_completion_excluding_auto_import(src, vec![module_completion_item("foobar")]).await; } #[test] From af3db4bf2e8f7feba6d06c3095d7cdf17c8dde75 Mon Sep 17 00:00:00 2001 From: Ary Borenszweig Date: Wed, 4 Sep 2024 19:03:52 -0300 Subject: [PATCH 23/26] feat: warn on unused functions (#5892) # Description ## Problem Now that we warn on unused imports, doing that for functions too is relatively straight-forward. ## Summary Now unused private or `pub(crate)` functions will be reported as unused. ## Additional Context I'd like to try this on some Aztec-Packages projects, but for that it would be nice to merge #5895 first so I could get all warnings/errors for a package inside VS Code. We can eventually do the same thing with globals, traits, etc., once we track their visibility. I also think we could warn on unused `pub` functions in a "bin" or "contract" package, not sure... but if we decide to do that, it could be a separate PR. ## Documentation Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
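Roughly, this means code like the following (the names here are made up for illustration) now gets a warning on the never-called private function, while `pub` functions, entry points such as `main`, contract entry points and `#[test]` functions are left alone:

```rust
// `helper` is private and never called, so it is reported (roughly "unused function helper").
fn helper(x: Field) -> Field {
    x + 1
}

// `double` is private but called from `exposed`, so it counts as used.
fn double(x: Field) -> Field {
    x * 2
}

// `pub` functions are part of the library's API and are not flagged.
pub fn exposed(x: Field) -> Field {
    double(x)
}

// Test functions are skipped by the check.
#[test]
fn test_exposed() {
    assert_eq(exposed(1), 2);
}
```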
--- .../src/hir/def_collector/dc_crate.rs | 40 +++++---- .../src/hir/def_collector/dc_mod.rs | 20 ++++- .../src/hir/resolution/errors.rs | 10 +-- compiler/noirc_frontend/src/node_interner.rs | 2 +- compiler/noirc_frontend/src/tests.rs | 84 +++++++++++++------ compiler/noirc_frontend/src/usage_tracker.rs | 52 +++++++++--- 6 files changed, 144 insertions(+), 64 deletions(-) diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 98555375790..3cfa0989d7d 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -8,6 +8,7 @@ use crate::hir::resolution::errors::ResolverError; use crate::hir::resolution::path_resolver; use crate::hir::type_check::TypeCheckError; use crate::token::SecondaryAttribute; +use crate::usage_tracker::UnusedItem; use crate::{Generics, Type}; use crate::hir::resolution::import::{resolve_import, ImportDirective, PathResolution}; @@ -271,7 +272,7 @@ impl DefCollector { root_file_id: FileId, debug_comptime_in_file: Option<&str>, enable_arithmetic_generics: bool, - error_on_usage_tracker: bool, + error_on_unused_items: bool, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; @@ -406,20 +407,14 @@ impl DefCollector { let result = current_def_map.modules[resolved_import.module_scope.0] .import(name.clone(), visibility, module_def_id, is_prelude); - // Empty spans could come from implicitly injected imports, and we don't want to track those - if visibility != ItemVisibility::Public - && name.span().start() < name.span().end() - { - let module_id = ModuleId { - krate: crate_id, - local_id: resolved_import.module_scope, - }; - - context - .def_interner - .usage_tracker - .add_unused_import(module_id, name.clone()); - } + let module_id = + ModuleId { krate: crate_id, local_id: resolved_import.module_scope }; + context.def_interner.usage_tracker.add_unused_item( + module_id, + name.clone(), + UnusedItem::Import, + visibility, + ); if visibility != ItemVisibility::Private { let local_id = resolved_import.module_scope; @@ -494,26 +489,29 @@ impl DefCollector { ); } - if error_on_usage_tracker { - Self::check_usage_tracker(context, crate_id, &mut errors); + if error_on_unused_items { + Self::check_unused_items(context, crate_id, &mut errors); } errors } - fn check_usage_tracker( + fn check_unused_items( context: &Context, crate_id: CrateId, errors: &mut Vec<(CompilationError, FileId)>, ) { - let unused_imports = context.def_interner.usage_tracker.unused_imports().iter(); + let unused_imports = context.def_interner.usage_tracker.unused_items().iter(); let unused_imports = unused_imports.filter(|(module_id, _)| module_id.krate == crate_id); errors.extend(unused_imports.flat_map(|(module_id, usage_tracker)| { let module = &context.def_maps[&crate_id].modules()[module_id.local_id.0]; - usage_tracker.iter().map(|ident| { + usage_tracker.iter().map(|(ident, unused_item)| { let ident = ident.clone(); - let error = CompilationError::ResolverError(ResolverError::UnusedImport { ident }); + let error = CompilationError::ResolverError(ResolverError::UnusedItem { + ident, + item_type: unused_item.item_type(), + }); (error, module.location.file) }) })); diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 79b55f60b76..6c1b7632a2e 100644 --- 
a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -18,6 +18,7 @@ use crate::hir::resolution::errors::ResolverError; use crate::macros_api::{Expression, NodeInterner, StructId, UnresolvedType, UnresolvedTypeData}; use crate::node_interner::ModuleAttributes; use crate::token::SecondaryAttribute; +use crate::usage_tracker::UnusedItem; use crate::{ graph::CrateId, hir::def_collector::dc_crate::{UnresolvedStruct, UnresolvedTrait}, @@ -36,7 +37,7 @@ use super::{ }, errors::{DefCollectorErrorKind, DuplicateType}, }; -use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleData, ModuleId}; +use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleData, ModuleId, MAIN_FUNCTION}; use crate::hir::resolution::import::ImportDirective; use crate::hir::Context; @@ -833,6 +834,16 @@ pub fn collect_function( return None; } } + + let module_data = &mut def_map.modules[module.local_id.0]; + + let is_test = function.def.attributes.is_test_function(); + let is_entry_point_function = if module_data.is_contract { + function.attributes().is_contract_entry_point() + } else { + function.name() == MAIN_FUNCTION + }; + let name = function.name_ident().clone(); let func_id = interner.push_empty_fn(); let visibility = function.def.visibility; @@ -841,6 +852,13 @@ pub fn collect_function( if interner.is_in_lsp_mode() && !function.def.is_test() { interner.register_function(func_id, &function.def); } + + if !is_test && !is_entry_point_function { + let item = UnusedItem::Function(func_id); + interner.usage_tracker.add_unused_item(module, name.clone(), item, visibility); + } + + // Add function to scope/ns of the module let result = def_map.modules[module.local_id.0].declare_function(name, visibility, func_id); if let Err((first_def, second_def)) = result { let error = DefCollectorErrorKind::Duplicate { diff --git a/compiler/noirc_frontend/src/hir/resolution/errors.rs b/compiler/noirc_frontend/src/hir/resolution/errors.rs index c2038c646b5..e74468bdf18 100644 --- a/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -20,8 +20,8 @@ pub enum ResolverError { DuplicateDefinition { name: String, first_span: Span, second_span: Span }, #[error("Unused variable")] UnusedVariable { ident: Ident }, - #[error("Unused import")] - UnusedImport { ident: Ident }, + #[error("Unused {item_type}")] + UnusedItem { ident: Ident, item_type: &'static str }, #[error("Could not find variable in this scope")] VariableNotDeclared { name: String, span: Span }, #[error("path is not an identifier")] @@ -158,12 +158,12 @@ impl<'a> From<&'a ResolverError> for Diagnostic { diagnostic.unnecessary = true; diagnostic } - ResolverError::UnusedImport { ident } => { + ResolverError::UnusedItem { ident, item_type } => { let name = &ident.0.contents; let mut diagnostic = Diagnostic::simple_warning( - format!("unused import {name}"), - "unused import ".to_string(), + format!("unused {item_type} {name}"), + format!("unused {item_type}"), ident.span(), ); diagnostic.unnecessary = true; diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 4a73df6a15f..aa51779d24b 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -655,7 +655,7 @@ impl Default for NodeInterner { auto_import_names: HashMap::default(), comptime_scopes: vec![HashMap::default()], trait_impl_associated_types: HashMap::default(), - usage_tracker: 
UsageTracker::default(), + usage_tracker: UsageTracker::new(), } } } diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index c6c8c5d4b4b..04c4e414858 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -1381,7 +1381,7 @@ fn ban_mutable_globals() { fn deny_inline_attribute_on_unconstrained() { let src = r#" #[no_predicates] - unconstrained fn foo(x: Field, y: Field) { + unconstrained pub fn foo(x: Field, y: Field) { assert(x != y); } "#; @@ -1397,7 +1397,7 @@ fn deny_inline_attribute_on_unconstrained() { fn deny_fold_attribute_on_unconstrained() { let src = r#" #[fold] - unconstrained fn foo(x: Field, y: Field) { + unconstrained pub fn foo(x: Field, y: Field) { assert(x != y); } "#; @@ -1554,7 +1554,7 @@ fn struct_numeric_generic_in_function() { inner: u64 } - fn bar() { } + pub fn bar() { } "#; let errors = get_program_errors(src); assert_eq!(errors.len(), 1); @@ -1586,7 +1586,7 @@ fn struct_numeric_generic_in_struct() { #[test] fn bool_numeric_generic() { let src = r#" - fn read() -> Field { + pub fn read() -> Field { if N { 0 } else { @@ -1605,7 +1605,7 @@ fn bool_numeric_generic() { #[test] fn numeric_generic_binary_operation_type_mismatch() { let src = r#" - fn foo() -> bool { + pub fn foo() -> bool { let mut check: bool = true; check = N; check @@ -1622,7 +1622,7 @@ fn numeric_generic_binary_operation_type_mismatch() { #[test] fn bool_generic_as_loop_bound() { let src = r#" - fn read() { + pub fn read() { let mut fields = [0; N]; for i in 0..N { fields[i] = i + 1; @@ -1652,7 +1652,7 @@ fn bool_generic_as_loop_bound() { #[test] fn numeric_generic_in_function_signature() { let src = r#" - fn foo(arr: [Field; N]) -> [Field; N] { arr } + pub fn foo(arr: [Field; N]) -> [Field; N] { arr } "#; assert_no_errors(src); } @@ -1694,7 +1694,7 @@ fn normal_generic_as_array_length() { #[test] fn numeric_generic_as_param_type() { let src = r#" - fn foo(x: I) -> I { + pub fn foo(x: I) -> I { let _q: I = 5; x } @@ -1833,7 +1833,7 @@ fn numeric_generic_used_in_where_clause() { fn deserialize(fields: [Field; N]) -> Self; } - fn read() -> T where T: Deserialize { + pub fn read() -> T where T: Deserialize { let mut fields: [Field; N] = [0; N]; for i in 0..N { fields[i] = i as Field + 1; @@ -1847,12 +1847,12 @@ fn numeric_generic_used_in_where_clause() { #[test] fn numeric_generic_used_in_turbofish() { let src = r#" - fn double() -> u32 { + pub fn double() -> u32 { // Used as an expression N * 2 } - fn double_numeric_generics_test() { + pub fn double_numeric_generics_test() { // Example usage of a numeric generic arguments. 
assert(double::<9>() == 18); assert(double::<7 + 8>() == 30); @@ -1888,7 +1888,7 @@ fn normal_generic_used_when_numeric_expected_in_where_clause() { fn deserialize(fields: [Field; N]) -> Self; } - fn read() -> T where T: Deserialize { + pub fn read() -> T where T: Deserialize { T::deserialize([0, 1]) } "#; @@ -1904,7 +1904,7 @@ fn normal_generic_used_when_numeric_expected_in_where_clause() { fn deserialize(fields: [Field; N]) -> Self; } - fn read() -> T where T: Deserialize { + pub fn read() -> T where T: Deserialize { let mut fields: [Field; N] = [0; N]; for i in 0..N { fields[i] = i as Field + 1; @@ -2450,7 +2450,7 @@ fn use_super() { mod foo { use super::some_func; - fn bar() { + pub fn bar() { some_func(); } } @@ -2464,7 +2464,7 @@ fn use_super_in_path() { fn some_func() {} mod foo { - fn func() { + pub fn func() { super::some_func(); } } @@ -2755,7 +2755,7 @@ fn trait_constraint_on_tuple_type() { fn foo(self, x: A) -> bool; } - fn bar(x: (T, U), y: V) -> bool where (T, U): Foo { + pub fn bar(x: (T, U), y: V) -> bool where (T, U): Foo { x.foo(y) } @@ -3091,7 +3091,7 @@ fn trait_impl_for_a_type_that_implements_another_trait() { } } - fn use_it(t: T) -> i32 where T: Two { + pub fn use_it(t: T) -> i32 where T: Two { Two::two(t) } @@ -3131,7 +3131,7 @@ fn trait_impl_for_a_type_that_implements_another_trait_with_another_impl_used() } } - fn use_it(t: u32) -> i32 { + pub fn use_it(t: u32) -> i32 { Two::two(t) } @@ -3243,12 +3243,14 @@ fn errors_on_unused_private_import() { let errors = get_program_errors(src); assert_eq!(errors.len(), 1); - let CompilationError::ResolverError(ResolverError::UnusedImport { ident }) = &errors[0].0 + let CompilationError::ResolverError(ResolverError::UnusedItem { ident, item_type }) = + &errors[0].0 else { - panic!("Expected an unused import error"); + panic!("Expected an unused item error"); }; assert_eq!(ident.to_string(), "bar"); + assert_eq!(*item_type, "import"); } #[test] @@ -3277,12 +3279,14 @@ fn errors_on_unused_pub_crate_import() { let errors = get_program_errors(src); assert_eq!(errors.len(), 1); - let CompilationError::ResolverError(ResolverError::UnusedImport { ident }) = &errors[0].0 + let CompilationError::ResolverError(ResolverError::UnusedItem { ident, item_type }) = + &errors[0].0 else { - panic!("Expected an unused import error"); + panic!("Expected an unused item error"); }; assert_eq!(ident.to_string(), "bar"); + assert_eq!(*item_type, "import"); } #[test] @@ -3295,7 +3299,7 @@ fn warns_on_use_of_private_exported_item() { use bar::baz; - fn qux() { + pub fn qux() { baz(); } } @@ -3369,7 +3373,7 @@ fn unoquted_integer_as_integer_token() { } #[attr] - fn foobar() {} + pub fn foobar() {} fn attr(_f: FunctionDefinition) -> Quoted { let serialized_len = 1; @@ -3388,3 +3392,35 @@ fn unoquted_integer_as_integer_token() { assert_no_errors(src); } + +#[test] +fn errors_on_unused_function() { + let src = r#" + contract some_contract { + // This function is unused, but it's a contract entrypoint + // so it should not produce a warning + fn foo() -> pub Field { + 1 + } + } + + + fn foo() { + bar(); + } + + fn bar() {} + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::UnusedItem { ident, item_type }) = + &errors[0].0 + else { + panic!("Expected an unused item error"); + }; + + assert_eq!(ident.to_string(), "foo"); + assert_eq!(*item_type, "function"); +} diff --git a/compiler/noirc_frontend/src/usage_tracker.rs b/compiler/noirc_frontend/src/usage_tracker.rs index 
d8b7b271734..836f9824436 100644 --- a/compiler/noirc_frontend/src/usage_tracker.rs +++ b/compiler/noirc_frontend/src/usage_tracker.rs @@ -1,26 +1,54 @@ -use std::collections::HashSet; +use std::collections::HashMap; -use rustc_hash::FxHashMap as HashMap; +use crate::{ + ast::{Ident, ItemVisibility}, + hir::def_map::ModuleId, + node_interner::FuncId, +}; -use crate::{ast::Ident, hir::def_map::ModuleId}; +#[derive(Debug)] +pub enum UnusedItem { + Import, + Function(FuncId), +} + +impl UnusedItem { + pub fn item_type(&self) -> &'static str { + match self { + UnusedItem::Import => "import", + UnusedItem::Function(_) => "function", + } + } +} -#[derive(Debug, Default)] +#[derive(Debug)] pub struct UsageTracker { - /// List of all unused imports in each module. Each time something is imported it's added - /// to the module's set. When it's used, it's removed. At the end of the program only unused imports remain. - unused_imports: HashMap>, + unused_items: HashMap>, } impl UsageTracker { - pub(crate) fn add_unused_import(&mut self, module_id: ModuleId, name: Ident) { - self.unused_imports.entry(module_id).or_default().insert(name); + pub(crate) fn new() -> Self { + Self { unused_items: HashMap::new() } + } + + pub(crate) fn add_unused_item( + &mut self, + module_id: ModuleId, + name: Ident, + item: UnusedItem, + visibility: ItemVisibility, + ) { + // Empty spans could come from implicitly injected imports, and we don't want to track those + if visibility != ItemVisibility::Public && name.span().start() < name.span().end() { + self.unused_items.entry(module_id).or_default().insert(name, item); + } } pub(crate) fn mark_as_used(&mut self, current_mod_id: ModuleId, name: &Ident) { - self.unused_imports.entry(current_mod_id).or_default().remove(name); + self.unused_items.entry(current_mod_id).or_default().remove(name); } - pub(crate) fn unused_imports(&self) -> &HashMap> { - &self.unused_imports + pub(crate) fn unused_items(&self) -> &HashMap> { + &self.unused_items } } From 416b29314d3a32f566a4cfa55921b4e229d9b786 Mon Sep 17 00:00:00 2001 From: guipublic <47281315+guipublic@users.noreply.github.com> Date: Thu, 5 Sep 2024 10:31:09 +0200 Subject: [PATCH 24/26] chore: error on false constraint (#5890) # Description ## Problem\* Report a bug during compilation when a constraint is false ## Summary\* It is done in acir-gen which ensures that all side effects are resolved. ## Additional Context The reported bug does not fail the compilation, because it will mess around compile/run-time failures and also for the Noir tests. We want a consistent handling of assert, whether it fails at compile-time, run-time, or test-time. ## Documentation\* Check one: - [X] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [X] I have tested the changes locally. - [X] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
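
To sketch what this reports (an editor's illustration, not part of the diff; the exact point at which the expression becomes constant depends on earlier SSA simplification): when the constrained expression reduces to a non-zero constant in acir-gen, the new `InternalBug::AssertFailed` diagnostic is attached to the compilation output, while the build itself still succeeds.

```rust
// Hypothetical example: `x` is a compile-time constant, so the
// constraint below is provably false. acir-gen now surfaces the
// "Assertion is always false" bug diagnostic at compile time instead
// of only failing once the circuit is executed or tested.
fn main() {
    let x = 5;
    assert(x == 6);
}
```
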
--------- Co-authored-by: jfecher --- compiler/noirc_evaluator/src/errors.rs | 3 +++ .../src/ssa/acir_gen/acir_ir/acir_variable.rs | 10 +++++++++- compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs | 1 + 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index c4f56d032f9..bcd6865b721 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -88,6 +88,7 @@ impl From for FileDiagnostic { InternalBug::IndependentSubgraph { call_stack } => { ("There is no path from the output of this brillig call to either return values or inputs of the circuit, which creates an independent subgraph. This is quite likely a soundness vulnerability".to_string(),call_stack) } + InternalBug::AssertFailed { call_stack } => ("As a result, the compiled circuit is ensured to fail. Other assertions may also fail during execution".to_string(), call_stack) }; let call_stack = vecmap(call_stack, |location| location); let file_id = call_stack.last().map(|location| location.file).unwrap_or_default(); @@ -111,6 +112,8 @@ pub enum InternalWarning { pub enum InternalBug { #[error("Input to brillig function is in a separate subgraph to output")] IndependentSubgraph { call_stack: CallStack }, + #[error("Assertion is always false")] + AssertFailed { call_stack: CallStack }, } #[derive(Debug, PartialEq, Eq, Clone, Error)] diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index a0be1ee19cf..d12d49784ec 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -2,7 +2,7 @@ use super::big_int::BigIntContext; use super::generated_acir::{BrilligStdlibFunc, GeneratedAcir, PLACEHOLDER_BRILLIG_INDEX}; use crate::brillig::brillig_gen::brillig_directive; use crate::brillig::brillig_ir::artifact::GeneratedBrillig; -use crate::errors::{InternalError, RuntimeError, SsaReport}; +use crate::errors::{InternalBug, InternalError, RuntimeError, SsaReport}; use crate::ssa::acir_gen::{AcirDynamicArray, AcirValue}; use crate::ssa::ir::dfg::CallStack; use crate::ssa::ir::types::Type as SsaType; @@ -126,6 +126,8 @@ pub(crate) struct AcirContext { big_int_ctx: BigIntContext, expression_width: ExpressionWidth, + + pub(crate) warnings: Vec, } impl AcirContext { @@ -518,6 +520,12 @@ impl AcirContext { self.mark_variables_equivalent(lhs, rhs)?; return Ok(()); } + if diff_expr.is_const() { + // Constraint is always false + self.warnings.push(SsaReport::Bug(InternalBug::AssertFailed { + call_stack: self.get_call_stack(), + })); + } self.acir_ir.assert_is_zero(diff_expr); if let Some(payload) = assert_message { diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 0360b15d950..15b44fde65d 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -453,6 +453,7 @@ impl<'a> Context<'a> { } warnings.extend(return_warnings); + warnings.extend(self.acir_context.warnings.clone()); // Add the warnings from the alter Ssa passes Ok(self.acir_context.finish(input_witness, return_witnesses, warnings)) From 8b60bbc8082513e29f6573e5235e0a33fdd1517b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Thu, 5 Sep 2024 12:47:20 +0200 Subject: [PATCH 25/26] feat: Only check array bounds in brillig if index 
is unsafe (#5938) # Description ## Problem\* Optimizes bounds checks ## Summary\* ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .../src/brillig/brillig_gen/brillig_block.rs | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index ef5fbce83d4..fe986089686 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -633,7 +633,11 @@ impl<'block> BrilligBlock<'block> { }; let index_variable = self.convert_ssa_single_addr_value(*index, dfg); - self.validate_array_index(array_variable, index_variable); + + if !dfg.is_safe_index(*index, *array) { + self.validate_array_index(array_variable, index_variable); + } + self.retrieve_variable_from_array( array_pointer, index_variable, @@ -652,7 +656,11 @@ impl<'block> BrilligBlock<'block> { result_ids[0], dfg, ); - self.validate_array_index(source_variable, index_register); + + if !dfg.is_safe_index(*index, *array) { + self.validate_array_index(source_variable, index_register); + } + self.convert_ssa_array_set( source_variable, destination_variable, From 9d2629dd1bb28a8c2ecb4c33d26119da75d626c2 Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 5 Sep 2024 06:17:16 -0500 Subject: [PATCH 26/26] feat: Add `StructDefinition::set_fields` (#5931) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/5911 ## Summary\* Adds a function to set the fields on a struct type ## Additional Context Hyper-specific error messages can be fun. Here's the error issued when one of the field names isn't a valid identifier: ``` error: Quoted value in index 1 of this slice is not a valid field name ┌─ src/main.nr:25:18 │ 25 │ s.set_fields(fields); │ ------ `quote { foo bar }` is not a valid field name for `set_fields` │ ``` ## Documentation\* Check one: - [ ] No documentation needed. - [x] Documentation included in this PR. - [ ] **[For Experimental Features]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
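
As an illustration of the hyper-specific error quoted above (a hypothetical snippet, not part of the diff): the name half of each `(Quoted, Type)` pair passed to `set_fields` must be a single identifier token, so a two-token quote is rejected with the slice index of the offending pair.

```rust
// Hypothetical attribute function: `quote { foo bar }` contains two
// tokens, so `set_fields` reports "`quote { foo bar }` is not a valid
// field name for `set_fields`" for index 0 of the slice.
comptime fn bad_mangle(s: StructDefinition) {
    let fields = &[
        (quote { foo bar }, quote { u32 }.as_type())
    ];
    s.set_fields(fields);
}
```

The documentation added later in this patch shows the well-formed counterpart, where each name is a single identifier such as `quote { a }`.
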
--------- Co-authored-by: Ary Borenszweig --- .../noirc_frontend/src/hir/comptime/errors.rs | 13 ++++ .../src/hir/comptime/interpreter/builtin.rs | 61 ++++++++++++++++++- compiler/noirc_frontend/src/hir_def/types.rs | 1 - .../noir/standard_library/meta/struct_def.md | 29 +++++++++ noir_stdlib/src/meta/struct_def.nr | 9 +++ .../comptime_type_definition/src/main.nr | 13 ++++ 6 files changed, 124 insertions(+), 2 deletions(-) diff --git a/compiler/noirc_frontend/src/hir/comptime/errors.rs b/compiler/noirc_frontend/src/hir/comptime/errors.rs index 48efc08f463..f6585786eeb 100644 --- a/compiler/noirc_frontend/src/hir/comptime/errors.rs +++ b/compiler/noirc_frontend/src/hir/comptime/errors.rs @@ -202,6 +202,11 @@ pub enum InterpreterError { TypeAnnotationsNeededForMethodCall { location: Location, }, + ExpectedIdentForStructField { + value: String, + index: usize, + location: Location, + }, // These cases are not errors, they are just used to prevent us from running more code // until the loop can be resumed properly. These cases will never be displayed to users. @@ -269,6 +274,7 @@ impl InterpreterError { | InterpreterError::FailedToResolveTraitBound { location, .. } | InterpreterError::FunctionAlreadyResolved { location, .. } | InterpreterError::MultipleMatchingImpls { location, .. } + | InterpreterError::ExpectedIdentForStructField { location, .. } | InterpreterError::TypeAnnotationsNeededForMethodCall { location } => *location, InterpreterError::FailedToParseMacro { error, file, .. } => { @@ -566,6 +572,13 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { error.add_note(message.to_string()); error } + InterpreterError::ExpectedIdentForStructField { value, index, location } => { + let msg = format!( + "Quoted value in index {index} of this slice is not a valid field name" + ); + let secondary = format!("`{value}` is not a valid field name for `set_fields`"); + CustomDiagnostic::simple_error(msg, secondary, location.span) + } } } } diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index d2c9e4ffc0c..65c9c3f018d 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -33,7 +33,7 @@ use crate::{ }, hir_def::function::FunctionBody, lexer::Lexer, - macros_api::{HirExpression, HirLiteral, ModuleDefId, NodeInterner, Signedness}, + macros_api::{HirExpression, HirLiteral, Ident, ModuleDefId, NodeInterner, Signedness}, node_interner::{DefinitionKind, TraitImplKind}, parser::{self}, token::Token, @@ -133,6 +133,7 @@ impl<'local, 'context> Interpreter<'local, 'context> { "struct_def_as_type" => struct_def_as_type(interner, arguments, location), "struct_def_fields" => struct_def_fields(interner, arguments, location), "struct_def_generics" => struct_def_generics(interner, arguments, location), + "struct_def_set_fields" => struct_def_set_fields(interner, arguments, location), "to_le_radix" => to_le_radix(arguments, return_type, location), "trait_constraint_eq" => trait_constraint_eq(interner, arguments, location), "trait_constraint_hash" => trait_constraint_hash(interner, arguments, location), @@ -326,6 +327,64 @@ fn struct_def_fields( Ok(Value::Slice(fields, typ)) } +/// fn set_fields(self, new_fields: [(Quoted, Type)]) {} +/// Returns (name, type) pairs of each field of this StructDefinition +fn struct_def_set_fields( + interner: &mut NodeInterner, + arguments: Vec<(Value, Location)>, + location: Location, +) 
-> IResult { + let (the_struct, fields) = check_two_arguments(arguments, location)?; + let struct_id = get_struct(the_struct)?; + + let struct_def = interner.get_struct(struct_id); + let mut struct_def = struct_def.borrow_mut(); + + let field_location = fields.1; + let fields = get_slice(interner, fields)?.0; + + let new_fields = fields + .into_iter() + .flat_map(|field_pair| get_tuple(interner, (field_pair, field_location))) + .enumerate() + .map(|(index, mut field_pair)| { + if field_pair.len() == 2 { + let typ = field_pair.pop().unwrap(); + let name_value = field_pair.pop().unwrap(); + + let name_tokens = get_quoted((name_value.clone(), field_location))?; + let typ = get_type((typ, field_location))?; + + match name_tokens.first() { + Some(Token::Ident(name)) if name_tokens.len() == 1 => { + Ok((Ident::new(name.clone(), field_location.span), typ)) + } + _ => { + let value = name_value.display(interner).to_string(); + let location = field_location; + Err(InterpreterError::ExpectedIdentForStructField { + value, + index, + location, + }) + } + } + } else { + let type_var = interner.next_type_variable(); + let expected = Type::Tuple(vec![type_var.clone(), type_var]); + + let actual = + Type::Tuple(vecmap(&field_pair, |value| value.get_type().into_owned())); + + Err(InterpreterError::TypeMismatch { expected, actual, location }) + } + }) + .collect::, _>>()?; + + struct_def.set_fields(new_fields); + Ok(Value::Unit) +} + fn slice_remove( interner: &mut NodeInterner, arguments: Vec<(Value, Location)>, diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index 7b3d0d7a205..113a4fb3888 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -259,7 +259,6 @@ impl StructType { /// created. Therefore, this method is used to set the fields once they /// become known. pub fn set_fields(&mut self, fields: Vec<(Ident, Type)>) { - assert!(self.fields.is_empty()); self.fields = fields; } diff --git a/docs/docs/noir/standard_library/meta/struct_def.md b/docs/docs/noir/standard_library/meta/struct_def.md index ab3ea4e0698..95e377dffd4 100644 --- a/docs/docs/noir/standard_library/meta/struct_def.md +++ b/docs/docs/noir/standard_library/meta/struct_def.md @@ -43,3 +43,32 @@ comptime fn example(foo: StructDefinition) { #include_code fields noir_stdlib/src/meta/struct_def.nr rust Returns each field of this struct as a pair of (field name, field type). + +### set_fields + +#include_code set_fields noir_stdlib/src/meta/struct_def.nr rust + +Sets the fields of this struct to the given fields list where each element +is a pair of the field's name and the field's type. Expects each field name +to be a single identifier. Note that this will override any previous fields +on this struct. If those should be preserved, use `.fields()` to retrieve the +current fields on the struct type and append the new fields from there. 
+ +Example: + +```rust +// Change this struct to: +// struct Foo { +// a: u32, +// b: i8, +// } +#[mangle_fields] +struct Foo { x: Field } + +comptime fn mangle_fields(s: StructDefinition) { + s.set_fields(&[ + (quote { a }, quote { u32 }.as_type()), + (quote { b }, quote { i8 }.as_type()), + ]); +} +``` diff --git a/noir_stdlib/src/meta/struct_def.nr b/noir_stdlib/src/meta/struct_def.nr index 60fdeba21aa..1ca1b6a3925 100644 --- a/noir_stdlib/src/meta/struct_def.nr +++ b/noir_stdlib/src/meta/struct_def.nr @@ -18,4 +18,13 @@ impl StructDefinition { // docs:start:fields fn fields(self) -> [(Quoted, Type)] {} // docs:end:fields + + /// Sets the fields of this struct to the given fields list. + /// All existing fields of the struct will be overridden with the given fields. + /// Each element of the fields list corresponds to the name and type of a field. + /// Each name is expected to be a single identifier. + #[builtin(struct_def_set_fields)] + // docs:start:set_fields + fn set_fields(self, new_fields: [(Quoted, Type)]) {} + // docs:end:set_fields } diff --git a/test_programs/compile_success_empty/comptime_type_definition/src/main.nr b/test_programs/compile_success_empty/comptime_type_definition/src/main.nr index cdfc9bd6b75..aca8d067dde 100644 --- a/test_programs/compile_success_empty/comptime_type_definition/src/main.nr +++ b/test_programs/compile_success_empty/comptime_type_definition/src/main.nr @@ -6,8 +6,21 @@ struct MyType { field2: (B, C), } +#[mutate_struct_fields] +struct I32AndField { + z: i8, +} + comptime fn my_comptime_fn(typ: StructDefinition) { let _ = typ.as_type(); assert_eq(typ.generics().len(), 3); assert_eq(typ.fields().len(), 2); } + +comptime fn mutate_struct_fields(s: StructDefinition) { + let fields = &[ + (quote[x], quote[i32].as_type()), + (quote[y], quote[Field].as_type()) + ]; + s.set_fields(fields); +}