diff --git a/src/base/ModuleEnv.zig b/src/base/ModuleEnv.zig index 2cf883aae1..51eae21e44 100644 --- a/src/base/ModuleEnv.zig +++ b/src/base/ModuleEnv.zig @@ -20,6 +20,12 @@ idents: Ident.Store = .{}, ident_ids_for_slicing: collections.SafeList(Ident.Idx), strings: StringLiteral.Store, types: types_mod.Store, +/// Map of exposed items by their string representation (not interned) +/// This is built during canonicalization and preserved for later use +exposed_by_str: std.StringHashMapUnmanaged(void) = .{}, +/// Map of exposed item names to their CIR node indices (stored as u16) +/// This is populated during canonicalization to allow cross-module lookups +exposed_nodes: std.StringHashMapUnmanaged(u16) = .{}, /// Line starts for error reporting. We retain only start and offset positions in the IR /// and then use these line starts to calculate the line number and column number as required. @@ -47,6 +53,8 @@ pub fn deinit(self: *Self) void { self.strings.deinit(self.gpa); self.types.deinit(); self.line_starts.deinit(); + self.exposed_by_str.deinit(self.gpa); + self.exposed_nodes.deinit(self.gpa); } /// Calculate and store line starts from the source text diff --git a/src/check/canonicalize.zig b/src/check/canonicalize.zig index 2e3f5891aa..f45635c48d 100644 --- a/src/check/canonicalize.zig +++ b/src/check/canonicalize.zig @@ -18,6 +18,12 @@ const Token = tokenize.Token; can_ir: *CIR, parse_ir: *AST, scopes: std.ArrayListUnmanaged(Scope) = .{}, +/// Special scope for tracking exposed items from module header +exposed_scope: Scope = undefined, +/// Track exposed identifiers by text to handle changing indices +exposed_ident_texts: std.StringHashMapUnmanaged(Region) = .{}, +/// Track exposed types by text to handle changing indices +exposed_type_texts: std.StringHashMapUnmanaged(Region) = .{}, /// Stack of function regions for tracking var reassignment across function boundaries function_regions: std.ArrayListUnmanaged(Region), /// Maps var patterns to the function region 
they were declared in @@ -26,6 +32,10 @@ var_function_regions: std.AutoHashMapUnmanaged(CIR.Pattern.Idx, Region), var_patterns: std.AutoHashMapUnmanaged(CIR.Pattern.Idx, void), /// Tracks which pattern indices have been used/referenced used_patterns: std.AutoHashMapUnmanaged(CIR.Pattern.Idx, void), +/// Map of module name strings to their ModuleEnv pointers for import validation +module_envs: ?*const std.StringHashMap(*ModuleEnv), +/// Map from module name string to Import.Idx for tracking unique imports +import_indices: std.StringHashMapUnmanaged(CIR.Import.Idx), /// Scratch type variables scratch_vars: base.Scratch(TypeVar), /// Scratch ident @@ -83,13 +93,15 @@ pub fn deinit( ) void { const gpa = self.can_ir.env.gpa; - // First deinit individual scopes + self.exposed_scope.deinit(gpa); + self.exposed_ident_texts.deinit(gpa); + self.exposed_type_texts.deinit(gpa); + for (0..self.scopes.items.len) |i| { var scope = &self.scopes.items[i]; scope.deinit(gpa); } - // Then deinit the collections self.scopes.deinit(gpa); self.function_regions.deinit(gpa); self.var_function_regions.deinit(gpa); @@ -99,9 +111,10 @@ pub fn deinit( self.scratch_idents.deinit(gpa); self.scratch_record_fields.deinit(gpa); self.scratch_seen_record_fields.deinit(gpa); + self.import_indices.deinit(gpa); } -pub fn init(self: *CIR, parse_ir: *AST) std.mem.Allocator.Error!Self { +pub fn init(self: *CIR, parse_ir: *AST, module_envs: ?*const std.StringHashMap(*ModuleEnv)) std.mem.Allocator.Error!Self { const gpa = self.env.gpa; // Create the canonicalizer with scopes @@ -113,10 +126,13 @@ pub fn init(self: *CIR, parse_ir: *AST) std.mem.Allocator.Error!Self { .var_function_regions = std.AutoHashMapUnmanaged(CIR.Pattern.Idx, Region){}, .var_patterns = std.AutoHashMapUnmanaged(CIR.Pattern.Idx, void){}, .used_patterns = std.AutoHashMapUnmanaged(CIR.Pattern.Idx, void){}, + .module_envs = module_envs, + .import_indices = std.StringHashMapUnmanaged(CIR.Import.Idx){}, .scratch_vars = 
base.Scratch(TypeVar).init(gpa), .scratch_idents = base.Scratch(Ident.Idx).init(gpa), .scratch_record_fields = base.Scratch(types.RecordField).init(gpa), .scratch_seen_record_fields = base.Scratch(SeenRecordField).init(gpa), + .exposed_scope = Scope.init(false), }; // Top-level scope is not a function boundary @@ -311,6 +327,22 @@ pub fn canonicalizeFile( // canonicalize_header_packages(); + // First, process the header to create exposed_scope + const header = self.parse_ir.store.getHeader(file.header); + switch (header) { + .module => |h| try self.createExposedScope(h.exposes), + .package => |h| try self.createExposedScope(h.exposes), + .platform => |h| try self.createExposedScope(h.exposes), + .hosted => |h| try self.createExposedScope(h.exposes), + .app => { + // App headers have 'provides' instead of 'exposes' + // TODO: Handle app provides differently + }, + .malformed => { + // Skip malformed headers + }, + } + // Track the start of scratch defs and statements const scratch_defs_start = self.can_ir.store.scratchDefTop(); const scratch_statements_start = self.can_ir.store.scratch_statements.top(); @@ -325,7 +357,7 @@ pub fn canonicalizeFile( const region = self.parse_ir.tokenizedRegionToRegion(type_decl.region); // Extract the type name from the header to introduce it into scope early - const header = self.can_ir.store.getTypeHeader(header_idx); + const type_header = self.can_ir.store.getTypeHeader(header_idx); // Create a placeholder type declaration statement to introduce the type name into scope // This allows recursive type references to work during annotation canonicalization @@ -353,7 +385,7 @@ pub fn canonicalizeFile( const placeholder_type_decl_idx = self.can_ir.store.addStatement(placeholder_cir_type_decl); // Introduce the type name into scope early to support recursive references - self.scopeIntroduceTypeDecl(header.name, placeholder_type_decl_idx, region); + self.scopeIntroduceTypeDecl(type_header.name, placeholder_type_decl_idx, region); // 
Process type parameters and annotation in a separate scope const anno_idx = blk: { @@ -379,14 +411,14 @@ pub fn canonicalizeFile( // Create types for each arg annotation const scratch_anno_start = self.scratch_vars.top(); - for (self.can_ir.store.sliceTypeAnnos(header.args)) |arg_anno_idx| { + for (self.can_ir.store.sliceTypeAnnos(type_header.args)) |arg_anno_idx| { const arg_anno_var = try self.canonicalizeTypeAnnoToTypeVar(arg_anno_idx); self.scratch_vars.append(self.can_ir.env.gpa, arg_anno_var); } const arg_anno_slice = self.scratch_vars.slice(scratch_anno_start, self.scratch_vars.top()); // The identified of the type - const type_ident = types.TypeIdent{ .ident_idx = header.name }; + const type_ident = types.TypeIdent{ .ident_idx = type_header.name }; // The number of args for the alias/nominal type const num_args = @as(u32, @intCast(arg_anno_slice.len)); @@ -466,7 +498,11 @@ pub fn canonicalizeFile( self.scratch_vars.clearFrom(scratch_anno_start); // Update the scope to point to the real statement instead of the placeholder - self.scopeUpdateTypeDecl(header.name, type_decl_stmt_idx); + self.scopeUpdateTypeDecl(type_header.name, type_decl_stmt_idx); + + // Remove from exposed_type_texts since the type is now fully defined + const type_text = self.can_ir.env.idents.getText(type_header.name); + _ = self.exposed_type_texts.remove(type_text); }, else => { // Skip non-type-declaration statements in first pass @@ -512,6 +548,23 @@ pub fn canonicalizeFile( const def_idx = try self.canonicalizeDeclWithAnnotation(decl, annotation_idx); self.can_ir.store.addScratchDef(def_idx); last_type_anno = null; // Clear after successful use + + // If this declaration successfully defined an exposed value, remove it from exposed_ident_texts + // and add it to exposed_nodes + const pattern = self.parse_ir.store.getPattern(decl.pattern); + if (pattern == .ident) { + const token_region = self.parse_ir.tokens.resolve(@intCast(pattern.ident.ident_tok)); + const ident_text = 
self.parse_ir.source[token_region.start.offset..token_region.end.offset]; + + // If this identifier is exposed, add it to exposed_nodes + if (self.exposed_ident_texts.contains(ident_text)) { + // Store the def index as u16 in exposed_nodes + const def_idx_u16: u16 = @intCast(@intFromEnum(def_idx)); + self.can_ir.env.exposed_nodes.put(self.can_ir.env.gpa, ident_text, def_idx_u16) catch |err| exitOnOom(err); + } + + _ = self.exposed_ident_texts.remove(ident_text); + } }, .@"var" => |var_stmt| { // Not valid at top-level @@ -694,48 +747,138 @@ pub fn canonicalizeFile( } } - // Get the header and canonicalize exposes based on header type - const header = self.parse_ir.store.getHeader(file.header); - switch (header) { - .module => |h| self.canonicalizeHeaderExposes(h.exposes), - .package => |h| self.canonicalizeHeaderExposes(h.exposes), - .platform => |h| self.canonicalizeHeaderExposes(h.exposes), - .hosted => |h| self.canonicalizeHeaderExposes(h.exposes), - .app => { - // App headers have 'provides' instead of 'exposes' - // TODO: Handle app provides differently - }, - .malformed => { - // Skip malformed headers - }, - } + // Check for exposed but not implemented items + self.checkExposedButNotImplemented(); // Create the span of all top-level defs and statements self.can_ir.all_defs = self.can_ir.store.defSpanFrom(scratch_defs_start); self.can_ir.all_statements = self.can_ir.store.statementSpanFrom(scratch_statements_start); } -fn canonicalizeHeaderExposes( +fn createExposedScope( self: *Self, exposes: AST.Collection.Idx, -) void { +) std.mem.Allocator.Error!void { + const gpa = self.can_ir.env.gpa; + + // Reset exposed_scope (already initialized in init) + self.exposed_scope.deinit(gpa); + self.exposed_scope = Scope.init(false); + const collection = self.parse_ir.store.getCollection(exposes); const exposed_items = self.parse_ir.store.exposedItemSlice(.{ .span = collection.span }); + // Check if we have too many exports (>= maxInt(u16) to reserve 0 as potential 
sentinel) + if (exposed_items.len >= std.math.maxInt(u16)) { + const region = self.parse_ir.tokenizedRegionToRegion(collection.region); + self.can_ir.pushDiagnostic(CIR.Diagnostic{ .too_many_exports = .{ + .count = @intCast(exposed_items.len), + .region = region, + } }); + return; + } + for (exposed_items) |exposed_idx| { const exposed = self.parse_ir.store.getExposedItem(exposed_idx); switch (exposed) { .lower_ident => |ident| { - // TODO -- do we need a Pattern for "exposed_lower" identifiers? - _ = ident; + // Get the text of the identifier token to use as key + const token_region = self.parse_ir.tokens.resolve(@intCast(ident.ident)); + const ident_text = self.parse_ir.source[token_region.start.offset..token_region.end.offset]; + + // Add to exposed_by_str for permanent storage (unconditionally) + self.can_ir.env.exposed_by_str.put(gpa, ident_text, {}) catch |err| collections.utils.exitOnOom(err); + + // Also build exposed_scope with proper identifiers + if (self.parse_ir.tokens.resolveIdentifier(ident.ident)) |ident_idx| { + // Use a dummy pattern index - we just need to track that it's exposed + const dummy_idx = @as(CIR.Pattern.Idx, @enumFromInt(0)); + self.exposed_scope.put(gpa, .ident, ident_idx, dummy_idx); + } + + // Store by text in a temporary hash map, since indices may change + const region = self.parse_ir.tokenizedRegionToRegion(ident.region); + + // Check if this identifier was already exposed + if (self.exposed_ident_texts.get(ident_text)) |original_region| { + // Report redundant exposed entry error + if (self.parse_ir.tokens.resolveIdentifier(ident.ident)) |ident_idx| { + const diag = CIR.Diagnostic{ .redundant_exposed = .{ + .ident = ident_idx, + .region = region, + .original_region = original_region, + } }; + self.can_ir.pushDiagnostic(diag); + } + } else { + self.exposed_ident_texts.put(gpa, ident_text, region) catch |err| collections.utils.exitOnOom(err); + } }, .upper_ident => |type_name| { - // TODO -- do we need a Pattern for 
"exposed_upper" identifiers? - _ = type_name; + // Get the text of the identifier token to use as key + const token_region = self.parse_ir.tokens.resolve(@intCast(type_name.ident)); + const type_text = self.parse_ir.source[token_region.start.offset..token_region.end.offset]; + + // Add to exposed_by_str for permanent storage (unconditionally) + self.can_ir.env.exposed_by_str.put(gpa, type_text, {}) catch |err| collections.utils.exitOnOom(err); + + // Also build exposed_scope with proper identifiers + if (self.parse_ir.tokens.resolveIdentifier(type_name.ident)) |ident_idx| { + // Use a dummy statement index - we just need to track that it's exposed + const dummy_idx = @as(CIR.Statement.Idx, @enumFromInt(0)); + self.exposed_scope.put(gpa, .type_decl, ident_idx, dummy_idx); + } + + // Store by text in a temporary hash map, since indices may change + const region = self.parse_ir.tokenizedRegionToRegion(type_name.region); + + // Check if this type was already exposed + if (self.exposed_type_texts.get(type_text)) |original_region| { + // Report redundant exposed entry error + if (self.parse_ir.tokens.resolveIdentifier(type_name.ident)) |ident_idx| { + const diag = CIR.Diagnostic{ .redundant_exposed = .{ + .ident = ident_idx, + .region = region, + .original_region = original_region, + } }; + self.can_ir.pushDiagnostic(diag); + } + } else { + self.exposed_type_texts.put(gpa, type_text, region) catch |err| collections.utils.exitOnOom(err); + } }, .upper_ident_star => |type_with_constructors| { - // TODO -- do we need a Pattern for "exposed_upper_star" identifiers? 
- _ = type_with_constructors; + // Get the text of the identifier token to use as key + const token_region = self.parse_ir.tokens.resolve(@intCast(type_with_constructors.ident)); + const type_text = self.parse_ir.source[token_region.start.offset..token_region.end.offset]; + + // Add to exposed_by_str for permanent storage (unconditionally) + self.can_ir.env.exposed_by_str.put(gpa, type_text, {}) catch |err| collections.utils.exitOnOom(err); + + // Also build exposed_scope with proper identifiers + if (self.parse_ir.tokens.resolveIdentifier(type_with_constructors.ident)) |ident_idx| { + // Use a dummy statement index - we just need to track that it's exposed + const dummy_idx = @as(CIR.Statement.Idx, @enumFromInt(0)); + self.exposed_scope.put(gpa, .type_decl, ident_idx, dummy_idx); + } + + // Store by text in a temporary hash map, since indices may change + const region = self.parse_ir.tokenizedRegionToRegion(type_with_constructors.region); + + // Check if this type was already exposed + if (self.exposed_type_texts.get(type_text)) |original_region| { + // Report redundant exposed entry error + if (self.parse_ir.tokens.resolveIdentifier(type_with_constructors.ident)) |ident_idx| { + const diag = CIR.Diagnostic{ .redundant_exposed = .{ + .ident = ident_idx, + .region = region, + .original_region = original_region, + } }; + self.can_ir.pushDiagnostic(diag); + } + } else { + self.exposed_type_texts.put(gpa, type_text, region) catch |err| collections.utils.exitOnOom(err); + } }, .malformed => |malformed| { // Malformed exposed items are already captured as diagnostics during parsing @@ -745,6 +888,41 @@ fn canonicalizeHeaderExposes( } } +fn checkExposedButNotImplemented(self: *Self) void { + const gpa = self.can_ir.env.gpa; + + // Check for remaining exposed identifiers + var ident_iter = self.exposed_ident_texts.iterator(); + while (ident_iter.next()) |entry| { + const ident_text = entry.key_ptr.*; + const region = entry.value_ptr.*; + // Create an identifier for error 
reporting + const ident_idx = self.can_ir.env.idents.insert(gpa, base.Ident.for_text(ident_text), region); + + // Report error: exposed identifier but not implemented + const diag = CIR.Diagnostic{ .exposed_but_not_implemented = .{ + .ident = ident_idx, + .region = region, + } }; + self.can_ir.pushDiagnostic(diag); + } + + // Check for remaining exposed types + var iter = self.exposed_type_texts.iterator(); + while (iter.next()) |entry| { + const type_text = entry.key_ptr.*; + const region = entry.value_ptr.*; + // Create an identifier for error reporting + const ident_idx = self.can_ir.env.idents.insert(gpa, base.Ident.for_text(type_text), region); + + // Report error: exposed type but not implemented + self.can_ir.pushDiagnostic(CIR.Diagnostic{ .exposed_but_not_implemented = .{ + .ident = ident_idx, + .region = region, + } }); + } +} + fn bringImportIntoScope( self: *Self, import: *const AST.Statement, @@ -897,17 +1075,25 @@ fn canonicalizeImportStatement( // 2. Determine the alias (either explicit or default to last part) const alias = self.resolveModuleAlias(import_stmt.alias_tok, module_name) orelse return null; - // 3. Add to scope: alias -> module_name mapping + // 3. Get or create Import.Idx for this module + const module_name_text = self.can_ir.env.idents.getText(module_name); + const module_import_idx = self.can_ir.imports.getOrPut(self.can_ir.env.gpa, module_name_text) catch |err| exitOnOom(err); + + // 4. Add to scope: alias -> module_name mapping self.scopeIntroduceModuleAlias(alias, module_name); // Process type imports from this module self.processTypeImports(module_name, alias); - // 4. Convert exposed items and introduce them into scope + // 5. 
Convert exposed items and introduce them into scope const cir_exposes = self.convertASTExposesToCIR(import_stmt.exposes); - self.introduceExposedItemsIntoScope(cir_exposes, module_name); + const import_region = self.parse_ir.tokenizedRegionToRegion(import_stmt.region); + self.introduceExposedItemsIntoScope(cir_exposes, module_name, import_region); + + // 6. Store the mapping from module name to Import.Idx + self.import_indices.put(self.can_ir.env.gpa, module_name_text, module_import_idx) catch |err| exitOnOom(err); - // 5. Create CIR import statement + // 7. Create CIR import statement const cir_import = CIR.Statement{ .s_import = .{ .module_name_tok = module_name, @@ -920,6 +1106,11 @@ fn canonicalizeImportStatement( const import_idx = self.can_ir.store.addStatement(cir_import); self.can_ir.store.addScratchStatement(import_idx); + + // 8. Add the module to the current scope so it can be used in qualified lookups + const current_scope = self.currentScope(); + _ = current_scope.introduceImportedModule(self.can_ir.env.gpa, module_name_text, module_import_idx); + return import_idx; } @@ -1037,24 +1228,74 @@ fn introduceExposedItemsIntoScope( self: *Self, exposed_items_span: CIR.ExposedItem.Span, module_name: Ident.Idx, + import_region: Region, ) void { const exposed_items_slice = self.can_ir.store.sliceExposedItems(exposed_items_span); - for (exposed_items_slice) |exposed_item_idx| { - const exposed_item = self.can_ir.store.getExposedItem(exposed_item_idx); + // If we have module_envs, validate the imports + if (self.module_envs) |envs_map| { + const module_name_text = self.can_ir.env.idents.getText(module_name); - // Use the alias if provided, otherwise use the original name for the local lookup - const item_name = exposed_item.alias orelse exposed_item.name; + // Check if the module exists + if (!envs_map.contains(module_name_text)) { + // Module not found - create diagnostic + self.can_ir.pushDiagnostic(CIR.Diagnostic{ .module_not_found = .{ + .module_name = 
module_name, + .region = import_region, + } }); + return; + } - // Create the exposed item info with module name and original name - const item_info = Scope.ExposedItemInfo{ - .module_name = module_name, - .original_name = exposed_item.name, // Always use the original name for module lookup - }; + // Get the module's exposed_by_str map + const module_env = envs_map.get(module_name_text).?; + + // Validate each exposed item + for (exposed_items_slice) |exposed_item_idx| { + const exposed_item = self.can_ir.store.getExposedItem(exposed_item_idx); + const item_name_text = self.can_ir.env.idents.getText(exposed_item.name); + + // Check if the item is exposed by the module + if (!module_env.exposed_by_str.contains(item_name_text)) { + // Determine if it's a type or value based on capitalization + const first_char = item_name_text[0]; + + if (first_char >= 'A' and first_char <= 'Z') { + // Type not exposed + self.can_ir.pushDiagnostic(CIR.Diagnostic{ .type_not_exposed = .{ + .module_name = module_name, + .type_name = exposed_item.name, + .region = import_region, + } }); + } else { + // Value not exposed + self.can_ir.pushDiagnostic(CIR.Diagnostic{ .value_not_exposed = .{ + .module_name = module_name, + .value_name = exposed_item.name, + .region = import_region, + } }); + } + continue; // Skip introducing this item to scope + } - // Introduce the exposed item into scope - // This allows `decode` to resolve to `json.Json.decode` - self.scopeIntroduceExposedItem(item_name, item_info); + // Item is valid, introduce it to scope + const item_name = exposed_item.alias orelse exposed_item.name; + const item_info = Scope.ExposedItemInfo{ + .module_name = module_name, + .original_name = exposed_item.name, + }; + self.scopeIntroduceExposedItem(item_name, item_info); + } + } else { + // No module_envs provided, introduce all items without validation + for (exposed_items_slice) |exposed_item_idx| { + const exposed_item = self.can_ir.store.getExposedItem(exposed_item_idx); + const 
item_name = exposed_item.alias orelse exposed_item.name; + const item_info = Scope.ExposedItemInfo{ + .module_name = module_name, + .original_name = exposed_item.name, + }; + self.scopeIntroduceExposedItem(item_name, item_info); + } } } @@ -1343,30 +1584,33 @@ pub fn canonicalizeExpr( // Check if this is a module alias if (self.scopeLookupModule(module_alias)) |module_name| { // This is a module-qualified lookup - // Create qualified name for external declaration const module_text = self.can_ir.env.idents.getText(module_name); - const field_text = self.can_ir.env.idents.getText(ident); - - // Allocate space for qualified name - const qualified_text = std.fmt.allocPrint(self.can_ir.env.gpa, "{s}.{s}", .{ module_text, field_text }) catch |err| collections.utils.exitOnOom(err); - defer self.can_ir.env.gpa.free(qualified_text); - const qualified_name = self.can_ir.env.idents.insert(self.can_ir.env.gpa, base.Ident.for_text(qualified_text), Region.zero()); - - // Create external declaration - const external_decl = CIR.ExternalDecl{ - .qualified_name = qualified_name, - .module_name = module_name, - .local_name = ident, - .type_var = self.can_ir.pushFreshTypeVar(@enumFromInt(0), region) catch |err| exitOnOom(err), - .kind = .value, - .region = region, + // Check if this module is imported in the current scope + const import_idx = self.scopeLookupImportedModule(module_text) orelse { + // Module not imported in current scope + return self.can_ir.pushMalformed(CIR.Expr.Idx, CIR.Diagnostic{ .module_not_imported = .{ + .module_name = module_name, + .region = region, + } }); }; - const external_idx = self.can_ir.pushExternalDecl(external_decl); + // Look up the target node index in the module's exposed_nodes + const field_text = self.can_ir.env.idents.getText(ident); + const target_node_idx = if (self.module_envs) |envs_map| blk: { + if (envs_map.get(module_text)) |module_env| { + break :blk module_env.exposed_nodes.get(field_text) orelse 0; + } else { + break :blk 0; + } + } 
else 0; - // Create lookup expression for external declaration - const expr_idx = self.can_ir.store.addExpr(CIR.Expr{ .e_lookup_external = external_idx }); + // Create the e_lookup_external expression with Import.Idx + const expr_idx = self.can_ir.store.addExpr(CIR.Expr{ .e_lookup_external = .{ + .module_idx = import_idx, + .target_node_idx = target_node_idx, + .region = region, + } }); _ = self.can_ir.setTypeVarAtExpr(expr_idx, Content{ .flex_var = null }); return expr_idx; } @@ -1394,23 +1638,32 @@ pub fn canonicalizeExpr( .not_found => { // Check if this identifier is an exposed item from an import if (self.scopeLookupExposedItem(ident)) |exposed_info| { - // Create qualified name using the original name, not the alias - const qualified_name = self.createQualifiedName(exposed_info.module_name, exposed_info.original_name); - - // Create external declaration for the exposed item - const external_decl = CIR.ExternalDecl{ - .qualified_name = qualified_name, - .module_name = exposed_info.module_name, - .local_name = ident, - .type_var = self.can_ir.pushFreshTypeVar(@enumFromInt(0), region) catch |err| exitOnOom(err), - .kind = .value, - .region = region, + // Get the Import.Idx for the module this item comes from + const module_text = self.can_ir.env.idents.getText(exposed_info.module_name); + const import_idx = self.scopeLookupImportedModule(module_text) orelse { + // This shouldn't happen if imports are properly tracked, but handle it gracefully + return self.can_ir.pushMalformed(CIR.Expr.Idx, CIR.Diagnostic{ .module_not_imported = .{ + .module_name = exposed_info.module_name, + .region = region, + } }); }; - const external_idx = self.can_ir.pushExternalDecl(external_decl); + // Look up the target node index in the module's exposed_nodes + const field_text = self.can_ir.env.idents.getText(exposed_info.original_name); + const target_node_idx = if (self.module_envs) |envs_map| blk: { + if (envs_map.get(module_text)) |module_env| { + break :blk 
module_env.exposed_nodes.get(field_text) orelse 0; + } else { + break :blk 0; + } + } else 0; - // Create lookup expression for external declaration - const expr_idx = self.can_ir.store.addExpr(CIR.Expr{ .e_lookup_external = external_idx }); + // Create the e_lookup_external expression with Import.Idx + const expr_idx = self.can_ir.store.addExpr(CIR.Expr{ .e_lookup_external = .{ + .module_idx = import_idx, + .target_node_idx = target_node_idx, + .region = region, + } }); _ = self.can_ir.setTypeVarAtExpr(expr_idx, Content{ .flex_var = null }); return expr_idx; } @@ -2488,14 +2741,15 @@ fn canonicalizePattern( .ident => |e| { const region = self.parse_ir.tokenizedRegionToRegion(e.region); if (self.parse_ir.tokens.resolveIdentifier(e.ident_tok)) |ident_idx| { - // Push a Pattern node for our identifier - const assign_idx = try self.can_ir.store.addPattern(CIR.Pattern{ .assign = .{ + // Create a Pattern node for our identifier + const pattern_idx = try self.can_ir.store.addPattern(CIR.Pattern{ .assign = .{ .ident = ident_idx, } }, region); - _ = self.can_ir.setTypeVarAtPat(assign_idx, .{ .flex_var = null }); + + _ = self.can_ir.setTypeVarAtPat(pattern_idx, .{ .flex_var = null }); // Introduce the identifier into scope mapping to this pattern node - switch (self.scopeIntroduceInternal(self.can_ir.env.gpa, &self.can_ir.env.idents, .ident, ident_idx, assign_idx, false, true)) { + switch (self.scopeIntroduceInternal(self.can_ir.env.gpa, &self.can_ir.env.idents, .ident, ident_idx, pattern_idx, false, true)) { .success => {}, .shadowing_warning => |shadowed_pattern_idx| { const original_region = self.can_ir.store.getPatternRegion(shadowed_pattern_idx); @@ -2521,7 +2775,7 @@ fn canonicalizePattern( }, } - return assign_idx; + return pattern_idx; } else { const feature = self.can_ir.env.strings.insert(self.can_ir.env.gpa, "report an error when unable to resolve identifier"); const malformed_idx = self.can_ir.pushMalformed(CIR.Pattern.Idx, CIR.Diagnostic{ .not_implemented = .{ 
@@ -3350,6 +3604,7 @@ test { _ = @import("canonicalize/test/int_test.zig"); _ = @import("canonicalize/test/frac_test.zig"); _ = @import("canonicalize/test/node_store_test.zig"); + _ = @import("canonicalize/test/exposed_shadowing_test.zig"); _ = @import("let_polymorphism_integration_test.zig"); } @@ -5023,6 +5278,23 @@ fn scopeLookupExposedItemInParentScopes(self: *const Self, item_name: Ident.Idx) return null; } +/// Look up an imported module in the scope hierarchy +fn scopeLookupImportedModule(self: *const Self, module_name: []const u8) ?CIR.Import.Idx { + // Search from innermost to outermost scope + var i = self.scopes.items.len; + while (i > 0) { + i -= 1; + const scope = &self.scopes.items[i]; + + switch (scope.lookupImportedModule(module_name)) { + .found => |import_idx| return import_idx, + .not_found => continue, + } + } + + return null; +} + /// Extract the module name from a full qualified name (e.g., "Json" from "json.Json") fn extractModuleName(self: *Self, module_name_ident: Ident.Idx) Ident.Idx { const module_text = self.can_ir.env.idents.getText(module_name_ident); @@ -5585,6 +5857,18 @@ fn tryModuleQualifiedLookup(self: *Self, field_access: AST.BinOp) ?CIR.Expr.Idx // Check if this is a module alias const module_name = self.scopeLookupModule(module_alias) orelse return null; + const module_text = self.can_ir.env.idents.getText(module_name); + + // Check if this module is imported in the current scope + const import_idx = self.scopeLookupImportedModule(module_text) orelse { + // Module not imported in current scope + const region = self.parse_ir.tokenizedRegionToRegion(field_access.region); + _ = self.can_ir.pushMalformed(CIR.Expr.Idx, CIR.Diagnostic{ .module_not_imported = .{ + .module_name = module_name, + .region = region, + } }); + return null; + }; // This is a module-qualified lookup const right_expr = self.parse_ir.store.getExpr(field_access.right); @@ -5593,45 +5877,24 @@ fn tryModuleQualifiedLookup(self: *Self, field_access: AST.BinOp) 
?CIR.Expr.Idx const right_ident = right_expr.ident; const field_name = self.parse_ir.tokens.resolveIdentifier(right_ident.token) orelse return null; - // Create qualified name by slicing from original source text - // The field_access region covers the entire "Module.field" span const region = self.parse_ir.tokenizedRegionToRegion(field_access.region); - const source_text = self.parse_ir.source[region.start.offset..region.end.offset]; - - const qualified_name = if (base.Ident.from_bytes(source_text)) |valid_ident| - self.can_ir.env.idents.insert(self.can_ir.env.gpa, valid_ident, region) - else |err| blk: { - // Invalid qualified name - create diagnostic and use placeholder - const error_msg = switch (err) { - base.Ident.Error.EmptyText => "malformed qualified name is empty", - base.Ident.Error.ContainsNullByte => "malformed qualified name contains null bytes", - base.Ident.Error.ContainsControlCharacters => "malformed qualified name contains invalid control characters", - }; - const feature = self.can_ir.env.strings.insert(self.can_ir.env.gpa, error_msg); - self.can_ir.pushDiagnostic(CIR.Diagnostic{ .not_implemented = .{ - .feature = feature, - .region = region, - } }); - // Use a placeholder identifier instead - const placeholder_text = "MALFORMED_QUALIFIED_NAME"; - break :blk self.can_ir.env.idents.insert(self.can_ir.env.gpa, base.Ident.for_text(placeholder_text), region); - }; + // Look up the target node index in the module's exposed_nodes + const field_text = self.can_ir.env.idents.getText(field_name); + const target_node_idx = if (self.module_envs) |envs_map| blk: { + if (envs_map.get(module_text)) |module_env| { + break :blk module_env.exposed_nodes.get(field_text) orelse 0; + } else { + break :blk 0; + } + } else 0; - // Create external declaration - const external_decl = CIR.ExternalDecl{ - .qualified_name = qualified_name, - .module_name = module_name, - .local_name = field_name, - .type_var = self.can_ir.pushFreshTypeVar(@enumFromInt(0), region) catch 
|err| exitOnOom(err), - .kind = .value, + // Create the e_lookup_external expression with Import.Idx + const expr_idx = self.can_ir.store.addExpr(CIR.Expr{ .e_lookup_external = .{ + .module_idx = import_idx, + .target_node_idx = target_node_idx, .region = region, - }; - - const external_idx = self.can_ir.pushExternalDecl(external_decl); - - // Create lookup expression for external declaration - const expr_idx = self.can_ir.store.addExpr(CIR.Expr{ .e_lookup_external = external_idx }); + } }); _ = self.can_ir.setTypeVarAtExpr(expr_idx, Content{ .flex_var = null }); return expr_idx; } @@ -5772,7 +6035,7 @@ const ScopeTestContext = struct { cir.* = CIR.init(env); return ScopeTestContext{ - .self = try Self.init(cir, undefined), + .self = try Self.init(cir, undefined, null), .cir = cir, .env = env, .gpa = gpa, @@ -6087,7 +6350,7 @@ test "hexadecimal integer literals" { var cir = CIR.init(&env); defer cir.deinit(); - var can = try init(&cir, &ast); + var can = try init(&cir, &ast, null); defer can.deinit(); const expr_idx: parse.AST.Expr.Idx = @enumFromInt(ast.root_node_idx); @@ -6177,7 +6440,7 @@ test "binary integer literals" { var cir = CIR.init(&env); defer cir.deinit(); - var can = try init(&cir, &ast); + var can = try init(&cir, &ast, null); defer can.deinit(); const expr_idx: parse.AST.Expr.Idx = @enumFromInt(ast.root_node_idx); @@ -6267,7 +6530,7 @@ test "octal integer literals" { var cir = CIR.init(&env); defer cir.deinit(); - var can = try init(&cir, &ast); + var can = try init(&cir, &ast, null); defer can.deinit(); const expr_idx: parse.AST.Expr.Idx = @enumFromInt(ast.root_node_idx); @@ -6357,7 +6620,7 @@ test "integer literals with uppercase base prefixes" { var cir = CIR.init(&env); defer cir.deinit(); - var can = try init(&cir, &ast); + var can = try init(&cir, &ast, null); defer can.deinit(); const expr_idx: parse.AST.Expr.Idx = @enumFromInt(ast.root_node_idx); @@ -6749,7 +7012,7 @@ test "record literal uses record_unbound" { var cir = CIR.init(&env); 
defer cir.deinit(); - var can = try Self.init(&cir, &ast); + var can = try Self.init(&cir, &ast, null); defer can.deinit(); const expr_idx: parse.AST.Expr.Idx = @enumFromInt(ast.root_node_idx); @@ -6785,7 +7048,7 @@ test "record literal uses record_unbound" { var cir = CIR.init(&env); defer cir.deinit(); - var can = try Self.init(&cir, &ast); + var can = try Self.init(&cir, &ast, null); defer can.deinit(); const expr_idx: parse.AST.Expr.Idx = @enumFromInt(ast.root_node_idx); @@ -6820,7 +7083,7 @@ test "record literal uses record_unbound" { var cir = CIR.init(&env); defer cir.deinit(); - var can = try Self.init(&cir, &ast); + var can = try Self.init(&cir, &ast, null); defer can.deinit(); const expr_idx: parse.AST.Expr.Idx = @enumFromInt(ast.root_node_idx); @@ -6864,7 +7127,7 @@ test "record_unbound basic functionality" { var cir = CIR.init(&env); defer cir.deinit(); - var can = try Self.init(&cir, &ast); + var can = try Self.init(&cir, &ast, null); defer can.deinit(); const expr_idx: parse.AST.Expr.Idx = @enumFromInt(ast.root_node_idx); @@ -6907,7 +7170,7 @@ test "record_unbound with multiple fields" { var cir = CIR.init(&env); defer cir.deinit(); - var can = try Self.init(&cir, &ast); + var can = try Self.init(&cir, &ast, null); defer can.deinit(); const expr_idx: parse.AST.Expr.Idx = @enumFromInt(ast.root_node_idx); diff --git a/src/check/canonicalize/CIR.zig b/src/check/canonicalize/CIR.zig index c4c3ea4377..b5340f51a3 100644 --- a/src/check/canonicalize/CIR.zig +++ b/src/check/canonicalize/CIR.zig @@ -59,6 +59,8 @@ all_defs: Def.Span, all_statements: Statement.Span, /// All external declarations referenced in this module external_decls: std.ArrayList(ExternalDecl), +/// Store for interned module imports +imports: Import.Store, /// Initialize the IR for a module's canonicalization info. 
/// @@ -77,6 +79,7 @@ pub fn init(env: *ModuleEnv) CIR { .all_defs = .{ .span = .{ .start = 0, .len = 0 } }, .all_statements = .{ .span = .{ .start = 0, .len = 0 } }, .external_decls = std.ArrayList(ExternalDecl).init(env.gpa), + .imports = Import.Store.init(), }; } @@ -84,6 +87,7 @@ pub fn init(env: *ModuleEnv) CIR { pub fn deinit(self: *CIR) void { self.store.deinit(); self.external_decls.deinit(); + self.imports.deinit(self.env.gpa); } /// Records a diagnostic error during canonicalization without blocking compilation. @@ -159,6 +163,28 @@ pub fn diagnosticToReport(self: *CIR, diagnostic: Diagnostic, allocator: std.mem const feature_text = self.env.strings.get(data.feature); break :blk Diagnostic.buildNotImplementedReport(allocator, feature_text); }, + .exposed_but_not_implemented => |data| blk: { + const ident_name = self.env.idents.getText(data.ident); + const region_info = self.calcRegionInfo(data.region); + break :blk Diagnostic.buildExposedButNotImplementedReport( + allocator, + ident_name, + region_info, + filename, + ); + }, + .redundant_exposed => |data| blk: { + const ident_name = self.env.idents.getText(data.ident); + const region_info = self.calcRegionInfo(data.region); + const original_region_info = self.calcRegionInfo(data.original_region); + break :blk Diagnostic.buildRedundantExposedReport( + allocator, + ident_name, + region_info, + original_region_info, + filename, + ); + }, .invalid_num_literal => |data| blk: { break :blk Diagnostic.buildInvalidNumLiteralReport( allocator, @@ -248,6 +274,59 @@ pub fn diagnosticToReport(self: *CIR, diagnostic: Diagnostic, allocator: std.mem ); }, .tuple_elem_not_canonicalized => Diagnostic.buildTupleElemNotCanonicalizedReport(allocator), + .module_not_found => |data| blk: { + const module_name = self.env.idents.getText(data.module_name); + const region_info = self.calcRegionInfo(data.region); + break :blk Diagnostic.buildModuleNotFoundReport( + allocator, + module_name, + region_info, + filename, + ); + }, + 
.value_not_exposed => |data| blk: {
+            const module_name = self.env.idents.getText(data.module_name);
+            const value_name = self.env.idents.getText(data.value_name);
+            const region_info = self.calcRegionInfo(data.region);
+            break :blk Diagnostic.buildValueNotExposedReport(
+                allocator,
+                module_name,
+                value_name,
+                region_info,
+                filename,
+            );
+        },
+        .type_not_exposed => |data| blk: {
+            const module_name = self.env.idents.getText(data.module_name);
+            const type_name = self.env.idents.getText(data.type_name);
+            const region_info = self.calcRegionInfo(data.region);
+            break :blk Diagnostic.buildTypeNotExposedReport(
+                allocator,
+                module_name,
+                type_name,
+                region_info,
+                filename,
+            );
+        },
+        .module_not_imported => |data| blk: {
+            const module_name = self.env.idents.getText(data.module_name);
+            const region_info = self.calcRegionInfo(data.region);
+            break :blk Diagnostic.buildModuleNotImportedReport(
+                allocator,
+                module_name,
+                region_info,
+                filename,
+            );
+        },
+        .too_many_exports => |data| blk: {
+            const region_info = self.calcRegionInfo(data.region);
+            break :blk Diagnostic.buildTooManyExportsReport(
+                allocator,
+                data.count,
+                region_info,
+                filename,
+            );
+        },
         .undeclared_type => |data| blk: {
             const type_name = self.env.idents.getText(data.name);
             const region_info = self.calcRegionInfo(data.region);
@@ -693,6 +772,52 @@ pub const ExposedItem = struct {
     }
 };
 
+/// An imported module
+pub const Import = struct {
+    pub const Idx = enum(u16) { _ };
+
+    /// A store for interning imported module names.
+    /// Every stored name is copied into store-owned memory on insertion,
+    /// so callers need not keep their slices alive.
+    pub const Store = struct {
+        /// Map from module name string to Import.Idx (keys alias `imports` entries)
+        map: std.StringHashMapUnmanaged(Import.Idx) = .{},
+        /// List of imports indexed by Import.Idx
+        imports: std.ArrayListUnmanaged([]u8) = .{},
+        /// Storage for module name strings
+        strings: std.ArrayListUnmanaged(u8) = .{},
+
+        pub fn init() Store {
+            return .{};
+        }
+
+        pub fn deinit(self: *Store, gpa: std.mem.Allocator) void {
+            // Free the per-name allocations made in getOrPut. The map keys
+            // alias these same slices, so they must not be freed separately.
+            for (self.imports.items) |name| gpa.free(name);
+            self.map.deinit(gpa);
+            self.imports.deinit(gpa);
+            self.strings.deinit(gpa);
+        }
+
+        /// Get or create an Import.Idx for a module name.
+        /// The name is copied, so the caller's slice may be freed afterwards.
+        pub fn getOrPut(self: *Store, gpa: std.mem.Allocator, module_name: []const u8) !Import.Idx {
+            if (self.map.get(module_name)) |existing| return existing;
+
+            // Copy the name once and use that copy as both the map key and the
+            // imports entry. Slicing into self.strings here would be unsound:
+            // the backing buffer reallocates as it grows, invalidating every
+            // previously stored slice and every map key derived from it.
+            const stored_name = try gpa.dupe(u8, module_name);
+            errdefer gpa.free(stored_name);
+
+            const import_idx: Import.Idx = @enumFromInt(self.imports.items.len);
+            try self.imports.append(gpa, stored_name);
+            try self.map.put(gpa, stored_name, import_idx);
+            return import_idx;
+        }
+
+        /// Get the module name for an Import.Idx
+        pub fn getModuleName(self: *const Store, idx: Import.Idx) []const u8 {
+            return self.imports.items[@intFromEnum(idx)];
+        }
+    };
+};
+
 /// A file of any type that has been ingested into a Roc module
 /// as raw data, e.g. `import "lookups.txt" as lookups : Str`.
 ///
diff --git a/src/check/canonicalize/Diagnostic.zig b/src/check/canonicalize/Diagnostic.zig
index 92b5c5f4e6..d875ee426e 100644
--- a/src/check/canonicalize/Diagnostic.zig
+++ b/src/check/canonicalize/Diagnostic.zig
@@ -17,6 +17,15 @@ pub const Diagnostic = union(enum) {
         feature: StringLiteral.Idx,
         region: Region,
     },
+    exposed_but_not_implemented: struct {
+        ident: Ident.Idx,
+        region: Region,
+    },
+    redundant_exposed: struct {
+        ident: Ident.Idx,
+        region: Region,
+        original_region: Region,
+    },
     invalid_num_literal: struct {
         region: Region,
     },
@@ -93,6 +102,28 @@ pub const Diagnostic = union(enum) {
     tuple_elem_not_canonicalized: struct {
         region: Region,
     },
+    module_not_found: struct {
+        module_name: Ident.Idx,
+        region: Region,
+    },
+    value_not_exposed: struct {
+        module_name: Ident.Idx,
+        value_name: Ident.Idx,
+        region: Region,
+    },
+    type_not_exposed: struct {
+        module_name: Ident.Idx,
+        type_name: Ident.Idx,
+        region: Region,
+    },
+    module_not_imported: struct {
+        module_name: Ident.Idx,
+        region: Region,
+    },
+    too_many_exports: struct {
+        count: u32,
+        
region: Region, + }, undeclared_type: struct { name: Ident.Idx, region: Region, @@ -150,6 +181,8 @@ pub const Diagnostic = union(enum) { pub fn toRegion(self: Diagnostic) Region { return switch (self) { .not_implemented => |d| d.region, + .exposed_but_not_implemented => |d| d.region, + .redundant_exposed => |d| d.region, .invalid_num_literal => |d| d.region, .ident_already_in_scope => |d| d.region, .ident_not_in_scope => |d| d.region, @@ -169,6 +202,11 @@ pub const Diagnostic = union(enum) { .shadowing_warning => |d| d.region, .type_redeclared => |d| d.redeclared_region, .tuple_elem_not_canonicalized => |d| d.region, + .module_not_found => |d| d.region, + .value_not_exposed => |d| d.region, + .type_not_exposed => |d| d.region, + .module_not_imported => |d| d.region, + .too_many_exports => |d| d.region, .undeclared_type => |d| d.region, .undeclared_type_var => |d| d.region, .crash_expects_string => |d| d.region, @@ -231,6 +269,69 @@ pub const Diagnostic = union(enum) { return report; } + /// Build a report for "exposed but not implemented" diagnostic + pub fn buildExposedButNotImplementedReport( + allocator: Allocator, + ident_name: []const u8, + region_info: base.RegionInfo, + filename: []const u8, + ) !Report { + var report = Report.init(allocator, "EXPOSED BUT NOT DEFINED", .runtime_error); + const owned_ident = try report.addOwnedString(ident_name); + + try report.document.addReflowingText("The module header says that `"); + try report.document.addInlineCode(owned_ident); + try report.document.addReflowingText("` is exposed, but it is not defined anywhere in this module."); + try report.document.addLineBreak(); + try report.document.addLineBreak(); + try report.document.addSourceRegion( + region_info, + .error_highlight, + filename, + ); + + try report.document.addReflowingText("You can fix this by either defining `"); + try report.document.addInlineCode(owned_ident); + try report.document.addReflowingText("` in this module, or by removing it from the list of 
exposed values."); + + return report; + } + + /// Build a report for "redundant exposed" diagnostic + pub fn buildRedundantExposedReport( + allocator: Allocator, + ident_name: []const u8, + region_info: base.RegionInfo, + original_region_info: base.RegionInfo, + filename: []const u8, + ) !Report { + var report = Report.init(allocator, "REDUNDANT EXPOSED", .warning); + const owned_ident = try report.addOwnedString(ident_name); + + try report.document.addReflowingText("The identifier `"); + try report.document.addInlineCode(owned_ident); + try report.document.addReflowingText("` is exposed multiple times in the module header."); + try report.document.addLineBreak(); + try report.document.addLineBreak(); + try report.document.addSourceRegion( + region_info, + .error_highlight, + filename, + ); + + try report.document.addReflowingText("It was already exposed here:"); + + try report.document.addSourceRegion( + original_region_info, + .dimmed, + filename, + ); + + try report.document.addReflowingText("You can remove the duplicate entry to fix this warning."); + + return report; + } + /// Build a report for "identifier not in scope" diagnostic pub fn buildIdentNotInScopeReport( allocator: Allocator, @@ -956,6 +1057,148 @@ pub const Diagnostic = union(enum) { try report.document.addLineBreak(); try report.document.addInlineCode("n if n > 1e99 => ..."); + return report; + } + /// Build a report for "module not found" diagnostic + pub fn buildModuleNotFoundReport( + allocator: Allocator, + module_name: []const u8, + region_info: base.RegionInfo, + filename: []const u8, + ) !Report { + var report = Report.init(allocator, "MODULE NOT FOUND", .runtime_error); + + const owned_module = try report.addOwnedString(module_name); + try report.document.addText("The module "); + try report.document.addAnnotated(owned_module, .module_name); + try report.document.addText(" was not found."); + try report.document.addLineBreak(); + try report.document.addReflowingText("Make sure this module 
is imported and available in your project."); + + try report.document.addSourceRegion( + region_info, + .error_highlight, + filename, + ); + + return report; + } + + /// Build a report for "value not exposed" diagnostic + pub fn buildValueNotExposedReport( + allocator: Allocator, + module_name: []const u8, + value_name: []const u8, + region_info: base.RegionInfo, + filename: []const u8, + ) !Report { + var report = Report.init(allocator, "VALUE NOT EXPOSED", .runtime_error); + + const owned_module = try report.addOwnedString(module_name); + const owned_value = try report.addOwnedString(value_name); + try report.document.addText("The "); + try report.document.addAnnotated(owned_module, .module_name); + try report.document.addText(" module does not expose anything named "); + try report.document.addUnqualifiedSymbol(owned_value); + try report.document.addText("."); + try report.document.addLineBreak(); + try report.document.addReflowingText("Make sure the module exports this value, or use a value that is exposed."); + + try report.document.addSourceRegion( + region_info, + .error_highlight, + filename, + ); + + return report; + } + + /// Build a report for "type not exposed" diagnostic + pub fn buildTypeNotExposedReport( + allocator: Allocator, + module_name: []const u8, + type_name: []const u8, + region_info: base.RegionInfo, + filename: []const u8, + ) !Report { + var report = Report.init(allocator, "TYPE NOT EXPOSED", .runtime_error); + + const owned_module = try report.addOwnedString(module_name); + const owned_type = try report.addOwnedString(type_name); + try report.document.addText("The "); + try report.document.addAnnotated(owned_module, .module_name); + try report.document.addText(" module does not expose anything named "); + try report.document.addAnnotated(owned_type, .emphasized); + try report.document.addText("."); + try report.document.addLineBreak(); + try report.document.addReflowingText("Make sure the module exports this type, or use a type that is 
exposed.");
+
+        try report.document.addSourceRegion(
+            region_info,
+            .error_highlight,
+            filename,
+        );
+
+        return report;
+    }
+
+    /// Build a report for "module not imported" diagnostic
+    pub fn buildModuleNotImportedReport(
+        allocator: Allocator,
+        module_name: []const u8,
+        region_info: base.RegionInfo,
+        filename: []const u8,
+    ) !Report {
+        var report = Report.init(allocator, "MODULE NOT IMPORTED", .runtime_error);
+
+        const owned_module = try report.addOwnedString(module_name);
+        try report.document.addText("The module ");
+        try report.document.addAnnotated(owned_module, .module_name);
+        try report.document.addText(" is not imported in the current scope.");
+        try report.document.addLineBreak();
+        try report.document.addReflowingText("Try adding an import statement like: ");
+        try report.document.addKeyword("import");
+        try report.document.addText(" ");
+        try report.document.addAnnotated(owned_module, .module_name);
+
+        try report.document.addSourceRegion(
+            region_info,
+            .error_highlight,
+            filename,
+        );
+
+        return report;
+    }
+
+    /// Build a report for "too many exports" diagnostic
+    pub fn buildTooManyExportsReport(
+        allocator: Allocator,
+        count: u32,
+        region_info: base.RegionInfo,
+        filename: []const u8,
+    ) !Report {
+        var report = Report.init(allocator, "TOO MANY EXPORTS", .runtime_error);
+
+        const max_exports = std.math.maxInt(u16);
+
+        try report.document.addText("This module has ");
+        // Route the formatted strings through addOwnedString before adding
+        // them to the document: the report outlives this function, and every
+        // other builder in this file transfers ownership of dynamic text to
+        // the report this way — passing a defer-freed slice straight to
+        // addAnnotated would leave the document referencing freed memory.
+        const count_str = try std.fmt.allocPrint(allocator, "{}", .{count});
+        defer allocator.free(count_str);
+        const owned_count = try report.addOwnedString(count_str);
+        try report.document.addAnnotated(owned_count, .emphasized);
+        try report.document.addText(" exports, but the maximum allowed is ");
+        const max_str = try std.fmt.allocPrint(allocator, "{}", .{max_exports});
+        defer allocator.free(max_str);
+        const owned_max = try report.addOwnedString(max_str);
+        try report.document.addAnnotated(owned_max, .emphasized);
+        try report.document.addText(".");
+        try report.document.addLineBreak();
+        try report.document.addReflowingText("Consider splitting this module into smaller modules 
with fewer exports."); + + try report.document.addSourceRegion( + region_info, + .error_highlight, + filename, + ); + return report; } }; diff --git a/src/check/canonicalize/Expression.zig b/src/check/canonicalize/Expression.zig index d859715595..ddbcccb9cd 100644 --- a/src/check/canonicalize/Expression.zig +++ b/src/check/canonicalize/Expression.zig @@ -123,7 +123,11 @@ pub const Expr = union(enum) { /// import json.Utf8 /// foo = Utf8.encode("hello") # "Utf8.encode" is defined in another module /// ``` - e_lookup_external: ExternalDecl.Idx, + e_lookup_external: struct { + module_idx: CIR.Import.Idx, + target_node_idx: u16, + region: Region, + }, /// A sequence of zero or more elements of the same type /// ```roc /// ["one", "two", "three"] @@ -444,11 +448,7 @@ pub const Expr = union(enum) { .e_str_segment => |e| return e.region, .e_str => |e| return e.region, .e_lookup_local => |e| return e.region, - .e_lookup_external => { - // External lookups don't have a direct region access from Expr context - // The region should be handled where the CIR context is available - return null; - }, + .e_lookup_external => |e| return e.region, .e_list => |e| return e.region, .e_tuple => |e| return e.region, .e_match => |e| return e.region, @@ -627,12 +627,21 @@ pub const Expr = union(enum) { tree.endNode(begin, attrs); }, - .e_lookup_external => |external_idx| { + .e_lookup_external => |e| { const begin = tree.beginNode(); tree.pushStaticAtom("e-lookup-external"); + ir.appendRegionInfoToSExprTreeFromRegion(tree, e.region); const attrs = tree.beginNode(); - ir.getExternalDecl(external_idx).pushToSExprTree(ir, tree); + // Add module index + var buf: [32]u8 = undefined; + const module_idx_str = std.fmt.bufPrint(&buf, "{}", .{@intFromEnum(e.module_idx)}) catch unreachable; + tree.pushStringPair("module-idx", module_idx_str); + + // Add target node index + var buf2: [32]u8 = undefined; + const target_idx_str = std.fmt.bufPrint(&buf2, "{}", .{e.target_node_idx}) catch unreachable; + 
tree.pushStringPair("target-node-idx", target_idx_str); tree.endNode(begin, attrs); }, diff --git a/src/check/canonicalize/Node.zig b/src/check/canonicalize/Node.zig index d06e09b391..03eaa11e26 100644 --- a/src/check/canonicalize/Node.zig +++ b/src/check/canonicalize/Node.zig @@ -158,6 +158,11 @@ pub const Tag = enum { diag_undeclared_type_var, diag_type_alias_redeclared, diag_tuple_elem_not_canonicalized, + diag_module_not_found, + diag_value_not_exposed, + diag_type_not_exposed, + diag_module_not_imported, + diag_too_many_exports, diag_nominal_type_redeclared, diag_type_shadowed_warning, diag_type_parameter_conflict, @@ -166,4 +171,6 @@ pub const Tag = enum { diag_duplicate_record_field, diag_crash_expects_string, diag_f64_pattern_literal, + diagnostic_exposed_but_not_implemented, + diag_redundant_exposed, }; diff --git a/src/check/canonicalize/NodeStore.zig b/src/check/canonicalize/NodeStore.zig index e467fb15e3..a50808490d 100644 --- a/src/check/canonicalize/NodeStore.zig +++ b/src/check/canonicalize/NodeStore.zig @@ -97,7 +97,7 @@ pub fn deinit(store: *NodeStore) void { /// when adding/removing variants from CIR unions. Update these when modifying the unions. 
/// /// Count of the diagnostic nodes in the CIR -pub const CIR_DIAGNOSTIC_NODE_COUNT = 35; +pub const CIR_DIAGNOSTIC_NODE_COUNT = 42; /// Count of the expression nodes in the CIR pub const CIR_EXPR_NODE_COUNT = 28; /// Count of the statement nodes in the CIR @@ -319,7 +319,11 @@ pub fn getExpr(store: *const NodeStore, expr: CIR.Expr.Idx) CIR.Expr { }, .expr_external_lookup => { // Handle external lookups - return CIR.Expr{ .e_lookup_external = @enumFromInt(node.data_1) }; + return CIR.Expr{ .e_lookup_external = .{ + .module_idx = @enumFromInt(node.data_1), + .target_node_idx = @intCast(node.data_2), + .region = store.getRegionAt(node_idx), + } }; }, .expr_int => { // Read i128 from extra_data (stored as 4 u32s in data_1) @@ -1244,12 +1248,12 @@ pub fn addExpr(store: *NodeStore, expr: CIR.Expr) CIR.Expr.Idx { node.tag = .expr_var; node.data_1 = @intFromEnum(local.pattern_idx); }, - .e_lookup_external => |external_idx| { - // For external lookups, store the external decl index - // Use external lookup tag to distinguish from local lookups - region = base.Region.zero(); + .e_lookup_external => |e| { + // For external lookups, store the module index and target node index + region = e.region; node.tag = .expr_external_lookup; - node.data_1 = @intFromEnum(external_idx); + node.data_1 = @intFromEnum(e.module_idx); + node.data_2 = @as(u32, e.target_node_idx); }, .e_int => |e| { region = e.region; @@ -2396,6 +2400,22 @@ pub fn addDiagnostic(store: *NodeStore, reason: CIR.Diagnostic) CIR.Diagnostic.I region = r.region; node.data_1 = @bitCast(r.ident); }, + .exposed_but_not_implemented => |r| { + node.tag = .diagnostic_exposed_but_not_implemented; + region = r.region; + node.data_1 = @bitCast(r.ident); + }, + .redundant_exposed => |r| { + node.tag = .diag_redundant_exposed; + region = r.region; + node.data_1 = @bitCast(r.ident); + + // Store original region in extra_data + const extra_start = @as(u32, @intCast(store.extra_data.items.len)); + 
store.extra_data.append(store.gpa, r.original_region.start.offset) catch |err| exitOnOom(err); + store.extra_data.append(store.gpa, r.original_region.end.offset) catch |err| exitOnOom(err); + node.data_2 = extra_start; + }, .ident_not_in_scope => |r| { node.tag = .diag_ident_not_in_scope; region = r.region; @@ -2489,6 +2509,33 @@ pub fn addDiagnostic(store: *NodeStore, reason: CIR.Diagnostic) CIR.Diagnostic.I node.tag = .diag_tuple_elem_not_canonicalized; region = r.region; }, + .module_not_found => |r| { + node.tag = .diag_module_not_found; + region = r.region; + node.data_1 = @as(u32, @bitCast(r.module_name)); + }, + .value_not_exposed => |r| { + node.tag = .diag_value_not_exposed; + region = r.region; + node.data_1 = @as(u32, @bitCast(r.module_name)); + node.data_2 = @as(u32, @bitCast(r.value_name)); + }, + .type_not_exposed => |r| { + node.tag = .diag_type_not_exposed; + region = r.region; + node.data_1 = @as(u32, @bitCast(r.module_name)); + node.data_2 = @as(u32, @bitCast(r.type_name)); + }, + .module_not_imported => |r| { + node.tag = .diag_module_not_imported; + region = r.region; + node.data_1 = @as(u32, @bitCast(r.module_name)); + }, + .too_many_exports => |r| { + node.tag = .diag_too_many_exports; + region = r.region; + node.data_1 = r.count; + }, .nominal_type_redeclared => |r| { node.tag = .diag_nominal_type_redeclared; region = r.redeclared_region; @@ -2608,6 +2655,23 @@ pub fn getDiagnostic(store: *const NodeStore, diagnostic: CIR.Diagnostic.Idx) CI .ident = @bitCast(node.data_1), .region = store.getRegionAt(node_idx), } }, + .diagnostic_exposed_but_not_implemented => return CIR.Diagnostic{ .exposed_but_not_implemented = .{ + .ident = @bitCast(node.data_1), + .region = store.getRegionAt(node_idx), + } }, + .diag_redundant_exposed => { + const extra_data = store.extra_data.items[node.data_2..]; + const original_start = extra_data[0]; + const original_end = extra_data[1]; + return CIR.Diagnostic{ .redundant_exposed = .{ + .ident = @bitCast(node.data_1), 
+ .region = store.getRegionAt(node_idx), + .original_region = Region{ + .start = .{ .offset = original_start }, + .end = .{ .offset = original_end }, + }, + } }; + }, .diag_ident_not_in_scope => return CIR.Diagnostic{ .ident_not_in_scope = .{ .ident = @bitCast(node.data_1), .region = store.getRegionAt(node_idx), @@ -2669,6 +2733,28 @@ pub fn getDiagnostic(store: *const NodeStore, diagnostic: CIR.Diagnostic.Idx) CI .diag_tuple_elem_not_canonicalized => return CIR.Diagnostic{ .tuple_elem_not_canonicalized = .{ .region = store.getRegionAt(node_idx), } }, + .diag_module_not_found => return CIR.Diagnostic{ .module_not_found = .{ + .module_name = @as(base.Ident.Idx, @bitCast(node.data_1)), + .region = store.getRegionAt(node_idx), + } }, + .diag_value_not_exposed => return CIR.Diagnostic{ .value_not_exposed = .{ + .module_name = @as(base.Ident.Idx, @bitCast(node.data_1)), + .value_name = @as(base.Ident.Idx, @bitCast(node.data_2)), + .region = store.getRegionAt(node_idx), + } }, + .diag_type_not_exposed => return CIR.Diagnostic{ .type_not_exposed = .{ + .module_name = @as(base.Ident.Idx, @bitCast(node.data_1)), + .type_name = @as(base.Ident.Idx, @bitCast(node.data_2)), + .region = store.getRegionAt(node_idx), + } }, + .diag_module_not_imported => return CIR.Diagnostic{ .module_not_imported = .{ + .module_name = @as(base.Ident.Idx, @bitCast(node.data_1)), + .region = store.getRegionAt(node_idx), + } }, + .diag_too_many_exports => return CIR.Diagnostic{ .too_many_exports = .{ + .count = node.data_1, + .region = store.getRegionAt(node_idx), + } }, .diag_undeclared_type_var => return CIR.Diagnostic{ .undeclared_type_var = .{ .name = @bitCast(node.data_1), .region = store.getRegionAt(node_idx), diff --git a/src/check/canonicalize/Scope.zig b/src/check/canonicalize/Scope.zig index 1c68ae4533..c143274740 100644 --- a/src/check/canonicalize/Scope.zig +++ b/src/check/canonicalize/Scope.zig @@ -20,6 +20,8 @@ type_vars: std.AutoHashMapUnmanaged(Ident.Idx, CIR.TypeAnno.Idx), 
module_aliases: std.AutoHashMapUnmanaged(Ident.Idx, Ident.Idx), /// Maps exposed item names to their source modules and original names (for import resolution) exposed_items: std.AutoHashMapUnmanaged(Ident.Idx, ExposedItemInfo), +/// Maps module names to their Import.Idx for modules imported in this scope +imported_modules: std.StringHashMapUnmanaged(CIR.Import.Idx), is_function_boundary: bool, /// Initialize the scope @@ -31,6 +33,7 @@ pub fn init(is_function_boundary: bool) Scope { .type_vars = std.AutoHashMapUnmanaged(Ident.Idx, CIR.TypeAnno.Idx){}, .module_aliases = std.AutoHashMapUnmanaged(Ident.Idx, Ident.Idx){}, .exposed_items = std.AutoHashMapUnmanaged(Ident.Idx, ExposedItemInfo){}, + .imported_modules = std.StringHashMapUnmanaged(CIR.Import.Idx){}, .is_function_boundary = is_function_boundary, }; } @@ -43,6 +46,7 @@ pub fn deinit(self: *Scope, gpa: std.mem.Allocator) void { self.type_vars.deinit(gpa); self.module_aliases.deinit(gpa); self.exposed_items.deinit(gpa); + self.imported_modules.deinit(gpa); } /// Scope management types and structures @@ -133,6 +137,18 @@ pub const ExposedItemIntroduceResult = union(enum) { already_in_scope: ExposedItemInfo, // The exposed item already exists in this scope }; +/// Result of looking up an imported module +pub const ImportedModuleLookupResult = union(enum) { + found: CIR.Import.Idx, + not_found: void, +}; + +/// Result of introducing an imported module +pub const ImportedModuleIntroduceResult = union(enum) { + success: void, + already_imported: CIR.Import.Idx, // The module was already imported in this scope +}; + /// Item kinds in a scope pub const ItemKind = enum { ident, alias, type_decl, type_var, module_alias, exposed_item }; @@ -388,3 +404,26 @@ pub fn introduceExposedItem( return ExposedItemIntroduceResult{ .success = {} }; } + +/// Look up an imported module in this scope +pub fn lookupImportedModule(scope: *const Scope, module_name: []const u8) ImportedModuleLookupResult { + if 
(scope.imported_modules.get(module_name)) |import_idx| { + return ImportedModuleLookupResult{ .found = import_idx }; + } + return ImportedModuleLookupResult{ .not_found = {} }; +} + +/// Introduce an imported module into this scope +pub fn introduceImportedModule( + scope: *Scope, + gpa: std.mem.Allocator, + module_name: []const u8, + import_idx: CIR.Import.Idx, +) ImportedModuleIntroduceResult { + if (scope.imported_modules.contains(module_name)) { + return ImportedModuleIntroduceResult{ .already_imported = scope.imported_modules.get(module_name).? }; + } + + scope.imported_modules.put(gpa, module_name, import_idx) catch |err| collections.utils.exitOnOom(err); + return ImportedModuleIntroduceResult{ .success = {} }; +} diff --git a/src/check/canonicalize/test/exposed_shadowing_test.zig b/src/check/canonicalize/test/exposed_shadowing_test.zig new file mode 100644 index 0000000000..ca7884f9d6 --- /dev/null +++ b/src/check/canonicalize/test/exposed_shadowing_test.zig @@ -0,0 +1,452 @@ +const std = @import("std"); +const testing = std.testing; +const base = @import("../../../base.zig"); +const AST = @import("../../parse/AST.zig"); +const CIR = @import("../CIR.zig"); +const canonicalize = @import("../../canonicalize.zig"); +const parse = @import("../../parse.zig"); +const tokenize = @import("../../parse/tokenize.zig"); + +test "exposed but not implemented - values" { + const allocator = testing.allocator; + + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + + const source = + \\module [foo, bar] + \\ + \\foo = 42 + ; + + var ast = parse.parse(&env, source); + defer ast.deinit(allocator); + + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // Check that we have an "exposed but not implemented" diagnostic for 'bar' + var found_bar_error = false; + for (0..cir.store.scratch_diagnostics.top()) |i| { + const diag_idx 
= cir.store.scratch_diagnostics.items.items[i]; + const diag = cir.store.getDiagnostic(diag_idx); + switch (diag) { + .exposed_but_not_implemented => |d| { + const ident_text = cir.env.idents.getText(d.ident); + if (std.mem.eql(u8, ident_text, "bar")) { + found_bar_error = true; + } + }, + else => {}, + } + } + try testing.expect(found_bar_error); +} + +test "exposed but not implemented - types" { + const allocator = testing.allocator; + + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + + const source = + \\module [MyType, OtherType] + \\ + \\MyType : [A, B] + ; + + var ast = parse.parse(&env, source); + defer ast.deinit(allocator); + + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // Check that we have an "exposed but not implemented" diagnostic for 'OtherType' + var found_other_type_error = false; + for (0..cir.store.scratch_diagnostics.top()) |i| { + const diag_idx = cir.store.scratch_diagnostics.items.items[i]; + const diag = cir.store.getDiagnostic(diag_idx); + switch (diag) { + .exposed_but_not_implemented => |d| { + const ident_text = cir.env.idents.getText(d.ident); + if (std.mem.eql(u8, ident_text, "OtherType")) { + found_other_type_error = true; + } + }, + else => {}, + } + } + try testing.expect(found_other_type_error); +} + +test "redundant exposed entries" { + const allocator = testing.allocator; + + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + + const source = + \\module [foo, bar, foo, MyType, bar] + \\ + \\foo = 42 + \\bar = "hello" + \\MyType : [A] + ; + + var ast = parse.parse(&env, source); + defer ast.deinit(allocator); + + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // Check that we have redundant exposed warnings + var 
found_foo_redundant = false; + var found_bar_redundant = false; + for (0..cir.store.scratch_diagnostics.top()) |i| { + const diag_idx = cir.store.scratch_diagnostics.items.items[i]; + const diag = cir.store.getDiagnostic(diag_idx); + switch (diag) { + .redundant_exposed => |d| { + const ident_text = cir.env.idents.getText(d.ident); + if (std.mem.eql(u8, ident_text, "foo")) { + found_foo_redundant = true; + } else if (std.mem.eql(u8, ident_text, "bar")) { + found_bar_redundant = true; + } + }, + else => {}, + } + } + try testing.expect(found_foo_redundant); + try testing.expect(found_bar_redundant); +} + +test "shadowing with exposed items" { + const allocator = testing.allocator; + + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + + const source = + \\module [x, y] + \\ + \\x = 1 + \\x = 2 + \\ + \\y = "first" + \\y = "second" + ; + + var ast = parse.parse(&env, source); + defer ast.deinit(allocator); + + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // Check that we have shadowing warnings + var shadowing_count: usize = 0; + for (0..cir.store.scratch_diagnostics.top()) |i| { + const diag_idx = cir.store.scratch_diagnostics.items.items[i]; + const diag = cir.store.getDiagnostic(diag_idx); + switch (diag) { + .shadowing_warning => shadowing_count += 1, + else => {}, + } + } + // Should have warnings for both x and y being shadowed + try testing.expectEqual(@as(usize, 2), shadowing_count); +} + +test "shadowing non-exposed items" { + const allocator = testing.allocator; + + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + + const source = + \\module [] + \\ + \\notExposed = 1 + \\notExposed = 2 + ; + + var ast = parse.parse(&env, source); + defer ast.deinit(allocator); + + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + 
defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // Check that we still get shadowing warnings for non-exposed items + var found_shadowing = false; + for (0..cir.store.scratch_diagnostics.top()) |i| { + const diag_idx = cir.store.scratch_diagnostics.items.items[i]; + const diag = cir.store.getDiagnostic(diag_idx); + switch (diag) { + .shadowing_warning => |d| { + const ident_text = cir.env.idents.getText(d.ident); + if (std.mem.eql(u8, ident_text, "notExposed")) { + found_shadowing = true; + } + }, + else => {}, + } + } + try testing.expect(found_shadowing); +} + +test "exposed items correctly tracked across shadowing" { + const allocator = testing.allocator; + + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + + const source = + \\module [x, y, z] + \\ + \\x = 1 + \\x = 2 + \\ + \\y = "defined" + \\ + \\# z is exposed but never defined + ; + + var ast = parse.parse(&env, source); + defer ast.deinit(allocator); + + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // Should have: + // - Shadowing warning for x + // - No "exposed but not implemented" for x (it is implemented) + // - No "exposed but not implemented" for y (it is implemented) + // - "exposed but not implemented" for z (never defined) + + var found_x_shadowing = false; + var found_z_not_implemented = false; + var found_unexpected_not_implemented = false; + + for (0..cir.store.scratch_diagnostics.top()) |i| { + const diag_idx = cir.store.scratch_diagnostics.items.items[i]; + const diag = cir.store.getDiagnostic(diag_idx); + switch (diag) { + .shadowing_warning => |d| { + const ident_text = cir.env.idents.getText(d.ident); + if (std.mem.eql(u8, ident_text, "x")) { + found_x_shadowing = true; + } + }, + .exposed_but_not_implemented => |d| { + const ident_text = cir.env.idents.getText(d.ident); + if (std.mem.eql(u8, 
ident_text, "z")) { + found_z_not_implemented = true; + } else if (std.mem.eql(u8, ident_text, "x") or std.mem.eql(u8, ident_text, "y")) { + found_unexpected_not_implemented = true; + } + }, + else => {}, + } + } + + try testing.expect(found_x_shadowing); + try testing.expect(found_z_not_implemented); + try testing.expect(!found_unexpected_not_implemented); +} + +test "complex case with redundant, shadowing, and not implemented" { + const allocator = testing.allocator; + + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + + const source = + \\module [a, b, a, c, NotImplemented] + \\ + \\a = 1 + \\a = 2 + \\ + \\b = "hello" + \\ + \\c = 100 + ; + + var ast = parse.parse(&env, source); + defer ast.deinit(allocator); + + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + var found_a_redundant = false; + var found_a_shadowing = false; + var found_not_implemented = false; + + for (0..cir.store.scratch_diagnostics.top()) |i| { + const diag_idx = cir.store.scratch_diagnostics.items.items[i]; + const diag = cir.store.getDiagnostic(diag_idx); + switch (diag) { + .redundant_exposed => |d| { + const ident_text = cir.env.idents.getText(d.ident); + if (std.mem.eql(u8, ident_text, "a")) { + found_a_redundant = true; + } + }, + .shadowing_warning => |d| { + const ident_text = cir.env.idents.getText(d.ident); + if (std.mem.eql(u8, ident_text, "a")) { + found_a_shadowing = true; + } + }, + .exposed_but_not_implemented => |d| { + const ident_text = cir.env.idents.getText(d.ident); + if (std.mem.eql(u8, ident_text, "NotImplemented")) { + found_not_implemented = true; + } + }, + else => {}, + } + } + + try testing.expect(found_a_redundant); + try testing.expect(found_a_shadowing); + try testing.expect(found_not_implemented); +} + +test "exposed_by_str is populated correctly" { + const allocator = testing.allocator; + + var env = 
base.ModuleEnv.init(allocator); + defer env.deinit(); + + const source = + \\module [foo, bar, MyType, foo] + \\ + \\foo = 42 + \\bar = "hello" + \\MyType : [A, B] + ; + + var ast = parse.parse(&env, source); + defer ast.deinit(allocator); + + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // Check that exposed_by_str contains all exposed items + try testing.expect(env.exposed_by_str.contains("foo")); + try testing.expect(env.exposed_by_str.contains("bar")); + try testing.expect(env.exposed_by_str.contains("MyType")); + + // Should have exactly 3 entries (duplicates not stored) + try testing.expectEqual(@as(usize, 3), env.exposed_by_str.count()); +} + +test "exposed_by_str persists after canonicalization" { + const allocator = testing.allocator; + + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + + const source = + \\module [x, y, z] + \\ + \\x = 1 + \\y = 2 + \\# z is not defined + ; + + var ast = parse.parse(&env, source); + defer ast.deinit(allocator); + + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // All exposed items should be in exposed_by_str, even those not implemented + try testing.expect(env.exposed_by_str.contains("x")); + try testing.expect(env.exposed_by_str.contains("y")); + try testing.expect(env.exposed_by_str.contains("z")); + + // Verify the map persists in env after canonicalization is complete + try testing.expectEqual(@as(usize, 3), env.exposed_by_str.count()); +} + +test "exposed_by_str never has entries removed" { + const allocator = testing.allocator; + + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + + const source = + \\module [foo, bar, foo, baz] + \\ + \\foo = 42 + \\bar = "hello" + \\# baz is not implemented + ; + 
+ var ast = parse.parse(&env, source); + defer ast.deinit(allocator); + + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // All exposed items should remain in exposed_by_str + // Even though foo appears twice and baz is not implemented, + // exposed_by_str should have all unique exposed identifiers + try testing.expect(env.exposed_by_str.contains("foo")); + try testing.expect(env.exposed_by_str.contains("bar")); + try testing.expect(env.exposed_by_str.contains("baz")); + + // Should have exactly 3 unique entries + try testing.expectEqual(@as(usize, 3), env.exposed_by_str.count()); +} diff --git a/src/check/canonicalize/test/frac_test.zig b/src/check/canonicalize/test/frac_test.zig index d14b91b5d2..ccda46d8ed 100644 --- a/src/check/canonicalize/test/frac_test.zig +++ b/src/check/canonicalize/test/frac_test.zig @@ -30,7 +30,7 @@ fn parseAndCanonicalizeFrac(allocator: std.mem.Allocator, source: []const u8) !s cir.* = CIR.init(module_env); const can = try allocator.create(canonicalize); - can.* = try canonicalize.init(cir, parse_ast); + can.* = try canonicalize.init(cir, parse_ast, null); const expr_idx: parse.AST.Expr.Idx = @enumFromInt(parse_ast.root_node_idx); const canonical_expr_idx = try can.canonicalizeExpr(expr_idx) orelse { diff --git a/src/check/canonicalize/test/import_validation_test.zig b/src/check/canonicalize/test/import_validation_test.zig new file mode 100644 index 0000000000..3819efccef --- /dev/null +++ b/src/check/canonicalize/test/import_validation_test.zig @@ -0,0 +1,728 @@ +const std = @import("std"); +const testing = std.testing; +const base = @import("../../../base.zig"); +const parse = @import("../../parse.zig"); +const canonicalize = @import("../../canonicalize.zig"); +const CIR = canonicalize.CIR; +const expectEqual = testing.expectEqual; + +test "import validation - mix of MODULE NOT FOUND, TYPE 
NOT EXPOSED, VALUE NOT EXPOSED, and working imports" { + const allocator = testing.allocator; + + // First, create some module environments with exposed items + var module_envs = std.StringHashMap(*base.ModuleEnv).init(allocator); + defer module_envs.deinit(); + + // Create module environment for "Json" module + var json_env = base.ModuleEnv.init(allocator); + defer json_env.deinit(); + + // Add exposed items to Json module + try json_env.exposed_by_str.put(allocator, "decode", {}); + try json_env.exposed_by_str.put(allocator, "encode", {}); + try json_env.exposed_by_str.put(allocator, "JsonError", {}); + try json_env.exposed_by_str.put(allocator, "DecodeProblem", {}); + + try module_envs.put("Json", &json_env); + + // Create module environment for "Utils" module + var utils_env = base.ModuleEnv.init(allocator); + defer utils_env.deinit(); + + // Add exposed items to Utils module + try utils_env.exposed_by_str.put(allocator, "map", {}); + try utils_env.exposed_by_str.put(allocator, "filter", {}); + try utils_env.exposed_by_str.put(allocator, "Result", {}); + + try module_envs.put("Utils", &utils_env); + + // Parse source code with various import statements + const source = + \\module [main] + \\ + \\# Import from existing module with valid items + \\import Json exposing [decode, JsonError] + \\ + \\# Import from existing module with some invalid items + \\import Utils exposing [map, doesNotExist, Result, InvalidType] + \\ + \\# Import from non-existent module + \\import NonExistent exposing [something, SomeType] + \\ + \\# Valid import with all exposed items + \\import Json exposing [encode, DecodeProblem] + \\ + \\main = "test" + ; + + // Parse the source + var tokens = try parse.tokenize(allocator, source, .file); + defer tokens.deinit(allocator); + var parse_env = base.ModuleEnv.init(allocator); + defer parse_env.deinit(); + try parse_env.calcLineStarts(source); + var ast = try parse.parse(&parse_env, &tokens, allocator, .file); + defer ast.deinit(); + + // 
Canonicalize with module validation + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + try env.calcLineStarts(source); + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, &module_envs); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // Collect all diagnostics + var module_not_found_count: u32 = 0; + var value_not_exposed_count: u32 = 0; + var type_not_exposed_count: u32 = 0; + var found_does_not_exist = false; + var found_invalid_type = false; + var found_non_existent = false; + + const diagnostics = cir.diag_regions.entries.items; + for (diagnostics) |entry| { + const diag_idx: CIR.Diagnostic.Idx = @enumFromInt(entry.value); + const diagnostic = cir.store.getDiagnostic(diag_idx); + + switch (diagnostic) { + .module_not_found => |d| { + module_not_found_count += 1; + const module_name = env.idents.getText(d.module_name); + if (std.mem.eql(u8, module_name, "NonExistent")) { + found_non_existent = true; + } + }, + .value_not_exposed => |d| { + value_not_exposed_count += 1; + const value_name = env.idents.getText(d.value_name); + if (std.mem.eql(u8, value_name, "doesNotExist")) { + found_does_not_exist = true; + } + }, + .type_not_exposed => |d| { + type_not_exposed_count += 1; + const type_name = env.idents.getText(d.type_name); + if (std.mem.eql(u8, type_name, "InvalidType")) { + found_invalid_type = true; + } + }, + else => {}, + } + } + + // Verify we got the expected errors + try expectEqual(@as(u32, 1), module_not_found_count); // NonExistent module + try expectEqual(@as(u32, 1), value_not_exposed_count); // doesNotExist + try expectEqual(@as(u32, 1), type_not_exposed_count); // InvalidType + + try expectEqual(true, found_non_existent); + try expectEqual(true, found_does_not_exist); + try expectEqual(true, found_invalid_type); + + // Verify that valid imports didn't generate errors + // The imports for decode, JsonError, map, Result, encode, and DecodeProblem 
should all work +} + +test "import validation - no module_envs provided" { + const allocator = testing.allocator; + + // Parse source code with import statements + const source = + \\module [main] + \\ + \\import Json exposing [decode, JsonError] + \\ + \\main = "test" + ; + + // Parse the source + var tokens = try parse.tokenize(allocator, source, .file); + defer tokens.deinit(allocator); + var parse_env = base.ModuleEnv.init(allocator); + defer parse_env.deinit(); + try parse_env.calcLineStarts(source); + var ast = try parse.parse(&parse_env, &tokens, allocator, .file); + defer ast.deinit(); + + // Canonicalize without module validation (pass null) + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + try env.calcLineStarts(source); + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // When module_envs is null, no import validation errors should be generated + const diagnostics = cir.diag_regions.entries.items; + for (diagnostics) |entry| { + const diag_idx: CIR.Diagnostic.Idx = @enumFromInt(entry.value); + const diagnostic = cir.store.getDiagnostic(diag_idx); + + switch (diagnostic) { + .module_not_found, .value_not_exposed, .type_not_exposed => { + // These errors should not occur when module_envs is null + try testing.expect(false); + }, + else => {}, + } + } +} + +test "import interner - Import.Idx functionality" { + const allocator = testing.allocator; + + // Parse source code with multiple imports, including duplicates + const source = + \\module [main] + \\ + \\import List + \\import Dict + \\import List # Duplicate - should get same Import.Idx + \\import Json.Decode + \\import Set + \\import Json.Decode # Another duplicate + \\ + \\main = "test" + ; + + // Parse the source + var tokens = try parse.tokenize(allocator, source, .file); + defer tokens.deinit(allocator); + var parse_env = 
base.ModuleEnv.init(allocator); + defer parse_env.deinit(); + try parse_env.calcLineStarts(source); + var ast = try parse.parse(&parse_env, &tokens, allocator, .file); + defer ast.deinit(); + + // Canonicalize without module validation to focus on Import.Idx + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + try env.calcLineStarts(source); + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // Check that we have the correct number of unique imports + // Expected: List, Dict, Json.Decode, Set (4 unique) + try expectEqual(@as(usize, 4), cir.imports.imports.items.len); + + // Verify each unique module has an Import.Idx + var found_list = false; + var found_dict = false; + var found_json_decode = false; + var found_set = false; + + for (cir.imports.imports.items, 0..) |import, idx| { + const import_idx: CIR.Import.Idx = @enumFromInt(idx); + const module_name = import.module_name; + + // Verify we can look up the module name from Import.Idx + const retrieved_name = cir.imports.getModuleName(import_idx); + try testing.expectEqualStrings(module_name, retrieved_name); + + if (std.mem.eql(u8, module_name, "List")) { + found_list = true; + } else if (std.mem.eql(u8, module_name, "Dict")) { + found_dict = true; + } else if (std.mem.eql(u8, module_name, "Json.Decode")) { + found_json_decode = true; + } else if (std.mem.eql(u8, module_name, "Set")) { + found_set = true; + } + } + + // Verify all expected modules were found + try expectEqual(true, found_list); + try expectEqual(true, found_dict); + try expectEqual(true, found_json_decode); + try expectEqual(true, found_set); + + // Test the lookup functionality + // Get the Import.Idx for "List" (should be used twice) + var list_import_idx: ?CIR.Import.Idx = null; + for (canonicalizer.import_indices.iterator()) |entry| { + const module_name = entry.key_ptr.*; + if 
(std.mem.eql(u8, module_name, "List")) { + list_import_idx = entry.value_ptr.*; + break; + } + } + + try testing.expect(list_import_idx != null); + + // Verify we can retrieve the correct module name from the Import.Idx + const retrieved_list_name = cir.imports.getModuleName(list_import_idx.?); + try testing.expectEqualStrings("List", retrieved_list_name); +} + +test "import interner - comprehensive usage example" { + const allocator = testing.allocator; + + // Parse source with imports used in different contexts + const source = + \\module [process] + \\ + \\import List exposing [map, filter] + \\import Dict + \\import Result exposing [Result, withDefault] + \\ + \\process : List Str -> Dict Str Nat + \\process = \items -> + \\ items + \\ |> List.map Str.toLower + \\ |> List.filter \item -> Str.length item > 3 + \\ |> List.foldl Dict.empty \dict, item -> + \\ Dict.update dict item \maybeCount -> + \\ when maybeCount is + \\ Present count -> Present (count + 1) + \\ Missing -> Present 1 + ; + + // Parse the source + var tokens = try parse.tokenize(allocator, source, .file); + defer tokens.deinit(allocator); + var parse_env = base.ModuleEnv.init(allocator); + defer parse_env.deinit(); + try parse_env.calcLineStarts(source); + var ast = try parse.parse(&parse_env, &tokens, allocator, .file); + defer ast.deinit(); + + // Canonicalize + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + try env.calcLineStarts(source); + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // Verify Import.Idx assignments + // Get Import.Idx values from the imports store + const list_import = cir.imports.map.get("List"); + const dict_import = cir.imports.map.get("Dict"); + const result_import = cir.imports.map.get("Result"); + + // All should have Import.Idx values + try testing.expect(list_import != null); + try 
testing.expect(dict_import != null); + try testing.expect(result_import != null); + + // They should all be different + try testing.expect(list_import.? != dict_import.?); + try testing.expect(list_import.? != result_import.?); + try testing.expect(dict_import.? != result_import.?); + + // Verify we can look up module names from Import.Idx + const list_name = cir.imports.getModuleName(list_import.?); + const dict_name = cir.imports.getModuleName(dict_import.?); + const result_name = cir.imports.getModuleName(result_import.?); + + try testing.expectEqualStrings("List", list_name); + try testing.expectEqualStrings("Dict", dict_name); + try testing.expectEqualStrings("Result", result_name); + + // Verify total unique imports + try expectEqual(@as(usize, 3), cir.imports.imports.items.len); + + // Demo: Print all imports with their indices + std.debug.print("\n=== Import Index Demo ===\n", .{}); + for (cir.imports.imports.items, 0..) |import, idx| { + const import_idx: CIR.Import.Idx = @enumFromInt(idx); + const module_name_text = import.module_name; + std.debug.print("Import.Idx {} -> module '{}'\n", .{ @intFromEnum(import_idx), module_name_text }); + } +} + +test "Import.Idx is u16" { + // Verify that Import.Idx is indeed a u16 enum + const import_idx_type = @TypeOf(CIR.Import.Idx); + const type_info = @typeInfo(import_idx_type).Enum; + + // The underlying type should be u16 + try testing.expectEqual(u16, type_info.tag_type); + + // Test that we can create valid Import.Idx values + const idx1: CIR.Import.Idx = @enumFromInt(0); + const idx2: CIR.Import.Idx = @enumFromInt(65535); // max u16 value + + // Verify they are distinct + try testing.expect(idx1 != idx2); + + // Verify the size in memory + try testing.expectEqual(@sizeOf(u16), @sizeOf(CIR.Import.Idx)); +} + +test "module scopes - imports are only available in their scope" { + const allocator = testing.allocator; + + // Parse source with imports in different scopes + const source = + \\module [process] + \\ + 
\\import List + \\import Dict + \\ + \\process = \items -> + \\ # List and Dict are available here + \\ list = List.map items \x -> x + 1 + \\ dict = Dict.empty + \\ + \\ inner = \y -> + \\ # List and Dict are still available in inner scope + \\ import Set + \\ # Now Set is also available + \\ set = Set.empty + \\ list2 = List.len items + \\ set + \\ + \\ # Set is NOT available here (out of scope) + \\ # This should generate MODULE_NOT_IMPORTED error + \\ badSet = Set.empty + \\ + \\ dict + ; + + // Parse the source + var tokens = try parse.tokenize(allocator, source, .file); + defer tokens.deinit(allocator); + var parse_env = base.ModuleEnv.init(allocator); + defer parse_env.deinit(); + try parse_env.calcLineStarts(source); + var ast = try parse.parse(&parse_env, &tokens, allocator, .file); + defer ast.deinit(); + + // Canonicalize without external module validation to focus on scope testing + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + try env.calcLineStarts(source); + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // Check for MODULE_NOT_IMPORTED error + var found_module_not_imported = false; + var error_module_name: ?[]const u8 = null; + + const diagnostics = cir.diag_regions.entries.items; + for (diagnostics) |entry| { + const diag_idx: CIR.Diagnostic.Idx = @enumFromInt(entry.value); + const diagnostic = cir.store.getDiagnostic(diag_idx); + + switch (diagnostic) { + .module_not_imported => |d| { + found_module_not_imported = true; + error_module_name = env.idents.getText(d.module_name); + }, + else => {}, + } + } + + // Verify we got the MODULE_NOT_IMPORTED error for Set + try expectEqual(true, found_module_not_imported); + try testing.expectEqualStrings("Set", error_module_name.?); + + // Verify that List and Dict imports were processed correctly + try testing.expect(cir.imports.imports.items.len 
>= 3); // List, Dict, and Set +} + +test "module-qualified lookups with e_lookup_external" { + const allocator = testing.allocator; + + // Parse source with module-qualified lookups + const source = + \\module [main] + \\ + \\import List + \\import Dict + \\ + \\main = + \\ list = List.map [1, 2, 3] \x -> x * 2 + \\ dict = Dict.insert Dict.empty "key" "value" + \\ List.len list + ; + + // Parse the source + var tokens = try parse.tokenize(allocator, source, .file); + defer tokens.deinit(allocator); + var parse_env = base.ModuleEnv.init(allocator); + defer parse_env.deinit(); + try parse_env.calcLineStarts(source); + var ast = try parse.parse(&parse_env, &tokens, allocator, .file); + defer ast.deinit(); + + // Canonicalize + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + try env.calcLineStarts(source); + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, null); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // Count e_lookup_external expressions + var external_lookup_count: u32 = 0; + var found_list_map = false; + var found_list_len = false; + var found_dict_insert = false; + var found_dict_empty = false; + + // Traverse the CIR to find e_lookup_external expressions + const all_exprs = cir.store.expr_buffer.items; + for (all_exprs) |node| { + if (node.tag == .expr_lookup_external) { + external_lookup_count += 1; + + // Get the external lookup data + const module_idx: CIR.Import.Idx = @enumFromInt(node.data_1); + const field_name_idx: base.Ident.Idx = @bitCast(node.data_2); + + const module_name = cir.imports.getModuleName(module_idx); + const field_name = env.idents.getText(field_name_idx); + + if (std.mem.eql(u8, module_name, "List")) { + if (std.mem.eql(u8, field_name, "map")) found_list_map = true; + if (std.mem.eql(u8, field_name, "len")) found_list_len = true; + } else if (std.mem.eql(u8, module_name, "Dict")) { + if (std.mem.eql(u8, field_name, "insert")) 
found_dict_insert = true; + if (std.mem.eql(u8, field_name, "empty")) found_dict_empty = true; + } + } + } + + // Verify we found all expected external lookups + try expectEqual(@as(u32, 4), external_lookup_count); + try expectEqual(true, found_list_map); + try expectEqual(true, found_list_len); + try expectEqual(true, found_dict_insert); + try expectEqual(true, found_dict_empty); +} + +test "exposed_nodes - tracking CIR node indices for exposed items" { + const allocator = testing.allocator; + + // Create module environments with exposed items + var module_envs = std.StringHashMap(*base.ModuleEnv).init(allocator); + defer module_envs.deinit(); + + // Create a "MathUtils" module with some exposed definitions + var math_env = base.ModuleEnv.init(allocator); + defer math_env.deinit(); + + // Add exposed items + try math_env.exposed_by_str.put(allocator, "add", {}); + try math_env.exposed_by_str.put(allocator, "multiply", {}); + try math_env.exposed_by_str.put(allocator, "PI", {}); + + // Simulate having CIR node indices for these exposed items + // In real usage, these would be set during canonicalization of MathUtils + try math_env.exposed_nodes.put(allocator, "add", 100); + try math_env.exposed_nodes.put(allocator, "multiply", 200); + try math_env.exposed_nodes.put(allocator, "PI", 300); + + try module_envs.put("MathUtils", &math_env); + + // Parse source that uses these exposed items + const source = + \\module [calculate] + \\ + \\import MathUtils exposing [add, multiply, PI] + \\ + \\calculate = \x, y -> + \\ sum = add x y + \\ product = multiply x y + \\ circumference = multiply (multiply 2 PI) x + \\ { sum, product, circumference } + ; + + // Parse the source + var tokens = try parse.tokenize(allocator, source, .file); + defer tokens.deinit(allocator); + var parse_env = base.ModuleEnv.init(allocator); + defer parse_env.deinit(); + try parse_env.calcLineStarts(source); + var ast = try parse.parse(&parse_env, &tokens, allocator, .file); + defer ast.deinit(); + + 
// Canonicalize with module environments + var env = base.ModuleEnv.init(allocator); + defer env.deinit(); + try env.calcLineStarts(source); + var cir = CIR.init(&env); + defer cir.deinit(); + + var canonicalizer = try canonicalize.init(&cir, &ast, &module_envs); + defer canonicalizer.deinit(); + + try canonicalizer.canonicalizeFile(); + + // Verify that e_lookup_external expressions have the correct target_node_idx values + var found_add_with_idx_100 = false; + var found_multiply_with_idx_200 = false; + var found_pi_with_idx_300 = false; + + const all_exprs = cir.store.expr_buffer.items; + for (all_exprs) |node| { + if (node.tag == .expr_external_lookup) { + const module_idx: CIR.Import.Idx = @enumFromInt(node.data_1); + const field_name_idx: base.Ident.Idx = @bitCast(node.data_2); + const target_node_idx: u16 = @intCast(node.data_3); + + const module_name = cir.imports.getModuleName(module_idx); + const field_name = env.idents.getText(field_name_idx); + + if (std.mem.eql(u8, module_name, "MathUtils")) { + if (std.mem.eql(u8, field_name, "add") and target_node_idx == 100) { + found_add_with_idx_100 = true; + } else if (std.mem.eql(u8, field_name, "multiply") and target_node_idx == 200) { + found_multiply_with_idx_200 = true; + } else if (std.mem.eql(u8, field_name, "PI") and target_node_idx == 300) { + found_pi_with_idx_300 = true; + } + } + } + } + + // Verify all lookups have the correct target node indices + try expectEqual(true, found_add_with_idx_100); + try expectEqual(true, found_multiply_with_idx_200); + try expectEqual(true, found_pi_with_idx_300); + + // Test case where exposed_nodes is not populated (should get 0) + var empty_env = base.ModuleEnv.init(allocator); + defer empty_env.deinit(); + try empty_env.exposed_by_str.put(allocator, "undefined", {}); + // Don't add to exposed_nodes - should default to 0 + try module_envs.put("EmptyModule", &empty_env); + + const source2 = + \\module [test] + \\ + \\import EmptyModule exposing [undefined] + \\ + 
\\test = undefined + ; + + var tokens2 = try parse.tokenize(allocator, source2, .file); + defer tokens2.deinit(allocator); + var parse_env2 = base.ModuleEnv.init(allocator); + defer parse_env2.deinit(); + try parse_env2.calcLineStarts(source2); + var ast2 = try parse.parse(&parse_env2, &tokens2, allocator, .file); + defer ast2.deinit(); + + var env2 = base.ModuleEnv.init(allocator); + defer env2.deinit(); + try env2.calcLineStarts(source2); + var cir2 = CIR.init(&env2); + defer cir2.deinit(); + + var canonicalizer2 = try canonicalize.init(&cir2, &ast2, &module_envs); + defer canonicalizer2.deinit(); + + try canonicalizer2.canonicalizeFile(); + + // Verify that undefined gets target_node_idx = 0 (not found) + var found_undefined_with_idx_0 = false; + const all_exprs2 = cir2.store.expr_buffer.items; + for (all_exprs2) |node| { + if (node.tag == .expr_external_lookup) { + const field_name_idx: base.Ident.Idx = @bitCast(node.data_2); + const target_node_idx: u16 = @intCast(node.data_3); + const field_name = env2.idents.getText(field_name_idx); + + if (std.mem.eql(u8, field_name, "undefined") and target_node_idx == 0) { + found_undefined_with_idx_0 = true; + } + } + } + + try expectEqual(true, found_undefined_with_idx_0); +} + +test "export count safety - ensures safe u16 casting" { + const allocator = testing.allocator; + + // This test verifies that we check export counts to ensure safe casting to u16 + // The check triggers when exposed_items.len >= maxInt(u16) (65535) + // This leaves 0 available as a potential sentinel value if needed + + // Verify the threshold is what we expect + try expectEqual(@as(u32, 65535), std.math.maxInt(u16)); + + // Test the diagnostic for exactly maxInt(u16) exports + var env1 = base.ModuleEnv.init(allocator); + defer env1.deinit(); + var cir1 = CIR.init(&env1); + defer cir1.deinit(); + + const diag_at_limit = CIR.Diagnostic{ + .too_many_exports = .{ + .count = 65535, // Exactly at the limit + .region = base.Region{ .start = .{ .offset 
= 0 }, .end = .{ .offset = 10 } }, + }, + }; + + const diag_idx1 = cir1.store.addDiagnostic(diag_at_limit); + const retrieved1 = cir1.store.getDiagnostic(diag_idx1); + + switch (retrieved1) { + .too_many_exports => |d| { + try expectEqual(@as(u32, 65535), d.count); + }, + else => return error.UnexpectedDiagnostic, + } + + // Test the diagnostic for exceeding the limit + var env2 = base.ModuleEnv.init(allocator); + defer env2.deinit(); + var cir2 = CIR.init(&env2); + defer cir2.deinit(); + + const diag_over_limit = CIR.Diagnostic{ + .too_many_exports = .{ + .count = 70000, // Well over the limit + .region = base.Region{ .start = .{ .offset = 0 }, .end = .{ .offset = 10 } }, + }, + }; + + const diag_idx2 = cir2.store.addDiagnostic(diag_over_limit); + const retrieved2 = cir2.store.getDiagnostic(diag_idx2); + + switch (retrieved2) { + .too_many_exports => |d| { + try expectEqual(@as(u32, 70000), d.count); + }, + else => return error.UnexpectedDiagnostic, + } + + // Demonstrate that values under the limit can be safely cast to u16 + const safe_count: u32 = 65534; // Just under the limit + const casted: u16 = @intCast(safe_count); // This is safe + try expectEqual(@as(u16, 65534), casted); + + // The actual runtime check in createExposedScope ensures that we never + // attempt to cast values >= 65535 to u16, preventing overflow +} diff --git a/src/check/canonicalize/test/int_test.zig b/src/check/canonicalize/test/int_test.zig index 09e6211920..922b61bb17 100644 --- a/src/check/canonicalize/test/int_test.zig +++ b/src/check/canonicalize/test/int_test.zig @@ -29,7 +29,7 @@ fn parseAndCanonicalizeInt(allocator: std.mem.Allocator, source: []const u8) !st cir.* = CIR.init(module_env); const can = try allocator.create(canonicalize); - can.* = try canonicalize.init(cir, parse_ast); + can.* = try canonicalize.init(cir, parse_ast, null); const expr_idx: parse.AST.Expr.Idx = @enumFromInt(parse_ast.root_node_idx); const canonical_expr_idx = try can.canonicalizeExpr(expr_idx) orelse 
{ diff --git a/src/check/canonicalize/test/node_store_test.zig b/src/check/canonicalize/test/node_store_test.zig index cc85a9540f..97eae1612c 100644 --- a/src/check/canonicalize/test/node_store_test.zig +++ b/src/check/canonicalize/test/node_store_test.zig @@ -203,7 +203,11 @@ test "NodeStore round trip - Expressions" { }, }); try expressions.append(CIR.Expr{ - .e_lookup_external = @enumFromInt(345), + .e_lookup_external = .{ + .module_idx = @enumFromInt(0), + .target_node_idx = 42, + .region = from_raw_offsets(200, 210), + }, }); try expressions.append(CIR.Expr{ .e_list = .{ @@ -605,6 +609,58 @@ test "NodeStore round trip - Diagnostics" { }, }); + try diagnostics.append(CIR.Diagnostic{ + .exposed_but_not_implemented = .{ + .ident = @bitCast(@as(u32, 321)), + .region = from_raw_offsets(760, 770), + }, + }); + + try diagnostics.append(CIR.Diagnostic{ + .redundant_exposed = .{ + .ident = @bitCast(@as(u32, 432)), + .region = from_raw_offsets(770, 780), + .original_region = from_raw_offsets(780, 790), + }, + }); + + try diagnostics.append(CIR.Diagnostic{ + .module_not_found = .{ + .module_name = @bitCast(@as(u32, 543)), + .region = from_raw_offsets(790, 800), + }, + }); + + try diagnostics.append(CIR.Diagnostic{ + .value_not_exposed = .{ + .module_name = @bitCast(@as(u32, 654)), + .value_name = @bitCast(@as(u32, 655)), + .region = from_raw_offsets(800, 810), + }, + }); + + try diagnostics.append(CIR.Diagnostic{ + .type_not_exposed = .{ + .module_name = @bitCast(@as(u32, 765)), + .type_name = @bitCast(@as(u32, 766)), + .region = from_raw_offsets(810, 820), + }, + }); + + try diagnostics.append(CIR.Diagnostic{ + .module_not_imported = .{ + .module_name = @bitCast(@as(u32, 876)), + .region = from_raw_offsets(820, 830), + }, + }); + + try diagnostics.append(CIR.Diagnostic{ + .too_many_exports = .{ + .count = 65536, + .region = from_raw_offsets(830, 840), + }, + }); + // Test the round-trip for all diagnostics for (diagnostics.items) |diagnostic| { const idx = 
store.addDiagnostic(diagnostic); diff --git a/src/check/check_types.zig b/src/check/check_types.zig index 82e047c156..71d7449ab9 100644 --- a/src/check/check_types.zig +++ b/src/check/check_types.zig @@ -201,12 +201,10 @@ pub fn checkExpr(self: *Self, expr_idx: CIR.Expr.Idx) std.mem.Allocator.Error!bo const pattern_var = @as(Var, @enumFromInt(@intFromEnum(local.pattern_idx))); _ = self.unify(lookup_var, pattern_var); }, - .e_lookup_external => |external_idx| { - // For lookups, we need to connect the lookup expression to the actual variable - // For external lookups, connect to the external declaration's type - const lookup_var = @as(Var, @enumFromInt(@intFromEnum(expr_idx))); - const external_decl = self.can_ir.external_decls.items[@intFromEnum(external_idx)]; - _ = self.unify(lookup_var, external_decl.type_var); + .e_lookup_external => |e| { + // TODO: Handle type checking for external lookups + // For now, just skip type checking + _ = e; }, .e_list => |list| { const elem_var = @as(Var, @enumFromInt(@intFromEnum(list.elem_var))); diff --git a/src/check/let_polymorphism_integration_test.zig b/src/check/let_polymorphism_integration_test.zig index a08bd07d72..39c08db78d 100644 --- a/src/check/let_polymorphism_integration_test.zig +++ b/src/check/let_polymorphism_integration_test.zig @@ -46,7 +46,7 @@ fn typeCheckExpr(allocator: std.mem.Allocator, source: []const u8) !struct { cir.* = CIR.init(module_env); const can = try allocator.create(canonicalize); - can.* = try canonicalize.init(cir, parse_ast); + can.* = try canonicalize.init(cir, parse_ast, null); // Run canonicalization - for expressions var canon_expr_idx: ?CIR.Expr.Idx = null; @@ -111,7 +111,7 @@ fn typeCheckFile(allocator: std.mem.Allocator, source: []const u8) !struct { cir.* = CIR.init(module_env); const can = try allocator.create(canonicalize); - can.* = try canonicalize.init(cir, parse_ast); + can.* = try canonicalize.init(cir, parse_ast, null); // Run canonicalization - for files // Check if we have 
a valid file structure first @@ -181,7 +181,7 @@ fn typeCheckStatement(allocator: std.mem.Allocator, source: []const u8) !struct cir.* = CIR.init(module_env); const can = try allocator.create(canonicalize); - can.* = try canonicalize.init(cir, parse_ast); + can.* = try canonicalize.init(cir, parse_ast, null); // Run canonicalization - for statements var canon_result: ?CIR.Expr.Idx = null; diff --git a/src/coordinate_simple.zig b/src/coordinate_simple.zig index 510794f128..97ae64e21b 100644 --- a/src/coordinate_simple.zig +++ b/src/coordinate_simple.zig @@ -128,7 +128,7 @@ fn processSourceInternal( // Create scope for semantic analysis // Canonicalize the AST - var canonicalizer = try canonicalize.init(cir, &parse_ast); + var canonicalizer = try canonicalize.init(cir, &parse_ast, null); defer canonicalizer.deinit(); try canonicalizer.canonicalizeFile(); diff --git a/src/snapshot.zig b/src/snapshot.zig index 5d8284cefa..a6e842192f 100644 --- a/src/snapshot.zig +++ b/src/snapshot.zig @@ -365,7 +365,7 @@ fn processRocFileAsSnapshotWithExpected(allocator: Allocator, output_path: []con var can_ir = CIR.init(&module_env); defer can_ir.deinit(); - var can = canonicalize.init(&can_ir, &ast) catch |err| { + var can = canonicalize.init(&can_ir, &ast, null) catch |err| { warn("Canonicalization init failed: {}", .{err}); return; }; @@ -1499,7 +1499,7 @@ fn processSnapshotFileUnified(gpa: Allocator, snapshot_path: []const u8, maybe_f var can_ir = CIR.init(&module_env); defer can_ir.deinit(); - var can = try canonicalize.init(&can_ir, &parse_ast); + var can = try canonicalize.init(&can_ir, &parse_ast, null); defer can.deinit(); var maybe_expr_idx: ?CIR.Expr.Idx = null; diff --git a/src/snapshots/can_import_comprehensive.md b/src/snapshots/can_import_comprehensive.md index eedba59486..14761252cc 100644 --- a/src/snapshots/can_import_comprehensive.md +++ b/src/snapshots/can_import_comprehensive.md @@ -179,36 +179,44 @@ main = { (e-block @7.8-35.2 (s-let @8.5-8.22 (p-assign 
@8.5-8.11 (ident "client")) - (e-lookup-external - (ext-decl @8.14-8.22 (ident "http.Client.get") (kind "value")))) + (e-lookup-external @8.14-8.22 + (module-idx "1") + (target-node-idx "0"))) (s-let @9.5-9.23 (p-assign @9.5-9.11 (ident "parser")) - (e-lookup-external - (ext-decl @9.14-9.23 (ident "json.Json.utf8") (kind "value")))) + (e-lookup-external @9.14-9.23 + (module-idx "0") + (target-node-idx "0"))) (s-let @10.5-10.22 (p-assign @10.5-10.11 (ident "helper")) - (e-lookup-external - (ext-decl @10.14-10.22 (ident "utils.String.trim") (kind "value")))) + (e-lookup-external @10.14-10.22 + (module-idx "2") + (target-node-idx "0"))) (s-let @13.5-13.25 (p-assign @13.5-13.12 (ident "result1")) - (e-lookup-external - (ext-decl @13.15-13.25 (ident "json.Json.parse") (kind "value")))) + (e-lookup-external @13.15-13.25 + (module-idx "0") + (target-node-idx "0"))) (s-let @16.5-16.24 (p-assign @16.5-16.12 (ident "result2")) - (e-lookup-external - (ext-decl @16.15-16.24 (ident "http.Client.post") (kind "value")))) + (e-lookup-external @16.15-16.24 + (module-idx "1") + (target-node-idx "0"))) (s-let @19.5-19.18 (p-assign @19.5-19.12 (ident "result3")) - (e-lookup-external - (ext-decl @19.15-19.18 (ident "http.Client.get") (kind "value")))) + (e-lookup-external @19.15-19.18 + (module-idx "1") + (target-node-idx "0"))) (s-let @20.5-20.19 (p-assign @20.5-20.12 (ident "result4")) - (e-lookup-external - (ext-decl @20.15-20.19 (ident "http.Client.post") (kind "value")))) + (e-lookup-external @20.15-20.19 + (module-idx "1") + (target-node-idx "0"))) (s-let @23.5-23.26 (p-assign @23.5-23.13 (ident "combined")) - (e-lookup-external - (ext-decl @23.16-23.26 (ident "utils.String.concat") (kind "value")))) + (e-lookup-external @23.16-23.26 + (module-idx "2") + (target-node-idx "0"))) (e-tuple @25.5-34.6 (elems (e-lookup-local @26.9-26.15 @@ -234,15 +242,7 @@ main = { (exposed (name "get") (wildcard false)) (exposed (name "post") (wildcard false)))) (s-import @5.1-5.27 (module 
"utils.String") (qualifier "utils") (alias "Str") - (exposes)) - (ext-decl @8.14-8.22 (ident "http.Client.get") (kind "value")) - (ext-decl @9.14-9.23 (ident "json.Json.utf8") (kind "value")) - (ext-decl @10.14-10.22 (ident "utils.String.trim") (kind "value")) - (ext-decl @13.15-13.25 (ident "json.Json.parse") (kind "value")) - (ext-decl @16.15-16.24 (ident "http.Client.post") (kind "value")) - (ext-decl @19.15-19.18 (ident "http.Client.get") (kind "value")) - (ext-decl @20.15-20.19 (ident "http.Client.post") (kind "value")) - (ext-decl @23.16-23.26 (ident "utils.String.concat") (kind "value"))) + (exposes))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/can_import_exposing_types.md b/src/snapshots/can_import_exposing_types.md index 161d676c29..6524a9ebec 100644 --- a/src/snapshots/can_import_exposing_types.md +++ b/src/snapshots/can_import_exposing_types.md @@ -821,8 +821,9 @@ combineResults = |jsonResult, httpStatus| (args (p-assign @9.14-9.19 (ident "input"))) (e-call @9.21-9.38 - (e-lookup-external - (ext-decl @9.21-9.31 (ident "json.Json.parse") (kind "value"))) + (e-lookup-external @9.21-9.31 + (module-idx "0") + (target-node-idx "0")) (e-lookup-local @9.32-9.37 (p-assign @9.14-9.19 (ident "input"))))) (annotation @9.1-9.10 @@ -841,8 +842,9 @@ combineResults = |jsonResult, httpStatus| (s-let @14.5-14.35 (p-assign @14.5-14.11 (ident "result")) (e-call @14.14-14.35 - (e-lookup-external - (ext-decl @14.14-14.25 (ident "json.Json.decode") (kind "value"))) + (e-lookup-external @14.14-14.25 + (module-idx "0") + (target-node-idx "0")) (e-dot-access @14.26-14.35 (field "body") (receiver (e-lookup-local @14.26-14.29 @@ -859,8 +861,9 @@ combineResults = |jsonResult, httpStatus| (p-applied-tag @16.9-16.18))) (value (e-call @16.22-16.36 - (e-lookup-external - (ext-decl @16.22-16.29 (ident "http.Client.ok") (kind "value"))) + (e-lookup-external @16.22-16.29 + (module-idx "1") + (target-node-idx "0")) (e-lookup-local @16.30-16.35 (p-assign @16.12-16.17 (ident 
"value")))))) (branch @@ -869,8 +872,9 @@ combineResults = |jsonResult, httpStatus| (p-applied-tag @17.9-17.19))) (value (e-call @17.23-17.45 - (e-lookup-external - (ext-decl @17.23-17.38 (ident "http.Client.badRequest") (kind "value"))) + (e-lookup-external @17.23-17.38 + (module-idx "1") + (target-node-idx "0")) (e-lookup-local @17.39-17.44 (p-assign @17.13-17.18 (ident "error"))))))))))) (annotation @13.1-13.14 @@ -892,8 +896,9 @@ combineResults = |jsonResult, httpStatus| (args (p-assign @26.10-26.11 (ident "v"))) (e-call @26.13-26.41 - (e-lookup-external - (ext-decl @26.13-26.30 (ident "json.Json.validateWith") (kind "value"))) + (e-lookup-external @26.13-26.30 + (module-idx "0") + (target-node-idx "0")) (e-lookup-local @26.31-26.37 (p-assign @23.16-23.22 (ident "config"))) (e-lookup-local @26.39-26.40 @@ -914,8 +919,9 @@ combineResults = |jsonResult, httpStatus| (args (p-assign @38.17-38.23 (ident "config"))) (e-call @38.25-38.48 - (e-lookup-external - (ext-decl @38.25-38.40 (ident "http.Client.clientWith") (kind "value"))) + (e-lookup-external @38.25-38.40 + (module-idx "1") + (target-node-idx "0")) (e-lookup-local @38.41-38.47 (p-assign @38.17-38.23 (ident "config"))))) (annotation @38.1-38.13 @@ -943,8 +949,9 @@ combineResults = |jsonResult, httpStatus| (p-applied-tag @44.9-44.19))) (value (e-call @44.23-44.50 - (e-lookup-external - (ext-decl @44.23-44.42 (ident "http.Client.statusToString") (kind "value"))) + (e-lookup-external @44.23-44.42 + (module-idx "1") + (target-node-idx "0")) (e-lookup-local @44.43-44.49 (p-assign @44.12-44.18 (ident "status")))))) (branch @@ -1032,14 +1039,7 @@ combineResults = |jsonResult, httpStatus| (s-import @5.1-5.38 (module "utils.Result") (qualifier "utils") (exposes (exposed (name "Result") (wildcard false)))) - (ext-decl @9.21-9.31 (ident "json.Json.parse") (kind "value")) - (ext-decl @14.14-14.25 (ident "json.Json.decode") (kind "value")) - (ext-decl @16.22-16.29 (ident "http.Client.ok") (kind "value")) - (ext-decl 
@17.23-17.38 (ident "http.Client.badRequest") (kind "value")) - (ext-decl @26.13-26.30 (ident "json.Json.validateWith") (kind "value")) - (ext-decl @37.26-37.37 (ident "Http.Client") (kind "type")) - (ext-decl @38.25-38.40 (ident "http.Client.clientWith") (kind "value")) - (ext-decl @44.23-44.42 (ident "http.Client.statusToString") (kind "value"))) + (ext-decl @37.26-37.37 (ident "Http.Client") (kind "type"))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/can_import_json.md b/src/snapshots/can_import_json.md index 77e91e9dd4..82b03d3c40 100644 --- a/src/snapshots/can_import_json.md +++ b/src/snapshots/can_import_json.md @@ -43,11 +43,11 @@ NO CHANGE (can-ir (d-let (p-assign @5.1-5.5 (ident "main")) - (e-lookup-external - (ext-decl @5.8-5.17 (ident "json.Json.utf8") (kind "value")))) + (e-lookup-external @5.8-5.17 + (module-idx "0") + (target-node-idx "0"))) (s-import @3.1-3.17 (module "json.Json") (qualifier "json") - (exposes)) - (ext-decl @5.8-5.17 (ident "json.Json.utf8") (kind "value"))) + (exposes))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/can_import_type_annotations.md b/src/snapshots/can_import_type_annotations.md index 2041c2c944..cc2ce90815 100644 --- a/src/snapshots/can_import_type_annotations.md +++ b/src/snapshots/can_import_type_annotations.md @@ -421,8 +421,9 @@ combineResults = |result1, result2| (e-lambda @8.18-8.44 (args (p-assign @8.19-8.22 (ident "req"))) - (e-lookup-external - (ext-decl @8.24-8.44 (ident "http.Client.defaultResponse") (kind "value")))) + (e-lookup-external @8.24-8.44 + (module-idx "0") + (target-node-idx "0"))) (annotation @8.1-8.15 (declared-type (ty-fn @7.18-7.37 (effectful false) @@ -434,8 +435,9 @@ combineResults = |result1, result2| (args (p-assign @11.14-11.19 (ident "input"))) (e-call @11.21-11.38 - (e-lookup-external - (ext-decl @11.21-11.31 (ident "json.Json.parse") (kind "value"))) + (e-lookup-external @11.21-11.31 + (module-idx "1") + (target-node-idx "0")) (e-lookup-local @11.32-11.37 (p-assign 
@11.14-11.19 (ident "input"))))) (annotation @11.1-11.10 @@ -453,8 +455,9 @@ combineResults = |result1, result2| (s-let @15.5-15.39 (p-assign @15.5-15.11 (ident "result")) (e-call @15.14-15.39 - (e-lookup-external - (ext-decl @15.14-15.25 (ident "json.Json.decode") (kind "value"))) + (e-lookup-external @15.14-15.25 + (module-idx "1") + (target-node-idx "0")) (e-dot-access @15.26-15.39 (field "body") (receiver (e-lookup-local @15.26-15.33 @@ -498,8 +501,9 @@ combineResults = |result1, result2| (ext-decl @13.51-13.61 (ident "Json.Error") (kind "type")))))))) (d-let (p-assign @23.1-23.7 (ident "config")) - (e-lookup-external - (ext-decl @23.10-23.28 (ident "json.Json.defaultConfig") (kind "value"))) + (e-lookup-external @23.10-23.28 + (module-idx "1") + (target-node-idx "0")) (annotation @23.1-23.7 (declared-type (ty-lookup-external @22.10-22.21 @@ -511,8 +515,9 @@ combineResults = |result1, result2| (p-assign @27.19-27.31 (ident "parserConfig")) (p-assign @27.33-27.38 (ident "input"))) (e-call @27.40-27.82 - (e-lookup-external - (ext-decl @27.40-27.61 (ident "json.Json.parseWith") (kind "value"))) + (e-lookup-external @27.40-27.61 + (module-idx "1") + (target-node-idx "0")) (e-lookup-local @27.62-27.74 (p-assign @27.19-27.31 (ident "parserConfig"))) (e-lookup-local @27.76-27.81 @@ -605,19 +610,14 @@ combineResults = |result1, result2| (s-import @5.1-5.38 (module "utils.Result") (qualifier "utils") (exposes (exposed (name "Result") (wildcard false)))) - (ext-decl @8.24-8.44 (ident "http.Client.defaultResponse") (kind "value")) (ext-decl @10.20-10.30 (ident "Json.Value") (kind "type")) - (ext-decl @11.21-11.31 (ident "json.Json.parse") (kind "value")) (ext-decl @13.13-13.25 (ident "Http.Request") (kind "type")) (ext-decl @13.36-13.49 (ident "Http.Response") (kind "type")) (ext-decl @13.51-13.61 (ident "Json.Error") (kind "type")) - (ext-decl @15.14-15.25 (ident "json.Json.decode") (kind "value")) (ext-decl @22.10-22.21 (ident "Json.Config") (kind "type")) - (ext-decl 
@23.10-23.28 (ident "json.Json.defaultConfig") (kind "value")) (ext-decl @26.18-26.36 (ident "Json.Parser.Config") (kind "type")) (ext-decl @26.52-26.62 (ident "Json.Value") (kind "type")) - (ext-decl @26.64-26.81 (ident "Json.Parser.Error") (kind "type")) - (ext-decl @27.40-27.61 (ident "json.Json.parseWith") (kind "value"))) + (ext-decl @26.64-26.81 (ident "Json.Parser.Error") (kind "type"))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/can_import_unresolved_qualified.md b/src/snapshots/can_import_unresolved_qualified.md index 8e25f81a9d..32786d201f 100644 --- a/src/snapshots/can_import_unresolved_qualified.md +++ b/src/snapshots/can_import_unresolved_qualified.md @@ -162,16 +162,18 @@ parser = Json.create (can-ir (d-let (p-assign @7.1-7.5 (ident "main")) - (e-lookup-external - (ext-decl @7.8-7.31 (ident "json.Json.method") (kind "value")))) + (e-lookup-external @7.8-7.31 + (module-idx "0") + (target-node-idx "0"))) (d-let (p-assign @11.1-11.10 (ident "parseData")) (e-lambda @11.13-11.40 (args (p-assign @11.14-11.18 (ident "data"))) (e-call @11.20-11.40 - (e-lookup-external - (ext-decl @11.20-11.34 (ident "json.Json.stringify") (kind "value"))) + (e-lookup-external @11.20-11.34 + (module-idx "0") + (target-node-idx "0")) (e-lookup-local @11.35-11.39 (p-assign @11.14-11.18 (ident "data"))))) (annotation @11.1-11.10 @@ -185,8 +187,9 @@ parser = Json.create (e-lambda @15.18-15.51 (args (p-assign @15.19-15.22 (ident "req"))) - (e-lookup-external - (ext-decl @15.24-15.51 (ident "http.Client.defaultResponse") (kind "value")))) + (e-lookup-external @15.24-15.51 + (module-idx "1") + (target-node-idx "0"))) (annotation @15.1-15.15 (declared-type (ty-fn @14.18-14.61 (effectful false) @@ -197,8 +200,9 @@ parser = Json.create (d-let (p-assign @18.1-18.7 (ident "result")) (e-call @18.10-18.28 - (e-lookup-external - (ext-decl @18.10-18.20 (ident "json.Json.prase") (kind "value"))) + (e-lookup-external @18.10-18.20 + (module-idx "0") + (target-node-idx "0")) (e-string 
@18.21-18.27 (e-literal @18.22-18.26 (string "test"))))) (d-let @@ -207,25 +211,21 @@ parser = Json.create (p-assign @21.1-21.7 (ident "config")))) (d-let (p-assign @24.1-24.7 (ident "client")) - (e-lookup-external - (ext-decl @24.10-24.28 (ident "http.Client.invalidMethod") (kind "value")))) + (e-lookup-external @24.10-24.28 + (module-idx "1") + (target-node-idx "0"))) (d-let (p-assign @27.1-27.7 (ident "parser")) - (e-lookup-external - (ext-decl @27.10-27.49 (ident "json.Json.create") (kind "value")))) + (e-lookup-external @27.10-27.49 + (module-idx "0") + (target-node-idx "0"))) (s-import @3.1-3.17 (module "json.Json") (qualifier "json") (exposes)) (s-import @4.1-4.27 (module "http.Client") (qualifier "http") (alias "Http") (exposes)) - (ext-decl @7.8-7.31 (ident "json.Json.method") (kind "value")) (ext-decl @10.13-10.29 (ident "Json.InvalidType") (kind "type")) - (ext-decl @11.20-11.34 (ident "json.Json.stringify") (kind "value")) (ext-decl @14.18-14.37 (ident "Http.Server.Request") (kind "type")) - (ext-decl @14.41-14.61 (ident "Http.Server.Response") (kind "type")) - (ext-decl @15.24-15.51 (ident "http.Client.defaultResponse") (kind "value")) - (ext-decl @18.10-18.20 (ident "json.Json.prase") (kind "value")) - (ext-decl @24.10-24.28 (ident "http.Client.invalidMethod") (kind "value")) - (ext-decl @27.10-27.49 (ident "json.Json.create") (kind "value"))) + (ext-decl @14.41-14.61 (ident "Http.Server.Response") (kind "type"))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/can_import_with_alias.md b/src/snapshots/can_import_with_alias.md index 4ae83d53e5..a2d15e653f 100644 --- a/src/snapshots/can_import_with_alias.md +++ b/src/snapshots/can_import_with_alias.md @@ -43,11 +43,11 @@ NO CHANGE (can-ir (d-let (p-assign @5.1-5.5 (ident "main")) - (e-lookup-external - (ext-decl @5.8-5.21 (ident "json.Json.decode") (kind "value")))) + (e-lookup-external @5.8-5.21 + (module-idx "0") + (target-node-idx "0"))) (s-import @3.1-3.27 (module "json.Json") (qualifier "json") 
(alias "MyJson") - (exposes)) - (ext-decl @5.8-5.21 (ident "json.Json.decode") (kind "value"))) + (exposes))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/effectful_with_effectful_annotation.md b/src/snapshots/effectful_with_effectful_annotation.md index bfd2565ead..a65288aaec 100644 --- a/src/snapshots/effectful_with_effectful_annotation.md +++ b/src/snapshots/effectful_with_effectful_annotation.md @@ -78,8 +78,9 @@ NO CHANGE (args (p-assign @7.15-7.18 (ident "msg"))) (e-call @7.20-7.37 - (e-lookup-external - (ext-decl @7.20-7.32 (ident "pf.Stdout.line!") (kind "value"))) + (e-lookup-external @7.20-7.32 + (module-idx "0") + (target-node-idx "0")) (e-lookup-local @7.33-7.36 (p-assign @7.15-7.18 (ident "msg"))))) (annotation @7.1-7.11 @@ -95,8 +96,7 @@ NO CHANGE (e-string @9.20-9.35 (e-literal @9.21-9.34 (string "Hello, world!"))))) (s-import @3.1-3.17 (module "pf.Stdout") (qualifier "pf") - (exposes)) - (ext-decl @7.20-7.32 (ident "pf.Stdout.line!") (kind "value"))) + (exposes))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/external_decl_lookup.md b/src/snapshots/external_decl_lookup.md index 49605bae27..bec6e702a5 100644 --- a/src/snapshots/external_decl_lookup.md +++ b/src/snapshots/external_decl_lookup.md @@ -91,21 +91,21 @@ main! 
= |_| { (s-let @8.5-8.54 (p-assign @8.5-8.11 (ident "result")) (e-call @8.14-8.54 - (e-lookup-external - (ext-decl @8.14-8.23 (ident "json.Json.utf8") (kind "value"))) + (e-lookup-external @8.14-8.23 + (module-idx "1") + (target-node-idx "0")) (e-string @8.24-8.53 (e-literal @8.25-8.52 (string "Hello from external module!"))))) (e-call @9.5-9.25 - (e-lookup-external - (ext-decl @9.5-9.17 (ident "pf.Stdout.line!") (kind "value"))) + (e-lookup-external @9.5-9.17 + (module-idx "0") + (target-node-idx "0")) (e-lookup-local @9.18-9.24 (p-assign @8.5-8.11 (ident "result"))))))) (s-import @3.1-3.17 (module "pf.Stdout") (qualifier "pf") (exposes)) (s-import @4.1-4.17 (module "json.Json") (qualifier "json") - (exposes)) - (ext-decl @8.14-8.23 (ident "json.Json.utf8") (kind "value")) - (ext-decl @9.5-9.17 (ident "pf.Stdout.line!") (kind "value"))) + (exposes))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/file/inline_ingested_file.md b/src/snapshots/file/inline_ingested_file.md index a483c8d9f3..5a870bc5e3 100644 --- a/src/snapshots/file/inline_ingested_file.md +++ b/src/snapshots/file/inline_ingested_file.md @@ -13,9 +13,18 @@ import Json foo = Json.parse(data) ~~~ # EXPECTED -NIL +EXPOSED BUT NOT DEFINED - inline_ingested_file.md:1:9:1:12 # PROBLEMS -NIL +**EXPOSED BUT NOT DEFINED** +The module header says that ``foo`` is exposed, but it is not defined anywhere in this module. + +**inline_ingested_file.md:1:9:1:12:** +```roc +module [foo] +``` + ^^^ +You can fix this by either defining ``foo`` in this module, or by removing it from the list of exposed values. 
+ # TOKENS ~~~zig KwModule(1:1-1:7),OpenSquare(1:8-1:9),LowerIdent(1:9-1:12),CloseSquare(1:12-1:13),Newline(1:1-1:1), diff --git a/src/snapshots/function_no_annotation.md b/src/snapshots/function_no_annotation.md index 24032d1182..5196c79ebf 100644 --- a/src/snapshots/function_no_annotation.md +++ b/src/snapshots/function_no_annotation.md @@ -114,8 +114,9 @@ NO CHANGE (args (p-assign @9.18-9.19 (ident "n"))) (e-call @9.21-9.36 - (e-lookup-external - (ext-decl @9.21-9.33 (ident "pf.Stdout.line!") (kind "value"))) + (e-lookup-external @9.21-9.33 + (module-idx "0") + (target-node-idx "0")) (e-lookup-local @9.34-9.35 (p-assign @9.18-9.19 (ident "n")))))) (d-let @@ -139,8 +140,7 @@ NO CHANGE (p-assign @12.1-12.9 (ident "process!"))) (e-int @14.18-14.20 (value "42")))) (s-import @3.1-3.17 (module "pf.Stdout") (qualifier "pf") - (exposes)) - (ext-decl @9.21-9.33 (ident "pf.Stdout.line!") (kind "value"))) + (exposes))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/fuzz_crash/fuzz_crash_023.md b/src/snapshots/fuzz_crash/fuzz_crash_023.md index 9d029343a0..5f4a17410f 100644 --- a/src/snapshots/fuzz_crash/fuzz_crash_023.md +++ b/src/snapshots/fuzz_crash/fuzz_crash_023.md @@ -2841,8 +2841,9 @@ expect { (s-expr @190.2-191.8 (e-runtime-error (tag "not_implemented"))) (e-call @191.2-195.3 - (e-lookup-external - (ext-decl @191.2-191.14 (ident "pf.Stdout.line!") (kind "value"))) + (e-lookup-external @191.2-191.14 + (module-idx "0") + (target-node-idx "0")) (e-string @192.3-194.18 (e-literal @192.4-192.14 (string "How about ")) (e-call @193.4-193.21 @@ -2991,8 +2992,7 @@ expect { (e-lookup-local @206.2-206.6 (p-assign @205.2-205.6 (ident "blah"))) (e-lookup-local @206.10-206.13 - (p-assign @204.2-204.5 (ident "foo")))))) - (ext-decl @191.2-191.14 (ident "pf.Stdout.line!") (kind "value"))) + (p-assign @204.2-204.5 (ident "foo"))))))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/fuzz_crash/fuzz_crash_028.md b/src/snapshots/fuzz_crash/fuzz_crash_028.md index 
947f36e52c..d8d88ba00d 100644 Binary files a/src/snapshots/fuzz_crash/fuzz_crash_028.md and b/src/snapshots/fuzz_crash/fuzz_crash_028.md differ diff --git a/src/snapshots/fuzz_crash/fuzz_crash_032.md b/src/snapshots/fuzz_crash/fuzz_crash_032.md index 121a855739..7dc1104d01 100644 --- a/src/snapshots/fuzz_crash/fuzz_crash_032.md +++ b/src/snapshots/fuzz_crash/fuzz_crash_032.md @@ -112,6 +112,26 @@ olor = |color| { import Color.RGB ^^^^^ +**EXPOSED BUT NOT DEFINED** +The module header says that ``r`` is exposed, but it is not defined anywhere in this module. + +**fuzz_crash_032.md:1:13:1:14:** +```roc +module [tus,r] +``` + ^ +You can fix this by either defining ``r`` in this module, or by removing it from the list of exposed values. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``tus`` is exposed, but it is not defined anywhere in this module. + +**fuzz_crash_032.md:1:9:1:12:** +```roc +module [tus,r] +``` + ^^^ +You can fix this by either defining ``tus`` in this module, or by removing it from the list of exposed values. 
+ # TOKENS ~~~zig KwModule(1:1-1:7),OpenSquare(1:8-1:9),LowerIdent(1:9-1:12),Comma(1:12-1:13),LowerIdent(1:13-1:14),CloseSquare(1:14-1:15),Newline(1:1-1:1), diff --git a/src/snapshots/hello_world.md b/src/snapshots/hello_world.md index 86d0de2d43..6ea6fe4517 100644 --- a/src/snapshots/hello_world.md +++ b/src/snapshots/hello_world.md @@ -61,13 +61,13 @@ NO CHANGE (args (p-underscore @5.10-5.11)) (e-call @5.13-5.42 - (e-lookup-external - (ext-decl @5.13-5.25 (ident "pf.Stdout.line!") (kind "value"))) + (e-lookup-external @5.13-5.25 + (module-idx "0") + (target-node-idx "0")) (e-string @5.26-5.41 (e-literal @5.27-5.40 (string "Hello, world!")))))) (s-import @3.1-3.17 (module "pf.Stdout") (qualifier "pf") - (exposes)) - (ext-decl @5.13-5.25 (ident "pf.Stdout.line!") (kind "value"))) + (exposes))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/hello_world_with_block.md b/src/snapshots/hello_world_with_block.md index 58bcd27295..52361c09d0 100644 --- a/src/snapshots/hello_world_with_block.md +++ b/src/snapshots/hello_world_with_block.md @@ -13,15 +13,26 @@ app [main!] { pf: platform "../basic-cli/platform.roc" } import pf.Stdout main! = |_| { - name = "World" + world = "World" # Hello - Stdout.line!("Hello, ${name}!") + Stdout.line!("Hello, world!") } ~~~ # EXPECTED -NIL +UNUSED VARIABLE - hello_world_with_block.md:9:2:9:7 # PROBLEMS -NIL +**UNUSED VARIABLE** +Variable ``world`` is not used anywhere in your code. + +If you don't need this variable, prefix it with an underscore like `_world` to suppress this warning. 
+The unused variable is declared here: +**hello_world_with_block.md:9:2:9:7:** +```roc + world = "World" +``` + ^^^^^ + + # TOKENS ~~~zig Newline(1:2-1:15), @@ -32,9 +43,9 @@ Newline(1:1-1:1), KwImport(6:1-6:7),LowerIdent(6:8-6:10),NoSpaceDotUpperIdent(6:10-6:17),Newline(1:1-1:1), Newline(1:1-1:1), LowerIdent(8:1-8:6),OpAssign(8:7-8:8),OpBar(8:9-8:10),Underscore(8:10-8:11),OpBar(8:11-8:12),OpenCurly(8:13-8:14),Newline(1:1-1:1), -LowerIdent(9:2-9:6),OpAssign(9:7-9:8),StringStart(9:9-9:10),StringPart(9:10-9:15),StringEnd(9:15-9:16),Newline(1:1-1:1), +LowerIdent(9:2-9:7),OpAssign(9:8-9:9),StringStart(9:10-9:11),StringPart(9:11-9:16),StringEnd(9:16-9:17),Newline(1:1-1:1), Newline(10:3-10:9), -UpperIdent(11:2-11:8),NoSpaceDotLowerIdent(11:8-11:14),NoSpaceOpenRound(11:14-11:15),StringStart(11:15-11:16),StringPart(11:16-11:23),OpenStringInterpolation(11:23-11:25),LowerIdent(11:25-11:29),CloseStringInterpolation(11:29-11:30),StringPart(11:30-11:31),StringEnd(11:31-11:32),CloseRound(11:32-11:33),Newline(1:1-1:1), +UpperIdent(11:2-11:8),NoSpaceDotLowerIdent(11:8-11:14),NoSpaceOpenRound(11:14-11:15),StringStart(11:15-11:16),StringPart(11:16-11:29),StringEnd(11:29-11:30),CloseRound(11:30-11:31),Newline(1:1-1:1), CloseCurly(12:1-12:2),EndOfFile(12:2-12:2), ~~~ # PARSE @@ -59,16 +70,14 @@ CloseCurly(12:1-12:2),EndOfFile(12:2-12:2), (p-underscore)) (e-block @8.13-12.2 (statements - (s-decl @9.2-9.16 - (p-ident @9.2-9.6 (raw "name")) - (e-string @9.9-9.16 - (e-string-part @9.10-9.15 (raw "World")))) - (e-apply @11.2-11.33 + (s-decl @9.2-9.17 + (p-ident @9.2-9.7 (raw "world")) + (e-string @9.10-9.17 + (e-string-part @9.11-9.16 (raw "World")))) + (e-apply @11.2-11.31 (e-ident @11.2-11.14 (raw "Stdout.line!")) - (e-string @11.15-11.32 - (e-string-part @11.16-11.23 (raw "Hello, ")) - (e-ident @11.25-11.29 (raw "name")) - (e-string-part @11.30-11.31 (raw "!")))))))))) + (e-string @11.15-11.30 + (e-string-part @11.16-11.29 (raw "Hello, world!")))))))))) ~~~ # FORMATTED ~~~roc @@ -83,21 
+92,18 @@ NO CHANGE (args (p-underscore @8.10-8.11)) (e-block @8.13-12.2 - (s-let @9.2-9.16 - (p-assign @9.2-9.6 (ident "name")) - (e-string @9.9-9.16 - (e-literal @9.10-9.15 (string "World")))) - (e-call @11.2-11.33 - (e-lookup-external - (ext-decl @11.2-11.14 (ident "pf.Stdout.line!") (kind "value"))) - (e-string @11.15-11.32 - (e-literal @11.16-11.23 (string "Hello, ")) - (e-lookup-local @11.25-11.29 - (p-assign @9.2-9.6 (ident "name"))) - (e-literal @11.30-11.31 (string "!"))))))) + (s-let @9.2-9.17 + (p-assign @9.2-9.7 (ident "world")) + (e-string @9.10-9.17 + (e-literal @9.11-9.16 (string "World")))) + (e-call @11.2-11.31 + (e-lookup-external @11.2-11.14 + (module-idx "0") + (target-node-idx "0")) + (e-string @11.15-11.30 + (e-literal @11.16-11.29 (string "Hello, world!"))))))) (s-import @6.1-6.17 (module "pf.Stdout") (qualifier "pf") - (exposes)) - (ext-decl @11.2-11.14 (ident "pf.Stdout.line!") (kind "value"))) + (exposes))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/import_exposing_alias.md b/src/snapshots/import_exposing_alias.md index 12403a971d..79971dacf9 100644 --- a/src/snapshots/import_exposing_alias.md +++ b/src/snapshots/import_exposing_alias.md @@ -99,15 +99,17 @@ main = { (s-let @7.2-7.24 (p-assign @7.2-7.9 (ident "encoded")) (e-call @7.12-7.24 - (e-lookup-external - (ext-decl @7.12-7.18 (ident "json.Json.encode") (kind "value"))) + (e-lookup-external @7.12-7.18 + (module-idx "0") + (target-node-idx "0")) (e-lookup-local @7.19-7.23 (p-assign @6.2-6.6 (ident "data"))))) (s-let @8.2-8.29 (p-assign @8.2-8.9 (ident "decoded")) (e-call @8.12-8.29 - (e-lookup-external - (ext-decl @8.12-8.20 (ident "json.Json.decode") (kind "value"))) + (e-lookup-external @8.12-8.20 + (module-idx "0") + (target-node-idx "0")) (e-lookup-local @8.21-8.28 (p-assign @7.2-7.9 (ident "encoded"))))) (e-lookup-local @9.2-9.9 @@ -115,9 +117,7 @@ main = { (s-import @3.1-3.65 (module "json.Json") (qualifier "json") (exposes (exposed (name "decode") (alias "fromJson") 
(wildcard false)) - (exposed (name "encode") (alias "toJson") (wildcard false)))) - (ext-decl @7.12-7.18 (ident "json.Json.encode") (kind "value")) - (ext-decl @8.12-8.20 (ident "json.Json.decode") (kind "value"))) + (exposed (name "encode") (alias "toJson") (wildcard false))))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/import_exposing_basic.md b/src/snapshots/import_exposing_basic.md index eb7ba0c88e..b08146a0f3 100644 --- a/src/snapshots/import_exposing_basic.md +++ b/src/snapshots/import_exposing_basic.md @@ -99,15 +99,17 @@ main = { (s-let @7.5-7.27 (p-assign @7.5-7.12 (ident "encoded")) (e-call @7.15-7.27 - (e-lookup-external - (ext-decl @7.15-7.21 (ident "json.Json.encode") (kind "value"))) + (e-lookup-external @7.15-7.21 + (module-idx "0") + (target-node-idx "0")) (e-lookup-local @7.22-7.26 (p-assign @6.5-6.9 (ident "data"))))) (s-let @8.5-8.30 (p-assign @8.5-8.12 (ident "decoded")) (e-call @8.15-8.30 - (e-lookup-external - (ext-decl @8.15-8.21 (ident "json.Json.decode") (kind "value"))) + (e-lookup-external @8.15-8.21 + (module-idx "0") + (target-node-idx "0")) (e-lookup-local @8.22-8.29 (p-assign @7.5-7.12 (ident "encoded"))))) (e-lookup-local @9.5-9.12 @@ -115,9 +117,7 @@ main = { (s-import @3.1-3.43 (module "json.Json") (qualifier "json") (exposes (exposed (name "decode") (wildcard false)) - (exposed (name "encode") (wildcard false)))) - (ext-decl @7.15-7.21 (ident "json.Json.encode") (kind "value")) - (ext-decl @8.15-8.21 (ident "json.Json.decode") (kind "value"))) + (exposed (name "encode") (wildcard false))))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/module_multiline_with_comments.md b/src/snapshots/module_multiline_with_comments.md index ffe17187f4..afa8def8ea 100644 --- a/src/snapshots/module_multiline_with_comments.md +++ b/src/snapshots/module_multiline_with_comments.md @@ -12,9 +12,28 @@ module # Comment after module keyword ] ~~~ # EXPECTED -NIL +EXPOSED BUT NOT DEFINED - module_multiline_with_comments.md:3:3:3:12 # PROBLEMS 
-NIL +**EXPOSED BUT NOT DEFINED** +The module header says that ``something`` is exposed, but it is not defined anywhere in this module. + +**module_multiline_with_comments.md:3:3:3:12:** +```roc + something, # Comment after exposed item +``` + ^^^^^^^^^ +You can fix this by either defining ``something`` in this module, or by removing it from the list of exposed values. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``SomeType`` is exposed, but it is not defined anywhere in this module. + +**module_multiline_with_comments.md:4:3:4:11:** +```roc + SomeType, # Comment after final exposed item +``` + ^^^^^^^^ +You can fix this by either defining ``SomeType`` in this module, or by removing it from the list of exposed values. + # TOKENS ~~~zig KwModule(1:1-1:7),Newline(1:9-1:38), diff --git a/src/snapshots/module_nonempty_single.md b/src/snapshots/module_nonempty_single.md index 2842f0314d..1c4538ac06 100644 --- a/src/snapshots/module_nonempty_single.md +++ b/src/snapshots/module_nonempty_single.md @@ -8,9 +8,28 @@ type=file module [something, SomeType] ~~~ # EXPECTED -NIL +EXPOSED BUT NOT DEFINED - module_nonempty_single.md:1:9:1:18 # PROBLEMS -NIL +**EXPOSED BUT NOT DEFINED** +The module header says that ``something`` is exposed, but it is not defined anywhere in this module. + +**module_nonempty_single.md:1:9:1:18:** +```roc +module [something, SomeType] +``` + ^^^^^^^^^ +You can fix this by either defining ``something`` in this module, or by removing it from the list of exposed values. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``SomeType`` is exposed, but it is not defined anywhere in this module. + +**module_nonempty_single.md:1:20:1:28:** +```roc +module [something, SomeType] +``` + ^^^^^^^^ +You can fix this by either defining ``SomeType`` in this module, or by removing it from the list of exposed values. 
+ # TOKENS ~~~zig KwModule(1:1-1:7),OpenSquare(1:8-1:9),LowerIdent(1:9-1:18),Comma(1:18-1:19),UpperIdent(1:20-1:28),CloseSquare(1:28-1:29),EndOfFile(1:29-1:29), diff --git a/src/snapshots/module_singleline_fmts_to_multiline.md b/src/snapshots/module_singleline_fmts_to_multiline.md index 67877d6779..d679ae0591 100644 --- a/src/snapshots/module_singleline_fmts_to_multiline.md +++ b/src/snapshots/module_singleline_fmts_to_multiline.md @@ -8,9 +8,28 @@ type=file module [something, SomeType,] ~~~ # EXPECTED -NIL +EXPOSED BUT NOT DEFINED - module_singleline_fmts_to_multiline.md:1:9:1:18 # PROBLEMS -NIL +**EXPOSED BUT NOT DEFINED** +The module header says that ``something`` is exposed, but it is not defined anywhere in this module. + +**module_singleline_fmts_to_multiline.md:1:9:1:18:** +```roc +module [something, SomeType,] +``` + ^^^^^^^^^ +You can fix this by either defining ``something`` in this module, or by removing it from the list of exposed values. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``SomeType`` is exposed, but it is not defined anywhere in this module. + +**module_singleline_fmts_to_multiline.md:1:20:1:28:** +```roc +module [something, SomeType,] +``` + ^^^^^^^^ +You can fix this by either defining ``SomeType`` in this module, or by removing it from the list of exposed values. 
+ # TOKENS ~~~zig KwModule(1:1-1:7),OpenSquare(1:8-1:9),LowerIdent(1:9-1:18),Comma(1:18-1:19),UpperIdent(1:20-1:28),Comma(1:28-1:29),CloseSquare(1:29-1:30),EndOfFile(1:30-1:30), diff --git a/src/snapshots/package_header_nonempty_multiline_1.md b/src/snapshots/package_header_nonempty_multiline_1.md index 05ea4c0ca7..52f5ce1eff 100644 --- a/src/snapshots/package_header_nonempty_multiline_1.md +++ b/src/snapshots/package_header_nonempty_multiline_1.md @@ -10,9 +10,28 @@ package # This comment is here { somePkg: "../main.roc" } ~~~ # EXPECTED -NIL +EXPOSED BUT NOT DEFINED - package_header_nonempty_multiline_1.md:2:3:2:12 # PROBLEMS -NIL +**EXPOSED BUT NOT DEFINED** +The module header says that ``something`` is exposed, but it is not defined anywhere in this module. + +**package_header_nonempty_multiline_1.md:2:3:2:12:** +```roc + [something, SomeType] +``` + ^^^^^^^^^ +You can fix this by either defining ``something`` in this module, or by removing it from the list of exposed values. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``SomeType`` is exposed, but it is not defined anywhere in this module. + +**package_header_nonempty_multiline_1.md:2:14:2:22:** +```roc + [something, SomeType] +``` + ^^^^^^^^ +You can fix this by either defining ``SomeType`` in this module, or by removing it from the list of exposed values. + # TOKENS ~~~zig KwPackage(1:1-1:8),Newline(1:10-1:31), diff --git a/src/snapshots/package_header_nonempty_multiline_3.md b/src/snapshots/package_header_nonempty_multiline_3.md index 3f2cd15e7f..6ebb77d777 100644 --- a/src/snapshots/package_header_nonempty_multiline_3.md +++ b/src/snapshots/package_header_nonempty_multiline_3.md @@ -10,9 +10,28 @@ package { somePkg: "../main.roc", } ~~~ # EXPECTED -NIL +EXPOSED BUT NOT DEFINED - package_header_nonempty_multiline_3.md:2:3:2:12 # PROBLEMS -NIL +**EXPOSED BUT NOT DEFINED** +The module header says that ``something`` is exposed, but it is not defined anywhere in this module. 
+ +**package_header_nonempty_multiline_3.md:2:3:2:12:** +```roc + [something, SomeType,] +``` + ^^^^^^^^^ +You can fix this by either defining ``something`` in this module, or by removing it from the list of exposed values. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``SomeType`` is exposed, but it is not defined anywhere in this module. + +**package_header_nonempty_multiline_3.md:2:14:2:22:** +```roc + [something, SomeType,] +``` + ^^^^^^^^ +You can fix this by either defining ``SomeType`` in this module, or by removing it from the list of exposed values. + # TOKENS ~~~zig KwPackage(1:1-1:8),Newline(1:1-1:1), diff --git a/src/snapshots/package_header_nonempty_multiline_4.md b/src/snapshots/package_header_nonempty_multiline_4.md index 32a64251e7..31ab69b1b4 100644 --- a/src/snapshots/package_header_nonempty_multiline_4.md +++ b/src/snapshots/package_header_nonempty_multiline_4.md @@ -15,9 +15,28 @@ package } ~~~ # EXPECTED -NIL +EXPOSED BUT NOT DEFINED - package_header_nonempty_multiline_4.md:3:3:3:12 # PROBLEMS -NIL +**EXPOSED BUT NOT DEFINED** +The module header says that ``something`` is exposed, but it is not defined anywhere in this module. + +**package_header_nonempty_multiline_4.md:3:3:3:12:** +```roc + something, +``` + ^^^^^^^^^ +You can fix this by either defining ``something`` in this module, or by removing it from the list of exposed values. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``SomeType`` is exposed, but it is not defined anywhere in this module. + +**package_header_nonempty_multiline_4.md:4:3:4:11:** +```roc + SomeType, +``` + ^^^^^^^^ +You can fix this by either defining ``SomeType`` in this module, or by removing it from the list of exposed values. 
+ # TOKENS ~~~zig KwPackage(1:1-1:8),Newline(1:1-1:1), diff --git a/src/snapshots/package_header_nonempty_multiline_6.md b/src/snapshots/package_header_nonempty_multiline_6.md index 8af4764c20..c3ce3ccaf5 100644 --- a/src/snapshots/package_header_nonempty_multiline_6.md +++ b/src/snapshots/package_header_nonempty_multiline_6.md @@ -16,9 +16,28 @@ package # Comment after keyword } ~~~ # EXPECTED -NIL +EXPOSED BUT NOT DEFINED - package_header_nonempty_multiline_6.md:3:3:3:12 # PROBLEMS -NIL +**EXPOSED BUT NOT DEFINED** +The module header says that ``something`` is exposed, but it is not defined anywhere in this module. + +**package_header_nonempty_multiline_6.md:3:3:3:12:** +```roc + something, # Comment after exposed item +``` + ^^^^^^^^^ +You can fix this by either defining ``something`` in this module, or by removing it from the list of exposed values. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``SomeType`` is exposed, but it is not defined anywhere in this module. + +**package_header_nonempty_multiline_6.md:4:3:4:11:** +```roc + SomeType, # Comment after last exposed item +``` + ^^^^^^^^ +You can fix this by either defining ``SomeType`` in this module, or by removing it from the list of exposed values. + # TOKENS ~~~zig KwPackage(1:1-1:8),Newline(1:10-1:32), diff --git a/src/snapshots/package_header_nonempty_singleline_1.md b/src/snapshots/package_header_nonempty_singleline_1.md index dd93b42dfa..8e19603ef8 100644 --- a/src/snapshots/package_header_nonempty_singleline_1.md +++ b/src/snapshots/package_header_nonempty_singleline_1.md @@ -8,9 +8,28 @@ type=file package [something, SomeType] { somePkg: "../main.roc", other: "../../other/main.roc" } ~~~ # EXPECTED -NIL +EXPOSED BUT NOT DEFINED - package_header_nonempty_singleline_1.md:1:10:1:19 # PROBLEMS -NIL +**EXPOSED BUT NOT DEFINED** +The module header says that ``something`` is exposed, but it is not defined anywhere in this module. 
+ +**package_header_nonempty_singleline_1.md:1:10:1:19:** +```roc +package [something, SomeType] { somePkg: "../main.roc", other: "../../other/main.roc" } +``` + ^^^^^^^^^ +You can fix this by either defining ``something`` in this module, or by removing it from the list of exposed values. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``SomeType`` is exposed, but it is not defined anywhere in this module. + +**package_header_nonempty_singleline_1.md:1:21:1:29:** +```roc +package [something, SomeType] { somePkg: "../main.roc", other: "../../other/main.roc" } +``` + ^^^^^^^^ +You can fix this by either defining ``SomeType`` in this module, or by removing it from the list of exposed values. + # TOKENS ~~~zig KwPackage(1:1-1:8),OpenSquare(1:9-1:10),LowerIdent(1:10-1:19),Comma(1:19-1:20),UpperIdent(1:21-1:29),CloseSquare(1:29-1:30),OpenCurly(1:31-1:32),LowerIdent(1:33-1:40),OpColon(1:40-1:41),StringStart(1:42-1:43),StringPart(1:43-1:54),StringEnd(1:54-1:55),Comma(1:55-1:56),LowerIdent(1:57-1:62),OpColon(1:62-1:63),StringStart(1:64-1:65),StringPart(1:65-1:85),StringEnd(1:85-1:86),CloseCurly(1:87-1:88),EndOfFile(1:88-1:88), diff --git a/src/snapshots/pass/exposed_not_impl.md b/src/snapshots/pass/exposed_not_impl.md new file mode 100644 index 0000000000..4dd8ef684f --- /dev/null +++ b/src/snapshots/pass/exposed_not_impl.md @@ -0,0 +1,136 @@ +# META +~~~ini +description=Module exposes values that are not implemented +type=file +~~~ +# SOURCE +~~~roc +module [foo, bar, MyType, OtherType, foo, MyType] + +# This module exposes foo, bar, MyType, and OtherType +# but only implements foo and MyType +# This should generate "exposed but not implemented" errors for bar and OtherType +# Also tests redundant exposed entries for foo and MyType + +foo = 42 + +MyType : [A, B, C] +~~~ +# EXPECTED +REDUNDANT EXPOSED - exposed_not_impl.md:1:38:1:41 +# PROBLEMS +**REDUNDANT EXPOSED** +The identifier ``foo`` is exposed multiple times in the module header. 
+ +**exposed_not_impl.md:1:38:1:41:** +```roc +module [foo, bar, MyType, OtherType, foo, MyType] +``` + ^^^ +It was already exposed here:**exposed_not_impl.md:1:9:1:12:** +```roc +module [foo, bar, MyType, OtherType, foo, MyType] +``` + ^^^ +You can remove the duplicate entry to fix this warning. + +**REDUNDANT EXPOSED** +The identifier ``MyType`` is exposed multiple times in the module header. + +**exposed_not_impl.md:1:43:1:49:** +```roc +module [foo, bar, MyType, OtherType, foo, MyType] +``` + ^^^^^^ +It was already exposed here:**exposed_not_impl.md:1:19:1:25:** +```roc +module [foo, bar, MyType, OtherType, foo, MyType] +``` + ^^^^^^ +You can remove the duplicate entry to fix this warning. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``bar`` is exposed, but it is not defined anywhere in this module. + +**exposed_not_impl.md:1:14:1:17:** +```roc +module [foo, bar, MyType, OtherType, foo, MyType] +``` + ^^^ +You can fix this by either defining ``bar`` in this module, or by removing it from the list of exposed values. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``OtherType`` is exposed, but it is not defined anywhere in this module. + +**exposed_not_impl.md:1:27:1:36:** +```roc +module [foo, bar, MyType, OtherType, foo, MyType] +``` + ^^^^^^^^^ +You can fix this by either defining ``OtherType`` in this module, or by removing it from the list of exposed values. 
+ +# TOKENS +~~~zig +KwModule(1:1-1:7),OpenSquare(1:8-1:9),LowerIdent(1:9-1:12),Comma(1:12-1:13),LowerIdent(1:14-1:17),Comma(1:17-1:18),UpperIdent(1:19-1:25),Comma(1:25-1:26),UpperIdent(1:27-1:36),Comma(1:36-1:37),LowerIdent(1:38-1:41),Comma(1:41-1:42),UpperIdent(1:43-1:49),CloseSquare(1:49-1:50),Newline(1:1-1:1), +Newline(1:1-1:1), +Newline(3:2-3:54), +Newline(4:2-4:37), +Newline(5:2-5:82), +Newline(6:2-6:58), +Newline(1:1-1:1), +LowerIdent(8:1-8:4),OpAssign(8:5-8:6),Int(8:7-8:9),Newline(1:1-1:1), +Newline(1:1-1:1), +UpperIdent(10:1-10:7),OpColon(10:8-10:9),OpenSquare(10:10-10:11),UpperIdent(10:11-10:12),Comma(10:12-10:13),UpperIdent(10:14-10:15),Comma(10:15-10:16),UpperIdent(10:17-10:18),CloseSquare(10:18-10:19),EndOfFile(10:19-10:19), +~~~ +# PARSE +~~~clojure +(file @1.1-10.19 + (module @1.1-1.50 + (exposes @1.8-1.50 + (exposed-lower-ident (text "foo")) + (exposed-lower-ident (text "bar")) + (exposed-upper-ident (text "MyType")) + (exposed-upper-ident (text "OtherType")) + (exposed-lower-ident (text "foo")) + (exposed-upper-ident (text "MyType")))) + (statements + (s-decl @8.1-8.9 + (p-ident @8.1-8.4 (raw "foo")) + (e-int @8.7-8.9 (raw "42"))) + (s-type-decl @10.1-10.19 + (header @10.1-10.7 (name "MyType") + (args)) + (ty-tag-union @10.10-10.19 + (tags + (ty @10.11-10.12 (name "A")) + (ty @10.14-10.15 (name "B")) + (ty @10.17-10.18 (name "C"))))))) +~~~ +# FORMATTED +~~~roc +NO CHANGE +~~~ +# CANONICALIZE +~~~clojure +(can-ir + (d-let + (p-assign @8.1-8.4 (ident "foo")) + (e-int @8.7-8.9 (value "42"))) + (s-alias-decl @10.1-10.19 + (ty-header @10.1-10.7 (name "MyType")) + (ty-tag-union @10.10-10.19 + (ty @10.11-10.12 (name "A")) + (ty @10.14-10.15 (name "B")) + (ty @10.17-10.18 (name "C"))))) +~~~ +# TYPES +~~~clojure +(inferred-types + (defs + (patt @8.1-8.4 (type "Num(*)"))) + (type_decls + (alias @10.1-10.19 (type "MyType") + (ty-header @10.1-10.7 (name "MyType")))) + (expressions + (expr @8.7-8.9 (type "Num(*)")))) +~~~ diff --git 
a/src/snapshots/platform_header_nonempty_1.md b/src/snapshots/platform_header_nonempty_1.md index 064bf64c3c..af6f7c85e5 100644 --- a/src/snapshots/platform_header_nonempty_1.md +++ b/src/snapshots/platform_header_nonempty_1.md @@ -28,9 +28,18 @@ platform # Comment after platform keyword ] ~~~ # EXPECTED -NIL +EXPOSED BUT NOT DEFINED - platform_header_nonempty_1.md:12:4:12:7 # PROBLEMS -NIL +**EXPOSED BUT NOT DEFINED** +The module header says that ``foo`` is exposed, but it is not defined anywhere in this module. + +**platform_header_nonempty_1.md:12:4:12:7:** +```roc + foo, # Comment after exposed item +``` + ^^^ +You can fix this by either defining ``foo`` in this module, or by removing it from the list of exposed values. + # TOKENS ~~~zig KwPlatform(1:1-1:9),Newline(1:11-1:42), diff --git a/src/snapshots/plume_package/main.md b/src/snapshots/plume_package/main.md index 1216778009..64f78703e2 100644 --- a/src/snapshots/plume_package/main.md +++ b/src/snapshots/plume_package/main.md @@ -10,9 +10,18 @@ package [ ] {} ~~~ # EXPECTED -NIL +EXPOSED BUT NOT DEFINED - main.md:2:5:2:10 # PROBLEMS -NIL +**EXPOSED BUT NOT DEFINED** +The module header says that ``Color`` is exposed, but it is not defined anywhere in this module. + +**main.md:2:5:2:10:** +```roc + Color, +``` + ^^^^^ +You can fix this by either defining ``Color`` in this module, or by removing it from the list of exposed values. 
+ # TOKENS ~~~zig KwPackage(1:1-1:8),OpenSquare(1:9-1:10),Newline(1:1-1:1), diff --git a/src/snapshots/pure_annotation_effectful_body_error.md b/src/snapshots/pure_annotation_effectful_body_error.md index 2a437f638c..994dae7d7d 100644 --- a/src/snapshots/pure_annotation_effectful_body_error.md +++ b/src/snapshots/pure_annotation_effectful_body_error.md @@ -78,8 +78,9 @@ NO CHANGE (args (p-assign @7.17-7.20 (ident "msg"))) (e-call @7.22-7.39 - (e-lookup-external - (ext-decl @7.22-7.34 (ident "pf.Stdout.line!") (kind "value"))) + (e-lookup-external @7.22-7.34 + (module-idx "0") + (target-node-idx "0")) (e-lookup-local @7.35-7.38 (p-assign @7.17-7.20 (ident "msg"))))) (annotation @7.1-7.13 @@ -95,8 +96,7 @@ NO CHANGE (e-string @9.22-9.40 (e-literal @9.23-9.39 (string "This should fail"))))) (s-import @3.1-3.17 (module "pf.Stdout") (qualifier "pf") - (exposes)) - (ext-decl @7.22-7.34 (ident "pf.Stdout.line!") (kind "value"))) + (exposes))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/simple_module_no_blanks.md b/src/snapshots/simple_module_no_blanks.md index 3ce27f6ee1..66d37ec65f 100644 --- a/src/snapshots/simple_module_no_blanks.md +++ b/src/snapshots/simple_module_no_blanks.md @@ -51,8 +51,9 @@ NO CHANGE (d-let (p-assign @3.1-3.7 (ident "hello!")) (e-call @3.10-3.31 - (e-lookup-external - (ext-decl @3.10-3.22 (ident "pf.Stdout.line!") (kind "value"))) + (e-lookup-external @3.10-3.22 + (module-idx "0") + (target-node-idx "0")) (e-string @3.23-3.30 (e-literal @3.24-3.29 (string "Hello"))))) (d-let @@ -60,8 +61,7 @@ NO CHANGE (e-string @4.9-4.16 (e-literal @4.10-4.15 (string "World")))) (s-import @2.1-2.17 (module "pf.Stdout") (qualifier "pf") - (exposes)) - (ext-decl @3.10-3.22 (ident "pf.Stdout.line!") (kind "value"))) + (exposes))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/syntax_grab_bag.md b/src/snapshots/syntax_grab_bag.md index 651c6a36ab..87216b38e5 100644 --- a/src/snapshots/syntax_grab_bag.md +++ b/src/snapshots/syntax_grab_bag.md @@ -2437,8 
+2437,9 @@ expect { (s-expr @190.2-191.8 (e-runtime-error (tag "not_implemented"))) (e-call @191.2-195.3 - (e-lookup-external - (ext-decl @191.2-191.14 (ident "pf.Stdout.line!") (kind "value"))) + (e-lookup-external @191.2-191.14 + (module-idx "0") + (target-node-idx "0")) (e-string @192.3-194.18 (e-literal @192.4-192.14 (string "How about ")) (e-call @193.4-193.21 @@ -2587,8 +2588,7 @@ expect { (e-lookup-local @206.2-206.6 (p-assign @205.2-205.6 (ident "blah"))) (e-lookup-local @206.10-206.13 - (p-assign @204.2-204.5 (ident "foo")))))) - (ext-decl @191.2-191.14 (ident "pf.Stdout.line!") (kind "value"))) + (p-assign @204.2-204.5 (ident "foo"))))))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/type_annotation_missing_parens.md b/src/snapshots/type_annotation_missing_parens.md index c56b1bb5c8..b6b8975413 100644 --- a/src/snapshots/type_annotation_missing_parens.md +++ b/src/snapshots/type_annotation_missing_parens.md @@ -36,6 +36,16 @@ nums : List U8 +**EXPOSED BUT NOT DEFINED** +The module header says that ``nums`` is exposed, but it is not defined anywhere in this module. + +**type_annotation_missing_parens.md:1:9:1:13:** +```roc +module [nums] +``` + ^^^^ +You can fix this by either defining ``nums`` in this module, or by removing it from the list of exposed values. + # TOKENS ~~~zig KwModule(1:1-1:7),OpenSquare(1:8-1:9),LowerIdent(1:9-1:13),CloseSquare(1:13-1:14),Newline(1:1-1:1), diff --git a/src/snapshots/type_declarations.md b/src/snapshots/type_declarations.md index ae35be6969..b206c53af0 100644 --- a/src/snapshots/type_declarations.md +++ b/src/snapshots/type_declarations.md @@ -83,6 +83,26 @@ Some(a) : { foo : Ok(a), bar : Something } ^^^^^ +**EXPOSED BUT NOT DEFINED** +The module header says that ``main!`` is exposed, but it is not defined anywhere in this module. + +**type_declarations.md:1:51:1:56:** +```roc +module [Map, Foo, Some, Maybe, SomeFunc, add_one, main!] 
+``` + ^^^^^ +You can fix this by either defining ``main!`` in this module, or by removing it from the list of exposed values. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``add_one`` is exposed, but it is not defined anywhere in this module. + +**type_declarations.md:1:42:1:49:** +```roc +module [Map, Foo, Some, Maybe, SomeFunc, add_one, main!] +``` + ^^^^^^^ +You can fix this by either defining ``add_one`` in this module, or by removing it from the list of exposed values. + # TOKENS ~~~zig KwModule(1:1-1:7),OpenSquare(1:8-1:9),UpperIdent(1:9-1:12),Comma(1:12-1:13),UpperIdent(1:14-1:17),Comma(1:17-1:18),UpperIdent(1:19-1:23),Comma(1:23-1:24),UpperIdent(1:25-1:30),Comma(1:30-1:31),UpperIdent(1:32-1:40),Comma(1:40-1:41),LowerIdent(1:42-1:49),Comma(1:49-1:50),LowerIdent(1:51-1:56),CloseSquare(1:56-1:57),Newline(1:1-1:1), diff --git a/src/snapshots/type_record_effectful.md b/src/snapshots/type_record_effectful.md index c50e3b42b8..abbc100804 100644 --- a/src/snapshots/type_record_effectful.md +++ b/src/snapshots/type_record_effectful.md @@ -102,8 +102,9 @@ main! = |_| {} (e-block @6.22-9.2 (s-expr @7.5-8.11 (e-call @7.5-7.30 - (e-lookup-external - (ext-decl @7.5-7.17 (ident "pf.Stdout.line!") (kind "value"))) + (e-lookup-external @7.5-7.17 + (module-idx "0") + (target-node-idx "0")) (e-dot-access @7.18-7.30 (field "name") (receiver (e-lookup-local @7.18-7.24 @@ -128,8 +129,7 @@ main! 
= |_| {} (p-underscore @10.10-10.11)) (e-empty_record @10.13-10.15))) (s-import @3.1-3.17 (module "pf.Stdout") (qualifier "pf") - (exposes)) - (ext-decl @7.5-7.17 (ident "pf.Stdout.line!") (kind "value"))) + (exposes))) ~~~ # TYPES ~~~clojure diff --git a/src/snapshots/where_clause/where_clauses_10.md b/src/snapshots/where_clause/where_clauses_10.md index e7d1f242aa..bb4025f68b 100644 --- a/src/snapshots/where_clause/where_clauses_10.md +++ b/src/snapshots/where_clause/where_clauses_10.md @@ -16,9 +16,18 @@ decodeThings # After member name module(a).Decode, ~~~ # EXPECTED -NIL +EXPOSED BUT NOT DEFINED - where_clauses_10.md:1:9:1:15 # PROBLEMS -NIL +**EXPOSED BUT NOT DEFINED** +The module header says that ``decode`` is exposed, but it is not defined anywhere in this module. + +**where_clauses_10.md:1:9:1:15:** +```roc +module [decode] +``` + ^^^^^^ +You can fix this by either defining ``decode`` in this module, or by removing it from the list of exposed values. + # TOKENS ~~~zig KwModule(1:1-1:7),OpenSquare(1:8-1:9),LowerIdent(1:9-1:15),CloseSquare(1:15-1:16),Newline(1:1-1:1), diff --git a/src/snapshots/where_clause/where_clauses_4.md b/src/snapshots/where_clause/where_clauses_4.md index bd322849e7..a163aa9745 100644 --- a/src/snapshots/where_clause/where_clauses_4.md +++ b/src/snapshots/where_clause/where_clauses_4.md @@ -13,9 +13,18 @@ decodeThings : List(List(U8)) -> List(a) where module(a).Decode ~~~ # EXPECTED -NIL +EXPOSED BUT NOT DEFINED - where_clauses_4.md:1:9:1:15 # PROBLEMS -NIL +**EXPOSED BUT NOT DEFINED** +The module header says that ``decode`` is exposed, but it is not defined anywhere in this module. + +**where_clauses_4.md:1:9:1:15:** +```roc +module [decode] +``` + ^^^^^^ +You can fix this by either defining ``decode`` in this module, or by removing it from the list of exposed values. 
+ # TOKENS ~~~zig KwModule(1:1-1:7),OpenSquare(1:8-1:9),LowerIdent(1:9-1:15),CloseSquare(1:15-1:16),Newline(1:1-1:1), diff --git a/src/snapshots/where_clause/where_clauses_error_cases.md b/src/snapshots/where_clause/where_clauses_error_cases.md index b8a666d90d..6284bf5bfa 100644 --- a/src/snapshots/where_clause/where_clauses_error_cases.md +++ b/src/snapshots/where_clause/where_clauses_error_cases.md @@ -100,6 +100,36 @@ This type variable is referenced here: ^ +**EXPOSED BUT NOT DEFINED** +The module header says that ``broken_fn1`` is exposed, but it is not defined anywhere in this module. + +**where_clauses_error_cases.md:1:9:1:19:** +```roc +module [broken_fn1, broken_fn2, broken_fn3] +``` + ^^^^^^^^^^ +You can fix this by either defining ``broken_fn1`` in this module, or by removing it from the list of exposed values. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``broken_fn2`` is exposed, but it is not defined anywhere in this module. + +**where_clauses_error_cases.md:1:21:1:31:** +```roc +module [broken_fn1, broken_fn2, broken_fn3] +``` + ^^^^^^^^^^ +You can fix this by either defining ``broken_fn2`` in this module, or by removing it from the list of exposed values. + +**EXPOSED BUT NOT DEFINED** +The module header says that ``broken_fn3`` is exposed, but it is not defined anywhere in this module. + +**where_clauses_error_cases.md:1:33:1:43:** +```roc +module [broken_fn1, broken_fn2, broken_fn3] +``` + ^^^^^^^^^^ +You can fix this by either defining ``broken_fn3`` in this module, or by removing it from the list of exposed values. + # TOKENS ~~~zig KwModule(1:1-1:7),OpenSquare(1:8-1:9),LowerIdent(1:9-1:19),Comma(1:19-1:20),LowerIdent(1:21-1:31),Comma(1:31-1:32),LowerIdent(1:33-1:43),CloseSquare(1:43-1:44),Newline(1:1-1:1),