From ac21ae147f23a845b56a2af33fbeb98a2fdcaea0 Mon Sep 17 00:00:00 2001 From: Techatrix Date: Mon, 15 Jul 2024 22:27:58 +0200 Subject: [PATCH 01/21] refactor scope lookup functions --- src/analysis.zig | 59 +++++++++++++++++++------------------ src/features/references.zig | 2 +- 2 files changed, 32 insertions(+), 29 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 6d38c10e6..2e93ad929 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -2886,10 +2886,11 @@ pub const Type = struct { return; } if (token >= 1 and token_tags[token - 1] == .keyword_return) blk: { - const document_scope = handle.getDocumentScope() catch break :blk; // there is no good way to handle this error - const func_node = innermostBlockScopeInternal(document_scope, token_starts[token - 1], true); + const document_scope = try handle.getDocumentScope(); + const function_scope = innermostFunctionScopeAtIndex(document_scope, token_starts[token - 1]).unwrap() orelse break :blk; + const function_node = document_scope.getScopeAstNode(function_scope).?; var buf: [1]Ast.Node.Index = undefined; - const func = tree.fullFnProto(&buf, func_node) orelse break :blk; + const func = tree.fullFnProto(&buf, function_node).?; const func_name_token = func.name_token orelse break :blk; const func_name = offsets.tokenToSlice(tree, func_name_token); try writer.print("{s}(...)", .{func_name}); @@ -4357,33 +4358,34 @@ pub fn iterateLabels(handle: *DocumentStore.Handle, source_index: usize, comptim } } -pub fn innermostBlockScopeIndex(document_scope: DocumentScope, source_index: usize) Scope.OptionalIndex { +pub fn innermostScopeAtIndex(document_scope: DocumentScope, source_index: usize) Scope.Index { + var scope_iterator = iterateEnclosingScopes(&document_scope, source_index); + var scope_index: Scope.Index = scope_iterator.next().unwrap().?; // the DocumentScope's root scope must exist + while (scope_iterator.next().unwrap()) |inner_scope| { + scope_index = inner_scope; + } + return scope_index; +} + +pub fn innermostFunctionScopeAtIndex(document_scope: DocumentScope, source_index: usize) Scope.OptionalIndex { var scope_iterator = iterateEnclosingScopes(&document_scope, source_index); var scope_index: Scope.OptionalIndex = .none; while (scope_iterator.next().unwrap()) |inner_scope| { + if (document_scope.getScopeTag(inner_scope) != .function) continue; scope_index = inner_scope.toOptional(); } return scope_index; } pub fn innermostBlockScope(document_scope: DocumentScope, source_index: usize) Ast.Node.Index { - return innermostBlockScopeInternal(document_scope, source_index, false); -} - -fn innermostBlockScopeInternal(document_scope: DocumentScope, source_index: usize, skip_block: bool) Ast.Node.Index { - var scope_index = innermostBlockScopeIndex(document_scope, source_index); - while (true) { - const scope = scope_index.unwrap().?; - defer scope_index = document_scope.getScopeParent(scope); - const tag = document_scope.getScopeTag(scope); - - if (tag == .block and skip_block) - continue; - - if (document_scope.getScopeAstNode(scope)) |ast_node| { - return ast_node; + var scope_iterator = iterateEnclosingScopes(&document_scope, source_index); + var ast_node: Ast.Node.Index = undefined; // the DocumentScope's root scope is guaranteed to have an Ast Node + while (scope_iterator.next().unwrap()) |inner_scope| { + if (document_scope.getScopeAstNode(inner_scope)) |node| { + ast_node = node; } } + return ast_node; } pub fn innermostContainer(handle: *DocumentStore.Handle, source_index: usize) error{OutOfMemory}!Type { @@ 
-4454,13 +4456,11 @@ pub fn lookupSymbolGlobal( ) error{OutOfMemory}!?DeclWithHandle { const tree = handle.tree; const document_scope = try handle.getDocumentScope(); - var current_scope = innermostBlockScopeIndex(document_scope, source_index); - - while (current_scope.unwrap()) |scope_index| { - defer current_scope = document_scope.getScopeParent(scope_index); + var current_scope = innermostScopeAtIndex(document_scope, source_index); + while (true) { if (document_scope.getScopeDeclaration(.{ - .scope = current_scope.unwrap().?, + .scope = current_scope, .name = symbol, .kind = .field, }).unwrap()) |decl_index| { @@ -4476,14 +4476,16 @@ pub fn lookupSymbolGlobal( } if (document_scope.getScopeDeclaration(.{ - .scope = scope_index, + .scope = current_scope, .name = symbol, .kind = .other, }).unwrap()) |decl_index| { const decl = document_scope.declarations.get(@intFromEnum(decl_index)); return DeclWithHandle{ .decl = decl, .handle = handle }; } - if (try analyser.resolveUse(document_scope.getScopeUsingnamespaceNodesConst(scope_index), symbol, handle)) |result| return result; + if (try analyser.resolveUse(document_scope.getScopeUsingnamespaceNodesConst(current_scope), symbol, handle)) |result| return result; + + current_scope = document_scope.getScopeParent(current_scope).unwrap() orelse break; } return null; @@ -5102,9 +5104,10 @@ fn addReferencedTypes( } if (token >= 1 and token_tags[token - 1] == .keyword_return) blk: { const document_scope = try handle.getDocumentScope(); - const func_node = innermostBlockScopeInternal(document_scope, token_starts[token - 1], true); + const function_scope = innermostFunctionScopeAtIndex(document_scope, token_starts[token - 1]).unwrap() orelse break :blk; + const function_node = document_scope.getScopeAstNode(function_scope).?; var buf: [1]Ast.Node.Index = undefined; - const func = tree.fullFnProto(&buf, func_node) orelse break :blk; + const func = tree.fullFnProto(&buf, function_node).?; const func_name_token = func.name_token orelse break :blk; const func_name = offsets.tokenToSlice(tree, func_name_token); try referenced_types.put(.{ diff --git a/src/features/references.zig b/src/features/references.zig index ea7f63429..c982a27da 100644 --- a/src/features/references.zig +++ b/src/features/references.zig @@ -244,7 +244,7 @@ fn symbolReferences( // highlight requests only pertain to the current document, otherwise we can try to narrow things down const workspace = if (request == .highlight) false else blk: { const doc_scope = try curr_handle.getDocumentScope(); - const scope_index = Analyser.innermostBlockScopeIndex(doc_scope, source_index).unwrap() orelse break :blk true; + const scope_index = Analyser.innermostScopeAtIndex(doc_scope, source_index); break :blk switch (doc_scope.getScopeTag(scope_index)) { .function, .block => false, .container, .container_usingnamespace => decl_handle.isPublic(), From 5fc59f5eb675fc5632e2cd80974e5b3b32a74cc4 Mon Sep 17 00:00:00 2001 From: SuperAuguste <19855629+SuperAuguste@users.noreply.github.com> Date: Mon, 15 Jul 2024 22:22:05 +0200 Subject: [PATCH 02/21] Refactor container types Co-authored-by: Techatrix --- src/DocumentScope.zig | 1 + src/DocumentStore.zig | 8 + src/analysis.zig | 549 ++++++++++++++++++------------- src/features/completions.zig | 42 +-- src/features/goto.zig | 2 +- src/features/semantic_tokens.zig | 4 +- 6 files changed, 350 insertions(+), 256 deletions(-) diff --git a/src/DocumentScope.zig b/src/DocumentScope.zig index 8bb28395b..238cdb06f 100644 --- a/src/DocumentScope.zig +++ 
b/src/DocumentScope.zig @@ -144,6 +144,7 @@ pub const Scope = struct { child_declarations: ChildDeclarations, pub const Index = enum(u32) { + root, _, pub fn toOptional(index: Index) OptionalIndex { diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig index ca4b9a296..ab36765b0 100644 --- a/src/DocumentStore.zig +++ b/src/DocumentStore.zig @@ -265,6 +265,14 @@ pub const Handle = struct { return try self.getDocumentScopeCold(); } + /// Asserts that `getDocumentScope` has been previously called on `handle`. + pub fn getDocumentScopeCached(self: *Handle) DocumentScope { + if (builtin.mode == .Debug) { + std.debug.assert(self.getStatus().has_document_scope); + } + return self.impl.document_scope; + } + pub fn getZir(self: *Handle) error{OutOfMemory}!Zir { if (self.getStatus().has_zir) return self.impl.zir; return try self.getZirCold(); diff --git a/src/analysis.zig b/src/analysis.zig index 2e93ad929..8a66861eb 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -737,15 +737,15 @@ fn resolveVarDeclAliasInternal(analyser: *Analyser, node_handle: NodeWithHandle, if (!resolved.is_type_val) return null; - const resolved_node_handle = switch (resolved.data) { - .other => |n| n, + const resolved_scope_handle = switch (resolved.data) { + .container => |s| s, else => return null, }; const symbol_name = offsets.identifierTokenToNameSlice(tree, datas[node_handle.node].rhs); break :blk try analyser.lookupSymbolContainer( - resolved_node_handle, + resolved_scope_handle, symbol_name, .other, ); @@ -775,10 +775,10 @@ fn resolveVarDeclAliasInternal(analyser: *Analyser, node_handle: NodeWithHandle, const inner_node = (try analyser.resolveTypeOfNode(.{ .node = lhs, .handle = handle })) orelse return null; // assert root node - std.debug.assert(inner_node.data.other.node == 0); - const document_scope = try inner_node.data.other.handle.getDocumentScope(); + std.debug.assert(@intFromEnum(inner_node.data.container.scope) == 0); + const document_scope = try inner_node.data.container.handle.getDocumentScope(); const root_decl = document_scope.declarations.get(0); - break :blk DeclWithHandle{ .decl = root_decl, .handle = inner_node.data.other.handle }; + break :blk DeclWithHandle{ .decl = root_decl, .handle = inner_node.data.container.handle }; }, else => return null, } orelse return null; @@ -955,12 +955,12 @@ fn resolveTaggedUnionFieldType(analyser: *Analyser, ty: Type, symbol: []const u8 if (!ty.is_type_val) return null; - const node_handle = switch (ty.data) { - .other => |n| n, + const scope_handle = switch (ty.data) { + .container => |s| s, else => return null, }; - const node = node_handle.node; - const handle = node_handle.handle; + const node = scope_handle.toNode(); + const handle = scope_handle.handle; if (node == 0) return null; @@ -1152,12 +1152,12 @@ fn resolveBracketAccessType(analyser: *Analyser, lhs: Type, rhs: BracketAccessKi } fn resolveTupleFieldType(analyser: *Analyser, tuple: Type, index: usize) error{OutOfMemory}!?Type { - const node_handle = switch (tuple.data) { - .other => |n| n, + const scope_handle = switch (tuple.data) { + .container => |s| s, else => return null, }; - const node = node_handle.node; - const handle = node_handle.handle; + const node = scope_handle.toNode(); + const handle = scope_handle.handle; const tree = handle.tree; const main_tokens = tree.nodes.items(.main_token); const token_tags = tree.tokens.items(.tag); @@ -1233,13 +1233,7 @@ fn resolvePropertyType(analyser: *Analyser, ty: Type, name: []const u8) error{Ou } }, - .other => |node_handle| switch 
(node_handle.handle.tree.nodes.items(.tag)[node_handle.node]) { - .multiline_string_literal, - .string_literal, - => if (std.mem.eql(u8, "len", name)) { - return try Type.typeValFromIP(analyser, .usize_type); - }, - + .container => |scope_handle| switch (scope_handle.handle.tree.nodes.items(.tag)[scope_handle.toNode()]) { .container_decl, .container_decl_trailing, .container_decl_arg, @@ -1256,6 +1250,15 @@ fn resolvePropertyType(analyser: *Analyser, ty: Type, name: []const u8) error{Ou return analyser.resolveTupleFieldType(ty, index); }, + else => {}, + }, + + .other => |node_handle| switch (node_handle.handle.tree.nodes.items(.tag)[node_handle.node]) { + .multiline_string_literal, + .string_literal, + => if (std.mem.eql(u8, "len", name)) { + return try Type.typeValFromIP(analyser, .usize_type); + }, else => {}, }, @@ -1748,9 +1751,9 @@ fn resolveTypeOfNodeUncached(analyser: *Analyser, node_handle: NodeWithHandle) e }, // TODO represent through InternPool - .merge_error_sets, - .error_set_decl, + .merge_error_sets => return Type.typeVal(node_handle), + .error_set_decl, // TODO represent through InternPool .container_decl, .container_decl_arg, .container_decl_arg_trailing, @@ -1763,7 +1766,27 @@ fn resolveTypeOfNodeUncached(analyser: *Analyser, node_handle: NodeWithHandle) e .tagged_union_two_trailing, .tagged_union_enum_tag, .tagged_union_enum_tag_trailing, - => return Type.typeVal(node_handle), + => { + // TODO: use map? idk + const document_scope = try handle.getDocumentScope(); + + return .{ + .data = .{ + .container = .{ + .handle = handle, + .scope = for (0..document_scope.scopes.len) |scope_index| { + switch (document_scope.getScopeTag(@enumFromInt(scope_index))) { + .container, .container_usingnamespace => if (document_scope.getScopeAstNode(@enumFromInt(scope_index)).? == node) { + break @enumFromInt(scope_index); + }, + else => {}, + } + } else unreachable, // is this safe? 
idk + }, + }, + .is_type_val = true, + }; + }, .builtin_call, .builtin_call_comma, .builtin_call_two, @@ -1811,21 +1834,15 @@ fn resolveTypeOfNodeUncached(analyser: *Analyser, node_handle: NodeWithHandle) e const builtin_path = try std.fs.path.join(analyser.arena.allocator(), &.{ zig_lib_path, "std", "builtin.zig" }); const builtin_uri = try URI.fromPath(analyser.arena.allocator(), builtin_path); - const new_handle = analyser.store.getOrLoadHandle(builtin_uri) orelse return null; - const new_handle_document_scope = try new_handle.getDocumentScope(); - - const decl_index = new_handle_document_scope.getScopeDeclaration(.{ - .scope = @enumFromInt(0), - .name = "Type", - .kind = .other, - }).unwrap() orelse return null; - - const decl = new_handle_document_scope.declarations.get(@intFromEnum(decl_index)); - if (decl != .ast_node) return null; - - const var_decl = new_handle.tree.fullVarDecl(decl.ast_node) orelse return null; + const builtin_handle = analyser.store.getOrLoadHandle(builtin_uri) orelse return null; + const builtin_root_struct_type: Type = .{ + .data = .{ .container = .{ .handle = builtin_handle, .scope = Scope.Index.root } }, + .is_type_val = true, + }; - return Type{ .data = .{ .other = .{ .node = var_decl.ast.init_node, .handle = new_handle } }, .is_type_val = false }; + const builtin_type_decl = try builtin_root_struct_type.lookupSymbol(analyser, "Type") orelse return null; + const builtin_type = try builtin_type_decl.resolveType(analyser) orelse return null; + return try builtin_type.instanceTypeVal(analyser); } if (std.mem.eql(u8, call_name, "@import")) { @@ -1846,15 +1863,29 @@ fn resolveTypeOfNodeUncached(analyser: *Analyser, node_handle: NodeWithHandle) e const new_handle = analyser.store.getOrLoadHandle(import_uri) orelse return null; - // reference to node '0' which is root - return Type.typeVal(.{ .node = 0, .handle = new_handle }); + return .{ + .data = .{ + .container = .{ + .handle = new_handle, + .scope = Scope.Index.root, + }, + }, + .is_type_val = true, + }; } else if (std.mem.eql(u8, call_name, "@cImport")) { const cimport_uri = (try analyser.store.resolveCImport(handle, node)) orelse return null; const new_handle = analyser.store.getOrLoadHandle(cimport_uri) orelse return null; - // reference to node '0' which is root - return Type.typeVal(.{ .node = 0, .handle = new_handle }); + return .{ + .data = .{ + .container = .{ + .handle = new_handle, + .scope = Scope.Index.root, + }, + }, + .is_type_val = true, + }; } if (std.mem.eql(u8, call_name, "@field")) { if (params.len < 2) return null; @@ -2349,8 +2380,10 @@ pub const Type = struct { /// `Foo` in `Foo.bar` where `Foo = union(enum) { bar }` union_tag: *Type, - /// - Container type: `struct {}`, `enum {}`, `union {}`, `opaque {}` - /// - Error type: `error{Foo}`, `Foo || Bar` + /// - Container type: `struct {}`, `enum {}`, `union {}`, `opaque {}`, `error {}` + container: ScopeWithHandle, + + /// - Error type: `Foo || Bar`, `Foo!Bar` /// - Function: `fn () Foo`, `fn foo() Foo` other: NodeWithHandle, @@ -2410,6 +2443,10 @@ pub const Type = struct { } info.payload.hashWithHasher(hasher); }, + .container => |scope_handle| { + hasher.update(scope_handle.handle.uri); + std.hash.autoHash(hasher, scope_handle.scope); + }, .other, .compile_error => |node_handle| { std.hash.autoHash(hasher, node_handle.node); hasher.update(node_handle.handle.uri); @@ -2459,6 +2496,11 @@ pub const Type = struct { if (!a_error_set.eql(b_info.error_set.?.*)) return false; } }, + .container => |a_scope_handle| { + const b_scope_handle = 
b.data.container; + if (a_scope_handle.scope != b_scope_handle.scope) return false; + if (!std.mem.eql(u8, a_scope_handle.handle.uri, b_scope_handle.handle.uri)) return false; + }, .other => |a_node_handle| return a_node_handle.eql(b.data.other), .compile_error => |a_node_handle| return a_node_handle.eql(b.data.compile_error), .either => |a_entries| { @@ -2617,19 +2659,20 @@ pub const Type = struct { fn isRoot(self: Type) bool { switch (self.data) { - // root is always index 0 - .other => |node_handle| return node_handle.node == 0, + .container => |container_scope_handle| return container_scope_handle.scope == Scope.Index.root, else => return false, } } fn isContainerKind(self: Type, container_kind_tok: std.zig.Token.Tag) bool { - const node_handle = switch (self.data) { - .other => |n| n, + const scope_handle = switch (self.data) { + .container => |s| s, else => return false, }; - const node = node_handle.node; - const tree = node_handle.handle.tree; + + const node = scope_handle.toNode(); + + const tree = scope_handle.handle.tree; const main_tokens = tree.nodes.items(.main_token); const tags = tree.tokens.items(.tag); return tags[main_tokens[node]] == container_kind_tok; @@ -2641,9 +2684,9 @@ pub const Type = struct { pub fn isNamespace(self: Type) bool { if (!self.isStructType()) return false; - const node_handle = self.data.other; - const node = node_handle.node; - const tree = node_handle.handle.tree; + const scope_handle = self.data.container; + const node = scope_handle.toNode(); + const tree = scope_handle.handle.tree; const tags = tree.nodes.items(.tag); var buf: [2]Ast.Node.Index = undefined; const full = tree.fullContainerDecl(&buf, node) orelse return true; @@ -2667,7 +2710,7 @@ pub const Type = struct { pub fn isTaggedUnion(self: Type) bool { return switch (self.data) { - .other => |node_handle| ast.isTaggedUnion(node_handle.handle.tree, node_handle.node), + .container => |scope_handle| ast.isTaggedUnion(scope_handle.handle.tree, scope_handle.toNode()), else => false, }; } @@ -2724,8 +2767,12 @@ pub const Type = struct { }; } - pub fn typeDefinitionToken(self: Type) ?TokenWithHandle { + pub fn typeDefinitionToken(self: Type) !?TokenWithHandle { return switch (self.data) { + .container => |scope_handle| .{ + .token = scope_handle.handle.tree.firstToken(scope_handle.toNode()), + .handle = scope_handle.handle, + }, .other => |node_handle| .{ .token = node_handle.handle.tree.firstToken(node_handle.node), .handle = node_handle.handle, @@ -2737,6 +2784,7 @@ pub const Type = struct { pub fn docComments(self: Type, allocator: std.mem.Allocator) error{OutOfMemory}!?[]const u8 { if (self.is_type_val) { switch (self.data) { + .container => |scope_handle| return try getDocComments(allocator, scope_handle.handle.tree, scope_handle.toNode()), .other => |node_handle| return try getDocComments(allocator, node_handle.handle.tree, node_handle.node), else => {}, } @@ -2749,8 +2797,8 @@ pub const Type = struct { analyser: *Analyser, symbol: []const u8, ) error{OutOfMemory}!?DeclWithHandle { - const node_handle = switch (self.data) { - .other => |n| n, + const scope_handle = switch (self.data) { + .container => |s| s, .either => |entries| { // TODO: Return all options instead of first valid one for (entries) |entry| { @@ -2764,17 +2812,17 @@ pub const Type = struct { else => return null, }; if (self.is_type_val) { - if (try analyser.lookupSymbolContainer(node_handle, symbol, .other)) |decl| + if (try analyser.lookupSymbolContainer(scope_handle, symbol, .other)) |decl| return decl; if 
(self.isEnumType() or self.isTaggedUnion()) - return analyser.lookupSymbolContainer(node_handle, symbol, .field); + return analyser.lookupSymbolContainer(scope_handle, symbol, .field); return null; } if (self.isEnumType()) - return analyser.lookupSymbolContainer(node_handle, symbol, .other); - if (try analyser.lookupSymbolContainer(node_handle, symbol, .field)) |decl| + return analyser.lookupSymbolContainer(scope_handle, symbol, .other); + if (try analyser.lookupSymbolContainer(scope_handle, symbol, .field)) |decl| return decl; - return analyser.lookupSymbolContainer(node_handle, symbol, .other); + return analyser.lookupSymbolContainer(scope_handle, symbol, .other); } pub fn fmt(ty: Type, analyser: *Analyser, options: FormatOptions) std.fmt.Formatter(format) { @@ -2852,97 +2900,104 @@ pub const Type = struct { try writer.print("!{}", .{info.payload.fmtTypeVal(analyser, ctx.options)}); }, .union_tag => |t| try writer.print("@typeInfo({}).Union.tag_type.?", .{t.fmtTypeVal(analyser, ctx.options)}), - .other => |node_handle| switch (node_handle.handle.tree.nodes.items(.tag)[node_handle.node]) { - .root => { - const path = URI.parse(analyser.arena.allocator(), node_handle.handle.uri) catch node_handle.handle.uri; - try writer.writeAll(std.fs.path.stem(path)); - }, + .container => |scope_handle| { + const handle = scope_handle.handle; + const tree = handle.tree; - .container_decl, - .container_decl_arg, - .container_decl_arg_trailing, - .container_decl_trailing, - .container_decl_two, - .container_decl_two_trailing, - .error_set_decl, - .tagged_union, - .tagged_union_trailing, - .tagged_union_two, - .tagged_union_two_trailing, - .tagged_union_enum_tag, - .tagged_union_enum_tag_trailing, - => { - const node = node_handle.node; - const handle = node_handle.handle; - const tree = handle.tree; + const doc_scope = try handle.getDocumentScope(); + const node = scope_handle.toNode(); - const token_tags = tree.tokens.items(.tag); - const token_starts = tree.tokens.items(.start); + switch (handle.tree.nodes.items(.tag)[node]) { + .root => { + const path = URI.parse(analyser.arena.allocator(), handle.uri) catch handle.uri; + try writer.writeAll(std.fs.path.stem(path)); + }, - // This is a hacky nightmare but it works :P - const token = tree.firstToken(node); - if (token >= 2 and token_tags[token - 2] == .identifier and token_tags[token - 1] == .equal) { - try writer.writeAll(tree.tokenSlice(token - 2)); - return; - } - if (token >= 1 and token_tags[token - 1] == .keyword_return) blk: { - const document_scope = try handle.getDocumentScope(); - const function_scope = innermostFunctionScopeAtIndex(document_scope, token_starts[token - 1]).unwrap() orelse break :blk; - const function_node = document_scope.getScopeAstNode(function_scope).?; - var buf: [1]Ast.Node.Index = undefined; - const func = tree.fullFnProto(&buf, function_node).?; - const func_name_token = func.name_token orelse break :blk; - const func_name = offsets.tokenToSlice(tree, func_name_token); - try writer.print("{s}(...)", .{func_name}); - return; - } + .container_decl, + .container_decl_arg, + .container_decl_arg_trailing, + .container_decl_trailing, + .container_decl_two, + .container_decl_two_trailing, + .error_set_decl, + .tagged_union, + .tagged_union_trailing, + .tagged_union_two, + .tagged_union_two_trailing, + .tagged_union_enum_tag, + .tagged_union_enum_tag_trailing, + => { + const token_tags = tree.tokens.items(.tag); + const token_starts = tree.tokens.items(.start); - if (!ctx.options.truncate_container_decls) { - try 
writer.writeAll(offsets.nodeToSlice(tree, node)); - return; - } + // This is a hacky nightmare but it works :P + const token = tree.firstToken(node); + if (token >= 2 and token_tags[token - 2] == .identifier and token_tags[token - 1] == .equal) { + try writer.writeAll(tree.tokenSlice(token - 2)); + return; + } + if (token >= 1 and token_tags[token - 1] == .keyword_return) blk: { + const function_scope = innermostFunctionScopeAtIndex(doc_scope, token_starts[token - 1]).unwrap() orelse break :blk; + const function_node = doc_scope.getScopeAstNode(function_scope).?; + var buf: [1]Ast.Node.Index = undefined; + const func = tree.fullFnProto(&buf, function_node).?; + const func_name_token = func.name_token orelse break :blk; + const func_name = offsets.tokenToSlice(tree, func_name_token); + try writer.print("{s}(...)", .{func_name}); + return; + } - if (tree.nodes.items(.tag)[node] == .error_set_decl) { - const field_count = ast.errorSetFieldCount(tree, node); - if (field_count > 2) { - try writer.writeAll("error{...}"); + if (!ctx.options.truncate_container_decls) { + try writer.writeAll(offsets.nodeToSlice(tree, node)); return; } - var it = ast.ErrorSetIterator.init(tree, node); - var i: usize = 0; + if (tree.nodes.items(.tag)[node] == .error_set_decl) { + const field_count = ast.errorSetFieldCount(tree, node); + if (field_count > 2) { + try writer.writeAll("error{...}"); + return; + } + + var it = ast.ErrorSetIterator.init(tree, node); + var i: usize = 0; - try writer.writeAll("error{"); - while (it.next()) |identifier_token| : (i += 1) { - if (i != 0) { - try writer.writeByte(','); + try writer.writeAll("error{"); + while (it.next()) |identifier_token| : (i += 1) { + if (i != 0) { + try writer.writeByte(','); + } + const name = offsets.tokenToSlice(tree, identifier_token); + try writer.writeAll(name); } - const name = offsets.tokenToSlice(tree, identifier_token); - try writer.writeAll(name); + try writer.writeByte('}'); + + return; } - try writer.writeByte('}'); - return; - } + var buffer: [2]Ast.Node.Index = undefined; + const container_decl = tree.fullContainerDecl(&buffer, node).?; - var buffer: [2]Ast.Node.Index = undefined; - const container_decl = tree.fullContainerDecl(&buffer, node).?; + const start_token = container_decl.layout_token orelse container_decl.ast.main_token; + const end_token = if (container_decl.ast.arg != 0) + @min(ast.lastToken(tree, container_decl.ast.arg) + 1, tree.tokens.len) + else if (container_decl.ast.enum_token) |enum_token| + @min(enum_token + 1, tree.tokens.len) + else + container_decl.ast.main_token; - const start_token = container_decl.layout_token orelse container_decl.ast.main_token; - const end_token = if (container_decl.ast.arg != 0) - @min(ast.lastToken(tree, container_decl.ast.arg) + 1, tree.tokens.len) - else if (container_decl.ast.enum_token) |enum_token| - @min(enum_token + 1, tree.tokens.len) - else - container_decl.ast.main_token; + try writer.writeAll(offsets.tokensToSlice(tree, start_token, end_token)); + if (container_decl.ast.members.len == 0) { + try writer.writeAll(" {}"); + } else { + try writer.writeAll(" {...}"); + } + }, - try writer.writeAll(offsets.tokensToSlice(tree, start_token, end_token)); - if (container_decl.ast.members.len == 0) { - try writer.writeAll(" {}"); - } else { - try writer.writeAll(" {...}"); - } - }, + else => unreachable, + } + }, + .other => |node_handle| switch (node_handle.handle.tree.nodes.items(.tag)[node_handle.node]) { .fn_proto, .fn_proto_multi, .fn_proto_one, @@ -2977,6 +3032,17 @@ pub const Type = 
struct { } }; +pub const ScopeWithHandle = struct { + handle: *DocumentStore.Handle, + scope: Scope.Index, + + pub fn toNode(scope_handle: ScopeWithHandle) Ast.Node.Index { + if (scope_handle.scope == Scope.Index.root) return 0; + var doc_scope = scope_handle.handle.getDocumentScopeCached(); + return doc_scope.getScopeAstNode(scope_handle.scope).?; + } +}; + /// Collects all `@import`'s we can find into a slice of import paths (without quotes). pub fn collectImports(allocator: std.mem.Allocator, tree: Ast) error{OutOfMemory}!std.ArrayListUnmanaged([]const u8) { var imports = std.ArrayListUnmanaged([]const u8){}; @@ -3233,7 +3299,15 @@ pub fn getFieldAccessType( }); const uri = try analyser.store.uriFromImportStr(analyser.arena.allocator(), handle, import_str) orelse return null; const node_handle = analyser.store.getOrLoadHandle(uri) orelse return null; - current_type = Type.typeVal(NodeWithHandle{ .handle = node_handle, .node = 0 }); + current_type = .{ + .data = .{ + .container = .{ + .handle = node_handle, + .scope = @enumFromInt(0), + }, + }, + .is_type_val = true, + }; _ = tokenizer.next(); // eat the .r_paren } else { log.debug("Unhandled builtin: {s}", .{offsets.locToSlice(tokenizer.buffer, tok.loc)}); @@ -3794,7 +3868,7 @@ pub const DeclWithHandle = struct { } if (try self.resolveType(analyser)) |resolved_type| { if (resolved_type.is_type_val) { - if (resolved_type.typeDefinitionToken()) |token| { + if (try resolved_type.typeDefinitionToken()) |token| { return token; } } @@ -4083,7 +4157,7 @@ pub const DeclWithHandle = struct { }) orelse return null; break :blk switch (node.data) { .array => |array_info| try array_info.elem_ty.instanceTypeVal(analyser), - .other => try analyser.resolveTupleFieldType(node, pay.index), + .container => try analyser.resolveTupleFieldType(node, pay.index), else => null, }; }, @@ -4133,6 +4207,7 @@ pub const DeclWithHandle = struct { } }; +/// Collects all symbols/declarations that can be accessed on the given container type. fn findContainerScopeIndex(container_handle: NodeWithHandle) !?Scope.Index { const container = container_handle.node; const handle = container_handle.handle; @@ -4155,8 +4230,8 @@ fn findContainerScopeIndex(container_handle: NodeWithHandle) { /// Collects all symbols/declarations that can be a accessed on the given container type. pub fn collectDeclarationsOfContainer( analyser: *Analyser, - /// a ast-node to a container type (i.e. `struct`, `union`, `enum`, `opaque`) - container_handle: NodeWithHandle, + /// A container type (i.e. `struct`, `union`, `enum`, `opaque`) + container_scope: ScopeWithHandle, original_handle: *DocumentStore.Handle, /// Whether or not the container type is a instance of its type. 
/// ```zig @@ -4167,19 +4242,19 @@ pub fn collectDeclarationsOfContainer( /// allocated with `analyser.arena.allocator()` decl_collection: *std.ArrayListUnmanaged(DeclWithHandle), ) error{OutOfMemory}!void { - const container = container_handle.node; - const handle = container_handle.handle; + const scope = container_scope.scope; + const handle = container_scope.handle; const tree = handle.tree; const document_scope = try handle.getDocumentScope(); + const container_node = container_scope.toNode(); const node_tags = tree.nodes.items(.tag); const token_tags = tree.tokens.items(.tag); - const main_token = tree.nodes.items(.main_token)[container]; + const main_token = tree.nodes.items(.main_token)[container_node]; const is_enum = token_tags[main_token] == .keyword_enum; - const container_scope_index = try findContainerScopeIndex(container_handle) orelse return; - const scope_decls = document_scope.getScopeDeclarationsConst(container_scope_index); + const scope_decls = document_scope.getScopeDeclarationsConst(scope); for (scope_decls) |decl_index| { const decl = document_scope.declarations.get(@intFromEnum(decl_index)); @@ -4209,7 +4284,15 @@ pub fn collectDeclarationsOfContainer( const alias_type = try analyser.resolveTypeOfNode(.{ .node = node, .handle = handle }) orelse continue; const func_ty = try analyser.resolveFuncProtoOfCallable(alias_type) orelse continue; - if (!try analyser.firstParamIs(func_ty, Type.typeVal(container_handle))) continue; + if (!try analyser.firstParamIs(func_ty, .{ + .data = .{ + .container = .{ + .handle = handle, + .scope = scope, + }, + }, + .is_type_val = true, + })) continue; } }, else => {}, @@ -4223,7 +4306,7 @@ pub fn collectDeclarationsOfContainer( try decl_collection.append(analyser.arena.allocator(), decl_with_handle); } - for (document_scope.getScopeUsingnamespaceNodesConst(container_scope_index)) |use| { + for (document_scope.getScopeUsingnamespaceNodesConst(scope)) |use| { try analyser.collectUsingnamespaceDeclarationsOfContainer( .{ .node = use, .handle = handle }, original_handle, @@ -4257,9 +4340,9 @@ fn collectUsingnamespaceDeclarationsOfContainer( })) orelse return; switch (use_expr.data) { - .other => |expr| { + .container => |container_scope| { try analyser.collectDeclarationsOfContainer( - expr, + container_scope, original_handle, instance_access, decl_collection, @@ -4268,9 +4351,9 @@ fn collectUsingnamespaceDeclarationsOfContainer( .either => |entries| { for (entries) |entry| { switch (entry.type_data) { - .other => |expr| { + .container => |container_scope| { try analyser.collectDeclarationsOfContainer( - expr, + container_scope, original_handle, instance_access, decl_collection, @@ -4390,17 +4473,33 @@ pub fn innermostBlockScope(document_scope: DocumentScope, source_index: usize) A pub fn innermostContainer(handle: *DocumentStore.Handle, source_index: usize) error{OutOfMemory}!Type { const document_scope = try handle.getDocumentScope(); - var current = document_scope.getScopeAstNode(@enumFromInt(0)).?; - if (document_scope.scopes.len == 1) return Type.typeVal(.{ .node = current, .handle = handle }); + var current: DocumentScope.Scope.Index = @enumFromInt(0); + if (document_scope.scopes.len == 1) return .{ + .data = .{ + .container = .{ + .handle = handle, + .scope = @enumFromInt(0), + }, + }, + .is_type_val = true, + }; var scope_iterator = iterateEnclosingScopes(&document_scope, source_index); while (scope_iterator.next().unwrap()) |scope_index| { switch (document_scope.getScopeTag(scope_index)) { - .container, .container_usingnamespace 
=> current = document_scope.getScopeAstNode(scope_index).?, + .container, .container_usingnamespace => current = scope_index, else => {}, } } - return Type.typeVal(.{ .node = current, .handle = handle }); + return .{ + .data = .{ + .container = .{ + .handle = handle, + .scope = current, + }, + }, + .is_type_val = true, + }; } fn resolveUse(analyser: *Analyser, uses: []const Ast.Node.Index, symbol: []const u8, handle: *DocumentStore.Handle) error{OutOfMemory}!?DeclWithHandle { @@ -4493,17 +4592,15 @@ pub fn lookupSymbolGlobal( pub fn lookupSymbolContainer( analyser: *Analyser, - container_handle: NodeWithHandle, + container_scope: ScopeWithHandle, symbol: []const u8, kind: DocumentScope.DeclarationLookup.Kind, ) error{OutOfMemory}!?DeclWithHandle { - const handle = container_handle.handle; + const handle = container_scope.handle; const document_scope = try handle.getDocumentScope(); - const container_scope_index = try findContainerScopeIndex(container_handle) orelse return null; - if (document_scope.getScopeDeclaration(.{ - .scope = container_scope_index, + .scope = container_scope.scope, .name = symbol, .kind = kind, }).unwrap()) |decl_index| { @@ -4511,7 +4608,7 @@ pub fn lookupSymbolContainer( return DeclWithHandle{ .decl = decl, .handle = handle }; } - if (try analyser.resolveUse(document_scope.getScopeUsingnamespaceNodesConst(container_scope_index), symbol, handle)) |result| return result; + if (try analyser.resolveUse(document_scope.getScopeUsingnamespaceNodesConst(container_scope.scope), symbol, handle)) |result| return result; return null; } @@ -4536,13 +4633,13 @@ pub fn lookupSymbolFieldInit( if (try analyser.resolveOptionalUnwrap(container_type)) |unwrapped| container_type = unwrapped; - const container_node_handle = switch (container_type.data) { - .other => |n| n, + const container_scope_handle = switch (container_type.data) { + .container => |s| s, else => return null, }; return analyser.lookupSymbolContainer( - container_node_handle, + container_scope_handle, field_name, .field, ); @@ -5052,72 +5149,72 @@ fn addReferencedTypes( }, .union_tag => |t| try analyser.addReferencedTypes(t.*, ReferencedType.Collector.init(referenced_types)), - .other => |node_handle| switch (node_handle.handle.tree.nodes.items(.tag)[node_handle.node]) { - .root => { - const node = node_handle.node; - const handle = node_handle.handle; - const tree = handle.tree; + .container => |scope_handle| { + const handle = scope_handle.handle; + const tree = handle.tree; - const path = URI.parse(allocator, handle.uri) catch |err| switch (err) { - error.OutOfMemory => |e| return e, - else => return, - }; - const str = std.fs.path.stem(path); - try referenced_types.put(.{ - .str = type_str orelse str, - .handle = handle, - .token = tree.firstToken(node), - }, {}); - }, + const doc_scope = try handle.getDocumentScope(); + const node = scope_handle.toNode(); - .container_decl, - .container_decl_arg, - .container_decl_arg_trailing, - .container_decl_trailing, - .container_decl_two, - .container_decl_two_trailing, - .error_set_decl, - .merge_error_sets, - .tagged_union, - .tagged_union_trailing, - .tagged_union_two, - .tagged_union_two_trailing, - .tagged_union_enum_tag, - .tagged_union_enum_tag_trailing, - => { - const node = node_handle.node; - const handle = node_handle.handle; - const tree = handle.tree; - - const token_tags = tree.tokens.items(.tag); - const token_starts = tree.tokens.items(.start); - - // This is a hacky nightmare but it works :P - const token = tree.firstToken(node); - if (token >= 2 and 
token_tags[token - 2] == .identifier and token_tags[token - 1] == .equal) { - const str = tree.tokenSlice(token - 2); + switch (tree.nodes.items(.tag)[node]) { + .root => { + const path = URI.parse(allocator, handle.uri) catch |err| switch (err) { + error.OutOfMemory => |e| return e, + else => return, + }; + const str = std.fs.path.stem(path); try referenced_types.put(.{ .str = type_str orelse str, .handle = handle, - .token = token - 2, - }, {}); - } - if (token >= 1 and token_tags[token - 1] == .keyword_return) blk: { - const document_scope = try handle.getDocumentScope(); - const function_scope = innermostFunctionScopeAtIndex(document_scope, token_starts[token - 1]).unwrap() orelse break :blk; - const function_node = document_scope.getScopeAstNode(function_scope).?; - var buf: [1]Ast.Node.Index = undefined; - const func = tree.fullFnProto(&buf, function_node).?; - const func_name_token = func.name_token orelse break :blk; - const func_name = offsets.tokenToSlice(tree, func_name_token); - try referenced_types.put(.{ - .str = type_str orelse func_name, - .handle = handle, - .token = func_name_token, + .token = tree.firstToken(node), }, {}); - } - }, + }, + .container_decl, + .container_decl_arg, + .container_decl_arg_trailing, + .container_decl_trailing, + .container_decl_two, + .container_decl_two_trailing, + .error_set_decl, + .tagged_union, + .tagged_union_trailing, + .tagged_union_two, + .tagged_union_two_trailing, + .tagged_union_enum_tag, + .tagged_union_enum_tag_trailing, + => { + const token_tags = tree.tokens.items(.tag); + const token_starts = tree.tokens.items(.start); + + // This is a hacky nightmare but it works :P + const token = tree.firstToken(node); + if (token >= 2 and token_tags[token - 2] == .identifier and token_tags[token - 1] == .equal) { + const str = tree.tokenSlice(token - 2); + try referenced_types.put(.{ + .str = type_str orelse str, + .handle = handle, + .token = token - 2, + }, {}); + } + if (token >= 1 and token_tags[token - 1] == .keyword_return) blk: { + const function_scope = innermostFunctionScopeAtIndex(doc_scope, token_starts[token - 1]).unwrap() orelse break :blk; + const function_node = doc_scope.getScopeAstNode(function_scope).?; + var buf: [1]Ast.Node.Index = undefined; + const func = tree.fullFnProto(&buf, function_node).?; + const func_name_token = func.name_token orelse break :blk; + const func_name = offsets.tokenToSlice(tree, func_name_token); + try referenced_types.put(.{ + .str = type_str orelse func_name, + .handle = handle, + .token = func_name_token, + }, {}); + } + }, + else => unreachable, + } + }, + .other => |node_handle| switch (node_handle.handle.tree.nodes.items(.tag)[node_handle.node]) { .fn_proto, .fn_proto_multi, .fn_proto_one, diff --git a/src/features/completions.zig b/src/features/completions.zig index 562449941..1bd1ae71e 100644 --- a/src/features/completions.zig +++ b/src/features/completions.zig @@ -102,6 +102,16 @@ fn typeToCompletion(builder: *Builder, ty: Analyser.Type) error{OutOfMemory}!voi ), }); }, + .container => |scope_handle| { + var decls: std.ArrayListUnmanaged(Analyser.DeclWithHandle) = .{}; + try builder.analyser.collectDeclarationsOfContainer(scope_handle, builder.orig_handle, !ty.is_type_val, &decls); + + for (decls.items) |decl_with_handle| { + try declToCompletion(builder, decl_with_handle, .{ + .parent_container_ty = ty, + }); + } + }, .other => |node_handle| switch (node_handle.handle.tree.nodes.items(.tag)[node_handle.node]) { .merge_error_sets => { const node_data = 
node_handle.handle.tree.nodes.items(.data)[node_handle.node]; @@ -112,29 +122,7 @@ fn typeToCompletion(builder: *Builder, ty: Analyser.Type) error{OutOfMemory}!voi try typeToCompletion(builder, rhs_ty); } }, - .error_set_decl, - .root, - .container_decl, - .container_decl_arg, - .container_decl_arg_trailing, - .container_decl_trailing, - .container_decl_two, - .container_decl_two_trailing, - .tagged_union, - .tagged_union_trailing, - .tagged_union_two, - .tagged_union_two_trailing, - .tagged_union_enum_tag, - .tagged_union_enum_tag_trailing, - => { - var decls: std.ArrayListUnmanaged(Analyser.DeclWithHandle) = .{}; - try builder.analyser.collectDeclarationsOfContainer(node_handle, builder.orig_handle, !ty.is_type_val, &decls); - for (decls.items) |decl_with_handle| { - try declToCompletion(builder, decl_with_handle, .{ - .parent_container_ty = ty, - }); - } - }, + .fn_proto, .fn_proto_multi, .fn_proto_one, @@ -1240,12 +1228,12 @@ fn collectContainerFields( omit_members: std.BufSet, ) error{OutOfMemory}!void { const use_snippets = builder.server.config.enable_snippets and builder.server.client_capabilities.supports_snippets; - const node_handle = switch (container.data) { - .other => |n| n, + const scope_handle = switch (container.data) { + .container => |s| s, else => return, }; - const node = node_handle.node; - const handle = node_handle.handle; + const node = scope_handle.toNode(); + const handle = scope_handle.handle; var buffer: [2]Ast.Node.Index = undefined; const container_decl = Ast.fullContainerDecl(handle.tree, &buffer, node) orelse return; for (container_decl.ast.members) |member| { diff --git a/src/features/goto.zig b/src/features/goto.zig index 1e0613cd4..bac3e6c02 100644 --- a/src/features/goto.zig +++ b/src/features/goto.zig @@ -43,7 +43,7 @@ fn gotoDefinitionSymbol( const type_declaration = try decl_handle.typeDeclarationNode() orelse { // just resolve the type and guess if (try decl_handle.resolveType(analyser)) |resolved_type| { - if (resolved_type.typeDefinitionToken()) |token_handle| { + if (try resolved_type.typeDefinitionToken()) |token_handle| { break :blk token_handle; } } diff --git a/src/features/semantic_tokens.zig b/src/features/semantic_tokens.zig index dfd7b1a04..f2a57779d 100644 --- a/src/features/semantic_tokens.zig +++ b/src/features/semantic_tokens.zig @@ -605,7 +605,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v field_token_type = if (try builder.analyser.resolveTypeOfNode( .{ .node = struct_init.ast.type_expr, .handle = handle }, )) |struct_type| switch (struct_type.data) { - .other => |node_handle| fieldTokenType(node_handle.node, node_handle.handle, false), + .container => |scope_handle| fieldTokenType(scope_handle.toNode(), scope_handle.handle, false), else => null, } else null; } @@ -871,7 +871,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v .ast_node => |decl_node| { if (decl_type.handle.tree.nodes.items(.tag)[decl_node].isContainerField()) { const tok_type = switch (lhs_type.data) { - .other => |node_handle| fieldTokenType(node_handle.node, node_handle.handle, lhs_type.is_type_val), + .container => |scope_handle| fieldTokenType(scope_handle.toNode(), scope_handle.handle, lhs_type.is_type_val), else => null, }; From 465bf4181ac8bb31c38a2f448d587e8e6241f9ab Mon Sep 17 00:00:00 2001 From: SuperAuguste <19855629+SuperAuguste@users.noreply.github.com> Date: Sun, 3 Mar 2024 14:10:26 -0500 Subject: [PATCH 03/21] Remove incorrect import resolution case in alias resolution --- 
src/analysis.zig | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 8a66861eb..6ddec725c 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -763,23 +763,6 @@ fn resolveVarDeclAliasInternal(analyser: *Analyser, node_handle: NodeWithHandle, return try analyser.resolveVarDeclAliasInternal(.{ .node = base_exp, .handle = handle }, node_trail); }, - .builtin_call, - .builtin_call_comma, - .builtin_call_two, - .builtin_call_two_comma, - => blk: { - const lhs = datas[node_handle.node].lhs; - const name = tree.tokenSlice(main_tokens[lhs]); - if (!std.mem.eql(u8, name, "@import") and !std.mem.eql(u8, name, "@cImport")) - return null; - - const inner_node = (try analyser.resolveTypeOfNode(.{ .node = lhs, .handle = handle })) orelse return null; - // assert root node - std.debug.assert(@intFromEnum(inner_node.data.container.scope) == 0); - const document_scope = try inner_node.data.container.handle.getDocumentScope(); - const root_decl = document_scope.declarations.get(0); - break :blk DeclWithHandle{ .decl = root_decl, .handle = inner_node.data.container.handle }; - }, else => return null, } orelse return null; From ca49d03fcb4a4d3e44df35c446128cebeb09dacd Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Wed, 6 Mar 2024 20:54:47 +0100 Subject: [PATCH 04/21] delete redundant `Analyser.` --- src/analysis.zig | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 6ddec725c..ec9a5827b 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -3972,14 +3972,13 @@ pub const DeclWithHandle = struct { pub fn docComments(self: DeclWithHandle, allocator: std.mem.Allocator) error{OutOfMemory}!?[]const u8 { const tree = self.handle.tree; return switch (self.decl) { - // TODO: delete redundant `Analyser.` - .ast_node => |node| try Analyser.getDocComments(allocator, tree, node), + .ast_node => |node| try getDocComments(allocator, tree, node), .function_parameter => |pay| { const param = pay.get(tree).?; const doc_comments = param.first_doc_comment orelse return null; - return try Analyser.collectDocComments(allocator, tree, doc_comments, false); + return try collectDocComments(allocator, tree, doc_comments, false); }, - .error_token => |token| try Analyser.getDocCommentsBeforeToken(allocator, tree, token), + .error_token => |token| try getDocCommentsBeforeToken(allocator, tree, token), else => null, }; } From 601ba7a55d91b3bd5d72bb1512372f12e8f23ed6 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Fri, 7 Jun 2024 00:20:16 +0200 Subject: [PATCH 05/21] remove `getLabelGlobal` and `getSymbolGlobal` --- src/analysis.zig | 23 ----------------------- src/features/goto.zig | 4 ++-- src/features/hover.zig | 4 ++-- src/features/references.zig | 4 ++-- 4 files changed, 6 insertions(+), 29 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index ec9a5827b..08cd8efe6 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -4943,29 +4943,6 @@ pub fn identifierLocFromPosition(pos_index: usize, handle: *DocumentStore.Handle return .{ .start = start_idx, .end = end_idx }; } -pub fn getLabelGlobal( - pos_index: usize, - handle: *DocumentStore.Handle, - name: []const u8, -) error{OutOfMemory}!?DeclWithHandle { - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); - - return try lookupLabel(handle, name, pos_index); -} - -pub fn getSymbolGlobal( - analyser: *Analyser, - pos_index: usize, - handle: 
*DocumentStore.Handle, - name: []const u8, -) error{OutOfMemory}!?DeclWithHandle { - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); - - return try analyser.lookupSymbolGlobal(handle, name, pos_index); -} - pub fn getSymbolEnumLiteral( analyser: *Analyser, arena: std.mem.Allocator, diff --git a/src/features/goto.zig b/src/features/goto.zig index bac3e6c02..de5d98595 100644 --- a/src/features/goto.zig +++ b/src/features/goto.zig @@ -83,7 +83,7 @@ fn gotoDefinitionLabel( const name_loc = Analyser.identifierLocFromPosition(pos_index, handle) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); - const decl = (try Analyser.getLabelGlobal(pos_index, handle, name)) orelse return null; + const decl = (try Analyser.lookupLabel(handle, name, pos_index)) orelse return null; return try gotoDefinitionSymbol(analyser, offsets.locToRange(handle.tree.source, name_loc, offset_encoding), decl, kind, offset_encoding); } @@ -101,7 +101,7 @@ fn gotoDefinitionGlobal( const name_loc = Analyser.identifierLocFromPosition(pos_index, handle) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); - const decl = (try analyser.getSymbolGlobal(pos_index, handle, name)) orelse return null; + const decl = (try analyser.lookupSymbolGlobal(handle, name, pos_index)) orelse return null; return try gotoDefinitionSymbol(analyser, offsets.locToRange(handle.tree.source, name_loc, offset_encoding), decl, kind, offset_encoding); } diff --git a/src/features/hover.zig b/src/features/hover.zig index 555f061e3..5c98e47f3 100644 --- a/src/features/hover.zig +++ b/src/features/hover.zig @@ -167,7 +167,7 @@ fn hoverDefinitionLabel( const name_loc = Analyser.identifierLocFromPosition(pos_index, handle) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); - const decl = (try Analyser.getLabelGlobal(pos_index, handle, name)) orelse return null; + const decl = (try Analyser.lookupLabel(handle, name, pos_index)) orelse return null; return .{ .contents = .{ @@ -272,7 +272,7 @@ fn hoverDefinitionGlobal( const name_loc = Analyser.identifierLocFromPosition(pos_index, handle) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); - const decl = (try analyser.getSymbolGlobal(pos_index, handle, name)) orelse return null; + const decl = (try analyser.lookupSymbolGlobal(handle, name, pos_index)) orelse return null; return .{ .contents = .{ diff --git a/src/features/references.zig b/src/features/references.zig index c982a27da..6a5415baa 100644 --- a/src/features/references.zig +++ b/src/features/references.zig @@ -452,7 +452,7 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen // TODO: Make this work with branching types const decl = switch (pos_context) { - .var_access => try analyser.getSymbolGlobal(source_index, handle, name), + .var_access => try analyser.lookupSymbolGlobal(handle, name, source_index), .field_access => |loc| z: { const held_loc = offsets.locMerge(loc, name_loc); const a = try analyser.getSymbolFieldAccesses(arena, handle, source_index, held_loc, name); @@ -462,7 +462,7 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen break :z null; }, - .label => try Analyser.getLabelGlobal(source_index, handle, name), + .label => try Analyser.lookupLabel(handle, name, source_index), .enum_literal => try analyser.getSymbolEnumLiteral(arena, handle, source_index, name), else => null, } orelse return null; From c67b6fb1d78b8167968e71e685c657e9707d9dc9 
Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Fri, 7 Jun 2024 00:36:50 +0200 Subject: [PATCH 06/21] move Declaration to DocumentScope.zig --- src/DocumentScope.zig | 202 ++++++++++++++- src/analysis.zig | 328 +++---------------------- src/ast.zig | 20 ++ src/features/completions.zig | 2 +- src/features/document_symbol.zig | 51 ++-- src/features/hover.zig | 94 ++++--- src/features/references.zig | 28 ++- src/features/semantic_tokens.zig | 4 +- tests/lsp_features/document_symbol.zig | 17 +- 9 files changed, 372 insertions(+), 374 deletions(-) diff --git a/src/DocumentScope.zig b/src/DocumentScope.zig index 238cdb06f..d1f3f50d5 100644 --- a/src/DocumentScope.zig +++ b/src/DocumentScope.zig @@ -5,8 +5,6 @@ const ast = @import("ast.zig"); const Ast = std.zig.Ast; const tracy = @import("tracy"); const offsets = @import("offsets.zig"); -const Analyser = @import("analysis.zig"); -const Declaration = Analyser.Declaration; const DocumentScope = @This(); @@ -79,6 +77,206 @@ pub const DeclarationLookupContext = struct { } }; +pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex { + const tags = tree.nodes.items(.tag); + const token_tags = tree.tokens.items(.tag); + const main_tokens = tree.nodes.items(.main_token); + + var buffer: [1]Ast.Node.Index = undefined; + const token_index = switch (tags[node]) { + .local_var_decl, + .global_var_decl, + .simple_var_decl, + .aligned_var_decl, + => tree.fullVarDecl(node).?.ast.mut_token + 1, + .fn_proto, + .fn_proto_multi, + .fn_proto_one, + .fn_proto_simple, + .fn_decl, + => tree.fullFnProto(&buffer, node).?.name_token orelse return null, + + .identifier => main_tokens[node], + .error_value => main_tokens[node] + 2, // 'error'. + .test_decl => ast.testDeclNameToken(tree, node) orelse return null, + + .container_field, + .container_field_init, + .container_field_align, + => main_tokens[node], + + .root, + .container_decl, + .container_decl_trailing, + .container_decl_arg, + .container_decl_arg_trailing, + .container_decl_two, + .container_decl_two_trailing, + .tagged_union, + .tagged_union_trailing, + .tagged_union_two, + .tagged_union_two_trailing, + .tagged_union_enum_tag, + .tagged_union_enum_tag_trailing, + .error_set_decl, + .block, + .block_semicolon, + .block_two, + .block_two_semicolon, + => return null, + + else => return null, + }; + + if (token_index >= tree.tokens.len) return null; + if (token_tags[token_index] != .identifier) return null; + return token_index; +} + +pub const Declaration = union(enum) { + /// Index of the ast node. + /// Can have one of the following tags: + /// - `.root` + /// - `.container_decl` + /// - `.tagged_union` + /// - `.error_set_decl` + /// - `.container_field` + /// - `.fn_proto` + /// - `.fn_decl` + /// - `.var_decl` + /// - `.block` + ast_node: Ast.Node.Index, + /// Function parameter + function_parameter: Param, + /// - `if (condition) |identifier| {}` + /// - `while (condition) |identifier| {}` + optional_payload: struct { + identifier: Ast.TokenIndex, + condition: Ast.Node.Index, + }, + /// - `for (condition) |identifier| {}` + /// - `for (..., condition, ...) 
|..., identifier, ...| {}` + for_loop_payload: struct { + identifier: Ast.TokenIndex, + condition: Ast.Node.Index, + }, + /// - `if (condition) |identifier| {} else |_| {}` + /// - `while (condition) |identifier| {} else |_| {}` + error_union_payload: struct { + identifier: Ast.TokenIndex, + condition: Ast.Node.Index, + }, + /// - `if (condition) |_| {} else |identifier| {}` + /// - `while (condition) |_| {} else |identifier| {}` + /// - `condition catch |identifier| {}` + /// - `errdefer |identifier| {}` (condition is 0) + error_union_error: struct { + identifier: Ast.TokenIndex, + /// may be 0 + condition: Ast.Node.Index, + }, + assign_destructure: AssignDestructure, + // a switch case capture + switch_payload: Switch, + label: struct { + identifier: Ast.TokenIndex, + block: Ast.Node.Index, + }, + /// always an identifier + /// used as child declarations of an error set declaration + error_token: Ast.TokenIndex, + + pub const Param = struct { + param_index: u16, + func: Ast.Node.Index, + + pub fn get(self: Param, tree: Ast) ?Ast.full.FnProto.Param { + var buffer: [1]Ast.Node.Index = undefined; + const func = tree.fullFnProto(&buffer, self.func).?; + var param_index: u16 = 0; + var it = func.iterate(&tree); + while (ast.nextFnParam(&it)) |param| : (param_index += 1) { + if (self.param_index == param_index) return param; + } + return null; + } + }; + + pub const AssignDestructure = struct { + /// tag is .assign_destructure + node: Ast.Node.Index, + index: u32, + + pub fn getVarDeclNode(self: AssignDestructure, tree: Ast) Ast.Node.Index { + const data = tree.nodes.items(.data); + return tree.extra_data[data[self.node].lhs + 1 ..][self.index]; + } + + pub fn getFullVarDecl(self: AssignDestructure, tree: Ast) Ast.full.VarDecl { + return tree.fullVarDecl(self.getVarDeclNode(tree)).?; + } + }; + + pub const Switch = struct { + /// tag is `.@"switch"` or `.switch_comma` + node: Ast.Node.Index, + /// is guaranteed to have a payload_token + case_index: u32, + + pub fn getCase(self: Switch, tree: Ast) Ast.full.SwitchCase { + const node_datas = tree.nodes.items(.data); + const extra = tree.extraData(node_datas[self.node].rhs, Ast.Node.SubRange); + const cases = tree.extra_data[extra.start..extra.end]; + return tree.fullSwitchCase(cases[self.case_index]).?; + } + }; + + pub const Index = enum(u32) { + _, + + pub fn toOptional(index: Index) OptionalIndex { + return @enumFromInt(@intFromEnum(index)); + } + }; + + pub const OptionalIndex = enum(u32) { + none = std.math.maxInt(u32), + _, + + pub fn unwrap(index: OptionalIndex) ?Index { + if (index == .none) return null; + return @enumFromInt(@intFromEnum(index)); + } + }; + + pub fn eql(a: Declaration, b: Declaration) bool { + return std.meta.eql(a, b); + } + + pub fn nameToken(decl: Declaration, tree: Ast) Ast.TokenIndex { + return switch (decl) { + .ast_node => |n| getDeclNameToken(tree, n).?, + .function_parameter => |payload| payload.get(tree).?.name_token.?, + .optional_payload => |payload| payload.identifier, + .error_union_payload => |payload| payload.identifier, + .error_union_error => |payload| payload.identifier, + .for_loop_payload => |payload| payload.identifier, + .label => |payload| payload.identifier, + .error_token => |error_token| error_token, + .assign_destructure => |payload| { + const var_decl_node = payload.getVarDeclNode(tree); + const varDecl = tree.fullVarDecl(var_decl_node).?; + return varDecl.ast.mut_token + 1; + }, + .switch_payload => |payload| { + const case = payload.getCase(tree); + const payload_token = 
case.payload_token.?; + return payload_token + @intFromBool(tree.tokens.items(.tag)[payload_token] == .asterisk); + }, + }; + } +}; + pub const Scope = struct { pub const Tag = enum(u3) { /// `node_tags[ast_node]` is ContainerDecl or Root or ErrorSetDecl diff --git a/src/analysis.zig b/src/analysis.zig index 08cd8efe6..982942b5d 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -18,8 +18,9 @@ const tracy = @import("tracy"); const InternPool = @import("analyser/InternPool.zig"); const references = @import("features/references.zig"); -const DocumentScope = @import("DocumentScope.zig"); -const Scope = DocumentScope.Scope; +pub const DocumentScope = @import("DocumentScope.zig"); +pub const Declaration = DocumentScope.Declaration; +pub const Scope = DocumentScope.Scope; const Analyser = @This(); @@ -468,8 +469,6 @@ pub fn getVariableSignature( for (container_decl.ast.members) |member| { const member_line_start = offsets.lineLocUntilIndex(tree.source, offsets.tokenToIndex(tree, tree.firstToken(member))).start; - if (!isNodePublic(tree, member)) continue; - const member_source_indented = switch (tree.nodes.items(.tag)[member]) { .container_field_init, .container_field_align, @@ -608,90 +607,6 @@ pub fn isSnakeCase(name: []const u8) bool { // ANALYSIS ENGINE -pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex { - return getContainerDeclNameToken(tree, null, node); -} - -pub fn getContainerDeclNameToken(tree: Ast, container: ?Ast.Node.Index, node: Ast.Node.Index) ?Ast.TokenIndex { - const tags = tree.nodes.items(.tag); - const datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const main_token = main_tokens[node]; - const token_tags = tree.tokens.items(.tag); - - return switch (tags[node]) { - // regular declaration names. + 1 to mut token because name comes after 'const'/'var' - .local_var_decl, - .global_var_decl, - .simple_var_decl, - .aligned_var_decl, - => { - const tok = tree.fullVarDecl(node).?.ast.mut_token + 1; - return if (tok >= tree.tokens.len) - null - else - tok; - }, - // function declaration names - .fn_proto, - .fn_proto_multi, - .fn_proto_one, - .fn_proto_simple, - .fn_decl, - => blk: { - var params: [1]Ast.Node.Index = undefined; - break :blk tree.fullFnProto(¶ms, node).?.name_token; - }, - - // containers - .container_field, - .container_field_init, - .container_field_align, - => { - if (container) |container_node| { - if (token_tags[main_tokens[container_node]] == .keyword_struct and - tree.fullContainerField(node).?.ast.tuple_like) - { - return null; - } - } - return main_token; - }, - .identifier => main_token, - .error_value => { - const tok = main_token + 2; - return if (tok >= tree.tokens.len) - null - else - tok; - }, // 'error'. - - .test_decl => if (datas[node].lhs != 0) datas[node].lhs else null, - - else => null, - }; -} - -pub fn getDeclName(tree: Ast, node: Ast.Node.Index) ?[]const u8 { - return getContainerDeclName(tree, null, node); -} - -pub fn getContainerDeclName(tree: Ast, container: ?Ast.Node.Index, node: Ast.Node.Index) ?[]const u8 { - const name_token = getContainerDeclNameToken(tree, container, node) orelse return null; - return declNameTokenToSlice(tree, name_token); -} - -pub fn declNameTokenToSlice(tree: Ast, name_token: Ast.TokenIndex) ?[]const u8 { - switch (tree.tokens.items(.tag)[name_token]) { - .string_literal => { - const name = offsets.tokenToSlice(tree, name_token); - return name[1 .. 
name.len - 1]; - }, - .identifier => return offsets.identifierTokenToNameSlice(tree, name_token), - else => return null, - } -} - /// Resolves variable declarations consisting of chains of imports and field accesses of containers /// Examples: ///```zig @@ -811,9 +726,9 @@ fn findReturnStatementInternal(tree: Ast, fn_decl: Ast.full.FnProto, body: Ast.N const lhs = datas[child_idx].lhs; var buf: [1]Ast.Node.Index = undefined; if (tree.fullCall(&buf, lhs)) |call| { - const call_name = getDeclName(tree, call.ast.fn_expr); + const call_name = DocumentScope.getDeclNameToken(tree, call.ast.fn_expr); if (call_name) |name| { - if (std.mem.eql(u8, name, tree.tokenSlice(fn_decl.name_token.?))) { + if (std.mem.eql(u8, offsets.tokenToSlice(tree, name), offsets.tokenToSlice(tree, fn_decl.name_token.?))) { continue; } } @@ -2725,12 +2640,20 @@ pub const Type = struct { }; } + /// Returns whether the given function has a `anytype` parameter. pub fn isGenericFunc(self: Type) bool { - var buf: [1]Ast.Node.Index = undefined; return switch (self.data) { - .other => |node_handle| if (node_handle.handle.tree.fullFnProto(&buf, node_handle.node)) |fn_proto| blk: { - break :blk isGenericFunction(node_handle.handle.tree, fn_proto); - } else false, + .other => |node_handle| { + var buf: [1]Ast.Node.Index = undefined; + const fn_proto = node_handle.handle.tree.fullFnProto(&buf, node_handle.node) orelse return false; + var it = fn_proto.iterate(&node_handle.handle.tree); + while (ast.nextFnParam(&it)) |param| { + if (param.anytype_ellipsis3 != null or param.comptime_noalias != null) { + return true; + } + } + return false; + }, else => false, }; } @@ -3309,66 +3232,6 @@ pub fn getFieldAccessType( return current_type; } -pub fn isNodePublic(tree: Ast, node: Ast.Node.Index) bool { - var buf: [1]Ast.Node.Index = undefined; - return switch (tree.nodes.items(.tag)[node]) { - .global_var_decl, - .local_var_decl, - .simple_var_decl, - .aligned_var_decl, - => tree.fullVarDecl(node).?.visib_token != null, - .fn_proto, - .fn_proto_multi, - .fn_proto_one, - .fn_proto_simple, - .fn_decl, - => tree.fullFnProto(&buf, node).?.visib_token != null, - else => true, - }; -} - -pub fn nodeToString(tree: Ast, node: Ast.Node.Index) ?[]const u8 { - const data = tree.nodes.items(.data); - const main_token = tree.nodes.items(.main_token)[node]; - var buf: [1]Ast.Node.Index = undefined; - return switch (tree.nodes.items(.tag)[node]) { - .container_field, - .container_field_init, - .container_field_align, - => { - const field = tree.fullContainerField(node).?.ast; - return if (field.tuple_like) null else tree.tokenSlice(field.main_token); - }, - .error_value => tree.tokenSlice(data[node].rhs), - .identifier => { - if (tree.tokens.items(.tag)[main_token] != .identifier) return null; - return offsets.identifierTokenToNameSlice(tree, main_token); - }, - .fn_proto, - .fn_proto_multi, - .fn_proto_one, - .fn_proto_simple, - .fn_decl, - => if (tree.fullFnProto(&buf, node).?.name_token) |name| tree.tokenSlice(name) else null, - .field_access => tree.tokenSlice(data[node].rhs), - .call, - .call_comma, - .async_call, - .async_call_comma, - => tree.tokenSlice(tree.callFull(node).ast.lparen - 1), - .call_one, - .call_one_comma, - .async_call_one, - .async_call_one_comma, - => tree.tokenSlice(tree.callOne(&buf, node).ast.lparen - 1), - .test_decl => if (data[node].lhs != 0) tree.tokenSlice(data[node].lhs) else null, - else => |tag| { - log.debug("INVALID: {}", .{tag}); - return null; - }, - }; -} - pub const PositionContext = union(enum) { builtin: 
offsets.Loc, comment, @@ -3687,146 +3550,6 @@ pub const TokenWithHandle = struct { handle: *DocumentStore.Handle, }; -pub const Declaration = union(enum) { - /// Index of the ast node. - /// Can have one of the following tags: - /// - `.root` - /// - `.container_decl` - /// - `.tagged_union` - /// - `.error_set_decl` - /// - `.container_field` - /// - `.fn_proto` - /// - `.fn_decl` - /// - `.var_decl` - /// - `.block` - ast_node: Ast.Node.Index, - /// Function parameter - function_parameter: Param, - /// - `if (condition) |identifier| {}` - /// - `while (condition) |identifier| {}` - optional_payload: struct { - identifier: Ast.TokenIndex, - condition: Ast.Node.Index, - }, - /// - `for (condition) |identifier| {}` - /// - `for (..., condition, ...) |..., identifier, ...| {}` - for_loop_payload: struct { - identifier: Ast.TokenIndex, - condition: Ast.Node.Index, - }, - /// - `if (condition) |identifier| {} else |_| {}` - /// - `while (condition) |identifier| {} else |_| {}` - error_union_payload: struct { - identifier: Ast.TokenIndex, - condition: Ast.Node.Index, - }, - /// - `if (condition) |_| {} else |identifier| {}` - /// - `while (condition) |_| {} else |identifier| {}` - /// - `condition catch |identifier| {}` - /// - `errdefer |identifier| {}` (condition is 0) - error_union_error: struct { - identifier: Ast.TokenIndex, - /// may be 0 - condition: Ast.Node.Index, - }, - assign_destructure: AssignDestructure, - // a switch case capture - switch_payload: Switch, - label: struct { - identifier: Ast.TokenIndex, - block: Ast.Node.Index, - }, - /// always an identifier - /// used as child declarations of an error set declaration - error_token: Ast.TokenIndex, - - pub const Param = struct { - param_index: u16, - func: Ast.Node.Index, - - pub fn get(self: Param, tree: Ast) ?Ast.full.FnProto.Param { - var buffer: [1]Ast.Node.Index = undefined; - const func = tree.fullFnProto(&buffer, self.func).?; - var param_index: u16 = 0; - var it = func.iterate(&tree); - while (ast.nextFnParam(&it)) |param| : (param_index += 1) { - if (self.param_index == param_index) return param; - } - return null; - } - }; - - pub const AssignDestructure = struct { - /// tag is .assign_destructure - node: Ast.Node.Index, - index: u32, - - pub fn getVarDeclNode(self: AssignDestructure, tree: Ast) Ast.Node.Index { - const data = tree.nodes.items(.data); - return tree.extra_data[data[self.node].lhs + 1 ..][self.index]; - } - - pub fn getFullVarDecl(self: AssignDestructure, tree: Ast) Ast.full.VarDecl { - return tree.fullVarDecl(self.getVarDeclNode(tree)).?; - } - }; - - pub const Switch = struct { - /// tag is `.@"switch"` or `.switch_comma` - node: Ast.Node.Index, - /// is guaranteed to have a payload_token - case_index: u32, - - pub fn getCase(self: Switch, tree: Ast) Ast.full.SwitchCase { - const node_datas = tree.nodes.items(.data); - const extra = tree.extraData(node_datas[self.node].rhs, Ast.Node.SubRange); - const cases = tree.extra_data[extra.start..extra.end]; - return tree.fullSwitchCase(cases[self.case_index]).?; - } - }; - - pub const Index = enum(u32) { - _, - - pub fn toOptional(index: Index) OptionalIndex { - return @enumFromInt(@intFromEnum(index)); - } - }; - - pub const OptionalIndex = enum(u32) { - none = std.math.maxInt(u32), - _, - - pub fn unwrap(index: OptionalIndex) ?Index { - if (index == .none) return null; - return @enumFromInt(@intFromEnum(index)); - } - }; - - pub fn eql(a: Declaration, b: Declaration) bool { - return std.meta.eql(a, b); - } - - pub fn nameToken(decl: Declaration, tree: Ast) 
Ast.TokenIndex { - return switch (decl) { - .ast_node => |n| getDeclNameToken(tree, n).?, - .function_parameter => |payload| payload.get(tree).?.name_token.?, - .optional_payload => |payload| payload.identifier, - .error_union_payload => |payload| payload.identifier, - .error_union_error => |payload| payload.identifier, - .for_loop_payload => |payload| payload.identifier, - .label => |payload| payload.identifier, - .error_token => |error_token| error_token, - .assign_destructure => |payload| getDeclNameToken(tree, payload.getVarDeclNode(tree)).?, - .switch_payload => |payload| { - const case = payload.getCase(tree); - const payload_token = case.payload_token.?; - return payload_token + @intFromBool(tree.tokens.items(.tag)[payload_token] == .asterisk); - }, - }; - } -}; - pub const DeclWithHandle = struct { decl: Declaration, handle: *DocumentStore.Handle, @@ -3984,8 +3707,23 @@ pub const DeclWithHandle = struct { } pub fn isPublic(self: DeclWithHandle) bool { + const tree = self.handle.tree; + var buf: [1]Ast.Node.Index = undefined; return switch (self.decl) { - .ast_node => |node| isNodePublic(self.handle.tree, node), + .ast_node => |node| switch (tree.nodes.items(.tag)[node]) { + .global_var_decl, + .local_var_decl, + .simple_var_decl, + .aligned_var_decl, + => tree.fullVarDecl(node).?.visib_token != null, + .fn_proto, + .fn_proto_multi, + .fn_proto_one, + .fn_proto_simple, + .fn_decl, + => tree.fullFnProto(&buf, node).?.visib_token != null, + else => true, + }, else => true, }; } diff --git a/src/ast.zig b/src/ast.zig index 801a09212..d8923169e 100644 --- a/src/ast.zig +++ b/src/ast.zig @@ -1122,6 +1122,26 @@ pub fn lastToken(tree: Ast, node: Ast.Node.Index) Ast.TokenIndex { }; } +pub fn testDeclNameToken(tree: Ast, test_decl_node: Ast.Node.Index) ?Ast.TokenIndex { + std.debug.assert(tree.nodes.items(.tag)[test_decl_node] == .test_decl); + const node_datas = tree.nodes.items(.data); + if (node_datas[test_decl_node].lhs == 0) return null; + return node_datas[test_decl_node].lhs; +} + +pub fn testDeclNameAndToken(tree: Ast, test_decl_node: Ast.Node.Index) ?struct { Ast.TokenIndex, []const u8 } { + const test_name_token = testDeclNameToken(tree, test_decl_node) orelse return null; + + switch (tree.tokens.items(.tag)[test_name_token]) { + .string_literal => { + const name = offsets.tokenToSlice(tree, test_name_token); + return .{ test_name_token, name[1 .. 
name.len - 1] }; + }, + .identifier => return .{ test_name_token, offsets.identifierTokenToNameSlice(tree, test_name_token) }, + else => return null, + } +} + pub fn hasInferredError(tree: Ast, fn_proto: Ast.full.FnProto) bool { const token_tags = tree.tokens.items(.tag); if (fn_proto.ast.return_type == 0) return false; diff --git a/src/features/completions.zig b/src/features/completions.zig index 1bd1ae71e..0a4b2c0d5 100644 --- a/src/features/completions.zig +++ b/src/features/completions.zig @@ -361,7 +361,7 @@ fn functionTypeCompletion( }, }; - const kind: types.CompletionItemKind = if (Analyser.isTypeFunction(tree, func)) + const kind: types.CompletionItemKind = if (func_ty.isTypeFunc()) .Struct else if (has_self_param) .Method diff --git a/src/features/document_symbol.zig b/src/features/document_symbol.zig index d8acbd914..def976f2e 100644 --- a/src/features/document_symbol.zig +++ b/src/features/document_symbol.zig @@ -35,9 +35,6 @@ fn callback(ctx: *Context, tree: Ast, node: Ast.Node.Index) error{OutOfMemory}!v const main_tokens = tree.nodes.items(.main_token); const token_tags = tree.tokens.items(.tag); - const decl_name_token = analysis.getContainerDeclNameToken(tree, ctx.parent_container, node); - const decl_name = if (decl_name_token) |name_token| analysis.declNameTokenToSlice(tree, name_token) else null; - var new_ctx = ctx.*; const maybe_symbol: ?Symbol = switch (node_tags[node]) { .global_var_decl, @@ -45,9 +42,15 @@ fn callback(ctx: *Context, tree: Ast, node: Ast.Node.Index) error{OutOfMemory}!v .simple_var_decl, .aligned_var_decl, => blk: { - new_ctx.last_var_decl_name = decl_name; if (!ast.isContainer(tree, ctx.parent_node)) break :blk null; + const var_decl = tree.fullVarDecl(node).?; + const var_decl_name_token = var_decl.ast.mut_token + 1; + if (tree.tokens.items(.tag)[var_decl_name_token] != .identifier) break :blk null; + const var_decl_name = offsets.identifierTokenToNameSlice(tree, var_decl_name_token); + + new_ctx.last_var_decl_name = var_decl_name; + const kind: types.SymbolKind = switch (token_tags[main_tokens[node]]) { .keyword_var => .Variable, .keyword_const => .Constant, @@ -55,33 +58,38 @@ fn callback(ctx: *Context, tree: Ast, node: Ast.Node.Index) error{OutOfMemory}!v }; break :blk .{ - .name = decl_name orelse break :blk null, + .name = var_decl_name, .detail = null, .kind = kind, .loc = offsets.nodeToLoc(tree, node), - .selection_loc = offsets.tokenToLoc(tree, decl_name_token.?), + .selection_loc = offsets.tokenToLoc(tree, var_decl_name_token), .children = .{}, }; }, - .test_decl, - .fn_decl, - => |tag| blk: { - const kind: types.SymbolKind = switch (tag) { - .test_decl => .Method, // there is no SymbolKind that represents a tests - .fn_decl => .Function, - else => unreachable, + .test_decl => blk: { + const test_name_token, const test_name = ast.testDeclNameAndToken(tree, node) orelse break :blk null; + + break :blk .{ + .name = test_name, + .kind = .Method, // there is no SymbolKind that represents a tests + .loc = offsets.nodeToLoc(tree, node), + .selection_loc = offsets.tokenToLoc(tree, test_name_token), + .children = .{}, }; + }, + .fn_decl => blk: { var buffer: [1]Ast.Node.Index = undefined; - const detail = if (tree.fullFnProto(&buffer, node)) |fn_info| analysis.getFunctionSignature(tree, fn_info) else null; + const fn_info = tree.fullFnProto(&buffer, node).?; + const name_token = fn_info.name_token orelse break :blk null; break :blk .{ - .name = decl_name orelse break :blk null, - .detail = detail, - .kind = kind, + .name = 
offsets.identifierTokenToNameSlice(tree, name_token), + .detail = analysis.getFunctionSignature(tree, fn_info), + .kind = .Function, .loc = offsets.nodeToLoc(tree, node), - .selection_loc = offsets.tokenToLoc(tree, decl_name_token.?), + .selection_loc = offsets.tokenToLoc(tree, name_token), .children = .{}, }; }, @@ -115,12 +123,15 @@ fn callback(ctx: *Context, tree: Ast, node: Ast.Node.Index) error{OutOfMemory}!v else => unreachable, }; + const decl_name_token = analysis.DocumentScope.getDeclNameToken(tree, node) orelse break :blk null; + const decl_name = offsets.tokenToSlice(tree, decl_name_token); + break :blk .{ - .name = decl_name orelse break :blk null, + .name = decl_name, .detail = ctx.last_var_decl_name, .kind = kind, .loc = offsets.nodeToLoc(tree, node), - .selection_loc = offsets.tokenToLoc(tree, decl_name_token.?), + .selection_loc = offsets.tokenToLoc(tree, decl_name_token), .children = .{}, }; }, diff --git a/src/features/hover.zig b/src/features/hover.zig index 5c98e47f3..53ea357af 100644 --- a/src/features/hover.zig +++ b/src/features/hover.zig @@ -51,41 +51,65 @@ fn hoverSymbolRecursive( return try hoverSymbolRecursive(analyser, arena, result, markup_kind, doc_strings); } - var buf: [1]Ast.Node.Index = undefined; - - if (tree.fullVarDecl(node)) |var_decl| { - var struct_init_buf: [2]Ast.Node.Index = undefined; - var type_node: Ast.Node.Index = 0; - - if (var_decl.ast.type_node != 0) { - type_node = var_decl.ast.type_node; - } else if (tree.fullStructInit(&struct_init_buf, var_decl.ast.init_node)) |struct_init| { - if (struct_init.ast.type_expr != 0) - type_node = struct_init.ast.type_expr; - } - - if (type_node != 0) - try analyser.referencedTypesFromNode( - .{ .node = type_node, .handle = handle }, - &reference_collector, - ); - - break :def try Analyser.getVariableSignature(arena, tree, var_decl, true); - } else if (tree.fullFnProto(&buf, node)) |fn_proto| { - is_fn = true; - break :def Analyser.getFunctionSignature(tree, fn_proto); - } else if (tree.fullContainerField(node)) |field| { - var converted = field; - converted.convertToNonTupleLike(tree.nodes); - if (converted.ast.type_expr != 0) - try analyser.referencedTypesFromNode( - .{ .node = converted.ast.type_expr, .handle = handle }, - &reference_collector, - ); - - break :def Analyser.getContainerFieldSignature(tree, field) orelse return null; - } else { - break :def Analyser.nodeToString(tree, node) orelse return null; + switch (tree.nodes.items(.tag)[node]) { + .global_var_decl, + .local_var_decl, + .aligned_var_decl, + .simple_var_decl, + => { + const var_decl = tree.fullVarDecl(node).?; + var struct_init_buf: [2]Ast.Node.Index = undefined; + var type_node: Ast.Node.Index = 0; + + if (var_decl.ast.type_node != 0) { + type_node = var_decl.ast.type_node; + } else if (tree.fullStructInit(&struct_init_buf, var_decl.ast.init_node)) |struct_init| { + if (struct_init.ast.type_expr != 0) + type_node = struct_init.ast.type_expr; + } + + if (type_node != 0) + try analyser.referencedTypesFromNode( + .{ .node = type_node, .handle = handle }, + &reference_collector, + ); + + break :def try Analyser.getVariableSignature(arena, tree, var_decl, true); + }, + .container_field, + .container_field_init, + .container_field_align, + => { + const field = tree.fullContainerField(node).?; + var converted = field; + converted.convertToNonTupleLike(tree.nodes); + if (converted.ast.type_expr != 0) + try analyser.referencedTypesFromNode( + .{ .node = converted.ast.type_expr, .handle = handle }, + &reference_collector, + ); + + break :def 
Analyser.getContainerFieldSignature(tree, field) orelse return null; + }, + .fn_proto, + .fn_proto_multi, + .fn_proto_one, + .fn_proto_simple, + .fn_decl, + => { + is_fn = true; + var buf: [1]Ast.Node.Index = undefined; + const fn_proto = tree.fullFnProto(&buf, node).?; + break :def Analyser.getFunctionSignature(tree, fn_proto); + }, + .test_decl => { + const test_name_token, const test_name = ast.testDeclNameAndToken(tree, node) orelse return null; + _ = test_name_token; + break :def test_name; + }, + else => { + return null; + }, } }, .function_parameter => |pay| def: { diff --git a/src/features/references.zig b/src/features/references.zig index 6a5415baa..f5eb2750e 100644 --- a/src/features/references.zig +++ b/src/features/references.zig @@ -102,32 +102,38 @@ const Builder = struct { const node_tags = tree.nodes.items(.tag); const datas = tree.nodes.items(.data); - const token_tags = tree.tokens.items(.tag); + const main_tokens = tree.nodes.items(.main_token); const starts = tree.tokens.items(.start); switch (node_tags[node]) { .identifier, .test_decl, - => { - const identifier_token = Analyser.getDeclNameToken(tree, node) orelse return; - if (token_tags[identifier_token] != .identifier) return; + => |tag| { + const name_token, const name = switch (tag) { + .identifier => .{ + main_tokens[node], + offsets.identifierTokenToNameSlice(tree, main_tokens[node]), + }, + .test_decl => ast.testDeclNameAndToken(tree, node) orelse return, + else => unreachable, + }; - const child = (try builder.analyser.lookupSymbolGlobal( + const child = try builder.analyser.lookupSymbolGlobal( handle, - offsets.tokenToSlice(tree, identifier_token), - starts[identifier_token], - )) orelse return; + name, + starts[name_token], + ) orelse return; if (builder.decl_handle.eql(child)) { - try builder.add(handle, identifier_token); + try builder.add(handle, name_token); } }, .field_access => { const lhs = try builder.analyser.resolveTypeOfNode(.{ .node = datas[node].lhs, .handle = handle }) orelse return; const deref_lhs = try builder.analyser.resolveDerefType(lhs) orelse lhs; - const symbol = offsets.tokenToSlice(tree, datas[node].rhs); - const child = (try deref_lhs.lookupSymbol(builder.analyser, symbol)) orelse return; + const symbol = offsets.identifierTokenToNameSlice(tree, datas[node].rhs); + const child = try deref_lhs.lookupSymbol(builder.analyser, symbol) orelse return; if (builder.decl_handle.eql(child)) { try builder.add(handle, datas[node].rhs); diff --git a/src/features/semantic_tokens.zig b/src/features/semantic_tokens.zig index f2a57779d..1cba79a7c 100644 --- a/src/features/semantic_tokens.zig +++ b/src/features/semantic_tokens.zig @@ -416,7 +416,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v .is_type_val = true, }; - const func_name_tok_type: TokenType = if (Analyser.isTypeFunction(tree, fn_proto)) + const func_name_tok_type: TokenType = if (func_ty.isTypeFunc()) .type else if (try builder.analyser.hasSelfParam(func_ty)) .method @@ -425,7 +425,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v const tok_mod = TokenModifiers{ .declaration = true, - .generic = Analyser.isGenericFunction(tree, fn_proto), + .generic = func_ty.isGenericFunc(), }; try writeTokenMod(builder, fn_proto.name_token, func_name_tok_type, tok_mod); diff --git a/tests/lsp_features/document_symbol.zig b/tests/lsp_features/document_symbol.zig index 4aaaf7685..f1975b7da 100644 --- a/tests/lsp_features/document_symbol.zig +++ 
b/tests/lsp_features/document_symbol.zig @@ -27,14 +27,15 @@ test "container decl" { \\ Field alpha \\ Function f ); - try testDocumentSymbol( - \\const S = struct { - \\ []const u8, - \\ u32, - \\}; - , - \\Constant S - ); + // TODO + // try testDocumentSymbol( + // \\const S = struct { + // \\ []const u8, + // \\ u32, + // \\}; + // , + // \\Constant S + // ); } test "enum" { From bbad28c55eda5f1f1cbfbf0edc171de08808a231 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Fri, 7 Jun 2024 00:31:58 +0200 Subject: [PATCH 07/21] remove duplicate implementation of `isSymbolChar` --- src/features/code_actions.zig | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/features/code_actions.zig b/src/features/code_actions.zig index 3b706da78..429669ede 100644 --- a/src/features/code_actions.zig +++ b/src/features/code_actions.zig @@ -580,7 +580,7 @@ const DiagnosticKind = union(enum) { fn getDiscardLoc(text: []const u8, loc: offsets.Loc) ?offsets.Loc { // check of the loc points to a valid identifier for (offsets.locToSlice(text, loc)) |c| { - if (!isSymbolChar(c)) return null; + if (!Analyser.isSymbolChar(c)) return null; } // check if the identifier is followed by a colon @@ -684,7 +684,3 @@ test getCaptureLoc { try std.testing.expect(getCaptureLoc("| |", .{ .start = 1, .end = 3 }) == null); try std.testing.expect(getCaptureLoc("| |", .{ .start = 1, .end = 6 }) == null); } - -fn isSymbolChar(char: u8) bool { - return std.ascii.isAlphanumeric(char) or char == '_'; -} From 681e5a01b39b2108167970f49410ba841ffb6ff7 Mon Sep 17 00:00:00 2001 From: Techatrix <19954306+Techatrix@users.noreply.github.com> Date: Fri, 7 Jun 2024 00:36:01 +0200 Subject: [PATCH 08/21] improve documentation of `Analyser.Type` --- src/analysis.zig | 51 +++++++++++++++++++++++++++--------------------- 1 file changed, 29 insertions(+), 22 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 982942b5d..82b05c8c7 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -2241,14 +2241,20 @@ fn resolveTypeOfNodeUncached(analyser: *Analyser, node_handle: NodeWithHandle) e /// Represents a resolved Zig type. /// This is the return type of `resolveTypeOfNode`. pub const Type = struct { - pub const EitherEntry = struct { - /// the `is_type_val` property is inherited from the containing `Type` - type_data: Data, - descriptor: []const u8, - }; + data: Data, + /// If true, the type `type`, the attached data is the value of the type value. 
+ /// ```zig + /// const foo = u32; // is_type_val == true + /// const bar = @as(u32, ...); // is_type_val == false + /// ``` + /// if `data == .ip_index` then this field is equivalent to `typeOf(index) == .type_type` + is_type_val: bool, pub const Data = union(enum) { - /// *T, [*]T, [*:x]T, [T], [*c]T + /// - `*const T` + /// - `[*]T` + /// - `[]const T` + /// - `[*c]T` pointer: struct { size: std.builtin.Type.Pointer.Size, /// `.none` means no sentinel @@ -2257,7 +2263,7 @@ pub const Type = struct { elem_ty: *Type, }, - /// [elem_count :sentinel]elem_ty + /// `[elem_count :sentinel]elem_ty` array: struct { elem_count: ?u64, /// `.none` means no sentinel @@ -2265,7 +2271,7 @@ pub const Type = struct { elem_ty: *Type, }, - /// ?T + /// `?T` optional: *Type, /// `error_set!payload` @@ -2278,7 +2284,11 @@ pub const Type = struct { /// `Foo` in `Foo.bar` where `Foo = union(enum) { bar }` union_tag: *Type, - /// - Container type: `struct {}`, `enum {}`, `union {}`, `opaque {}`, `error {}` + /// - `struct {}` + /// - `enum {}` + /// - `union {}` + /// - `opaque {}` + /// - `error {}` container: ScopeWithHandle, /// - Error type: `Foo || Bar`, `Foo!Bar` @@ -2298,16 +2308,13 @@ pub const Type = struct { /// this stores both the type and the value index: InternPool.Index, }, - }; - data: Data, - /// If true, the type `type`, the attached data is the value of the type value. - /// ```zig - /// const foo = u32; // is_type_val == true - /// const bar = @as(u32, ...); // is_type_val == false - /// ``` - /// if `data == .ip_index` then this field is equivalent to `typeOf(index) == .type_type` - is_type_val: bool, + pub const EitherEntry = struct { + /// the `is_type_val` property is inherited from the containing `Type` + type_data: Data, + descriptor: []const u8, + }; + }; pub fn hash32(self: Type) u32 { return @truncate(self.hash64()); @@ -2454,13 +2461,13 @@ pub const Type = struct { // duplicates const DeduplicatorContext = struct { - pub fn hash(self: @This(), item: Type.EitherEntry) u32 { + pub fn hash(self: @This(), item: Type.Data.EitherEntry) u32 { _ = self; const ty = Type{ .data = item.type_data, .is_type_val = true }; return ty.hash32(); } - pub fn eql(self: @This(), a: Type.EitherEntry, b: Type.EitherEntry, b_index: usize) bool { + pub fn eql(self: @This(), a: Type.Data.EitherEntry, b: Type.Data.EitherEntry, b_index: usize) bool { _ = b_index; _ = self; const a_ty = Type{ .data = a.type_data, .is_type_val = true }; @@ -2468,7 +2475,7 @@ pub const Type = struct { return a_ty.eql(b_ty); } }; - const Deduplicator = std.ArrayHashMapUnmanaged(Type.EitherEntry, void, DeduplicatorContext, true); + const Deduplicator = std.ArrayHashMapUnmanaged(Type.Data.EitherEntry, void, DeduplicatorContext, true); var deduplicator = Deduplicator{}; defer deduplicator.deinit(arena); @@ -2490,7 +2497,7 @@ pub const Type = struct { return entries[0].type; return .{ - .data = .{ .either = try arena.dupe(Type.EitherEntry, deduplicator.keys()) }, + .data = .{ .either = try arena.dupe(Type.Data.EitherEntry, deduplicator.keys()) }, .is_type_val = has_type_val, }; } From ad6f5e79595a8c4a6495493f71cc50d5d3d0c104 Mon Sep 17 00:00:00 2001 From: Techatrix Date: Mon, 15 Jul 2024 22:27:40 +0200 Subject: [PATCH 09/21] remove unused functions --- src/analysis.zig | 31 ------------------------------- 1 file changed, 31 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 82b05c8c7..e3a7e7285 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -580,17 +580,6 @@ pub fn isTypeFunction(tree: Ast, func: 
Ast.full.FnProto) bool { return isMetaType(tree, func.ast.return_type); } -/// Returns whether the given function has a `anytype` parameter. -pub fn isGenericFunction(tree: Ast, func: Ast.full.FnProto) bool { - var it = func.iterate(&tree); - while (ast.nextFnParam(&it)) |param| { - if (param.anytype_ellipsis3 != null or param.comptime_noalias != null) { - return true; - } - } - return false; -} - // STYLE pub fn isCamelCase(name: []const u8) bool { @@ -3934,26 +3923,6 @@ pub const DeclWithHandle = struct { } }; -/// Collects all symbols/declarations that can be a acccessed on the given container type. -fn findContainerScopeIndex(container_handle: NodeWithHandle) !?Scope.Index { - const container = container_handle.node; - const handle = container_handle.handle; - - const tree = handle.tree; - const document_scope = try handle.getDocumentScope(); - - if (!ast.isContainer(tree, container)) return null; - - return for (0..document_scope.scopes.len) |scope_index| { - switch (document_scope.getScopeTag(@enumFromInt(scope_index))) { - .container, .container_usingnamespace => if (document_scope.getScopeAstNode(@enumFromInt(scope_index)).? == container) { - break @enumFromInt(scope_index); - }, - else => {}, - } - } else null; -} - /// Collects all symbols/declarations that can be a accessed on the given container type. pub fn collectDeclarationsOfContainer( analyser: *Analyser, From 6b878a67633e7bb7ce468c020dc625dc1f455c0a Mon Sep 17 00:00:00 2001 From: Techatrix Date: Wed, 17 Jul 2024 00:21:54 +0200 Subject: [PATCH 10/21] correctly detect struct tuple container fields --- src/DocumentScope.zig | 120 ++++++++++--------------- src/features/document_symbol.zig | 10 ++- tests/lsp_features/document_symbol.zig | 20 +++-- 3 files changed, 66 insertions(+), 84 deletions(-) diff --git a/src/DocumentScope.zig b/src/DocumentScope.zig index d1f3f50d5..9b6721430 100644 --- a/src/DocumentScope.zig +++ b/src/DocumentScope.zig @@ -77,6 +77,7 @@ pub const DeclarationLookupContext = struct { } }; +/// Assumes that the `node` is not a container_field of a struct tuple field. 
pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex { const tags = tree.nodes.items(.tag); const token_tags = tree.tokens.items(.tag); @@ -385,14 +386,20 @@ const ScopeContext = struct { fn pushDeclaration( pushed: PushedScope, - name: []const u8, + identifier_token: Ast.TokenIndex, declaration: Declaration, kind: DeclarationLookup.Kind, ) error{OutOfMemory}!void { std.debug.assert((declaration == .label) == (kind == .label)); + const name = offsets.identifierTokenToNameSlice(pushed.context.tree, identifier_token); if (std.mem.eql(u8, name, "_")) return; defer std.debug.assert(pushed.context.doc_scope.declarations.len == pushed.context.doc_scope.declaration_lookup_map.count()); + if (@import("builtin").mode == .Debug) { + // Check that nameToken works + std.debug.assert(identifier_token == declaration.nameToken(pushed.context.tree)); + } + const context = pushed.context; const doc_scope = context.doc_scope; const allocator = context.allocator; @@ -810,18 +817,18 @@ noinline fn walkContainerDecl( const allocator = context.allocator; const tags = tree.nodes.items(.tag); - const main_tokens = tree.nodes.items(.main_token); const token_tags = tree.tokens.items(.tag); var buf: [2]Ast.Node.Index = undefined; const container_decl = tree.fullContainerDecl(&buf, node_idx).?; - const is_enum_or_tagged_union = blk: { - if (node_idx == 0) break :blk false; + const is_enum_or_tagged_union, const is_struct = blk: { + if (node_idx == 0) break :blk .{ false, true }; break :blk switch (token_tags[container_decl.ast.main_token]) { - .keyword_enum => true, - .keyword_union => container_decl.ast.enum_token != null or container_decl.ast.arg != 0, - .keyword_struct, .keyword_opaque => false, + .keyword_enum => .{ true, false }, + .keyword_union => .{ container_decl.ast.enum_token != null or container_decl.ast.arg != 0, false }, + .keyword_struct => .{ false, true }, + .keyword_opaque => .{ false, false }, else => unreachable, }; }; @@ -850,21 +857,14 @@ noinline fn walkContainerDecl( .container_field_init, .container_field_align, => { - if (token_tags[main_tokens[node_idx]] == .keyword_struct and - tree.fullContainerField(decl).?.ast.tuple_like) - { - continue; - } + const container_field = tree.fullContainerField(decl).?; + if (is_struct and container_field.ast.tuple_like) continue; - const main_token = main_tokens[decl]; - if (token_tags[main_token] != .identifier) { - // TODO this code path should not be reachable - continue; - } - const name = offsets.identifierTokenToNameSlice(tree, main_token); - try scope.pushDeclaration(name, .{ .ast_node = decl }, .field); + const main_token = container_field.ast.main_token; + try scope.pushDeclaration(main_token, .{ .ast_node = decl }, .field); if (is_enum_or_tagged_union) { + const name = offsets.identifierTokenToNameSlice(tree, main_token); if (std.mem.eql(u8, name, "_")) continue; const gop = try context.doc_scope.global_enum_set.getOrPutContext( @@ -888,8 +888,7 @@ noinline fn walkContainerDecl( => { var buffer: [1]Ast.Node.Index = undefined; const name_token = tree.fullFnProto(&buffer, decl).?.name_token orelse continue; - const name = offsets.identifierTokenToNameSlice(tree, name_token); - try scope.pushDeclaration(name, .{ .ast_node = decl }, .other); + try scope.pushDeclaration(name_token, .{ .ast_node = decl }, .other); }, .local_var_decl, .global_var_decl, @@ -898,9 +897,7 @@ noinline fn walkContainerDecl( => { const name_token = tree.fullVarDecl(decl).?.ast.mut_token + 1; if (name_token >= tree.tokens.len) continue; - - const name = 
offsets.identifierTokenToNameSlice(tree, name_token); - try scope.pushDeclaration(name, .{ .ast_node = decl }, .other); + try scope.pushDeclaration(name_token, .{ .ast_node = decl }, .other); }, else => unreachable, @@ -937,8 +934,7 @@ noinline fn walkErrorSetNode( var it = ast.ErrorSetIterator.init(tree, node_idx); while (it.next()) |identifier_token| { - const name = offsets.identifierTokenToNameSlice(tree, identifier_token); - try scope.pushDeclaration(name, .{ .error_token = identifier_token }, .other); + try scope.pushDeclaration(identifier_token, .{ .error_token = identifier_token }, .other); const gop = try context.doc_scope.global_error_set.getOrPutContext( context.allocator, identifier_token, @@ -975,7 +971,7 @@ noinline fn walkFuncNode( while (ast.nextFnParam(&it)) |param| : (param_index += 1) { if (param.name_token) |name_token| { try scope.pushDeclaration( - offsets.identifierTokenToNameSlice(tree, name_token), + name_token, .{ .function_parameter = .{ .param_index = param_index, .func = node_idx } }, .other, ); @@ -1024,7 +1020,7 @@ fn walkBlockNodeKeepOpen( // if labeled block if (token_tags[first_token] == .identifier) { try scope.pushDeclaration( - offsets.identifierTokenToNameSlice(tree, first_token), + first_token, .{ .label = .{ .identifier = first_token, .block = node_idx } }, .label, ); @@ -1042,8 +1038,8 @@ fn walkBlockNodeKeepOpen( .simple_var_decl, => { const var_decl = tree.fullVarDecl(idx).?; - const name = offsets.identifierTokenToNameSlice(tree, var_decl.ast.mut_token + 1); - try scope.pushDeclaration(name, .{ .ast_node = idx }, .other); + const name_token = var_decl.ast.mut_token + 1; + try scope.pushDeclaration(name_token, .{ .ast_node = idx }, .other); }, .assign_destructure => { const lhs_count = tree.extra_data[data[idx].lhs]; @@ -1051,9 +1047,9 @@ fn walkBlockNodeKeepOpen( for (lhs_exprs, 0..) 
|lhs_node, i| { const var_decl = tree.fullVarDecl(lhs_node) orelse continue; - const name = offsets.identifierTokenToNameSlice(tree, var_decl.ast.mut_token + 1); + const name_token = var_decl.ast.mut_token + 1; try scope.pushDeclaration( - name, + name_token, .{ .assign_destructure = .{ .node = idx, .index = @intCast(i) } }, .other, ); @@ -1077,7 +1073,6 @@ noinline fn walkIfNode( if (if_node.payload_token) |payload_token| { const name_token = payload_token + @intFromBool(token_tags[payload_token] == .asterisk); - const name = offsets.identifierTokenToNameSlice(tree, name_token); const decl: Declaration = if (if_node.error_token != null) .{ .error_union_payload = .{ .identifier = name_token, .condition = if_node.ast.cond_expr } } @@ -1085,7 +1080,7 @@ noinline fn walkIfNode( .{ .optional_payload = .{ .identifier = name_token, .condition = if_node.ast.cond_expr } }; const then_scope = try walkNodeEnsureScope(context, tree, if_node.ast.then_expr, name_token); - try then_scope.pushDeclaration(name, decl, .other); + try then_scope.pushDeclaration(name_token, decl, .other); try then_scope.finalize(); } else { try walkNode(context, tree, if_node.ast.then_expr); @@ -1093,11 +1088,9 @@ noinline fn walkIfNode( if (if_node.ast.else_expr != 0) { if (if_node.error_token) |error_token| { - const name = offsets.identifierTokenToNameSlice(tree, error_token); - const else_scope = try walkNodeEnsureScope(context, tree, if_node.ast.else_expr, error_token); try else_scope.pushDeclaration( - name, + error_token, .{ .error_union_error = .{ .identifier = error_token, .condition = if_node.ast.cond_expr } }, .other, ); @@ -1124,11 +1117,9 @@ noinline fn walkCatchNode( token_tags[catch_token - 1] == .pipe and token_tags[catch_token] == .identifier) { - const name = offsets.identifierTokenToNameSlice(tree, catch_token); - const expr_scope = try walkNodeEnsureScope(context, tree, data[node_idx].rhs, catch_token); try expr_scope.pushDeclaration( - name, + catch_token, .{ .error_union_error = .{ .identifier = catch_token, .condition = data[node_idx].lhs } }, .other, ); @@ -1150,46 +1141,39 @@ noinline fn walkWhileNode( try walkNode(context, tree, while_node.ast.cond_expr); - const label_token, const label_name = if (while_node.label_token) |label| blk: { - std.debug.assert(token_tags[label] == .identifier); - const name = offsets.tokenToSlice(tree, label); - break :blk .{ label, name }; - } else .{ null, null }; - - const payload_declaration, const payload_name = if (while_node.payload_token) |payload_token| blk: { + const payload_declaration, const payload_name_token = if (while_node.payload_token) |payload_token| blk: { const name_token = payload_token + @intFromBool(token_tags[payload_token] == .asterisk); - const name = offsets.identifierTokenToNameSlice(tree, name_token); const decl: Declaration = if (while_node.error_token != null) .{ .error_union_payload = .{ .identifier = name_token, .condition = while_node.ast.cond_expr } } else .{ .optional_payload = .{ .identifier = name_token, .condition = while_node.ast.cond_expr } }; - break :blk .{ decl, name }; + break :blk .{ decl, name_token }; } else .{ null, null }; if (while_node.ast.cont_expr != 0) { if (payload_declaration) |decl| { const cont_scope = try walkNodeEnsureScope(context, tree, while_node.ast.cont_expr, tree.firstToken(while_node.ast.cont_expr)); - try cont_scope.pushDeclaration(payload_name.?, decl, .other); + try cont_scope.pushDeclaration(payload_name_token.?, decl, .other); try cont_scope.finalize(); } else { try walkNode(context, tree, 
while_node.ast.cont_expr); } } - if (payload_declaration != null or label_token != null) { + if (payload_declaration != null or while_node.label_token != null) { const then_start = while_node.payload_token orelse tree.firstToken(while_node.ast.then_expr); const then_scope = try walkNodeEnsureScope(context, tree, while_node.ast.then_expr, then_start); - if (label_token) |label| { + if (while_node.label_token) |label| { try then_scope.pushDeclaration( - label_name.?, + label, .{ .label = .{ .identifier = label, .block = while_node.ast.then_expr } }, .label, ); } if (payload_declaration) |decl| { - try then_scope.pushDeclaration(payload_name.?, decl, .other); + try then_scope.pushDeclaration(payload_name_token.?, decl, .other); } try then_scope.finalize(); @@ -1198,23 +1182,21 @@ noinline fn walkWhileNode( } if (while_node.ast.else_expr != 0) { - if (label_token != null or while_node.error_token != null) { + if (while_node.label_token != null or while_node.error_token != null) { const else_start = while_node.error_token orelse tree.firstToken(while_node.ast.else_expr); const else_scope = try walkNodeEnsureScope(context, tree, while_node.ast.else_expr, else_start); - if (label_token) |label| { + if (while_node.label_token) |label| { try else_scope.pushDeclaration( - label_name.?, + label, .{ .label = .{ .identifier = label, .block = while_node.ast.then_expr } }, .label, ); } if (while_node.error_token) |error_token| { - const name = offsets.identifierTokenToNameSlice(tree, error_token); - try else_scope.pushDeclaration( - name, + error_token, .{ .error_union_error = .{ .identifier = error_token, .condition = while_node.ast.cond_expr } }, .other, ); @@ -1252,20 +1234,15 @@ noinline fn walkForNode( if (tree.tokens.items(.tag)[name_token] != .identifier) break; try then_scope.pushDeclaration( - offsets.identifierTokenToNameSlice(tree, name_token), + name_token, .{ .for_loop_payload = .{ .identifier = name_token, .condition = input } }, .other, ); } - const label_name = if (for_node.label_token) |label_token| - offsets.identifierTokenToNameSlice(context.tree, label_token) - else - null; - if (for_node.label_token) |label_token| { try then_scope.pushDeclaration( - label_name.?, + for_node.label_token.?, .{ .label = .{ .identifier = label_token, .block = for_node.ast.then_expr } }, .label, ); @@ -1277,7 +1254,7 @@ noinline fn walkForNode( if (for_node.label_token) |label_token| { const else_scope = try walkNodeEnsureScope(context, tree, for_node.ast.else_expr, tree.firstToken(for_node.ast.else_expr)); try else_scope.pushDeclaration( - label_name.?, + for_node.label_token.?, .{ .label = .{ .identifier = label_token, .block = for_node.ast.else_expr } }, .label, ); @@ -1304,11 +1281,10 @@ noinline fn walkSwitchNode( if (switch_case.payload_token) |payload_token| { const name_token = payload_token + @intFromBool(token_tags[payload_token] == .asterisk); - const name = offsets.identifierTokenToNameSlice(tree, name_token); const expr_scope = try walkNodeEnsureScope(context, tree, switch_case.ast.target_expr, name_token); try expr_scope.pushDeclaration( - name, + name_token, .{ .switch_payload = .{ .node = node_idx, .case_index = @intCast(case_index) } }, .other, ); @@ -1328,11 +1304,9 @@ noinline fn walkErrdeferNode( const payload_token = data[node_idx].lhs; if (payload_token != 0) { - const name = offsets.identifierTokenToNameSlice(tree, payload_token); - const expr_scope = try walkNodeEnsureScope(context, tree, data[node_idx].rhs, payload_token); try expr_scope.pushDeclaration( - name, + payload_token, 
.{ .error_union_error = .{ .identifier = payload_token, .condition = 0 } }, .other, ); diff --git a/src/features/document_symbol.zig b/src/features/document_symbol.zig index def976f2e..fbf4fb434 100644 --- a/src/features/document_symbol.zig +++ b/src/features/document_symbol.zig @@ -98,6 +98,9 @@ fn callback(ctx: *Context, tree: Ast, node: Ast.Node.Index) error{OutOfMemory}!v .container_field_align, .container_field, => blk: { + const container_kind = token_tags[main_tokens[ctx.parent_container]]; + const is_struct = container_kind == .keyword_struct; + const kind: types.SymbolKind = switch (node_tags[ctx.parent_container]) { .root => .Field, .container_decl, @@ -106,7 +109,7 @@ fn callback(ctx: *Context, tree: Ast, node: Ast.Node.Index) error{OutOfMemory}!v .container_decl_arg_trailing, .container_decl_two, .container_decl_two_trailing, - => switch (token_tags[main_tokens[ctx.parent_container]]) { + => switch (container_kind) { .keyword_struct => .Field, .keyword_union => .Field, .keyword_enum => .EnumMember, @@ -123,7 +126,10 @@ fn callback(ctx: *Context, tree: Ast, node: Ast.Node.Index) error{OutOfMemory}!v else => unreachable, }; - const decl_name_token = analysis.DocumentScope.getDeclNameToken(tree, node) orelse break :blk null; + const container_field = tree.fullContainerField(node).?; + if (is_struct and container_field.ast.tuple_like) break :blk null; + + const decl_name_token = container_field.ast.main_token; const decl_name = offsets.tokenToSlice(tree, decl_name_token); break :blk .{ diff --git a/tests/lsp_features/document_symbol.zig b/tests/lsp_features/document_symbol.zig index f1975b7da..e5d6392cc 100644 --- a/tests/lsp_features/document_symbol.zig +++ b/tests/lsp_features/document_symbol.zig @@ -27,15 +27,17 @@ test "container decl" { \\ Field alpha \\ Function f ); - // TODO - // try testDocumentSymbol( - // \\const S = struct { - // \\ []const u8, - // \\ u32, - // \\}; - // , - // \\Constant S - // ); +} + +test "tuple" { + try testDocumentSymbol( + \\const S = struct { + \\ []const u8, + \\ u32, + \\}; + , + \\Constant S + ); } test "enum" { From ca967b7805d67ccefe55ea27de27ce6b6c1b41d4 Mon Sep 17 00:00:00 2001 From: Techatrix Date: Wed, 17 Jul 2024 00:18:12 +0200 Subject: [PATCH 11/21] remove unnecessary recursion check from findReturnStatementInternal I just wanted to make getDeclNameToken private. --- src/DocumentScope.zig | 2 +- src/analysis.zig | 24 +++++------------------- tests/lsp_features/completion.zig | 29 +++++++++++++++++++++++++++++ 3 files changed, 35 insertions(+), 20 deletions(-) diff --git a/src/DocumentScope.zig b/src/DocumentScope.zig index 9b6721430..9366d5f7f 100644 --- a/src/DocumentScope.zig +++ b/src/DocumentScope.zig @@ -78,7 +78,7 @@ pub const DeclarationLookupContext = struct { }; /// Assumes that the `node` is not a container_field of a struct tuple field. 
-pub fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex { +fn getDeclNameToken(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex { const tags = tree.nodes.items(.tag); const token_tags = tree.tokens.items(.tag); const main_tokens = tree.nodes.items(.main_token); diff --git a/src/analysis.zig b/src/analysis.zig index e3a7e7285..fabe2bb3e 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -700,45 +700,31 @@ pub fn resolveFieldAccess(analyser: *Analyser, lhs: Type, field_name: []const u8 return null; } -fn findReturnStatementInternal(tree: Ast, fn_decl: Ast.full.FnProto, body: Ast.Node.Index, already_found: *bool) ?Ast.Node.Index { +fn findReturnStatementInternal(tree: Ast, body: Ast.Node.Index, already_found: *bool) ?Ast.Node.Index { var result: ?Ast.Node.Index = null; const node_tags = tree.nodes.items(.tag); - const datas = tree.nodes.items(.data); var buffer: [2]Ast.Node.Index = undefined; const statements = ast.blockStatements(tree, body, &buffer) orelse return null; for (statements) |child_idx| { if (node_tags[child_idx] == .@"return") { - if (datas[child_idx].lhs != 0) { - const lhs = datas[child_idx].lhs; - var buf: [1]Ast.Node.Index = undefined; - if (tree.fullCall(&buf, lhs)) |call| { - const call_name = DocumentScope.getDeclNameToken(tree, call.ast.fn_expr); - if (call_name) |name| { - if (std.mem.eql(u8, offsets.tokenToSlice(tree, name), offsets.tokenToSlice(tree, fn_decl.name_token.?))) { - continue; - } - } - } - } - if (already_found.*) return null; already_found.* = true; result = child_idx; continue; } - result = findReturnStatementInternal(tree, fn_decl, child_idx, already_found); + result = findReturnStatementInternal(tree, child_idx, already_found); } return result; } -fn findReturnStatement(tree: Ast, fn_decl: Ast.full.FnProto, body: Ast.Node.Index) ?Ast.Node.Index { +fn findReturnStatement(tree: Ast, body: Ast.Node.Index) ?Ast.Node.Index { var already_found = false; - return findReturnStatementInternal(tree, fn_decl, body, &already_found); + return findReturnStatementInternal(tree, body, &already_found); } pub fn resolveReturnType(analyser: *Analyser, fn_decl: Ast.full.FnProto, handle: *DocumentStore.Handle, fn_body: ?Ast.Node.Index) error{OutOfMemory}!?Type { @@ -746,7 +732,7 @@ pub fn resolveReturnType(analyser: *Analyser, fn_decl: Ast.full.FnProto, handle: if (isTypeFunction(tree, fn_decl) and fn_body != null) { // If this is a type function and it only contains a single return statement that returns // a container declaration, we will return that declaration. - const ret = findReturnStatement(tree, fn_decl, fn_body.?) orelse return null; + const ret = findReturnStatement(tree, fn_body.?) orelse return null; const data = tree.nodes.items(.data)[ret]; if (data.lhs != 0) { return try analyser.resolveTypeOfNodeInternal(.{ .node = data.lhs, .handle = handle }); diff --git a/tests/lsp_features/completion.zig b/tests/lsp_features/completion.zig index 96b738b0a..d7971b512 100644 --- a/tests/lsp_features/completion.zig +++ b/tests/lsp_features/completion.zig @@ -362,6 +362,35 @@ test "generic function" { }); } +test "recusive generic function" { + try testCompletion( + \\const S = struct { alpha: u32 }; + \\fn ArrayList(comptime T: type) type { + \\ return ArrayList(T); + \\} + \\const array_list: ArrayList(S) = undefined; + \\const foo = array_list. + , &.{}); + try testCompletion( + \\const S = struct { alpha: u32 }; + \\fn ArrayList(comptime T: type) type { + \\ return ArrayList(T); + \\} + \\const foo = ArrayList(S). 
+ , &.{}); + try testCompletion( \\const S = struct { alpha: u32 }; \\fn Foo(comptime T: type) type { \\ return Bar(T); \\} \\fn Bar(comptime T: type) type { \\ return Foo(T); \\} \\const foo: Foo(S) = undefined; \\const value = array_list. + , &.{}); +} + test "std.ArrayList" { if (!std.process.can_spawn) return error.SkipZigTest; try testCompletion( From 2c8cfff3c41aa9b311e1fbde164c8e698bf0c6ac Mon Sep 17 00:00:00 2001 From: Techatrix Date: Wed, 17 Jul 2024 20:11:55 +0200 Subject: [PATCH 12/21] walk the switch condition to find document scopes WTF is std.posix.dl_iterate_phdr doing? --- src/DocumentScope.zig | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/DocumentScope.zig b/src/DocumentScope.zig index 9366d5f7f..788a774da 100644 --- a/src/DocumentScope.zig +++ b/src/DocumentScope.zig @@ -1273,6 +1273,8 @@ noinline fn walkSwitchNode( const token_tags = tree.tokens.items(.tag); const data = tree.nodes.items(.data); + try walkNode(context, tree, data[node_idx].lhs); + const extra = tree.extraData(data[node_idx].rhs, Ast.Node.SubRange); const cases = tree.extra_data[extra.start..extra.end]; From 4dd9f2d49ae9d9837370f499ca7ae4fcb585a6d8 Mon Sep 17 00:00:00 2001 From: Techatrix Date: Mon, 22 Jul 2024 19:21:16 +0200 Subject: [PATCH 13/21] walk the if condition to find document scopes --- src/DocumentScope.zig | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/DocumentScope.zig b/src/DocumentScope.zig index 788a774da..9e526c632 100644 --- a/src/DocumentScope.zig +++ b/src/DocumentScope.zig @@ -1071,6 +1071,8 @@ noinline fn walkIfNode( const if_node = ast.fullIf(tree, node_idx).?; + try walkNode(context, tree, if_node.ast.cond_expr); + if (if_node.payload_token) |payload_token| { const name_token = payload_token + @intFromBool(token_tags[payload_token] == .asterisk); From 8453aece7ceeebcc5ee444a1c32f9f3152a2e671 Mon Sep 17 00:00:00 2001 From: Techatrix Date: Mon, 22 Jul 2024 19:26:34 +0200 Subject: [PATCH 14/21] safely handle tuple like container fields in document scope I am curious as to why they are handled this way. The code in the test case also gives a "union field missing name" error.
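For context, a minimal illustration of what "tuple like" container fields are; this snippet is not part of the patch, and `Named` and `Tuple` are made-up names used only for the example:

const Named = struct {
    alpha: u32, // named field: `alpha` is an identifier token that can be registered as a declaration
};
const Tuple = struct {
    u32, // tuple-like field: there is no field name token at all,
    []const u8, // so the document scope has nothing it could register for it
};

The walkContainerDecl change below skips any field that is still tuple-like after convertToNonTupleLike, so no name token is assumed for it.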
--- src/DocumentScope.zig | 4 +++- tests/lsp_features/semantic_tokens.zig | 13 +++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/src/DocumentScope.zig b/src/DocumentScope.zig index 9e526c632..c3dd07a67 100644 --- a/src/DocumentScope.zig +++ b/src/DocumentScope.zig @@ -857,9 +857,11 @@ noinline fn walkContainerDecl( .container_field_init, .container_field_align, => { - const container_field = tree.fullContainerField(decl).?; + var container_field = tree.fullContainerField(decl).?; if (is_struct and container_field.ast.tuple_like) continue; + container_field.convertToNonTupleLike(tree.nodes); + if (container_field.ast.tuple_like) continue; const main_token = container_field.ast.main_token; try scope.pushDeclaration(main_token, .{ .ast_node = decl }, .field); diff --git a/tests/lsp_features/semantic_tokens.zig b/tests/lsp_features/semantic_tokens.zig index 3e97af18e..82837ce4a 100644 --- a/tests/lsp_features/semantic_tokens.zig +++ b/tests/lsp_features/semantic_tokens.zig @@ -1776,6 +1776,19 @@ test "weird code" { .{ "error", .keyword, .{} }, .{ "foo", .variable, .{} }, }); + try testSemanticTokens( + \\const foo = union { + \\ .bar = 5, + \\}; + , &.{ + .{ "const", .keyword, .{} }, + .{ "foo", .type, .{ .declaration = true } }, + .{ "=", .operator, .{} }, + .{ "union", .keyword, .{} }, + .{ "bar", .enumMember, .{} }, + .{ "=", .operator, .{} }, + .{ "5", .number, .{} }, + }); } const TokenData = struct { From 7d5492676d1316316a4cc4de839341d67e4c36c8 Mon Sep 17 00:00:00 2001 From: Techatrix Date: Mon, 22 Jul 2024 19:44:51 +0200 Subject: [PATCH 15/21] fix position context on builtin call without lparen --- src/analysis.zig | 19 ++++++++++--------- tests/utility/position_context.zig | 18 ++++++++++++++++++ 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index fabe2bb3e..1de630a22 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -3308,15 +3308,16 @@ pub fn getPositionContext( } else if (new_index + 2 < text.len) { if (text[new_index] == '@') new_index += 2; while (new_index < text.len and isSymbolChar(text[new_index])) : (new_index += 1) {} - switch (text[new_index]) { - ':' => { // look for `id:`, but avoid `a: T` by checking for a `{` following the ':' - var b_index = new_index + 1; - while (b_index < text.len and text[b_index] == ' ') : (b_index += 1) {} // eat spaces - if (text[b_index] == '{') new_index += 1; // current new_index points to ':', but slc ends are exclusive => `text[0..pos_of_r_brace]` - }, - - // ';' => new_index += 1, // XXX: currently given `some;` the last letter gets cut off, ie `som`, but fixing it breaks existing logic.. ? - else => {}, + if (new_index < text.len) { + switch (text[new_index]) { + ':' => { // look for `id:`, but avoid `a: T` by checking for a `{` following the ':' + var b_index = new_index + 1; + while (b_index < text.len and text[b_index] == ' ') : (b_index += 1) {} // eat spaces + if (text[b_index] == '{') new_index += 1; // current new_index points to ':', but slc ends are exclusive => `text[0..pos_of_r_brace]` + }, + // ';' => new_index += 1, // XXX: currently given `some;` the last letter gets cut off, ie `som`, but fixing it breaks existing logic.. ? 
+ else => {}, + } } } } diff --git a/tests/utility/position_context.zig b/tests/utility/position_context.zig index 619343cbd..3585ad6dd 100644 --- a/tests/utility/position_context.zig +++ b/tests/utility/position_context.zig @@ -265,6 +265,24 @@ test "builtin" { .empty, null, ); + try testContext( + \\var foo = @ + , + .builtin, + "@", + ); + try testContext( + \\var foo = @tagName + , + .builtin, + "@tagName", + ); + try testContext( + \\var foo = @tagName + , + .builtin, + "@tagName", + ); try testContext( \\var foo = @intC(u32, 5); , From a8490d745a40acb7061d7e4c5e27bc647523de8e Mon Sep 17 00:00:00 2001 From: Techatrix Date: Mon, 22 Jul 2024 20:02:53 +0200 Subject: [PATCH 16/21] add helper function to access the main token of an identifier node --- src/DocumentScope.zig | 1 - src/analysis.zig | 7 ++----- src/ast.zig | 14 ++++++++++++++ src/features/document_symbol.zig | 1 - src/features/references.zig | 19 +++++++++---------- src/features/semantic_tokens.zig | 2 +- 6 files changed, 26 insertions(+), 18 deletions(-) diff --git a/src/DocumentScope.zig b/src/DocumentScope.zig index c3dd07a67..ff19d3563 100644 --- a/src/DocumentScope.zig +++ b/src/DocumentScope.zig @@ -898,7 +898,6 @@ noinline fn walkContainerDecl( .aligned_var_decl, => { const name_token = tree.fullVarDecl(decl).?.ast.mut_token + 1; - if (name_token >= tree.tokens.len) continue; try scope.pushDeclaration(name_token, .{ .ast_node = decl }, .other); }, diff --git a/src/analysis.zig b/src/analysis.zig index 1de630a22..04772cfbb 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -620,14 +620,12 @@ fn resolveVarDeclAliasInternal(analyser: *Analyser, node_handle: NodeWithHandle, const handle = node_handle.handle; const tree = handle.tree; const node_tags = tree.nodes.items(.tag); - const main_tokens = tree.nodes.items(.main_token); const datas = tree.nodes.items(.data); const token_tags = tree.tokens.items(.tag); const resolved = switch (node_tags[node_handle.node]) { .identifier => blk: { - const name_token = main_tokens[node_handle.node]; - if (tree.tokens.items(.tag)[name_token] != .identifier) break :blk null; + const name_token = ast.identifierTokenFromIdentifierNode(tree, node_handle.node) orelse break :blk null; const name = offsets.identifierTokenToNameSlice(tree, name_token); break :blk try analyser.lookupSymbolGlobal( handle, @@ -1351,8 +1349,7 @@ fn resolveTypeOfNodeUncached(analyser: *Analyser, node_handle: NodeWithHandle) e return fallback_type; }, .identifier => { - const name_token = main_tokens[node]; - if (tree.tokens.items(.tag)[name_token] != .identifier) return null; + const name_token = ast.identifierTokenFromIdentifierNode(tree, node) orelse return null; const name = offsets.identifierTokenToNameSlice(tree, name_token); const is_escaped_identifier = tree.source[tree.tokens.items(.start)[name_token]] == '@'; diff --git a/src/ast.zig b/src/ast.zig index d8923169e..9e940f934 100644 --- a/src/ast.zig +++ b/src/ast.zig @@ -1142,6 +1142,20 @@ pub fn testDeclNameAndToken(tree: Ast, test_decl_node: Ast.Node.Index) ?struct { } } +/// The main token of a identifier node may not be a identifier token. +/// +/// Example: +/// ```zig +/// const Foo; +/// @tagName +/// ``` +/// TODO investigate the parser to figure out why. 
+pub fn identifierTokenFromIdentifierNode(tree: Ast, node: Ast.Node.Index) ?Ast.TokenIndex { + const main_token = tree.nodes.items(.main_token)[node]; + if (tree.tokens.items(.tag)[main_token] != .identifier) return null; + return main_token; +} + pub fn hasInferredError(tree: Ast, fn_proto: Ast.full.FnProto) bool { const token_tags = tree.tokens.items(.tag); if (fn_proto.ast.return_type == 0) return false; diff --git a/src/features/document_symbol.zig b/src/features/document_symbol.zig index fbf4fb434..7a72404c6 100644 --- a/src/features/document_symbol.zig +++ b/src/features/document_symbol.zig @@ -46,7 +46,6 @@ fn callback(ctx: *Context, tree: Ast, node: Ast.Node.Index) error{OutOfMemory}!v const var_decl = tree.fullVarDecl(node).?; const var_decl_name_token = var_decl.ast.mut_token + 1; - if (tree.tokens.items(.tag)[var_decl_name_token] != .identifier) break :blk null; const var_decl_name = offsets.identifierTokenToNameSlice(tree, var_decl_name_token); new_ctx.last_var_decl_name = var_decl_name; diff --git a/src/features/references.zig b/src/features/references.zig index f5eb2750e..104a7e73a 100644 --- a/src/features/references.zig +++ b/src/features/references.zig @@ -102,17 +102,18 @@ const Builder = struct { const node_tags = tree.nodes.items(.tag); const datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const starts = tree.tokens.items(.start); switch (node_tags[node]) { .identifier, .test_decl, => |tag| { const name_token, const name = switch (tag) { - .identifier => .{ - main_tokens[node], - offsets.identifierTokenToNameSlice(tree, main_tokens[node]), + .identifier => blk: { + const name_token = ast.identifierTokenFromIdentifierNode(tree, node) orelse return; + break :blk .{ + name_token, + offsets.identifierTokenToNameSlice(tree, name_token), + }; }, .test_decl => ast.testDeclNameAndToken(tree, node) orelse return, else => unreachable, @@ -121,7 +122,7 @@ const Builder = struct { const child = try builder.analyser.lookupSymbolGlobal( handle, name, - starts[name_token], + tree.tokens.items(.start)[name_token], ) orelse return; if (builder.decl_handle.eql(child)) { @@ -320,8 +321,6 @@ const CallBuilder = struct { const node_tags = tree.nodes.items(.tag); const datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - // const token_tags = tree.tokens.items(.tag); const starts = tree.tokens.items(.start); switch (node_tags[node]) { @@ -341,11 +340,11 @@ const CallBuilder = struct { switch (node_tags[called_node]) { .identifier => { - const identifier_token = main_tokens[called_node]; + const identifier_token = ast.identifierTokenFromIdentifierNode(tree, called_node) orelse return; const child = (try builder.analyser.lookupSymbolGlobal( handle, - offsets.tokenToSlice(tree, identifier_token), + offsets.identifierTokenToNameSlice(tree, identifier_token), starts[identifier_token], )) orelse return; diff --git a/src/features/semantic_tokens.zig b/src/features/semantic_tokens.zig index 1cba79a7c..36d910efb 100644 --- a/src/features/semantic_tokens.zig +++ b/src/features/semantic_tokens.zig @@ -847,7 +847,7 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v try writeNodeTokens(builder, node_data[node].rhs); }, .identifier => { - if (tree.tokens.items(.tag)[main_token] != .identifier) return; // why parser? why? 
+ std.debug.assert(main_token == ast.identifierTokenFromIdentifierNode(tree, node) orelse return); try writeIdentifier(builder, main_token); }, .field_access => { From 719b834fb48ae6f13c9ae44fb3e8048e442c4877 Mon Sep 17 00:00:00 2001 From: Techatrix Date: Sun, 11 Aug 2024 18:43:15 +0200 Subject: [PATCH 17/21] simplify `identifierLocFromPosition` Also renamed to `identifierLocFromIndex` --- src/analysis.zig | 77 +++++++++++++++++++++++---------- src/features/completions.zig | 2 +- src/features/goto.zig | 8 ++-- src/features/hover.zig | 10 ++--- src/features/references.zig | 2 +- src/features/signature_help.zig | 2 +- 6 files changed, 67 insertions(+), 34 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 04772cfbb..287d9e691 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -596,6 +596,61 @@ pub fn isSnakeCase(name: []const u8) bool { // ANALYSIS ENGINE +/// if the `source_index` points to `@name`, the source location of `name` without the `@` is returned. +/// if the `source_index` points to `@"name"`, the source location of `name` is returned. +pub fn identifierLocFromIndex(tree: Ast, source_index: usize) ?offsets.Loc { + std.debug.assert(source_index < tree.source.len); + + var start = source_index; + while (start > 0 and isSymbolChar(tree.source[start - 1])) { + start -= 1; + } + + var end = source_index; + while (end < tree.source.len and isSymbolChar(tree.source[end])) { + end += 1; + } + + if (start == end) return null; + return .{ .start = start, .end = end }; +} + +test identifierLocFromIndex { + var tree = try Ast.parse(std.testing.allocator, + \\;name; ;@builtin; ;@"escaped"; + , .zig); + defer tree.deinit(std.testing.allocator); + + try std.testing.expectEqualSlices( + std.zig.Token.Tag, + &.{ + .semicolon, .identifier, .semicolon, + .semicolon, .builtin, .semicolon, + .semicolon, .identifier, .semicolon, + .eof, + }, + tree.tokens.items(.tag), + ); + + std.debug.assert(std.mem.eql(u8, "name", offsets.locToSlice(tree.source, .{ .start = 1, .end = 5 }))); + try std.testing.expectEqual(@as(?offsets.Loc, .{ .start = 1, .end = 5 }), identifierLocFromIndex(tree, 1)); + try std.testing.expectEqual(@as(?offsets.Loc, .{ .start = 1, .end = 5 }), identifierLocFromIndex(tree, 2)); + try std.testing.expectEqual(@as(?offsets.Loc, .{ .start = 1, .end = 5 }), identifierLocFromIndex(tree, 5)); + + std.debug.assert(std.mem.eql(u8, "builtin", offsets.locToSlice(tree.source, .{ .start = 10, .end = 17 }))); + try std.testing.expectEqual(@as(?offsets.Loc, null), identifierLocFromIndex(tree, 9)); + try std.testing.expectEqual(@as(?offsets.Loc, .{ .start = 10, .end = 17 }), identifierLocFromIndex(tree, 10)); + try std.testing.expectEqual(@as(?offsets.Loc, .{ .start = 10, .end = 17 }), identifierLocFromIndex(tree, 10)); + try std.testing.expectEqual(@as(?offsets.Loc, .{ .start = 10, .end = 17 }), identifierLocFromIndex(tree, 14)); + try std.testing.expectEqual(@as(?offsets.Loc, .{ .start = 10, .end = 17 }), identifierLocFromIndex(tree, 17)); + + std.debug.assert(std.mem.eql(u8, "escaped", offsets.locToSlice(tree.source, .{ .start = 22, .end = 29 }))); + try std.testing.expectEqual(@as(?offsets.Loc, null), identifierLocFromIndex(tree, 20)); + try std.testing.expectEqual(@as(?offsets.Loc, .{ .start = 22, .end = 29 }), identifierLocFromIndex(tree, 22)); + try std.testing.expectEqual(@as(?offsets.Loc, .{ .start = 22, .end = 29 }), identifierLocFromIndex(tree, 25)); + try std.testing.expectEqual(@as(?offsets.Loc, .{ .start = 22, .end = 29 }), identifierLocFromIndex(tree, 29)); +} + 
/// Resolves variable declarations consisting of chains of imports and field accesses of containers /// Examples: ///```zig @@ -4619,28 +4674,6 @@ pub fn resolveExpressionTypeFromAncestors( return null; } -pub fn identifierLocFromPosition(pos_index: usize, handle: *DocumentStore.Handle) ?std.zig.Token.Loc { - if (pos_index + 1 >= handle.tree.source.len) return null; - var start_idx = pos_index; - - while (start_idx > 0 and Analyser.isSymbolChar(handle.tree.source[start_idx - 1])) { - start_idx -= 1; - } - - const tree = handle.tree; - const token_index = offsets.sourceIndexToTokenIndex(tree, start_idx); - if (tree.tokens.items(.tag)[token_index] == .identifier) - return offsets.identifierTokenToNameLoc(tree, token_index); - - var end_idx = pos_index; - while (end_idx < handle.tree.source.len and Analyser.isSymbolChar(handle.tree.source[end_idx])) { - end_idx += 1; - } - - if (end_idx <= start_idx) return null; - return .{ .start = start_idx, .end = end_idx }; -} - pub fn getSymbolEnumLiteral( analyser: *Analyser, arena: std.mem.Allocator, diff --git a/src/features/completions.zig b/src/features/completions.zig index 0a4b2c0d5..804ec023d 100644 --- a/src/features/completions.zig +++ b/src/features/completions.zig @@ -1340,7 +1340,7 @@ fn collectFieldAccessContainerNodes( // inconsistent at returning name_loc for methods, ie // `abc.method() == .` => fails, `abc.method(.{}){.}` => ok // it also fails for `abc.xyz.*` ... currently we take advantage of this quirk - const name_loc = Analyser.identifierLocFromPosition(loc.end, handle) orelse { + const name_loc = Analyser.identifierLocFromIndex(handle.tree, loc.end) orelse { const result = try analyser.getFieldAccessType(handle, loc.end, loc) orelse return; const container = try analyser.resolveDerefType(result) orelse result; if (try analyser.resolveUnwrapErrorUnionType(container, .payload)) |unwrapped| { diff --git a/src/features/goto.zig b/src/features/goto.zig index de5d98595..7fcd71d74 100644 --- a/src/features/goto.zig +++ b/src/features/goto.zig @@ -81,7 +81,7 @@ fn gotoDefinitionLabel( defer tracy_zone.end(); _ = arena; - const name_loc = Analyser.identifierLocFromPosition(pos_index, handle) orelse return null; + const name_loc = Analyser.identifierLocFromIndex(handle.tree, pos_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const decl = (try Analyser.lookupLabel(handle, name, pos_index)) orelse return null; return try gotoDefinitionSymbol(analyser, offsets.locToRange(handle.tree.source, name_loc, offset_encoding), decl, kind, offset_encoding); @@ -99,7 +99,7 @@ fn gotoDefinitionGlobal( defer tracy_zone.end(); _ = arena; - const name_loc = Analyser.identifierLocFromPosition(pos_index, handle) orelse return null; + const name_loc = Analyser.identifierLocFromIndex(handle.tree, pos_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const decl = (try analyser.lookupSymbolGlobal(handle, name, pos_index)) orelse return null; return try gotoDefinitionSymbol(analyser, offsets.locToRange(handle.tree.source, name_loc, offset_encoding), decl, kind, offset_encoding); @@ -116,7 +116,7 @@ fn gotoDefinitionEnumLiteral( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const name_loc = Analyser.identifierLocFromPosition(source_index, handle) orelse return null; + const name_loc = Analyser.identifierLocFromIndex(handle.tree, source_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const decl = (try 
analyser.getSymbolEnumLiteral(arena, handle, source_index, name)) orelse return null; return try gotoDefinitionSymbol(analyser, offsets.locToRange(handle.tree.source, name_loc, offset_encoding), decl, kind, offset_encoding); @@ -172,7 +172,7 @@ fn gotoDefinitionFieldAccess( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const name_loc = Analyser.identifierLocFromPosition(source_index, handle) orelse return null; + const name_loc = Analyser.identifierLocFromIndex(handle.tree, source_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const held_loc = offsets.locMerge(loc, name_loc); const accesses = (try analyser.getSymbolFieldAccesses(arena, handle, source_index, held_loc, name)) orelse return null; diff --git a/src/features/hover.zig b/src/features/hover.zig index 53ea357af..cd207437c 100644 --- a/src/features/hover.zig +++ b/src/features/hover.zig @@ -189,7 +189,7 @@ fn hoverDefinitionLabel( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const name_loc = Analyser.identifierLocFromPosition(pos_index, handle) orelse return null; + const name_loc = Analyser.identifierLocFromIndex(handle.tree, pos_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const decl = (try Analyser.lookupLabel(handle, name, pos_index)) orelse return null; @@ -216,7 +216,7 @@ fn hoverDefinitionBuiltin( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const name_loc = Analyser.identifierLocFromPosition(pos_index, handle) orelse return null; + const name_loc = Analyser.identifierLocFromIndex(handle.tree, pos_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const builtin = for (data.builtins) |builtin| { @@ -294,7 +294,7 @@ fn hoverDefinitionGlobal( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const name_loc = Analyser.identifierLocFromPosition(pos_index, handle) orelse return null; + const name_loc = Analyser.identifierLocFromIndex(handle.tree, pos_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const decl = (try analyser.lookupSymbolGlobal(handle, name, pos_index)) orelse return null; @@ -320,7 +320,7 @@ fn hoverDefinitionEnumLiteral( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const name_loc = Analyser.identifierLocFromPosition(source_index, handle) orelse return null; + const name_loc = Analyser.identifierLocFromIndex(handle.tree, source_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const decl = (try analyser.getSymbolEnumLiteral(arena, handle, source_index, name)) orelse return null; @@ -347,7 +347,7 @@ fn hoverDefinitionFieldAccess( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const name_loc = Analyser.identifierLocFromPosition(source_index, handle) orelse return null; + const name_loc = Analyser.identifierLocFromIndex(handle.tree, source_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const held_loc = offsets.locMerge(loc, name_loc); const decls = (try analyser.getSymbolFieldAccesses(arena, handle, source_index, held_loc, name)) orelse return null; diff --git a/src/features/references.zig b/src/features/references.zig index 104a7e73a..f47508698 100644 --- a/src/features/references.zig +++ b/src/features/references.zig @@ -443,7 +443,7 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen if (request.position().character <= 0) 
return null; const source_index = offsets.positionToIndex(handle.tree.source, request.position(), server.offset_encoding); - const name_loc = Analyser.identifierLocFromPosition(source_index, handle) orelse return null; + const name_loc = Analyser.identifierLocFromIndex(handle.tree, source_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const pos_context = try Analyser.getPositionContext(server.allocator, handle.tree.source, source_index, true); diff --git a/src/features/signature_help.zig b/src/features/signature_help.zig index e3df93534..2585d1eea 100644 --- a/src/features/signature_help.zig +++ b/src/features/signature_help.zig @@ -257,7 +257,7 @@ pub fn getSignatureInfo( ); } - const name_loc = Analyser.identifierLocFromPosition(loc.end - 1, handle) orelse { + const name_loc = Analyser.identifierLocFromIndex(handle.tree, loc.end - 1) orelse { try symbol_stack.append(arena, .l_paren); continue; }; From 233b84122dfbfead7a5c69bcb44ab7851ddc040c Mon Sep 17 00:00:00 2001 From: Techatrix Date: Sun, 11 Aug 2024 18:58:19 +0200 Subject: [PATCH 18/21] check decl visibility as early as possible --- src/analysis.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index 287d9e691..904b6f0b6 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -3993,6 +3993,8 @@ pub fn collectDeclarationsOfContainer( for (scope_decls) |decl_index| { const decl = document_scope.declarations.get(@intFromEnum(decl_index)); + const decl_with_handle = DeclWithHandle{ .decl = decl, .handle = handle }; + if (handle != original_handle and !decl_with_handle.isPublic()) continue; switch (decl) { .ast_node => |node| switch (node_tags[node]) { @@ -4036,8 +4038,6 @@ pub fn collectDeclarationsOfContainer( else => {}, } - const decl_with_handle = DeclWithHandle{ .decl = decl, .handle = handle }; - if (handle != original_handle and !decl_with_handle.isPublic()) continue; try decl_collection.append(analyser.arena.allocator(), decl_with_handle); } From 879ba5776d7424ae6ed335ac647db671622b2d33 Mon Sep 17 00:00:00 2001 From: Techatrix Date: Sun, 11 Aug 2024 19:13:54 +0200 Subject: [PATCH 19/21] store: simplify status check when computing DocumentScope or ZIR --- src/DocumentStore.zig | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig index ab36765b0..e94773f23 100644 --- a/src/DocumentStore.zig +++ b/src/DocumentStore.zig @@ -385,11 +385,9 @@ pub const Handle = struct { while (true) { const status = self.getStatus(); if (status.has_document_scope) break; - if (status.has_document_scope_lock) { - // another thread is currently computing the document scope - self.impl.condition.wait(&self.impl.lock); - continue; - } else if (self.impl.status.bitSet(@bitOffsetOf(Status, "has_document_scope_lock"), .release) != 0) { + if (status.has_document_scope_lock or + self.impl.status.bitSet(@bitOffsetOf(Status, "has_document_scope_lock"), .release) != 0) + { // another thread is currently computing the document scope self.impl.condition.wait(&self.impl.lock); continue; @@ -424,11 +422,9 @@ pub const Handle = struct { while (true) { const status = self.getStatus(); if (status.has_zir) break; - if (status.has_zir_lock) { - // another thread is currently computing the ZIR - self.impl.condition.wait(&self.impl.lock); - continue; - } else if (self.impl.status.bitSet(@bitOffsetOf(Status, "has_zir_lock"), .release) != 0) { + if (status.has_zir_lock or + 
self.impl.status.bitSet(@bitOffsetOf(Status, "has_zir_lock"), .release) != 0) + { // another thread is currently computing the ZIR self.impl.condition.wait(&self.impl.lock); continue; From 25739c3b8cec863d0c768c9727eee68950cda171 Mon Sep 17 00:00:00 2001 From: Techatrix Date: Fri, 16 Aug 2024 15:36:20 +0200 Subject: [PATCH 20/21] use std.crypto.random instead of std.posix.getrandom --- src/debug.zig | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/debug.zig b/src/debug.zig index 6faf80a45..82d0fc1c1 100644 --- a/src/debug.zig +++ b/src/debug.zig @@ -85,12 +85,9 @@ pub const FailingAllocator = struct { /// `likelihood == 0` means that every allocation will fail /// `likelihood == std.math.intMax(u32)` means that no allocation will be forced to fail pub fn init(internal_allocator: std.mem.Allocator, likelihood: u32) FailingAllocator { - var seed = std.mem.zeroes([8]u8); - std.posix.getrandom(&seed) catch {}; - return FailingAllocator{ .internal_allocator = internal_allocator, - .random = std.Random.DefaultPrng.init(@bitCast(seed)), + .random = std.Random.DefaultPrng.init(std.crypto.random.int(u64)), .likelihood = likelihood, }; } From 3b82e8d99d3f89c7c5a384bc7a8f7c341391a53a Mon Sep 17 00:00:00 2001 From: Techatrix Date: Fri, 16 Aug 2024 15:39:58 +0200 Subject: [PATCH 21/21] fix deadlock when DocumentScope or ZIR failed because of OutOfMemory --- src/DocumentStore.zig | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig index e94773f23..00e26e878 100644 --- a/src/DocumentStore.zig +++ b/src/DocumentStore.zig @@ -392,6 +392,7 @@ pub const Handle = struct { self.impl.condition.wait(&self.impl.lock); continue; } + defer self.impl.condition.broadcast(); self.impl.document_scope = blk: { var document_scope = try DocumentScope.init(self.impl.allocator, self.tree); @@ -406,8 +407,6 @@ pub const Handle = struct { }; const old_has_document_scope = self.impl.status.bitSet(@bitOffsetOf(Status, "has_document_scope"), .release); // atomically set has_document_scope std.debug.assert(old_has_document_scope == 0); // race condition: another thread set `has_document_scope` even though we hold the lock - - self.impl.condition.broadcast(); } return self.impl.document_scope; } @@ -429,6 +428,7 @@ pub const Handle = struct { self.impl.condition.wait(&self.impl.lock); continue; } + defer self.impl.condition.broadcast(); self.impl.zir = blk: { const tracy_zone_inner = tracy.traceNamed(@src(), "AstGen.generate"); @@ -447,8 +447,6 @@ pub const Handle = struct { _ = self.impl.status.bitReset(@bitOffsetOf(Status, "zir_outdated"), .release); // atomically set zir_outdated const old_has_zir = self.impl.status.bitSet(@bitOffsetOf(Status, "has_zir"), .release); // atomically set has_zir std.debug.assert(old_has_zir == 0); // race condition: another thread set `has_zir` even though we hold the lock - - self.impl.condition.broadcast(); } return self.impl.zir; }
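
Note on the final two DocumentStore changes: both touch the same lazy-initialization pattern. A status bit decides which thread computes the DocumentScope or ZIR, every other caller waits on a condition variable, and the computing thread broadcasts when it is done — now via `defer`, so waiters are also woken when `DocumentScope.init` or `AstGen.generate` fails with `error.OutOfMemory`. Below is a minimal, self-contained sketch of that shape. It is not ZLS's actual `Handle` implementation: the `LazyValue` type, the enum-based state, the retry-on-error behavior, and `expensiveComputation` are illustrative assumptions, and the sketch drops the lock during the computation instead of using atomic status bits.

const std = @import("std");

/// A value that is computed at most once; concurrent callers of `get` either
/// compute it themselves or wait until the computing thread broadcasts.
/// (Sketch only; not ZLS's Handle API.)
const LazyValue = struct {
    mutex: std.Thread.Mutex = .{},
    condition: std.Thread.Condition = .{},
    state: enum { empty, computing, ready } = .empty,
    value: u64 = undefined,

    fn get(self: *LazyValue) error{OutOfMemory}!u64 {
        self.mutex.lock();
        defer self.mutex.unlock();

        while (true) {
            switch (self.state) {
                .ready => return self.value,
                .computing => {
                    // another thread is computing; `wait` releases the mutex
                    // until that thread broadcasts, then re-acquires it.
                    self.condition.wait(&self.mutex);
                },
                .empty => {
                    self.state = .computing;
                    // wake every waiter no matter how the computation ends;
                    // without the `defer`, an error return would leave them
                    // blocked forever.
                    defer self.condition.broadcast();
                    // sketch-specific choice: let a later caller retry after a failure.
                    errdefer self.state = .empty;

                    // drop the lock while doing the expensive work so other
                    // callers can reach the `.computing` branch above.
                    self.mutex.unlock();
                    const result = expensiveComputation();
                    self.mutex.lock();

                    self.value = try result;
                    self.state = .ready;
                    return self.value;
                },
            }
        }
    }

    // stand-in for DocumentScope.init / AstGen.generate
    fn expensiveComputation() error{OutOfMemory}!u64 {
        return 42;
    }
};

test LazyValue {
    var lazy: LazyValue = .{};
    try std.testing.expectEqual(@as(u64, 42), try lazy.get());
    try std.testing.expectEqual(@as(u64, 42), try lazy.get());
}

The point mirrored from the deadlock fix above is that `defer self.condition.broadcast()` runs on both the success and the error path, whereas a broadcast placed after the computation is skipped when the `try` fails, leaving the waiting threads blocked.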