Commit

Merge pull request #22303 from mlugg/131-new
compiler: analyze type and value of global declarations separately
mlugg authored Dec 25, 2024
2 parents af5e731 + 3afda43 commit 497592c
Showing 29 changed files with 3,095 additions and 2,546 deletions.
1,118 changes: 582 additions & 536 deletions lib/std/zig/AstGen.zig

487 changes: 392 additions & 95 deletions lib/std/zig/Zir.zig

81 changes: 52 additions & 29 deletions src/Compilation.zig
@@ -348,12 +348,15 @@ const Job = union(enum) {
/// Corresponds to the task in `link.Task`.
/// Only needed for backends that haven't yet been updated to not race against Sema.
codegen_type: InternPool.Index,
/// The `Cau` must be semantically analyzed (and possibly export itself).
/// The `AnalUnit`, which is *not* a `func`, must be semantically analyzed.
/// This may be its first time being analyzed, or it may be outdated.
/// If the unit is a function, a `codegen_func` job will then be queued.
analyze_comptime_unit: InternPool.AnalUnit,
/// This function must be semantically analyzed.
/// This may be its first time being analyzed, or it may be outdated.
analyze_cau: InternPool.Cau.Index,
/// Analyze the body of a runtime function.
/// After analysis, a `codegen_func` job will be queued.
/// These must be separate jobs to ensure any needed type resolution occurs *before* codegen.
/// This job is separate from `analyze_comptime_unit` because it has a different priority.
analyze_func: InternPool.Index,
/// The main source file for the module needs to be analyzed.
analyze_mod: *Package.Module,
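A side note on the hunk above: with this split, any non-`func` analysis unit is queued as a single `analyze_comptime_unit` job, while function bodies keep their own `analyze_func` job (which later queues `codegen_func`). Here is a minimal, self-contained sketch of that dispatch; the types and payloads are stand-ins, not the real `InternPool.AnalUnit` / `InternPool.Index`:

```zig
const std = @import("std");

// Toy mirror of the reworked job split. The real payloads are
// `InternPool.AnalUnit` and `InternPool.Index`; plain integers stand in here.
const AnalUnit = union(enum) {
    @"comptime": u32,
    nav_ty: u32,
    nav_val: u32,
    type: u32,
    func: u32,
};

const Job = union(enum) {
    /// Any non-`func` unit: run (or re-run) semantic analysis on it.
    analyze_comptime_unit: AnalUnit,
    /// A runtime function body: analyze it, then queue codegen for it.
    analyze_func: u32,
};

fn jobFor(outdated: AnalUnit) Job {
    return switch (outdated) {
        .func => |f| .{ .analyze_func = f },
        .@"comptime", .nav_ty, .nav_val, .type => .{ .analyze_comptime_unit = outdated },
    };
}

test "functions get their own job kind" {
    try std.testing.expect(jobFor(.{ .func = 1 }) == .analyze_func);
    try std.testing.expect(jobFor(.{ .nav_val = 2 }) == .analyze_comptime_unit);
}
```

Keeping the two job kinds separate lets the scheduler give function-body analysis a different priority than other comptime work, as the new doc comment notes.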
@@ -2903,6 +2906,7 @@ const Header = extern struct {
file_deps_len: u32,
src_hash_deps_len: u32,
nav_val_deps_len: u32,
nav_ty_deps_len: u32,
namespace_deps_len: u32,
namespace_name_deps_len: u32,
first_dependency_len: u32,
@@ -2946,6 +2950,7 @@ pub fn saveState(comp: *Compilation) !void {
.file_deps_len = @intCast(ip.file_deps.count()),
.src_hash_deps_len = @intCast(ip.src_hash_deps.count()),
.nav_val_deps_len = @intCast(ip.nav_val_deps.count()),
.nav_ty_deps_len = @intCast(ip.nav_ty_deps.count()),
.namespace_deps_len = @intCast(ip.namespace_deps.count()),
.namespace_name_deps_len = @intCast(ip.namespace_name_deps.count()),
.first_dependency_len = @intCast(ip.first_dependency.count()),
@@ -2976,6 +2981,8 @@ pub fn saveState(comp: *Compilation) !void {
addBuf(&bufs, mem.sliceAsBytes(ip.src_hash_deps.values()));
addBuf(&bufs, mem.sliceAsBytes(ip.nav_val_deps.keys()));
addBuf(&bufs, mem.sliceAsBytes(ip.nav_val_deps.values()));
addBuf(&bufs, mem.sliceAsBytes(ip.nav_ty_deps.keys()));
addBuf(&bufs, mem.sliceAsBytes(ip.nav_ty_deps.values()));
addBuf(&bufs, mem.sliceAsBytes(ip.namespace_deps.keys()));
addBuf(&bufs, mem.sliceAsBytes(ip.namespace_deps.values()));
addBuf(&bufs, mem.sliceAsBytes(ip.namespace_name_deps.keys()));
@@ -3141,8 +3148,10 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
}

const file_index = switch (anal_unit.unwrap()) {
.cau => |cau| zcu.namespacePtr(ip.getCau(cau).namespace).file_scope,
.func => |ip_index| (zcu.funcInfo(ip_index).zir_body_inst.resolveFull(ip) orelse continue).file,
.@"comptime" => |cu| ip.getComptimeUnit(cu).zir_index.resolveFile(ip),
.nav_val, .nav_ty => |nav| ip.getNav(nav).analysis.?.zir_index.resolveFile(ip),
.type => |ty| Type.fromInterned(ty).typeDeclInst(zcu).?.resolveFile(ip),
.func => |ip_index| zcu.funcInfo(ip_index).zir_body_inst.resolveFile(ip),
};

// Skip errors for AnalUnits within files that had a parse failure.
@@ -3374,11 +3383,9 @@ pub fn addModuleErrorMsg(
const rt_file_path = try src.file_scope.fullPath(gpa);
defer gpa.free(rt_file_path);
const name = switch (ref.referencer.unwrap()) {
.cau => |cau| switch (ip.getCau(cau).owner.unwrap()) {
.nav => |nav| ip.getNav(nav).name.toSlice(ip),
.type => |ty| Type.fromInterned(ty).containerTypeName(ip).toSlice(ip),
.none => "comptime",
},
.@"comptime" => "comptime",
.nav_val, .nav_ty => |nav| ip.getNav(nav).name.toSlice(ip),
.type => |ty| Type.fromInterned(ty).containerTypeName(ip).toSlice(ip),
.func => |f| ip.getNav(zcu.funcInfo(f).owner_nav).name.toSlice(ip),
};
try ref_traces.append(gpa, .{
@@ -3641,10 +3648,14 @@ fn performAllTheWorkInner(
// If there's no work queued, check if there's anything outdated
// which we need to work on, and queue it if so.
if (try zcu.findOutdatedToAnalyze()) |outdated| {
switch (outdated.unwrap()) {
.cau => |cau| try comp.queueJob(.{ .analyze_cau = cau }),
.func => |func| try comp.queueJob(.{ .analyze_func = func }),
}
try comp.queueJob(switch (outdated.unwrap()) {
.func => |f| .{ .analyze_func = f },
.@"comptime",
.nav_ty,
.nav_val,
.type,
=> .{ .analyze_comptime_unit = outdated },
});
continue;
}
}
@@ -3667,13 +3678,13 @@ fn processOneJob(tid: usize, comp: *Compilation, job: Job, prog_node: std.Progre
.codegen_nav => |nav_index| {
const zcu = comp.zcu.?;
const nav = zcu.intern_pool.getNav(nav_index);
if (nav.analysis_owner.unwrap()) |cau| {
const unit = InternPool.AnalUnit.wrap(.{ .cau = cau });
if (nav.analysis != null) {
const unit: InternPool.AnalUnit = .wrap(.{ .nav_val = nav_index });
if (zcu.failed_analysis.contains(unit) or zcu.transitive_failed_analysis.contains(unit)) {
return;
}
}
assert(nav.status == .resolved);
assert(nav.status == .fully_resolved);
comp.dispatchCodegenTask(tid, .{ .codegen_nav = nav_index });
},
.codegen_func => |func| {
@@ -3688,36 +3699,48 @@ fn processOneJob(tid: usize, comp: *Compilation, job: Job, prog_node: std.Progre

const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.ensureFuncBodyAnalyzed(func) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,

pt.ensureFuncBodyUpToDate(func) catch |err| switch (err) {
error.OutOfMemory => |e| return e,
error.AnalysisFail => return,
};
},
.analyze_cau => |cau_index| {
.analyze_comptime_unit => |unit| {
const named_frame = tracy.namedFrame("analyze_comptime_unit");
defer named_frame.end();

const pt: Zcu.PerThread = .activate(comp.zcu.?, @enumFromInt(tid));
defer pt.deactivate();
pt.ensureCauAnalyzed(cau_index) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,

const maybe_err: Zcu.SemaError!void = switch (unit.unwrap()) {
.@"comptime" => |cu| pt.ensureComptimeUnitUpToDate(cu),
.nav_ty => |nav| pt.ensureNavTypeUpToDate(nav),
.nav_val => |nav| pt.ensureNavValUpToDate(nav),
.type => |ty| if (pt.ensureTypeUpToDate(ty)) |_| {} else |err| err,
.func => unreachable,
};
maybe_err catch |err| switch (err) {
error.OutOfMemory => |e| return e,
error.AnalysisFail => return,
};

queue_test_analysis: {
if (!comp.config.is_test) break :queue_test_analysis;
const nav = switch (unit.unwrap()) {
.nav_val => |nav| nav,
else => break :queue_test_analysis,
};

// Check if this is a test function.
const ip = &pt.zcu.intern_pool;
const cau = ip.getCau(cau_index);
const nav_index = switch (cau.owner.unwrap()) {
.none, .type => break :queue_test_analysis,
.nav => |nav| nav,
};
if (!pt.zcu.test_functions.contains(nav_index)) {
if (!pt.zcu.test_functions.contains(nav)) {
break :queue_test_analysis;
}

// Tests are always emitted in test binaries. The decl_refs are created by
// Zcu.populateTestFunctions, but this will not queue body analysis, so do
// that now.
try pt.zcu.ensureFuncBodyAnalysisQueued(ip.getNav(nav_index).status.resolved.val);
try pt.zcu.ensureFuncBodyAnalysisQueued(ip.getNav(nav).status.fully_resolved.val);
}
},
.resolve_type_fully => |ty| {
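A side note on the `analyze_comptime_unit` handler above: it dispatches to a per-kind "ensure up to date" entry point, propagates only `error.OutOfMemory`, and treats `error.AnalysisFail` as already reported. A rough, self-contained sketch of that shape, with placeholder `ensure*UpToDate` functions standing in for the real `Zcu.PerThread` API:

```zig
const std = @import("std");

const SemaError = error{ OutOfMemory, AnalysisFail };

// Placeholder unit kinds; the real job carries an `InternPool.AnalUnit`.
const AnalUnit = union(enum) {
    @"comptime": u32,
    nav_ty: u32,
    nav_val: u32,
    type: u32,
    func: u32,
};

// Stand-ins for the per-kind analysis entry points on `Zcu.PerThread`.
fn ensureComptimeUnitUpToDate(_: u32) SemaError!void {}
fn ensureNavTypeUpToDate(_: u32) SemaError!void {}
fn ensureNavValUpToDate(_: u32) SemaError!void {}
fn ensureTypeUpToDate(_: u32) SemaError!void {}

fn processAnalyzeComptimeUnit(unit: AnalUnit) error{OutOfMemory}!void {
    const maybe_err: SemaError!void = switch (unit) {
        .@"comptime" => |cu| ensureComptimeUnitUpToDate(cu),
        .nav_ty => |nav| ensureNavTypeUpToDate(nav),
        .nav_val => |nav| ensureNavValUpToDate(nav),
        .type => |ty| ensureTypeUpToDate(ty),
        // Functions never reach this job; they go through `analyze_func`.
        .func => unreachable,
    };
    maybe_err catch |err| switch (err) {
        error.OutOfMemory => |e| return e,
        // An analysis failure was already recorded; the job just stops here.
        error.AnalysisFail => return,
    };
    // In the real handler, a successful `nav_val` unit may additionally queue
    // function body analysis when it names a test and a test binary is built.
}

test "analysis errors terminate the job without propagating" {
    try processAnalyzeComptimeUnit(.{ .nav_val = 1 });
}
```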
