@@ -33,6 +33,8 @@ const FrameIndex = bits.FrameIndex;
const InnerError = codegen.CodeGenError || error{OutOfRegisters};
+ const err_ret_trace_index: Air.Inst.Index = @enumFromInt(std.math.maxInt(u32));
+
gpa: Allocator,
pt: Zcu.PerThread,
air: Air,
@@ -55,6 +57,7 @@ va_info: union {
win64: struct {},
},
ret_mcv: InstTracking,
+ err_ret_trace_reg: Register,
fn_type: Type,
src_loc: Zcu.LazySrcLoc,
@@ -626,6 +629,7 @@ const InstTracking = struct {
switch (self.long) {
.none => self.long = try cg.allocRegOrMem(inst, false),
.load_frame => {},
+ .lea_frame => return,
.reserved_frame => |index| self.long = .{ .load_frame = .{ .index = index } },
else => unreachable,
}
@@ -887,6 +891,7 @@ pub fn generate(
.args = undefined, // populated after `resolveCallingConventionValues`
.va_info = undefined, // populated after `resolveCallingConventionValues`
.ret_mcv = undefined, // populated after `resolveCallingConventionValues`
+ .err_ret_trace_reg = undefined, // populated after `resolveCallingConventionValues`
.fn_type = fn_type,
.src_loc = src_loc,
.end_di_line = func.rbrace_line,
@@ -935,6 +940,7 @@ pub fn generate(
function.args = call_info.args;
function.ret_mcv = call_info.return_value;
+ function.err_ret_trace_reg = call_info.err_ret_trace_reg;
function.frame_allocs.set(@intFromEnum(FrameIndex.ret_addr), .init(.{
.size = Type.usize.abiSize(zcu),
.alignment = Type.usize.abiAlignment(zcu).min(call_info.stack_align),
@@ -962,6 +968,14 @@ pub fn generate(
} },
.x86_64_win => .{ .win64 = .{} },
};
+ if (call_info.err_ret_trace_reg != .none) {
+ function.register_manager.getRegAssumeFree(call_info.err_ret_trace_reg, err_ret_trace_index);
+ try function.inst_tracking.putNoClobber(
+ gpa,
+ err_ret_trace_index,
+ .init(.{ .register = call_info.err_ret_trace_reg }),
+ );
+ }

function.gen() catch |err| switch (err) {
error.CodegenFail => return error.CodegenFail,
@@ -1042,6 +1056,7 @@ pub fn generateLazy(
.args = undefined,
.va_info = undefined,
.ret_mcv = undefined,
+ .err_ret_trace_reg = undefined,
.fn_type = undefined,
.src_loc = src_loc,
.end_di_line = undefined, // no debug info yet
@@ -2503,9 +2518,6 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void {
.optional_payload => try cg.airOptionalPayload(inst),
.unwrap_errunion_err => try cg.airUnwrapErrUnionErr(inst),
.unwrap_errunion_payload => try cg.airUnwrapErrUnionPayload(inst),
- .err_return_trace => try cg.airErrReturnTrace(inst),
- .set_err_return_trace => try cg.airSetErrReturnTrace(inst),
- .save_err_return_trace_index => try cg.airSaveErrReturnTraceIndex(inst),

.wrap_optional => try cg.airWrapOptional(inst),
.wrap_errunion_payload => try cg.airWrapErrUnionPayload(inst),
@@ -11236,12 +11248,46 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void {
.wasm_memory_size => unreachable,
.wasm_memory_grow => unreachable,

+ .err_return_trace => {
+ const ert: Temp = .{ .index = err_ret_trace_index };
+ try ert.moveTo(inst, cg);
+ },
+ .set_err_return_trace => {
+ const un_op = air_datas[@intFromEnum(inst)].un_op;
+ var ops = try cg.tempsFromOperands(inst, .{un_op});
+ switch (ops[0].unwrap(cg)) {
+ .ref => {
+ const result = try cg.allocRegOrMem(err_ret_trace_index, true);
+ try cg.genCopy(.usize, result, ops[0].tracking(cg).short, .{});
+ tracking_log.debug("{} => {} (birth)", .{ err_ret_trace_index, result });
+ cg.inst_tracking.putAssumeCapacityNoClobber(err_ret_trace_index, .init(result));
+ },
+ .temp => |temp_index| {
+ const temp_tracking = temp_index.tracking(cg);
+ tracking_log.debug("{} => {} (birth)", .{ err_ret_trace_index, temp_tracking.short });
+ cg.inst_tracking.putAssumeCapacityNoClobber(err_ret_trace_index, temp_tracking.*);
+ assert(cg.reuseTemp(err_ret_trace_index, temp_index.toIndex(), temp_tracking));
+ },
+ .err_ret_trace => unreachable,
+ }
+ },
+
.addrspace_cast => {
const ty_op = air_datas[@intFromEnum(inst)].ty_op;
var ops = try cg.tempsFromOperands(inst, .{ty_op.operand});
try ops[0].moveTo(inst, cg);
},

+ .save_err_return_trace_index => {
+ const ty_pl = air_datas[@intFromEnum(inst)].ty_pl;
+ const agg_ty = ty_pl.ty.toType();
+ assert(agg_ty.containerLayout(zcu) != .@"packed");
+ var ert: Temp = .{ .index = err_ret_trace_index };
+ var res = try ert.load(.usize, .{ .disp = @intCast(agg_ty.structFieldOffset(ty_pl.payload, zcu)) }, cg);
+ try ert.die(cg);
+ try res.moveTo(inst, cg);
+ },
+
.vector_store_elem => return cg.fail("TODO implement vector_store_elem", .{}),

.c_va_arg => try cg.airVaArg(inst),
@@ -11697,7 +11743,7 @@ fn restoreState(self: *CodeGen, state: State, deaths: []const Air.Inst.Index, co
const target_maybe_inst = if (state.free_registers.isSet(reg_index)) null else target_slot;
if (std.debug.runtime_safety) if (target_maybe_inst) |target_inst|
assert(self.inst_tracking.getIndex(target_inst).? < state.inst_tracking_len);
- if (opts.emit_instructions) {
+ if (opts.emit_instructions and current_maybe_inst != target_maybe_inst) {
if (current_maybe_inst) |current_inst|
try self.inst_tracking.getPtr(current_inst).?.spill(self, current_inst);
if (target_maybe_inst) |target_inst|
@@ -11709,7 +11755,7 @@ fn restoreState(self: *CodeGen, state: State, deaths: []const Air.Inst.Index, co
self.register_manager.freeRegIndex(reg_index);
}
if (target_maybe_inst) |target_inst| {
- self.register_manager.getRegIndexAssumeFree(reg_index, target_maybe_inst);
+ self.register_manager.getRegIndexAssumeFree(reg_index, target_inst);
self.inst_tracking.getPtr(target_inst).?.trackMaterialize(target_inst, reg_tracking);
}
} else if (target_maybe_inst) |_|
@@ -11750,9 +11796,10 @@ pub fn spillEflagsIfOccupied(self: *CodeGen) !void {
}
}

- pub fn spillCallerPreservedRegs(self: *CodeGen, cc: std.builtin.CallingConvention.Tag) !void {
+ pub fn spillCallerPreservedRegs(self: *CodeGen, cc: std.builtin.CallingConvention.Tag, ignore_reg: Register) !void {
switch (cc) {
- inline .auto, .x86_64_sysv, .x86_64_win => |tag| try self.spillRegisters(abi.getCallerPreservedRegs(tag)),
+ inline .auto, .x86_64_sysv, .x86_64_win => |tag| inline for (comptime abi.getCallerPreservedRegs(tag)) |reg|
+ if (reg != ignore_reg) try self.register_manager.getKnownReg(reg, null),
else => unreachable,
}
}
@@ -14406,22 +14453,6 @@ fn genUnwrapErrUnionPayloadPtrMir(
return result;
}

- fn airErrReturnTrace(self: *CodeGen, inst: Air.Inst.Index) !void {
- _ = inst;
- return self.fail("TODO implement airErrReturnTrace for {}", .{self.target.cpu.arch});
- //return self.finishAir(inst, result, .{ .none, .none, .none });
- }
-
- fn airSetErrReturnTrace(self: *CodeGen, inst: Air.Inst.Index) !void {
- _ = inst;
- return self.fail("TODO implement airSetErrReturnTrace for {}", .{self.target.cpu.arch});
- }
-
- fn airSaveErrReturnTraceIndex(self: *CodeGen, inst: Air.Inst.Index) !void {
- _ = inst;
- return self.fail("TODO implement airSaveErrReturnTraceIndex for {}", .{self.target.cpu.arch});
- }
-
fn airWrapOptional(self: *CodeGen, inst: Air.Inst.Index) !void {
const pt = self.pt;
const zcu = pt.zcu;
@@ -21188,7 +21219,7 @@ fn genCall(self: *CodeGen, info: union(enum) {
}

try self.spillEflagsIfOccupied();
- try self.spillCallerPreservedRegs(fn_info.cc);
+ try self.spillCallerPreservedRegs(fn_info.cc, call_info.err_ret_trace_reg);

// set stack arguments first because this can clobber registers
// also clobber spill arguments as we go
@@ -21273,6 +21304,24 @@ fn genCall(self: *CodeGen, info: union(enum) {
else => unreachable,
};

+ if (call_info.err_ret_trace_reg != .none) {
+ if (self.inst_tracking.getPtr(err_ret_trace_index)) |err_ret_trace| {
+ if (switch (err_ret_trace.short) {
+ .register => |reg| call_info.err_ret_trace_reg != reg,
+ else => true,
+ }) {
+ try self.register_manager.getReg(call_info.err_ret_trace_reg, err_ret_trace_index);
+ try reg_locks.append(self.register_manager.lockReg(call_info.err_ret_trace_reg));
+
+ try self.genSetReg(call_info.err_ret_trace_reg, .usize, err_ret_trace.short, .{});
+ err_ret_trace.trackMaterialize(err_ret_trace_index, .{
+ .long = err_ret_trace.long,
+ .short = .{ .register = call_info.err_ret_trace_reg },
+ });
+ }
+ }
+ }
+
// now we are free to set register arguments
switch (call_info.return_value.long) {
.none, .unreach => {},
@@ -21447,6 +21496,17 @@ fn airRet(self: *CodeGen, inst: Air.Inst.Index, safety: bool) !void {
else => unreachable,
}
self.ret_mcv.liveOut(self, inst);
+
+ if (self.err_ret_trace_reg != .none) {
+ if (self.inst_tracking.getPtr(err_ret_trace_index)) |err_ret_trace| {
+ if (switch (err_ret_trace.short) {
+ .register => |reg| self.err_ret_trace_reg != reg,
+ else => true,
+ }) try self.genSetReg(self.err_ret_trace_reg, .usize, err_ret_trace.short, .{});
+ err_ret_trace.liveOut(self, err_ret_trace_index);
+ }
+ }
+
try self.finishAir(inst, .unreach, .{ un_op, .none, .none });

// TODO optimization opportunity: figure out when we can emit this as a 2 byte instruction
@@ -21467,6 +21527,17 @@ fn airRetLoad(self: *CodeGen, inst: Air.Inst.Index) !void {
else => unreachable,
}
self.ret_mcv.liveOut(self, inst);
+
+ if (self.err_ret_trace_reg != .none) {
+ if (self.inst_tracking.getPtr(err_ret_trace_index)) |err_ret_trace| {
+ if (switch (err_ret_trace.short) {
+ .register => |reg| self.err_ret_trace_reg != reg,
+ else => true,
+ }) try self.genSetReg(self.err_ret_trace_reg, .usize, err_ret_trace.short, .{});
+ err_ret_trace.liveOut(self, err_ret_trace_index);
+ }
+ }
+
try self.finishAir(inst, .unreach, .{ un_op, .none, .none });

// TODO optimization opportunity: figure out when we can emit this as a 2 byte instruction
@@ -26098,8 +26169,13 @@ fn airTagName(self: *CodeGen, inst: Air.Inst.Index) !void {
stack_frame_align.* = stack_frame_align.max(needed_call_frame.abi_align);
}

+ const err_ret_trace_reg = if (zcu.comp.config.any_error_tracing) err_ret_trace_reg: {
+ const param_gpr = abi.getCAbiIntParamRegs(.auto);
+ break :err_ret_trace_reg param_gpr[param_gpr.len - 1];
+ } else .none;
+
try self.spillEflagsIfOccupied();
- try self.spillCallerPreservedRegs(.auto);
+ try self.spillCallerPreservedRegs(.auto, err_ret_trace_reg);

const param_regs = abi.getCAbiIntParamRegs(.auto);
@@ -28564,6 +28640,7 @@ const CallMCValues = struct {
stack_align: InternPool.Alignment,
gp_count: u32,
fp_count: u32,
+ err_ret_trace_reg: Register,

fn deinit(self: *CallMCValues, func: *CodeGen) void {
func.gpa.free(self.args);
@@ -28598,6 +28675,7 @@ fn resolveCallingConventionValues(
.stack_align = undefined,
.gp_count = 0,
.fp_count = 0,
+ .err_ret_trace_reg = .none,
};
errdefer self.gpa.free(result.args);
@@ -28842,6 +28920,11 @@ fn resolveCallingConventionValues(
var param_x87 = abi.getCAbiX87ParamRegs(cc);
var param_sse = abi.getCAbiSseParamRegs(cc, self.target);

+ if (zcu.comp.config.any_error_tracing) {
+ result.err_ret_trace_reg = param_gpr[param_gpr.len - 1];
+ param_gpr = param_gpr[0 .. param_gpr.len - 1];
+ }
+
// Return values
result.return_value = if (ret_ty.isNoReturn(zcu))
.init(.unreach)
@@ -29159,16 +29242,8 @@ fn typeOf(self: *CodeGen, inst: Air.Inst.Ref) Type {
}

fn typeOfIndex(self: *CodeGen, inst: Air.Inst.Index) Type {
- const pt = self.pt;
- const zcu = pt.zcu;
const temp: Temp = .{ .index = inst };
- return switch (temp.unwrap(self)) {
- .ref => switch (self.air.instructions.items(.tag)[@intFromEnum(inst)]) {
- .loop_switch_br => self.typeOf(self.air.unwrapSwitch(inst).operand),
- else => self.air.typeOfIndex(inst, &zcu.intern_pool),
- },
- .temp => temp.typeOf(self),
- };
+ return temp.typeOf(self);
}

fn intCompilerRtAbiName(int_bits: u32) u8 {
@@ -29336,10 +29411,12 @@ const Temp = struct {
fn unwrap(temp: Temp, cg: *CodeGen) union(enum) {
ref: Air.Inst.Ref,
temp: Index,
+ err_ret_trace,
} {
switch (temp.index.unwrap()) {
.ref => |ref| return .{ .ref = ref },
.target => |target_index| {
+ if (temp.index == err_ret_trace_index) return .err_ret_trace;
const temp_index: Index = @enumFromInt(target_index);
assert(temp_index.isValid(cg));
return .{ .temp = temp_index };
@@ -29349,14 +29426,18 @@ const Temp = struct {
fn typeOf(temp: Temp, cg: *CodeGen) Type {
return switch (temp.unwrap(cg)) {
- .ref => |ref| cg.typeOf(ref),
+ .ref => switch (cg.air.instructions.items(.tag)[@intFromEnum(temp.index)]) {
+ .loop_switch_br => cg.typeOf(cg.air.unwrapSwitch(temp.index).operand),
+ else => cg.air.typeOfIndex(temp.index, &cg.pt.zcu.intern_pool),
+ },
.temp => |temp_index| temp_index.typeOf(cg),
+ .err_ret_trace => .usize,
};
}

fn isMut(temp: Temp, cg: *CodeGen) bool {
return switch (temp.unwrap(cg)) {
- .ref => false,
+ .ref, .err_ret_trace => false,
.temp => |temp_index| switch (temp_index.tracking(cg).short) {
.none,
.unreach,
@@ -29456,7 +29537,7 @@ const Temp = struct {
fn toOffset(temp: *Temp, off: i32, cg: *CodeGen) !void {
if (off == 0) return;
switch (temp.unwrap(cg)) {
- .ref => {},
+ .ref, .err_ret_trace => {},
.temp => |temp_index| {
const temp_tracking = temp_index.tracking(cg);
switch (temp_tracking.short) {
@@ -29617,6 +29698,7 @@ const Temp = struct {
},
}
},
+ .err_ret_trace => unreachable,
}
const new_temp = try temp.getLimb(limb_ty, limb_index, cg);
try temp.die(cg);
@@ -29633,14 +29715,15 @@ const Temp = struct {
}

fn toReg(temp: *Temp, new_reg: Register, cg: *CodeGen) !bool {
- const val, const ty = val_ty: switch (temp.unwrap(cg)) {
+ const val, const ty: Type = val_ty: switch (temp.unwrap(cg)) {
.ref => |ref| .{ temp.tracking(cg).short, cg.typeOf(ref) },
.temp => |temp_index| {
const temp_tracking = temp_index.tracking(cg);
if (temp_tracking.short == .register and
temp_tracking.short.register == new_reg) return false;
break :val_ty .{ temp_tracking.short, temp_index.typeOf(cg) };
},
+ .err_ret_trace => .{ temp.tracking(cg).short, .usize },
};
const new_temp_index = cg.next_temp_index;
try cg.register_manager.getReg(new_reg, new_temp_index.toIndex());
@@ -30167,7 +30250,7 @@ const Temp = struct {
fn moveTo(temp: Temp, inst: Air.Inst.Index, cg: *CodeGen) !void {
if (cg.liveness.isUnused(inst)) try temp.die(cg) else switch (temp.unwrap(cg)) {
- .ref => {
+ .ref, .err_ret_trace => {
const result = try cg.allocRegOrMem(inst, true);
try cg.genCopy(cg.typeOfIndex(inst), result, temp.tracking(cg).short, .{});
tracking_log.debug("{} => {} (birth)", .{ inst, result });
@@ -30184,7 +30267,7 @@ const Temp = struct {
fn die(temp: Temp, cg: *CodeGen) !void {
switch (temp.unwrap(cg)) {
- .ref => {},
+ .ref, .err_ret_trace => {},
.temp => |temp_index| try temp_index.tracking(cg).die(cg, temp_index.toIndex()),
}
}