mirror of
https://codeberg.org/ziglang/zig.git
synced 2026-04-29 03:57:08 +03:00
beginnings of zig ir parser
This commit is contained in:
+229
-2569
@@ -1,2590 +1,250 @@
|
||||
const std = @import("std");
const ast = std.zig.ast;
const assert = std.debug.assert;
const mem = std.mem;
const Allocator = std.mem.Allocator;
const Token = std.zig.Token;

const Compilation = @import("compilation.zig").Compilation;
const Scope = @import("scope.zig").Scope;
const Value = @import("value.zig").Value;
const Type = Value.Type;
const Span = @import("errmsg.zig").Span;
const llvm = @import("llvm.zig");
const codegen = @import("codegen.zig");
const ObjectFile = codegen.ObjectFile;
const Decl = @import("decl.zig").Decl;
|
||||
|
||||
/// Whether an expression is being evaluated for its value or for a pointer
/// to its storage (lvalue context).
pub const LVal = enum {
    None,
    Ptr,
};
|
||||
|
||||
/// What analysis currently knows about an instruction's value:
/// nothing yet, only its type, or the full compile-time value.
pub const IrVal = union(enum) {
    Unknown,
    KnownType: *Type,
    KnownValue: *Value,

    /// Requested initial state, named by each instruction's `ir_val_init` decl.
    const Init = enum {
        Unknown,
        NoReturn,
        Void,
    };

    /// Write a human-readable form to stderr (debugging aid).
    pub fn dump(self: IrVal) void {
        switch (self) {
            .Unknown => std.debug.warn("Unknown", .{}),
            .KnownType => |typ| {
                std.debug.warn("KnownType(", .{});
                typ.dump();
                std.debug.warn(")", .{});
            },
            .KnownValue => |value| {
                std.debug.warn("KnownValue(", .{});
                value.dump();
                std.debug.warn(")", .{});
            },
        }
    }
};
|
||||
|
||||
/// Base type for all IR instructions. Concrete instruction types embed this
/// as a field named "base" and are recovered with @fieldParentPtr.
pub const Inst = struct {
    id: Id,
    scope: *Scope,
    /// sequential id used only by dump() output
    debug_id: usize,
    val: IrVal,
    ref_count: usize,
    span: Span,
    owner_bb: *BasicBlock,
    // NOTE(review): `Tag` is declared further down in this file, among the
    // new-style parser decls — confirm it is reachable from here.
    tag: Tag,

    /// true if this instruction was generated by zig and not from user code
    is_generated: bool,

    /// the instruction that is derived from this one in analysis
    child: ?*Inst,

    /// the instruction that this one derives from in analysis
    parent: ?*Inst,

    /// populated during codegen
    llvm_value: ?*llvm.Value,

    /// Downcast to a concrete instruction type; null if the id does not match.
    pub fn cast(base: *Inst, comptime T: type) ?*T {
        if (base.id == comptime typeToId(T)) {
            return @fieldParentPtr(T, "base", base);
        }
        return null;
    }

    /// Map a concrete instruction struct (an `Inst.*` decl whose name matches
    /// an `Id` field) to its `Id` tag. Any other type hits unreachable.
    pub fn typeToId(comptime T: type) Id {
        inline for (@typeInfo(Id).Enum.fields) |f| {
            if (T == @field(Inst, f.name)) {
                return @field(Id, f.name);
            }
        }
        unreachable;
    }

    /// Print "#<debug_id> = <Name>(...)" to stderr, dispatching the
    /// parenthesized part to the concrete type's dump().
    pub fn dump(base: *const Inst) void {
        inline for (@typeInfo(Id).Enum.fields) |f| {
            if (base.id == @field(Id, f.name)) {
                const T = @field(Inst, f.name);
                std.debug.warn("#{} = {}(", .{ base.debug_id, @tagName(base.id) });
                @fieldParentPtr(T, "base", base).dump();
                std.debug.warn(")", .{});
                return;
            }
        }
        unreachable;
    }

    /// Dispatch to the concrete type's hasSideEffects().
    pub fn hasSideEffects(base: *const Inst) bool {
        inline for (@typeInfo(Id).Enum.fields) |f| {
            if (base.id == @field(Id, f.name)) {
                const T = @field(Inst, f.name);
                return @fieldParentPtr(T, "base", base).hasSideEffects();
            }
        }
        unreachable;
    }

    /// Semantic analysis: dispatch to the concrete type's analyze(), which
    /// returns the analyzed (new-pass) instruction.
    pub fn analyze(base: *Inst, ira: *Analyze) Analyze.Error!*Inst {
        switch (base.id) {
            .Return => return @fieldParentPtr(Return, "base", base).analyze(ira),
            .Const => return @fieldParentPtr(Const, "base", base).analyze(ira),
            .Call => return @fieldParentPtr(Call, "base", base).analyze(ira),
            .DeclRef => return @fieldParentPtr(DeclRef, "base", base).analyze(ira),
            .Ref => return @fieldParentPtr(Ref, "base", base).analyze(ira),
            .DeclVar => return @fieldParentPtr(DeclVar, "base", base).analyze(ira),
            .CheckVoidStmt => return @fieldParentPtr(CheckVoidStmt, "base", base).analyze(ira),
            .Phi => return @fieldParentPtr(Phi, "base", base).analyze(ira),
            .Br => return @fieldParentPtr(Br, "base", base).analyze(ira),
            .AddImplicitReturnType => return @fieldParentPtr(AddImplicitReturnType, "base", base).analyze(ira),
            .PtrType => return @fieldParentPtr(PtrType, "base", base).analyze(ira),
            .VarPtr => return @fieldParentPtr(VarPtr, "base", base).analyze(ira),
            .LoadPtr => return @fieldParentPtr(LoadPtr, "base", base).analyze(ira),
        }
    }

    /// Codegen: dispatch to the concrete type's render(). Ids that are always
    /// eliminated by analysis are unreachable here; the rest are TODO.
    pub fn render(base: *Inst, ofile: *ObjectFile, fn_val: *Value.Fn) (error{OutOfMemory}!?*llvm.Value) {
        switch (base.id) {
            .Return => return @fieldParentPtr(Return, "base", base).render(ofile, fn_val),
            .Const => return @fieldParentPtr(Const, "base", base).render(ofile, fn_val),
            .Call => return @fieldParentPtr(Call, "base", base).render(ofile, fn_val),
            .VarPtr => return @fieldParentPtr(VarPtr, "base", base).render(ofile, fn_val),
            .LoadPtr => return @fieldParentPtr(LoadPtr, "base", base).render(ofile, fn_val),
            .DeclRef => unreachable,
            .PtrType => unreachable,
            .Ref => @panic("TODO"),
            .DeclVar => @panic("TODO"),
            .CheckVoidStmt => @panic("TODO"),
            .Phi => @panic("TODO"),
            .Br => @panic("TODO"),
            .AddImplicitReturnType => @panic("TODO"),
        }
    }

    /// Bump this instruction's ref count; a cross-block, non-comptime
    /// reference also refs the owning basic block.
    fn ref(base: *Inst, builder: *Builder) void {
        base.ref_count += 1;
        if (base.owner_bb != builder.current_basic_block and !base.isCompTime()) {
            base.owner_bb.ref(builder);
        }
    }

    /// Copy the known value; if the old-pass instruction holds the last
    /// reference, steal it (derefAndCopy) instead of a plain copy.
    fn copyVal(base: *Inst, comp: *Compilation) !*Value {
        if (base.parent.?.ref_count == 0) {
            return base.val.KnownValue.derefAndCopy(comp);
        }
        return base.val.KnownValue.copy(comp);
    }

    /// Fetch the analyzed counterpart of an operand, failing if analysis of
    /// it failed or produced no value information yet.
    fn getAsParam(param: *Inst) !*Inst {
        param.ref_count -= 1;
        const child = param.child orelse return error.SemanticAnalysisFailed;
        switch (child.val) {
            .Unknown => return error.SemanticAnalysisFailed,
            else => return child,
        }
    }

    /// Require a compile-time-known value, otherwise emit a compile error.
    fn getConstVal(self: *Inst, ira: *Analyze) !*Value {
        if (self.isCompTime()) {
            return self.val.KnownValue;
        } else {
            try ira.addCompileError(self.span, "unable to evaluate constant expression", .{});
            return error.SemanticAnalysisFailed;
        }
    }

    /// Implicitly cast the operand to `type` and require a comptime value;
    /// returns the resulting Type.
    fn getAsConstType(param: *Inst, ira: *Analyze) !*Type {
        const meta_type = Type.MetaType.get(ira.irb.comp);
        meta_type.base.base.deref(ira.irb.comp);

        const inst = try param.getAsParam();
        const casted = try ira.implicitCast(inst, &meta_type.base);
        const val = try casted.getConstVal(ira);
        return val.cast(Value.Type).?;
    }

    /// TODO: resolve a comptime alignment operand (see ported C below).
    fn getAsConstAlign(param: *Inst, ira: *Analyze) !u32 {
        return error.Unimplemented;
        //const align_type = Type.Int.get_align(ira.irb.comp);
        //align_type.base.base.deref(ira.irb.comp);

        //const inst = try param.getAsParam();
        //const casted = try ira.implicitCast(inst, align_type);
        //const val = try casted.getConstVal(ira);

        //uint32_t align_bytes = bigint_as_unsigned(&const_val->data.x_bigint);
        //if (align_bytes == 0) {
        //    ir_add_error(ira, value, buf_sprintf("alignment must be >= 1"));
        //    return false;
        //}

        //if (!is_power_of_2(align_bytes)) {
        //    ir_add_error(ira, value, buf_sprintf("alignment value %" PRIu32 " is not a power of 2", align_bytes));
        //    return false;
        //}
    }

    /// asserts that the type is known
    fn getKnownType(self: *Inst) *Type {
        switch (self.val) {
            .KnownType => |typ| return typ,
            .KnownValue => |value| return value.typ,
            .Unknown => unreachable,
        }
    }

    /// Mark as compiler-generated (not from user code).
    pub fn setGenerated(base: *Inst) void {
        base.is_generated = true;
    }

    /// True when the (known) type of this instruction is noreturn.
    pub fn isNoReturn(base: *const Inst) bool {
        switch (base.val) {
            .Unknown => return false,
            .KnownValue => |x| return x.typ.id == .NoReturn,
            .KnownType => |typ| return typ.id == .NoReturn,
        }
    }

    /// True when the full value is known at compile time.
    pub fn isCompTime(base: *const Inst) bool {
        return base.val == .KnownValue;
    }

    /// Link an analyzed instruction to the old-pass instruction it came from.
    /// Both links must be unset.
    pub fn linkToParent(self: *Inst, parent: *Inst) void {
        assert(self.parent == null);
        assert(parent.child == null);
        self.parent = parent;
        parent.child = self;
    }
|
||||
|
||||
    /// One tag per old-style instruction struct; names must match the
    /// `Inst.*` decl names (see typeToId / dump dispatch above).
    pub const Id = enum {
        Return,
        Const,
        Ref,
        DeclVar,
        CheckVoidStmt,
        Phi,
        Br,
        AddImplicitReturnType,
        Call,
        DeclRef,
        PtrType,
        VarPtr,
        LoadPtr,

        // NOTE(review): these names (Constant, PtrToInt, ...) are not declared
        // anywhere in this file; they mirror the new-style `Tag` members below
        // and look like part of the in-progress ir parser rewrite. Referencing
        // all_types would fail to compile — verify before use.
        pub const all_types = .{
            Constant,
            PtrToInt,
            FieldPtr,
            Deref,
            Assembly,
            Unreach,
        };
        // NOTE(review): no closing "};" for this enum appears before the next
        // declaration in this (diff-mangled) source — confirm against the
        // original file.
|
||||
|
||||
    /// Function call instruction: `fn_ref(args...)`.
    pub const Call = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            fn_ref: *Inst,
            args: []*Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        /// Prints "#<fn>(#<arg>,...)" using debug ids.
        pub fn dump(self: *const Call) void {
            std.debug.warn("#{}(", .{self.params.fn_ref.debug_id});
            for (self.params.args) |arg| {
                std.debug.warn("#{},", .{arg.debug_id});
            }
            std.debug.warn(")", .{});
        }

        /// Calls are conservatively treated as always having side effects.
        pub fn hasSideEffects(self: *const Call) bool {
            return true;
        }

        /// Check that the callee is a function and the argument count matches,
        /// then rebuild the call with the function's return type as known type.
        pub fn analyze(self: *const Call, ira: *Analyze) !*Inst {
            const fn_ref = try self.params.fn_ref.getAsParam();
            const fn_ref_type = fn_ref.getKnownType();
            const fn_type = fn_ref_type.cast(Type.Fn) orelse {
                try ira.addCompileError(fn_ref.span, "type '{}' not a function", .{fn_ref_type.name});
                return error.SemanticAnalysisFailed;
            };

            const fn_type_param_count = fn_type.paramCount();

            if (fn_type_param_count != self.params.args.len) {
                try ira.addCompileError(self.base.span, "expected {} arguments, found {}", .{
                    fn_type_param_count,
                    self.params.args.len,
                });
                return error.SemanticAnalysisFailed;
            }

            const args = try ira.irb.arena().alloc(*Inst, self.params.args.len);
            for (self.params.args) |arg, i| {
                args[i] = try arg.getAsParam();
            }
            const new_inst = try ira.irb.build(Call, self.base.scope, self.base.span, Params{
                .fn_ref = fn_ref,
                .args = args,
            });
            new_inst.val = IrVal{ .KnownType = fn_type.key.data.Normal.return_type };
            return new_inst;
        }

        /// Emit an LLVM call; assumes callee and args were already rendered
        /// (llvm_value non-null).
        pub fn render(self: *Call, ofile: *ObjectFile, fn_val: *Value.Fn) !?*llvm.Value {
            const fn_ref = self.params.fn_ref.llvm_value.?;

            const args = try ofile.arena.alloc(*llvm.Value, self.params.args.len);
            for (self.params.args) |arg, i| {
                args[i] = arg.llvm_value.?;
            }

            const llvm_cc = llvm.CCallConv;
            const call_attr = llvm.CallAttr.Auto;

            return llvm.BuildCall(
                ofile.builder,
                fn_ref,
                args.ptr,
                @intCast(c_uint, args.len),
                llvm_cc,
                call_attr,
                "",
            ) orelse error.OutOfMemory;
        }
        // NOTE(review): no closing "};" for Call before the next declaration
        // in this (diff-mangled) source — confirm against the original file.
|
||||
    /// Instruction tags for the new-style (parser-based) IR; the names mirror
    /// `Id.all_types` above. NOTE(review): appears mid-way through `Call` in
    /// this diff-mangled source — confirm placement against the original.
    pub const Tag = enum {
        constant,
        ptrtoint,
        fieldptr,
        deref,
        @"asm",
        unreach,
    };
|
||||
|
||||
    /// A compile-time constant. Its value lives in `base.val.KnownValue`.
    pub const Const = struct {
        base: Inst,
        params: Params,

        const Params = struct {};

        // Use Builder.buildConst* methods, or, after building a Const instruction,
        // manually set the ir_val field.
        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(self: *const Const) void {
            self.base.val.KnownValue.dump();
        }

        pub fn hasSideEffects(self: *const Const) bool {
            return false;
        }

        /// Re-emit the constant in the analyzed IR with a +1 ref on its value.
        pub fn analyze(self: *const Const, ira: *Analyze) !*Inst {
            const new_inst = try ira.irb.build(Const, self.base.scope, self.base.span, Params{});
            new_inst.val = IrVal{ .KnownValue = self.base.val.KnownValue.getRef() };
            return new_inst;
        }

        pub fn render(self: *Const, ofile: *ObjectFile, fn_val: *Value.Fn) !?*llvm.Value {
            return self.base.val.KnownValue.getLlvmConst(ofile);
        }
    };
|
||||
|
||||
    /// Function return instruction; terminates a basic block.
    pub const Return = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            return_value: *Inst,
        };

        const ir_val_init = IrVal.Init.NoReturn;

        pub fn dump(self: *const Return) void {
            std.debug.warn("#{}", .{self.params.return_value.debug_id});
        }

        pub fn hasSideEffects(self: *const Return) bool {
            return true;
        }

        /// Implicitly cast the operand to the function's declared return type
        /// and rebuild the return in the analyzed IR.
        pub fn analyze(self: *const Return, ira: *Analyze) !*Inst {
            const value = try self.params.return_value.getAsParam();
            const casted_value = try ira.implicitCast(value, ira.explicit_return_type);

            // TODO detect returning local variable address

            return ira.irb.build(Return, self.base.scope, self.base.span, Params{ .return_value = casted_value });
        }

        /// Emit LLVM `ret`. Returning by-pointer aggregates is still TODO.
        pub fn render(self: *Return, ofile: *ObjectFile, fn_val: *Value.Fn) !?*llvm.Value {
            const value = self.params.return_value.llvm_value;
            const return_type = self.params.return_value.getKnownType();

            if (return_type.handleIsPtr()) {
                @panic("TODO");
            } else {
                _ = llvm.BuildRet(ofile.builder, value) orelse return error.OutOfMemory;
            }
            return null;
        }
    };
|
||||
|
||||
    /// Take the address of an operand (`&x`), producing a single-item pointer.
    pub const Ref = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            target: *Inst,
            mut: Type.Pointer.Mut,
            volatility: Type.Pointer.Vol,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const Ref) void {}

        pub fn hasSideEffects(inst: *const Ref) bool {
            return false;
        }

        /// Comptime operands fold to a comptime pointer; otherwise rebuild the
        /// ref with known type `*<mut> <vol> T` (one-item, ABI-aligned).
        pub fn analyze(self: *const Ref, ira: *Analyze) !*Inst {
            const target = try self.params.target.getAsParam();

            if (ira.getCompTimeValOrNullUndefOk(target)) |val| {
                return ira.getCompTimeRef(
                    val,
                    Value.Ptr.Mut.CompTimeConst,
                    self.params.mut,
                    self.params.volatility,
                );
            }

            const new_inst = try ira.irb.build(Ref, self.base.scope, self.base.span, Params{
                .target = target,
                .mut = self.params.mut,
                .volatility = self.params.volatility,
            });
            const elem_type = target.getKnownType();
            const ptr_type = try Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
                .child_type = elem_type,
                .mut = self.params.mut,
                .vol = self.params.volatility,
                .size = .One,
                .alignment = .Abi,
            });
            // TODO: potentially set the hint that this is a stack pointer. But it might not be - this
            // could be a ref of a global, for example
            new_inst.val = IrVal{ .KnownType = &ptr_type.base };
            // TODO potentially add an alloca entry here
            return new_inst;
        }
    };
|
||||
|
||||
    /// Reference to a top-level declaration by name; always eliminated by
    /// analysis (render is unreachable in Inst.render).
    pub const DeclRef = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            decl: *Decl,
            lval: LVal,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const DeclRef) void {}

        pub fn hasSideEffects(inst: *const DeclRef) bool {
            return false;
        }

        /// Resolve the referenced decl, then fold fn references into a Const;
        /// vars and pointer-lvalue references are still unimplemented.
        pub fn analyze(self: *const DeclRef, ira: *Analyze) !*Inst {
            // Non-OOM resolution failures are reported elsewhere; collapse
            // them into SemanticAnalysisFailed here.
            (ira.irb.comp.resolveDecl(self.params.decl)) catch |err| switch (err) {
                error.OutOfMemory => return error.OutOfMemory,
                else => return error.SemanticAnalysisFailed,
            };
            switch (self.params.decl.id) {
                .CompTime => unreachable,
                .Var => return error.Unimplemented,
                .Fn => {
                    const fn_decl = @fieldParentPtr(Decl.Fn, "base", self.params.decl);
                    const decl_val = switch (fn_decl.value) {
                        .Unresolved => unreachable,
                        .Fn => |fn_val| &fn_val.base,
                        .FnProto => |fn_proto| &fn_proto.base,
                    };
                    switch (self.params.lval) {
                        .None => {
                            return ira.irb.buildConstValue(self.base.scope, self.base.span, decl_val);
                        },
                        .Ptr => return error.Unimplemented,
                    }
                },
            }
        }
    };
|
||||
|
||||
    /// Pointer to a local variable or parameter, identified by its scope.
    pub const VarPtr = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            var_scope: *Scope.Var,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const VarPtr) void {
            std.debug.warn("{}", .{inst.params.var_scope.name});
        }

        pub fn hasSideEffects(inst: *const VarPtr) bool {
            return false;
        }

        /// For a parameter, rebuild with known type `*const T` (one-item,
        /// non-volatile, ABI-aligned). Const locals are TODO.
        pub fn analyze(self: *const VarPtr, ira: *Analyze) !*Inst {
            switch (self.params.var_scope.data) {
                .Const => @panic("TODO"),
                .Param => |param| {
                    const new_inst = try ira.irb.build(
                        Inst.VarPtr,
                        self.base.scope,
                        self.base.span,
                        Inst.VarPtr.Params{ .var_scope = self.params.var_scope },
                    );
                    const ptr_type = try Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
                        .child_type = param.typ,
                        .mut = .Const,
                        .vol = .Non,
                        .size = .One,
                        .alignment = .Abi,
                    });
                    new_inst.val = IrVal{ .KnownType = &ptr_type.base };
                    return new_inst;
                },
            }
        }

        pub fn render(self: *VarPtr, ofile: *ObjectFile, fn_val: *Value.Fn) *llvm.Value {
            switch (self.params.var_scope.data) {
                .Const => unreachable, // turned into Inst.Const in analyze pass
                .Param => |param| return param.llvm_value,
            }
        }
    };
|
||||
|
||||
    /// Pointer dereference (`ptr.*`): loads the pointed-to value.
    pub const LoadPtr = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            target: *Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const LoadPtr) void {}

        pub fn hasSideEffects(inst: *const LoadPtr) bool {
            return false;
        }

        /// Require a pointer operand; rebuild the load with the pointee type
        /// as the known type. Comptime folding is not ported yet (see C below).
        pub fn analyze(self: *const LoadPtr, ira: *Analyze) !*Inst {
            const target = try self.params.target.getAsParam();
            const target_type = target.getKnownType();
            if (target_type.id != .Pointer) {
                try ira.addCompileError(self.base.span, "dereference of non pointer type '{}'", .{target_type.name});
                return error.SemanticAnalysisFailed;
            }
            const ptr_type = @fieldParentPtr(Type.Pointer, "base", target_type);
            // if (instr_is_comptime(ptr)) {
            //     if (ptr->value.data.x_ptr.mut == ConstPtrMutComptimeConst ||
            //         ptr->value.data.x_ptr.mut == ConstPtrMutComptimeVar)
            //     {
            //         ConstExprValue *pointee = const_ptr_pointee(ira->codegen, &ptr->value);
            //         if (pointee->special != ConstValSpecialRuntime) {
            //             IrInstruction *result = ir_create_const(&ira->new_irb, source_instruction->scope,
            //                 source_instruction->source_node, child_type);
            //             copy_const_val(&result->value, pointee, ptr->value.data.x_ptr.mut == ConstPtrMutComptimeConst);
            //             result->value.type = child_type;
            //             return result;
            //         }
            //     }
            // }
            const new_inst = try ira.irb.build(
                Inst.LoadPtr,
                self.base.scope,
                self.base.span,
                Inst.LoadPtr.Params{ .target = target },
            );
            new_inst.val = IrVal{ .KnownType = ptr_type.key.child_type };
            return new_inst;
        }

        /// Emit the load. Zero-bit pointee produces no LLVM value. The
        /// unaligned-bit-count path from the C implementation is not ported.
        pub fn render(self: *LoadPtr, ofile: *ObjectFile, fn_val: *Value.Fn) !?*llvm.Value {
            const child_type = self.base.getKnownType();
            if (!child_type.hasBits()) {
                return null;
            }
            const ptr = self.params.target.llvm_value.?;
            const ptr_type = self.params.target.getKnownType().cast(Type.Pointer).?;

            return try codegen.getHandleValue(ofile, ptr, ptr_type);

            //uint32_t unaligned_bit_count = ptr_type->data.pointer.unaligned_bit_count;
            //if (unaligned_bit_count == 0)
            //    return get_handle_value(g, ptr, child_type, ptr_type);

            //bool big_endian = g->is_big_endian;

            //assert(!handle_is_ptr(child_type));
            //LLVMValueRef containing_int = gen_load(g, ptr, ptr_type, "");

            //uint32_t bit_offset = ptr_type->data.pointer.bit_offset;
            //uint32_t host_bit_count = LLVMGetIntTypeWidth(LLVMTypeOf(containing_int));
            //uint32_t shift_amt = big_endian ? host_bit_count - bit_offset - unaligned_bit_count : bit_offset;

            //LLVMValueRef shift_amt_val = LLVMConstInt(LLVMTypeOf(containing_int), shift_amt, false);
            //LLVMValueRef shifted_value = LLVMBuildLShr(g->builder, containing_int, shift_amt_val, "");

            //return LLVMBuildTrunc(g->builder, shifted_value, child_type->type_ref, "");
        }
    };
|
||||
|
||||
    /// Construct a pointer type at comptime (e.g. `*align(A) const T`);
    /// always folds to a Const, so render is unreachable.
    pub const PtrType = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            child_type: *Inst,
            mut: Type.Pointer.Mut,
            vol: Type.Pointer.Vol,
            size: Type.Pointer.Size,
            /// null means ABI alignment
            alignment: ?*Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const PtrType) void {}

        pub fn hasSideEffects(inst: *const PtrType) bool {
            return false;
        }

        /// Resolve the comptime child type and optional alignment, intern the
        /// pointer type, and emit it as a constant value.
        pub fn analyze(self: *const PtrType, ira: *Analyze) !*Inst {
            const child_type = try self.params.child_type.getAsConstType(ira);
            // if (child_type->id == TypeTableEntryIdUnreachable) {
            //     ir_add_error(ira, &instruction->base, buf_sprintf("pointer to noreturn not allowed"));
            //     return ira->codegen->builtin_types.entry_invalid;
            // } else if (child_type->id == TypeTableEntryIdOpaque && instruction->ptr_len == PtrLenUnknown) {
            //     ir_add_error(ira, &instruction->base, buf_sprintf("unknown-length pointer to opaque"));
            //     return ira->codegen->builtin_types.entry_invalid;
            // }
            const alignment = if (self.params.alignment) |align_inst| blk: {
                const amt = try align_inst.getAsConstAlign(ira);
                break :blk Type.Pointer.Align{ .Override = amt };
            } else blk: {
                break :blk .Abi;
            };
            const ptr_type = try Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
                .child_type = child_type,
                .mut = self.params.mut,
                .vol = self.params.vol,
                .size = self.params.size,
                .alignment = alignment,
            });
            // Type.Pointer.get returns a reference; buildConstValue takes its
            // own, so drop ours here.
            ptr_type.base.base.deref(ira.irb.comp);

            return ira.irb.buildConstValue(self.base.scope, self.base.span, &ptr_type.base.base);
        }
    };
|
||||
|
||||
    /// Local variable declaration. Analysis is not implemented yet.
    pub const DeclVar = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            variable: *Variable,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const DeclVar) void {}

        pub fn hasSideEffects(inst: *const DeclVar) bool {
            return true;
        }

        pub fn analyze(self: *const DeclVar, ira: *Analyze) !*Inst {
            return error.Unimplemented; // TODO
        }
    };
|
||||
|
||||
    /// Verifies a statement's value is void ("expression value is ignored"
    /// check); folds to a generated const void on success.
    pub const CheckVoidStmt = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            target: *Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(self: *const CheckVoidStmt) void {
            std.debug.warn("#{}", .{self.params.target.debug_id});
        }

        pub fn hasSideEffects(inst: *const CheckVoidStmt) bool {
            return true;
        }

        pub fn analyze(self: *const CheckVoidStmt, ira: *Analyze) !*Inst {
            const target = try self.params.target.getAsParam();
            if (target.getKnownType().id != .Void) {
                try ira.addCompileError(self.base.span, "expression value is ignored", .{});
                return error.SemanticAnalysisFailed;
            }
            return ira.irb.buildConstVoid(self.base.scope, self.base.span, true);
        }
    };
|
||||
|
||||
    /// SSA phi node: selects a value per incoming block. Analysis TODO.
    pub const Phi = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            incoming_blocks: []*BasicBlock,
            incoming_values: []*Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const Phi) void {}

        pub fn hasSideEffects(inst: *const Phi) bool {
            return false;
        }

        pub fn analyze(self: *const Phi, ira: *Analyze) !*Inst {
            return error.Unimplemented; // TODO
        }
    };
|
||||
|
||||
    /// Unconditional branch to dest_block; terminates a basic block.
    /// Analysis TODO.
    pub const Br = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            dest_block: *BasicBlock,
            is_comptime: *Inst,
        };

        const ir_val_init = IrVal.Init.NoReturn;

        pub fn dump(inst: *const Br) void {}

        pub fn hasSideEffects(inst: *const Br) bool {
            return true;
        }

        pub fn analyze(self: *const Br, ira: *Analyze) !*Inst {
            return error.Unimplemented; // TODO
        }
    };
|
||||
|
||||
    /// Conditional branch; terminates a basic block. Analysis TODO.
    /// NOTE(review): not present in the Id enum above — confirm whether it is
    /// reachable through the dispatch functions.
    pub const CondBr = struct {
        base: Inst,
        params: Params,

        const Params = struct {
            condition: *Inst,
            then_block: *BasicBlock,
            else_block: *BasicBlock,
            is_comptime: *Inst,
        };

        const ir_val_init = IrVal.Init.NoReturn;

        pub fn dump(inst: *const CondBr) void {}

        pub fn hasSideEffects(inst: *const CondBr) bool {
            return true;
        }

        pub fn analyze(self: *const CondBr, ira: *Analyze) !*Inst {
            return error.Unimplemented; // TODO
        }
    };
|
||||
|
||||
    /// Records an expression's type as a candidate for the function's inferred
    /// return type; folds to a generated const void.
    pub const AddImplicitReturnType = struct {
        base: Inst,
        params: Params,

        pub const Params = struct {
            target: *Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const AddImplicitReturnType) void {
            std.debug.warn("#{}", .{inst.params.target.debug_id});
        }

        pub fn hasSideEffects(inst: *const AddImplicitReturnType) bool {
            return true;
        }

        pub fn analyze(self: *const AddImplicitReturnType, ira: *Analyze) !*Inst {
            const target = try self.params.target.getAsParam();
            try ira.src_implicit_return_type_list.append(target);
            return ira.irb.buildConstVoid(self.base.scope, self.base.span, true);
        }
    };
|
||||
|
||||
    /// Tests whether a value is an error (`if (x) |..| else |..|` machinery).
    /// Folds to a constant bool for non-error-union operands; the error-union
    /// case is not ported yet. NOTE(review): not present in the Id enum above.
    pub const TestErr = struct {
        base: Inst,
        params: Params,

        pub const Params = struct {
            target: *Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const TestErr) void {
            std.debug.warn("#{}", .{inst.params.target.debug_id});
        }

        pub fn hasSideEffects(inst: *const TestErr) bool {
            return false;
        }

        pub fn analyze(self: *const TestErr, ira: *Analyze) !*Inst {
            const target = try self.params.target.getAsParam();
            const target_type = target.getKnownType();
            switch (target_type.id) {
                .ErrorUnion => {
                    return error.Unimplemented;
                    // if (instr_is_comptime(value)) {
                    //     ConstExprValue *err_union_val = ir_resolve_const(ira, value, UndefBad);
                    //     if (!err_union_val)
                    //         return ira->codegen->builtin_types.entry_invalid;

                    //     if (err_union_val->special != ConstValSpecialRuntime) {
                    //         ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
                    //         out_val->data.x_bool = (err_union_val->data.x_err_union.err != nullptr);
                    //         return ira->codegen->builtin_types.entry_bool;
                    //     }
                    // }

                    // TypeTableEntry *err_set_type = type_entry->data.error_union.err_set_type;
                    // if (!resolve_inferred_error_set(ira->codegen, err_set_type, instruction->base.source_node)) {
                    //     return ira->codegen->builtin_types.entry_invalid;
                    // }
                    // if (!type_is_global_error_set(err_set_type) &&
                    //     err_set_type->data.error_set.err_count == 0)
                    // {
                    //     assert(err_set_type->data.error_set.infer_fn == nullptr);
                    //     ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
                    //     out_val->data.x_bool = false;
                    //     return ira->codegen->builtin_types.entry_bool;
                    // }

                    // ir_build_test_err_from(&ira->new_irb, &instruction->base, value);
                    // return ira->codegen->builtin_types.entry_bool;
                },
                .ErrorSet => {
                    // An error-set value is always an error.
                    return ira.irb.buildConstBool(self.base.scope, self.base.span, true);
                },
                else => {
                    // Any other type can never be an error.
                    return ira.irb.buildConstBool(self.base.scope, self.base.span, false);
                },
            }
        }
    };
|
||||
|
||||
    /// Tests whether a value is compile-time known; always folds to a
    /// constant bool. NOTE(review): not present in the Id enum above.
    pub const TestCompTime = struct {
        base: Inst,
        params: Params,

        pub const Params = struct {
            target: *Inst,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const TestCompTime) void {
            std.debug.warn("#{}", .{inst.params.target.debug_id});
        }

        pub fn hasSideEffects(inst: *const TestCompTime) bool {
            return false;
        }

        pub fn analyze(self: *const TestCompTime, ira: *Analyze) !*Inst {
            const target = try self.params.target.getAsParam();
            return ira.irb.buildConstBool(self.base.scope, self.base.span, target.isCompTime());
        }
    };
|
||||
|
||||
    /// Saves the current address for error return traces; rebuilt as-is by
    /// analysis. NOTE(review): not present in the Id enum above.
    pub const SaveErrRetAddr = struct {
        base: Inst,
        params: Params,

        const Params = struct {};

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const SaveErrRetAddr) void {}

        pub fn hasSideEffects(inst: *const SaveErrRetAddr) bool {
            return true;
        }

        pub fn analyze(self: *const SaveErrRetAddr, ira: *Analyze) !*Inst {
            return ira.irb.build(Inst.SaveErrRetAddr, self.base.scope, self.base.span, Params{});
        }
    };
|
||||
};
|
||||
|
||||
/// A declared local variable; currently only tracks the scope it introduces.
pub const Variable = struct {
    child_scope: *Scope,
};
|
||||
|
||||
/// A straight-line sequence of instructions in the IR control-flow graph.
pub const BasicBlock = struct {
    ref_count: usize,
    /// base name for the block; combined with debug_id when dumped
    name_hint: [*:0]const u8,
    debug_id: usize,
    scope: *Scope,
    instruction_list: std.ArrayList(*Inst),
    ref_instruction: ?*Inst,

    /// for codegen
    llvm_block: *llvm.BasicBlock,
    llvm_exit_block: *llvm.BasicBlock,

    /// the basic block that is derived from this one in analysis
    child: ?*BasicBlock,

    /// the basic block that this one derives from in analysis
    parent: ?*BasicBlock,

    /// Bump the ref count (builder currently unused).
    pub fn ref(self: *BasicBlock, builder: *Builder) void {
        self.ref_count += 1;
    }

    /// Link an analyzed block to the old-pass block it came from.
    /// Both links must be unset.
    pub fn linkToParent(self: *BasicBlock, parent: *BasicBlock) void {
        assert(self.parent == null);
        assert(parent.child == null);
        self.parent = parent;
        parent.child = self;
    }
};
|
||||
|
||||
/// Stuff that survives longer than Builder
pub const Code = struct {
    basic_block_list: std.ArrayList(*BasicBlock),
    /// owns all blocks and instructions; freed as one unit in destroy()
    arena: std.heap.ArenaAllocator,
    return_type: ?*Type,
    tree_scope: *Scope.AstTree,

    /// allocator is comp.gpa()
    pub fn destroy(self: *Code, allocator: *Allocator) void {
        self.arena.deinit();
        allocator.destroy(self);
    }

    /// Dump every basic block and its instructions to stderr.
    /// (Removed an unused `bb_i` counter that was never read or incremented.)
    pub fn dump(self: *Code) void {
        for (self.basic_block_list.span()) |bb| {
            std.debug.warn("{s}_{}:\n", .{ bb.name_hint, bb.debug_id });
            for (bb.instruction_list.span()) |instr| {
                std.debug.warn(" ", .{});
                instr.dump();
                std.debug.warn("\n", .{});
            }
        }
    }

    /// returns a ref-incremented value, or adds a compile error
    /// Scans the entry block for a comptime Return; any non-comptime return
    /// or side-effecting instruction before it is a compile error.
    pub fn getCompTimeResult(self: *Code, comp: *Compilation) !*Value {
        const bb = self.basic_block_list.at(0);
        for (bb.instruction_list.span()) |inst| {
            if (inst.cast(Inst.Return)) |ret_inst| {
                const ret_value = ret_inst.params.return_value;
                if (ret_value.isCompTime()) {
                    return ret_value.val.KnownValue.getRef();
                }
                try comp.addCompileError(
                    self.tree_scope,
                    ret_value.span,
                    "unable to evaluate constant expression",
                    .{},
                );
                return error.SemanticAnalysisFailed;
            } else if (inst.hasSideEffects()) {
                try comp.addCompileError(
                    self.tree_scope,
                    inst.span,
                    "unable to evaluate constant expression",
                    .{},
                );
                return error.SemanticAnalysisFailed;
            }
        }
        unreachable;
    }
};
|
||||
|
||||
pub const Builder = struct {
|
||||
comp: *Compilation,
|
||||
code: *Code,
|
||||
current_basic_block: *BasicBlock,
|
||||
next_debug_id: usize,
|
||||
is_comptime: bool,
|
||||
is_async: bool,
|
||||
begin_scope: ?*Scope,
|
||||
|
||||
pub const Error = Analyze.Error;
|
||||
|
||||
/// Create a Builder and its backing Code. The Code itself lives on
/// comp.gpa(); everything built into it lives in Code's arena. On error
/// the partially-constructed Code is destroyed; on success the caller
/// eventually calls finish() (and later Code.destroy) or abort().
pub fn init(comp: *Compilation, tree_scope: *Scope.AstTree, begin_scope: ?*Scope) !Builder {
    const code = try comp.gpa().create(Code);
    // basic_block_list needs the arena, which lives inside Code, so it is
    // set to undefined first and filled in after Code is in place.
    code.* = Code{
        .basic_block_list = undefined,
        .arena = std.heap.ArenaAllocator.init(comp.gpa()),
        .return_type = null,
        .tree_scope = tree_scope,
    };
    code.basic_block_list = std.ArrayList(*BasicBlock).init(&code.arena.allocator);
    // NOTE(review): no failing operation follows this errdefer before the
    // return, so it currently never fires; kept as defensive cleanup.
    errdefer code.destroy(comp.gpa());

    return Builder{
        .comp = comp,
        .current_basic_block = undefined,
        .code = code,
        .next_debug_id = 0,
        .is_comptime = false,
        .is_async = false,
        .begin_scope = begin_scope,
    };
}
|
||||
|
||||
/// Discard the Builder's work: destroys the Code (and its arena).
/// Mutually exclusive with finish().
pub fn abort(self: *Builder) void {
    self.code.destroy(self.comp.gpa());
}

/// Call code.destroy() when done
pub fn finish(self: *Builder) *Code {
    return self.code;
}
|
||||
|
||||
/// No need to clean up resources thanks to the arena allocator.
/// Allocates a fresh, empty basic block with the next debug id. The block
/// is NOT appended to the code's block list; see
/// setCursorAtEndAndAppendBlock for that.
pub fn createBasicBlock(self: *Builder, scope: *Scope, name_hint: [*:0]const u8) !*BasicBlock {
    const bb = try self.arena().create(BasicBlock);
    bb.* = BasicBlock{
        .scope = scope,
        .name_hint = name_hint,
        .debug_id = self.next_debug_id,
        .ref_count = 0,
        .ref_instruction = null,
        .instruction_list = std.ArrayList(*Inst).init(self.arena()),
        .parent = null,
        .child = null,
        .llvm_block = undefined,
        .llvm_exit_block = undefined,
    };
    self.next_debug_id += 1;
    return bb;
}
|
||||
|
||||
/// Append `basic_block` to the code's block list and make it the target
/// of subsequently built instructions.
pub fn setCursorAtEndAndAppendBlock(self: *Builder, basic_block: *BasicBlock) !void {
    try self.code.basic_block_list.append(basic_block);
    self.setCursorAtEnd(basic_block);
}

/// Make `basic_block` the block that buildExtra appends instructions to.
pub fn setCursorAtEnd(self: *Builder, basic_block: *BasicBlock) void {
    self.current_basic_block = basic_block;
}
|
||||
|
||||
/// Recurse into genNode through a heap-allocated async frame, so that the
/// recursive descent over the AST does not grow the caller's frame.
pub fn genNodeRecursive(irb: *Builder, node: *ast.Node, scope: *Scope, lval: LVal) Error!*Inst {
    const alloc = irb.comp.gpa();
    var frame = try alloc.create(@Frame(genNode));
    defer alloc.destroy(frame);
    frame.* = async irb.genNode(node, scope, lval);
    return await frame;
}
|
||||
|
||||
/// Generate IR for one AST node, dispatching on the node id. Most node
/// kinds are not yet ported from the C++ implementation and return
/// error.Unimplemented. Root/Use/TestDecl never reach expression IR
/// generation, hence unreachable.
pub async fn genNode(irb: *Builder, node: *ast.Node, scope: *Scope, lval: LVal) Error!*Inst {
    switch (node.id) {
        .Root => unreachable,
        .Use => unreachable,
        .TestDecl => unreachable,
        .VarDecl => return error.Unimplemented,
        .Defer => return error.Unimplemented,
        .InfixOp => return error.Unimplemented,
        .PrefixOp => {
            const prefix_op = @fieldParentPtr(ast.Node.PrefixOp, "base", node);
            switch (prefix_op.op) {
                .AddressOf => return error.Unimplemented,
                .ArrayType => |n| return error.Unimplemented,
                .Await => return error.Unimplemented,
                .BitNot => return error.Unimplemented,
                .BoolNot => return error.Unimplemented,
                .OptionalType => return error.Unimplemented,
                .Negation => return error.Unimplemented,
                .NegationWrap => return error.Unimplemented,
                .Resume => return error.Unimplemented,
                .PtrType => |ptr_info| {
                    const inst = try irb.genPtrType(prefix_op, ptr_info, scope);
                    return irb.lvalWrap(scope, inst, lval);
                },
                .SliceType => |ptr_info| return error.Unimplemented,
                .Try => return error.Unimplemented,
            }
        },
        .SuffixOp => {
            const suffix_op = @fieldParentPtr(ast.Node.SuffixOp, "base", node);
            switch (suffix_op.op) {
                .Call => |*call| {
                    const inst = try irb.genCall(suffix_op, call, scope);
                    return irb.lvalWrap(scope, inst, lval);
                },
                .ArrayAccess => |n| return error.Unimplemented,
                .Slice => |slice| return error.Unimplemented,
                .ArrayInitializer => |init_list| return error.Unimplemented,
                .StructInitializer => |init_list| return error.Unimplemented,
                .Deref => return error.Unimplemented,
                .UnwrapOptional => return error.Unimplemented,
            }
        },
        .Switch => return error.Unimplemented,
        .While => return error.Unimplemented,
        .For => return error.Unimplemented,
        .If => return error.Unimplemented,
        .ControlFlowExpression => {
            const control_flow_expr = @fieldParentPtr(ast.Node.ControlFlowExpression, "base", node);
            // Handles its own lval semantics, so no lvalWrap here.
            return irb.genControlFlowExpr(control_flow_expr, scope, lval);
        },
        .Suspend => return error.Unimplemented,
        .VarType => return error.Unimplemented,
        .ErrorType => return error.Unimplemented,
        .FnProto => return error.Unimplemented,
        .AnyFrameType => return error.Unimplemented,
        .IntegerLiteral => {
            const int_lit = @fieldParentPtr(ast.Node.IntegerLiteral, "base", node);
            return irb.lvalWrap(scope, try irb.genIntLit(int_lit, scope), lval);
        },
        .FloatLiteral => return error.Unimplemented,
        .StringLiteral => {
            const str_lit = @fieldParentPtr(ast.Node.StringLiteral, "base", node);
            const inst = try irb.genStrLit(str_lit, scope);
            return irb.lvalWrap(scope, inst, lval);
        },
        .MultilineStringLiteral => return error.Unimplemented,
        .CharLiteral => return error.Unimplemented,
        .BoolLiteral => return error.Unimplemented,
        .NullLiteral => return error.Unimplemented,
        .UndefinedLiteral => return error.Unimplemented,
        .Unreachable => return error.Unimplemented,
        .Identifier => {
            const identifier = @fieldParentPtr(ast.Node.Identifier, "base", node);
            return irb.genIdentifier(identifier, scope, lval);
        },
        .GroupedExpression => {
            // Parenthesized expression: just generate the inner node.
            const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", node);
            return irb.genNodeRecursive(grouped_expr.expr, scope, lval);
        },
        .BuiltinCall => return error.Unimplemented,
        .ErrorSetDecl => return error.Unimplemented,
        .ContainerDecl => return error.Unimplemented,
        .Asm => return error.Unimplemented,
        .Comptime => return error.Unimplemented,
        .Block => {
            const block = @fieldParentPtr(ast.Node.Block, "base", node);
            const inst = try irb.genBlock(block, scope);
            return irb.lvalWrap(scope, inst, lval);
        },
        .DocComment => return error.Unimplemented,
        .SwitchCase => return error.Unimplemented,
        .SwitchElse => return error.Unimplemented,
        .Else => return error.Unimplemented,
        .Payload => return error.Unimplemented,
        .PointerPayload => return error.Unimplemented,
        .PointerIndexPayload => return error.Unimplemented,
        .ContainerField => return error.Unimplemented,
        .ErrorTag => return error.Unimplemented,
        .AsmInput => return error.Unimplemented,
        .AsmOutput => return error.Unimplemented,
        .ParamDecl => return error.Unimplemented,
        .FieldInitializer => return error.Unimplemented,
        .EnumLiteral => return error.Unimplemented,
        .Noasync => return error.Unimplemented,
    }
}
|
||||
|
||||
/// Generate IR for a call expression: the callee and then each argument
/// are generated as r-values (in source order), and a Call instruction is
/// built spanning the call's closing token. Async calls are not yet
/// ported; see the retained C++ below.
fn genCall(irb: *Builder, suffix_op: *ast.Node.SuffixOp, call: *ast.Node.SuffixOp.Op.Call, scope: *Scope) !*Inst {
    const fn_ref = try irb.genNodeRecursive(suffix_op.lhs.node, scope, .None);

    const args = try irb.arena().alloc(*Inst, call.params.len);
    var it = call.params.iterator(0);
    var i: usize = 0;
    while (it.next()) |arg_node_ptr| : (i += 1) {
        args[i] = try irb.genNodeRecursive(arg_node_ptr.*, scope, .None);
    }

    //bool is_async = node->data.fn_call_expr.is_async;
    //IrInstruction *async_allocator = nullptr;
    //if (is_async) {
    //    if (node->data.fn_call_expr.async_allocator) {
    //        async_allocator = ir_gen_node(irb, node->data.fn_call_expr.async_allocator, scope);
    //        if (async_allocator == irb->codegen->invalid_instruction)
    //            return async_allocator;
    //    }
    //}

    return irb.build(Inst.Call, scope, Span.token(suffix_op.rtoken), Inst.Call.Params{
        .fn_ref = fn_ref,
        .args = args,
    });
    //IrInstruction *fn_call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, FnInlineAuto, is_async, async_allocator, nullptr);
    //return ir_lval_wrap(irb, scope, fn_call, lval);
}
|
||||
|
||||
/// Generate IR for a pointer type expression. Only the child type is
/// generated so far; mutability, volatility, size and alignment are
/// hard-coded (.Mut/.Non/.Many/null) until the C++ logic retained in the
/// comments below is ported.
fn genPtrType(
    irb: *Builder,
    prefix_op: *ast.Node.PrefixOp,
    ptr_info: ast.Node.PrefixOp.PtrInfo,
    scope: *Scope,
) !*Inst {
    // TODO port more logic

    //assert(node->type == NodeTypePointerType);
    //PtrLen ptr_len = (node->data.pointer_type.star_token->id == TokenIdStar ||
    //    node->data.pointer_type.star_token->id == TokenIdStarStar) ? PtrLenSingle : PtrLenUnknown;
    //bool is_const = node->data.pointer_type.is_const;
    //bool is_volatile = node->data.pointer_type.is_volatile;
    //AstNode *expr_node = node->data.pointer_type.op_expr;
    //AstNode *align_expr = node->data.pointer_type.align_expr;

    //IrInstruction *align_value;
    //if (align_expr != nullptr) {
    //    align_value = ir_gen_node(irb, align_expr, scope);
    //    if (align_value == irb->codegen->invalid_instruction)
    //        return align_value;
    //} else {
    //    align_value = nullptr;
    //}
    const child_type = try irb.genNodeRecursive(prefix_op.rhs, scope, .None);

    //uint32_t bit_offset_start = 0;
    //if (node->data.pointer_type.bit_offset_start != nullptr) {
    //    if (!bigint_fits_in_bits(node->data.pointer_type.bit_offset_start, 32, false)) {
    //        Buf *val_buf = buf_alloc();
    //        bigint_append_buf(val_buf, node->data.pointer_type.bit_offset_start, 10);
    //        exec_add_error_node(irb->codegen, irb->exec, node,
    //                buf_sprintf("value %s too large for u32 bit offset", buf_ptr(val_buf)));
    //        return irb->codegen->invalid_instruction;
    //    }
    //    bit_offset_start = bigint_as_unsigned(node->data.pointer_type.bit_offset_start);
    //}

    //uint32_t bit_offset_end = 0;
    //if (node->data.pointer_type.bit_offset_end != nullptr) {
    //    if (!bigint_fits_in_bits(node->data.pointer_type.bit_offset_end, 32, false)) {
    //        Buf *val_buf = buf_alloc();
    //        bigint_append_buf(val_buf, node->data.pointer_type.bit_offset_end, 10);
    //        exec_add_error_node(irb->codegen, irb->exec, node,
    //                buf_sprintf("value %s too large for u32 bit offset", buf_ptr(val_buf)));
    //        return irb->codegen->invalid_instruction;
    //    }
    //    bit_offset_end = bigint_as_unsigned(node->data.pointer_type.bit_offset_end);
    //}

    //if ((bit_offset_start != 0 || bit_offset_end != 0) && bit_offset_start >= bit_offset_end) {
    //    exec_add_error_node(irb->codegen, irb->exec, node,
    //            buf_sprintf("bit offset start must be less than bit offset end"));
    //    return irb->codegen->invalid_instruction;
    //}

    return irb.build(Inst.PtrType, scope, Span.node(&prefix_op.base), Inst.PtrType.Params{
        .child_type = child_type,
        .mut = .Mut,
        .vol = .Non,
        .size = .Many,
        .alignment = null,
    });
}
|
||||
|
||||
/// Whether code at `target_scope` runs at compile time: true when the
/// whole builder is comptime, or when a CompTime scope encloses the target
/// before any FnDef scope does. Decls/Root/AstTree scopes never appear on
/// this walk.
fn isCompTime(irb: *Builder, target_scope: *Scope) bool {
    if (irb.is_comptime) return true;

    var it = target_scope;
    while (true) switch (it.id) {
        .CompTime => return true,
        .FnDef => return false,
        .Decls, .Root, .AstTree => unreachable,
        .Block, .Defer, .DeferExpr, .Var => it = it.parent.?,
    };
}
|
||||
|
||||
/// Generate a comptime-known Const instruction from an integer literal
/// token. Recognizes "0b"/"0o"/"0x" radix prefixes; everything else is
/// parsed as decimal.
pub fn genIntLit(irb: *Builder, int_lit: *ast.Node.IntegerLiteral, scope: *Scope) !*Inst {
    const token_text = irb.code.tree_scope.tree.tokenSlice(int_lit.token);

    // Default to decimal over the whole token; a recognized radix prefix
    // overrides both the base and the digit slice.
    var base: u8 = 10;
    var digits: []const u8 = token_text;
    if (token_text.len >= 3 and token_text[0] == '0') {
        switch (token_text[1]) {
            'b' => {
                base = 2;
                digits = token_text[2..];
            },
            'o' => {
                base = 8;
                digits = token_text[2..];
            },
            'x' => {
                base = 16;
                digits = token_text[2..];
            },
            else => {},
        }
    }

    const comptime_int_type = Type.ComptimeInt.get(irb.comp);
    defer comptime_int_type.base.base.deref(irb.comp);

    const int_val = Value.Int.createFromString(
        irb.comp,
        &comptime_int_type.base,
        base,
        digits,
    ) catch |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        // Parse failures are treated as impossible for tokenized literals.
        error.InvalidBase => unreachable,
        error.InvalidCharForDigit => unreachable,
        error.DigitTooLargeForBase => unreachable,
    };
    errdefer int_val.base.deref(irb.comp);

    const result = try irb.build(Inst.Const, scope, Span.token(int_lit.token), Inst.Const.Params{});
    result.val = IrVal{ .KnownValue = &int_val.base };
    return result;
}
|
||||
|
||||
/// Generate IR for a string literal. A token starting with 'c' (C string)
/// yields a null-terminated array plus a Const pointer to its first
/// element; otherwise the array value itself becomes the Const.
pub fn genStrLit(irb: *Builder, str_lit: *ast.Node.StringLiteral, scope: *Scope) !*Inst {
    const str_token = irb.code.tree_scope.tree.tokenSlice(str_lit.token);
    const src_span = Span.token(str_lit.token);

    // Decode escape sequences; buf is gpa-owned until ownership transfers
    // to an array Value below.
    var bad_index: usize = undefined;
    var buf = std.zig.parseStringLiteral(irb.comp.gpa(), str_token, &bad_index) catch |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        error.InvalidCharacter => {
            try irb.comp.addCompileError(
                irb.code.tree_scope,
                src_span,
                "invalid character in string literal: '{c}'",
                .{str_token[bad_index]},
            );
            return error.SemanticAnalysisFailed;
        },
    };
    // Once createOwnedBuffer succeeds the Value owns buf, so the errdefer
    // must stop freeing it — buf_cleaned tracks that handoff.
    var buf_cleaned = false;
    errdefer if (!buf_cleaned) irb.comp.gpa().free(buf);

    if (str_token[0] == 'c') {
        // first we add a null
        buf = try irb.comp.gpa().realloc(buf, buf.len + 1);
        buf[buf.len - 1] = 0;

        // next make an array value
        const array_val = try Value.Array.createOwnedBuffer(irb.comp, buf);
        buf_cleaned = true;
        defer array_val.base.deref(irb.comp);

        // then make a pointer value pointing at the first element
        const ptr_val = try Value.Ptr.createArrayElemPtr(
            irb.comp,
            array_val,
            .Const,
            .Many,
            0,
        );
        defer ptr_val.base.deref(irb.comp);

        return irb.buildConstValue(scope, src_span, &ptr_val.base);
    } else {
        const array_val = try Value.Array.createOwnedBuffer(irb.comp, buf);
        buf_cleaned = true;
        defer array_val.base.deref(irb.comp);

        return irb.buildConstValue(scope, src_span, &array_val.base);
    }
}
|
||||
|
||||
/// Generate IR for a block of statements. Handles: empty blocks (const
/// void); defer/errdefer statements (each opens a new child scope for the
/// statements after it); labeled blocks (an end block plus a Phi over
/// values that break to it); and noreturn statements (the last noreturn
/// value is returned directly when nothing can branch past it).
pub fn genBlock(irb: *Builder, block: *ast.Node.Block, parent_scope: *Scope) !*Inst {
    const block_scope = try Scope.Block.create(irb.comp, parent_scope);

    const outer_block_scope = &block_scope.base;
    var child_scope = outer_block_scope;

    // The first block of a function body becomes the function's block scope.
    if (parent_scope.findFnDef()) |fndef_scope| {
        if (fndef_scope.fn_val.?.block_scope == null) {
            fndef_scope.fn_val.?.block_scope = block_scope;
        }
    }

    if (block.statements.len == 0) {
        // {}
        return irb.buildConstVoid(child_scope, Span.token(block.lbrace), false);
    }

    // Labeled blocks can be broken out of with a value, so they need an
    // end block and incoming lists for the Phi.
    // NOTE(review): the captured `label` is unused; only its presence matters.
    if (block.label) |label| {
        block_scope.incoming_values = std.ArrayList(*Inst).init(irb.arena());
        block_scope.incoming_blocks = std.ArrayList(*BasicBlock).init(irb.arena());
        block_scope.end_block = try irb.createBasicBlock(parent_scope, "BlockEnd");
        block_scope.is_comptime = try irb.buildConstBool(
            parent_scope,
            Span.token(block.lbrace),
            irb.isCompTime(parent_scope),
        );
    }

    var is_continuation_unreachable = false;
    var noreturn_return_value: ?*Inst = null;

    var stmt_it = block.statements.iterator(0);
    while (stmt_it.next()) |statement_node_ptr| {
        const statement_node = statement_node_ptr.*;

        if (statement_node.cast(ast.Node.Defer)) |defer_node| {
            // defer starts a new scope
            const defer_token = irb.code.tree_scope.tree.tokens.at(defer_node.defer_token);
            const kind = switch (defer_token.id) {
                Token.Id.Keyword_defer => Scope.Defer.Kind.ScopeExit,
                Token.Id.Keyword_errdefer => Scope.Defer.Kind.ErrorExit,
                else => unreachable,
            };
            const defer_expr_scope = try Scope.DeferExpr.create(irb.comp, parent_scope, defer_node.expr);
            const defer_child_scope = try Scope.Defer.create(irb.comp, parent_scope, kind, defer_expr_scope);
            child_scope = &defer_child_scope.base;
            continue;
        }
        const statement_value = try irb.genNodeRecursive(statement_node, child_scope, .None);

        is_continuation_unreachable = statement_value.isNoReturn();
        if (is_continuation_unreachable) {
            // keep the last noreturn statement value around in case we need to return it
            noreturn_return_value = statement_value;
        }

        if (statement_value.cast(Inst.DeclVar)) |decl_var| {
            // variable declarations start a new scope
            child_scope = decl_var.params.variable.child_scope;
        } else if (!is_continuation_unreachable) {
            // this statement's value must be void
            _ = try irb.build(
                Inst.CheckVoidStmt,
                child_scope,
                Span{
                    .first = statement_node.firstToken(),
                    .last = statement_node.lastToken(),
                },
                Inst.CheckVoidStmt.Params{ .target = statement_value },
            );
        }
    }

    if (is_continuation_unreachable) {
        assert(noreturn_return_value != null);
        // No label, or nothing ever branched to the end block: the block's
        // result is the noreturn value itself.
        if (block.label == null or block_scope.incoming_blocks.len == 0) {
            return noreturn_return_value.?;
        }

        try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
        return irb.build(Inst.Phi, parent_scope, Span.token(block.rbrace), Inst.Phi.Params{
            .incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
            .incoming_values = block_scope.incoming_values.toOwnedSlice(),
        });
    }

    // Falling off the end of a labeled block contributes void to the Phi.
    if (block.label) |label| {
        try block_scope.incoming_blocks.append(irb.current_basic_block);
        try block_scope.incoming_values.append(
            try irb.buildConstVoid(parent_scope, Span.token(block.rbrace), true),
        );
        _ = try irb.genDefersForBlock(child_scope, outer_block_scope, .ScopeExit);

        _ = try irb.buildGen(Inst.Br, parent_scope, Span.token(block.rbrace), Inst.Br.Params{
            .dest_block = block_scope.end_block,
            .is_comptime = block_scope.is_comptime,
        });

        try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);

        return irb.build(Inst.Phi, parent_scope, Span.token(block.rbrace), Inst.Phi.Params{
            .incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
            .incoming_values = block_scope.incoming_values.toOwnedSlice(),
        });
    }

    // Unlabeled block that reaches its end: run defers, result is void.
    _ = try irb.genDefersForBlock(child_scope, outer_block_scope, .ScopeExit);
    return irb.buildConstVoid(child_scope, Span.token(block.rbrace), true);
}
|
||||
|
||||
/// Generate IR for break/continue/return; only `return` is implemented.
/// For return: validates the context (inside a function, not inside a
/// defer expression), generates the return value (void for a bare
/// `return;`), then runs defers. When errdefers or error-return tracing
/// are active, the value is tested for error-ness and the CFG branches so
/// that errdefers and SaveErrRetAddr execute only on the error path.
pub fn genControlFlowExpr(
    irb: *Builder,
    control_flow_expr: *ast.Node.ControlFlowExpression,
    scope: *Scope,
    lval: LVal,
) !*Inst {
    switch (control_flow_expr.kind) {
        .Break => |arg| return error.Unimplemented,
        .Continue => |arg| return error.Unimplemented,
        .Return => {
            const src_span = Span.token(control_flow_expr.ltoken);
            if (scope.findFnDef() == null) {
                try irb.comp.addCompileError(
                    irb.code.tree_scope,
                    src_span,
                    "return expression outside function definition",
                    .{},
                );
                return error.SemanticAnalysisFailed;
            }

            if (scope.findDeferExpr()) |scope_defer_expr| {
                // Report only once per defer expression.
                if (!scope_defer_expr.reported_err) {
                    try irb.comp.addCompileError(
                        irb.code.tree_scope,
                        src_span,
                        "cannot return from defer expression",
                        .{},
                    );
                    scope_defer_expr.reported_err = true;
                }
                return error.SemanticAnalysisFailed;
            }

            const outer_scope = irb.begin_scope.?;
            const return_value = if (control_flow_expr.rhs) |rhs| blk: {
                break :blk try irb.genNodeRecursive(rhs, scope, .None);
            } else blk: {
                // bare `return;` returns void
                break :blk try irb.buildConstVoid(scope, src_span, true);
            };

            const defer_counts = irb.countDefers(scope, outer_scope);
            const have_err_defers = defer_counts.error_exit != 0;
            if (have_err_defers or irb.comp.have_err_ret_tracing) {
                const err_block = try irb.createBasicBlock(scope, "ErrRetErr");
                const ok_block = try irb.createBasicBlock(scope, "ErrRetOk");
                if (!have_err_defers) {
                    // Only tracing is active: ordinary defers can run
                    // unconditionally before the branch.
                    _ = try irb.genDefersForBlock(scope, outer_scope, .ScopeExit);
                }

                const is_err = try irb.build(
                    Inst.TestErr,
                    scope,
                    src_span,
                    Inst.TestErr.Params{ .target = return_value },
                );

                const err_is_comptime = try irb.buildTestCompTime(scope, src_span, is_err);

                _ = try irb.buildGen(Inst.CondBr, scope, src_span, Inst.CondBr.Params{
                    .condition = is_err,
                    .then_block = err_block,
                    .else_block = ok_block,
                    .is_comptime = err_is_comptime,
                });

                const ret_stmt_block = try irb.createBasicBlock(scope, "RetStmt");

                // Error path: errdefers (plus scope defers) run, and the
                // error return address is recorded if tracing is on.
                try irb.setCursorAtEndAndAppendBlock(err_block);
                if (have_err_defers) {
                    _ = try irb.genDefersForBlock(scope, outer_scope, .ErrorExit);
                }
                if (irb.comp.have_err_ret_tracing and !irb.isCompTime(scope)) {
                    _ = try irb.build(Inst.SaveErrRetAddr, scope, src_span, Inst.SaveErrRetAddr.Params{});
                }
                _ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params{
                    .dest_block = ret_stmt_block,
                    .is_comptime = err_is_comptime,
                });

                // Success path: only ordinary defers run.
                try irb.setCursorAtEndAndAppendBlock(ok_block);
                if (have_err_defers) {
                    _ = try irb.genDefersForBlock(scope, outer_scope, .ScopeExit);
                }
                _ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params{
                    .dest_block = ret_stmt_block,
                    .is_comptime = err_is_comptime,
                });

                try irb.setCursorAtEndAndAppendBlock(ret_stmt_block);
                return irb.genAsyncReturn(scope, src_span, return_value, false);
            } else {
                _ = try irb.genDefersForBlock(scope, outer_scope, .ScopeExit);
                return irb.genAsyncReturn(scope, src_span, return_value, false);
            }
        },
    }
}
|
||||
|
||||
/// Generate IR for an identifier reference. Resolution order: primitive
/// type names first, then declarations/variables found by findIdent;
/// anything else is an "unknown identifier" compile error.
pub fn genIdentifier(irb: *Builder, identifier: *ast.Node.Identifier, scope: *Scope, lval: LVal) !*Inst {
    const src_span = Span.token(identifier.token);
    const name = irb.code.tree_scope.tree.tokenSlice(identifier.token);

    //if (buf_eql_str(variable_name, "_") && lval == LValPtr) {
    //    IrInstructionConst *const_instruction = ir_build_instruction<IrInstructionConst>(irb, scope, node);
    //    const_instruction->base.value.type = get_pointer_to_type(irb->codegen,
    //            irb->codegen->builtin_types.entry_void, false);
    //    const_instruction->base.value.special = ConstValSpecialStatic;
    //    const_instruction->base.value.data.x_ptr.special = ConstPtrSpecialDiscard;
    //    return &const_instruction->base;
    //}

    if (irb.comp.getPrimitiveType(name)) |result| {
        if (result) |primitive_type| {
            defer primitive_type.base.deref(irb.comp);
            switch (lval) {
                // if (lval == LValPtr) {
                //     return ir_build_ref(irb, scope, node, value, false, false);
                .Ptr => return error.Unimplemented,
                .None => return irb.buildConstValue(scope, src_span, &primitive_type.base),
            }
        }
    } else |err| switch (err) {
        // NOTE(review): presumably an out-of-range bit width in an
        // integer-type name like "u99999999999" — confirm in getPrimitiveType.
        error.Overflow => {
            try irb.comp.addCompileError(irb.code.tree_scope, src_span, "integer too large", .{});
            return error.SemanticAnalysisFailed;
        },
        error.OutOfMemory => return error.OutOfMemory,
    }

    switch (irb.findIdent(scope, name)) {
        .Decl => |decl| {
            return irb.build(Inst.DeclRef, scope, src_span, Inst.DeclRef.Params{
                .decl = decl,
                .lval = lval,
            });
        },
        .VarScope => |var_scope| {
            // Variables always resolve through their pointer; load it
            // unless the caller asked for the pointer itself.
            const var_ptr = try irb.build(Inst.VarPtr, scope, src_span, Inst.VarPtr.Params{ .var_scope = var_scope });
            switch (lval) {
                .Ptr => return var_ptr,
                .None => {
                    return irb.build(Inst.LoadPtr, scope, src_span, Inst.LoadPtr.Params{ .target = var_ptr });
                },
            }
        },
        .NotFound => {},
    }

    //if (node->owner->any_imports_failed) {
    //    // skip the error message since we had a failing import in this file
    //    // if an import breaks we don't need redundant undeclared identifier errors
    //    return irb->codegen->invalid_instruction;
    //}

    // TODO put a variable of same name with invalid type in global scope
    // so that future references to this same name will find a variable with an invalid type

    // NOTE(review): `{}` is used to format `name` ([]const u8) while dump()
    // uses `{s}` — consider `{s}` here; confirm against this std.fmt version.
    try irb.comp.addCompileError(irb.code.tree_scope, src_span, "unknown identifier '{}'", .{name});
    return error.SemanticAnalysisFailed;
}
|
||||
|
||||
/// Result of countDefers: how many defer scopes of each kind lie between
/// two scopes.
const DeferCounts = struct {
    /// Number of plain `defer` scopes (always run on scope exit).
    scope_exit: usize,
    /// Number of `errdefer` scopes (run only on the error path).
    error_exit: usize,
};
|
||||
|
||||
/// Count defer scopes on the walk from `inner_scope` up to (but not
/// including) `outer_scope`, bucketed by kind. The walk also stops at a
/// FnDef scope or the root of the scope chain. `irb` is unused but kept
/// for method-call symmetry with the other Builder helpers.
fn countDefers(irb: *Builder, inner_scope: *Scope, outer_scope: *Scope) DeferCounts {
    var counts = DeferCounts{ .scope_exit = 0, .error_exit = 0 };

    var it = inner_scope;
    while (it != outer_scope) {
        switch (it.id) {
            .Defer => {
                const defer_scope = @fieldParentPtr(Scope.Defer, "base", it);
                switch (defer_scope.kind) {
                    .ScopeExit => counts.scope_exit += 1,
                    .ErrorExit => counts.error_exit += 1,
                }
            },
            .FnDef => break,

            .CompTime,
            .Block,
            .Decls,
            .Root,
            .Var,
            => {},

            .DeferExpr => unreachable,
            .AstTree => unreachable,
        }
        it = it.parent orelse break;
    }
    return counts;
}
|
||||
|
||||
/// Generate the deferred expressions for every Defer scope between
/// `inner_scope` and `outer_scope` (or a FnDef/Decls/Root boundary).
/// ScopeExit defers always run; ErrorExit defers run only when gen_kind
/// is .ErrorExit. Returns true if any generated defer expression is
/// noreturn. Non-noreturn defer expressions get a CheckVoidStmt.
fn genDefersForBlock(
    irb: *Builder,
    inner_scope: *Scope,
    outer_scope: *Scope,
    gen_kind: Scope.Defer.Kind,
) !bool {
    var scope = inner_scope;
    var is_noreturn = false;
    while (true) {
        switch (scope.id) {
            .Defer => {
                const defer_scope = @fieldParentPtr(Scope.Defer, "base", scope);
                const generate = switch (defer_scope.kind) {
                    .ScopeExit => true,
                    .ErrorExit => gen_kind == .ErrorExit,
                };
                if (generate) {
                    const defer_expr_scope = defer_scope.defer_expr_scope;
                    const instruction = try irb.genNodeRecursive(
                        defer_expr_scope.expr_node,
                        &defer_expr_scope.base,
                        .None,
                    );
                    if (instruction.isNoReturn()) {
                        is_noreturn = true;
                    } else {
                        _ = try irb.build(
                            Inst.CheckVoidStmt,
                            &defer_expr_scope.base,
                            Span.token(defer_expr_scope.expr_node.lastToken()),
                            Inst.CheckVoidStmt.Params{ .target = instruction },
                        );
                    }
                }
            },
            .FnDef,
            .Decls,
            .Root,
            => return is_noreturn,

            .CompTime,
            .Block,
            .Var,
            => scope = scope.parent orelse return is_noreturn,

            .DeferExpr => unreachable,
            .AstTree => unreachable,
        }
    }
}
|
||||
|
||||
/// Adapt a freshly built value instruction to the requested l-value kind:
/// returned unchanged for .None, wrapped in a const, non-volatile Ref
/// (yielding a pointer to the value) for .Ptr.
pub fn lvalWrap(irb: *Builder, scope: *Scope, instruction: *Inst, lval: LVal) !*Inst {
    if (lval == .None) return instruction;

    // We needed a pointer to a value, but we got a value. Build an
    // instruction that takes a const pointer to it.
    return irb.build(Inst.Ref, scope, instruction.span, Inst.Ref.Params{
        .target = instruction,
        .mut = .Const,
        .volatility = .Non,
    });
}
|
||||
|
||||
/// Allocator for IR instructions and basic blocks; everything it hands
/// out is freed in one shot by Code.destroy.
fn arena(self: *Builder) *Allocator {
    return &self.code.arena.allocator;
}
|
||||
|
||||
/// Allocate an instruction of concrete type I from the arena, initialize
/// its base Inst (debug id, initial IrVal per I.ir_val_init, current
/// basic block as owner), ref every instruction/basic block reachable
/// through its params, and append it to the current basic block.
/// Returns the base Inst pointer.
fn buildExtra(
    self: *Builder,
    comptime I: type,
    scope: *Scope,
    span: Span,
    params: I.Params,
    is_generated: bool,
) !*Inst {
    const inst = try self.arena().create(I);
    inst.* = I{
        .base = Inst{
            .id = Inst.typeToId(I),
            .is_generated = is_generated,
            .scope = scope,
            .debug_id = self.next_debug_id,
            // Initial value comes from the instruction type's declared
            // IrVal.Init: unknown, or a known noreturn/void value.
            .val = switch (I.ir_val_init) {
                .Unknown => IrVal.Unknown,
                .NoReturn => IrVal{ .KnownValue = &Value.NoReturn.get(self.comp).base },
                .Void => IrVal{ .KnownValue = &Value.Void.get(self.comp).base },
            },
            .ref_count = 0,
            .span = span,
            .child = null,
            .parent = null,
            .llvm_value = undefined,
            .owner_bb = self.current_basic_block,
        },
        .params = params,
    };

    // Look at the params and ref() other instructions
    inline for (@typeInfo(I.Params).Struct.fields) |f| {
        switch (f.field_type) {
            *Inst => @field(inst.params, f.name).ref(self),
            *BasicBlock => @field(inst.params, f.name).ref(self),
            ?*Inst => if (@field(inst.params, f.name)) |other| other.ref(self),
            []*Inst => {
                // TODO https://github.com/ziglang/zig/issues/1269
                for (@field(inst.params, f.name)) |other|
                    other.ref(self);
            },
            []*BasicBlock => {
                // TODO https://github.com/ziglang/zig/issues/1269
                for (@field(inst.params, f.name)) |other|
                    other.ref(self);
            },
            // Plain-data param types: nothing to ref.
            Type.Pointer.Mut,
            Type.Pointer.Vol,
            Type.Pointer.Size,
            LVal,
            *Decl,
            *Scope.Var,
            => {},
            // it's ok to add more types here, just make sure that
            // any instructions and basic blocks are ref'd appropriately
            else => @compileError("unrecognized type in Params: " ++ @typeName(f.field_type)),
        }
    }

    self.next_debug_id += 1;
    try self.current_basic_block.instruction_list.append(&inst.base);
    return &inst.base;
}
|
||||
|
||||
/// Build an instruction that corresponds to user-written source
/// (is_generated = false).
fn build(
    self: *Builder,
    comptime I: type,
    scope: *Scope,
    span: Span,
    params: I.Params,
) !*Inst {
    return self.buildExtra(I, scope, span, params, false);
}

/// Build a compiler-generated instruction (is_generated = true), e.g. the
/// implicit branches and phis emitted for blocks and returns.
fn buildGen(
    self: *Builder,
    comptime I: type,
    scope: *Scope,
    span: Span,
    params: I.Params,
) !*Inst {
    return self.buildExtra(I, scope, span, params, true);
}
|
||||
|
||||
/// Builds a Const instruction whose value is the comptime-known bool `x`.
fn buildConstBool(self: *Builder, scope: *Scope, span: Span, x: bool) !*Inst {
    const inst = try self.build(Inst.Const, scope, span, Inst.Const.Params{});
    inst.val = IrVal{ .KnownValue = &Value.Bool.get(self.comp, x).base };
    return inst;
}
|
||||
|
||||
/// Builds a Const instruction whose value is the comptime-known `void` value.
/// `is_generated` marks the instruction as compiler-generated rather than user code.
fn buildConstVoid(self: *Builder, scope: *Scope, span: Span, is_generated: bool) !*Inst {
    const inst = try self.buildExtra(Inst.Const, scope, span, Inst.Const.Params{}, is_generated);
    inst.val = IrVal{ .KnownValue = &Value.Void.get(self.comp).base };
    return inst;
}
|
||||
|
||||
/// Builds a Const instruction holding `v`. Takes a new reference to `v`
/// via `getRef`, so the caller's reference is unaffected.
fn buildConstValue(self: *Builder, scope: *Scope, span: Span, v: *Value) !*Inst {
    const inst = try self.build(Inst.Const, scope, span, Inst.Const.Params{});
    inst.val = IrVal{ .KnownValue = v.getRef() };
    return inst;
}
|
||||
|
||||
/// If the code is explicitly set to be comptime, then builds a const bool,
/// otherwise builds a TestCompTime instruction.
fn buildTestCompTime(self: *Builder, scope: *Scope, span: Span, target: *Inst) !*Inst {
    if (self.isCompTime(scope)) {
        // Known comptime context: the answer is statically `true`.
        return self.buildConstBool(scope, span, true);
    } else {
        return self.build(
            Inst.TestCompTime,
            scope,
            span,
            Inst.TestCompTime.Params{ .target = target },
        );
    }
}
|
||||
|
||||
/// Generates a return of `result`: first records the value for implicit
/// return type inference, then emits a Return instruction. The async
/// return path is not implemented yet and yields `error.Unimplemented`.
fn genAsyncReturn(irb: *Builder, scope: *Scope, span: Span, result: *Inst, is_gen: bool) !*Inst {
    _ = try irb.buildGen(
        Inst.AddImplicitReturnType,
        scope,
        span,
        Inst.AddImplicitReturnType.Params{ .target = result },
    );

    if (!irb.is_async) {
        return irb.buildExtra(
            Inst.Return,
            scope,
            span,
            Inst.Return.Params{ .return_value = result },
            is_gen,
        );
    }
    // TODO: async function returns.
    return error.Unimplemented;
}
|
||||
|
||||
/// Result of resolving an identifier against a scope chain (see `findIdent`).
const Ident = union(enum) {
    /// The name was not found in any enclosing scope.
    NotFound,
    /// The name resolves to a container-level declaration.
    Decl: *Decl,
    /// The name resolves to a local variable scope.
    VarScope: *Scope.Var,
};
|
||||
|
||||
/// Resolves `name` by walking the scope chain from `scope` outward.
/// Local variable scopes are matched by name; Decls scopes are consulted
/// under a read lock. Returns `.NotFound` once the root scope is reached.
fn findIdent(irb: *Builder, scope: *Scope, name: []const u8) Ident {
    var s = scope;
    while (true) {
        switch (s.id) {
            .Root => return .NotFound,
            .Decls => {
                const decls = @fieldParentPtr(Scope.Decls, "base", s);
                const locked_table = decls.table.acquireRead();
                defer locked_table.release();
                if (locked_table.value.get(name)) |entry| {
                    return Ident{ .Decl = entry.value };
                }
            },
            .Var => {
                const var_scope = @fieldParentPtr(Scope.Var, "base", s);
                if (mem.eql(u8, var_scope.name, name)) {
                    return Ident{ .VarScope = var_scope };
                }
            },
            else => {},
        }
        // Non-root scopes always have a parent; `.Root` returned above.
        s = s.parent.?;
    }
}
|
||||
};
|
||||
|
||||
const Analyze = struct {
|
||||
irb: Builder,
|
||||
old_bb_index: usize,
|
||||
const_predecessor_bb: ?*BasicBlock,
|
||||
parent_basic_block: *BasicBlock,
|
||||
instruction_index: usize,
|
||||
src_implicit_return_type_list: std.ArrayList(*Inst),
|
||||
explicit_return_type: ?*Type,
|
||||
|
||||
pub const Error = error{
    /// This is only for when we have already reported a compile error. It is the poison value.
    SemanticAnalysisFailed,

    /// This is a placeholder - it is useful to use instead of panicking but once the compiler is
    /// done this error code will be removed.
    Unimplemented,

    OutOfMemory,
};
|
||||
|
||||
/// Creates an Analyze pass over IR built for `tree_scope`.
/// `parent_basic_block` and `instruction_index` are left undefined here;
/// they are set by `startBasicBlock` before any instruction is analyzed.
pub fn init(comp: *Compilation, tree_scope: *Scope.AstTree, explicit_return_type: ?*Type) !Analyze {
    var irb = try Builder.init(comp, tree_scope, null);
    errdefer irb.abort();

    return Analyze{
        .irb = irb,
        .old_bb_index = 0,
        .const_predecessor_bb = null,
        .parent_basic_block = undefined, // initialized with startBasicBlock
        .instruction_index = undefined, // initialized with startBasicBlock
        .src_implicit_return_type_list = std.ArrayList(*Inst).init(irb.arena()),
        .explicit_return_type = explicit_return_type,
    };
}
|
||||
|
||||
/// Aborts analysis, releasing the underlying builder's resources.
pub fn abort(self: *Analyze) void {
    self.irb.abort();
}
|
||||
|
||||
/// Returns the analyzed counterpart of `old_bb`, creating it on demand.
/// An existing child is reused unless it was created for the same
/// referencing instruction (`ref_old_instruction`), in which case a fresh
/// block is made so each reference gets its own copy.
pub fn getNewBasicBlock(self: *Analyze, old_bb: *BasicBlock, ref_old_instruction: ?*Inst) !*BasicBlock {
    if (old_bb.child) |child| {
        if (ref_old_instruction == null or child.ref_instruction != ref_old_instruction)
            return child;
    }

    const new_bb = try self.irb.createBasicBlock(old_bb.scope, old_bb.name_hint);
    new_bb.linkToParent(old_bb);
    new_bb.ref_instruction = ref_old_instruction;
    return new_bb;
}
|
||||
|
||||
/// Positions the analyzer at the start of `old_bb` so subsequent analysis
/// iterates its instructions from index 0.
pub fn startBasicBlock(self: *Analyze, old_bb: *BasicBlock, const_predecessor_bb: ?*BasicBlock) void {
    self.instruction_index = 0;
    self.parent_basic_block = old_bb;
    self.const_predecessor_bb = const_predecessor_bb;
}
|
||||
|
||||
/// Finishes analysis of the current basic block: commits it to the new
/// code, reports unreachable trailing user instructions, then advances to
/// the next old basic block that has a child with an empty instruction
/// list, restarting from the beginning once if necessary.
pub fn finishBasicBlock(ira: *Analyze, old_code: *Code) !void {
    try ira.irb.code.basic_block_list.append(ira.irb.current_basic_block);
    ira.instruction_index += 1;

    // Any remaining non-generated instructions are user code after a
    // terminator and therefore unreachable.
    while (ira.instruction_index < ira.parent_basic_block.instruction_list.len) {
        const next_instruction = ira.parent_basic_block.instruction_list.at(ira.instruction_index);

        if (!next_instruction.is_generated) {
            try ira.addCompileError(next_instruction.span, "unreachable code", .{});
            break;
        }
        ira.instruction_index += 1;
    }

    ira.old_bb_index += 1;

    var need_repeat = true;
    while (true) {
        while (ira.old_bb_index < old_code.basic_block_list.len) {
            const old_bb = old_code.basic_block_list.at(ira.old_bb_index);
            const new_bb = old_bb.child orelse {
                ira.old_bb_index += 1;
                continue;
            };
            if (new_bb.instruction_list.len != 0) {
                // Already analyzed; skip it.
                ira.old_bb_index += 1;
                continue;
            }
            ira.irb.current_basic_block = new_bb;

            ira.startBasicBlock(old_bb, null);
            return;
        }
        if (!need_repeat)
            return;
        // One full rescan: blocks may have gained children during this pass.
        need_repeat = false;
        ira.old_bb_index = 0;
        continue;
    }
}
|
||||
|
||||
/// Reports a compile error at `span`, attributed to this code's tree scope.
fn addCompileError(self: *Analyze, span: Span, comptime fmt: []const u8, args: var) !void {
    return self.irb.comp.addCompileError(self.irb.code.tree_scope, span, fmt, args);
}
|
||||
|
||||
/// Resolves the common type of `peers`, optionally guided by `expected_type`.
fn resolvePeerTypes(self: *Analyze, expected_type: ?*Type, peers: []const *Inst) Analyze.Error!*Type {
    // TODO actual implementation; currently always yields `void`.
    return &Type.Void.get(self.irb.comp).base;
}
|
||||
|
||||
/// Implicitly casts `target` to `optional_dest_type` if one is given.
/// No-op when the types already match or the source is `noreturn`.
fn implicitCast(self: *Analyze, target: *Inst, optional_dest_type: ?*Type) Analyze.Error!*Inst {
    const dest_type = optional_dest_type orelse return target;
    const from_type = target.getKnownType();
    if (from_type == dest_type or from_type.id == .NoReturn) return target;
    return self.analyzeCast(target, target, dest_type);
}
|
||||
|
||||
/// Analyzes a cast of `target` to `dest_type`, reporting errors against
/// `source_instr`'s span. Currently only comptime-known integer casts are
/// implemented; everything else is a compile error.
fn analyzeCast(ira: *Analyze, source_instr: *Inst, target: *Inst, dest_type: *Type) !*Inst {
    const from_type = target.getKnownType();

    // TODO port the remaining cast rules from stage1's ir_analyze_cast:
    // const-only casts, integer/float widening, [N]T and *[N]T to slices,
    // T to ?T, null to optional, payload/error-set to error union,
    // comptime number literals to concrete types, union<->enum tag casts,
    // *T to *[1]T, zero-bit T to *T, undefined to anything, and
    // value-to-const-pointer coercion.

    // cast from comptime-known integer to another integer where the value fits
    if (target.isCompTime() and (from_type.id == .Int or from_type.id == .ComptimeInt)) cast: {
        const target_val = target.val.KnownValue;
        const from_int = &target_val.cast(Value.Int).?.big_int;
        const fits = fits: {
            if (dest_type.cast(Type.ComptimeInt)) |ctint| {
                // comptime_int has unbounded range.
                break :fits true;
            }
            if (dest_type.cast(Type.Int)) |int| {
                break :fits from_int.fitsInTwosComp(int.key.is_signed, int.key.bit_count);
            }
            // Destination is not an integer type; fall through to the error.
            break :cast;
        };
        if (!fits) {
            try ira.addCompileError(source_instr.span, "integer value '{}' cannot be stored in type '{}'", .{
                from_int,
                dest_type.name,
            });
            return error.SemanticAnalysisFailed;
        }

        // Copy the value and retag it with the destination type.
        const new_val = try target.copyVal(ira.irb.comp);
        new_val.setType(dest_type, ira.irb.comp);
        return ira.irb.buildConstValue(source_instr.scope, source_instr.span, new_val);
    }

    try ira.addCompileError(source_instr.span, "expected type '{}', found '{}'", .{
        dest_type.name,
        from_type.name,
    });
    return error.SemanticAnalysisFailed;
}
|
||||
|
||||
/// Returns the comptime-known value of `target`, or null when not
/// comptime-known; `undefined` values are acceptable. Not implemented yet.
fn getCompTimeValOrNullUndefOk(self: *Analyze, target: *Inst) ?*Value {
    @panic("TODO");
}
|
||||
|
||||
fn getCompTimeRef(
|
||||
self: *Analyze,
|
||||
/// This struct owns the `Value` memory. When the struct is deallocated,
|
||||
/// so is the `Value`. The value of a constant must be copied into
|
||||
/// a memory location for the value to survive after a const instruction.
|
||||
pub const Constant = struct {
|
||||
base: Inst = Inst{ .tag = .constant },
|
||||
value: *Value,
|
||||
ptr_mut: Value.Ptr.Mut,
|
||||
mut: Type.Pointer.Mut,
|
||||
volatility: Type.Pointer.Vol,
|
||||
) Analyze.Error!*Inst {
|
||||
return error.Unimplemented;
|
||||
}
|
||||
};
|
||||
|
||||
/// Pointer-to-integer conversion instruction.
pub const PtrToInt = struct {
    base: Inst = Inst{ .tag = .ptrtoint },
};
|
||||
|
||||
/// Instruction producing a pointer to a field.
pub const FieldPtr = struct {
    base: Inst = Inst{ .tag = .fieldptr },
};
|
||||
|
||||
/// Pointer dereference instruction.
pub const Deref = struct {
    base: Inst = Inst{ .tag = .deref },
};
|
||||
|
||||
/// Inline assembly instruction.
pub const Assembly = struct {
    base: Inst = Inst{ .tag = .@"asm" },
};
|
||||
|
||||
/// Unreachable terminator instruction.
pub const Unreach = struct {
    base: Inst = Inst{ .tag = .unreach },
};
|
||||
};
|
||||
|
||||
pub fn gen(
|
||||
comp: *Compilation,
|
||||
body_node: *ast.Node,
|
||||
tree_scope: *Scope.AstTree,
|
||||
scope: *Scope,
|
||||
) !*Code {
|
||||
var irb = try Builder.init(comp, tree_scope, scope);
|
||||
errdefer irb.abort();
|
||||
/// A parse error message tied to a byte offset in the source.
pub const ErrorMsg = struct {
    byte_offset: usize,
    msg: []const u8,
};
|
||||
|
||||
const entry_block = try irb.createBasicBlock(scope, "Entry");
|
||||
entry_block.ref(&irb); // Entry block gets a reference because we enter it to begin.
|
||||
try irb.setCursorAtEndAndAppendBlock(entry_block);
|
||||
/// Result of parsing textual IR: top-level declarations plus any
/// accumulated parse errors.
pub const Tree = struct {
    decls: std.ArrayList(*Inst),
    errors: std.ArrayList(ErrorMsg),
};
|
||||
|
||||
const result = try irb.genNode(body_node, scope, .None);
|
||||
if (!result.isNoReturn()) {
|
||||
// no need for save_err_ret_addr because this cannot return error
|
||||
_ = try irb.genAsyncReturn(scope, Span.token(body_node.lastToken()), result, true);
|
||||
}
|
||||
/// Mutable state threaded through the IR parser: the allocator, the
/// current byte index `i` into `source`, and the error list to append to.
const ParseContext = struct {
    allocator: *Allocator,
    i: usize,
    source: []const u8,
    errors: *std.ArrayList(ErrorMsg),
};
|
||||
|
||||
return irb.finish();
|
||||
/// Parses textual IR from `source` into a Tree. Parse failures are
/// recorded in `tree.errors` rather than returned; only allocation
/// failure propagates as an error. Caller owns the returned Tree's lists.
pub fn parse(allocator: *Allocator, source: []const u8) Allocator.Error!Tree {
    var tree: Tree = .{
        .decls = std.ArrayList(*Inst).init(allocator),
        .errors = std.ArrayList(ErrorMsg).init(allocator),
    };
    var ctx: ParseContext = .{
        .allocator = allocator,
        .i = 0,
        .source = source,
        .errors = &tree.errors,
    };
    parseRoot(&ctx, &tree) catch |err| switch (err) {
        error.ParseFailure => {
            // A failure must have produced at least one diagnostic.
            assert(tree.errors.items.len != 0);
        },
        else => |e| return e,
    };
    return tree;
}
|
||||
|
||||
pub fn analyze(comp: *Compilation, old_code: *Code, expected_type: ?*Type) !*Code {
|
||||
const old_entry_bb = old_code.basic_block_list.at(0);
|
||||
|
||||
var ira = try Analyze.init(comp, old_code.tree_scope, expected_type);
|
||||
errdefer ira.abort();
|
||||
|
||||
const new_entry_bb = try ira.getNewBasicBlock(old_entry_bb, null);
|
||||
new_entry_bb.ref(&ira.irb);
|
||||
|
||||
ira.irb.current_basic_block = new_entry_bb;
|
||||
|
||||
ira.startBasicBlock(old_entry_bb, null);
|
||||
|
||||
while (ira.old_bb_index < old_code.basic_block_list.len) {
|
||||
const old_instruction = ira.parent_basic_block.instruction_list.at(ira.instruction_index);
|
||||
|
||||
if (old_instruction.ref_count == 0 and !old_instruction.hasSideEffects()) {
|
||||
ira.instruction_index += 1;
|
||||
pub fn parseRoot(ctx: *ParseContext, tree: *Tree) !void {
|
||||
// The IR format is designed so that it can be tokenized and parsed at the same time.
|
||||
var global_name_map = std.StringHashMap(usize).init(ctx.allocator);
|
||||
while (ctx.i < ctx.source.len) : (ctx.i += 1) switch (ctx.source[ctx.i]) {
|
||||
';' => _ = try skipToAndOver(ctx, '\n'),
|
||||
'@' => {
|
||||
const at_start = ctx.i;
|
||||
const ident = try skipToAndOver(ctx, ' ');
|
||||
var ty: ?*Value = null;
|
||||
if (eatByte(ctx, ':')) {
|
||||
ty = try parseType(ctx);
|
||||
skipSpace(ctx);
|
||||
}
|
||||
try requireEatBytes(ctx, "= ");
|
||||
const inst = try parseInstruction(ctx);
|
||||
const ident_index = tree.decls.items.len;
|
||||
if (try global_name_map.put(ident, ident_index)) |_| {
|
||||
return parseError(ctx, "redefinition of identifier '{}'", .{ident});
|
||||
}
|
||||
try tree.decls.append(inst);
|
||||
continue;
|
||||
}
|
||||
|
||||
const return_inst = try old_instruction.analyze(&ira);
|
||||
assert(return_inst.val != IrVal.Unknown); // at least the type should be known at this point
|
||||
return_inst.linkToParent(old_instruction);
|
||||
// Note: if we ever modify the above to handle error.CompileError by continuing analysis,
|
||||
// then here we want to check if ira.isCompTime() and return early if true
|
||||
|
||||
if (return_inst.isNoReturn()) {
|
||||
try ira.finishBasicBlock(old_code);
|
||||
continue;
|
||||
}
|
||||
|
||||
ira.instruction_index += 1;
|
||||
}
|
||||
|
||||
if (ira.src_implicit_return_type_list.len == 0) {
|
||||
ira.irb.code.return_type = &Type.NoReturn.get(comp).base;
|
||||
return ira.irb.finish();
|
||||
}
|
||||
|
||||
ira.irb.code.return_type = try ira.resolvePeerTypes(expected_type, ira.src_implicit_return_type_list.span());
|
||||
return ira.irb.finish();
|
||||
},
|
||||
' ', '\n' => continue,
|
||||
else => |byte| return parseError(ctx, "unexpected byte: '{c}'", .{byte}),
|
||||
};
|
||||
}
|
||||
|
||||
fn eatByte(ctx: *ParseContext, byte: u8) bool {
|
||||
if (ctx.i >= ctx.source.len) return false;
|
||||
if (ctx.source[ctx.i] != byte) return false;
|
||||
ctx.i += 1;
|
||||
return true;
|
||||
}
|
||||
|
||||
fn skipSpace(ctx: *ParseContext) void {
|
||||
while (ctx.i < ctx.source.len and ctx.source[ctx.i] == ' ') : (ctx.i += 1) {}
|
||||
}
|
||||
|
||||
fn requireEatBytes(ctx: *ParseContext, bytes: []const u8) !void {
|
||||
if (ctx.i + bytes.len > ctx.source.len)
|
||||
return parseError(ctx, "unexpected EOF", .{});
|
||||
if (!mem.eql(u8, ctx.source[ctx.i..][0..bytes.len], bytes))
|
||||
return parseError(ctx, "expected '{}'", .{bytes});
|
||||
ctx.i += bytes.len;
|
||||
}
|
||||
|
||||
fn skipToAndOver(ctx: *ParseContext, byte: u8) ![]const u8 {
|
||||
const start_i = ctx.i;
|
||||
while (ctx.i < ctx.source.len) : (ctx.i += 1) {
|
||||
if (ctx.source[ctx.i] == byte) {
|
||||
const result = ctx.source[start_i..ctx.i];
|
||||
ctx.i += 1;
|
||||
return result;
|
||||
}
|
||||
}
|
||||
return parseError(ctx, "unexpected EOF", .{});
|
||||
}
|
||||
|
||||
fn parseError(ctx: *ParseContext, comptime format: []const u8, args: var) error{ ParseFailure, OutOfMemory } {
|
||||
const msg = try std.fmt.allocPrint(ctx.allocator, format, args);
|
||||
(try ctx.errors.addOne()).* = .{
|
||||
.byte_offset = ctx.i,
|
||||
.msg = msg,
|
||||
};
|
||||
return error.ParseFailure;
|
||||
}
|
||||
|
||||
fn parseType(ctx: *ParseContext) !*Value {
|
||||
return parseError(ctx, "TODO parse type", .{});
|
||||
}
|
||||
|
||||
fn parseInstruction(ctx: *ParseContext) !*Inst {
|
||||
switch (ctx.source[ctx.i]) {
|
||||
'"' => return parseStringLiteralConst(ctx),
|
||||
'0'...'9' => return parseIntegerLiteralConst(ctx),
|
||||
else => {},
|
||||
}
|
||||
const fn_name = skipToAndOver(ctx, '(');
|
||||
return parseError(ctx, "TODO parse instruction '{}'", .{fn_name});
|
||||
}
|
||||
|
||||
fn parseStringLiteralConst(ctx: *ParseContext) !*Inst {
|
||||
const start = ctx.i;
|
||||
ctx.i += 1; // skip over '"'
|
||||
|
||||
while (ctx.i < ctx.source.len) : (ctx.i += 1) switch (ctx.source[ctx.i]) {
|
||||
'"' => {
|
||||
ctx.i += 1;
|
||||
const span = ctx.source[start..ctx.i];
|
||||
var bad_index: usize = undefined;
|
||||
const parsed = std.zig.parseStringLiteral(ctx.allocator, span, &bad_index) catch |err| switch (err) {
|
||||
error.InvalidCharacter => {
|
||||
ctx.i = start + bad_index;
|
||||
const bad_byte = ctx.source[ctx.i];
|
||||
return parseError(ctx, "invalid string literal character: '{c}'\n", .{bad_byte});
|
||||
},
|
||||
else => |e| return e,
|
||||
};
|
||||
const bytes_val = try ctx.allocator.create(Value.Bytes);
|
||||
bytes_val.* = .{ .data = parsed };
|
||||
const const_inst = try ctx.allocator.create(Inst.Constant);
|
||||
const_inst.* = .{ .value = &bytes_val.base };
|
||||
return &const_inst.base;
|
||||
},
|
||||
'\\' => {
|
||||
ctx.i += 1;
|
||||
if (ctx.i >= ctx.source.len) break;
|
||||
continue;
|
||||
},
|
||||
else => continue,
|
||||
};
|
||||
return parseError(ctx, "unexpected EOF in string literal", .{});
|
||||
}
|
||||
|
||||
fn parseIntegerLiteralConst(ctx: *ParseContext) !*Inst {
|
||||
return parseError(ctx, "TODO parse integer literal", .{});
|
||||
}
|
||||
|
||||
pub fn main() anyerror!void {
|
||||
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
|
||||
defer arena.deinit();
|
||||
const allocator = &arena.allocator;
|
||||
|
||||
const args = try std.process.argsAlloc(allocator);
|
||||
|
||||
const src_path = args[1];
|
||||
const debug_error_trace = true;
|
||||
|
||||
const source = try std.fs.cwd().readFileAlloc(allocator, src_path, std.math.maxInt(u32));
|
||||
|
||||
const tree = try parse(allocator, source);
|
||||
if (tree.errors.items.len != 0) {
|
||||
for (tree.errors.items) |err_msg| {
|
||||
const loc = findLineColumn(source, err_msg.byte_offset);
|
||||
std.debug.warn("{}:{}:{}: error: {}\n", .{ src_path, loc.line + 1, loc.column + 1, err_msg.msg });
|
||||
}
|
||||
if (debug_error_trace) return error.ParseFailure;
|
||||
std.process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
fn findLineColumn(source: []const u8, byte_offset: usize) struct { line: usize, column: usize } {
|
||||
var line: usize = 0;
|
||||
var column: usize = 0;
|
||||
for (source[0..byte_offset]) |byte| {
|
||||
switch (byte) {
|
||||
'\n' => {
|
||||
line += 1;
|
||||
column = 0;
|
||||
},
|
||||
else => {
|
||||
column += 1;
|
||||
},
|
||||
}
|
||||
}
|
||||
return .{ .line = line, .column = column };
|
||||
}
|
||||
|
||||
+63
-564
@@ -1,587 +1,86 @@
|
||||
const std = @import("std");
|
||||
const Scope = @import("scope.zig").Scope;
|
||||
const Compilation = @import("compilation.zig").Compilation;
|
||||
const ObjectFile = @import("codegen.zig").ObjectFile;
|
||||
const llvm = @import("llvm.zig");
|
||||
const ArrayListSentineled = std.ArrayListSentineled;
|
||||
const assert = std.debug.assert;
|
||||
|
||||
/// Values are ref-counted, heap-allocated, and copy-on-write
|
||||
/// If there is only 1 ref then write need not copy
|
||||
/// This is the raw data, with no bookkeeping, no memory awareness,
|
||||
/// no de-duplication, and no type system awareness.
|
||||
/// It's important for this struct to be small.
|
||||
/// It is not copyable since it may contain references to its inner data.
|
||||
pub const Value = struct {
|
||||
id: Id,
|
||||
typ: *Type,
|
||||
ref_count: std.atomic.Int(usize),
|
||||
tag: Tag,
|
||||
|
||||
/// Thread-safe
|
||||
pub fn ref(base: *Value) void {
|
||||
_ = base.ref_count.incr();
|
||||
}
|
||||
pub const Tag = enum {
|
||||
void_type,
|
||||
noreturn_type,
|
||||
bool_type,
|
||||
usize_type,
|
||||
|
||||
/// Thread-safe
|
||||
pub fn deref(base: *Value, comp: *Compilation) void {
|
||||
if (base.ref_count.decr() == 1) {
|
||||
base.typ.base.deref(comp);
|
||||
switch (base.id) {
|
||||
.Type => @fieldParentPtr(Type, "base", base).destroy(comp),
|
||||
.Fn => @fieldParentPtr(Fn, "base", base).destroy(comp),
|
||||
.FnProto => @fieldParentPtr(FnProto, "base", base).destroy(comp),
|
||||
.Void => @fieldParentPtr(Void, "base", base).destroy(comp),
|
||||
.Bool => @fieldParentPtr(Bool, "base", base).destroy(comp),
|
||||
.NoReturn => @fieldParentPtr(NoReturn, "base", base).destroy(comp),
|
||||
.Ptr => @fieldParentPtr(Ptr, "base", base).destroy(comp),
|
||||
.Int => @fieldParentPtr(Int, "base", base).destroy(comp),
|
||||
.Array => @fieldParentPtr(Array, "base", base).destroy(comp),
|
||||
}
|
||||
}
|
||||
}
|
||||
void_value,
|
||||
noreturn_value,
|
||||
bool_true,
|
||||
bool_false,
|
||||
|
||||
pub fn setType(base: *Value, new_type: *Type, comp: *Compilation) void {
|
||||
base.typ.base.deref(comp);
|
||||
new_type.base.ref();
|
||||
base.typ = new_type;
|
||||
}
|
||||
array_sentinel_0_u8_type,
|
||||
single_const_ptr_type,
|
||||
|
||||
pub fn getRef(base: *Value) *Value {
|
||||
base.ref();
|
||||
return base;
|
||||
}
|
||||
|
||||
pub fn cast(base: *Value, comptime T: type) ?*T {
|
||||
if (base.id != @field(Id, @typeName(T))) return null;
|
||||
return @fieldParentPtr(T, "base", base);
|
||||
}
|
||||
|
||||
pub fn dump(base: *const Value) void {
|
||||
std.debug.warn("{}", .{@tagName(base.id)});
|
||||
}
|
||||
|
||||
pub fn getLlvmConst(base: *Value, ofile: *ObjectFile) (error{OutOfMemory}!?*llvm.Value) {
|
||||
switch (base.id) {
|
||||
.Type => unreachable,
|
||||
.Fn => return @fieldParentPtr(Fn, "base", base).getLlvmConst(ofile),
|
||||
.FnProto => return @fieldParentPtr(FnProto, "base", base).getLlvmConst(ofile),
|
||||
.Void => return null,
|
||||
.Bool => return @fieldParentPtr(Bool, "base", base).getLlvmConst(ofile),
|
||||
.NoReturn => unreachable,
|
||||
.Ptr => return @fieldParentPtr(Ptr, "base", base).getLlvmConst(ofile),
|
||||
.Int => return @fieldParentPtr(Int, "base", base).getLlvmConst(ofile),
|
||||
.Array => return @fieldParentPtr(Array, "base", base).getLlvmConst(ofile),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn derefAndCopy(self: *Value, comp: *Compilation) (error{OutOfMemory}!*Value) {
|
||||
if (self.ref_count.get() == 1) {
|
||||
// ( ͡° ͜ʖ ͡°)
|
||||
return self;
|
||||
}
|
||||
|
||||
assert(self.ref_count.decr() != 1);
|
||||
return self.copy(comp);
|
||||
}
|
||||
|
||||
pub fn copy(base: *Value, comp: *Compilation) (error{OutOfMemory}!*Value) {
|
||||
switch (base.id) {
|
||||
.Type => unreachable,
|
||||
.Fn => unreachable,
|
||||
.FnProto => unreachable,
|
||||
.Void => unreachable,
|
||||
.Bool => unreachable,
|
||||
.NoReturn => unreachable,
|
||||
.Ptr => unreachable,
|
||||
.Array => unreachable,
|
||||
.Int => return &(try @fieldParentPtr(Int, "base", base).copy(comp)).base,
|
||||
}
|
||||
}
|
||||
|
||||
pub const Parent = union(enum) {
|
||||
None,
|
||||
BaseStruct: BaseStruct,
|
||||
BaseArray: BaseArray,
|
||||
BaseUnion: *Value,
|
||||
BaseScalar: *Value,
|
||||
|
||||
pub const BaseStruct = struct {
|
||||
val: *Value,
|
||||
field_index: usize,
|
||||
};
|
||||
|
||||
pub const BaseArray = struct {
|
||||
val: *Value,
|
||||
elem_index: usize,
|
||||
};
|
||||
int_u64,
|
||||
int_i64,
|
||||
function,
|
||||
ref,
|
||||
bytes,
|
||||
};
|
||||
|
||||
pub const Id = enum {
|
||||
Type,
|
||||
Fn,
|
||||
Void,
|
||||
Bool,
|
||||
NoReturn,
|
||||
Array,
|
||||
Ptr,
|
||||
Int,
|
||||
FnProto,
|
||||
pub const Int_u64 = struct {
|
||||
base: Value = Value{ .tag = .int_u64 },
|
||||
int: u64,
|
||||
};
|
||||
|
||||
pub const Type = @import("type.zig").Type;
|
||||
|
||||
pub const FnProto = struct {
|
||||
base: Value,
|
||||
|
||||
/// The main external name that is used in the .o file.
|
||||
/// TODO https://github.com/ziglang/zig/issues/265
|
||||
symbol_name: ArrayListSentineled(u8, 0),
|
||||
|
||||
pub fn create(comp: *Compilation, fn_type: *Type.Fn, symbol_name: ArrayListSentineled(u8, 0)) !*FnProto {
|
||||
const self = try comp.gpa().create(FnProto);
|
||||
self.* = FnProto{
|
||||
.base = Value{
|
||||
.id = .FnProto,
|
||||
.typ = &fn_type.base,
|
||||
.ref_count = std.atomic.Int(usize).init(1),
|
||||
},
|
||||
.symbol_name = symbol_name,
|
||||
};
|
||||
fn_type.base.base.ref();
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn destroy(self: *FnProto, comp: *Compilation) void {
|
||||
self.symbol_name.deinit();
|
||||
comp.gpa().destroy(self);
|
||||
}
|
||||
|
||||
pub fn getLlvmConst(self: *FnProto, ofile: *ObjectFile) !?*llvm.Value {
|
||||
const llvm_fn_type = try self.base.typ.getLlvmType(ofile.arena, ofile.context);
|
||||
const llvm_fn = llvm.AddFunction(
|
||||
ofile.module,
|
||||
self.symbol_name.span(),
|
||||
llvm_fn_type,
|
||||
) orelse return error.OutOfMemory;
|
||||
|
||||
// TODO port more logic from codegen.cpp:fn_llvm_value
|
||||
|
||||
return llvm_fn;
|
||||
}
|
||||
pub const Int_i64 = struct {
|
||||
base: Value = Value{ .tag = .int_i64 },
|
||||
int: i64,
|
||||
};
|
||||
|
||||
pub const Fn = struct {
|
||||
base: Value,
|
||||
|
||||
/// The main external name that is used in the .o file.
|
||||
/// TODO https://github.com/ziglang/zig/issues/265
|
||||
symbol_name: ArrayListSentineled(u8, 0),
|
||||
|
||||
/// parent should be the top level decls or container decls
|
||||
fndef_scope: *Scope.FnDef,
|
||||
|
||||
/// parent is scope for last parameter
|
||||
child_scope: *Scope,
|
||||
|
||||
/// parent is child_scope
|
||||
block_scope: ?*Scope.Block,
|
||||
|
||||
/// Path to the object file that contains this function
|
||||
containing_object: ArrayListSentineled(u8, 0),
|
||||
|
||||
link_set_node: *std.TailQueue(?*Value.Fn).Node,
|
||||
|
||||
/// Creates a Fn value with 1 ref
|
||||
/// Takes ownership of symbol_name
|
||||
pub fn create(comp: *Compilation, fn_type: *Type.Fn, fndef_scope: *Scope.FnDef, symbol_name: ArrayListSentineled(u8, 0)) !*Fn {
|
||||
const link_set_node = try comp.gpa().create(Compilation.FnLinkSet.Node);
|
||||
link_set_node.* = Compilation.FnLinkSet.Node{
|
||||
.data = null,
|
||||
.next = undefined,
|
||||
.prev = undefined,
|
||||
};
|
||||
errdefer comp.gpa().destroy(link_set_node);
|
||||
|
||||
const self = try comp.gpa().create(Fn);
|
||||
self.* = Fn{
|
||||
.base = Value{
|
||||
.id = .Fn,
|
||||
.typ = &fn_type.base,
|
||||
.ref_count = std.atomic.Int(usize).init(1),
|
||||
},
|
||||
.fndef_scope = fndef_scope,
|
||||
.child_scope = &fndef_scope.base,
|
||||
.block_scope = null,
|
||||
.symbol_name = symbol_name,
|
||||
.containing_object = ArrayListSentineled(u8, 0).initNull(comp.gpa()),
|
||||
.link_set_node = link_set_node,
|
||||
};
|
||||
fn_type.base.base.ref();
|
||||
fndef_scope.fn_val = self;
|
||||
fndef_scope.base.ref();
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn destroy(self: *Fn, comp: *Compilation) void {
|
||||
// remove with a tombstone so that we do not have to grab a lock
|
||||
if (self.link_set_node.data != null) {
|
||||
// it's now the job of the link step to find this tombstone and
|
||||
// deallocate it.
|
||||
self.link_set_node.data = null;
|
||||
} else {
|
||||
comp.gpa().destroy(self.link_set_node);
|
||||
}
|
||||
|
||||
self.containing_object.deinit();
|
||||
self.fndef_scope.base.deref(comp);
|
||||
self.symbol_name.deinit();
|
||||
comp.gpa().destroy(self);
|
||||
}
|
||||
|
||||
/// We know that the function definition will end up in an .o file somewhere.
|
||||
/// Here, all we have to do is generate a global prototype.
|
||||
/// TODO cache the prototype per ObjectFile
|
||||
pub fn getLlvmConst(self: *Fn, ofile: *ObjectFile) !?*llvm.Value {
|
||||
const llvm_fn_type = try self.base.typ.getLlvmType(ofile.arena, ofile.context);
|
||||
const llvm_fn = llvm.AddFunction(
|
||||
ofile.module,
|
||||
self.symbol_name.span(),
|
||||
llvm_fn_type,
|
||||
) orelse return error.OutOfMemory;
|
||||
|
||||
// TODO port more logic from codegen.cpp:fn_llvm_value
|
||||
|
||||
return llvm_fn;
|
||||
}
|
||||
pub const Function = struct {
|
||||
base: Value = Value{ .tag = .function },
|
||||
};
|
||||
|
||||
pub const Void = struct {
|
||||
base: Value,
|
||||
|
||||
pub fn get(comp: *Compilation) *Void {
|
||||
comp.void_value.base.ref();
|
||||
return comp.void_value;
|
||||
}
|
||||
|
||||
pub fn destroy(self: *Void, comp: *Compilation) void {
|
||||
comp.gpa().destroy(self);
|
||||
}
|
||||
pub const ArraySentinel0_u8_Type = struct {
|
||||
base: Value = Value{ .tag = .array_sentinel_0_u8_type },
|
||||
len: u64,
|
||||
};
|
||||
|
||||
pub const Bool = struct {
|
||||
base: Value,
|
||||
x: bool,
|
||||
|
||||
pub fn get(comp: *Compilation, x: bool) *Bool {
|
||||
if (x) {
|
||||
comp.true_value.base.ref();
|
||||
return comp.true_value;
|
||||
} else {
|
||||
comp.false_value.base.ref();
|
||||
return comp.false_value;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn destroy(self: *Bool, comp: *Compilation) void {
|
||||
comp.gpa().destroy(self);
|
||||
}
|
||||
|
||||
pub fn getLlvmConst(self: *Bool, ofile: *ObjectFile) !?*llvm.Value {
|
||||
const llvm_type = llvm.Int1TypeInContext(ofile.context) orelse return error.OutOfMemory;
|
||||
if (self.x) {
|
||||
return llvm.ConstAllOnes(llvm_type);
|
||||
} else {
|
||||
return llvm.ConstNull(llvm_type);
|
||||
}
|
||||
}
|
||||
pub const SingleConstPtrType = struct {
|
||||
base: Value = Value{ .tag = .single_const_ptr_type },
|
||||
elem_type: *Value,
|
||||
};
|
||||
|
||||
pub const NoReturn = struct {
|
||||
base: Value,
|
||||
|
||||
pub fn get(comp: *Compilation) *NoReturn {
|
||||
comp.noreturn_value.base.ref();
|
||||
return comp.noreturn_value;
|
||||
}
|
||||
|
||||
pub fn destroy(self: *NoReturn, comp: *Compilation) void {
|
||||
comp.gpa().destroy(self);
|
||||
}
|
||||
pub const Ref = struct {
|
||||
base: Value = Value{ .tag = .ref },
|
||||
pointee: *MemoryCell,
|
||||
};
|
||||
|
||||
pub const Ptr = struct {
|
||||
base: Value,
|
||||
special: Special,
|
||||
mut: Mut,
|
||||
|
||||
pub const Mut = enum {
|
||||
CompTimeConst,
|
||||
CompTimeVar,
|
||||
RunTime,
|
||||
};
|
||||
|
||||
pub const Special = union(enum) {
|
||||
Scalar: *Value,
|
||||
BaseArray: BaseArray,
|
||||
BaseStruct: BaseStruct,
|
||||
HardCodedAddr: u64,
|
||||
Discard,
|
||||
};
|
||||
|
||||
pub const BaseArray = struct {
|
||||
val: *Value,
|
||||
elem_index: usize,
|
||||
};
|
||||
|
||||
pub const BaseStruct = struct {
|
||||
val: *Value,
|
||||
field_index: usize,
|
||||
};
|
||||
|
||||
pub fn createArrayElemPtr(
|
||||
comp: *Compilation,
|
||||
array_val: *Array,
|
||||
mut: Type.Pointer.Mut,
|
||||
size: Type.Pointer.Size,
|
||||
elem_index: usize,
|
||||
) !*Ptr {
|
||||
array_val.base.ref();
|
||||
errdefer array_val.base.deref(comp);
|
||||
|
||||
const elem_type = array_val.base.typ.cast(Type.Array).?.key.elem_type;
|
||||
const ptr_type = try Type.Pointer.get(comp, Type.Pointer.Key{
|
||||
.child_type = elem_type,
|
||||
.mut = mut,
|
||||
.vol = Type.Pointer.Vol.Non,
|
||||
.size = size,
|
||||
.alignment = .Abi,
|
||||
});
|
||||
var ptr_type_consumed = false;
|
||||
errdefer if (!ptr_type_consumed) ptr_type.base.base.deref(comp);
|
||||
|
||||
const self = try comp.gpa().create(Value.Ptr);
|
||||
self.* = Value.Ptr{
|
||||
.base = Value{
|
||||
.id = .Ptr,
|
||||
.typ = &ptr_type.base,
|
||||
.ref_count = std.atomic.Int(usize).init(1),
|
||||
},
|
||||
.special = Special{
|
||||
.BaseArray = BaseArray{
|
||||
.val = &array_val.base,
|
||||
.elem_index = 0,
|
||||
},
|
||||
},
|
||||
.mut = Mut.CompTimeConst,
|
||||
};
|
||||
ptr_type_consumed = true;
|
||||
errdefer comp.gpa().destroy(self);
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn destroy(self: *Ptr, comp: *Compilation) void {
|
||||
comp.gpa().destroy(self);
|
||||
}
|
||||
|
||||
pub fn getLlvmConst(self: *Ptr, ofile: *ObjectFile) !?*llvm.Value {
|
||||
const llvm_type = self.base.typ.getLlvmType(ofile.arena, ofile.context);
|
||||
// TODO carefully port the logic from codegen.cpp:gen_const_val_ptr
|
||||
switch (self.special) {
|
||||
.Scalar => |scalar| @panic("TODO"),
|
||||
.BaseArray => |base_array| {
|
||||
// TODO put this in one .o file only, and after that, generate extern references to it
|
||||
const array_llvm_value = (try base_array.val.getLlvmConst(ofile)).?;
|
||||
const ptr_bit_count = ofile.comp.target_ptr_bits;
|
||||
const usize_llvm_type = llvm.IntTypeInContext(ofile.context, ptr_bit_count) orelse return error.OutOfMemory;
|
||||
var indices = [_]*llvm.Value{
|
||||
llvm.ConstNull(usize_llvm_type) orelse return error.OutOfMemory,
|
||||
llvm.ConstInt(usize_llvm_type, base_array.elem_index, 0) orelse return error.OutOfMemory,
|
||||
};
|
||||
return llvm.ConstInBoundsGEP(
|
||||
array_llvm_value,
|
||||
@ptrCast([*]*llvm.Value, &indices),
|
||||
@intCast(c_uint, indices.len),
|
||||
) orelse return error.OutOfMemory;
|
||||
},
|
||||
.BaseStruct => |base_struct| @panic("TODO"),
|
||||
.HardCodedAddr => |addr| @panic("TODO"),
|
||||
.Discard => unreachable,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
pub const Array = struct {
|
||||
base: Value,
|
||||
special: Special,
|
||||
|
||||
pub const Special = union(enum) {
|
||||
Undefined,
|
||||
OwnedBuffer: []u8,
|
||||
Explicit: Data,
|
||||
};
|
||||
|
||||
pub const Data = struct {
|
||||
parent: Parent,
|
||||
elements: []*Value,
|
||||
};
|
||||
|
||||
/// Takes ownership of buffer
|
||||
pub fn createOwnedBuffer(comp: *Compilation, buffer: []u8) !*Array {
|
||||
const u8_type = Type.Int.get_u8(comp);
|
||||
defer u8_type.base.base.deref(comp);
|
||||
|
||||
const array_type = try Type.Array.get(comp, Type.Array.Key{
|
||||
.elem_type = &u8_type.base,
|
||||
.len = buffer.len,
|
||||
});
|
||||
errdefer array_type.base.base.deref(comp);
|
||||
|
||||
const self = try comp.gpa().create(Value.Array);
|
||||
self.* = Value.Array{
|
||||
.base = Value{
|
||||
.id = .Array,
|
||||
.typ = &array_type.base,
|
||||
.ref_count = std.atomic.Int(usize).init(1),
|
||||
},
|
||||
.special = Special{ .OwnedBuffer = buffer },
|
||||
};
|
||||
errdefer comp.gpa().destroy(self);
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn destroy(self: *Array, comp: *Compilation) void {
|
||||
switch (self.special) {
|
||||
.Undefined => {},
|
||||
.OwnedBuffer => |buf| {
|
||||
comp.gpa().free(buf);
|
||||
},
|
||||
.Explicit => {},
|
||||
}
|
||||
comp.gpa().destroy(self);
|
||||
}
|
||||
|
||||
pub fn getLlvmConst(self: *Array, ofile: *ObjectFile) !?*llvm.Value {
|
||||
switch (self.special) {
|
||||
.Undefined => {
|
||||
const llvm_type = try self.base.typ.getLlvmType(ofile.arena, ofile.context);
|
||||
return llvm.GetUndef(llvm_type);
|
||||
},
|
||||
.OwnedBuffer => |buf| {
|
||||
const dont_null_terminate = 1;
|
||||
const llvm_str_init = llvm.ConstStringInContext(
|
||||
ofile.context,
|
||||
buf.ptr,
|
||||
@intCast(c_uint, buf.len),
|
||||
dont_null_terminate,
|
||||
) orelse return error.OutOfMemory;
|
||||
const str_init_type = llvm.TypeOf(llvm_str_init);
|
||||
const global = llvm.AddGlobal(ofile.module, str_init_type, "") orelse return error.OutOfMemory;
|
||||
llvm.SetInitializer(global, llvm_str_init);
|
||||
llvm.SetLinkage(global, llvm.PrivateLinkage);
|
||||
llvm.SetGlobalConstant(global, 1);
|
||||
llvm.SetUnnamedAddr(global, 1);
|
||||
llvm.SetAlignment(global, llvm.ABIAlignmentOfType(ofile.comp.target_data_ref, str_init_type));
|
||||
return global;
|
||||
},
|
||||
.Explicit => @panic("TODO"),
|
||||
}
|
||||
|
||||
//{
|
||||
// uint64_t len = type_entry->data.array.len;
|
||||
// if (const_val->data.x_array.special == ConstArraySpecialUndef) {
|
||||
// return LLVMGetUndef(type_entry->type_ref);
|
||||
// }
|
||||
|
||||
// LLVMValueRef *values = allocate<LLVMValueRef>(len);
|
||||
// LLVMTypeRef element_type_ref = type_entry->data.array.child_type->type_ref;
|
||||
// bool make_unnamed_struct = false;
|
||||
// for (uint64_t i = 0; i < len; i += 1) {
|
||||
// ConstExprValue *elem_value = &const_val->data.x_array.s_none.elements[i];
|
||||
// LLVMValueRef val = gen_const_val(g, elem_value, "");
|
||||
// values[i] = val;
|
||||
// make_unnamed_struct = make_unnamed_struct || is_llvm_value_unnamed_type(elem_value->type, val);
|
||||
// }
|
||||
// if (make_unnamed_struct) {
|
||||
// return LLVMConstStruct(values, len, true);
|
||||
// } else {
|
||||
// return LLVMConstArray(element_type_ref, values, (unsigned)len);
|
||||
// }
|
||||
//}
|
||||
}
|
||||
};
|
||||
|
||||
pub const Int = struct {
|
||||
base: Value,
|
||||
big_int: std.math.big.Int,
|
||||
|
||||
pub fn createFromString(comp: *Compilation, typ: *Type, base: u8, value: []const u8) !*Int {
|
||||
const self = try comp.gpa().create(Value.Int);
|
||||
self.* = Value.Int{
|
||||
.base = Value{
|
||||
.id = .Int,
|
||||
.typ = typ,
|
||||
.ref_count = std.atomic.Int(usize).init(1),
|
||||
},
|
||||
.big_int = undefined,
|
||||
};
|
||||
typ.base.ref();
|
||||
errdefer comp.gpa().destroy(self);
|
||||
|
||||
self.big_int = try std.math.big.Int.init(comp.gpa());
|
||||
errdefer self.big_int.deinit();
|
||||
|
||||
try self.big_int.setString(base, value);
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn getLlvmConst(self: *Int, ofile: *ObjectFile) !?*llvm.Value {
|
||||
switch (self.base.typ.id) {
|
||||
.Int => {
|
||||
const type_ref = try self.base.typ.getLlvmType(ofile.arena, ofile.context);
|
||||
if (self.big_int.len() == 0) {
|
||||
return llvm.ConstNull(type_ref);
|
||||
}
|
||||
const unsigned_val = if (self.big_int.len() == 1) blk: {
|
||||
break :blk llvm.ConstInt(type_ref, self.big_int.limbs[0], @boolToInt(false));
|
||||
} else if (@sizeOf(std.math.big.Limb) == @sizeOf(u64)) blk: {
|
||||
break :blk llvm.ConstIntOfArbitraryPrecision(
|
||||
type_ref,
|
||||
@intCast(c_uint, self.big_int.len()),
|
||||
@ptrCast([*]u64, self.big_int.limbs.ptr),
|
||||
);
|
||||
} else {
|
||||
@compileError("std.math.Big.Int.Limb size does not match LLVM");
|
||||
};
|
||||
return if (self.big_int.isPositive()) unsigned_val else llvm.ConstNeg(unsigned_val);
|
||||
},
|
||||
.ComptimeInt => unreachable,
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn copy(old: *Int, comp: *Compilation) !*Int {
|
||||
old.base.typ.base.ref();
|
||||
errdefer old.base.typ.base.deref(comp);
|
||||
|
||||
const new = try comp.gpa().create(Value.Int);
|
||||
new.* = Value.Int{
|
||||
.base = Value{
|
||||
.id = .Int,
|
||||
.typ = old.base.typ,
|
||||
.ref_count = std.atomic.Int(usize).init(1),
|
||||
},
|
||||
.big_int = undefined,
|
||||
};
|
||||
errdefer comp.gpa().destroy(new);
|
||||
|
||||
new.big_int = try old.big_int.clone();
|
||||
errdefer new.big_int.deinit();
|
||||
|
||||
return new;
|
||||
}
|
||||
|
||||
pub fn destroy(self: *Int, comp: *Compilation) void {
|
||||
self.big_int.deinit();
|
||||
comp.gpa().destroy(self);
|
||||
}
|
||||
pub const Bytes = struct {
|
||||
base: Value = Value{ .tag = .bytes },
|
||||
data: []u8,
|
||||
};
|
||||
};
|
||||
|
||||
pub const MemoryCell = struct {
|
||||
parent: Parent,
|
||||
contents: *Value,
|
||||
|
||||
pub const Parent = union(enum) {
|
||||
none,
|
||||
struct_field: struct {
|
||||
struct_base: *MemoryCell,
|
||||
field_index: usize,
|
||||
},
|
||||
array_elem: struct {
|
||||
array_base: *MemoryCell,
|
||||
elem_index: usize,
|
||||
},
|
||||
union_field: *MemoryCell,
|
||||
err_union_code: *MemoryCell,
|
||||
err_union_payload: *MemoryCell,
|
||||
optional_payload: *MemoryCell,
|
||||
optional_flag: *MemoryCell,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
test "hello world IR" {
|
||||
exeCmp(
|
||||
\\@0 = "Hello, world!\n"
|
||||
\\
|
||||
\\@1 = fn({
|
||||
\\ %0 : usize = 1 ;SYS_write
|
||||
\\ %1 : usize = 1 ;STDOUT_FILENO
|
||||
\\ %2 = ptrtoint(@0) ; msg ptr
|
||||
\\ %3 = fieldptr(@0, "len") ; msg len ptr
|
||||
\\ %4 = deref(%3) ; msg len
|
||||
\\ %5 = asm("syscall",
|
||||
\\ volatile=1,
|
||||
\\ output="={rax}",
|
||||
\\ inputs=["{rax}", "{rdi}", "{rsi}", "{rdx}"],
|
||||
\\ clobbers=["rcx", "r11", "memory"],
|
||||
\\ args=[%0, %1, %2, %4])
|
||||
\\
|
||||
\\ %6 : usize = 231 ;SYS_exit_group
|
||||
\\ %7 : usize = 0 ;exit code
|
||||
\\ %8 = asm("syscall",
|
||||
\\ volatile=1,
|
||||
\\ output="={rax}",
|
||||
\\ inputs=["{rax}", "{rdi}"],
|
||||
\\ clobbers=["rcx", "r11", "memory"],
|
||||
\\ args=[%6, %7])
|
||||
\\
|
||||
\\ %9 = unreachable()
|
||||
\\}, cc=naked);
|
||||
\\
|
||||
\\@2 = export("_start", @1)
|
||||
,
|
||||
\\Hello, world!
|
||||
\\
|
||||
);
|
||||
}
|
||||
|
||||
fn exeCmp(src: []const u8, expected_stdout: []const u8) void {}
|
||||
Reference in New Issue
Block a user