@@ -164,9 +164,25 @@ pub const Object = struct {
164
164
/// * it works for functions not all globals.
165
165
/// Therefore, this table keeps track of the mapping.
166
166
decl_map : std .AutoHashMapUnmanaged (* const Module.Decl , * const llvm .Value ),
167
+ /// Maps Zig types to LLVM types. The table memory itself is backed by the GPA of
168
+ /// the compiler, but the Type/Value memory here is backed by `type_map_arena`.
169
+ /// TODO we need to remove entries from this map in response to incremental compilation
170
+ /// but I think the frontend won't tell us about types that get deleted because
171
+ /// hasCodeGenBits() is false for types.
172
+ type_map : TypeMap ,
173
+ /// The backing memory for `type_map`. Periodically garbage collected after flush().
174
+ /// The code for doing the periodical GC is not yet implemented.
175
+ type_map_arena : std.heap.ArenaAllocator ,
167
176
/// Where to put the output object file, relative to bin_file.options.emit directory.
168
177
sub_path : []const u8 ,
169
178
179
/// Hash map from Zig `Type` to the corresponding LLVM type handle.
/// Uses `Type.HashContext64` so structurally-equal Zig types share one entry.
pub const TypeMap = std.HashMapUnmanaged(
    Type,
    *const llvm.Type,
    Type.HashContext64,
    std.hash_map.default_max_load_percentage,
);
185
+
170
186
pub fn create (gpa : * Allocator , sub_path : []const u8 , options : link.Options ) ! * Object {
171
187
const obj = try gpa .create (Object );
172
188
errdefer gpa .destroy (obj );
@@ -253,6 +269,8 @@ pub const Object = struct {
253
269
.context = context ,
254
270
.target_machine = target_machine ,
255
271
.decl_map = .{},
272
+ .type_map = .{},
273
+ .type_map_arena = std .heap .ArenaAllocator .init (gpa ),
256
274
.sub_path = sub_path ,
257
275
};
258
276
}
@@ -262,6 +280,8 @@ pub const Object = struct {
262
280
self .llvm_module .dispose ();
263
281
self .context .dispose ();
264
282
self .decl_map .deinit (gpa );
283
+ self .type_map .deinit (gpa );
284
+ self .type_map_arena .deinit ();
265
285
self .* = undefined ;
266
286
}
267
287
@@ -725,10 +745,10 @@ pub const DeclGen = struct {
725
745
}
726
746
727
747
fn llvmType (self : * DeclGen , t : Type ) error { OutOfMemory , CodegenFail }! * const llvm.Type {
748
+ const gpa = self .gpa ;
728
749
log .debug ("llvmType for {}" , .{t });
729
750
switch (t .zigTypeTag ()) {
730
- .Void = > return self .context .voidType (),
731
- .NoReturn = > return self .context .voidType (),
751
+ .Void , .NoReturn = > return self .context .voidType (),
732
752
.Int = > {
733
753
const info = t .intInfo (self .module .getTarget ());
734
754
return self .context .intType (info .bits );
@@ -799,18 +819,38 @@ pub const DeclGen = struct {
799
819
return self .context .intType (16 );
800
820
},
801
821
.Struct = > {
822
+ const gop = try self .object .type_map .getOrPut (gpa , t );
823
+ if (gop .found_existing ) return gop .value_ptr .* ;
824
+
825
+ // The Type memory is ephemeral; since we want to store a longer-lived
826
+ // reference, we need to copy it here.
827
+ gop .key_ptr .* = try t .copy (& self .object .type_map_arena .allocator );
828
+
802
829
const struct_obj = t .castTag (.@"struct" ).? .data ;
803
830
assert (struct_obj .haveFieldTypes ());
804
- const llvm_fields = try self .gpa .alloc (* const llvm .Type , struct_obj .fields .count ());
805
- defer self .gpa .free (llvm_fields );
806
- for (struct_obj .fields .values ()) | field , i | {
807
- llvm_fields [i ] = try self .llvmType (field .ty );
831
+
832
+ const name = try struct_obj .getFullyQualifiedName (gpa );
833
+ defer gpa .free (name );
834
+
835
+ const llvm_struct_ty = self .context .structCreateNamed (name );
836
+ gop .value_ptr .* = llvm_struct_ty ; // must be done before any recursive calls
837
+
838
+ var llvm_field_types : std .ArrayListUnmanaged (* const llvm .Type ) = .{};
839
+ try llvm_field_types .ensureTotalCapacity (gpa , struct_obj .fields .count ());
840
+ defer llvm_field_types .deinit (gpa );
841
+
842
+ for (struct_obj .fields .values ()) | field | {
843
+ if (! field .ty .hasCodeGenBits ()) continue ;
844
+ llvm_field_types .appendAssumeCapacity (try self .llvmType (field .ty ));
808
845
}
809
- return self .context .structType (
810
- llvm_fields .ptr ,
811
- @intCast (c_uint , llvm_fields .len ),
812
- .False ,
846
+
847
+ llvm_struct_ty .structSetBody (
848
+ llvm_field_types .items .ptr ,
849
+ @intCast (c_uint , llvm_field_types .items .len ),
850
+ llvm .Bool .fromBool (struct_obj .layout == .Packed ),
813
851
);
852
+
853
+ return llvm_struct_ty ;
814
854
},
815
855
.Union = > {
816
856
const union_obj = t .castTag (.@"union" ).? .data ;
@@ -838,8 +878,8 @@ pub const DeclGen = struct {
838
878
.Fn = > {
839
879
const ret_ty = try self .llvmType (t .fnReturnType ());
840
880
const params_len = t .fnParamLen ();
841
- const llvm_params = try self . gpa .alloc (* const llvm .Type , params_len );
842
- defer self . gpa .free (llvm_params );
881
+ const llvm_params = try gpa .alloc (* const llvm .Type , params_len );
882
+ defer gpa .free (llvm_params );
843
883
for (llvm_params ) | * llvm_param , i | {
844
884
llvm_param .* = try self .llvmType (t .fnParamType (i ));
845
885
}
@@ -1073,21 +1113,26 @@ pub const DeclGen = struct {
1073
1113
return self .context .constStruct (& fields , fields .len , .False );
1074
1114
},
1075
1115
.Struct = > {
1076
- const fields_len = tv .ty . structFieldCount ( );
1116
+ const llvm_struct_ty = try self . llvmType ( tv .ty );
1077
1117
const field_vals = tv .val .castTag (.@"struct" ).? .data ;
1078
1118
const gpa = self .gpa ;
1079
- const llvm_fields = try gpa .alloc (* const llvm .Value , fields_len );
1080
- defer gpa .free (llvm_fields );
1081
- for (llvm_fields ) | * llvm_field , i | {
1082
- llvm_field .* = try self .genTypedValue (.{
1083
- .ty = tv .ty .structFieldType (i ),
1084
- .val = field_vals [i ],
1085
- });
1119
+
1120
+ var llvm_fields : std .ArrayListUnmanaged (* const llvm .Value ) = .{};
1121
+ try llvm_fields .ensureTotalCapacity (gpa , field_vals .len );
1122
+ defer llvm_fields .deinit (gpa );
1123
+
1124
+ for (field_vals ) | field_val , i | {
1125
+ const field_ty = tv .ty .structFieldType (i );
1126
+ if (! field_ty .hasCodeGenBits ()) continue ;
1127
+
1128
+ llvm_fields .appendAssumeCapacity (try self .genTypedValue (.{
1129
+ .ty = field_ty ,
1130
+ .val = field_val ,
1131
+ }));
1086
1132
}
1087
- return self .context .constStruct (
1088
- llvm_fields .ptr ,
1089
- @intCast (c_uint , llvm_fields .len ),
1090
- .False ,
1133
+ return llvm_struct_ty .constNamedStruct (
1134
+ llvm_fields .items .ptr ,
1135
+ @intCast (c_uint , llvm_fields .items .len ),
1091
1136
);
1092
1137
},
1093
1138
.ComptimeInt = > unreachable ,
@@ -1692,13 +1737,15 @@ pub const FuncGen = struct {
1692
1737
const struct_field = self .air .extraData (Air .StructField , ty_pl .payload ).data ;
1693
1738
const struct_ptr = try self .resolveInst (struct_field .struct_operand );
1694
1739
const struct_ptr_ty = self .air .typeOf (struct_field .struct_operand );
1695
- const field_index = @intCast (c_uint , struct_field .field_index );
1696
- return self .fieldPtr (inst , struct_ptr , struct_ptr_ty , field_index );
1740
+ return self .fieldPtr (inst , struct_ptr , struct_ptr_ty , struct_field .field_index );
1697
1741
}
1698
1742
1699
- fn airStructFieldPtrIndex (self : * FuncGen , inst : Air.Inst.Index , field_index : c_uint ) ! ? * const llvm.Value {
1700
- if (self .liveness .isUnused (inst ))
1701
- return null ;
1743
+ fn airStructFieldPtrIndex (
1744
+ self : * FuncGen ,
1745
+ inst : Air.Inst.Index ,
1746
+ field_index : u32 ,
1747
+ ) ! ? * const llvm.Value {
1748
+ if (self .liveness .isUnused (inst )) return null ;
1702
1749
1703
1750
const ty_op = self .air .instructions .items (.data )[inst ].ty_op ;
1704
1751
const struct_ptr = try self .resolveInst (ty_op .operand );
@@ -1707,13 +1754,13 @@ pub const FuncGen = struct {
1707
1754
}
1708
1755
1709
1756
/// Lower an AIR `struct_field_val` instruction: extract one field from a
/// by-value struct. The AIR field index is translated through
/// `llvmFieldIndex` because zero-bit fields are omitted from the LLVM
/// struct layout, so the LLVM index may be smaller than the Zig index.
/// Returns null when the result is unused by liveness analysis.
fn airStructFieldVal(self: *FuncGen, inst: Air.Inst.Index) !?*const llvm.Value {
    if (self.liveness.isUnused(inst)) return null;

    const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
    const struct_field = self.air.extraData(Air.StructField, ty_pl.payload).data;
    const struct_ty = self.air.typeOf(struct_field.struct_operand);
    const struct_byval = try self.resolveInst(struct_field.struct_operand);
    // Map the Zig field index to the LLVM field index, skipping zero-bit fields.
    const field_index = llvmFieldIndex(struct_ty, struct_field.field_index);
    return self.builder.buildExtractValue(struct_byval, field_index, "");
}
1719
1766
@@ -2643,8 +2690,7 @@ pub const FuncGen = struct {
2643
2690
const fill_char = if (val_is_undef ) u8_llvm_ty .constInt (0xaa , .False ) else value ;
2644
2691
const target = self .dg .module .getTarget ();
2645
2692
const dest_ptr_align = ptr_ty .ptrAlignment (target );
2646
- const memset = self .builder .buildMemSet (dest_ptr_u8 , fill_char , len , dest_ptr_align );
2647
- memset .setVolatile (llvm .Bool .fromBool (ptr_ty .isVolatilePtr ()));
2693
+ _ = self .builder .buildMemSet (dest_ptr_u8 , fill_char , len , dest_ptr_align , ptr_ty .isVolatilePtr ());
2648
2694
2649
2695
if (val_is_undef and self .dg .module .comp .bin_file .options .valgrind ) {
2650
2696
// TODO generate valgrind client request to mark byte range as undefined
@@ -2667,14 +2713,14 @@ pub const FuncGen = struct {
2667
2713
const src_ptr_u8 = self .builder .buildBitCast (src_ptr , ptr_u8_llvm_ty , "" );
2668
2714
const is_volatile = src_ptr_ty .isVolatilePtr () or dest_ptr_ty .isVolatilePtr ();
2669
2715
const target = self .dg .module .getTarget ();
2670
- const memcpy = self .builder .buildMemCpy (
2716
+ _ = self .builder .buildMemCpy (
2671
2717
dest_ptr_u8 ,
2672
2718
dest_ptr_ty .ptrAlignment (target ),
2673
2719
src_ptr_u8 ,
2674
2720
src_ptr_ty .ptrAlignment (target ),
2675
2721
len ,
2722
+ is_volatile ,
2676
2723
);
2677
- memcpy .setVolatile (llvm .Bool .fromBool (is_volatile ));
2678
2724
return null ;
2679
2725
}
2680
2726
@@ -2741,11 +2787,14 @@ pub const FuncGen = struct {
2741
2787
inst : Air.Inst.Index ,
2742
2788
struct_ptr : * const llvm.Value ,
2743
2789
struct_ptr_ty : Type ,
2744
- field_index : c_uint ,
2790
+ field_index : u32 ,
2745
2791
) ! ? * const llvm.Value {
2746
2792
const struct_ty = struct_ptr_ty .childType ();
2747
2793
switch (struct_ty .zigTypeTag ()) {
2748
- .Struct = > return self .builder .buildStructGEP (struct_ptr , field_index , "" ),
2794
+ .Struct = > {
2795
+ const llvm_field_index = llvmFieldIndex (struct_ty , field_index );
2796
+ return self .builder .buildStructGEP (struct_ptr , llvm_field_index , "" );
2797
+ },
2749
2798
.Union = > return self .unionFieldPtr (inst , struct_ptr , struct_ty , field_index ),
2750
2799
else = > unreachable ,
2751
2800
}
@@ -2968,3 +3017,15 @@ fn toLlvmAtomicRmwBinOp(
2968
3017
.Min = > if (is_signed ) llvm .AtomicRMWBinOp .Min else return .UMin ,
2969
3018
};
2970
3019
}
3020
+
3021
/// Translate a Zig struct field index into the corresponding LLVM struct
/// field index. Fields with no code-gen bits (zero-bit fields) are not
/// emitted into the LLVM struct body, so the LLVM index counts only the
/// preceding fields that actually occupy storage.
/// NOTE(review): assumes `ty` is a non-packed `.@"struct"` — confirm callers
/// never pass packed layouts here.
fn llvmFieldIndex(ty: Type, index: u32) c_uint {
    const struct_obj = ty.castTag(.@"struct").?.data;
    var llvm_index: c_uint = 0;
    const preceding_fields = struct_obj.fields.values()[0..index];
    for (preceding_fields) |field| {
        if (!field.ty.hasCodeGenBits()) continue;
        llvm_index += 1;
    }
    return llvm_index;
}
0 commit comments