+const builtin = @import("builtin");
 const std = @import("std");
 const Allocator = std.mem.Allocator;
+const assert = std.debug.assert;
 
 pub const std_options = .{
     .logFn = logOverride,
@@ -13,6 +15,7 @@ fn logOverride(
     comptime format: []const u8,
     args: anytype,
 ) void {
+    if (builtin.mode != .Debug) return;
     const f = if (log_file) |f| f else f: {
         const f = std.fs.cwd().createFile("libfuzzer.log", .{}) catch @panic("failed to open fuzzer log file");
         log_file = f;
@@ -75,7 +78,7 @@ export fn __sanitizer_cov_trace_switch(val: u64, cases_ptr: [*]u64) void {
     const val_size_in_bits = cases_ptr[1];
     const cases = cases_ptr[2..][0..len];
     _ = val;
-    _ = pc;
+    fuzzer.visitPc(pc);
     _ = val_size_in_bits;
     _ = cases;
     //std.log.debug("0x{x}: switch on value {d} ({d} bits) with {d} cases", .{
@@ -86,14 +89,14 @@ export fn __sanitizer_cov_trace_switch(val: u64, cases_ptr: [*]u64) void {
 export fn __sanitizer_cov_trace_pc_indir(callee: usize) void {
     const pc = @returnAddress();
     _ = callee;
-    _ = pc;
+    fuzzer.visitPc(pc);
     //std.log.debug("0x{x}: indirect call to 0x{x}", .{ pc, callee });
 }
 
 fn handleCmp(pc: usize, arg1: u64, arg2: u64) void {
-    _ = pc;
     _ = arg1;
     _ = arg2;
+    fuzzer.visitPc(pc);
     //std.log.debug("0x{x}: comparison of {d} and {d}", .{ pc, arg1, arg2 });
 }
 
@@ -103,6 +106,46 @@ const Fuzzer = struct {
     input: std.ArrayListUnmanaged(u8),
     pc_range: PcRange,
     count: usize,
+    recent_cases: RunMap,
+    deduplicated_runs: usize,
+    coverage: Coverage,
+
+    const RunMap = std.ArrayHashMapUnmanaged(Run, void, Run.HashContext, false);
+
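+    // Per-run coverage feedback: pc_table records each distinct PC hit by
+    // the current input, and run_id_hasher fingerprints the visit sequence
+    // so identical runs can be recognized by id alone.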
+    const Coverage = struct {
+        pc_table: std.AutoArrayHashMapUnmanaged(usize, void),
+        run_id_hasher: std.hash.Wyhash,
+
+        fn reset(cov: *Coverage) void {
+            cov.pc_table.clearRetainingCapacity();
+            cov.run_id_hasher = std.hash.Wyhash.init(0);
+        }
+    };
+
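+    // One corpus entry: a coverage fingerprint, the best (shortest) input
+    // seen for it so far, and a score counting the distinct PCs it hit.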
+    const Run = struct {
+        id: Id,
+        input: []const u8,
+        score: usize,
+
+        const Id = u64;
+
+        const HashContext = struct {
+            pub fn eql(ctx: HashContext, a: Run, b: Run, b_index: usize) bool {
+                _ = b_index;
+                _ = ctx;
+                return a.id == b.id;
+            }
+            pub fn hash(ctx: HashContext, a: Run) u32 {
+                _ = ctx;
+                return @truncate(a.id);
+            }
+        };
+
+        fn deinit(run: *Run, gpa: Allocator) void {
+            gpa.free(run.input);
+            run.* = undefined;
+        }
+    };
 
     const Slice = extern struct {
         ptr: [*]const u8,
@@ -125,24 +168,137 @@ const Fuzzer = struct {
         end: usize,
     };
 
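+    // Summary of the run that just finished, derived from `coverage`.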
+    const Analysis = struct {
+        score: usize,
+        id: Run.Id,
+    };
+
+    fn analyzeLastRun(f: *Fuzzer) Analysis {
+        return .{
+            .id = f.coverage.run_id_hasher.final(),
+            .score = f.coverage.pc_table.count(),
+        };
+    }
+
     fn next(f: *Fuzzer) ![]const u8 {
         const gpa = f.gpa;
-
-        // Prepare next input.
         const rng = fuzzer.rng.random();
-        const len = rng.uintLessThan(usize, 64);
-        try f.input.resize(gpa, len);
-        rng.bytes(f.input.items);
-        f.resetCoverage();
+
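+        // Corpus-driven loop: seed the corpus on the first call, then fold
+        // the previous run's coverage into recent_cases before picking and
+        // mutating the next seed.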
+        if (f.recent_cases.entries.len == 0) {
+            // Prepare initial input.
+            try f.recent_cases.ensureUnusedCapacity(gpa, 100);
+            const len = rng.uintLessThanBiased(usize, 80);
+            try f.input.resize(gpa, len);
+            rng.bytes(f.input.items);
+            f.recent_cases.putAssumeCapacity(.{
+                .id = 0,
+                .input = try gpa.dupe(u8, f.input.items),
+                .score = 0,
+            }, {});
+        } else {
+            if (f.count % 1000 == 0) f.dumpStats();
+
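+            // Score the finished run and merge it into the corpus keyed by
+            // coverage id, preferring the shorter of two colliding inputs.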
+            const analysis = f.analyzeLastRun();
+            const gop = f.recent_cases.getOrPutAssumeCapacity(.{
+                .id = analysis.id,
+                .input = undefined,
+                .score = undefined,
+            });
+            if (gop.found_existing) {
+                //std.log.info("duplicate analysis: score={d} id={d}", .{ analysis.score, analysis.id });
+                f.deduplicated_runs += 1;
+                if (f.input.items.len < gop.key_ptr.input.len or gop.key_ptr.score == 0) {
+                    gpa.free(gop.key_ptr.input);
+                    gop.key_ptr.input = try gpa.dupe(u8, f.input.items);
+                    gop.key_ptr.score = analysis.score;
+                }
+            } else {
+                std.log.info("unique analysis: score={d} id={d}", .{ analysis.score, analysis.id });
+                gop.key_ptr.* = .{
+                    .id = analysis.id,
+                    .input = try gpa.dupe(u8, f.input.items),
+                    .score = analysis.score,
+                };
+            }
+
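+            // Cull the corpus: once 100 entries accumulate, keep only the
+            // 50 highest-scoring runs and free the rest.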
+            if (f.recent_cases.entries.len >= 100) {
+                const Context = struct {
+                    values: []const Run,
+                    pub fn lessThan(ctx: @This(), a_index: usize, b_index: usize) bool {
+                        return ctx.values[b_index].score < ctx.values[a_index].score;
+                    }
+                };
+                f.recent_cases.sortUnstable(Context{ .values = f.recent_cases.keys() });
+                const cap = 50;
+                // This has to be done before deinitializing the deleted items.
+                const doomed_runs = f.recent_cases.keys()[cap..];
+                f.recent_cases.shrinkRetainingCapacity(cap);
+                for (doomed_runs) |*run| {
+                    std.log.info("culling score={d} id={d}", .{ run.score, run.id });
+                    run.deinit(gpa);
+                }
+            }
+        }
+
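+        // Pick a random surviving run as the seed for the next input.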
+        const chosen_index = rng.uintLessThanBiased(usize, f.recent_cases.entries.len);
+        const run = &f.recent_cases.keys()[chosen_index];
+        f.input.clearRetainingCapacity();
+        f.input.appendSliceAssumeCapacity(run.input);
+        try f.mutate();
+
+        f.coverage.reset();
         f.count += 1;
         return f.input.items;
     }
 
-    fn resetCoverage(f: *Fuzzer) void {
-        _ = f;
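+    // Called from the sanitizer hooks on every instrumented PC visit; it
+    // must not fail, so allocation failure is routed to oom() by errdefer.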
+    fn visitPc(f: *Fuzzer, pc: usize) void {
+        errdefer |err| oom(err);
+        try f.coverage.pc_table.put(f.gpa, pc, {});
+        f.coverage.run_id_hasher.update(std.mem.asBytes(&pc));
+    }
+
+    fn dumpStats(f: *Fuzzer) void {
+        std.log.info("stats: runs={d} deduplicated={d}", .{
+            f.count,
+            f.deduplicated_runs,
+        });
+        for (f.recent_cases.keys()[0..@min(f.recent_cases.entries.len, 5)], 0..) |run, i| {
+            std.log.info("best[{d}] id={x} score={d} input: '{}'", .{
+                i, run.id, run.score, std.zig.fmtEscapes(run.input),
+            });
+        }
+    }
+
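+    // Mutation strategy: with equal probability, overwrite one byte with a
+    // random value, delete one byte, or insert one random byte. An empty
+    // input is instead regenerated from scratch.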
+    fn mutate(f: *Fuzzer) !void {
+        const gpa = f.gpa;
+        const rng = fuzzer.rng.random();
+
+        if (f.input.items.len == 0) {
+            const len = rng.uintLessThanBiased(usize, 80);
+            try f.input.resize(gpa, len);
+            rng.bytes(f.input.items);
+            return;
+        }
+
+        const index = rng.uintLessThanBiased(usize, f.input.items.len * 3);
+        if (index < f.input.items.len) {
+            f.input.items[index] = rng.int(u8);
+        } else if (index < f.input.items.len * 2) {
+            _ = f.input.orderedRemove(index - f.input.items.len);
+        } else if (index < f.input.items.len * 3) {
+            try f.input.insert(gpa, index - f.input.items.len * 2, rng.int(u8));
+        } else {
+            unreachable;
+        }
     }
 };
 
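+// Exhaustive switch: adding a new error to visitPc's error set becomes a
+// compile error here instead of a mislabeled panic.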
+fn oom(err: anytype) noreturn {
+    switch (err) {
+        error.OutOfMemory => @panic("out of memory"),
+    }
+}
+
 var general_purpose_allocator: std.heap.GeneralPurposeAllocator(.{}) = .{};
 
 var fuzzer: Fuzzer = .{
@@ -151,6 +307,9 @@ var fuzzer: Fuzzer = .{
     .input = .{},
     .pc_range = .{ .start = 0, .end = 0 },
     .count = 0,
+    .deduplicated_runs = 0,
+    .recent_cases = .{},
+    .coverage = undefined,
 };
 
 export fn fuzzer_next() Fuzzer.Slice {