fuzzing: more optimized and correct management of 8-bit PC counters #21006

Merged 2 commits on Aug 9, 2024
50 changes: 31 additions & 19 deletions lib/fuzzer.zig
@@ -213,8 +213,12 @@ const Fuzzer = struct {
.truncate = false,
});
defer coverage_file.close();
const n_bitset_elems = (flagged_pcs.len + 7) / 8;
const bytes_len = @sizeOf(SeenPcsHeader) + flagged_pcs.len * @sizeOf(usize) + n_bitset_elems;
const n_bitset_elems = (flagged_pcs.len + @bitSizeOf(usize) - 1) / @bitSizeOf(usize);
comptime assert(SeenPcsHeader.trailing[0] == .pc_bits_usize);
comptime assert(SeenPcsHeader.trailing[1] == .pc_addr);
const bytes_len = @sizeOf(SeenPcsHeader) +
n_bitset_elems * @sizeOf(usize) +
flagged_pcs.len * @sizeOf(usize);
const existing_len = coverage_file.getEndPos() catch |err| {
fatal("unable to check len of coverage file: {s}", .{@errorName(err)});
};
@@ -229,7 +233,7 @@ const Fuzzer = struct {
fatal("unable to init coverage memory map: {s}", .{@errorName(err)});
};
if (existing_len != 0) {
const existing_pcs_bytes = f.seen_pcs.items[@sizeOf(SeenPcsHeader)..][0 .. flagged_pcs.len * @sizeOf(usize)];
const existing_pcs_bytes = f.seen_pcs.items[@sizeOf(SeenPcsHeader) + @sizeOf(usize) * n_bitset_elems ..][0 .. flagged_pcs.len * @sizeOf(usize)];
const existing_pcs = std.mem.bytesAsSlice(usize, existing_pcs_bytes);
for (existing_pcs, flagged_pcs, 0..) |old, new, i| {
if (old != new.addr) {
@@ -246,10 +250,10 @@ const Fuzzer = struct {
.lowest_stack = std.math.maxInt(usize),
};
f.seen_pcs.appendSliceAssumeCapacity(std.mem.asBytes(&header));
f.seen_pcs.appendNTimesAssumeCapacity(0, n_bitset_elems * @sizeOf(usize));
for (flagged_pcs) |flagged_pc| {
f.seen_pcs.appendSliceAssumeCapacity(std.mem.asBytes(&flagged_pc.addr));
}
f.seen_pcs.appendNTimesAssumeCapacity(0, n_bitset_elems);
}
}

@@ -299,22 +303,30 @@ const Fuzzer = struct {
.score = analysis.score,
};

// Track code coverage from all runs.
{
const seen_pcs = f.seen_pcs.items[@sizeOf(SeenPcsHeader) + f.flagged_pcs.len * @sizeOf(usize) ..];
for (seen_pcs, 0..) |*elem, i| {
const byte_i = i * 8;
const mask: u8 =
(@as(u8, @intFromBool(f.pc_counters.ptr[byte_i + 0] != 0)) << 0) |
(@as(u8, @intFromBool(f.pc_counters.ptr[byte_i + 1] != 0)) << 1) |
(@as(u8, @intFromBool(f.pc_counters.ptr[byte_i + 2] != 0)) << 2) |
(@as(u8, @intFromBool(f.pc_counters.ptr[byte_i + 3] != 0)) << 3) |
(@as(u8, @intFromBool(f.pc_counters.ptr[byte_i + 4] != 0)) << 4) |
(@as(u8, @intFromBool(f.pc_counters.ptr[byte_i + 5] != 0)) << 5) |
(@as(u8, @intFromBool(f.pc_counters.ptr[byte_i + 6] != 0)) << 6) |
(@as(u8, @intFromBool(f.pc_counters.ptr[byte_i + 7] != 0)) << 7);

_ = @atomicRmw(u8, elem, .Or, mask, .monotonic);
// Track code coverage from all runs.
comptime assert(SeenPcsHeader.trailing[0] == .pc_bits_usize);
const header_end_ptr: [*]volatile usize = @ptrCast(f.seen_pcs.items[@sizeOf(SeenPcsHeader)..]);
const remainder = f.flagged_pcs.len % @bitSizeOf(usize);
const aligned_len = f.flagged_pcs.len - remainder;
const seen_pcs = header_end_ptr[0..aligned_len];
const pc_counters = std.mem.bytesAsSlice([@bitSizeOf(usize)]u8, f.pc_counters[0..aligned_len]);
const V = @Vector(@bitSizeOf(usize), u8);
const zero_v: V = @splat(0);

for (header_end_ptr[0..pc_counters.len], pc_counters) |*elem, *array| {
const v: V = array.*;
const mask: usize = @bitCast(v != zero_v);
_ = @atomicRmw(usize, elem, .Or, mask, .monotonic);
}
if (remainder > 0) {
const i = pc_counters.len;
const elem = &seen_pcs[i];
var mask: usize = 0;
for (f.pc_counters[i * @bitSizeOf(usize) ..][0..remainder], 0..) |byte, bit_index| {
mask |= @as(usize, @intFromBool(byte != 0)) << @intCast(bit_index);
}
_ = @atomicRmw(usize, elem, .Or, mask, .monotonic);
}
}

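
For readers of the new loop above, here is a minimal standalone sketch (not part of this diff; `countersToMask` is an invented name) of the same counters-to-bitmask reduction, with a test that checks it against the scalar remainder loop:

```zig
const std = @import("std");

/// Collapse one usize-sized group of 8-bit PC counters into a bitmask with
/// one bit per counter, the same reduction the vectorized loop performs
/// before the atomic OR into the shared coverage map.
fn countersToMask(counters: *const [@bitSizeOf(usize)]u8) usize {
    const V = @Vector(@bitSizeOf(usize), u8);
    const v: V = counters.*;
    const zero_v: V = @splat(0);
    // `v != zero_v` is a vector of bools; @bitCast packs it one bit per lane.
    return @bitCast(v != zero_v);
}

test countersToMask {
    var counters = [_]u8{0} ** @bitSizeOf(usize);
    counters[0] = 3;
    counters[5] = 1;
    counters[@bitSizeOf(usize) - 1] = 255;
    // Expected value computed with the scalar loop used for the remainder
    // elements in the diff above.
    var expected: usize = 0;
    for (counters, 0..) |byte, bit_index| {
        expected |= @as(usize, @intFromBool(byte != 0)) << @intCast(bit_index);
    }
    try std.testing.expectEqual(expected, countersToMask(&counters));
}
```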
36 changes: 20 additions & 16 deletions lib/fuzzer/wasm/main.zig
@@ -125,12 +125,12 @@ export fn coveredSourceLocations() usize {
}

export fn totalRuns() u64 {
const header: *abi.CoverageUpdateHeader = @ptrCast(recent_coverage_update.items[0..@sizeOf(abi.CoverageUpdateHeader)]);
const header: *abi.CoverageUpdateHeader = @alignCast(@ptrCast(recent_coverage_update.items[0..@sizeOf(abi.CoverageUpdateHeader)]));
return header.n_runs;
}

export fn uniqueRuns() u64 {
const header: *abi.CoverageUpdateHeader = @ptrCast(recent_coverage_update.items[0..@sizeOf(abi.CoverageUpdateHeader)]);
const header: *abi.CoverageUpdateHeader = @alignCast(@ptrCast(recent_coverage_update.items[0..@sizeOf(abi.CoverageUpdateHeader)]));
return header.unique_runs;
}

@@ -335,7 +335,7 @@ fn computeSourceAnnotations(
if (next_loc_index >= locs.items.len) return;
const next_sli = locs.items[next_loc_index];
const next_sl = next_sli.ptr();
if (next_sl.line > line or (next_sl.line == line and next_sl.column > column)) break;
if (next_sl.line > line or (next_sl.line == line and next_sl.column >= column)) break;
try annotations.append(gpa, .{
.file_byte_offset = offset,
.dom_id = @intFromEnum(next_sli),
@@ -349,7 +349,7 @@ var coverage = Coverage.init;
/// Index of type `SourceLocationIndex`.
var coverage_source_locations: std.ArrayListUnmanaged(Coverage.SourceLocation) = .{};
/// Contains the most recent coverage update message, unmodified.
var recent_coverage_update: std.ArrayListUnmanaged(u8) = .{};
var recent_coverage_update: std.ArrayListAlignedUnmanaged(u8, @alignOf(u64)) = .{};

fn updateCoverage(
directories: []const Coverage.String,
@@ -406,19 +406,23 @@ export fn sourceLocationFileCoveredList(sli_file: SourceLocationIndex) Slice(Sou
};
const want_file = sli_file.ptr().file;
global.result.clearRetainingCapacity();
const covered_bits = recent_coverage_update.items[@sizeOf(abi.CoverageUpdateHeader)..];

// This code assumes 64-bit elements, which is incorrect if the executable
// being fuzzed is not a 64-bit CPU. It also assumes little-endian which
// can also be incorrect.
comptime assert(abi.CoverageUpdateHeader.trailing[0] == .pc_bits_usize);
const n_bitset_elems = (coverage_source_locations.items.len + @bitSizeOf(u64) - 1) / @bitSizeOf(u64);
const covered_bits = std.mem.bytesAsSlice(
u64,
recent_coverage_update.items[@sizeOf(abi.CoverageUpdateHeader)..][0 .. n_bitset_elems * @sizeOf(u64)],
);
var sli: u32 = 0;
for (covered_bits) |byte| {
global.result.ensureUnusedCapacity(gpa, 8) catch @panic("OOM");
if ((byte & 0b0000_0001) != 0) global.add(sli + 0, want_file);
if ((byte & 0b0000_0010) != 0) global.add(sli + 1, want_file);
if ((byte & 0b0000_0100) != 0) global.add(sli + 2, want_file);
if ((byte & 0b0000_1000) != 0) global.add(sli + 3, want_file);
if ((byte & 0b0001_0000) != 0) global.add(sli + 4, want_file);
if ((byte & 0b0010_0000) != 0) global.add(sli + 5, want_file);
if ((byte & 0b0100_0000) != 0) global.add(sli + 6, want_file);
if ((byte & 0b1000_0000) != 0) global.add(sli + 7, want_file);
sli += 8;
for (covered_bits) |elem| {
global.result.ensureUnusedCapacity(gpa, 64) catch @panic("OOM");
for (0..@bitSizeOf(u64)) |i| {
if ((elem & (@as(u64, 1) << @intCast(i))) != 0) global.add(sli, want_file);
sli += 1;
}
}
return Slice(SourceLocationIndex).init(global.result.items);
}
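
A hedged sketch of the bit-per-source-location decode that the loop above performs; `appendCoveredIndices` and its test are illustrative only, not code from this PR:

```zig
const std = @import("std");

/// Decode a u64-element coverage bitset into covered source-location indices,
/// using the same one-bit-per-location scheme consumed above.
fn appendCoveredIndices(
    gpa: std.mem.Allocator,
    out: *std.ArrayListUnmanaged(u32),
    covered_bits: []const u64,
) !void {
    var sli: u32 = 0;
    for (covered_bits) |elem| {
        try out.ensureUnusedCapacity(gpa, @bitSizeOf(u64));
        for (0..@bitSizeOf(u64)) |i| {
            if ((elem & (@as(u64, 1) << @intCast(i))) != 0) out.appendAssumeCapacity(sli);
            sli += 1;
        }
    }
}

test appendCoveredIndices {
    var list: std.ArrayListUnmanaged(u32) = .{};
    defer list.deinit(std.testing.allocator);
    // Bits 0 and 3 set: source locations 0 and 3 are covered.
    try appendCoveredIndices(std.testing.allocator, &list, &.{0b1001});
    try std.testing.expectEqualSlices(u32, &.{ 0, 3 }, list.items);
}
```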
13 changes: 6 additions & 7 deletions lib/std/Build/Fuzz/WebServer.zig
@@ -7,6 +7,7 @@ const Step = std.Build.Step;
const Coverage = std.debug.Coverage;
const abi = std.Build.Fuzz.abi;
const log = std.log;
const assert = std.debug.assert;

const WebServer = @This();

@@ -383,7 +384,7 @@ fn sendCoverageContext(
// TODO: make each events URL correspond to one coverage map
const coverage_map = &coverage_maps[0];
const cov_header: *const abi.SeenPcsHeader = @ptrCast(coverage_map.mapped_memory[0..@sizeOf(abi.SeenPcsHeader)]);
const seen_pcs = coverage_map.mapped_memory[@sizeOf(abi.SeenPcsHeader) + coverage_map.source_locations.len * @sizeOf(usize) ..];
const seen_pcs = cov_header.seenBits();
const n_runs = @atomicLoad(usize, &cov_header.n_runs, .monotonic);
const unique_runs = @atomicLoad(usize, &cov_header.unique_runs, .monotonic);
const lowest_stack = @atomicLoad(usize, &cov_header.lowest_stack, .monotonic);
@@ -415,7 +416,7 @@ fn sendCoverageContext(
};
const iovecs: [2]std.posix.iovec_const = .{
makeIov(std.mem.asBytes(&header)),
makeIov(seen_pcs),
makeIov(std.mem.sliceAsBytes(seen_pcs)),
};
try web_socket.writeMessagev(&iovecs, .binary);

@@ -630,8 +631,7 @@ fn prepareTables(
gop.value_ptr.mapped_memory = mapped_memory;

const header: *const abi.SeenPcsHeader = @ptrCast(mapped_memory[0..@sizeOf(abi.SeenPcsHeader)]);
const pcs_bytes = mapped_memory[@sizeOf(abi.SeenPcsHeader)..][0 .. header.pcs_len * @sizeOf(usize)];
const pcs = std.mem.bytesAsSlice(usize, pcs_bytes);
const pcs = header.pcAddrs();
const source_locations = try gpa.alloc(Coverage.SourceLocation, pcs.len);
errdefer gpa.free(source_locations);
debug_info.resolveAddresses(gpa, pcs, source_locations) catch |err| {
@@ -648,9 +648,8 @@ fn addEntryPoint(ws: *WebServer, coverage_id: u64, addr: u64) error{ AlreadyRepo
defer ws.coverage_mutex.unlock();

const coverage_map = ws.coverage_files.getPtr(coverage_id).?;
const ptr = coverage_map.mapped_memory;
const pcs_bytes = ptr[@sizeOf(abi.SeenPcsHeader)..][0 .. coverage_map.source_locations.len * @sizeOf(usize)];
const pcs: []const usize = @alignCast(std.mem.bytesAsSlice(usize, pcs_bytes));
const header: *const abi.SeenPcsHeader = @ptrCast(coverage_map.mapped_memory[0..@sizeOf(abi.SeenPcsHeader)]);
const pcs = header.pcAddrs();
const index = std.sort.upperBound(usize, pcs, addr, struct {
fn order(context: usize, item: usize) std.math.Order {
return std.math.order(item, context);
48 changes: 42 additions & 6 deletions lib/std/Build/Fuzz/abi.zig
@@ -7,13 +7,40 @@
/// make the ints be the size of the target used with libfuzzer.
///
/// Trailing:
/// * pc_addr: usize for each pcs_len
/// * 1 bit per pc_addr, usize elements
/// * pc_addr: usize for each pcs_len
pub const SeenPcsHeader = extern struct {
n_runs: usize,
unique_runs: usize,
pcs_len: usize,
lowest_stack: usize,

/// Used for comptime assertions. Provides a mechanism for strategically
/// causing compile errors.
pub const trailing = .{
.pc_bits_usize,
.pc_addr,
};

pub fn headerEnd(header: *const SeenPcsHeader) []const usize {
const ptr: [*]align(@alignOf(usize)) const u8 = @ptrCast(header);
const header_end_ptr: [*]const usize = @ptrCast(ptr + @sizeOf(SeenPcsHeader));
const pcs_len = header.pcs_len;
return header_end_ptr[0 .. pcs_len + seenElemsLen(pcs_len)];
}

pub fn seenBits(header: *const SeenPcsHeader) []const usize {
return header.headerEnd()[0..seenElemsLen(header.pcs_len)];
}

pub fn seenElemsLen(pcs_len: usize) usize {
return (pcs_len + @bitSizeOf(usize) - 1) / @bitSizeOf(usize);
}

pub fn pcAddrs(header: *const SeenPcsHeader) []const usize {
const pcs_len = header.pcs_len;
return header.headerEnd()[seenElemsLen(pcs_len)..][0..pcs_len];
}
};

pub const ToClientTag = enum(u8) {
@@ -47,12 +74,21 @@ pub const SourceIndexHeader = extern struct {
/// changes.
///
/// Trailing:
/// * one bit per source_locations_len, contained in u8 elements
/// * one bit per source_locations_len, contained in u64 elements
pub const CoverageUpdateHeader = extern struct {
tag: ToClientTag = .coverage_update,
n_runs: u64 align(1),
unique_runs: u64 align(1),
lowest_stack: u64 align(1),
flags: Flags = .{},
n_runs: u64,
unique_runs: u64,
lowest_stack: u64,

pub const Flags = packed struct(u64) {
tag: ToClientTag = .coverage_update,
_: u56 = 0,
};

pub const trailing = .{
.pc_bits_usize,
};
};

/// Sent to the fuzzer web client when the set of entry points is updated.
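
To show how the new accessors fit together, a small illustrative sketch of reading a mapped coverage file; `summarize` is hypothetical, only `SeenPcsHeader`, `seenBits`, and `pcAddrs` come from this diff:

```zig
const std = @import("std");
const abi = std.Build.Fuzz.abi;

/// Count total PCs and covered PCs in a mapped coverage file, using only the
/// accessors introduced above. Layout: header, then the seen bits (one usize
/// word per @bitSizeOf(usize) PCs), then one usize address per PC.
fn summarize(
    mapped_memory: []align(@alignOf(abi.SeenPcsHeader)) const u8,
) struct { pcs: usize, covered: usize } {
    const header: *const abi.SeenPcsHeader = @ptrCast(mapped_memory[0..@sizeOf(abi.SeenPcsHeader)]);
    const seen_pcs = header.seenBits(); // SeenPcsHeader.seenElemsLen(pcs_len) words
    const pcs = header.pcAddrs(); // pcs_len addresses
    var covered: usize = 0;
    for (seen_pcs) |elem| covered += @popCount(elem);
    return .{ .pcs = pcs.len, .covered = covered };
}
```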
31 changes: 15 additions & 16 deletions tools/dump-cov.zig
@@ -5,6 +5,7 @@ const std = @import("std");
const fatal = std.process.fatal;
const Path = std.Build.Cache.Path;
const assert = std.debug.assert;
const SeenPcsHeader = std.Build.Fuzz.abi.SeenPcsHeader;

pub fn main() !void {
var general_purpose_allocator: std.heap.GeneralPurposeAllocator(.{}) = .{};
@@ -36,24 +37,29 @@ pub fn main() !void {
};
defer debug_info.deinit(gpa);

const cov_bytes = cov_path.root_dir.handle.readFileAlloc(arena, cov_path.sub_path, 1 << 30) catch |err| {
const cov_bytes = cov_path.root_dir.handle.readFileAllocOptions(
arena,
cov_path.sub_path,
1 << 30,
null,
@alignOf(SeenPcsHeader),
null,
) catch |err| {
fatal("failed to load coverage file {}: {s}", .{ cov_path, @errorName(err) });
};

var bw = std.io.bufferedWriter(std.io.getStdOut().writer());
const stdout = bw.writer();

const header: *align(1) SeenPcsHeader = @ptrCast(cov_bytes);
const header: *SeenPcsHeader = @ptrCast(cov_bytes);
try stdout.print("{any}\n", .{header.*});
//const n_bitset_elems = (header.pcs_len + 7) / 8;
const pcs_bytes = cov_bytes[@sizeOf(SeenPcsHeader)..][0 .. header.pcs_len * @sizeOf(usize)];
const pcs = try arena.alloc(usize, header.pcs_len);
for (0..pcs_bytes.len / @sizeOf(usize), pcs) |i, *pc| {
pc.* = std.mem.readInt(usize, pcs_bytes[i * @sizeOf(usize) ..][0..@sizeOf(usize)], .little);
const pcs = header.pcAddrs();
for (0.., pcs[0 .. pcs.len - 1], pcs[1..]) |i, a, b| {
if (a > b) std.log.err("{d}: 0x{x} > 0x{x}", .{ i, a, b });
}
assert(std.sort.isSorted(usize, pcs, {}, std.sort.asc(usize)));

const seen_pcs = cov_bytes[@sizeOf(SeenPcsHeader) + pcs.len * @sizeOf(usize) ..];
const seen_pcs = header.seenBits();

const source_locations = try arena.alloc(std.debug.Coverage.SourceLocation, pcs.len);
try debug_info.resolveAddresses(gpa, pcs, source_locations);
@@ -62,18 +68,11 @@ pub fn main() !void {
const file = debug_info.coverage.fileAt(sl.file);
const dir_name = debug_info.coverage.directories.keys()[file.directory_index];
const dir_name_slice = debug_info.coverage.stringAt(dir_name);
const hit: u1 = @truncate(seen_pcs[i / 8] >> @intCast(i % 8));
const hit: u1 = @truncate(seen_pcs[i / @bitSizeOf(usize)] >> @intCast(i % @bitSizeOf(usize)));
try stdout.print("{c}{x}: {s}/{s}:{d}:{d}\n", .{
"-+"[hit], pc, dir_name_slice, debug_info.coverage.stringAt(file.basename), sl.line, sl.column,
});
}

try bw.flush();
}

const SeenPcsHeader = extern struct {
n_runs: usize,
deduplicated_runs: usize,
pcs_len: usize,
lowest_stack: usize,
};
Loading