endless semantic analysis with 0.10.0 and dependant types
Zig Version
0.10.0
Steps to Reproduce and Observed Behavior
I've encountered this issue while working on a protobuf implementation. The following is the smallest repro I can make. Setting oom_trigger = false allows the program to compile and the test segfaults as expected. The problem seems to be related to using io.limitedReader() (possibly related to the error set analysis?). It's strange to me that this only happens after introducing io.limitedReader().
// /tmp/test.zig
const std = @import("std");
const Allocator = std.mem.Allocator;
// Fixed, explicit error set shared by both deserialize functions below.
pub const Error = error{ Overflow, EndOfStream };
// Toggle for the bug: true wraps the reader each recursion (endless semantic
// analysis in 0.10.0); false passes the reader through unchanged (compiles,
// then segfaults at runtime from the mutual recursion, as expected).
const oom_trigger = true;
const A = struct {
b: *B = undefined,
// Mutually recursive with B.deserialize. Because `reader` is anytype, every
// distinct reader type causes a fresh instantiation of this function.
fn deserialize(self: *A, allocator: Allocator, reader: anytype) Error!void {
try self.b.deserialize(allocator, reader);
}
};
const B = struct {
a: *A = undefined,
// Mirror of A.deserialize. With oom_trigger set, the reader is wrapped in a
// new LimitedReader before recursing, so each level of the recursion hands
// the compiler a brand-new reader type — this is what appears to drive
// semantic analysis forever in 0.10.0.
fn deserialize(self: *B, allocator: Allocator, reader: anytype) Error!void {
if (oom_trigger) {
var limreader = std.io.limitedReader(reader, 1);
try self.a.deserialize(allocator, limreader.reader());
} else {
try self.a.deserialize(allocator, reader);
}
}
};
// Entry point of the repro. A.deserialize and B.deserialize call each other
// unconditionally, so at runtime this would overflow the stack (the expected
// segfault). `undefined` is passed as the allocator because nothing in this
// reduced repro observably allocates before the recursion.
test {
var x: A = .{};
var fbs = std.io.fixedBufferStream("\xFF\xFF\xFF\x00");
try x.deserialize(undefined, fbs.reader());
}
$ zig version
0.10.0
$ zig test /tmp/test.zig
Semantic Analysis [10882] readByte... ^C
# Note: I've killed the program here with ctrl+c
possibly related to #4572
Expected Behavior
The test should compile, and then segfault at runtime due to the infinite recursion (a.deserialize() -> b.deserialize() -> a.deserialize() ...).
this is an intentional feature of using anytype. std.io.Readers are all technically a new type and so when you wrap each reader in a std.io.limitedReader it creates a new type and thus tells the compiler to make a new instantiation of deserialize for it. but some safety against this recursion would be nice. great find!
@nektro please leave it to me or someone else on the core zig team to say whether something is an intentional feature. I do not agree with your assessment.
Just wanted to share two workarounds offered by @SpexGuy on discord in case anyone else has this issue. Either of these allowed my project to compile.
The first is to check at comptime whether the reader is already an io.limitedReader and, if not, wrap the user-provided reader once in an io.limitedReader(reader, std.math.maxInt(usize)) and re-call deserialize() with this limited_reader. Then, rather than rewrapping the reader each time I need to ensure that only N bytes are read, reuse the existing state by saving and updating reader.bytes_left.
Here is what that looks like:
// Workaround 1: ensure the reader is wrapped in exactly one LimitedReader,
// then reuse its `bytes_left` budget instead of stacking new wrapper types
// (which is what triggers the endless generic instantiation).
pub fn deserialize(self: *B, allocator: Allocator, reader: anytype) Error!void {
// Detect an existing LimitedReader by inspecting the reader's context type;
// the context may be a pointer, so unwrap one level of indirection first.
const Context = @TypeOf(reader.context);
const context_info = @typeInfo(Context);
const ContextChild = switch (context_info) {
.Pointer => context_info.Pointer.child,
else => Context,
};
if (!@hasField(ContextChild, "bytes_left")) {
// Not limited yet: wrap once with an effectively-unlimited budget and
// recurse — from here on the reader type is stable.
var limreader = std.io.limitedReader(reader, std.math.maxInt(usize));
return self.deserialize(allocator, limreader.reader());
}
const len = 1; //try decoding.readVarint128(usize, reader, .int);
// Save the current budget, narrow it to this field's length, and restore
// the remaining budget afterwards instead of wrapping a new reader.
const bytes_left = reader.context.bytes_left;
reader.context.bytes_left = len;
// Bug fix: the error union returned here was previously discarded, which
// is a compile error in Zig — propagate it with `try`.
try self.a.deserialize(allocator, reader);
reader.context.bytes_left = bytes_left - len;
}
The second approach uses virtual dispatch and allows re-wrapping the reader:
// /tmp/test.zig
const std = @import("std");
const Allocator = std.mem.Allocator;
// Explicit error set shared by the virtual reader and both deserialize fns.
pub const Error = error{ Overflow, EndOfStream };
const A = struct {
b: *B = undefined,
// Wraps the incoming reader in a LimitedReader, then type-erases it with
// virtualReader() so the recursive call always receives the same concrete
// type (VirtualReader(Error)) rather than an ever-deeper wrapper type.
fn deserialize(self: *A, allocator: Allocator, reader: anytype) Error!void {
var limit_reader = std.io.limitedReader(reader, 1);
return self.b.deserialize(allocator, virtualReader(&limit_reader));
}
};
const B = struct {
a: *A = undefined,
// Mirror of A.deserialize: re-wrapping is safe here because the type-erased
// VirtualReader(Error) keeps the instantiation count finite.
fn deserialize(self: *B, allocator: Allocator, reader: anytype) Error!void {
var limit_reader = std.io.limitedReader(reader, 1);
return self.a.deserialize(allocator, virtualReader(&limit_reader));
}
};
// Builds a type-erased reader type for a given error set: one concrete
// std.io.Reader whose context holds an opaque pointer plus a read function
// pointer. Wrapping readers repeatedly no longer produces new types, which
// breaks the endless chain of generic instantiations.
fn VirtualReader(comptime ErrSet: type) type {
const VirtualReaderImpl = struct {
internalContext: *anyopaque,
readFn: *const fn (context: *anyopaque, buffer: []u8) ErrSet!usize,
// Forward reads through the stored function pointer (virtual dispatch).
pub fn read(context: @This(), buffer: []u8) ErrSet!usize {
return context.readFn(context.internalContext, buffer);
}
};
return std.io.Reader(VirtualReaderImpl, ErrSet, VirtualReaderImpl.read);
}
// Casts an opaque pointer back to its concrete pointer type, restoring
// alignment first. Uses the 0.10-era two-argument @ptrCast/@alignCast forms.
fn ptrAlignCast(comptime Ptr: type, ptr: *anyopaque) Ptr {
return @ptrCast(Ptr, @alignCast(@typeInfo(Ptr).Pointer.alignment, ptr));
}
// Type-erases any reader implementation behind VirtualReader(ErrSet).
// NOTE: `reader_impl_ptr` must outlive the returned reader — only its
// address is stored in the opaque context.
fn virtualReader(reader_impl_ptr: anytype) VirtualReader(@TypeOf(reader_impl_ptr.reader()).Error) {
const ErrSet = @TypeOf(reader_impl_ptr.reader()).Error;
const ReaderImplPtr = @TypeOf(reader_impl_ptr);
// Per-implementation trampoline: recovers the concrete pointer type from
// the opaque context and forwards the read call.
const gen = struct {
pub fn read(context: *anyopaque, buffer: []u8) !usize {
return ptrAlignCast(ReaderImplPtr, context).reader().read(buffer);
}
};
return VirtualReader(ErrSet){ .context = .{
.internalContext = reader_impl_ptr,
.readFn = gen.read,
} };
}
// Same driver as the original repro; with the virtual-dispatch workaround
// this compiles, and the mutual recursion then fails at runtime instead of
// hanging the compiler.
test {
var x: A = .{};
var fbs = std.io.fixedBufferStream("\xFF\xFF\xFF\x00");
try x.deserialize(undefined, fbs.reader());
}
Reduction from the linked issue:
// Minimal reduction: foo recurses on itself with an anytype parameter.
// The recursion here is intentional — the point is how the compiler's
// semantic analysis handles a generic function that calls itself
// (presumably the same analysis loop as the reader repro above — the
// exact mechanism is not shown here).
fn foo(f: anytype) void {
f();
foo(f);
}
fn bar() void {}
pub fn main() void {
foo(bar);
}