
Backprop with PersistentVectors

deoxyribose opened this issue 4 years ago · 1 comment

Hi! I'm trying to do inference for a factor model written in the static modeling language. I'm not sure how to work with the PersistentVectors that the combinators return, or how they interact with map_optimize. Here's the model code:

N = 1000  # number of data points
K = 1     # number of latent dimensions
D = 2     # number of observed dimensions

@gen (static) function generate_single_weight(d::Int)  # index supplied by Map (unused)
    return @trace(normal(0., 3.), :w_kd)
end;

generate_weight_vector = Map(generate_single_weight);

@gen (static) function generate_weight_vector_(k::Int)  # index supplied by Map (unused)
    return @trace(generate_weight_vector(1:D), :w_d)
end;

generate_weight_matrix = Map(generate_weight_vector_);

# Flatten a nested array (e.g. a PersistentVector of PersistentVectors) into a Matrix.
function convert_persistent_nested_array_to_matrix(nested_array)
    N, M = length(nested_array), length(nested_array[1])
    X = Array{Float64}(undef, N, M)
    for n in 1:N
        for m in 1:M
            X[n,m] = nested_array[n][m]
        end
    end
    return X
end

@gen (static) function generate_latent(k::Int)  # index supplied by Map (unused)
    return @trace(normal(0., 1.), :z_nk)
end;

generate_latent_vector = Map(generate_latent);

@gen (static) function generate_latent_variables_(n::Int)  # index supplied by Map (unused)
    return @trace(generate_latent_vector(1:K), :z_n)
end;

generate_latent_variables = Map(generate_latent_variables_);

@gen (static) function factor_model(N)
    weights = @trace(generate_weight_matrix(1:K), :W)        # K nested vectors of D weights
    zs = @trace(generate_latent_variables(1:N), :Z)          # N nested vectors of K latents
    W = convert_persistent_nested_array_to_matrix(weights)   # K×D matrix
    Z = convert_persistent_nested_array_to_matrix(zs)        # N×K matrix
    X = @trace(broadcasted_normal(Z * W, 1.), :X)            # N×D observations
end
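
As an aside, the conversion helper above can be written more compactly. The following is only a sketch (nested_to_matrix is an illustrative name, and it assumes every inner vector has the same length); it is a purely stylistic change and does not affect the map_optimize error described below.

# Compact equivalent of convert_persistent_nested_array_to_matrix:
# hcat makes each inner vector a column, permutedims turns the columns into rows.
function nested_to_matrix(nested_array)
    return permutedims(reduce(hcat, [collect(v) for v in nested_array]))
end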

My first question is: what is the proper way to implement this? The convert_persistent_nested_array_to_matrix helper was necessary to use Z and W as arguments in X = @trace(broadcasted_normal(Z * W, 1.), :X), but I'm sure there's a better way. For inference, I'm trying to do MAP:

function inference(n_iterations)
    observations = choicemap()
    observations[:X] = X  # X: the observed N×D data matrix (defined in the full code linked below)
    selection = DynamicSelection()
    push!(selection, :Z)
    push!(selection, :W)

    new_trace, = generate(factor_model, (N,), observations)
    for rep in 1:n_iterations
        new_trace = Gen.map_optimize(new_trace, selection, max_step_size=1., min_step_size=1e-10)
    end
    new_trace
end

where I get the following error:

Breaking for error:
ERROR: MethodError: no method matching zero(::Type{FunctionalCollections.PersistentVector{Float64}})
Closest candidates are:
  zero(::Type{Pkg.Resolve.VersionWeight}) at /buildworker/worker/package_linux64/build/usr/share/julia/stdlib/v1.5/Pkg/src/Resolve/versionweights.jl:15
  zero(::Type{Pkg.Resolve.FieldValue}) at /buildworker/worker/package_linux64/build/usr/share/julia/stdlib/v1.5/Pkg/src/Resolve/fieldvalues.jl:38
  zero(::Type{Dates.Date}) at /buildworker/worker/package_linux64/build/usr/share/julia/stdlib/v1.5/Dates/src/types.jl:405
  ...
Stacktrace:
 [1] zero(::FunctionalCollections.PersistentVector{FunctionalCollections.PersistentVector{Float64}}) at abstractarray.jl:975
 [2] choice_gradients(::Main.Factor.var"##StaticIRTrace_factor_model#315", ::StaticSelection{(:Z, :W),Tuple{AllSelection,AllSelection}}, ::Nothing) at /home/folzd/.julia/packages/Gen/LBj96/src/static_ir/backprop.jl:502
 [3] choice_gradients(::Main.Factor.var"##StaticIRTrace_factor_model#315", ::DynamicSelection, ::Nothing) at /home/folzd/.julia/packages/Gen/LBj96/src/static_ir/backprop.jl:396
 [4] map_optimize(::Main.Factor.var"##StaticIRTrace_factor_model#315", ::DynamicSelection; max_step_size::Float64, tau::Float64, min_step_size::Float64, verbose::Bool) at /home/folzd/.julia/packages/Gen/LBj96/src/inference/map_optimize.jl:15
 [5] (::Gen.var"#map_optimize##kw")(::NamedTuple{(:max_step_size, :min_step_size),Tuple{Float64,Float64}}, ::typeof(map_optimize), ::Main.Factor.var"##StaticIRTrace_factor_model#315", ::DynamicSelection) at /home/folzd/.julia/packages/Gen/LBj96/src/inference/map_optimize.jl:11
 [6] inference(::Int64) at /home/folzd/Julia projects/factor.jl:104

In zero(x) at abstractarray.jl:975
>975  zero(x::AbstractArray{T}) where {T} = fill!(similar(x), zero(T))

About to run: (zero)(FunctionalCollections.PersistentVector{Float64})

which also seems to be a hint that I'm supposed to convert the PersistentVector to something else somewhere. Full code is here: https://pastebin.com/CSQJzk7J. This might be related to #423.

deoxyribose avatar Oct 04 '21 14:10 deoxyribose

@deoxyribose Yes, you've hit something that we're currently investigating. Thanks for the issue, and sorry you've run into this!

zero is, in general, not defined for Vector{Vector{T}} (and the same is true for a PersistentVector of PersistentVectors).
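
A minimal illustration in plain Julia (outside of Gen), showing an analogous MethodError to the one in the stack trace above:

A = [1.0 2.0; 3.0 4.0]        # Matrix{Float64}
zero(A)                       # works: returns a 2×2 matrix of zeros

B = [[1.0, 2.0], [3.0, 4.0]]  # Vector{Vector{Float64}}
zero(B)                       # throws MethodError: no method matching zero(::Type{Vector{Float64}})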

In general, if possible, it's good if values can be coerced into Matrix{Float64} (i.e. Array{Float64, 2}). Here, with the combinators (at least for now), this is not possible.
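
For illustration only, here is a sketch of what a combinator-free version of the W and Z sites could look like, sampling them directly as matrices with broadcasted_normal (which the model above already uses for X). The function name is illustrative, and this sketch has not been checked against map_optimize:

# Sketch: trace W and Z as matrices, so the traced values are already
# Array{Float64, 2} rather than nested PersistentVectors.
@gen (static) function factor_model_matrices(N)
    W = @trace(broadcasted_normal(zeros(K, D), fill(3., K, D)), :W)  # K×D weights
    Z = @trace(broadcasted_normal(zeros(N, K), ones(N, K)), :Z)      # N×K latents
    X = @trace(broadcasted_normal(Z * W, 1.), :X)                    # N×D observations
    return X
end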

We'll keep you up to date as we investigate.

femtomc avatar Oct 22 '21 16:10 femtomc