I am trying to do something like this
module DummyModule
using Pumas
mwe(nt) = @eval @model begin
@param begin
Ω ∈ PDiagDomain($nt.dim_Ω)
end
@random begin
η ~ MvNormal(Ω)
end
@derived begin
dv ~ @. Normal(η, 1)
end
end
function dummy_fitting(pop, nts)
map(nts) do nt
m = mwe(nt)
fit(m, pop, init_params(m), JointMAP(), optim_options = (; iterations = 1))
end
end
export mwe, dummy_fitting
end
using .DummyModule
using Pumas
n = 3
dummy_pop = [
Subject(; id = "1", observations = (; dv = rand(3)), time = collect(LinRange(0, 1, n)));
]
nts = [
(; dim_Ω = n);
]
dummy_fitting(dummy_pop, nts)
I’ll need to evaluate many combinations of hyper parameters, that’s why I am using a function, rather than writing out many individual models, which would be impractical. However, when I call dummy_fitting I get an error:
ERROR: TaskFailedException
Stacktrace:
[1] wait(t::Task)
@ Base .\task.jl:370
[2] fetch
@ .\task.jl:390 [inlined]
[3] __tmapreduce(f::Pumas.var"#756#757"{…}, op::typeof(+), tasks::Vector{…}, len::Int64, init::Float64, src::Tuple{…}, batchargs::Tuple{…})
@ Pumas .\none:548
[4] _tmapreduce
@ .\none:570 [inlined]
[5] tmapreduce(f::Function, ::Type{…}, op::Function, src::Tuple{…}, batchargs::Tuple{…}; init::Float64, tasks::Vector{…})
@ Pumas .\none:581
[6] tmapreduce
@ .\none:572 [inlined]
[7] _logdensitygrad(b::Pumas.ThreadedBayesLogDensity{…}, v::Vector{…})
@ Pumas .\none:489
[8] logdensity_and_gradient(b::Pumas.ThreadedBayesLogDensity{…}, v::Vector{…})
@ Pumas .\none:71
[9] (::Pumas.var"#859#862"{Pumas.ThreadedBayesLogDensity{…}})(f::Float64, g::Vector{Float64}, vparam::Vector{Float64})
@ Pumas .\none:236
[10] (::NLSolversBase.var"#51#52"{NLSolversBase.InplaceObjective{…}, Float64})(G::Vector{Float64}, x::Vector{Float64})
@ NLSolversBase C:\Users\D-LINKEVICIUS\.julia\packages\NLSolversBase\n7XXO\src\objective_types\incomplete.jl:54
[11] value_gradient!!(obj::NLSolversBase.OnceDifferentiable{Float64, Vector{Float64}, Vector{Float64}}, x::Vector{Float64})
@ NLSolversBase C:\Users\D-LINKEVICIUS\.julia\packages\NLSolversBase\n7XXO\src\interface.jl:82
[12] initial_state(method::Optim.LBFGS{…}, options::Optim.Options{…}, d::NLSolversBase.OnceDifferentiable{…}, initial_x::Vector{…})
@ Optim C:\Users\D-LINKEVICIUS\.julia\packages\Optim\7krni\src\multivariate\solvers\first_order\l_bfgs.jl:168
[13] DefaultOptimizeFN
@ .\none:3780 [inlined]
[14] DefaultOptimizeFN
@ .\none:3770 [inlined]
[15] _fit_jointmap(bayes::Pumas.ThreadedBayesLogDensity{…}, init_randeffs::Nothing, optimize_fn::Pumas.DefaultOptimizeFN{…}, cb::Returns{…})
@ Pumas .\none:251
[16] _fit(model::PumasModel{…}, data::Vector{…}, param::@NamedTuple{…}, alg::JointMAP{…}, constantcoef::Tuple{}, init_randeffs::Nothing, ignore_numerical_error::Bool)
@ Pumas .\none:174
[17] #fit#857
@ .\none:85 [inlined]
[18] (::Main.DummyModule.var"#1#2"{Vector{Subject{…}}})(nt::@NamedTuple{dim_Ω::Int64})
@ Main.DummyModule c:\Users\D-LINKEVICIUS\GeneralSciMLHHModels.jl\mwe_pumas.jl:21
[19] iterate
@ .\generator.jl:48 [inlined]
[20] _collect(c::Vector{…}, itr::Base.Generator{…}, ::Base.EltypeUnknown, isz::Base.HasShape{…})
@ Base .\array.jl:811
[21] collect_similar
@ .\array.jl:720 [inlined]
[22] map
@ .\abstractarray.jl:3371 [inlined]
[23] dummy_fitting(pop::Vector{Subject{…}}, nts::Vector{@NamedTuple{…}})
@ Main.DummyModule c:\Users\D-LINKEVICIUS\GeneralSciMLHHModels.jl\mwe_pumas.jl:19
[24] top-level scope
@ c:\Users\D-LINKEVICIUS\GeneralSciMLHHModels.jl\mwe_pumas.jl:40
nested task error: MethodError: no method matching (::Main.DummyModule.var"#3#8")(::@NamedTuple{Ω::PDMats.PDiagMat{ForwardDiff.Dual{…}, Vector{…}}}, ::@NamedTuple{})
The function `#3` exists, but no method is defined for this combination of argument types.
Closest candidates are:
(::Main.DummyModule.var"#3#8")(::NamedTuple, ::NamedTuple{()}) (method too new to be called from this world context.)
@ Main.DummyModule none:472
Stacktrace:
[1] (::Pumas.RandomObj{…})(::Subject{…}, param::@NamedTuple{…})
@ Pumas .\none:447
[2] _penalized_conditional_nll(model::PumasModel{…}, subject::Subject{…}, param::@NamedTuple{…}, vrandeffsorth::SubArray{…}, diffeq_options::@NamedTuple{…})
@ Pumas .\none:1396
ientConfig{…})
@ ForwardDiff C:\Users\D-LINKEVICIUS\.julia\packages\ForwardDiff\X74OO\src\gradient.jl:98
[6] gradient!(result::DiffResults.MutableDiffResult{…}, f::Pumas._L_rfx{…}, x::Vector{…}, cfg::ForwardDiff.GradientConfig{…}, ::Val{…})
@ ForwardDiff C:\Users\D-LINKEVICIUS\.julia\packages\ForwardDiff\X74OO\src\gradient.jl:39
[7] value_and_gradient!(::Pumas._L_rfx{…}, ::Vector{…}, ::DifferentiationInterfaceForwardDiffExt.ForwardDiffGradientPrep{…}, ::ADTypes.AutoForwardDiff{…}, ::Vector{…})
@ DifferentiationInterfaceForwardDiffExt C:\Users\D-LINKEVICIUS\.julia\packages\DifferentiationInterface\zJHX8\ext\DifferentiationInterfaceForwardDiffExt\onearg.jl:396
[8] (::Pumas.var"#756#757"{…})(i::Int64, buffer1_i::Vector{…}, buffer2_i::Vector{…}, res_i::Vector{…}, cfg_rfx_i::DifferentiationInterfaceForwardDiffExt.ForwardDiffGradientPrep{…})
@ Pumas .\none:502
[9] macro expansion
@ .\none:562 [inlined]
[10] macro expansion
@ .\simdloop.jl:77 [inlined]
[11] batch_mapreduce
@ .\none:560 [inlined]
[12] (::Pumas.var"#758#759"{Float64, UnitRange{…}, Pumas.var"#756#757"{…}, typeof(+), Tuple{…}, Tuple{…}})()
@ Pumas .\none:543
Some type information was truncated. Use `show(err)` to see complete types.
I assume this is due to an anonymous function in @random which stays in DummyModule rather than where it should be, which I’d guess is Main. Is there a way to do this that would avoid having to use other external packages?
I am working with an academic license of DeepPumas v0.9.0.