Heston model example from DifferentialEquations.jl giving Bridging distribution error

While trying to run code that paraphrases the DifferentialEquations.jl tutorial on SDEs, I'm getting the following stack trace (only the first few lines shown):
Bridging distribution is unknown. Cannot use adapativity
Stacktrace:
[1] error(s::String)
@ Base ./error.jl:33
[2] (::DiffEqNoiseProcess.var"#106#108")(rand_vec::Vector{Float64}, W::NoiseProcess{Float64, 2, Float64, Vector{Float64}, Nothing, Nothing, DiffEqNoiseProcess.var"#105#107"{Vector{Float64}, Matrix{Float64}}, DiffEqNoiseProcess.var"#106#108", true, ResettableStacks.ResettableStack{Tuple{Float64, Vector{Float64}, Nothing}, true}, ResettableStacks.ResettableStack{Tuple{Float64, Vector{Float64}, Nothing}, true}, RSWM{Float64}, Nothing, RandomNumbers.Xorshifts.Xoroshiro128Plus}, W0::Int64, Wh::Vector{Float64}, q::Float64, h::Float64, u::Vector{Float64}, p::SciMLBase.NullParameters, t::Float64, rng::RandomNumbers.Xorshifts.Xoroshiro128Plus)
@ DiffEqNoiseProcess ~/.julia/packages/DiffEqNoiseProcess/9NzQP/src/correlated_noisefunc.jl:28
[3] reject_step!
@ ~/.julia/packages/DiffEqNoiseProcess/9NzQP/src/noise_interfaces/noise_process_interface.jl:278 [inlined]
[4] reject_step! (repeats 2 times)
@ ~/.julia/packages/StochasticDiffEq/Ysmjy/src/integrators/integrator_utils.jl:7 [inlined]
I suspect this has to do with the way I am defining the correlated Wiener processes.
Here is the block of code I'm trying to run:
# Correlated Brownian motions
# e.g. Heston model
using DifferentialEquations, Plots

heston_tspan = (0.0, 1.0)
μ = 1.0
κ = 1.0
Θ = 1.0
σ = 1.0
ρ = 0.333

function heston_drift!(du, u, p, t)
    du[1] = μ*u[1]           # asset price drift
    du[2] = κ*(Θ - u[2])     # mean-reverting variance drift
end

function heston_sigma!(du, u, p, t)
    du[1] = √u[2]*u[1]       # price diffusion
    du[2] = σ*√u[2]          # variance diffusion
end

correl = [1 ρ; ρ 1]          # correlation matrix of the two Brownian motions
heston_noise = CorrelatedWienerProcess!(correl, heston_tspan[1], zeros(2), zeros(2))
heston_problem = SDEProblem(heston_drift!, heston_sigma!, ones(2), heston_tspan, noise=heston_noise)
heston_sol = solve(heston_problem)
plot(heston_sol)
EDIT:
The solver works if I explicitly tell it not to use adaptive methods.
For example,
heston_sol = solve(heston_problem, adaptive=false, dt=0.01)
However, I don't understand why:
1. there's no bridging distribution defined for this CorrelatedWienerProcess! "object" (mathematically, it's similar to the default WienerProcess), and
2. the solve function's auto-selector does not fall back to a non-adaptive method when it fails to find a bridging distribution.
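For reference, explicitly choosing a fixed-step method such as Euler-Maruyama works too; a minimal sketch, assuming the Heston setup above:
# EM() is the fixed-step Euler-Maruyama method; since it is non-adaptive,
# it never asks the noise process for a bridging distribution.
heston_sol = solve(heston_problem, EM(), dt=0.01)
plot(heston_sol)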


JuMP: issues with expecting Float64 ("TypeError: in typeassert, expected Float64, got ForwardDiff.Dual") with autodiff = true, and problems with exp()

So I tried to make a minimal example to ask questions, based on a more complicated piece of code I have written:
A hugely common error I'm getting is "expected Float64, got ForwardDiff.Dual". Can someone give me a tip on how, in general, I can make sure I avoid this bug? I feel like every time I set up a new optimization problem I have to reinvent the wheel to make it go away.
Apparently you cannot autodiff the Julia exp() function? Does anyone know how to make it work?
As a workaround I approximated it with a finite Taylor-series sum. In my function, the autodiff worked with 20 terms, but that wasn't accurate enough; so I went to 40 terms, at which point Julia told me to use factorial(big(k)), and with that change the autodiff no longer works. Does anyone have a fix for this?
Any advice would be greatly appreciated!
using Cubature
using Juniper
using Ipopt
using JuMP
using LinearAlgebra
using Base.Threads
using Cbc
using DifferentialEquations
using Trapz
function mat_exp(x::AbstractVector{T}, dim, num_terms, A) where T
    sum = zeros(Complex{T}, (dim, dim))
    A[1,1] = A[1,1]*x[1]
    A[2,2] = A[2,2]*x[2]
    return exp(A) - 1
end

function exp_approx_no_big(x::AbstractVector{T}, dim, num_terms, A) where T
    sum = zeros(Complex{T}, (dim, dim))
    A[1,1] = A[1,1]*x[1]
    A[2,2] = A[2,2]*x[2]
    for k = 0:num_terms-1
        sum = sum + (1.0/factorial(k))*A^k
    end
    return norm(sum) - 1
end

function exp_approx_big(x::AbstractVector{T}, dim, num_terms, A) where T
    sum = zeros(Complex{T}, (dim, dim))
    A[1,1] = A[1,1]*x[1]
    A[2,2] = A[2,2]*x[2]
    for k = 0:num_terms-1
        sum = sum + (1.0/factorial(big(k)))*A^k
    end
    return norm(sum) - 1
end
optimizer = Juniper.Optimizer
nl_solver = optimizer_with_attributes(Ipopt.Optimizer, "print_level" => 0)
mip_solver = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0, "threads" => nthreads())
m = Model(optimizer_with_attributes(optimizer, "nl_solver" => nl_solver, "mip_solver" => mip_solver))
@variable(m, 0.0 <= x[1:2] <= 1.0)
dim = 5
A = zeros(Complex, (dim, dim))
for k = 1:dim
    A[k,k] = 1.0
end
println(A)
f(x...) = exp_approx_no_big(collect(x), dim, 20, A)
g(x...) = exp_approx_big(collect(x), dim, 40, A)
h(x...) = mat_exp(collect(x), dim, 20, A)
register(m, :f, 2, f; autodiff = true)
@NLobjective(m, Min, f(x...))
optimize!(m)
println(JuMP.value.(x))
println(JuMP.objective_value(m))
println(JuMP.termination_status(m))
There are quite a few problems with your mat_exp function:
It modifies A in-place, so repeated calls will not do what you think.
It returns exp(A) - 1, which is a matrix; JuMP only supports user-defined functions that return a scalar.
You probably meant norm(exp(A)) - 1.
But ForwardDiff doesn't support differentiating through the matrix exponential exp:
julia> using ForwardDiff
julia> function mat_exp(x::AbstractVector{T}) where {T}
           A = zeros(Complex{T}, (dim, dim))
           for k = 1:dim
               A[k, k] = one(T)
           end
           A[1, 1] = A[1, 1] * x[1]
           A[2, 2] = A[2, 2] * x[2]
           return norm(exp(A)) - one(T)
       end
mat_exp (generic function with 3 methods)
julia> ForwardDiff.gradient(mat_exp, [0.5, 0.5])
ERROR: MethodError: no method matching exp(::Matrix{Complex{ForwardDiff.Dual{ForwardDiff.Tag{typeof(mat_exp), Float64}, Float64, 2}}})
Closest candidates are:
exp(::StridedMatrix{var"#s832"} where var"#s832"<:Union{Float32, Float64, ComplexF32, ComplexF64}) at /Users/julia/buildbot/worker/package_macos64/build/usr/share/julia/stdlib/v1.6/LinearAlgebra/src/dense.jl:557
exp(::StridedMatrix{var"#s832"} where var"#s832"<:Union{Integer, Complex{var"#s831"} where var"#s831"<:Integer}) at /Users/julia/buildbot/worker/package_macos64/build/usr/share/julia/stdlib/v1.6/LinearAlgebra/src/dense.jl:558
exp(::Diagonal) at /Users/julia/buildbot/worker/package_macos64/build/usr/share/julia/stdlib/v1.6/LinearAlgebra/src/diagonal.jl:603
...
Stacktrace:
[1] mat_exp(x::Vector{ForwardDiff.Dual{ForwardDiff.Tag{typeof(mat_exp), Float64}, Float64, 2}})
@ Main ./REPL[34]:8
[2] vector_mode_dual_eval!(f::typeof(mat_exp), cfg::ForwardDiff.GradientConfig{ForwardDiff.Tag{typeof(mat_exp), Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{typeof(mat_exp), Float64}, Float64, 2}}}, x::Vector{Float64})
@ ForwardDiff ~/.julia/packages/ForwardDiff/jJIvy/src/apiutils.jl:37
[3] vector_mode_gradient(f::typeof(mat_exp), x::Vector{Float64}, cfg::ForwardDiff.GradientConfig{ForwardDiff.Tag{typeof(mat_exp), Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{typeof(mat_exp), Float64}, Float64, 2}}})
@ ForwardDiff ~/.julia/packages/ForwardDiff/jJIvy/src/gradient.jl:106
[4] gradient(f::Function, x::Vector{Float64}, cfg::ForwardDiff.GradientConfig{ForwardDiff.Tag{typeof(mat_exp), Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{typeof(mat_exp), Float64}, Float64, 2}}}, ::Val{true})
@ ForwardDiff ~/.julia/packages/ForwardDiff/jJIvy/src/gradient.jl:19
[5] gradient(f::Function, x::Vector{Float64}, cfg::ForwardDiff.GradientConfig{ForwardDiff.Tag{typeof(mat_exp), Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{typeof(mat_exp), Float64}, Float64, 2}}}) (repeats 2 times)
@ ForwardDiff ~/.julia/packages/ForwardDiff/jJIvy/src/gradient.jl:17
[6] top-level scope
@ REPL[35]:1
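On the broader "expected Float64, got ForwardDiff.Dual" complaint: that error usually means a Float64 container or type assertion is hard-coded inside the function being differentiated, so allocating with the input's element type fixes it. A minimal sketch, assuming ForwardDiff is the AD backend:
using ForwardDiff

# bad hard-codes Float64 storage, so a Dual number cannot be written into it
# and differentiation fails; good parameterizes the buffer on the input type T.
bad(x) = sum(Float64[x[1]^2, x[2]^2])
function good(x::AbstractVector{T}) where {T}
    buf = zeros(T, 2)        # element type follows the input (Float64 or Dual)
    buf[1] = x[1]^2
    buf[2] = x[2]^2
    return sum(buf)
end

ForwardDiff.gradient(good, [1.0, 2.0])    # works: returns [2.0, 4.0]
# ForwardDiff.gradient(bad, [1.0, 2.0])   # fails: a Dual cannot be stored in Float64[]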
I also don't know why you're using Juniper, or why you have a bunch of other packages loaded.
If you want to have a discussion on this, come join the community forum: https://discourse.julialang.org/c/domain/opt/13. (It's much better for back-and-forth than Stack Overflow.) Someone might have suggestions, but I don't know of an AD tool in Julia that can differentiate through a matrix exponential.
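If you must optimize something involving exp(A) anyway, one pragmatic route is to register the function with a hand-coded gradient so ForwardDiff is never called. A rough sketch: fd_gradient! below is a hypothetical helper using forward differences with an ad-hoc step size, and it assumes f has first been fixed to be scalar-valued and side-effect-free.
# Hypothetical helper: forward-difference gradient for the registered f.
# JuMP's register accepts an explicit gradient function that fills g in-place,
# in which case no automatic differentiation is attempted.
function fd_gradient!(g, x...)
    h = 1e-7                         # ad-hoc step size
    fx = f(x...)
    for i in 1:length(x)
        xp = collect(float.(x))
        xp[i] += h
        g[i] = (f(xp...) - fx) / h   # forward difference in coordinate i
    end
    return
end

register(m, :f, 2, f, fd_gradient!)  # gradient supplied, so autodiff is not needed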

LoadError: UndefVarError: @defVar not defined

I have a piece of code using JuMP. When I run it, it says LoadError: UndefVarError: @defVar not defined. I have tried using global forward or backward, but both fail.
See:
function T1(w_func, grid_b, β, u, z)
    # objective for each grid point
    for j in 1:cp.Nb
        b = grid_b[j]
        choice1 = Model(solver=GLPKSolverLP())
        @defVar(choice1, a >= 0)
        @setObjective(choice1, Max, u(a) + cp.β * (w_func.((b*(1+cp.r)+cp.w-a) .* cp.z[i])))
        results1 = solve(choice1)
        Tw1 = getObjectiveValue(choice1)
        c_choice1 = getValue(x)
        return Tw, σ
    end
end
LoadError: UndefVarError: @defVar not defined
in expression starting at In[44]:37
Stacktrace:
[1] top-level scope
@ :0
[2] eval
@ ./boot.jl:360 [inlined]
[3] include_string(mapexpr::typeof(REPL.softscope), mod::Module, code::String, filename::String)
@ Base ./loading.jl:1094
Thanks.
It seems that you're using outdated code. Look at the current documentation and make sure you have installed the latest versions of the libraries and of Julia itself.
In short, @defVar and @setObjective were replaced by @variable and @objective, respectively.
function T1(w_func, grid_b, β, u, z)
    # objective for each grid point
    for j in 1:cp.Nb
        b = grid_b[j]
        choice1 = Model(solver=GLPKSolverLP())
        @variable(choice1, a >= 0)
        @objective(choice1, Max, u(a) + cp.β * (w_func.((b*(1+cp.r)+cp.w-a) .* cp.z[i])))
        results1 = solve(choice1)
        Tw1 = getObjectiveValue(choice1)
        c_choice1 = getValue(x)
        return Tw, σ
    end
end
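Note that Model(solver=GLPKSolverLP()), solve, getObjectiveValue, and getValue are also from the old API. In current JuMP the same skeleton looks roughly like the sketch below, with a placeholder linear objective, since GLPK cannot handle the nonlinear u(a) term:
# A sketch of the equivalent skeleton in current JuMP + GLPK syntax.
using JuMP, GLPK

choice1 = Model(GLPK.Optimizer)
@variable(choice1, 0 <= a <= 1)       # bounded so the toy LP has a solution
@objective(choice1, Max, 2a)          # placeholder for u(a) + cp.β * ...
optimize!(choice1)
Tw1 = objective_value(choice1)
c_choice1 = value(a)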

How to pass a list of parameters to workers in Julia Distributed

With Julia 1.5.3, I wanted to pass a list of parameters to the distributed workers.
I first tried it in a non-distributed way:
using Distributed
@everywhere begin
    using SharedArrays
    solve(a, b, c) = return (1, 2, 3)
    d_rates = LinRange(0.01, 0.33, 5)
    m_rates = LinRange(0.01, 0.25, 5)
    population_size = 10^3
    max_iterations_perloop = 10^3
    nb_repeats = 2
    nb_params = length(d_rates)*length(m_rates)*nb_repeats
    para = enumerate(Base.product(d_rates, m_rates, population_size, max_iterations_perloop, 1:nb_repeats))
    results = SharedArray{Tuple{Int, Int, Int}}(nb_params)
end
for (y, x) in para
    results[y] = solve(x[1], x[2], x[3])
end
which worked fine. I then changed the final loop to:
@sync @distributed for (y, x) in para
    results[y] = solve(x[1], x[2], x[3])
end
I then got an error (truncated):
ERROR: LoadError: TaskFailedException:
MethodError: no method matching firstindex(::Base.Iterators.Enumerate{Base.Iterators.ProductIterator{Tuple{LinRange{Float64},LinRange{Float64},Int64,Int64,UnitRange{Int64}}}})
Closest candidates are:
firstindex(::Cmd) at process.jl:638
firstindex(::Core.SimpleVector) at essentials.jl:599
firstindex(::Base64.Buffer) at /buildworker/worker/package_linux64/build/usr/share/julia/stdlib/v1.5/Base64/src/buffer.jl:18
...
Stacktrace:
[1] (::Distributed.var"#159#161"{var"#271#272",Base.Iterators.Enumerate{Base.Iterators.ProductIterator{Tuple{LinRange{Float64},LinRange{Float64},Int64,Int64,UnitRange{Int64}}}}})() at ./task.jl:332
Stacktrace:
[1] sync_end(::Channel{Any}) at ./task.jl:314
[2] top-level scope at task.jl:333
[3] include_string(::Function, ::Module, ::String, ::String) at ./loading.jl:1088
[4] include_string(::Module, ::String, ::String) at ./loading.jl:1096
[5] invokelatest(::Any, ::Any, ::Vararg{Any,N} where N; kwargs::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}) at ./essentials.jl:710
[6] invokelatest(::Any, ::Any, ::Vararg{Any,N} where N) at ./essentials.jl:709
Is it possible to pass such a list, and if so, how?
I assume that all your workers are on a single server and that you have actually added some workers using the addprocs command. The first problem with your code is that you create the SharedArray on all workers. That is not how SharedArray is meant to be used; its constructor is:
help?> SharedArray
SharedArray{T}(dims::NTuple; init=false, pids=Int[])
SharedArray{T,N}(...)
Construct a SharedArray of a bits type T and size dims across the processes specified by pids - all of which have to be on the same host. (...)
This means that you create the SharedArray only once, from the master process, and you can specify the workers that are aware of it using the pids argument (if you do not specify pids, all worker processes have access).
Hence your code will look like this:
using Distributed, SharedArrays
addprocs(4)
@everywhere using SharedArrays
@everywhere solve(a, b, c) = return (1, 2, 3)
# (...) your setup code without @everywhere
results = SharedArray{Tuple{Int, Int, Int}}(nb_params)
@sync @distributed for (y, x) in collect(para)
    results[y] = solve(x[1], x[2], x[3])
end
Note that you will need collect because the @distributed macro needs to know the size of the Vector; it does not work well with iterators.
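As an aside, an alternative sketch (my suggestion, not required by the answer above): pmap maps over any collection and gathers the results itself, so neither a SharedArray nor the collect bookkeeping is strictly needed.
# Alternative sketch with pmap: results come back as an ordered Vector.
# Assumes the @everywhere definitions and `para` from above.
results = pmap(collect(para)) do (y, x)
    solve(x[1], x[2], x[3])   # y (the index) is unused; pmap preserves order
end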

In Julia, creating a Weights vector in statsbase

I am playing a bit with Julia.
Consider this function:
using StatsBase, Plots

function drawValues(fromDistribution, byCount)
    #=
    inputs:
    fromDistribution :
        A 2D array
        Each element is an array with two elements
        The first one is a value, and the second one is the probability of that value
        We will draw a value out of this distribution from a random number generator
    byCount :
        An integer
        We draw that many values from the source distribution
    =#
    values = []
    wts = []
    for i = 1:length(fromDistribution)
        push!(values, fromDistribution[i][1])
        push!(wts, fromDistribution[i][2])
    end
    w = Weights(wts)
    res = []
    for i = 1:byCount
        r = sample(values, w)
        push!(res, r)
    end
    plot(values, wts)
    print(res)
end
This throws the error:
ERROR: MethodError: no method matching Weights(::Array{Any,1}, ::Float64)
Closest candidates are:
Weights(::var"#18#V", ::var"#16#S") where {var"#16#S"<:Real, var"#17#T"<:Real, var"#18#V"<:AbstractArray{var"#17#T",1}} at /home/hedgehog/.julia/packages/StatsBase/EA8Mh/src/weights.jl:13
Weights(::Any) at /home/hedgehog/.julia/packages/StatsBase/EA8Mh/src/weights.jl:16
Stacktrace:
[1] Weights(::Array{Any,1}) at /home/hedgehog/.julia/packages/StatsBase/EA8Mh/src/weights.jl:16
[2] drawValues(::Array{Array{Float64,1},1}, ::Int64) at /home/hedgehog/LASER.jl:51
[3] top-level scope at REPL[13]:1
[4] run_repl(::REPL.AbstractREPL, ::Any) at /build/julia/src/julia-1.5.3/usr/share/julia/stdlib/v1.5/REPL/src/REPL.jl:288
It seems that the second definition (Weights(::Any), called here with an Array{Any,1}) would fit. But somehow Julia sees two input arguments?
Please help.
Version details:
Julia Version 1.5.3
Commit 788b2c77c1* (2020-11-09 13:37 UTC)
Platform Info:
OS: Linux (x86_64-pc-linux-gnu)
CPU: AMD Ryzen 7 3700X 8-Core Processor
WORD_SIZE: 64
LIBM: libopenlibm
LLVM: libLLVM-10.0.1 (ORCJIT, znver2)
Your vectors have elements of type Any. It should be:
wts = Float64[]
When you write wts = [], it is equivalent to wts = Any[].
Have a look at the methods of weights:
julia> methods(weights)
# 3 methods for generic function "weights":
[1] weights(vs::AbstractArray{T,1} where T<:Real) in StatsBase at c:\JuliaPkg\Julia1.5.3\packages\StatsBase\EA8Mh\src\weights.jl:76
[2] weights(vs::AbstractArray{T,N} where N where T<:Real) in StatsBase at c:\JuliaPkg\Julia1.5.3\packages\StatsBase\EA8Mh\src\weights.jl:77
[3] weights(model::StatisticalModel) in StatsBase at c:\JuliaPkg\Julia1.5.3\packages\StatsBase\EA8Mh\src\statmodels.jl:143
A container whose elements are a subtype of Real is required.
Similarly, providing element types for the other containers is recommended as well:
values = Float64[]
res = Float64[] # or maybe Int[], depending on what your code does
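Applied to the original function, a sketch with concretely typed containers (assuming StatsBase and Plots are loaded) could look like:
# drawValues with typed containers, so Weights(wts) gets a Vector{Float64}
# and dispatches to the Real-element method.
function drawValues(fromDistribution, byCount)
    values = Float64[]
    wts = Float64[]
    for pair in fromDistribution
        push!(values, pair[1])   # the value
        push!(wts, pair[2])      # its probability weight
    end
    w = Weights(wts)
    res = [sample(values, w) for _ in 1:byCount]
    plot(values, wts)
    print(res)
end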

My julia ray tracer returns StackOverFlowError

I'm trying to write a ray tracer in Julia, but my main function throws a StackOverflowError. Below is my main function:
function trace(ray::Ray, surfaces::Array{Any, 1}, depth::Int64, maxDepth::Int64)
    material, t = findIntersection(ray, surfaces, Inf)
    if typeof(material) == Empty
        Vec3(0,0,0)
    end
    if depth > maxDepth
        Vec3(0,0,0)
    end
    if material.isLight == true
        material.emittance
    end
    normal = material.normal
    ρ = material.reflectance
    BRDF = ρ/3.14159
    R = randHemi(normal)
    cosθ = dot(R, normal)
    newRay = Ray(ray.s + t*ray.d, R)
    In = trace(newRay, surfaces, depth+1, maxDepth)
    2.0*3.14159*BRDF*In
end
And here is my intersection function:
function findIntersection(ray::Ray, surfaces::Array{Any, 1}, tmin::Float64)
    hitSurface = Empty(Vec3(0,0,0), Vec3(0,0,0), Vec3(0,0,0), false)
    for surface in surfaces
        t = intersect(surface, ray)
        if t < tmin
            hitSurface = surface
            tmin = t
        end
    end
    return hitSurface, tmin
end
But I receive this error:
StackOverflowError:
Stacktrace:
[1] findIntersection(::Ray, ::Array{Any,1}, ::Float64) at .\In[31]:10
[2] trace(::Ray, ::Array{Any,1}, ::Int64, ::Int64) at .\In[33]:2
[3] trace(::Ray, ::Array{Any,1}, ::Int64, ::Int64) at .\In[33]:18 (repeats 14493 times)
[4] top-level scope at In[35]:1
What is the cause of this error?
Your problem is that you missed a return. What you meant to have was:
if depth > maxDepth
    return Vec3(0,0,0)
end
Without the return, that line just allocates a Vec3, and execution falls through to the recursive call, so the recursion never terminates.
A StackOverflowError usually occurs when there are too many recursive calls; in a nutshell, in most programming languages there is a limit on how many recursive calls you can make.
In your example, the trace function calls itself recursively, and that can cause a stack overflow. You have a parameter maxDepth that can limit that; setting it to a lower value will probably solve this particular issue.
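Putting both answers together, a sketch of trace with the missing returns restored (Ray, Vec3, Empty, and randHemi as defined in the question):
# The question's trace with `return` added to each early-exit branch,
# so the base cases actually terminate the recursion.
function trace(ray::Ray, surfaces::Array{Any, 1}, depth::Int64, maxDepth::Int64)
    material, t = findIntersection(ray, surfaces, Inf)
    if typeof(material) == Empty
        return Vec3(0, 0, 0)         # no intersection: return black
    end
    if depth > maxDepth
        return Vec3(0, 0, 0)         # depth cap reached: stop recursing
    end
    if material.isLight == true
        return material.emittance    # hit a light: return its emission
    end
    normal = material.normal
    ρ = material.reflectance
    BRDF = ρ/3.14159
    R = randHemi(normal)             # random direction in the hemisphere
    cosθ = dot(R, normal)
    newRay = Ray(ray.s + t*ray.d, R)
    In = trace(newRay, surfaces, depth + 1, maxDepth)
    return 2.0*3.14159*BRDF*In
end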
