@@ -16,6 +16,7 @@ using LatinHypercubeSampling
1616using OptimizationOptimJL
1717using OptimizationNLopt
1818using Test
19+ using StableRNGs
1920```
2021
2122## Setting up the problem
@@ -24,7 +25,7 @@ Let us start by defining the data and the likelihood problem:
2425
2526``` julia
2627# # Step 1: Generate some data for the problem and define the likelihood
27- Random.seed!(2992999)
28+ rng = StableRNG(2992999)
2829λ = - 0.5
2930y₀ = 15.0
3031σ = 0.5
@@ -33,16 +34,14 @@ n = 450
3334Δt = T / n
3435t = [j * Δt for j in 0 : n]
3536y = y₀ * exp .(λ * t)
36- yᵒ = y .+ [0.0, rand(Normal(0, σ), n)...]
37+ yᵒ = y .+ [0.0, rand(rng, Normal(0, σ), n)...]
3738@inline function ode_fnc (u, p, t)
38- local λ
3939 λ = p
4040 du = λ * u
4141 return du
4242end
4343using LoopVectorization, MuladdMacro
44- @inline function _loglik_fnc(θ::AbstractVector{T}, data, integrator) where {T}
45-     local yᵒ, n, λ, σ, u0
44+ function _loglik_fnc(θ::AbstractVector{T}, data, integrator) where {T}
4645 yᵒ, n = data
4746 λ, σ, u0 = θ
4847 integrator. p = λ
@@ -90,7 +89,7 @@ We can now use this grid to evaluate the likelihood function at each point, and
9089``` julia
9190gs = grid_search (prob, regular_grid)
9291LikelihoodSolution. retcode: Success
93- Maximum likelihood: - 547.9579886200935
92+ Maximum likelihood: - 548.3068396174556
9493Maximum likelihood estimates: 3 - element Vector{Float64}
9594 λ: - 0.612244897959183
9695 σ: 0.816327448979592
@@ -103,7 +102,7 @@ You could also use an irregular grid, defining some grid as a matrix where each
103102using LatinHypercubeSampling
104103d = 3
105104gens = 1000
106- plan, _ = LHCoptim(500, d, gens)
105+ plan, _ = LHCoptim(500, d, gens; rng)
107106new_lb = [- 2.0 , 0.05 , 10.0 ]
108107new_ub = [2.0 , 0.2 , 20.0 ]
109108bnds = [(new_lb[i], new_ub[i]) for i in 1 : d]
@@ -112,20 +111,21 @@ irregular_grid = IrregularGrid(lb, ub, parameter_vals)
112111gs_ir, loglik_vals_ir = grid_search (prob, irregular_grid; save_vals= Val (true ), parallel = Val (true ))
113112```
114113``` julia
114+ julia> gs_ir
115115LikelihoodSolution. retcode: Success
116- Maximum likelihood: - 1729.7407123603484
116+ Maximum likelihood: - 2611.078183576969
117117Maximum likelihood estimates: 3 - element Vector{Float64}
118- λ: - 0.5090180360721444
119- σ: 0.19368737474949904
120- y₀: 15.791583166332664
118+ λ: - 0.5170340681362726
119+ σ: 0.18256513026052107
120+ y₀: 14.348697394789578
121121```
122122``` julia
123123max_lik, max_idx = findmax (loglik_vals_ir)
124124@test max_lik == PL. get_maximum (gs_ir)
125125@test parameter_vals[:, max_idx] ≈ PL. get_mle (gs_ir)
126126```
127127
128- (If you just want to try many points for starting your optimiser, see the optimiser in MultistartOptimization.jl.)
128+ (If you just want to try many points for starting your optimiser, see e.g. the optimiser in MultistartOptimization.jl.)
129129
130130## Parameter estimation
131131
@@ -143,10 +143,10 @@ prof = profile(prob, sol; alg=NLopt.LN_NELDERMEAD, parallel = true)
143143```
144144``` julia
145145ProfileLikelihoodSolution. MLE retcode: Success
146- Confidence intervals:
147- 95.0 % CI for λ: (- 0.51091362373969 , - 0.49491369219060505 )
148- 95.0 % CI for σ: (0.49607205632240814 , 0.5652591835193789 )
149- 95.0 % CI for y₀: (14.98587355568687 , 15.305179849533756 )
146+ Confidence intervals:
147+ 95.0 % CI for λ: (- 0.5092192953535792 , - 0.49323747169071175 )
148+ 95.0 % CI for σ: (0.4925813447124647 , 0.5612815283609663 )
149+ 95.0 % CI for y₀: (14.856528827532468 , 15.173375766524025 )
150150```
151151``` julia
152152@test λ ∈ get_confidence_intervals (prof, :λ )
@@ -162,90 +162,13 @@ Finally, we can visualise the profiles:
162162fig = plot_profiles(prof; nrow=1, ncol=3,
163163    latex_names=[L"\lambda", L"\sigma", L"y_0"],
164164    true_vals=[λ, σ, y₀],
165-     fig_kwargs=(fontsize=30, resolution=(2109.644f0, 444.242f0)),
165+     fig_kwargs=(fontsize=41,),
166166    axis_kwargs=(width=600, height=300))
167+ resize_to_layout!(fig)
167168```
168169
169- ![ Linear exponential profiles] ( https://github.com/DanielVandH/ProfileLikelihood.jl/blob/main/test/figures/linear_exponential_example.png?raw=true )
170-
171- ## Just the code
172-
173- Here is all the code used for obtaining the results in this example, should you want a version that you can directly copy and paste.
174-
175- ``` julia
176- # # Step 1: Generate some data for the problem and define the likelihood
177- using OrdinaryDiffEq, Random, Distributions, LoopVectorization, MuladdMacro
178- Random. seed! (2992999 )
179- λ = - 0.5
180- y₀ = 15.0
181- σ = 0.5
182- T = 5.0
183- n = 450
184- Δt = T / n
185- t = [j * Δt for j in 0 : n]
186- y = y₀ * exp .(λ * t)
187- yᵒ = y .+ [0.0 , rand (Normal (0 , σ), n)... ]
188- @inline function ode_fnc (u, p, t)
189- local λ
190- λ = p
191- du = λ * u
192- return du
193- end
194- @inline function _loglik_fnc (θ:: AbstractVector{T} , data, integrator) where {T}
195- local yᵒ, n, λ, σ, u0
196- yᵒ, n = data
197- λ, σ, u0 = θ
198- integrator. p = λ
199- # # Now solve the problem
200- reinit! (integrator, u0)
201- solve! (integrator)
202- if ! SciMLBase. successful_retcode (integrator. sol)
203- return typemin (T)
204- end
205- ℓ = - 0.5 (n + 1 ) * log (2 π * σ^ 2 )
206- s = zero (T)
207- @turbo @muladd for i in eachindex (yᵒ, integrator. sol. u)
208- s = s + (yᵒ[i] - integrator. sol. u[i]) * (yᵒ[i] - integrator. sol. u[i])
209- end
210- ℓ = ℓ - 0.5 s / σ^ 2
211- end
212-
213- # # Step 2: Define the problem
214- using Optimization
215- θ₀ = [- 1.0 , 0.5 , 19.73 ] # will be replaced anyway
216- lb = [- 10.0 , 1e-6 , 0.5 ]
217- ub = [10.0 , 10.0 , 25.0 ]
218- syms = [:λ , :σ , :y₀ ]
219- prob = LikelihoodProblem (
220- _loglik_fnc, θ₀, ode_fnc, y₀, (0.0 , T);
221- syms= syms,
222- data= (yᵒ, n),
223- ode_parameters= 1.0 , # temp value for λ
224- ode_kwargs= (verbose= false , saveat= t),
225- f_kwargs= (adtype= Optimization. AutoFiniteDiff (),),
226- prob_kwargs= (lb= lb, ub= ub),
227- ode_alg= Tsit5 ()
228- )
229-
230- # # Step 3: Grid search
231- regular_grid = RegularGrid (lb, ub, 50 ) # resolution can also be given as a vector for each parameter
232- gs = grid_search (prob, regular_grid)
233-
234- # # Step 4: Compute the MLE, starting at the grid search solution
235- using OptimizationOptimJL
236- prob = ProfileLikelihood. update_initial_estimate (prob, gs)
237- sol = mle (prob, Optim. LBFGS ())
238-
239- # # Step 5: Profile
240- using OptimizationNLopt
241- prof = profile (prob, sol; alg= NLopt. LN_NELDERMEAD, parallel= true )
242-
243-
244- # # Step 6: Visualise
245- using CairoMakie, LaTeXStrings
246- fig = plot_profiles (prof; nrow= 1 , ncol= 3 ,
247- latex_names= [L "\l ambda" , L "\s igma" , L " y_0" ],
248- true_vals= [λ, σ, y₀],
249- fig_kwargs= (fontsize= 30 , resolution= (2109.644f0 , 444.242f0 )),
250- axis_kwargs= (width= 600 , height= 300 ))
251- ```
``` @raw html
<figure>
    <img src='../figures/linear_exponential_example.png' alt='Linear exponential profiles'><br>
</figure>
```
0 commit comments