In [1]:
using Optim
using BenchmarkTools
In [2]:
rosenbrock(x) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2
Out[2]:
rosenbrock (generic function with 1 method)
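The Rosenbrock function has its global minimum at (1, 1), where it evaluates to zero. A quick sanity check of the definition (not part of the original run) might look like:

rosenbrock([1.0, 1.0])   # expected: 0.0 at the known minimizer
rosenbrock([0.0, 0.0])   # expected: 1.0 at the starting point used below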
In [3]:
x0 = zeros(2)
Out[3]:
2-element Array{Float64,1}:
 0.0
 0.0
In [4]:
method = BFGS()
Out[4]:
Optim.BFGS{LineSearches.InitialStatic{Float64},LineSearches.HagerZhang{Float64,Base.RefValue{Bool}},Optim.##40#42}(LineSearches.InitialStatic{Float64}
  alpha: Float64 1.0
  scaled: Bool false
, LineSearches.HagerZhang{Float64,Base.RefValue{Bool}}
  delta: Float64 0.1
  sigma: Float64 0.9
  alphamax: Float64 Inf
  rho: Float64 5.0
  epsilon: Float64 1.0e-6
  gamma: Float64 0.66
  linesearchmax: Int64 50
  psi3: Float64 0.1
  display: Int64 0
  mayterminate: Base.RefValue{Bool}
, Optim.#40, Optim.Flat())
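The printed fields above come from the default HagerZhang line search. To use a different line search or tighter stopping tolerances, a hedged sketch (keyword names as in the Optim/LineSearches versions used here; not run in this notebook) could be:

using LineSearches
method_bt = BFGS(linesearch = LineSearches.BackTracking())   # swap the line search
opts = Optim.Options(g_tol = 1e-12, iterations = 1_000)      # tighten gradient tolerance
optimize(rosenbrock, x0, method_bt, opts)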
In [5]:
@btime optimize($rosenbrock, $x0, $method)
  63.073 μs (554 allocations: 21.83 KiB)
Out[5]:
Results of Optimization Algorithm
 * Algorithm: BFGS
 * Starting Point: [0.0,0.0]
 * Minimizer: [0.9999999926033423,0.9999999852005353]
 * Minimum: 5.471433e-17
 * Iterations: 16
 * Convergence: true
   * |x - x'| ≤ 0.0e+00: false 
     |x - x'| = 3.47e-07 
   * |f(x) - f(x')| ≤ 0.0e+00 |f(x)|: false
     |f(x) - f(x')| = 1.20e+03 |f(x)|
   * |g(x)| ≤ 1.0e-08: true 
     |g(x)| = 2.33e-09 
   * Stopped by an increasing objective: false
   * Reached Maximum Number of Iterations: false
 * Objective Calls: 53
 * Gradient Calls: 53
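Rather than reading numbers off the printed summary, the returned result object can be queried directly. A small sketch using Optim's accessor functions (assuming they behave as in this version):

res = optimize(rosenbrock, x0, BFGS())
Optim.minimizer(res)   # ≈ [1.0, 1.0]
Optim.minimum(res)     # ≈ 0.0
Optim.converged(res)   # true
Optim.iterations(res)  # 16 in the run above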
In [6]:
method = NelderMead()
@btime optimize($rosenbrock, $x0, $method)
  61.075 μs (755 allocations: 19.94 KiB)
Out[6]:
Results of Optimization Algorithm
 * Algorithm: Nelder-Mead
 * Starting Point: [0.0,0.0]
 * Minimizer: [0.9999634355313174,0.9999315506115275]
 * Minimum: 3.525527e-09
 * Iterations: 60
 * Convergence: true
   *  √(Σ(yᵢ-ȳ)²)/n < 1.0e-08: true
   * Reached Maximum Number of Iterations: false
 * Objective Calls: 117
In [5]:
@btime optimize(rosenbrock, zeros(2), LBFGS())
  109.029 μs (774 allocations: 32.02 KiB)
Out[5]:
Results of Optimization Algorithm
 * Algorithm: L-BFGS
 * Starting Point: [0.0,0.0]
 * Minimizer: [0.9999999926662504,0.9999999853325008]
 * Minimum: 5.378388e-17
 * Iterations: 24
 * Convergence: true
   * |x - x'| ≤ 0.0e+00: false 
     |x - x'| = 4.54e-11 
   * |f(x) - f(x')| ≤ 0.0e+00 |f(x)|: false
     |f(x) - f(x')| = 5.30e-03 |f(x)|
   * |g(x)| ≤ 1.0e-08: true 
     |g(x)| = 9.88e-14 
   * Stopped by an increasing objective: false
   * Reached Maximum Number of Iterations: false
 * Objective Calls: 67
 * Gradient Calls: 67
In [6]:
@btime optimize(rosenbrock, zeros(2), LBFGS(); autodiff = :forward)
  161.328 μs (978 allocations: 37.52 KiB)
Out[6]:
Results of Optimization Algorithm
 * Algorithm: L-BFGS
 * Starting Point: [0.0,0.0]
 * Minimizer: [0.999999999999928,0.9999999999998559]
 * Minimum: 5.191703e-27
 * Iterations: 24
 * Convergence: true
   * |x - x'| ≤ 0.0e+00: false 
     |x - x'| = 4.58e-11 
   * |f(x) - f(x')| ≤ 0.0e+00 |f(x)|: false
     |f(x) - f(x')| = 8.50e+07 |f(x)|
   * |g(x)| ≤ 1.0e-08: true 
     |g(x)| = 1.44e-13 
   * Stopped by an increasing objective: false
   * Reached Maximum Number of Iterations: false
 * Objective Calls: 67
 * Gradient Calls: 67
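With autodiff = :forward the gradient comes from forward-mode automatic differentiation rather than the default finite differencing, which is presumably why the minimum drops from ~5e-17 to ~5e-27 at the same iteration count. Roughly what is computed under the hood (a sketch calling ForwardDiff directly; the exact wiring inside Optim may differ):

using ForwardDiff
ForwardDiff.gradient(rosenbrock, [0.0, 0.0])   # exact gradient, no finite-difference error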
In [7]:
# in-place gradient: mutates the array G passed in by Optim
function rosenbrock_gradient!(G, x)
    G[1] = -2.0 * (1.0 - x[1]) - 400.0 * (x[2] - x[1]^2) * x[1]
    G[2] = 200.0 * (x[2] - x[1]^2)
end
Out[7]:
rosenbrock_gradient! (generic function with 1 method)
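Before benchmarking the full solve it can be useful to time the gradient on its own; a sketch (the buffer G and the test point are arbitrary choices for illustration):

G = zeros(2)
xtest = [0.5, 0.5]
@btime rosenbrock_gradient!($G, $xtest)   # expected: 0 allocations, since G is reused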
In [8]:
# check allocations; note that no method is specified here, and the output below
# shows Nelder-Mead was selected, so the supplied gradient is not actually used
@btime optimize(rosenbrock, rosenbrock_gradient!, zeros(2))
  62.973 μs (771 allocations: 22.03 KiB)
Out[8]:
Results of Optimization Algorithm
 * Algorithm: Nelder-Mead
 * Starting Point: [0.0,0.0]
 * Minimizer: [0.9999634355313174,0.9999315506115275]
 * Minimum: 3.525527e-09
 * Iterations: 60
 * Convergence: true
   *  √(Σ(yᵢ-ȳ)²)/n < 1.0e-08: true
   * Reached Maximum Number of Iterations: false
 * Objective Calls: 117
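To make sure the hand-written gradient is actually used, a gradient-based method can be requested explicitly; a sketch (not timed here):

optimize(rosenbrock, rosenbrock_gradient!, zeros(2), LBFGS())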
In [9]:
# out-of-place gradient: allocates a new array on every call and must return it
function rosenbrock_gradient(x)
    G = similar(x)
    G[1] = -2.0 * (1.0 - x[1]) - 400.0 * (x[2] - x[1]^2) * x[1]
    G[2] = 200.0 * (x[2] - x[1]^2)
    return G
end
Out[9]:
rosenbrock_gradient (generic function with 1 method)
In [10]:
# this version should have more allocations (again no method is specified,
# so Nelder-Mead is used and the gradient is never called)
@btime optimize(rosenbrock, rosenbrock_gradient, zeros(2); inplace = false)
  63.149 μs (772 allocations: 22.13 KiB)
Out[10]:
Results of Optimization Algorithm
 * Algorithm: Nelder-Mead
 * Starting Point: [0.0,0.0]
 * Minimizer: [0.9999634355313174,0.9999315506115275]
 * Minimum: 3.525527e-09
 * Iterations: 60
 * Convergence: true
   *  √(Σ(yᵢ-ȳ)²)/n < 1.0e-08: true
   * Reached Maximum Number of Iterations: false
 * Objective Calls: 117
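Going one step further, a second-order method can be given an analytic Hessian as well. A hedged sketch of Newton's method on the same problem (the Hessian entries follow by differentiating the gradient above; not part of the original run):

function rosenbrock_hessian!(H, x)
    H[1, 1] = 2.0 - 400.0 * x[2] + 1200.0 * x[1]^2
    H[1, 2] = -400.0 * x[1]
    H[2, 1] = -400.0 * x[1]
    H[2, 2] = 200.0
end

optimize(rosenbrock, rosenbrock_gradient!, rosenbrock_hessian!, zeros(2), Newton())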
In [11]:
versioninfo()
Julia Version 0.6.3
Commit d55cadc350 (2018-05-28 20:20 UTC)
Platform Info:
  OS: macOS (x86_64-apple-darwin14.5.0)
  CPU: Intel(R) Core(TM) i5-4258U CPU @ 2.40GHz
  WORD_SIZE: 64
  BLAS: libopenblas (USE64BITINT DYNAMIC_ARCH NO_AFFINITY Haswell)
  LAPACK: libopenblas64_
  LIBM: libopenlibm
  LLVM: libLLVM-3.9.1 (ORCJIT, haswell)
In [12]:
println(now())
for i in 1:1000
    optimize(rosenbrock, zeros(2), NelderMead())
end
println(now())
2018-07-22T09:44:07.031
2018-07-22T09:44:07.348
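The now()-based loop gives a rough wall-clock figure (about 0.3 ms per solve over 1000 runs); BenchmarkTools can report the same measurement with proper statistics, e.g.:

@benchmark optimize(rosenbrock, zeros(2), NelderMead())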
In [14]:
Pkg.status("Optim")
 - Optim                         0.15.1