
Commit e5ae416

Merge pull request #46 from SciML/adtypes1
ADTypes v1.0 support updates
2 parents 0f2e93c + 3e26da5

13 files changed: +374 −183 lines

.github/workflows/Downstream.yml

Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
+name: IntegrationTest
+on:
+  push:
+    branches: [master]
+    tags: [v*]
+  pull_request:
+
+jobs:
+  test:
+    name: ${{ matrix.package.repo }}/${{ matrix.package.group }}/${{ matrix.julia-version }}
+    runs-on: ${{ matrix.os }}
+    env:
+      GROUP: ${{ matrix.package.group }}
+    strategy:
+      fail-fast: false
+      matrix:
+        julia-version: [1]
+        os: [ubuntu-latest]
+        package:
+          - {user: SciML, repo: Optimization.jl, group: Optimization}
+
+    steps:
+      - uses: actions/checkout@v4
+      - uses: julia-actions/setup-julia@v2
+        with:
+          version: ${{ matrix.julia-version }}
+          arch: x64
+      - uses: julia-actions/julia-buildpkg@latest
+      - name: Clone Downstream
+        uses: actions/checkout@v4
+        with:
+          repository: ${{ matrix.package.user }}/${{ matrix.package.repo }}
+          path: downstream
+      - name: Load this and run the downstream tests
+        shell: julia --color=yes --project=downstream {0}
+        run: |
+          using Pkg
+          # force it to use this PR's version of the package
+          Pkg.develop(PackageSpec(path=".")) # resolver may fail with main deps
+          Pkg.update()
+          Pkg.test(coverage=true) # resolver may fail with test time deps
+
+      - uses: julia-actions/julia-processcoverage@v1
+      - uses: codecov/codecov-action@v4
+        with:
+          file: lcov.info
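The run block above is plain Julia driven by Pkg; a minimal local equivalent (a sketch, assuming Optimization.jl has been cloned into ./downstream the way the workflow does) would be:

using Pkg
Pkg.activate("downstream")            # the cloned Optimization.jl checkout
Pkg.develop(PackageSpec(path = "."))  # force this PR's OptimizationBase into the env
Pkg.update()
Pkg.test(coverage = true)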

Project.toml

Lines changed: 3 additions & 3 deletions
@@ -1,7 +1,7 @@
 name = "OptimizationBase"
 uuid = "bca83a33-5cc9-4baa-983d-23429ab6bcbb"
 authors = ["Vaibhav Dixit <[email protected]> and contributors"]
-version = "1.0.0"
+version = "1.0.1"
 
 [deps]
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
@@ -35,10 +35,10 @@ OptimizationTrackerExt = "Tracker"
 OptimizationZygoteExt = "Zygote"
 
 [compat]
-ADTypes = "0.2.5, 1"
+ADTypes = "1"
 ArrayInterface = "7.6"
 DocStringExtensions = "0.9"
-Enzyme = "0.11.11, 0.12"
+Enzyme = "0.11.11, =0.12.6"
 FiniteDiff = "2.12"
 ForwardDiff = "0.10.26"
 LinearAlgebra = "1.9, 1.10"
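For context (not part of the diff): in Pkg's compat syntax, "1" admits any 1.x release, while "=0.12.6" is an equality bound, so the new Enzyme entry allows any 0.11.x with x ≥ 11, or exactly 0.12.6. This can be checked with Pkg's compat parser (an internal API, so treat this as illustrative):

using Pkg
Pkg.Types.semver_spec("1")                  # VersionSpec covering [1.0.0, 2.0.0)
Pkg.Types.semver_spec("0.11.11, =0.12.6")   # [0.11.11, 0.12.0) plus exactly 0.12.6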

ext/OptimizationFiniteDiffExt.jl

Lines changed: 4 additions & 2 deletions
@@ -16,7 +16,8 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x,
 
     if f.grad === nothing
         gradcache = FD.GradientCache(x, x, adtype.fdtype)
-        grad = (res, θ, args...) -> FD.finite_difference_gradient!(res, x -> _f(x, args...),
+        grad = (res, θ, args...) -> FD.finite_difference_gradient!(
+            res, x -> _f(x, args...),
             θ, gradcache)
     else
         grad = (G, θ, args...) -> f.grad(G, θ, p, args...)
@@ -125,7 +126,8 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true},
 
     if f.grad === nothing
         gradcache = FD.GradientCache(cache.u0, cache.u0, adtype.fdtype)
-        grad = (res, θ, args...) -> FD.finite_difference_gradient!(res, x -> _f(x, args...),
+        grad = (res, θ, args...) -> FD.finite_difference_gradient!(
+            res, x -> _f(x, args...),
             θ, gradcache)
     else
         grad = (G, θ, args...) -> f.grad(G, θ, cache.p, args...)
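For orientation, the FD.GradientCache / finite_difference_gradient! pairing being re-wrapped above works like this in isolation (a minimal sketch; Val(:forward) stands in for adtype.fdtype):

using FiniteDiff
f(x) = sum(abs2, x)
x0 = rand(3)
cache = FiniteDiff.GradientCache(x0, x0, Val(:forward))  # mirrors FD.GradientCache(x, x, adtype.fdtype)
G = similar(x0)
FiniteDiff.finite_difference_gradient!(G, f, x0, cache)  # G ≈ 2 .* x0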

ext/OptimizationForwardDiffExt.jl

Lines changed: 4 additions & 4 deletions (whitespace-only reformatting)
@@ -65,7 +65,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x,
     if cons !== nothing && f.cons_h === nothing
         fncs = [(x) -> cons_oop(x)[i] for i in 1:num_cons]
         hess_config_cache = [ForwardDiff.HessianConfig(fncs[i], x,
-            ForwardDiff.Chunk{chunksize}())
+                                 ForwardDiff.Chunk{chunksize}())
                              for i in 1:num_cons]
         cons_h = function (res, θ)
             for i in 1:num_cons
@@ -143,7 +143,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true},
     if cons !== nothing && f.cons_h === nothing
         fncs = [(x) -> cons_oop(x)[i] for i in 1:num_cons]
         hess_config_cache = [ForwardDiff.HessianConfig(fncs[i], cache.u0,
-            ForwardDiff.Chunk{chunksize}())
+                                 ForwardDiff.Chunk{chunksize}())
                              for i in 1:num_cons]
         cons_h = function (res, θ)
             for i in 1:num_cons
@@ -224,7 +224,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, x
     if cons !== nothing && f.cons_h === nothing
         fncs = [(x) -> cons_oop(x)[i] for i in 1:num_cons]
         hess_config_cache = [ForwardDiff.HessianConfig(fncs[i], x,
-            ForwardDiff.Chunk{chunksize}())
+                                 ForwardDiff.Chunk{chunksize}())
                              for i in 1:num_cons]
         cons_h = function (θ)
             map(1:num_cons) do i
@@ -306,7 +306,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false},
     if cons !== nothing && f.cons_h === nothing
         fncs = [(x) -> cons_oop(x)[i] for i in 1:num_cons]
         hess_config_cache = [ForwardDiff.HessianConfig(fncs[i], x,
-            ForwardDiff.Chunk{chunksize}())
+                                 ForwardDiff.Chunk{chunksize}())
                              for i in 1:num_cons]
         cons_h = function (θ)
             map(1:num_cons) do i
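For orientation, the HessianConfig + Chunk pattern whose indentation changes here looks like this on its own (a minimal sketch; Chunk{2} is an illustrative size, the file derives chunksize from the input length):

using ForwardDiff
g(x) = x[1]^2 + sin(x[2])
x0 = [1.0, 2.0]
cfg = ForwardDiff.HessianConfig(g, x0, ForwardDiff.Chunk{2}())
H = ForwardDiff.hessian(g, x0, cfg)  # 2×2 Hessian of g at x0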

ext/OptimizationMTKExt.jl

Lines changed: 109 additions & 7 deletions
@@ -3,10 +3,112 @@ module OptimizationMTKExt
 import OptimizationBase, OptimizationBase.ArrayInterface
 import OptimizationBase.SciMLBase
 import OptimizationBase.SciMLBase: OptimizationFunction
-import OptimizationBase.ADTypes: AutoModelingToolkit
+import OptimizationBase.ADTypes: AutoModelingToolkit, AutoSymbolics, AutoSparse
 isdefined(Base, :get_extension) ? (using ModelingToolkit) : (using ..ModelingToolkit)
 
-function OptimizationBase.instantiate_function(f, x, adtype::AutoModelingToolkit, p,
+function OptimizationBase.ADTypes.AutoModelingToolkit(sparse = false, cons_sparse = false)
+    if sparse || cons_sparse
+        return AutoSparse(AutoSymbolics())
+    else
+        return AutoSymbolics()
+    end
+end
+
+function OptimizationBase.instantiate_function(
+        f, x, adtype::AutoSparse{<:AutoSymbolics, S, C}, p,
+        num_cons = 0) where {S, C}
+    p = isnothing(p) ? SciMLBase.NullParameters() : p
+
+    sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, x, p;
+        lcons = fill(0.0,
+            num_cons),
+        ucons = fill(0.0,
+            num_cons))))
+    #sys = ModelingToolkit.structural_simplify(sys)
+    f = OptimizationProblem(sys, x, p, grad = true, hess = true,
+        sparse = true, cons_j = true, cons_h = true,
+        cons_sparse = true).f
+
+    grad = (G, θ, args...) -> f.grad(G, θ, p, args...)
+
+    hess = (H, θ, args...) -> f.hess(H, θ, p, args...)
+
+    hv = function (H, θ, v, args...)
+        res = adtype.obj_sparse ? (eltype(θ)).(f.hess_prototype) :
+              ArrayInterface.zeromatrix(θ)
+        hess(res, θ, args...)
+        H .= res * v
+    end
+
+    if !isnothing(f.cons)
+        cons = (res, θ) -> f.cons(res, θ, p)
+        cons_j = (J, θ) -> f.cons_j(J, θ, p)
+        cons_h = (res, θ) -> f.cons_h(res, θ, p)
+    else
+        cons = nothing
+        cons_j = nothing
+        cons_h = nothing
+    end
+
+    return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
+        cons = cons, cons_j = cons_j, cons_h = cons_h,
+        hess_prototype = f.hess_prototype,
+        cons_jac_prototype = f.cons_jac_prototype,
+        cons_hess_prototype = f.cons_hess_prototype,
+        expr = OptimizationBase.symbolify(f.expr),
+        cons_expr = OptimizationBase.symbolify.(f.cons_expr),
+        sys = sys,
+        observed = f.observed)
+end
+
+function OptimizationBase.instantiate_function(f, cache::OptimizationBase.ReInitCache,
+        adtype::AutoSparse{<:AutoSymbolics, S, C}, num_cons = 0) where {S, C}
+    p = isnothing(cache.p) ? SciMLBase.NullParameters() : cache.p
+
+    sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, cache.u0,
+        cache.p;
+        lcons = fill(0.0,
+            num_cons),
+        ucons = fill(0.0,
+            num_cons))))
+    #sys = ModelingToolkit.structural_simplify(sys)
+    f = OptimizationProblem(sys, cache.u0, cache.p, grad = true, hess = true,
+        sparse = true, cons_j = true, cons_h = true,
+        cons_sparse = true).f
+
+    grad = (G, θ, args...) -> f.grad(G, θ, cache.p, args...)
+
+    hess = (H, θ, args...) -> f.hess(H, θ, cache.p, args...)
+
+    hv = function (H, θ, v, args...)
+        res = adtype.obj_sparse ? (eltype(θ)).(f.hess_prototype) :
+              ArrayInterface.zeromatrix(θ)
+        hess(res, θ, args...)
+        H .= res * v
+    end
+
+    if !isnothing(f.cons)
+        cons = (res, θ) -> f.cons(res, θ, cache.p)
+        cons_j = (J, θ) -> f.cons_j(J, θ, cache.p)
+        cons_h = (res, θ) -> f.cons_h(res, θ, cache.p)
+    else
+        cons = nothing
+        cons_j = nothing
+        cons_h = nothing
+    end
+
+    return OptimizationFunction{true}(f.f, adtype; grad = grad, hess = hess, hv = hv,
+        cons = cons, cons_j = cons_j, cons_h = cons_h,
+        hess_prototype = f.hess_prototype,
+        cons_jac_prototype = f.cons_jac_prototype,
+        cons_hess_prototype = f.cons_hess_prototype,
+        expr = OptimizationBase.symbolify(f.expr),
+        cons_expr = OptimizationBase.symbolify.(f.cons_expr),
+        sys = sys,
+        observed = f.observed)
+end
+
+function OptimizationBase.instantiate_function(f, x, adtype::AutoSymbolics, p,
     num_cons = 0)
     p = isnothing(p) ? SciMLBase.NullParameters() : p
 
@@ -17,8 +119,8 @@ function OptimizationBase.instantiate_function(f, x, adtype::AutoModelingToolkit
         num_cons))))
     #sys = ModelingToolkit.structural_simplify(sys)
     f = OptimizationProblem(sys, x, p, grad = true, hess = true,
-        sparse = adtype.obj_sparse, cons_j = true, cons_h = true,
-        cons_sparse = adtype.cons_sparse).f
+        sparse = false, cons_j = true, cons_h = true,
+        cons_sparse = false).f
 
     grad = (G, θ, args...) -> f.grad(G, θ, p, args...)
 
@@ -53,7 +155,7 @@ function OptimizationBase.instantiate_function(f, x, adtype::AutoModelingToolkit
 end
 
 function OptimizationBase.instantiate_function(f, cache::OptimizationBase.ReInitCache,
-    adtype::AutoModelingToolkit, num_cons = 0)
+    adtype::AutoSymbolics, num_cons = 0)
     p = isnothing(cache.p) ? SciMLBase.NullParameters() : cache.p
 
     sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, cache.u0,
@@ -64,8 +166,8 @@ function OptimizationBase.instantiate_function(f, cache::OptimizationBase.ReInit
         num_cons))))
     #sys = ModelingToolkit.structural_simplify(sys)
     f = OptimizationProblem(sys, cache.u0, cache.p, grad = true, hess = true,
-        sparse = adtype.obj_sparse, cons_j = true, cons_h = true,
-        cons_sparse = adtype.cons_sparse).f
+        sparse = false, cons_j = true, cons_h = true,
+        cons_sparse = false).f
 
     grad = (G, θ, args...) -> f.grad(G, θ, cache.p, args...)
 
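Net effect of the first hunk: AutoModelingToolkit survives as a small constructor that forwards to the ADTypes v1 types, so existing call sites keep working once this extension is loaded. A sketch of the mapping it defines (names as imported above):

AutoModelingToolkit()             # -> AutoSymbolics()
AutoModelingToolkit(true)         # -> AutoSparse(AutoSymbolics())
AutoModelingToolkit(false, true)  # -> AutoSparse(AutoSymbolics()); either flag selects sparse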

ext/OptimizationReverseDiffExt.jl

Lines changed: 16 additions & 16 deletions
@@ -49,7 +49,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x,
     xdual = ForwardDiff.Dual{
         typeof(T),
         eltype(x),
-        chunksize,
+        chunksize
     }.(x, Ref(ForwardDiff.Partials((ones(eltype(x), chunksize)...,))))
     h_tape = ReverseDiff.GradientTape(_f, xdual)
     htape = ReverseDiff.compile(h_tape)
@@ -119,9 +119,9 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x,
     end
     gs = [x -> grad_cons(x, conshtapes[i]) for i in 1:num_cons]
     jaccfgs = [ForwardDiff.JacobianConfig(gs[i],
-        x,
-        ForwardDiff.Chunk{chunksize}(),
-        T) for i in 1:num_cons]
+                   x,
+                   ForwardDiff.Chunk{chunksize}(),
+                   T) for i in 1:num_cons]
     cons_h = function (res, θ)
         for i in 1:num_cons
             ForwardDiff.jacobian!(res[i], gs[i], θ, jaccfgs[i], Val{false}())
@@ -182,7 +182,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true},
     xdual = ForwardDiff.Dual{
         typeof(T),
         eltype(cache.u0),
-        chunksize,
+        chunksize
     }.(cache.u0, Ref(ForwardDiff.Partials((ones(eltype(cache.u0), chunksize)...,))))
     h_tape = ReverseDiff.GradientTape(_f, xdual)
     htape = ReverseDiff.compile(h_tape)
@@ -255,9 +255,9 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true},
     end
     gs = [x -> grad_cons(x, conshtapes[i]) for i in 1:num_cons]
     jaccfgs = [ForwardDiff.JacobianConfig(gs[i],
-        cache.u0,
-        ForwardDiff.Chunk{chunksize}(),
-        T) for i in 1:num_cons]
+                   cache.u0,
+                   ForwardDiff.Chunk{chunksize}(),
+                   T) for i in 1:num_cons]
     cons_h = function (res, θ)
         for i in 1:num_cons
             ForwardDiff.jacobian!(res[i], gs[i], θ, jaccfgs[i], Val{false}())
@@ -319,7 +319,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, x
     xdual = ForwardDiff.Dual{
         typeof(T),
         eltype(x),
-        chunksize,
+        chunksize
     }.(x, Ref(ForwardDiff.Partials((ones(eltype(x), chunksize)...,))))
     h_tape = ReverseDiff.GradientTape(_f, xdual)
     htape = ReverseDiff.compile(h_tape)
@@ -393,9 +393,9 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, x
     end
     gs = [x -> grad_cons(x, conshtapes[i]) for i in 1:num_cons]
     jaccfgs = [ForwardDiff.JacobianConfig(gs[i],
-        x,
-        ForwardDiff.Chunk{chunksize}(),
-        T) for i in 1:num_cons]
+                   x,
+                   ForwardDiff.Chunk{chunksize}(),
+                   T) for i in 1:num_cons]
     cons_h = function (θ)
         map(1:num_cons) do i
             ForwardDiff.jacobian(gs[i], θ, jaccfgs[i], Val{false}())
@@ -456,7 +456,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false},
     xdual = ForwardDiff.Dual{
         typeof(T),
         eltype(x),
-        chunksize,
+        chunksize
     }.(x, Ref(ForwardDiff.Partials((ones(eltype(x), chunksize)...,))))
     h_tape = ReverseDiff.GradientTape(_f, xdual)
     htape = ReverseDiff.compile(h_tape)
@@ -530,9 +530,9 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false},
     end
     gs = [x -> grad_cons(x, conshtapes[i]) for i in 1:num_cons]
     jaccfgs = [ForwardDiff.JacobianConfig(gs[i],
-        x,
-        ForwardDiff.Chunk{chunksize}(),
-        T) for i in 1:num_cons]
+                   x,
+                   ForwardDiff.Chunk{chunksize}(),
+                   T) for i in 1:num_cons]
     cons_h = function (θ)
         map(1:num_cons) do i
             ForwardDiff.jacobian(gs[i], θ, jaccfgs[i], Val{false}())
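For orientation, the compiled-tape machinery these hunks reformat rests on ReverseDiff's public tape API, which in isolation looks like this (a minimal sketch; the file layers ForwardDiff duals on top of such tapes for forward-over-reverse Hessians):

using ReverseDiff
f(x) = sum(abs2, x)
x0 = rand(4)
tape = ReverseDiff.compile(ReverseDiff.GradientTape(f, x0))  # record once, replay cheaply
G = similar(x0)
ReverseDiff.gradient!(G, tape, x0)  # G ≈ 2 .* x0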

ext/OptimizationSparseDiffExt.jl

Lines changed: 4 additions & 4 deletions
@@ -2,14 +2,14 @@ module OptimizationSparseDiffExt
 
 import OptimizationBase, OptimizationBase.ArrayInterface
 import OptimizationBase.SciMLBase: OptimizationFunction
-import OptimizationBase.ADTypes: AutoSparseForwardDiff,
-    AutoSparseFiniteDiff, AutoSparseReverseDiff
+import OptimizationBase.ADTypes: AutoSparse, AutoFiniteDiff, AutoForwardDiff,
+    AutoReverseDiff
 using OptimizationBase.LinearAlgebra, ReverseDiff
 isdefined(Base, :get_extension) ?
 (using SparseDiffTools,
-    SparseDiffTools.ForwardDiff, SparseDiffTools.FiniteDiff, Symbolics) :
+       SparseDiffTools.ForwardDiff, SparseDiffTools.FiniteDiff, Symbolics) :
 (using ..SparseDiffTools,
-    ..SparseDiffTools.ForwardDiff, ..SparseDiffTools.FiniteDiff, ..Symbolics)
+       ..SparseDiffTools.ForwardDiff, ..SparseDiffTools.FiniteDiff, ..Symbolics)
 
 function default_chunk_size(len)
     if len < ForwardDiff.DEFAULT_CHUNK_THRESHOLD
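The import change mirrors the ADTypes v1 redesign: the dedicated AutoSparseX structs are gone, and sparsity is expressed by wrapping a dense backend in AutoSparse. The mapping this file adopts:

using ADTypes
AutoSparse(AutoForwardDiff())   # replaces AutoSparseForwardDiff()
AutoSparse(AutoFiniteDiff())    # replaces AutoSparseFiniteDiff()
AutoSparse(AutoReverseDiff())   # replaces AutoSparseReverseDiff()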
