Skip to content

Commit

Permalink
ADD: test example works for Enzyme only, but optimization fails
Browse files Browse the repository at this point in the history
  • Loading branch information
enigne committed Oct 4, 2024
1 parent 47266cb commit b930d35
Showing 1 changed file with 11 additions and 14 deletions.
25 changes: 11 additions & 14 deletions test/testoptimization.jl
Original file line number Diff line number Diff line change
@@ -1,14 +1,11 @@
# Test script: run Enzyme reverse-mode AD through DJUICE's stress-balance
# cost function, then attempt to drive the same problem through
# Optimization.jl. Per the commit message, the standalone Enzyme autodiff
# call works but the Optimization.jl solve fails.
module enzymeDiff

using Enzyme
using DJUICE
using MAT
using Test
# NOTE(review): duplicate of `using Enzyme` above — redundant but harmless.
using Enzyme

using Optimization, OptimizationOptimJL

#Load model from MATLAB file
#file = matopen(joinpath(@__DIR__, "..", "data","temp12k.mat")) #BIG model
file = matopen(joinpath(@__DIR__, "..", "data","temp.mat")) #SMALL model (35 elements)
mat = read(file, "md")
close(file)
# NOTE(review): the next line is a diff-viewer artifact (an "Expand All"
# hunk header fused onto a code line). Several source lines are hidden at
# this point; the real statement is `md.stressbalance.maxiter = 20`.
Expand All @@ -19,27 +16,27 @@ md.stressbalance.maxiter = 20

#Now call AD! — configure the inversion (control) problem on the model.
md.inversion.iscontrol = 1                           # enable inversion mode
md.inversion.onlygrad = 1                            # gradient evaluation only
md.inversion.independent = md.friction.coefficient   # control variable
md.inversion.min_parameters = ones(md.mesh.numberofvertices)*(0.0)    # per-vertex lower bounds
md.inversion.max_parameters = ones(md.mesh.numberofvertices)*(1.0e3)  # per-vertex upper bounds
md.inversion.independent_string = "FrictionCoefficient"
md.inversion.dependent_string = ["SurfaceAbsVelMisfit"]  # cost-function term(s)

α = md.inversion.independent   # control vector (friction coefficient field)
∂J_∂α = zero(α)                # gradient buffer, same shape/eltype as α

femmodel=DJUICE.ModelProcessor(md, :StressbalanceSolution)
n = length(α)
# NOTE(review): `n` is unused in the visible code — possibly left over from
# a finite-difference loop (see commented block near the end); confirm
# before removing.

# test Enzyme autodiff only
# Shadow structure for femmodel that Enzyme accumulates adjoints into.
dfemmodel = Enzyme.Compiler.make_zero(Base.Core.Typeof(femmodel), IdDict(), femmodel)
autodiff(Enzyme.Reverse, DJUICE.costfunction, Active, Duplicated(α, ∂J_∂α), Duplicated(femmodel,dfemmodel))

# use user defined grad, errors!
# NOTE(review): this `optprob` is immediately overwritten two lines below,
# so the user-supplied-gradient path is dead code in this revision.
optprob = OptimizationFunction(DJUICE.costfunction, Optimization.AutoEnzyme(), grad=DJUICE.computeGradient)
#optprob = OptimizationFunction(DJUICE.costfunction, Optimization.AutoEnzyme())
optprob = OptimizationFunction(DJUICE.costfunction, Optimization.AutoEnzyme())
#prob = Optimization.OptimizationProblem(optprob, α, femmodel, lb=md.inversion.min_parameters, ub=md.inversion.max_parameters)
prob = Optimization.OptimizationProblem(optprob, α, femmodel)
# Per the commit message, this solve is the step that fails.
sol = Optimization.solve(prob, Optim.LBFGS())


#md = solve(md, :sb)

# compute gradient by finite differences at each node
#addJ = md.results["StressbalanceSolution"]["Gradient"]

end
#sol = Optimization.solve(prob, Optim.GradientDescent())
#sol = Optimization.solve(prob, Optim.NelderMead())

0 comments on commit b930d35

Please sign in to comment.