diff --git a/test/testoptimization.jl b/test/testoptimization.jl
index 38004e8..9a5e075 100755
--- a/test/testoptimization.jl
+++ b/test/testoptimization.jl
@@ -1,14 +1,11 @@
-module enzymeDiff
-
+using Enzyme
 using DJUICE
 using MAT
 using Test
-using Enzyme
 using Optimization, OptimizationOptimJL
 
 
 #Load model from MATLAB file
-#file = matopen(joinpath(@__DIR__, "..", "data","temp12k.mat")) #BIG model
 file = matopen(joinpath(@__DIR__, "..", "data","temp.mat")) #SMALL model (35 elements)
 mat = read(file, "md")
 close(file)
@@ -19,27 +16,27 @@ md.stressbalance.maxiter = 20
 
 #Now call AD!
 md.inversion.iscontrol = 1
+md.inversion.onlygrad = 1
 md.inversion.independent = md.friction.coefficient
 md.inversion.min_parameters = ones(md.mesh.numberofvertices)*(0.0)
 md.inversion.max_parameters = ones(md.mesh.numberofvertices)*(1.0e3)
 md.inversion.independent_string = "FrictionCoefficient"
+md.inversion.dependent_string = ["SurfaceAbsVelMisfit"]
 
 α = md.inversion.independent
 ∂J_∂α = zero(α)
 
 femmodel=DJUICE.ModelProcessor(md, :StressbalanceSolution)
 n = length(α)
+
+# test Enzyme autodiff only
+dfemmodel = Enzyme.Compiler.make_zero(Base.Core.Typeof(femmodel), IdDict(), femmodel)
+autodiff(Enzyme.Reverse, DJUICE.costfunction, Active, Duplicated(α, ∂J_∂α), Duplicated(femmodel,dfemmodel))
+
 # use user defined grad, errors!
-optprob = OptimizationFunction(DJUICE.costfunction, Optimization.AutoEnzyme(), grad=DJUICE.computeGradient)
-#optprob = OptimizationFunction(DJUICE.costfunction, Optimization.AutoEnzyme())
+optprob = OptimizationFunction(DJUICE.costfunction, Optimization.AutoEnzyme())
 #prob = Optimization.OptimizationProblem(optprob, α, femmodel, lb=md.inversion.min_parameters, ub=md.inversion.max_parameters)
 prob = Optimization.OptimizationProblem(optprob, α, femmodel)
 sol = Optimization.solve(prob, Optim.LBFGS())
-
-
-#md = solve(md, :sb)
-
-# compute gradient by finite differences at each node
-#addJ = md.results["StressbalanceSolution"]["Gradient"]
-
-end
+#sol = Optimization.solve(prob, Optim.GradientDescent())
+#sol = Optimization.solve(prob, Optim.NelderMead())
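
The lines added above call Enzyme's reverse-mode `autodiff` directly, with `Active` marking the scalar cost as the differentiated output and `Duplicated` pairing each mutable argument with a shadow buffer that receives the gradient. Below is a minimal, self-contained sketch of that calling pattern on a toy cost function; `toy_cost` and its inputs are stand-ins for illustration only, not `DJUICE.costfunction` or the `femmodel` state.

```julia
using Enzyme

# Toy stand-in for the real cost function: a scalar "misfit" of the control vector.
toy_cost(α) = 0.5 * sum(abs2, α)

α  = [1.0, 2.0, 3.0]   # control vector (analogous to the friction coefficient)
dα = zero(α)           # shadow buffer; reverse mode accumulates ∂J/∂α here

# Active: the scalar return value is the quantity being differentiated.
# Duplicated(α, dα): α is an input whose gradient lands in dα.
Enzyme.autodiff(Enzyme.Reverse, toy_cost, Active, Duplicated(α, dα))

@assert dα ≈ α         # for 0.5*sum(abs2, α), the gradient is α itself
```

The `Optimization.AutoEnzyme()` path in the diff asks Optimization.jl to drive this same reverse-mode differentiation internally, which is presumably why the hand-written `grad=DJUICE.computeGradient` keyword (noted in the comment as erroring) was dropped.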