Skip to content

Commit

Permalink
Test code for packed CellSpace Variables
Browse files Browse the repository at this point in the history
Requires PALEOtoolkit/PALEOboxes.jl#94

Test code for packing CellSpace Variables, to evaluate whether
this improves solver performance for atmosphere models.

PALEOmodel.initialize!(model; pack_domain="atm")

will reorder CellSpace Variables in "atm" Domain by cell
(so all variables for a cell are adjacent in the aggregated stateexplicit
and sms vectors)

Tested with steadystate_ptcForwardDiff solver
(NB: steadystate_ptc_splitdae does not work !)

Shows negligible difference with minimal CHO and CHONS models,
which suggests that the default reordering by UMFPACK (Julia \ for sparse
matrices) already does a good job of finding a permutation that puts the
Jacobian in block-diagonal form (?).
  • Loading branch information
sjdaines committed Jul 25, 2023
1 parent 754cfe8 commit 065ad61
Show file tree
Hide file tree
Showing 3 changed files with 112 additions and 48 deletions.
4 changes: 3 additions & 1 deletion src/Initialize.jl
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ and supplying `method_barrier` (a thread barrier to add to `ReactionMethod` disp
- `method_barrier=nothing`: thread barrier to add to dispatch lists if `threadsafe==true`
- `expect_hostdep_varnames=["global.tforce"]`: non-state-Variable host-dependent Variable names expected
- `SolverView_all=true`: `true` to create `modeldata.solver_view_all`
- `pack_domain=""`: set to a Domain name to pack CellSpace variables for that Domain
- `create_dispatchlists_all=true`: `true` to create `modeldata.dispatchlists_all`
- `generated_dispatch=true`: `true` to autogenerate code for `modeldata.dispatchlists_all` (fast dispatch, slow compile)
"""
Expand All @@ -49,6 +50,7 @@ function initialize!(
nothing,
expect_hostdep_varnames=["global.tforce"],
SolverView_all=true,
pack_domain="",
create_dispatchlists_all=true,
generated_dispatch=true,
)
Expand All @@ -63,7 +65,7 @@ function initialize!(

# Create modeldata.solver_view_all for the entire model
if SolverView_all
@timeit "set_default_solver_view!" set_default_solver_view!(model, modeldata)
@timeit "set_default_solver_view!" set_default_solver_view!(model, modeldata; pack_domain)
end

# Initialize model Reaction data arrays (calls ReactionMethod.preparefn)
Expand Down
27 changes: 17 additions & 10 deletions src/JacobianAD.jl
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,8 @@ function jac_config_ode(
iszero(PALEOmodel.num_total(modeldata.solver_view_all)) ||
throw(ArgumentError("model contains implicit variables, solve as a DAE"))

pack_domain = modeldata.solver_view_all.pack_domain

# generate arrays with ODE layout for model Variables
state_sms_vars_data = similar(PALEOmodel.get_statevar_sms(modeldata.solver_view_all))
state_vars_data = similar(PALEOmodel.get_statevar(modeldata.solver_view_all))
Expand All @@ -85,7 +87,7 @@ function jac_config_ode(

PB.add_arrays_data!(model, modeldata, eltype(jacconf), "jac_ad"; use_base_vars)
arrays_idx_jac_ad = PB.num_arrays(modeldata)
jac_solverview = PALEOmodel.SolverView(model, modeldata, arrays_idx_jac_ad) # Variables from whole model
jac_solverview = PALEOmodel.SolverView(model, modeldata, arrays_idx_jac_ad; pack_domain) # Variables from whole model
jac_dispatchlists = PB.create_dispatch_methodlists(model, modeldata, arrays_idx_jac_ad, jac_cellranges; generated_dispatch)

@info " using ForwardDiff dense Jacobian chunksize=$(ForwardDiff.chunksize(chunk)))"
Expand All @@ -112,7 +114,7 @@ function jac_config_ode(
@timeit "calcJacobianSparsitySparsityTracing!" begin
jac_proto_unfilled = calcJacobianSparsitySparsityTracing!(
model, modeldata, initial_state, jac_ad_t_sparsity;
jac_cellranges, use_base_vars,
jac_cellranges, use_base_vars, pack_domain,
)
end # timeit
jac_prototype = SparseUtils.fill_sparse_jac(jac_proto_unfilled; fill_diagonal=fill_jac_diagonal)
Expand All @@ -126,7 +128,7 @@ function jac_config_ode(
arrays_idx_jac_ad = PB.num_arrays(modeldata)
end # timeit
@timeit "jac_solverview" begin
jac_solverview = PALEOmodel.SolverView(model, modeldata, arrays_idx_jac_ad) # Variables from whole model
jac_solverview = PALEOmodel.SolverView(model, modeldata, arrays_idx_jac_ad; pack_domain) # Variables from whole model
end # timeit
@timeit "jac_dispatchlists" begin
jac_dispatchlists = PB.create_dispatch_methodlists(model, modeldata, arrays_idx_jac_ad, jac_cellranges; generated_dispatch)
Expand Down Expand Up @@ -182,6 +184,8 @@ function jac_config_dae(
@info "jac_config_dae: jac_ad=$jac_ad"

PB.check_modeldata(model, modeldata)

pack_domain = modeldata.solver_view_all.pack_domain

# generate arrays with ODE layout for model Variables
state_sms_vars_data = similar(PALEOmodel.get_statevar_sms(modeldata.solver_view_all))
Expand All @@ -202,7 +206,7 @@ function jac_config_dae(
jacconf = ForwardDiff.JacobianConfig(nothing, state_sms_vars_data, state_vars_data, chunk)
PB.add_arrays_data!(model, modeldata, eltype(jacconf), "jac_ad"; use_base_vars)
arrays_idx_jac_ad = PB.num_arrays(modeldata)
jac_solverview = PALEOmodel.SolverView(model, modeldata, arrays_idx_jac_ad) # Variables from whole model
jac_solverview = PALEOmodel.SolverView(model, modeldata, arrays_idx_jac_ad; pack_domain) # Variables from whole model
jac_dispatchlists = PB.create_dispatch_methodlists(model, modeldata, arrays_idx_jac_ad, jac_cellranges; generated_dispatch)

if iszero(PALEOmodel.num_total(modeldata.solver_view_all))
Expand Down Expand Up @@ -245,7 +249,7 @@ function jac_config_dae(
@info " using ForwardDiff sparse Jacobian with sparsity calculated at t=$jac_ad_t_sparsity"
jac_proto_unfilled = calcJacobianSparsitySparsityTracing!(
model, modeldata, initial_state, jac_ad_t_sparsity;
jac_cellranges, use_base_vars,
jac_cellranges, use_base_vars, pack_domain,
)
jac_prototype = SparseUtils.fill_sparse_jac(jac_proto_unfilled)
# println("using jac_prototype: ", jac_prototype)
Expand All @@ -259,7 +263,7 @@ function jac_config_dae(

implicit_proto_unfilled = calcImplicitSparsitySparsityTracing!(
model, modeldata, initial_state, jac_ad_t_sparsity;
implicit_cellranges, use_base_vars,
implicit_cellranges, use_base_vars, pack_domain,
)
implicit_prototype = SparseUtils.fill_sparse_jac(implicit_proto_unfilled, fill_diagonal=false)
implicit_colorvec = SparseDiffTools.matrix_colors(implicit_prototype)
Expand All @@ -278,7 +282,7 @@ function jac_config_dae(
chunksize = ForwardDiff.pickchunksize(maximum(colorvec), request_adchunksize)
PB.add_arrays_data!(model, modeldata, ForwardDiff.Dual{Nothing, eltype(modeldata, 1), chunksize}, "jac_ad"; use_base_vars)
arrays_idx_jac_ad = PB.num_arrays(modeldata)
jac_solverview = PALEOmodel.SolverView(model, modeldata, arrays_idx_jac_ad) # Variables from whole model
jac_solverview = PALEOmodel.SolverView(model, modeldata, arrays_idx_jac_ad; pack_domain) # Variables from whole model
jac_dispatchlists = PB.create_dispatch_methodlists(model, modeldata, arrays_idx_jac_ad, jac_cellranges; generated_dispatch)

jac_cache = SparseDiffTools.ForwardColorJacCache(
Expand Down Expand Up @@ -345,6 +349,7 @@ function calcJacobianSparsitySparsityTracing!(
model::PB.Model, modeldata::PB.ModelData, initial_state, tjacsparsity;
jac_cellranges=modeldata.cellranges_all,
use_base_vars=String[],
pack_domain="",
)

@info "calcJacobianSparsitySparsityTracing!"
Expand All @@ -355,7 +360,7 @@ function calcJacobianSparsitySparsityTracing!(
arrays_idx_adst = PB.num_arrays(modeldata)
end # timeit

solver_view_all_adst = PALEOmodel.SolverView(model, modeldata, arrays_idx_adst)
solver_view_all_adst = PALEOmodel.SolverView(model, modeldata, arrays_idx_adst; pack_domain)
PALEOmodel.set_statevar!(solver_view_all_adst, initial_state_adst) # fix up initial state, as derivative information missing

PALEOmodel.set_tforce!(solver_view_all_adst, tjacsparsity)
Expand Down Expand Up @@ -392,14 +397,15 @@ function calcImplicitSparsitySparsityTracing!(
model::PB.Model, modeldata::PB.ModelData, initial_state, tsparsity;
implicit_cellranges=modeldata.cellranges_all,
use_base_vars=String[],
pack_domain="",
)
# Implicit Jacobian setup
initial_state_adst = SparsityTracing.create_advec(initial_state)

PB.add_arrays_data!(model, modeldata, eltype(initial_state_adst), "sparsity_tracing_implicit"; use_base_vars)
arrays_idx_adst = PB.num_arrays(modeldata)

solver_view_all_adst = PALEOmodel.SolverView(model, modeldata, arrays_idx_adst)
solver_view_all_adst = PALEOmodel.SolverView(model, modeldata, arrays_idx_adst; pack_domain)
PALEOmodel.set_statevar!(solver_view_all_adst, initial_state_adst) # fix up initial state, as derivative information missing

PALEOmodel.set_tforce!(solver_view_all_adst, tsparsity)
Expand Down Expand Up @@ -428,6 +434,7 @@ function directional_config(
eltypestomap=String[],
generated_dispatch=true,
use_base_transfer_jacobian=false,
pack_domain=modeldata.solver_view_all.pack_domain,
)

directional_ad_eltype = ForwardDiff.Dual{Nothing, Float64, 1}
Expand All @@ -436,7 +443,7 @@ function directional_config(
PB.add_arrays_data!(model, modeldata, directional_ad_eltype, "directional_deriv"; eltypemap, use_base_transfer_jacobian,)
arrays_idx_directional = PB.num_arrays(modeldata)

directional_sv = PALEOmodel.SolverView(model, modeldata, arrays_idx_directional) # Variables from whole model
directional_sv = PALEOmodel.SolverView(model, modeldata, arrays_idx_directional; pack_domain) # Variables from whole model

directional_dispatchlists = PB.create_dispatch_methodlists(
model, modeldata, arrays_idx_directional, directional_cellranges; generated_dispatch,
Expand Down
Loading

0 comments on commit 065ad61

Please sign in to comment.