PETSc support dev #162

Open · wants to merge 6 commits into mfem_45_dev (base branch)
2 changes: 2 additions & 0 deletions docs/changelog.txt
@@ -39,6 +39,8 @@
* ex33 and ex33p are added.
* ex15 is updated to use Numba

* Initial support for PETSc + MFEM

2022 03.
* Added --cuda-arch option to specify the CUDA compute architecture

4 changes: 4 additions & 0 deletions docs/install.txt
@@ -121,6 +121,10 @@ If you are sure, you could use --skip-swig option, so that it compiles the wrapper
codes without re-generating it.
$ python setup.py install --with-parallel --skip-ext --skip-swig --mfem-branch="master"

## PETSc (experimental)

$ pip install petsc
$ python setup.py install --with-parallel --with-petsc

## Other options
--unverifiedSSL :
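A quick way to confirm that a PETSc-enabled build works is to initialize PETSc from Python. This is a minimal sketch; it only uses calls that appear in examples/petsc/ex1p.py below and assumes the install above completed with --with-petsc:

    from mpi4py import MPI            # MPI must be initialized before PETSc
    import mfem.par as mfem

    # Empty string: do not read a PETSc options (rc) file
    mfem.MFEMInitializePetsc(None, None, "", None)
    print("PETSc PCG solver available:", hasattr(mfem, "PetscPCGSolver"))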
177 changes: 177 additions & 0 deletions examples/petsc/ex1p.py
@@ -0,0 +1,177 @@
'''
MFEM example 1p (PETSc version)

See the C++ version in the MFEM library for more detail.
'''
import sys
import os
from os.path import expanduser, join
import numpy as np

from mpi4py import MPI

from mfem.common.arg_parser import ArgParser
from mfem import path
import mfem.par as mfem

def_meshfile = expanduser(join(path, 'data', 'star.mesh'))

num_proc = MPI.COMM_WORLD.size
myid = MPI.COMM_WORLD.rank


def run(order=1, static_cond=False,
meshfile=def_meshfile,
visualization=False,
device='cpu',
pa=False):

algebraic_ceed = False

device = mfem.Device(device)
if myid == 0:
device.Print()

    # Initialize PETSc; the empty string means no PETSc options (rc) file is read
    mfem.MFEMInitializePetsc(None, None, "", None)

mesh = mfem.Mesh(meshfile, 1, 1)
dim = mesh.Dimension()

    # Refine the serial mesh so that it has at most about 10,000 elements
    ref_levels = int(np.floor(np.log(10000./mesh.GetNE())/np.log(2.)/dim))
for x in range(ref_levels):
mesh.UniformRefinement()
mesh.ReorientTetMesh()
pmesh = mfem.ParMesh(MPI.COMM_WORLD, mesh)
del mesh

par_ref_levels = 2
for l in range(par_ref_levels):
pmesh.UniformRefinement()

if order > 0:
fec = mfem.H1_FECollection(order, dim)
    elif pmesh.GetNodes():
        # reuse the FE collection already attached to the mesh nodes (isoparametric case)
        fec = pmesh.GetNodes().OwnFEC()
        print("Using isoparametric FEs: " + str(fec.Name()))
else:
order = 1
fec = mfem.H1_FECollection(order, dim)

fespace = mfem.ParFiniteElementSpace(pmesh, fec)
fe_size = fespace.GlobalTrueVSize()

if (myid == 0):
print('Number of finite element unknowns: ' + str(fe_size))

ess_tdof_list = mfem.intArray()
if pmesh.bdr_attributes.Size() > 0:
ess_bdr = mfem.intArray(pmesh.bdr_attributes.Max())
ess_bdr.Assign(1)
fespace.GetEssentialTrueDofs(ess_bdr, ess_tdof_list)

    # Set up the parallel linear form b(.) which corresponds to the right-hand
    # side of the FEM linear system, which in this case is (1, phi_i) where
    # phi_i are the basis functions in the finite element space fespace
b = mfem.ParLinearForm(fespace)
one = mfem.ConstantCoefficient(1.0)
b.AddDomainIntegrator(mfem.DomainLFIntegrator(one))
b.Assemble()

x = mfem.ParGridFunction(fespace)
x.Assign(0.0)

a = mfem.ParBilinearForm(fespace)
if pa:
a.SetAssemblyLevel(mfem.AssemblyLevel_PARTIAL)

a.AddDomainIntegrator(mfem.DiffusionIntegrator(one))

if static_cond:
a.EnableStaticCondensation()

a.Assemble()

A = mfem.OperatorPtr()
B = mfem.Vector()
X = mfem.Vector()
a.FormLinearSystem(ess_tdof_list, x, b, A, X, B)

if pa:
if mfem.UsesTensorBasis(fespace):
if algebraic_ceed:
assert False, "not supported"
# prec = new ceed::AlgebraicSolver(a, ess_tdof_list);
else:
prec = mfem.OperatorJacobiSmoother(a, ess_tdof_list)
else:
prec = mfem.HypreBoomerAMG()

    # The hypre-preconditioned CG solve from the standard ex1p is kept below
    # for reference; the PETSc PCG solver is used instead.
    '''
cg = mfem.CGSolver(MPI.COMM_WORLD)
cg.SetRelTol(1e-12)
cg.SetMaxIter(200)
cg.SetPrintLevel(1)
cg.SetPreconditioner(prec)
cg.SetOperator(A.Ptr())
cg.Mult(B, X)
'''
    # Solve the linear system with PETSc's PCG, preconditioned by hypre BoomerAMG
    hA = A.AsHypreParMatrix()
    pcg = mfem.PetscPCGSolver(hA, False)
    prec = mfem.HypreBoomerAMG(hA)
pcg.SetPreconditioner(prec)
pcg.iterative_mode = True
pcg.SetRelTol(1e-12)
pcg.SetAbsTol(1e-12)
pcg.SetMaxIter(200)
pcg.SetPrintLevel(1)
pcg.Mult(B, X)

a.RecoverFEMSolution(X, b, x)

    # Save the refined mesh and the solution in parallel (one file per MPI rank);
    # they can be viewed later with GLVis: "glvis -np <np> -m mesh -g sol"
    smyid = '{:0>6d}'.format(myid)
mesh_name = "mesh."+smyid
sol_name = "sol."+smyid

pmesh.Print(mesh_name, 8)
x.Save(sol_name, 8)


if __name__ == "__main__":
parser = ArgParser(description='Ex1 (Laplace Problem)')
parser.add_argument('-m', '--mesh',
default='star.mesh',
action='store', type=str,
help='Mesh file to use.')
parser.add_argument('-vis', '--visualization',
action='store_true',
help='Enable GLVis visualization')
parser.add_argument('-o', '--order',
action='store', default=1, type=int,
help="Finite element order (polynomial degree) or -1 for isoparametric space.")
parser.add_argument('-sc', '--static-condensation',
action='store_true',
help="Enable static condensation.")
parser.add_argument("-pa", "--partial-assembly",
action='store_true',
help="Enable Partial Assembly.")
parser.add_argument("-d", "--device",
default="cpu", type=str,
help="Device configuration string, see Device::Configure().")

args = parser.parse_args()

if myid == 0:
parser.print_options(args)

order = args.order
static_cond = args.static_condensation
meshfile = expanduser(
join(os.path.dirname(__file__), '..', '..', 'data', args.mesh))
visualization = args.visualization
device = args.device
pa = args.partial_assembly

run(order=order,
static_cond=static_cond,
meshfile=meshfile,
visualization=visualization,
device=device,
pa=pa)
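Like the other parallel examples, this script is typically launched through mpirun, for instance (assuming the PETSc-enabled build described in docs/install.txt):

$ mpirun -np 4 python ex1p.py -o 1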
17 changes: 17 additions & 0 deletions examples/petsc/rc_ex1p
@@ -0,0 +1,17 @@
# Options for the Krylov solver
-ksp_view
-ksp_converged_reason
# Options for the preconditioner
-pc_type gamg
-pc_gamg_type agg
-pc_gamg_agg_nsmooths 1
-pc_gamg_coarse_eq_limit 100
-pc_gamg_reuse_interpolation
-pc_gamg_square_graph 1
-pc_gamg_threshold 0.0
-mg_levels_ksp_max_it 2
-mg_levels_ksp_type chebyshev
-mg_levels_esteig_ksp_type cg
-mg_levels_esteig_ksp_max_it 10
-mg_levels_ksp_chebyshev_esteig 0,0.05,0,1.05
-mg_levels_pc_type sor
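PETSc reads an options file like this one when its name is passed at initialization. In ex1p.py above the rc-file argument is left empty; a sketch of pointing it at this file instead (the third argument of MFEMInitializePetsc is assumed to be the rc-file name, following the MFEM C++ PETSc examples):

    mfem.MFEMInitializePetsc(None, None, "rc_ex1p", None)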
3 changes: 2 additions & 1 deletion mfem/_par/handle.i
Expand Up @@ -12,6 +12,7 @@
#include "fem/pfespace.hpp"
#include "numpy/arrayobject.h"
#include "pyoperator.hpp"
#include "../common/pycoefficient.hpp"
%}

%include "../common/mfem_config.i"
@@ -34,7 +35,7 @@ import_array();
%import "hypre.i"
#endif
#ifdef MFEM_USE_PETSC
%include "petsc.i"
%import "petsc.i"
#endif

%import "mem_manager.i"
43 changes: 43 additions & 0 deletions mfem/_par/petsc.i
@@ -0,0 +1,43 @@
%module(package="mfem._par") petsc
%{
#include "mfem.hpp"

#include "../common/pycoefficient.hpp"
#include "numpy/arrayobject.h"
#include "pyoperator.hpp"
#include "../common/io_stream.hpp"
%}

%include "../common/mfem_config.i"

#ifdef MFEM_USE_MPI
%include mpi4py/mpi4py.i
%mpi4py_typemap(Comm, MPI_Comm);
#endif

%init %{
import_array();
%}

%include "exception.i"
%import "fe.i"
%import "fe_fixed_order.i"
%import "element.i"
%import "mesh.i"
%import "operators.i"
%import "coefficient.i"

%include "../common/typemap_macros.i"
%include "../common/exception.i"

%import "../common/io_stream_typemap.i"
OSTREAM_TYPEMAP(std::ostream&)
ISTREAM_TYPEMAP(std::istream&)

%import "petscconf.h"
%include "linalg/petsc.hpp"





5 changes: 5 additions & 0 deletions mfem/_par/pyoperator.hpp
@@ -1,3 +1,6 @@
#ifndef PYMFEM_PYOPERATOR
#define PYMFEM_PYOPERATOR

#include "linalg/operator.hpp"

namespace mfem{
@@ -20,3 +23,5 @@ class PyOperatorBase : public Operator
virtual ~PyTimeDependentOperatorBase() {}
};
}

#endif
9 changes: 8 additions & 1 deletion mfem/_par/setup.py
@@ -44,10 +44,11 @@ def get_extensions():
from setup_local import (mfembuilddir, mfemincdir, mfemsrcdir, mfemlnkdir,
mfemptpl, build_mfem,
hypreinc, metisinc, hyprelib, metis5lib,
petscinc, petsclib,
cc_par, cxx_par,
cxx11flag,
add_pumi, add_cuda, add_libceed, add_strumpack,
add_suitesparse, add_gslibp)
add_suitesparse, add_gslibp, add_petsc)

include_dirs = [mfembuilddir, mfemincdir, mfemsrcdir,
numpy.get_include(),
@@ -154,6 +155,11 @@ def get_extensions():
include_dirs.append(gslibpinc)
modules.append("gslib")

if add_petsc == '1':
include_dirs.append(petscinc)
library_dirs.append(petsclib)
modules.append("petsc")

sources = {name: [name + "_wrap.cxx"] for name in modules}
proxy_names = {name: '_'+name for name in modules}

@@ -173,6 +179,7 @@
else:
runtime_library_dirs = library_dirs


ext_modules = [Extension(proxy_names[modules[0]],
sources=sources[modules[0]],
extra_compile_args=extra_compile_args,
4 changes: 4 additions & 0 deletions mfem/common/io_stream.hpp
@@ -1,3 +1,6 @@
#ifndef PYMFEM_IO_STREAM
#define PYMFEM_IO_STREAM

namespace PyMFEM
{
class wFILE
@@ -45,3 +48,4 @@
};
}

#endif
4 changes: 4 additions & 0 deletions mfem/common/pycoefficient.hpp
@@ -1,3 +1,6 @@
#ifndef PYMFEM_PYCOEFFICIENT
#define PYMFEM_PYCOEFFICIENT

#include "fem/coefficient.hpp"

#define assertm(exp, msg) assert(((void)msg, exp))
@@ -191,3 +194,4 @@ class MatrixNumbaCoefficient : public mfem::MatrixFunctionCoefficient, public N
};


#endif
4 changes: 4 additions & 0 deletions mfem/par.py
@@ -97,6 +97,10 @@
from mfem._par.gslib import *
except:
pass
try:
from mfem._par.petsc import *
except:
pass

import mfem._par.array as array
import mfem._par.blockoperator as blockoperator