Interface to metatensor to use arbitrary machine learning models as collective variables #1082

Merged: 32 commits, Jun 27, 2024

Commits (32):
b06c4a8
metatensor: scaffold the build system
Luthaf Mar 22, 2024
92be03b
Fix build instructions on Linux
Luthaf Mar 28, 2024
a48796d
Implementation of the metatensor action
Luthaf Apr 8, 2024
2b1c3a3
Debug extension loading
Luthaf Apr 11, 2024
2f8b614
Determine the shape of metatensor model output early
Luthaf Apr 16, 2024
3fe2246
Check the capabilities to define evaluation options
Luthaf Apr 16, 2024
af8db59
Run the model on other dtype/device
Luthaf Apr 16, 2024
2a1e5df
Handle out of order samples
Luthaf Apr 16, 2024
4e252ff
Print model metadata & citations
Luthaf Apr 17, 2024
9ee2cae
Fix sign for virials
Luthaf Apr 22, 2024
24a6486
Start working on the documentation
Luthaf Apr 22, 2024
397be72
Fix build when libraries cannot be found
Luthaf Apr 22, 2024
206194a
codecheck does not like dashes in file names
Luthaf Apr 23, 2024
676272f
Disable astyle for metatensor code
Luthaf Apr 23, 2024
11628f4
Added example in metatensor showing usage
Apr 24, 2024
4f1e7c2
Check model consistency by default
Luthaf Apr 25, 2024
8089cf5
Add perovskite regression test
Luthaf May 1, 2024
15a1833
Finished writing basic regression test for metatensor
Apr 24, 2024
4227168
Add regtest without any dependency, using pre-generated model
Luthaf Apr 25, 2024
8cfce8d
Added copyright file and list of people involved
May 1, 2024
2a59711
Update to metatensor-torch v0.5
Luthaf May 6, 2024
15ea378
Do not use Action::error() outside of the constructor
Luthaf May 15, 2024
d05d96e
Update docs website URL
Luthaf May 15, 2024
1a5eae0
Implement selected_atoms
Luthaf May 15, 2024
46b7723
Remove regtests that rely on rascaline
Luthaf May 23, 2024
42d3d9b
Correct header
Luthaf May 23, 2024
d765964
Regenerate configure with autoconf 2.69
Luthaf May 23, 2024
4f9b9c4
Skip regtests on CI
Luthaf May 23, 2024
3b16287
Declare output for metatensor action
Luthaf May 27, 2024
3ef3c8d
Only one namespace per line
Luthaf May 27, 2024
63bb890
Some more cppcheck fixes
Luthaf May 27, 2024
3ba18d7
Use import.sh to vendor code from vesin
Luthaf Jun 12, 2024
Files changed:
configure (201 additions, 0 deletions)
@@ -751,6 +751,7 @@ enable_af_ocl
enable_af_cuda
enable_af_cpu
enable_libtorch
enable_metatensor
enable_openmp
'
ac_precious_vars='build_alias
@@ -1452,6 +1453,7 @@ Optional Features:
--enable-af_cuda enable search for arrayfire_cuda, default: no
--enable-af_cpu enable search for arrayfire_cpu, default: no
--enable-libtorch enable search for libtorch, default: no
--enable-metatensor enable search for metatensor, default: no
--disable-openmp do not use OpenMP

Some influential environment variables:
@@ -3220,6 +3222,24 @@ fi

#added by luigibonati

metatensor=
# Check whether --enable-metatensor was given.
if test "${enable_metatensor+set}" = set; then :
enableval=$enable_metatensor; case "${enableval}" in
(yes) metatensor=true ;;
(no) metatensor=false ;;
(*) as_fn_error $? "wrong argument to --enable-metatensor" "$LINENO" 5 ;;
esac
else
case "no" in
(yes) metatensor=true ;;
(no) metatensor=false ;;
esac

fi






@@ -9635,6 +9655,11 @@ $as_echo "$as_me: WARNING: cannot enable __PLUMED_HAS_ARRAYFIRE" >&2;}

fi

# metatensor requires libtorch
if test $metatensor = true ; then
libtorch=true;
fi

#added by luigibonati
if test $libtorch = true ; then
# disable as-needed in linking libraries (both static and shared)
@@ -9955,6 +9980,182 @@ $as_echo "$as_me: WARNING: cannot enable __PLUMED_HAS_LIBTORCH" >&2;}
fi
fi

if test $metatensor = true ; then
# find metatensor and metatensor_torch

found=ko
__PLUMED_HAS_METATENSOR=no
if test "${libsearch}" = true ; then
testlibs="metatensor metatensor_torch"
else
testlibs=""
fi
save_LIBS="$LIBS"

# check if multiple libraries are required simultaneously
multiple="no"
if test "true" = "true"; then
multiple="yes"
all_LIBS=""
for testlib in $testlibs;
do
all_LIBS="$all_LIBS -l$testlib"
done
testlibs=" " # to check only without libraries, and later with all together
fi

# check without libraries
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking metatensor without extra libs" >&5
$as_echo_n "checking metatensor without extra libs... " >&6; }
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */

// torch header creates a lot of pedantic warnings, which we can't do anything about
// we disable them to make finding the relevant part in the config.log easier
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpedantic"
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Wfloat-equal"
#pragma GCC diagnostic ignored "-Wfloat-conversion"
#pragma GCC diagnostic ignored "-Wimplicit-float-conversion"
#pragma GCC diagnostic ignored "-Wimplicit-int-conversion"
#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wold-style-cast"
#include <torch/torch.h>
#include <torch/script.h>
#pragma GCC diagnostic pop
#include <metatensor/torch.hpp>
#if METATENSOR_TORCH_VERSION_MAJOR != 0 || METATENSOR_TORCH_VERSION_MINOR != 5
#error "this code is only compatible with metatensor-torch >=0.5.1,<0.6"
#endif
int main() {
metatensor_torch::version();
return 0;
}

_ACEOF
if ac_fn_cxx_try_link "$LINENO"; then :
found=ok
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
$as_echo "yes" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }

fi
rm -f core conftest.err conftest.$ac_objext \
conftest$ac_exeext conftest.$ac_ext

if test "$found" = "ko" ; then
if test "$multiple" = "yes" ; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking metatensor with $all_LIBS" >&5
$as_echo_n "checking metatensor with $all_LIBS... " >&6; }
LIBS="$all_LIBS $LIBS"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */

// torch header creates a lot of pedantic warnings, which we can't do anything about
// we disable them to make finding the relevant part in the config.log easier
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpedantic"
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Wfloat-equal"
#pragma GCC diagnostic ignored "-Wfloat-conversion"
#pragma GCC diagnostic ignored "-Wimplicit-float-conversion"
#pragma GCC diagnostic ignored "-Wimplicit-int-conversion"
#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wold-style-cast"
#include <torch/torch.h>
#include <torch/script.h>
#pragma GCC diagnostic pop
#include <metatensor/torch.hpp>
#if METATENSOR_TORCH_VERSION_MAJOR != 0 || METATENSOR_TORCH_VERSION_MINOR != 5
#error "this code is only compatible with metatensor-torch >=0.5.1,<0.6"
#endif
int main() {
metatensor_torch::version();
return 0;
}

_ACEOF
if ac_fn_cxx_try_link "$LINENO"; then :
found=ok
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
$as_echo "yes" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }

fi
rm -f core conftest.err conftest.$ac_objext \
conftest$ac_exeext conftest.$ac_ext
else
for testlib in $testlibs
do
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking metatensor with -l$testlib" >&5
$as_echo_n "checking metatensor with -l$testlib... " >&6; }
LIBS="-l$testlib $LIBS"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */

// torch header creates a lot of pedantic warnings, which we can't do anything about
// we disable them to make finding the relevant part in the config.log easier
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpedantic"
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Wfloat-equal"
#pragma GCC diagnostic ignored "-Wfloat-conversion"
#pragma GCC diagnostic ignored "-Wimplicit-float-conversion"
#pragma GCC diagnostic ignored "-Wimplicit-int-conversion"
#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wold-style-cast"
#include <torch/torch.h>
#include <torch/script.h>
#pragma GCC diagnostic pop
#include <metatensor/torch.hpp>
#if METATENSOR_TORCH_VERSION_MAJOR != 0 || METATENSOR_TORCH_VERSION_MINOR != 5
#error "this code is only compatible with metatensor-torch >=0.5.1,<0.6"
#endif
int main() {
metatensor_torch::version();
return 0;
}

_ACEOF
if ac_fn_cxx_try_link "$LINENO"; then :
found=ok
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
$as_echo "yes" >&6; }
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
$as_echo "no" >&6; }

fi
rm -f core conftest.err conftest.$ac_objext \
conftest$ac_exeext conftest.$ac_ext
if test $found = ok ; then
break
fi
LIBS="$save_LIBS"
done
fi
fi

if test $found = ok ; then
$as_echo "#define __PLUMED_HAS_METATENSOR 1" >>confdefs.h

__PLUMED_HAS_METATENSOR=yes
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cannot enable __PLUMED_HAS_METATENSOR" >&5
$as_echo "$as_me: WARNING: cannot enable __PLUMED_HAS_METATENSOR" >&2;}
LIBS="$save_LIBS"
fi

fi

# in non-debug mode, add -DNDEBUG
if test "$debug" = false ; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: Release mode, adding -DNDEBUG" >&5
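For orientation, here is a minimal sketch of how the new option is meant to be invoked when configuring PLUMED. CPPFLAGS and LDFLAGS are the standard variables listed under "Some influential environment variables" above; the /opt/... prefixes are placeholders, and the include/torch/csrc/api/include layout is that of a stock libtorch distribution rather than something prescribed by this diff.

# Hypothetical installation prefixes; adjust to where libtorch and the
# metatensor C++ libraries actually live.
TORCH_PREFIX=/opt/libtorch
METATENSOR_PREFIX=/opt/metatensor

# --enable-metatensor implies --enable-libtorch (see the check above).
./configure --enable-metatensor \
    CPPFLAGS="-I$TORCH_PREFIX/include -I$TORCH_PREFIX/include/torch/csrc/api/include -I$METATENSOR_PREFIX/include" \
    LDFLAGS="-L$TORCH_PREFIX/lib -L$METATENSOR_PREFIX/lib"

# At runtime the shared libraries must still be discoverable, e.g. through
# LD_LIBRARY_PATH (DYLD_LIBRARY_PATH on macOS).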
configure.ac (35 additions, 0 deletions)
@@ -323,6 +323,7 @@ PLUMED_CONFIG_ENABLE([af_ocl],[search for arrayfire_ocl],[no])
PLUMED_CONFIG_ENABLE([af_cuda],[search for arrayfire_cuda],[no])
PLUMED_CONFIG_ENABLE([af_cpu],[search for arrayfire_cpu],[no])
PLUMED_CONFIG_ENABLE([libtorch],[search for libtorch],[no]) #added by luigibonati
PLUMED_CONFIG_ENABLE([metatensor],[search for metatensor],[no])

AC_ARG_VAR(SOEXT,[extension of dynamic libraries (so/dylib)])
AC_ARG_VAR(STATIC_LIBS,[variables that should be linked statically directly to MD code - configure will add here -ldl if necessary ])
@@ -926,6 +927,11 @@ if test "$af_cpu" = true ; then
PLUMED_CHECK_PACKAGE([arrayfire.h],[af_is_double],[__PLUMED_HAS_ARRAYFIRE],[afcpu])
fi

# metatensor requires libtorch
if test $metatensor = true ; then
libtorch=true;
fi

#added by luigibonati
if test $libtorch = true ; then
# disable as-needed in linking libraries (both static and shared)
@@ -963,6 +969,35 @@ if test $libtorch = true ; then
fi
fi

if test $metatensor = true ; then
# find metatensor and metatensor_torch
PLUMED_CHECK_CXX_PACKAGE([metatensor],[
// torch header creates a lot of pedantic warnings, which we can't do anything about
// we disable them to make finding the relevant part in the config.log easier
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpedantic"
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Wfloat-equal"
#pragma GCC diagnostic ignored "-Wfloat-conversion"
#pragma GCC diagnostic ignored "-Wimplicit-float-conversion"
#pragma GCC diagnostic ignored "-Wimplicit-int-conversion"
#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wold-style-cast"
#include <torch/torch.h>
#include <torch/script.h>
#pragma GCC diagnostic pop
#include <metatensor/torch.hpp>
#if METATENSOR_TORCH_VERSION_MAJOR != 0 || METATENSOR_TORCH_VERSION_MINOR != 5
#error "this code is only compatible with metatensor-torch >=0.5.1,<0.6"
#endif
int main() {
metatensor_torch::version();
return 0;
}
], [__PLUMED_HAS_METATENSOR], [metatensor metatensor_torch], [true])
fi

# in non-debug mode, add -DNDEBUG
if test "$debug" = false ; then
AC_MSG_NOTICE([Release mode, adding -DNDEBUG])
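When the check fails, the probe program passed to PLUMED_CHECK_CXX_PACKAGE above can be rebuilt by hand to see the real compiler or linker error. A rough sketch, assuming the probe is saved as conftest.cpp, that -std=c++17 is sufficient, and that the torch, torch_cpu and c10 library names match a standard libtorch installation (none of these details come from this diff; only metatensor and metatensor_torch are named by the check itself):

# Hypothetical prefixes, as in the configure sketch above.
TORCH_PREFIX=/opt/libtorch
METATENSOR_PREFIX=/opt/metatensor

# Compile and link the same probe that configure builds.
"${CXX:-c++}" -std=c++17 conftest.cpp -o conftest \
    -I"$TORCH_PREFIX/include" -I"$TORCH_PREFIX/include/torch/csrc/api/include" \
    -I"$METATENSOR_PREFIX/include" \
    -L"$TORCH_PREFIX/lib" -L"$METATENSOR_PREFIX/lib" \
    -lmetatensor_torch -lmetatensor -ltorch -ltorch_cpu -lc10 \
  && ./conftest && echo "metatensor-torch 0.5.x found and linked"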
regtest/.gitignore (1 addition, 0 deletions)
@@ -42,6 +42,7 @@
!/clusters
!/unittest
!/wham
!/metatensor
# These files we just want to ignore completely
tmp
report.txt
regtest/metatensor/rt-basic/Makefile (1 addition, 0 deletions)
@@ -0,0 +1 @@
include ../../scripts/test.make
regtest/metatensor/rt-basic/config (8 additions, 0 deletions)
@@ -0,0 +1,8 @@
plumed_modules=metatensor
plumed_needs=metatensor
type=driver

# NOTE: to enable --debug-forces, also change the dtype of the models to float64
arg="--plumed plumed.dat --ixyz structure.xyz --length-units A --dump-forces forces --dump-forces-fmt %8.2f" # --debug-forces forces.num"

PLUMED_ALLOW_SKIP_ON_TRAVIS=yes
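For reference, the harness pulled in by the Makefile above (type=driver) runs plumed driver with exactly the options from the arg line. A manual equivalent, assuming the regtest's own plumed.dat and structure.xyz inputs and that make in the test directory drives ../../scripts/test.make as in the other PLUMED regtests:

cd regtest/metatensor/rt-basic

# Run the full regression test through the shared harness.
make

# Or call the driver directly with the options from the arg line above.
plumed driver --plumed plumed.dat --ixyz structure.xyz --length-units A \
    --dump-forces forces --dump-forces-fmt %8.2f
# With float64 models, numerical force checking can be enabled by appending:
#   --debug-forces forces.num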