From a7ba11fee9a83c4a7ee3af1f79c547214f3db1ce Mon Sep 17 00:00:00 2001
From: Axel Kohlmeyer <akohlmey@gmail.com>
Date: Fri, 6 Jan 2023 16:42:51 -0500
Subject: [PATCH] mliappy fixes for kokkos support

---
 python/lammps/mliap/__init__.py | 3 ++-
 python/lammps/mliap/pytorch.py  | 5 -----
 2 files changed, 2 insertions(+), 6 deletions(-)

diff --git a/python/lammps/mliap/__init__.py b/python/lammps/mliap/__init__.py
index 9a12b075c4..c1a9752855 100644
--- a/python/lammps/mliap/__init__.py
+++ b/python/lammps/mliap/__init__.py
@@ -33,6 +33,7 @@ else:
     from .loader import load_model, load_unified, activate_mliappy
     try:
         from .loader import load_model_kokkos, activate_mliappy_kokkos
-    except:
+    except Exception as ee:
+        # ignore import error, it means that the KOKKOS package was not included in LAMMPS
         pass
     del sysconfig, ctypes, library, pylib
diff --git a/python/lammps/mliap/pytorch.py b/python/lammps/mliap/pytorch.py
index fdf04b77d3..93df96d2e0 100644
--- a/python/lammps/mliap/pytorch.py
+++ b/python/lammps/mliap/pytorch.py
@@ -140,8 +140,6 @@ class TorchWrapper(torch.nn.Module):
         else:
             energy_nn = self.model(descriptors, elems).flatten()
             energy[:] = energy_nn.detach().cpu().numpy().astype(np.float64)
-            #if energy_nn.ndim > 1:
-            #    energy_nn = energy_nn.flatten()
 
         if (use_gpu_data):
             beta_nn = torch.as_tensor(beta,dtype=self.dtype, device=self.device)
@@ -150,9 +148,6 @@
             beta_nn = torch.autograd.grad(energy_nn.sum(), descriptors)[0]
             beta[:] = beta_nn.detach().cpu().numpy().astype(np.float64)
 
-        elems=elems+1
-
-
 class IgnoreElems(torch.nn.Module):
     """
     A class to represent a NN model agnostic of element typing.