mliappy fixes for kokkos support

This commit is contained in:
Axel Kohlmeyer
2023-01-06 16:42:51 -05:00
parent 6a8df032b6
commit a7ba11fee9
2 changed files with 2 additions and 6 deletions

View File

@@ -33,6 +33,7 @@ else:
from .loader import load_model, load_unified, activate_mliappy
try:
from .loader import load_model_kokkos, activate_mliappy_kokkos
except:
except Exception as ee:
# ignore import error, it means that the KOKKOS package was not included in LAMMPS
pass
del sysconfig, ctypes, library, pylib

View File

@@ -140,8 +140,6 @@ class TorchWrapper(torch.nn.Module):
else:
energy_nn = self.model(descriptors, elems).flatten()
energy[:] = energy_nn.detach().cpu().numpy().astype(np.float64)
#if energy_nn.ndim > 1:
# energy_nn = energy_nn.flatten()
if (use_gpu_data):
beta_nn = torch.as_tensor(beta,dtype=self.dtype, device=self.device)
@@ -150,9 +148,6 @@ class TorchWrapper(torch.nn.Module):
beta_nn = torch.autograd.grad(energy_nn.sum(), descriptors)[0]
beta[:] = beta_nn.detach().cpu().numpy().astype(np.float64)
elems=elems+1
class IgnoreElems(torch.nn.Module):
"""
A class to represent a NN model agnostic of element typing.