Changes to MLIAP python
- update lammps python package to use setuptools
- refactor MLIAP classes into a lammps.mliap subpackage of the lammps python package
- change TorchWrapper to use dtype and device as arguments
- turn activation of mliappy into functions (was a class)
- add a check that the python interpreter is compatible with the python library calls internal to lammps

mliap_model_python_couple.pyx:
- load models ending in '.pt' or '.pth' with pytorch rather than pickle
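For reference, a minimal usage sketch of the refactored interface (not part of the diff below; the pair_style/pair_coeff commands selecting the mliappy model are omitted, and the network and descriptor sizes are placeholders):

import torch
import lammps
from lammps.mliap import activate_mliappy, load_model
from lammps.mliap.pytorch import TorchWrapper, IgnoreElems

lmp = lammps.lammps()

# Activate the coupling module before the mliap pair style is defined.
activate_mliappy(lmp)

# ... lmp commands defining the mliappy pair style would go here ...

n_descriptors, n_elements = 30, 1                   # placeholder sizes
net = torch.nn.Linear(n_descriptors, 1)             # placeholder per-atom energy network
model = TorchWrapper(IgnoreElems(net), n_descriptors, n_elements)

# Hand the live python object to the pair style.
load_model(model)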
python/lammps/mliap/__init__.py (new file, 13 lines)
@@ -0,0 +1,13 @@
# Check compatibility of this build with the python shared library.
# If this fails, lammps will segfault because its library will
# try to improperly start up a new interpreter.
import sysconfig
import ctypes

library = sysconfig.get_config_vars('INSTSONAME')[0]
pylib = ctypes.CDLL(library)
if not pylib.Py_IsInitialized():
    raise RuntimeError("This interpreter is not compatible with python-based mliap for LAMMPS.")
del sysconfig, ctypes, library, pylib

from .loader import load_model, activate_mliappy
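This check guards against running under an interpreter that is not backed by the shared libpython that liblammps will use; in that case the freshly loaded library reports no initialized interpreter, and LAMMPS would later try to start its own and segfault. A standalone illustration of the same probe (assumes a CPython built with --enable-shared so the library file exists; the printed soname is platform dependent):

import sysconfig, ctypes
soname = sysconfig.get_config_vars('INSTSONAME')[0]   # e.g. 'libpython3.9.so.1.0'
print(soname, bool(ctypes.CDLL(soname).Py_IsInitialized()))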
python/lammps/mliap/loader.py (new file, 52 lines)
@@ -0,0 +1,52 @@
# ----------------------------------------------------------------------
# LAMMPS - Large-scale Atomic/Molecular Massively Parallel Simulator
# http://lammps.sandia.gov, Sandia National Laboratories
# Steve Plimpton, sjplimp@sandia.gov
#
# Copyright (2003) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains
# certain rights in this software. This software is distributed under
# the GNU General Public License.
#
# See the README file in the top-level LAMMPS directory.
# -------------------------------------------------------------------------

# ----------------------------------------------------------------------
# Contributing author: Nicholas Lubbers (LANL)
# -------------------------------------------------------------------------

import sys
import importlib.util
import importlib.machinery

def activate_mliappy(lmp):
    try:
        # Begin Importlib magic to find the embedded python module.
        # This is needed because the filename for liblammps does not
        # match the spec for normal python modules, wherein
        # file names match with PyInit function names.
        # Also, python normally doesn't look for extensions besides '.so'.
        # We fix both of these problems by providing an explicit
        # path to the extension module 'mliap_model_python_couple' in
        # the liblammps shared library.

        path = lmp.lib._name
        loader = importlib.machinery.ExtensionFileLoader('mliap_model_python_couple', path)
        spec = importlib.util.spec_from_loader('mliap_model_python_couple', loader)
        module = importlib.util.module_from_spec(spec)
        sys.modules['mliap_model_python_couple'] = module
        spec.loader.exec_module(module)
        # End Importlib magic to find the embedded python module

    except Exception as ee:
        raise ImportError("Could not load MLIAP python coupling module.") from ee

def load_model(model):
    try:
        import mliap_model_python_couple
    except ImportError as ie:
        raise ImportError("MLIAP python module must be activated before loading\n"
                          "the pair style. Call lammps.mliap.activate_mliappy(lmp)."
                          ) from ie
    mliap_model_python_couple.load_from_python(model)
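As an alternative to handing a live object to load_model(), the commit message notes that mliap_model_python_couple.pyx now loads filenames ending in '.pt' or '.pth' with pytorch rather than pickle. A hedged sketch of producing such a file (network and sizes are placeholders, not part of this commit):

import torch
from lammps.mliap.pytorch import TorchWrapper, IgnoreElems

net = torch.nn.Linear(30, 1)                                # placeholder network
wrapped = TorchWrapper(IgnoreElems(net), n_descriptors=30, n_elements=1)
torch.save(wrapped, "mliap_model.pt")                       # '.pt'/'.pth' files are read back with pytorch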
python/lammps/mliap/pytorch.py (new file, 65 lines)
@@ -0,0 +1,65 @@
# ----------------------------------------------------------------------
# LAMMPS - Large-scale Atomic/Molecular Massively Parallel Simulator
# http://lammps.sandia.gov, Sandia National Laboratories
# Steve Plimpton, sjplimp@sandia.gov
#
# Copyright (2003) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains
# certain rights in this software. This software is distributed under
# the GNU General Public License.
#
# See the README file in the top-level LAMMPS directory.
# -------------------------------------------------------------------------

# ----------------------------------------------------------------------
# Contributing author: Nicholas Lubbers (LANL)
# -------------------------------------------------------------------------

import numpy as np
import torch

def calc_n_params(model):
    return sum(p.nelement() for p in model.parameters())

class TorchWrapper(torch.nn.Module):
    def __init__(self, model, n_descriptors, n_elements, n_params=None, device=None, dtype=torch.float64):
        super().__init__()

        self.model = model
        self.device = device
        self.dtype = dtype

        # Put model on device and convert to dtype
        self.to(self.dtype)
        self.to(self.device)

        if n_params is None:
            n_params = calc_n_params(model)

        self.n_params = n_params
        self.n_descriptors = n_descriptors
        self.n_elements = n_elements

    def forward(self, elems, bispectrum, beta, energy):

        bispectrum = torch.from_numpy(bispectrum).to(dtype=self.dtype, device=self.device).requires_grad_(True)
        elems = torch.from_numpy(elems).to(dtype=torch.long, device=self.device) - 1

        with torch.autograd.enable_grad():

            energy_nn = self.model(bispectrum, elems)
            if energy_nn.ndim > 1:
                energy_nn = energy_nn.flatten()

            beta_nn = torch.autograd.grad(energy_nn.sum(), bispectrum)[0]

        beta[:] = beta_nn.detach().cpu().numpy().astype(np.float64)
        energy[:] = energy_nn.detach().cpu().numpy().astype(np.float64)

class IgnoreElems(torch.nn.Module):
    def __init__(self, subnet):
        super().__init__()
        self.subnet = subnet

    def forward(self, bispectrum, elems):
        return self.subnet(bispectrum)
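To illustrate the forward() contract above: the caller passes 1-based element types, per-atom bispectrum descriptors, and preallocated beta and energy arrays as numpy arrays; forward() fills beta with dE/dB from autograd and energy with per-atom energies, both in place. A self-contained sketch with placeholder shapes and a placeholder network (not part of this commit):

import numpy as np
import torch
from lammps.mliap.pytorch import TorchWrapper, IgnoreElems

n_atoms, n_desc = 4, 30
model = TorchWrapper(IgnoreElems(torch.nn.Linear(n_desc, 1)), n_desc, 1)

elems = np.ones(n_atoms, dtype=np.int32)            # 1-based element types
bispectrum = np.random.rand(n_atoms, n_desc)        # placeholder descriptors
beta = np.zeros((n_atoms, n_desc))
energy = np.zeros(n_atoms)

model(elems, bispectrum, beta, energy)               # beta and energy are now filled in place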