Merge branch 'develop' into dump-style-yaml

Axel Kohlmeyer
2022-04-08 18:42:28 -04:00
71 changed files with 11869 additions and 3308 deletions

View File

@@ -5,7 +5,14 @@
import sysconfig
import ctypes
library = sysconfig.get_config_vars('INSTSONAME')[0]
pylib = ctypes.CDLL(library)
try:
    pylib = ctypes.CDLL(library)
except OSError as e:
    if library.endswith(".a"):
        library = library[:-len(".a")] + ".so"
        pylib = ctypes.CDLL(library)
    else:
        raise e
if not pylib.Py_IsInitialized():
raise RuntimeError("This interpreter is not compatible with python-based mliap for LAMMPS.")
del sysconfig, ctypes, library, pylib
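For context, a standalone sketch of the same check as plain Python, runnable outside LAMMPS; the .a-to-.so fallback mirrors the change above, and the function name is illustrative only:

import ctypes
import sysconfig

def probe_libpython():
    """Report whether the interpreter's own libpython is loaded and initialized in this process."""
    name = sysconfig.get_config_vars('INSTSONAME')[0]  # e.g. 'libpython3.10.so.1.0'
    try:
        lib = ctypes.CDLL(name)
    except OSError:
        if not name.endswith(".a"):
            raise
        # Static builds report an archive name; retry with the matching shared object.
        lib = ctypes.CDLL(name[:-len(".a")] + ".so")
    return bool(lib.Py_IsInitialized())

if __name__ == "__main__":
    print("libpython initialized:", probe_libpython())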

View File

@@ -19,10 +19,75 @@ import numpy as np
import torch
def calc_n_params(model):
"""
Returns the number of trainable parameters in the network model.
Parameters:
model (torch.nn.Module): Network model that maps descriptors to a per atom attribute
Returns:
n_params (int): Number of NN model parameters
"""
return sum(p.nelement() for p in model.parameters())
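A quick usage sketch for calc_n_params; the two-layer network below is purely illustrative:

net = torch.nn.Sequential(
    torch.nn.Linear(30, 16),  # 30 descriptors -> 16 hidden units: 30*16 + 16 = 496 parameters
    torch.nn.Tanh(),
    torch.nn.Linear(16, 1),   # 16 hidden units -> 1 energy: 16*1 + 1 = 17 parameters
)
print(calc_n_params(net))     # 513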
class TorchWrapper(torch.nn.Module):
def __init__(self, model,n_descriptors,n_elements,n_params=None,device=None,dtype=torch.float64):
"""
A class to wrap Modules to ensure lammps mliap compatibility.
...
Attributes
----------
model : torch.nn.Module
Network model that maps descriptors to a per atom attribute
device : torch.device (None)
Accelerator device
dtype : torch.dtype (torch.float64)
Dtype to use on device
n_params : int (None)
Number of NN model parameters
n_descriptors : int
Max number of per atom descriptors
n_elements : int
Max number of elements
Methods
-------
forward(elems, descriptors, beta, energy):
Feeds descriptors to the network model to fill the per atom energy and beta (descriptor gradient) arrays.
"""
def __init__(self, model, n_descriptors, n_elements, n_params=None, device=None, dtype=torch.float64):
"""
Constructs all the necessary attributes for the network module.
Parameters
----------
model : torch.nn.Module
Network model that maps descriptors to a per atom attribute
n_descriptors : int
Max number of per atom descriptors
n_elements : int
Max number of elements
n_params : int (None)
Number of NN model parameters
device : torch.device (None)
Accelerator device
dtype : torch.dtype (torch.float64)
Dtype to use on device
"""
super().__init__()
self.model = model
@@ -40,26 +105,222 @@ class TorchWrapper(torch.nn.Module):
self.n_descriptors = n_descriptors
self.n_elements = n_elements
def forward(self, elems, bispectrum, beta, energy):
def forward(self, elems, descriptors, beta, energy):
"""
Takes element types and descriptors calculated via lammps and
computes the per atom energies and betas (descriptor gradients), from which lammps builds forces.
bispectrum = torch.from_numpy(bispectrum).to(dtype=self.dtype, device=self.device).requires_grad_(True)
Parameters
----------
elems : numpy.array
Per atom element types
descriptors : numpy.array
Per atom descriptors
beta : numpy.array
Pre-allocated beta array to be filled in place with the new per atom betas (descriptor gradients)
energy : numpy.array
Pre-allocated per atom energy array to be filled in place with the new per atom energies
(Note: This is a pointer to the lammps per atom energies)
Returns
-------
None
"""
descriptors = torch.from_numpy(descriptors).to(dtype=self.dtype, device=self.device).requires_grad_(True)
elems = torch.from_numpy(elems).to(dtype=torch.long, device=self.device) - 1
with torch.autograd.enable_grad():
energy_nn = self.model(bispectrum, elems)
energy_nn = self.model(descriptors, elems)
if energy_nn.ndim > 1:
energy_nn = energy_nn.flatten()
beta_nn = torch.autograd.grad(energy_nn.sum(), bispectrum)[0]
beta_nn = torch.autograd.grad(energy_nn.sum(), descriptors)[0]
beta[:] = beta_nn.detach().cpu().numpy().astype(np.float64)
energy[:] = energy_nn.detach().cpu().numpy().astype(np.float64)
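A minimal end-to-end sketch of the wrapper's numpy-in, numpy-out contract. The toy model, random inputs, and array sizes are illustrative only; in production LAMMPS supplies these arrays through the mliappy interface rather than user code:

class ToyModel(torch.nn.Module):
    """Illustrative model: ignores element types and maps descriptors to one energy per atom."""
    def __init__(self, n_descriptors):
        super().__init__()
        self.net = torch.nn.Linear(n_descriptors, 1)

    def forward(self, descriptors, elems):
        return self.net(descriptors)

n_atoms, n_descriptors = 8, 30
wrapper = TorchWrapper(ToyModel(n_descriptors).double(), n_descriptors=n_descriptors, n_elements=1)

elems = np.ones(n_atoms, dtype=np.int32)     # LAMMPS element types are 1-based; forward() shifts them to 0-based
descriptors = np.random.rand(n_atoms, n_descriptors)
beta = np.zeros((n_atoms, n_descriptors))    # filled in place with dE/d(descriptor) for each atom
energy = np.zeros(n_atoms)                   # filled in place with per atom energies

wrapper(elems, descriptors, beta, energy)
print(energy.sum(), np.abs(beta).max())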
class IgnoreElems(torch.nn.Module):
def __init__(self,subnet):
"""
A class to represent a NN model agnostic of element typing.
...
Attributes
----------
subnet : torch.nn.Module
Network model that maps descriptors to a per atom attribute
Methods
-------
forward(descriptors, elems):
Feeds descriptors to network model
"""
def __init__(self, subnet):
"""
Constructs all the necessary attributes for the network module.
Parameters
----------
subnet : torch.nn.Module
Network model that maps descriptors to a per atom attribute
"""
super().__init__()
self.subnet = subnet
def forward(self,bispectrum,elems):
return self.subnet(bispectrum)
def forward(self, descriptors, elems):
"""
Feeds descriptors to network model
Parameters
----------
descriptors : torch.tensor
Per atom descriptors
elems : torch.tensor
Per atom element types
Returns
-------
self.subnet(descriptors) : torch.tensor
Per atom attribute computed by the network model
"""
return self.subnet(descriptors)
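A short sketch of IgnoreElems on its own; the layer sizes and random inputs are illustrative:

n_descriptors = 30
mlp = torch.nn.Sequential(
    torch.nn.Linear(n_descriptors, 16),
    torch.nn.Softplus(),
    torch.nn.Linear(16, 1),
)
model = IgnoreElems(mlp)

descriptors = torch.rand(4, n_descriptors)   # 4 atoms with random descriptors
elems = torch.zeros(4, dtype=torch.long)     # element types are accepted but never used
per_atom_energy = model(descriptors, elems)  # shape (4, 1): the same MLP is applied to every atom

For a single-element system, a model wrapped this way would typically be what gets handed to TorchWrapper.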
class UnpackElems(torch.nn.Module):
"""
A class to represent a NN model that is nominally element-agnostic but
supports multiple element types by unpacking descriptors into per type blocks.
...
Attributes
----------
subnet : torch.nn.Module
Network model that maps descriptors to a per atom attribute
n_types : int
Number of atom types used in training the NN model.
Methods
-------
forward(descriptors, elems):
Feeds descriptors to network model after adding zeros into
descriptor columns relating to different atom types
"""
def __init__(self, subnet, n_types):
"""
Constructs all the necessary attributes for the network module.
Parameters
----------
subnet : torch.nn.Module
Network model that maps descriptors to a per atom attribute.
n_types : int
Number of atom types used in training the NN model.
"""
super().__init__()
self.subnet = subnet
self.n_types = n_types
def forward(self, descriptors, elems):
"""
Feeds descriptors to network model after adding zeros into
descriptor columns relating to different atom types
Parameters
----------
descriptors : torch.tensor
Per atom descriptors
elems : torch.tensor
Per atom element types
Returns
-------
self.subnet(unpacked_descriptors, elems) : torch.tensor
Per atom attribute computed by the network model
"""
unpacked_descriptors = torch.zeros(elems.shape[0], self.n_types, descriptors.shape[1], dtype=torch.float64)
for i, ind in enumerate(elems):
unpacked_descriptors[i, ind, :] = descriptors[i]
return self.subnet(torch.reshape(unpacked_descriptors, (elems.shape[0], -1)), elems)
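A small sketch of the zero-padding UnpackElems performs. The wrapped subnet must accept n_types * n_descriptors inputs and, as the return statement above shows, it is also passed the element tensor, so an IgnoreElems wrapper is used here; all sizes are illustrative:

n_types, n_descriptors, n_atoms = 2, 3, 4
subnet = IgnoreElems(torch.nn.Linear(n_types * n_descriptors, 1).double())
model = UnpackElems(subnet, n_types)

descriptors = torch.arange(n_atoms * n_descriptors, dtype=torch.float64).reshape(n_atoms, n_descriptors)
elems = torch.tensor([0, 1, 0, 1])            # zero-based element types (TorchWrapper subtracts 1 before calling the model)
per_atom_energy = model(descriptors, elems)   # shape (4, 1)
# Internally, atom 0 (type 0) becomes [d0, d1, d2, 0, 0, 0] and atom 1 (type 1) becomes [0, 0, 0, d3, d4, d5].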
class ElemwiseModels(torch.nn.Module):
"""
A class to represent a NN model dependent on element typing.
...
Attributes
----------
subnets : list of torch.nn.Modules
Per element type network models that map per element type
descriptors to a per atom attribute.
n_types : int
Number of atom types used in training the NN model.
Methods
-------
forward(descriptors, elems):
Feeds each atom's descriptors to the network model
belonging to that atom's element type
"""
def __init__(self, subnets, n_types):
"""
Constructs all the necessary attributes for the network module.
Parameters
----------
subnets : list of torch.nn.Modules
Per element type network models that map per element
type descriptors to a per atom attribute.
n_types : int
Number of atom types used in training the NN model.
"""
super().__init__()
self.subnets = subnets
self.n_types = n_types
def forward(self, descriptors, elems):
"""
Feeds each atom's descriptors to the subnetwork for
that atom's element type
Parameters
----------
descriptors : torch.tensor
Per atom descriptors
elems : torch.tensor
Per atom element types
Returns
-------
per_atom_attributes : torch.tensor
Per atom attributes computed by the per element type network models
"""
per_atom_attributes = torch.zeros(elems.shape[0], dtype=descriptors.dtype)
given_elems, elem_indices = torch.unique(elems, return_inverse=True)
for i, elem in enumerate(given_elems):
    per_atom_attributes[elem_indices == i] = self.subnets[elem](descriptors[elem_indices == i]).flatten()
return per_atom_attributes
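A minimal usage sketch with two per element subnetworks; the layer sizes are illustrative, and the ModuleList container is an assumption made here so the subnetwork parameters register with the parent module:

n_descriptors = 30
subnets = torch.nn.ModuleList([
    torch.nn.Sequential(torch.nn.Linear(n_descriptors, 8), torch.nn.Tanh(), torch.nn.Linear(8, 1)),  # element type 0
    torch.nn.Sequential(torch.nn.Linear(n_descriptors, 8), torch.nn.Tanh(), torch.nn.Linear(8, 1)),  # element type 1
]).double()

model = ElemwiseModels(subnets, n_types=2)

descriptors = torch.rand(5, n_descriptors, dtype=torch.float64)
elems = torch.tensor([0, 0, 1, 1, 0])        # zero-based element types, as passed on by TorchWrapper
per_atom_energy = model(descriptors, elems)  # shape (5,): each atom is evaluated by its own element's subnetwork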