Merge pull request #1290 from akohlmey/python-script-portability

Use the argparse module for the Install.py scripts in lib, plus further portability and consistency enhancements. Some lint removal as well.
Axel Kohlmeyer
2019-02-01 01:18:40 +01:00
committed by GitHub
17 changed files with 1374 additions and 890 deletions
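For orientation, the reworked scripts share a common argparse skeleton along the lines of the sketch below. This is a condensed illustration of the pattern visible in the diffs that follow, not a verbatim copy of any one script; the option names vary per library.

import sys
from argparse import ArgumentParser

HELP = 'Syntax from src dir: make lib-<name> args="-m machine -e suffix"'

parser = ArgumentParser(prog='Install.py',
                        description="LAMMPS library build wrapper script")
# -m/--machine and -e/--extramake are typical; other libraries add -b, -p, -v, ...
parser.add_argument("-m", "--machine",
                    help="suffix of a Makefile.* file used for compiling this library")
parser.add_argument("-e", "--extramake",
                    help="set EXTRAMAKE variable to Makefile.lammps.<extramake>")
args = parser.parse_args()

# mirror the old error() behavior: print the argparse help, then exit
# with the make-style syntax examples as the exit message
if not args.machine and not args.extramake:
    parser.print_help()
    sys.exit(HELP)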

View File

@ -559,10 +559,15 @@ if(PKG_USER-PLUMED)
message(STATUS "PLUMED download requested - we will build our own")
include(ExternalProject)
ExternalProject_Add(plumed_build
URL https://github.com/plumed/plumed2/releases/download/v2.4.3/plumed-src-2.4.3.tgz
URL_MD5 b1be7c48971627febc11c61b70767fc5
URL https://github.com/plumed/plumed2/releases/download/v2.4.4/plumed-src-2.4.4.tgz
URL_MD5 71ed465bdc7c2059e282dbda8d564e71
BUILD_IN_SOURCE 1
CONFIGURE_COMMAND <SOURCE_DIR>/configure --prefix=<INSTALL_DIR> ${CONFIGURE_REQUEST_PIC})
CONFIGURE_COMMAND <SOURCE_DIR>/configure --prefix=<INSTALL_DIR>
${CONFIGURE_REQUEST_PIC}
--enable-modules=all
CXX=${CMAKE_MPI_CXX_COMPILER}
CC=${CMAKE_MPI_C_COMPILER}
)
ExternalProject_get_property(plumed_build INSTALL_DIR)
set(PLUMED_INSTALL_DIR ${INSTALL_DIR})
list(APPEND LAMMPS_DEPS plumed_build)
@ -614,8 +619,8 @@ if(PKG_USER-SMD)
message(STATUS "Eigen3 download requested - we will build our own")
include(ExternalProject)
ExternalProject_Add(Eigen3_build
URL http://bitbucket.org/eigen/eigen/get/3.3.4.tar.gz
URL_MD5 1a47e78efe365a97de0c022d127607c3
URL http://bitbucket.org/eigen/eigen/get/3.3.7.tar.gz
URL_MD5 f2a417d083fe8ca4b8ed2bc613d20f07
CONFIGURE_COMMAND "" BUILD_COMMAND "" INSTALL_COMMAND ""
)
ExternalProject_get_property(Eigen3_build SOURCE_DIR)

View File

@ -95,6 +95,7 @@ which GPU hardware to build for.
GPU_ARCH settings for different GPU hardware are as follows:
sm_12 or sm_13 for GT200 (supported by CUDA 3.2 until CUDA 6.5)
sm_20 or sm_21 for Fermi (supported by CUDA 3.2 until CUDA 7.5)
sm_30 or sm_35 or sm_37 for Kepler (supported since CUDA 5)
sm_50 or sm_52 for Maxwell (supported since CUDA 6)
@ -135,7 +136,7 @@ specified by the "-m" switch. For your convenience, machine makefiles
for "mpi" and "serial" are provided, which have the same settings as
the corresponding machine makefiles in the main LAMMPS source
folder. In addition you can alter 4 important settings in the
Makefile.machine you start from via the corresponding -h, -a, -p, -e
Makefile.machine you start from via the corresponding -c, -a, -p, -e
switches (as in the examples above), and also save a copy of the new
Makefile if desired:

View File

@ -1,49 +1,66 @@
#!/usr/bin/env python
# install.py tool to do a generic build of a library
# soft linked to by many of the lib/Install.py files
# used to automate the steps described in the corresponding lib/README
"""
Install.py tool to do a generic build of a library
soft linked to by many of the lib/Install.py files
used to automate the steps described in the corresponding lib/README
"""
from __future__ import print_function
import sys,os,subprocess
import sys, os, subprocess
from argparse import ArgumentParser
sys.path.append('..')
from install_helpers import error,get_cpus
from install_helpers import get_cpus, fullpath
# parse args
parser = ArgumentParser(prog='Install.py',
description="LAMMPS library build wrapper script")
args = sys.argv[1:]
nargs = len(args)
if nargs == 0: error()
HELP = """
Syntax from src dir: make lib-libname args="-m machine -e suffix"
Syntax from lib dir: python Install.py -m machine -e suffix
machine = None
extraflag = 0
libname = name of lib dir (e.g. atc, h5md, meam, poems, etc)
specify -m and optionally -e, order does not matter
iarg = 0
while iarg < nargs:
if args[iarg] == "-m":
if iarg+2 > nargs: error()
machine = args[iarg+1]
iarg += 2
elif args[iarg] == "-e":
if iarg+2 > nargs: error()
extraflag = 1
suffix = args[iarg+1]
iarg += 2
else: error()
Examples:
make lib-poems args="-m serial" # build POEMS lib with same settings as in the serial Makefile in src
make lib-colvars args="-m mpi" # build USER-COLVARS lib with same settings as in the mpi Makefile in src
make lib-meam args="-m ifort" # build MEAM lib with custom Makefile.ifort (using Intel Fortran)
"""
# parse and process arguments
parser.add_argument("-m", "--machine",
help="suffix of a <libname>/Makefile.* file used for compiling this library")
parser.add_argument("-e", "--extramake",
help="set EXTRAMAKE variable in <libname>/Makefile.<machine> to Makefile.lammps.<extramake>")
args = parser.parse_args()
# print help message and exit, if neither machine nor extramake options are given
if not args.machine and not args.extramake:
parser.print_help()
sys.exit(HELP)
machine = args.machine
extraflag = args.extramake is not None
suffix = args.extramake
# set lib from working dir
cwd = os.getcwd()
cwd = fullpath('.')
lib = os.path.basename(cwd)
# create Makefile.auto as copy of Makefile.machine
# reset EXTRAMAKE if requested
if not os.path.exists("Makefile.%s" % machine):
error("lib/%s/Makefile.%s does not exist" % (lib,machine))
sys.exit("lib/%s/Makefile.%s does not exist" % (lib, machine))
lines = open("Makefile.%s" % machine,'r').readlines()
fp = open("Makefile.auto",'w')
lines = open("Makefile.%s" % machine, 'r').readlines()
fp = open("Makefile.auto", 'w')
has_extramake = False
for line in lines:
@ -51,7 +68,7 @@ for line in lines:
if len(words) == 3 and words[0] == "EXTRAMAKE" and words[1] == '=':
has_extramake = True
if extraflag:
line = line.replace(words[2],"Makefile.lammps.%s" % suffix)
line = line.replace(words[2], "Makefile.lammps.%s" % suffix)
fp.write(line)
fp.close()
@ -62,13 +79,16 @@ n_cpus = get_cpus()
print("Building lib%s.a ..." % lib)
cmd = "make -f Makefile.auto clean; make -f Makefile.auto -j%d" % n_cpus
try:
txt = subprocess.check_output(cmd,shell=True,stderr=subprocess.STDOUT)
txt = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
print(txt.decode('UTF-8'))
except subprocess.CalledProcessError as e:
print("Make failed with:\n %s" % e.output.decode('UTF-8'))
sys.exit(1)
print("Make failed with:\n %s" % e.output.decode('UTF-8'))
sys.exit(1)
if os.path.exists("lib%s.a" % lib):
print("Build was successful")
else:
sys.exit("Build of lib/%s/lib%s.a was NOT successful" % (lib, lib))
if os.path.exists("lib%s.a" % lib): print("Build was successful")
else: error("Build of lib/%s/lib%s.a was NOT successful" % (lib,lib))
if has_extramake and not os.path.exists("Makefile.lammps"):
print("lib/%s/Makefile.lammps was NOT created" % lib)
print("WARNING: lib/%s/Makefile.lammps was NOT created" % lib)

View File

@ -5,7 +5,12 @@
from __future__ import print_function
import sys,os,subprocess
sys.path.append('..')
from install_helpers import error,get_cpus
from install_helpers import get_cpus
from argparse import ArgumentParser
parser = ArgumentParser(prog='Install.py',
description="LAMMPS library build wrapper script")
# help message
@ -26,37 +31,33 @@ Examples:
make lib-colvars args="-m mpi" # build COLVARS lib with default mpi compiler wrapper
"""
# parse args
args = sys.argv[1:]
nargs = len(args)
if nargs == 0: error(help=help)
machine = None
extraflag = False
iarg = 0
while iarg < nargs:
if args[iarg] == "-m":
if iarg+2 > len(args): error(help=help)
machine = args[iarg+1]
iarg += 2
elif args[iarg] == "-e":
if iarg+2 > len(args): error(help=help)
extraflag = True
suffix = args[iarg+1]
iarg += 2
else: error(help=help)
# set lib from working dir
cwd = os.getcwd()
lib = os.path.basename(cwd)
# parse and process arguments
parser.add_argument("-m", "--machine",
help="suffix of a <libname>/Makefile.* or of a src/MAKE/MACHINES/Makefile.* file used for compiling this library")
parser.add_argument("-e", "--extramake",
help="set EXTRAMAKE variable in <libname>/Makefile.<machine> to Makefile.lammps.<extramake>")
args = parser.parse_args()
# print help message and exit, if neither machine nor extramake options are given
if not args.machine and not args.extramake:
parser.print_help()
sys.exit(help)
machine = args.machine
extraflag = args.extramake != None
suffix = args.extramake
def get_lammps_machine_flags(machine):
"""Parse Makefile.machine from LAMMPS, return dictionary of compiler flags"""
if not os.path.exists("../../src/MAKE/MACHINES/Makefile.%s" % machine):
error("Cannot locate src/MAKE/MACHINES/Makefile.%s" % machine)
sys.exit("Cannot locate src/MAKE/MACHINES/Makefile.%s" % machine)
lines = open("../../src/MAKE/MACHINES/Makefile.%s" % machine,
'r').readlines()
machine_flags = {}
@ -102,7 +103,7 @@ if not os.path.exists("Makefile.%s" % machine):
machine_flags = get_lammps_machine_flags(machine)
gen_colvars_makefile_machine(machine, machine_flags)
if not os.path.exists("Makefile.%s" % machine):
error("lib/%s/Makefile.%s does not exist" % (lib,machine))
sys.exit("lib/%s/Makefile.%s does not exist" % (lib,machine))
# create Makefile.auto as copy of Makefile.machine
# reset EXTRAMAKE if requested
@ -131,6 +132,6 @@ except subprocess.CalledProcessError as e:
sys.exit(1)
if os.path.exists("lib%s.a" % lib): print("Build was successful")
else: error("Build of lib/%s/lib%s.a was NOT successful" % (lib,lib))
else: sys.exit("Build of lib/%s/lib%s.a was NOT successful" % (lib,lib))
if not os.path.exists("Makefile.lammps"):
print("lib/%s/Makefile.lammps was NOT created" % lib)

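The body of get_lammps_machine_flags() is cut off by the diff context above. A simplified sketch of what such a Makefile-variable parser might look like is given below; this is an illustration only, not the committed implementation.

def get_lammps_machine_flags(machine):
    """Parse Makefile.machine from LAMMPS, return dictionary of compiler flags"""
    # illustration: collect simple "VAR = value ..." assignments
    # (e.g. CC, CCFLAGS, LINKFLAGS) from src/MAKE/MACHINES/Makefile.<machine>
    machine_flags = {}
    with open("../../src/MAKE/MACHINES/Makefile.%s" % machine, 'r') as mk:
        for line in mk:
            words = line.split()
            if len(words) >= 3 and words[1] == '=':
                machine_flags[words[0]] = ' '.join(words[2:])
    return machine_flags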
View File

@ -1,16 +1,23 @@
#!/usr/bin/env python
# Install.py tool to build the GPU library
# used to automate the steps described in the README file in this dir
"""
Install.py tool to build the GPU library
used to automate the steps described in the README file in this dir
"""
from __future__ import print_function
import sys,os,subprocess
import sys, os, subprocess, shutil
from argparse import ArgumentParser
sys.path.append('..')
from install_helpers import error,get_cpus
from install_helpers import get_cpus
parser = ArgumentParser(prog='Install.py',
description="LAMMPS library build wrapper script")
# help message
help = """
HELP = """
Syntax from src dir: make lib-gpu args="-m machine -h hdir -a arch -p precision -e esuffix -b -o osuffix"
Syntax from lib dir: python Install.py -m machine -h hdir -a arch -p precision -e esuffix -b -o osuffix
@ -23,28 +30,8 @@ optionally uses Makefile.auto to build the GPU library -> libgpu.a
and to copy a Makefile.lammps.esuffix -> Makefile.lammps
optionally copies Makefile.auto to a new Makefile.osuffix
-m = use Makefile.machine as starting point, copy to Makefile.auto
default machine = linux
default for -h, -a, -p, -e settings are those in -m Makefile
-h = set CUDA_HOME variable in Makefile.auto to hdir
hdir = path to NVIDIA Cuda software, e.g. /usr/local/cuda
-a = set CUDA_ARCH variable in Makefile.auto to arch
use arch = sm_20 for Fermi (C2050/C2070, deprecated as of CUDA 8.0)
or GeForce GTX 580 or similar
use arch = sm_30 for Kepler (K10)
use arch = sm_35 for Kepler (K40) or GeForce GTX Titan or similar
use arch = sm_37 for Kepler (dual K80)
use arch = sm_60 for Pascal (P100)
use arch = sm_70 for Volta
-p = set CUDA_PRECISION variable in Makefile.auto to precision
use precision = double or mixed or single
-e = set EXTRAMAKE variable in Makefile.auto to Makefile.lammps.esuffix
-b = make the GPU library using Makefile.auto
first performs a "make clean"
then produces libgpu.a if successful
also copies EXTRAMAKE file -> Makefile.lammps
-e can set which Makefile.lammps.esuffix file is copied
-o = copy final Makefile.auto to Makefile.osuffix
See lib/gpu/README and the LAMMPS manual for more information
on which settings to use and how to build.
Examples:
@ -53,67 +40,68 @@ make lib-gpu args="-m xk7 -p single -o xk7.single" # create new Makefile.xk
make lib-gpu args="-m mpi -a sm_35 -p single -o mpi.mixed -b" # create new Makefile.mpi.mixed, also build GPU lib with these settings
"""
# parse args
# parse and process arguments
args = sys.argv[1:]
nargs = len(args)
if nargs == 0: error(help=help)
parser.add_argument("-b", "--build", action="store_true",
help="build the GPU library from scratch from a customized Makefile.auto")
parser.add_argument("-m", "--machine", default='linux',
help="suffix of Makefile.machine used as base for customizing Makefile.auto")
parser.add_argument("-a", "--arch", default='sm_30',
choices=['sm_12', 'sm_13', 'sm_20', 'sm_21', 'sm_30', 'sm_35', 'sm_37',
'sm_50', 'sm_52', 'sm_60', 'sm_61', 'sm_70', 'sm_75'],
help="set GPU architecture and instruction set (default: 'sm_30')")
parser.add_argument("-p", "--precision", default='mixed', choices=['single', 'mixed', 'double'],
help="set GPU kernel precision mode (default: mixed)")
parser.add_argument("-e", "--extramake", default='standard',
help="set EXTRAMAKE variable in Makefile.auto to Makefile.lammps.<extramake>")
parser.add_argument("-c", "--cuda",
help="set CUDA_HOME variable in Makefile.auto. Will be used if $CUDA_HOME environment variable is not set")
parser.add_argument("-o", "--output",
help="if set, copy final Makefile.auto to Makefile.<output> for later re-use")
isuffix = "linux"
hflag = aflag = pflag = eflag = 0
args = parser.parse_args()
# print help message and exit, if neither build nor output options are given
if not args.build and not args.output:
parser.print_help()
sys.exit(HELP)
hflag = 0
eflag = 0
makeflag = 0
outflag = 0
iarg = 0
while iarg < nargs:
if args[iarg] == "-m":
if iarg+2 > nargs: error(help=help)
isuffix = args[iarg+1]
iarg += 2
elif args[iarg] == "-h":
if iarg+2 > nargs: error(help=help)
hflag = 1
hdir = args[iarg+1]
iarg += 2
elif args[iarg] == "-a":
if iarg+2 > nargs: error(help=help)
aflag = 1
arch = args[iarg+1]
iarg += 2
elif args[iarg] == "-p":
if iarg+2 > nargs: error(help=help)
pflag = 1
precision = args[iarg+1]
iarg += 2
elif args[iarg] == "-e":
if iarg+2 > nargs: error(help=help)
eflag = 1
lmpsuffix = args[iarg+1]
iarg += 2
elif args[iarg] == "-b":
makeflag = 1
iarg += 1
elif args[iarg] == "-o":
if iarg+2 > nargs: error(help=help)
outflag = 1
osuffix = args[iarg+1]
iarg += 2
else: error(help=help)
if args.build:
makeflag = 1
if pflag:
if precision == "double": precstr = "-D_DOUBLE_DOUBLE"
elif precision == "mixed": precstr = "-D_SINGLE_DOUBLE"
elif precision == "single": precstr = "-D_SINGLE_SINGLE"
else: error("Invalid precision setting")
isuffix = args.machine
arch = args.arch
if args.precision == "double":
precstr = "-D_DOUBLE_DOUBLE"
elif args.precision == "mixed":
precstr = "-D_SINGLE_DOUBLE"
else:
precstr = "-D_SINGLE_SINGLE"
lmpsuffix = args.extramake
if args.cuda:
hflag = 1
hdir = args.cuda
if args.output:
outflag = 1
osuffix = args.output
# create Makefile.auto
# reset EXTRAMAKE, CUDA_HOME, CUDA_ARCH, CUDA_PRECISION if requested
if not os.path.exists("Makefile.%s" % isuffix):
error("lib/gpu/Makefile.%s does not exist" % isuffix)
sys.exit("lib/gpu/Makefile.%s does not exist" % isuffix)
lines = open("Makefile.%s" % isuffix,'r').readlines()
fp = open("Makefile.auto",'w')
lines = open("Makefile.%s" % isuffix, 'r').readlines()
fp = open("Makefile.auto", 'w')
for line in lines:
words = line.split()
@ -122,13 +110,13 @@ for line in lines:
continue
if hflag and words[0] == "CUDA_HOME" and words[1] == '=':
line = line.replace(words[2],hdir)
if aflag and words[0] == "CUDA_ARCH" and words[1] == '=':
line = line.replace(words[2],"-arch=%s" % arch)
if pflag and words[0] == "CUDA_PRECISION" and words[1] == '=':
line = line.replace(words[2],precstr)
line = line.replace(words[2], hdir)
if words[0] == "CUDA_ARCH" and words[1] == '=':
line = line.replace(words[2], "-arch=%s" % arch)
if words[0] == "CUDA_PRECISION" and words[1] == '=':
line = line.replace(words[2], precstr)
if eflag and words[0] == "EXTRAMAKE" and words[1] == '=':
line = line.replace(words[2],"Makefile.lammps.%s" % lmpsuffix)
line = line.replace(words[2], "Makefile.lammps.%s" % lmpsuffix)
fp.write(line)
fp.close()
@ -143,20 +131,19 @@ if makeflag:
n_cpus = get_cpus()
cmd = "make -f Makefile.auto clean; make -f Makefile.auto -j%d" % n_cpus
try:
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
print(txt.decode('UTF-8'))
except subprocess.CalledProcessError as e:
print("Make failed with:\n %s" % e.output.decode('UTF-8'))
sys.exit(1)
if not os.path.exists("libgpu.a"):
error("Build of lib/gpu/libgpu.a was NOT successful")
sys.exit("Build of lib/gpu/libgpu.a was NOT successful")
if not os.path.exists("Makefile.lammps"):
error("lib/gpu/Makefile.lammps was NOT created")
sys.exit("lib/gpu/Makefile.lammps was NOT created")
# copy new Makefile.auto to Makefile.osuffix
if outflag:
print("Creating new Makefile.%s" % osuffix)
cmd = "cp Makefile.auto Makefile.%s" % osuffix
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
shutil.copyfile("Makefile.auto", "Makefile.%s" % osuffix)

View File

@ -91,51 +91,14 @@ Performance Computers - Three-Body Potentials. Computer Physics Communications.
----
NOTE: Installation of the CUDA SDK is not required.
NOTE: Installation of the CUDA SDK is not required, only the CUDA
toolkit itself or an OpenCL 1.2 compatible header and library.
Current styles supporting GPU acceleration:
Pair styles supporting GPU acceleration by this library
are marked in the list of Pair style potentials with a "g".
See the online version at: https://lammps.sandia.gov/doc/Commands_pair.html
1 beck
2 born/coul/long
3 born/coul/wolf
4 born
5 buck/coul/cut
6 buck/coul/long
7 buck
8 colloid
9 coul/dsf
10 coul/long
11 eam/alloy
12 eam/fs
13 eam
14 gauss
15 gayberne
16 lj96/cut
17 lj/charmm/coul/long
18 lj/class2/coul/long
19 lj/class2
20 lj/cut/coul/cut
21 lj/cut/coul/debye
22 lj/cut/coul/dsf
23 lj/cut/coul/long
24 lj/cut/coul/msm
25 lj/cut/dipole/cut
26 lj/cut
27 lj/expand
28 lj/gromacs
29 lj/sdk/coul/long
30 lj/sdk
31 lj/sf/dipole/sf
32 mie/cut
33 morse
34 resquared
35 soft
36 sw
37 table
38 yukawa/colloid
39 yukawa
40 pppm
41 ufm
In addition, the (plain) pppm kspace style is supported.
MULTIPLE LAMMPS PROCESSES
@ -165,7 +128,8 @@ that ships with the CUDA toolkit, but also with the CUDA driver library
(libcuda.so) that ships with the Nvidia driver. If you are compiling LAMMPS
on the head node of a GPU cluster, this library may not be installed,
so you may need to copy it over from one of the compute nodes (best into
this directory).
this directory). Recent CUDA toolkits starting with CUDA 9 provide a dummy
libcuda.so library that can be used for linking (but not for running).
The gpu library supports 3 precision modes as determined by
the CUDA_PRECISION variable:
@ -174,40 +138,37 @@ the CUDA_PRECISION variable:
CUDA_PRECISION = -D_DOUBLE_DOUBLE # Double precision for all calculations
CUDA_PRECISION = -D_SINGLE_DOUBLE # Accumulation of forces, etc. in double
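The third setting (all single precision, -D_SINGLE_SINGLE) is not visible in this hunk. Condensed from lib/gpu/Install.py earlier in this diff, the -p/--precision option maps onto these defines as follows:

# condensed from lib/gpu/Install.py: -p/--precision -> CUDA_PRECISION define
prec_map = {'double': '-D_DOUBLE_DOUBLE',   # double precision everywhere
            'mixed':  '-D_SINGLE_DOUBLE',   # single compute, double accumulation
            'single': '-D_SINGLE_SINGLE'}   # single precision everywhere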
NOTE: PPPM acceleration can only be run on GPUs with compute capability>=1.1.
You will get the error "GPU library not compiled for this accelerator."
when attempting to run PPPM on a GPU with compute capability 1.0.
As of CUDA 7.5 only GPUs with compute capability 2.0 (Fermi) or newer are
supported, and as of CUDA 9.0 only compute capability 3.0 (Kepler) or newer.
Support for older GPUs is kept in this library for historical reasons, but it
requires additional preprocessor flags and limits features; there is no value
in trying to use those GPUs for production calculations.
NOTE: Double precision is only supported on certain GPUs (with
compute capability>=1.3). If you compile the GPU library for
a GPU with compute capability 1.1 and 1.2, then only single
precision FFTs are supported, i.e. LAMMPS has to be compiled
with -DFFT_SINGLE. For details on configuring FFT support in
LAMMPS, see http://lammps.sandia.gov/doc/Section_start.html#2_2_4
NOTE: For graphics cards with compute capability>=1.3 (e.g. Tesla C1060),
make sure that -arch=sm_13 is set on the CUDA_ARCH line.
You have to make sure that you set a CUDA_ARCH line suitable for your
hardware and CUDA toolkit version: e.g. -arch=sm_35 for Tesla K20 or K40,
or -arch=sm_52 for GeForce GTX Titan X. A detailed list of GPU architectures
and CUDA compatible GPUs can be found e.g. here:
https://en.wikipedia.org/wiki/CUDA#GPUs_supported
NOTE: For newer graphics card (a.k.a. "Fermi", e.g. Tesla C2050), make
sure that either -arch=sm_20 or -arch=sm_21 is set on the
CUDA_ARCH line, depending on hardware and CUDA toolkit version.
NOTE: when compiling with CMake, all of the considerations listed below
are taken care of by the CMake configuration process, so no separate
compilation of the gpu library is required. This will also build in support
for all compute architectures that are supported by the CUDA toolkit version
used to build the gpu library.
NOTE: The gayberne/gpu pair style will only be installed if the ASPHERE
package has been installed.
NOTE: The cg/cmm/gpu and cg/cmm/coul/long/gpu pair styles will only be
installed if the USER-CG-CMM package has been installed.
NOTE: The lj/cut/coul/long/gpu, cg/cmm/coul/long/gpu, coul/long/gpu,
lj/charmm/coul/long/gpu and pppm/gpu styles will only be installed
if the KSPACE package has been installed.
Please note the CUDA_CODE settings in Makefile.linux_multi, which allow
compiling this library with support for multiple GPU architectures. This list
can be extended for newer GPUs with newer CUDA toolkits and should allow
building a single GPU library compatible with all GPUs that are worth using
for GPU acceleration and supported by the current CUDA toolkits and drivers.
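As an illustration, the multi-architecture flags collected in a CUDA_CODE variable typically take the nvcc "fat binary" form shown below; the architecture list here is a hypothetical example, and the actual entries in Makefile.linux_multi depend on the CUDA toolkit version.

# hypothetical example of the kind of flags gathered in CUDA_CODE
CUDA_CODE = " ".join([
    "-gencode arch=compute_35,code=[sm_35,compute_35]",
    "-gencode arch=compute_60,code=[sm_60,compute_60]",
    "-gencode arch=compute_70,code=[sm_70,compute_70]",
])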
NOTE: The system-specific setting LAMMPS_SMALLBIG (default), LAMMPS_BIGBIG,
or LAMMPS_SMALLSMALL if specified when building LAMMPS (i.e. in
src/MAKE/Makefile.foo) should be consistent with that specified
when building libgpu.a (i.e. by LMP_INC in the lib/gpu/Makefile.bar).
EXAMPLE BUILD PROCESS
EXAMPLE CONVENTIONAL BUILD PROCESS
--------------------------------
cd ~/lammps/lib/gpu

View File

@ -1,36 +1,5 @@
import hashlib,os,subprocess,sys
# default help message
defhelp = """
Syntax from src dir: make lib-libname args="-m machine -e suffix"
Syntax from lib dir: python Install.py -m machine -e suffix
libname = name of lib dir (e.g. atc, h5md, meam, poems, etc)
specify -m and optionally -e, order does not matter
-m = peform a clean followed by "make -f Makefile.machine"
machine = suffix of a lib/Makefile.* file
-e = set EXTRAMAKE variable in Makefile.machine to Makefile.lammps.suffix
does not alter existing Makefile.machine
Examples:
make lib-poems args="-m serial" # build POEMS lib with same settings as in the serial Makefile in src
make lib-colvars args="-m mpi" # build USER-COLVARS lib with same settings as in the mpi Makefile in src
make lib-meam args="-m ifort" # build MEAM lib with custom Makefile.ifort (using Intel Fortran)
"""
# print error message or help
def error(str=None,help=None):
if not str:
if not help:
print(defhelp)
else:
print(help)
else: print("ERROR",str)
sys.exit()
# try to auto-detect the maximum number of available CPUs
def get_cpus():
try:

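The body of get_cpus() is truncated by the diff context above. A CPU-count helper of this kind can be sketched as follows; this is an illustration, not necessarily the committed implementation.

import multiprocessing

def get_cpus():
    # sketch: auto-detect the number of available CPUs for parallel "make -j",
    # falling back to a single CPU if detection is not supported
    try:
        return multiprocessing.cpu_count()
    except NotImplementedError:
        return 1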
View File

@ -1,16 +1,28 @@
#!/usr/bin/env python
# install.py tool to download, compile, and setup the kim-api library
# used to automate the steps described in the README file in this dir
"""
Install.py tool to download, compile, and setup the kim-api library
used to automate the steps described in the README file in this dir
"""
from __future__ import print_function
import sys,os,re,subprocess,shutil
import sys, os, subprocess, shutil
from argparse import ArgumentParser
sys.path.append('..')
from install_helpers import error,fullpath,which,geturl
from install_helpers import fullpath, geturl
parser = ArgumentParser(prog='Install.py',
description="LAMMPS library build wrapper script")
# settings
thisdir = fullpath('.')
version = "kim-api-v1.9.5"
# help message
help = """
HELP = """
Syntax from src dir: make lib-kim args="-b -v version -a kim-name"
or: make lib-kim args="-b -a everything"
or: make lib-kim args="-n -a kim-name"
@ -20,27 +32,12 @@ Syntax from lib dir: python Install.py -b -v version -a kim-name
or: python Install.py -n -a kim-name
or: python Install.py -p /usr/local/open-kim -a kim-name
specify one or more options, order does not matter
-v = version of KIM API library to use
default = kim-api-v1.9.5 (current as of May 2018)
-b = download and build base KIM API library with example Models
this will delete any previous installation in the current folder
-n = do NOT download and build base KIM API library.
Use an existing installation
-p = specify location of KIM API installation (implies -n)
-a = add single KIM model or model driver with kim-name
to existing KIM API lib (see example below).
If kim-name = everything, then rebuild KIM API library with
*all* available OpenKIM Models (make take a long time).
-vv = be more verbose about what is happening while the script runs
Examples:
make lib-kim args="-b" # install KIM API lib with only example models
make lib-kim args="-a Glue_Ercolessi_Adams_Al__MO_324507536345_001" # Ditto plus one model
make lib-kim args="-b -a Glue_Ercolessi_Adams_Al__MO_324507536345_001" # Ditto plus one model
make lib-kim args="-b -a everything" # install KIM API lib with all models
make lib-kim args="-n -a EAM_Dynamo_Ackland_W__MO_141627196590_002" # only add one model or model driver
make lib-kim args="-n -a EAM_Dynamo_Ackland_W__MO_141627196590_002" # only add one model or model driver
See the list of KIM model drivers here:
https://openkim.org/kim-items/model-drivers/alphabetical
@ -53,62 +50,49 @@ https://openkim.org/kim-api
in the "What is in the KIM API source package?" section
"""
# parse args
pgroup = parser.add_mutually_exclusive_group()
pgroup.add_argument("-b", "--build", action="store_true",
help="download and build base KIM API library with example Models.")
pgroup.add_argument("-n", "--nobuild", action="store_true",
help="use the previously downloaded and compiled base KIM API.")
pgroup.add_argument("-p", "--path",
help="specify location of existing KIM API installation.")
parser.add_argument("-v", "--version", default=version,
help="set version of KIM API library to download and build (default: %s)" % version)
parser.add_argument("-a", "--add",
help="add single KIM model or model driver. If adding 'everything', then all available OpenKIM models are added (may take a long time)")
parser.add_argument("-vv", "--verbose", action="store_true",
help="be more verbose about is happening while this script runs")
args = sys.argv[1:]
nargs = len(args)
if nargs == 0: error(help=help)
args = parser.parse_args()
thisdir = fullpath('.')
version = "kim-api-v1.9.5"
# print help message and exit, if neither build nor path options are given
if not args.build and not args.path and not args.nobuild:
parser.print_help()
sys.exit(HELP)
buildflag = False
buildflag = args.build
pathflag = args.path is not None
addflag = args.add is not None
addmodelname = args.add
everythingflag = False
addflag = False
verboseflag = False
pathflag = False
if addflag and addmodelname == "everything":
everythingflag = True
buildflag = True
verboseflag = args.verbose
iarg = 0
while iarg < len(args):
if args[iarg] == "-v":
if iarg+2 > len(args): error(help=help)
version = args[iarg+1]
iarg += 2
elif args[iarg] == "-b":
buildflag = True
iarg += 1
elif args[iarg] == "-n":
buildflag = False
iarg += 1
elif args[iarg] == "-p":
if iarg+2 > len(args): error(help=help)
kimdir = fullpath(args[iarg+1])
pathflag = True
buildflag = False
iarg += 2
elif args[iarg] == "-a":
addflag = True
if iarg+2 > len(args): error(help=help)
addmodelname = args[iarg+1]
if addmodelname == "everything":
buildflag = True
everythingflag = True
addflag = False
iarg += 2
elif args[iarg] == "-vv":
verboseflag = True
iarg += 1
else: error(help=help)
if pathflag:
buildflag = False
kimdir = args.path
if not os.path.isdir(kimdir):
sys.exit("KIM API path %s does not exist" % kimdir)
kimdir = fullpath(kimdir)
url = "https://s3.openkim.org/kim-api/%s.txz" % version
# set KIM API directory
if pathflag:
if not os.path.isdir(kimdir):
print("\nkim-api is not installed at %s" % kimdir)
error(help=help)
# configure LAMMPS to use existing kim-api installation
with open("%s/Makefile.KIM_DIR" % thisdir, 'w') as mkfile:
mkfile.write("KIM_INSTALL_DIR=%s\n\n" % kimdir)
@ -116,9 +100,11 @@ if pathflag:
mkfile.write("print_dir:\n")
mkfile.write(" @printf $(KIM_INSTALL_DIR)\n")
print("Created %s/Makefile.KIM_DIR\n using %s" % (thisdir,kimdir))
print("Created %s/Makefile.KIM_DIR\n using %s" % (thisdir, kimdir))
else:
kimdir = os.path.join(os.path.abspath(thisdir), "installed-" + version)
if args.nobuild and not os.path.isdir(kimdir):
sys.exit("Cannot use -n/--nobuild without first building the KIM API with -b")
# download KIM tarball, unpack, build KIM
if buildflag:
@ -137,55 +123,60 @@ if buildflag:
mkfile.write("print_dir:\n")
mkfile.write(" @printf $(KIM_INSTALL_DIR)\n")
print("Created %s/Makefile.KIM_DIR\n using %s" % (thisdir,kimdir))
print("Created %s/Makefile.KIM_DIR\n using %s" % (thisdir, kimdir))
# download entire kim-api tarball
print("Downloading kim-api tarball ...")
geturl(url,"%s/%s.txz" % (thisdir,version))
geturl(url, "%s/%s.txz" % (thisdir, version))
print("Unpacking kim-api tarball ...")
cmd = 'cd "%s"; rm -rf "%s"; tar -xJvf %s.txz' % (thisdir,version,version)
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
cmd = 'cd "%s"; rm -rf "%s"; tar -xJvf %s.txz' % (thisdir, version, version)
subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
# configure kim-api
print("Configuring kim-api ...")
cmd = 'cd "%s/%s"; ./configure --prefix="%s"' % (thisdir,version,kimdir)
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
cmd = 'cd "%s/%s"; ./configure --prefix="%s"' % (thisdir, version, kimdir)
subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
# build kim-api
print("Building kim-api ...")
cmd = 'cd "%s/%s"; make' % (thisdir,version)
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
if verboseflag: print(txt.decode("UTF-8"))
cmd = 'cd "%s/%s"; make' % (thisdir, version)
txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
if verboseflag:
print(txt.decode("UTF-8"))
# install kim-api
print("Installing kim-api ...")
cmd = 'cd "%s/%s"; make install' % (thisdir,version)
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
if verboseflag: print(txt.decode("UTF-8"))
cmd = 'cd "%s/%s"; make install' % (thisdir, version)
txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
if verboseflag:
print(txt.decode("UTF-8"))
# remove source files
print("Building and installing example Models")
cmd = 'cd "%s/%s/examples"; make model-drivers-all-system' % (thisdir,version)
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
if verboseflag: print (txt.decode("UTF-8"))
cmd = 'cd "%s/%s/examples"; make models-all-system' % (thisdir,version)
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
if verboseflag: print (txt.decode("UTF-8"))
cmd = 'cd "%s/%s/examples"; make model-drivers-all-system' % (thisdir, version)
txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
if verboseflag:
print(txt.decode("UTF-8"))
cmd = 'cd "%s/%s/examples"; make models-all-system' % (thisdir, version)
txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
if verboseflag:
print(txt.decode("UTF-8"))
print("Removing kim-api source and build files ...")
cmd = 'cd "%s"; rm -rf %s; rm -rf %s.txz' % (thisdir,version,version)
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
cmd = 'cd "%s"; rm -rf %s; rm -rf %s.txz' % (thisdir, version, version)
subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
# add all OpenKIM models, if desired
if everythingflag:
print("Adding all OpenKIM models, this will take a while ...")
cmd = '%s/bin/kim-api-v1-collections-management install system OpenKIM' % (kimdir)
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
if verboseflag: print(txt.decode("UTF-8"))
txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
if verboseflag:
print(txt.decode("UTF-8"))
# add single OpenKIM model
if addflag:
@ -193,13 +184,13 @@ if addflag:
makefile_path = os.path.join(thisdir, "Makefile.KIM_DIR")
if os.path.isfile(makefile_path):
cmd = 'make --no-print-directory -f %s print_dir' % makefile_path
kimdir = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
kimdir = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
if not os.path.isdir(kimdir):
print("\nkim-api is not installed")
error(help=help)
sys.exit("\nkim-api is not installed")
# download single model
cmd = '%s/bin/kim-api-v1-collections-management install system %s' % (kimdir.decode("UTF-8"), addmodelname)
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
if verboseflag: print (txt.decode("UTF-8"))
txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
if verboseflag:
print(txt.decode("UTF-8"))

View File

@ -1,16 +1,35 @@
#!/usr/bin/env python
# Install.py tool to download, unpack, build, and link to the LATTE library
# used to automate the steps described in the README file in this dir
"""
Install.py tool to download, unpack, build, and link to the LATTE library
used to automate the steps described in the README file in this dir
"""
from __future__ import print_function
import sys,os,re,subprocess,shutil
import sys, os, subprocess, shutil, tarfile
from argparse import ArgumentParser
sys.path.append('..')
from install_helpers import error,get_cpus,fullpath,which,geturl,checkmd5sum
from install_helpers import fullpath, geturl, checkmd5sum
parser = ArgumentParser(prog='Install.py',
description="LAMMPS library build wrapper script")
# settings
version = '1.2.1'
suffix = 'gfortran'
# known checksums for different LATTE versions. used to validate the download.
checksums = { \
'1.1.0' : '533635721ee222d0ed2925a18fb5b294', \
'1.2.0' : '68bf0db879da5e068a71281020239ae7', \
'1.2.1' : '85ac414fdada2d04619c8f936344df14', \
}
# help message
help = """
HELP = """
Syntax from src dir: make lib-latte args="-b"
or: make lib-latte args="-p /usr/local/latte"
or: make lib-latte args="-m gfortran"
@ -21,129 +40,99 @@ Syntax from lib dir: python Install.py -b
or: python Install.py -m gfortran
or: python Install.py -v 1.2.1 -b
specify one or more options, order does not matter
-b = download and build the LATTE library
-p = specify folder of existing LATTE installation
-m = copy Makefile.lammps.suffix to Makefile.lammps
-v = set version of LATTE library to download and set up (default = 1.2.1)
Example:
make lib-latte args="-b -m gfortran" # download/build in lib/latte
make lib-latte args="-p $HOME/latte" # use existing LATTE installation
"""
# settings
version = '1.2.1'
pgroup = parser.add_mutually_exclusive_group()
pgroup.add_argument("-b", "--build", action="store_true",
help="download and build the LATTE library")
pgroup.add_argument("-p", "--path",
help="specify folder of existing LATTE installation")
parser.add_argument("-m", "--machine", choices=['gfortran', 'ifort', 'linalg', 'serial', 'mpi'],
help="suffix of a Makefile.lammps.* file used for linking LAMMPS with this library")
parser.add_argument("-v", "--version", default=version,
help="set version of LATTE to download and build (default: %s)" % version)
# known checksums for different LATTE versions. used to validate the download.
checksums = { \
'1.1.0' : '533635721ee222d0ed2925a18fb5b294', \
'1.2.0' : '68bf0db879da5e068a71281020239ae7', \
'1.2.1' : '85ac414fdada2d04619c8f936344df14', \
}
args = parser.parse_args()
# parse args
# print help message and exit, if neither build nor path options are given
if not args.build and not args.path:
parser.print_help()
sys.exit(HELP)
args = sys.argv[1:]
nargs = len(args)
if nargs == 0: error(help=help)
homepath = fullpath(".")
homepath = "."
buildflag = args.build
pathflag = args.path is not None
version = args.version
suffixflag = args.machine is not None
suffix = args.machine
buildflag = False
pathflag = False
suffixflag = False
linkflag = True
iarg = 0
while iarg < nargs:
if args[iarg] == "-p":
if iarg+2 > nargs: error(help=help)
lattedir = fullpath(args[iarg+1])
pathflag = True
iarg += 2
elif args[iarg] == "-b":
buildflag = True
iarg += 1
elif args[iarg] == "-m":
if iarg+2 > nargs: error(help=help)
suffix = args[iarg+1]
suffixflag = True
iarg += 2
elif args[iarg] == "-v":
if iarg+2 > nargs: error(help=help)
version = args[iarg+1]
iarg += 2
else: error(help=help)
if pathflag:
lattedir = args.path
if not os.path.isdir(lattedir):
sys.exit("LATTE path %s does not exist" % lattedir)
lattedir = fullpath(lattedir)
homedir = "LATTE-%s" % version
if (buildflag and pathflag):
error("Cannot use -b and -p flag at the same time")
if buildflag:
url = "https://github.com/lanl/LATTE/archive/v%s.tar.gz" % version
lattepath = fullpath(homepath)
lattedir = "%s/%s" % (lattepath,homedir)
if pathflag:
if not os.path.isdir(lattedir): error("LATTE path does not exist")
lattedir = os.path.join(lattepath, homedir)
# download and unpack LATTE tarball
if buildflag:
print("Downloading LATTE ...")
geturl(url,"LATTE.tar.gz")
geturl(url, "LATTE.tar.gz")
# verify downloaded archive integrity via md5 checksum, if known.
if version in checksums:
if not checkmd5sum(checksums[version],'LATTE.tar.gz'):
error("Checksum for LATTE library does not match")
if not checkmd5sum(checksums[version], 'LATTE.tar.gz'):
sys.exit("Checksum for LATTE library does not match")
print("Unpacking LATTE ...")
if os.path.exists(lattedir):
shutil.rmtree(lattedir)
cmd = 'cd "%s"; tar zxvf LATTE.tar.gz' % lattepath
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
os.remove("%s/LATTE.tar.gz" % lattepath)
if tarfile.is_tarfile('LATTE.tar.gz'):
tgz = tarfile.open('LATTE.tar.gz')
tgz.extractall()
os.remove('LATTE.tar.gz')
else:
sys.exit("File LATTE.tar.gz is not a supported archive")
# build LATTE
if buildflag:
# build LATTE
print("Building LATTE ...")
cmd = 'cd "%s"; make' % lattedir
try:
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
print(txt.decode('UTF-8'))
except subprocess.CalledProcessError as e:
print("Make failed with:\n %s" % e.output.decode('UTF-8'))
sys.exit(1)
sys.exit("Make failed with:\n %s" % e.output.decode('UTF-8'))
# create 3 links in lib/latte to LATTE dirs
# do this if -b or -p is set
if buildflag or pathflag:
print("Creating links to LATTE files")
if os.path.isfile("includelink") or os.path.islink("includelink"):
os.remove("includelink")
if os.path.isfile("liblink") or os.path.islink("liblink"):
os.remove("liblink")
if os.path.isfile("filelink.o") or os.path.islink("filelink.o"):
os.remove("filelink.o")
cmd = 'ln -s "%s/src" includelink' % lattedir
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
cmd = 'ln -s "%s" liblink' % lattedir
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
cmd = 'ln -s "%s/src/latte_c_bind.o" filelink.o' % lattedir
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
print("Creating links to LATTE files")
if os.path.isfile("includelink") or os.path.islink("includelink"):
os.remove("includelink")
if os.path.isfile("liblink") or os.path.islink("liblink"):
os.remove("liblink")
if os.path.isfile("filelink.o") or os.path.islink("filelink.o"):
os.remove("filelink.o")
os.symlink(os.path.join(lattedir, 'src'), 'includelink')
os.symlink(lattedir, 'liblink')
os.symlink(os.path.join(lattedir, 'src', 'latte_c_bind.o'), 'filelink.o')
# copy Makefile.lammps.suffix to Makefile.lammps
if suffixflag:
if suffixflag or not os.path.exists("Makefile.lammps"):
if suffix is None:
suffix = 'gfortran'
print("Creating Makefile.lammps")
if os.path.exists("Makefile.lammps.%s" % suffix):
cmd = 'cp Makefile.lammps.%s Makefile.lammps' % suffix
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
shutil.copyfile("Makefile.lammps.%s" % suffix, 'Makefile.lammps')

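A note on the os.symlink() calls above: the existing links are removed first because os.symlink() refuses to overwrite an existing path. In isolation, the pattern looks like this (the target path below is a placeholder):

import os

# remove a stale link before re-creating it; os.symlink() raises
# OSError/FileExistsError if the link name already exists
if os.path.isfile("liblink") or os.path.islink("liblink"):
    os.remove("liblink")
os.symlink("/path/to/LATTE", "liblink")   # placeholder target path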
View File

@ -1,94 +1,84 @@
#!/usr/bin/env python
# Install.py tool to build the CSlib library
# used to automate the steps described in the README file in this dir
"""
Install.py tool to build the CSlib library
used to automate the steps described in the README file in this dir
"""
from __future__ import print_function
import sys,os,re,subprocess
import sys, os, subprocess, shutil
from argparse import ArgumentParser
sys.path.append('..')
from install_helpers import error,get_cpus,fullpath,which
from install_helpers import fullpath
parser = ArgumentParser(prog='Install.py',
description="LAMMPS library build wrapper script")
# help message
help = """
HELP = """
Syntax from src dir: make lib-message args="-m"
or: make lib-message args="-s -z"
Syntax from lib dir: python Install.py -m
or: python Install.py -s -z
specify zero or more options, order does not matter
-m = parallel build of CSlib library
-s = serial build of CSlib library
-z = build CSlib library with ZMQ socket support, default = no ZMQ support
Example:
make lib-message args="-m -z" # build parallel CSlib with ZMQ support
make lib-message args="-s" # build serial CSlib with no ZMQ support
"""
# parse args
pgroup = parser.add_mutually_exclusive_group()
pgroup.add_argument("-m", "--mpi", action="store_true",
help="parallel build of CSlib with MPI")
pgroup.add_argument("-s", "--serial", action="store_true",
help="serial build of CSlib")
parser.add_argument("-z", "--zmq", default=False, action="store_true",
help="build CSlib with ZMQ socket support, default ()")
args = sys.argv[1:]
nargs = len(args)
if nargs == 0: error(help=help)
args = parser.parse_args()
mpiflag = False
serialflag = False
zmqflag = False
# print help message and exit, if neither mpi nor serial options are given
if not args.mpi and not args.serial:
parser.print_help()
sys.exit(HELP)
iarg = 0
while iarg < nargs:
if args[iarg] == "-m":
mpiflag = True
iarg += 1
elif args[iarg] == "-s":
serialflag = True
iarg += 1
elif args[iarg] == "-z":
zmqflag = True
iarg += 1
else: error(help=help)
if (not mpiflag and not serialflag):
error("Must use either -m or -s flag")
if (mpiflag and serialflag):
error("Cannot use -m and -s flag at the same time")
mpiflag = args.mpi
serialflag = args.serial
zmqflag = args.zmq
# build CSlib
# copy resulting lib to cslib/src/libmessage.a
# copy appropriate Makefile.lammps.* to Makefile.lammps
print("Building CSlib ...")
srcdir = fullpath("./cslib/src")
srcdir = fullpath(os.path.join("cslib", "src"))
if mpiflag and zmqflag:
cmd = "cd %s; make lib_parallel" % srcdir
cmd = "make -C %s lib_parallel" % srcdir
elif mpiflag and not zmqflag:
cmd = "cd %s; make lib_parallel zmq=no" % srcdir
cmd = "make -C %s lib_parallel zmq=no" % srcdir
elif not mpiflag and zmqflag:
cmd = "cd %s; make lib_serial" % srcdir
cmd = "make -C %s lib_serial" % srcdir
elif not mpiflag and not zmqflag:
cmd = "cd %s; make lib_serial zmq=no" % srcdir
cmd = "make -C %s lib_serial zmq=no" % srcdir
print(cmd)
try:
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
print(txt.decode('UTF-8'))
except subprocess.CalledProcessError as e:
print("Make failed with:\n %s" % e.output.decode('UTF-8'))
sys.exit(1)
if mpiflag: cmd = "cd %s; cp libcsmpi.a libmessage.a" % srcdir
else: cmd = "cd %s; cp libcsnompi.a libmessage.a" % srcdir
print(cmd)
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
print(txt.decode('UTF-8'))
slb = os.path.join(srcdir, "libcsnompi.a")
if mpiflag:
slb = os.path.join(srcdir, "libcsmpi.a")
shutil.copyfile(slb, os.path.join(srcdir, "libmessage.a"))
if zmqflag: cmd = "cp Makefile.lammps.zmq Makefile.lammps"
else: cmd = "cp Makefile.lammps.nozmq Makefile.lammps"
print(cmd)
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
print(txt.decode('UTF-8'))
smk = "Makefile.lammps.nozmq"
if zmqflag:
smk = "Makefile.lammps.zmq"
shutil.copyfile(smk, "Makefile.lammps")
print("Using %s for Makefile.lammps" % smk)

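The four-way if/elif chain above that picks the CSlib make target can be summarized more compactly as below; this is only a restatement of the logic for clarity, and the committed script keeps the explicit branches.

# equivalent summary of the target selection in lib/message/Install.py
mpiflag, zmqflag = True, False            # example values
srcdir = "cslib/src"
target = "lib_parallel" if mpiflag else "lib_serial"
zmqopt = "" if zmqflag else " zmq=no"
cmd = "make -C %s %s%s" % (srcdir, target, zmqopt)
# -> "make -C cslib/src lib_parallel zmq=no"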
View File

@ -1,116 +1,123 @@
#!/usr/bin/env python
# Install.py tool to download, unpack, build, and link to the MS-CG library
# used to automate the steps described in the README file in this dir
"""
Install.py tool to download, unpack, build, and link to the MS-CG library
used to automate the steps described in the README file in this dir
"""
from __future__ import print_function
import sys,os,re,subprocess,shutil
import sys, os, subprocess, shutil, tarfile
from argparse import ArgumentParser
sys.path.append('..')
from install_helpers import error,get_cpus,fullpath,which,get_cpus,geturl
from install_helpers import fullpath, geturl
# help message
help = """
Syntax from src dir: make lib-mscg args="-p [path] -m [suffix]"
or: make lib-mscg args="-b -m [suffix]"
Syntax from lib dir: python Install.py -p [path] -m [suffix]
Syntax from lib dir: python Install.py -b -m [suffix]
specify one or more options, order does not matter
-b = download and build MS-CG library
-p = specify folder of existing MS-CG installation
-m = machine suffix specifies which src/Make/Makefile.suffix to use
default suffix = g++_simple
Example:
make lib-mscg args="-b -m serial " # download/build in lib/mscg/MSCG-release-master with settings compatible with "make serial"
make lib-mscg args="-b -m mpi " # download/build in lib/mscg/MSCG-release-master with settings compatible with "make mpi"
make lib-mscg args="-p /usr/local/mscg-release " # use existing MS-CG installation in /usr/local/mscg-release
"""
parser = ArgumentParser(prog='Install.py',
description="LAMMPS library build wrapper script")
# settings
version = "1.7.3.1"
machine = "g++_simple"
# help message
HELP = """
Syntax from src dir: make lib-mscg args="-p [path] -m [suffix] -v [version]"
or: make lib-mscg args="-b -m [suffix]"
Syntax from lib dir: python Install.py -p [path] -m [suffix] -v [version]
Syntax from lib dir: python Install.py -b -m [suffix]
Example:
make lib-mscg args="-b -m serial " # download/build in lib/mscg/MSCG-release with settings compatible with "make serial"
make lib-mscg args="-b -m mpi " # download/build in lib/mscg/MSCG-release with settings compatible with "make mpi"
make lib-mscg args="-p /usr/local/mscg-release " # use existing MS-CG installation in /usr/local/mscg-release
"""
# known checksums for different MSCG versions. used to validate the download.
checksums = { \
'1.7.3.1' : '8c45e269ee13f60b303edd7823866a91', \
}
# parse and process arguments
pgroup = parser.add_mutually_exclusive_group()
pgroup.add_argument("-b", "--build", action="store_true",
help="download and build the MSCG library")
pgroup.add_argument("-p", "--path",
help="specify folder of existing MSCG installation")
parser.add_argument("-v", "--version", default=version, choices=checksums.keys(),
help="set version of MSCG to download and build (default: %s)" % version)
parser.add_argument("-m", "--machine", default=machine, choices=['mpi', 'serial', 'g++_simple', 'intel_simple', 'lapack', 'mac'],
help="set machine suffix specifies which src/Make/Makefile.suffix to use. (default: %s)" % machine)
args = parser.parse_args()
# print help message and exit, if neither build nor path options are given
if not args.build and not args.path:
parser.print_help()
sys.exit(HELP)
buildflag = args.build
pathflag = args.path is not None
mscgpath = args.path
msuffix = args.machine
mscgver = args.version
# settings
mscgver = "1.7.3.1"
url = "https://github.com/uchicago-voth/MSCG-release/archive/%s.tar.gz" % mscgver
tarfile = "MS-CG-%s.tar.gz" % mscgver
tarname = "MS-CG-%s.tar.gz" % mscgver
tardir = "MSCG-release-%s" % mscgver
# parse args
homepath = fullpath('.')
homedir = os.path.join(homepath, tardir)
args = sys.argv[1:]
nargs = len(args)
if nargs == 0: error(help=help)
homepath = "."
homedir = tardir
buildflag = False
pathflag = False
linkflag = True
msuffix = "g++_simple"
iarg = 0
while iarg < nargs:
if args[iarg] == "-p":
if iarg+2 > nargs: error(help=help)
mscgpath = fullpath(args[iarg+1])
pathflag = True
iarg += 2
elif args[iarg] == "-m":
if iarg+2 > nargs: error(help=help)
msuffix = args[iarg+1]
iarg += 2
elif args[iarg] == "-b":
buildflag = True
iarg += 1
else: error(help=help)
homepath = fullpath(homepath)
homedir = "%s/%s" % (homepath,homedir)
if (pathflag):
if not os.path.isdir(mscgpath): error("MS-CG path does not exist")
homedir = mscgpath
if (buildflag and pathflag):
error("Cannot use -b and -p flag at the same time")
if (not buildflag and not pathflag):
error("Have to use either -b or -p flag")
if pathflag:
if not os.path.isdir(mscgpath):
sys.exit("MS-CG path %s does not exist" % mscgpath)
homedir = fullpath(mscgpath)
# download and unpack MS-CG tarfile
if buildflag:
print("Downloading MS-CG ...")
geturl(url,"%s/%s" % (homepath,tarfile))
tarname = os.path.join(homepath, tarname)
geturl(url, tarname)
print("Unpacking MS-CG tarfile ...")
if os.path.exists("%s/%s" % (homepath,tardir)):
shutil.rmtree("%s/%s" % (homepath,tardir))
cmd = 'cd "%s"; tar -xzvf %s' % (homepath,tarfile)
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
os.remove("%s/%s" % (homepath,tarfile))
if os.path.exists(os.path.join(homepath, tardir)):
shutil.rmtree(os.path.join(homepath, tardir))
if tarfile.is_tarfile(tarname):
tgz = tarfile.open(tarname)
tgz.extractall(path=homepath)
os.remove(tarname)
else:
sys.exit("File %s is not a supported archive", tarname)
if os.path.basename(homedir) != tardir:
if os.path.exists(homedir):
shutil.rmtree(homedir)
os.rename("%s/%s" % (homepath,tardir),homedir)
os.rename(os.path.join(homepath, tardir), homedir)
# build MS-CG
if buildflag:
print("Building MS-CG ...")
if os.path.exists("%s/src/Make/Makefile.%s" % (homedir,msuffix)):
cmd = 'cd "%s/src"; cp Make/Makefile.%s .; make -f Makefile.%s' % \
(homedir,msuffix,msuffix)
mkf = "Makefile.%s" % msuffix
mkp = os.path.join(homedir, 'src', 'Make', mkf)
if os.path.exists(mkp):
shutil.copyfile(mkp, os.path.join(homedir, 'src', mkf))
elif os.path.exists("Makefile.%s" % msuffix):
cmd = 'cd "%s/src"; cp ../../Makefile.%s .; make -f Makefile.%s' % \
(homedir,msuffix,msuffix)
shutil.copyfile("Makefile.%s" % msuffix, os.path.join(homedir, 'src', mkf))
else:
error("Cannot find Makefile.%s" % msuffix)
sys.exit("Cannot find Makefile.%s" % msuffix)
try:
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
cmd = 'make -C %s -f Makefile.%s' % (os.path.join(homedir, 'src'), msuffix)
txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
print(txt.decode('UTF-8'))
except subprocess.CalledProcessError as e:
print("Make failed with:\n %s" % e.output.decode('UTF-8'))
@ -119,21 +126,17 @@ if buildflag:
if not os.path.exists("Makefile.lammps"):
print("Creating Makefile.lammps")
if os.path.exists("Makefile.lammps.%s" % msuffix):
cmd = 'cp Makefile.lammps.%s Makefile.lammps' % msuffix
shutil.copyfile('Makefile.lammps.%s' % msuffix, 'Makefile.lammps')
else:
cmd = 'cp Makefile.lammps.default Makefile.lammps'
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
shutil.copyfile('Makefile.lammps.default', 'Makefile.lammps')
else: print("Makefile.lammps exists. Please check its settings")
# create 2 links in lib/mscg to MS-CG src dir
if linkflag:
print("Creating links to MS-CG include and lib files")
if os.path.isfile("includelink") or os.path.islink("includelink"):
os.remove("includelink")
if os.path.isfile("liblink") or os.path.islink("liblink"):
os.remove("liblink")
cmd = 'ln -s "%s/src" includelink' % homedir
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
cmd = 'ln -s "%s/src" liblink' % homedir
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
print("Creating links to MS-CG include and lib files")
if os.path.isfile("includelink") or os.path.islink("includelink"):
os.remove("includelink")
if os.path.isfile("liblink") or os.path.islink("liblink"):
os.remove("liblink")
os.symlink(os.path.join(homedir, 'src'), 'includelink')
os.symlink(os.path.join(homedir, 'src'), 'liblink')

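One detail worth noting in the hunk above: the old code bound the name tarfile to the archive file name, which would shadow the tarfile module now imported at the top of the script; renaming the variable to tarname keeps both usable, e.g.:

import tarfile

tarname = "MS-CG-1.7.3.1.tar.gz"          # archive file name (example)
if tarfile.is_tarfile(tarname):           # module and variable no longer collide
    tgz = tarfile.open(tarname)
    tgz.extractall(path=".")
    tgz.close()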
View File

@ -1,16 +1,28 @@
#!/usr/bin/env python
# Install.py tool to download, unpack, build, and link to the plumed2 library
# used to automate the steps described in the README file in this dir
"""
Install.py tool to download, unpack, build, and link to the plumed2 library
used to automate the steps described in the README file in this dir
"""
from __future__ import print_function
import sys,os,re,subprocess,hashlib,shutil
import sys, os, subprocess, shutil
from argparse import ArgumentParser
sys.path.append('..')
from install_helpers import error,get_cpus,fullpath,which,geturl,checkmd5sum
from install_helpers import get_cpus, fullpath, geturl, checkmd5sum
parser = ArgumentParser(prog='Install.py',
description="LAMMPS library build wrapper script")
# settings
version = "2.4.4"
mode = "static"
# help message
help = """
HELP = """
Syntax from src dir: make lib-plumed args="-b"
or: make lib-plumed args="-b -v 2.4.3"
or: make lib-plumed args="-p /usr/local/plumed2 -m shared"
@ -19,127 +31,105 @@ Syntax from lib dir: python Install.py -b -v 2.4.3
or: python Install.py -b
or: python Install.py -p /usr/local/plumed2 -m shared
specify one or more options, order does not matter
-b = download and build the plumed2 library
-v = set version of plumed2 to download and build (default: 2.4.3)
-p = specify folder of existing plumed2 installation
-m = set plumed linkage mode: static (default), shared, or runtime
Example:
make lib-plumed args="-b" # download/build in lib/plumed/plumed2
make lib-plumed args="-p $HOME/plumed2 -m shared" # use existing Plumed2 installation in $HOME/plumed2
"""
# settings
version = "2.4.3"
mode = "static"
# known checksums for different PLUMED versions. used to validate the download.
checksums = { \
'2.4.2' : '88188743a6e03ef076e5377d03ebb0e7', \
'2.4.3' : 'b1be7c48971627febc11c61b70767fc5', \
'2.5b' : 'e341bdef469be1da058b8a0b97a3db22', \
'2.4.4' : '71ed465bdc7c2059e282dbda8d564e71', \
'2.5.0' : '6224cd089493661e19ceacccd35cf911', \
}
# parse args
# parse and process arguments
args = sys.argv[1:]
nargs = len(args)
if nargs == 0: error(help=help)
pgroup = parser.add_mutually_exclusive_group()
pgroup.add_argument("-b", "--build", action="store_true",
help="download and build the plumed2 library")
pgroup.add_argument("-p", "--path",
help="specify folder of existing plumed2 installation")
parser.add_argument("-v", "--version", default=version, choices=checksums.keys(),
help="set version of plumed to download and build (default: %s)" % version)
parser.add_argument("-m", "--mode", default=mode, choices=['static', 'shared', 'runtime'],
help="set plumed linkage mode: static (default), shared, or runtime")
homepath = "."
args = parser.parse_args()
buildflag = False
pathflag = False
suffixflag = False
linkflag = True
# print help message and exit, if neither build nor path options are given
if not args.build and not args.path:
parser.print_help()
sys.exit(HELP)
iarg = 0
while iarg < nargs:
if args[iarg] == "-v":
if iarg+2 > nargs: error(help=help)
version = args[iarg+1]
iarg += 2
elif args[iarg] == "-p":
if iarg+2 > nargs: error(help=help)
plumedpath = fullpath(args[iarg+1])
pathflag = True
iarg += 2
elif args[iarg] == "-m":
if iarg+2 > nargs: error(help=help)
mode = args[iarg+1]
iarg += 2
elif args[iarg] == "-b":
buildflag = True
iarg += 1
else: error(help=help)
buildflag = args.build
pathflag = args.path is not None
plumedpath = args.path
homepath = fullpath(homepath)
homepath = fullpath('.')
homedir = "%s/plumed2" % (homepath)
if (pathflag):
if not os.path.isdir(plumedpath): error("Plumed2 path does not exist")
homedir = plumedpath
if (buildflag and pathflag):
error("Cannot use -b and -p flag at the same time")
if (not buildflag and not pathflag):
error("Have to use either -b or -p flag")
if ((mode != "static") and (mode != "shared") and (mode != "runtime")):
error("Unknown linkage mode '%s' for Plumed" % mode)
if pathflag:
if not os.path.isdir(plumedpath):
sys.exit("Plumed2 path %s does not exist" % plumedpath)
homedir = fullpath(plumedpath)
# download and unpack plumed2 tarball
if buildflag:
url = "https://github.com/plumed/plumed2/releases/download/v%s/plumed-src-%s.tgz" % (version,version)
url = "https://github.com/plumed/plumed2/releases/download/v%s/plumed-src-%s.tgz" % (version, version)
filename = "plumed-src-%s.tar.gz" %version
print("Downloading plumed ...")
geturl(url,filename)
geturl(url, filename)
# verify downloaded archive integrity via md5 checksum, if known.
if version in checksums:
if not checkmd5sum(checksums[version],filename):
error("Checksum for plumed2 library does not match")
if not checkmd5sum(checksums[version], filename):
sys.exit("Checksum for plumed2 library does not match")
print("Unpacking plumed2 source tarball ...")
if os.path.exists("%s/plumed-%s" % (homepath,version)):
shutil.rmtree("%s/plumed-%s" % (homepath,version))
if os.path.exists("%s/plumed-%s" % (homepath, version)):
shutil.rmtree("%s/plumed-%s" % (homepath, version))
if os.path.exists(homedir):
shutil.rmtree(homedir)
cmd = 'cd "%s"; tar -xzvf %s' % (homepath,filename)
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
os.remove("%s/%s" % (homepath,filename))
cmd = 'cd "%s"; tar -xzvf %s' % (homepath, filename)
subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
os.remove(os.path.join(homepath, filename))
# build plumed
print("Building plumed ...")
n_cpus = get_cpus()
cmd = 'cd %s/plumed-%s; ./configure --prefix=%s --enable-static-patch ; make -j%d ; make install' % (homepath,version,homedir,n_cpus)
cmd = 'cd %s/plumed-%s; ./configure --prefix=%s --enable-modules=all --enable-static-patch ; make -j%d ; make install' % (homepath, version, homedir, n_cpus)
try:
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
print(txt.decode('UTF-8'))
except subprocess.CalledProcessError as e:
print("Make failed with:\n %s" % e.output.decode('UTF-8'))
sys.exit(1)
# create 2 links in lib/plumed to plumed2 installation dir
if linkflag:
print("Creating links to plumed2 include and lib files")
if os.path.isfile("includelink") or os.path.islink("includelink"):
os.remove("includelink")
if os.path.isfile("liblink") or os.path.islink("liblink"):
os.remove("liblink")
cmd = 'ln -s "%s/include" includelink' % homedir
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
cmd = 'ln -s "%s/lib" liblink' % homedir
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
if os.path.isfile("Makefile.lammps.%s" % mode):
print("Creating Makefile.lammps")
cmd = 'echo PLUMED_LIBDIR="%s/lib" > Makefile.lammps; cat liblink/plumed/src/lib/Plumed.inc.%s Makefile.lammps.%s >> Makefile.lammps' % (homedir,mode,mode)
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
print("Creating links to plumed2 include and lib files")
if os.path.isfile("includelink") or os.path.islink("includelink"):
os.remove("includelink")
if os.path.isfile("liblink") or os.path.islink("liblink"):
os.remove("liblink")
os.symlink(os.path.join(homedir, 'include'), 'includelink')
libpath = os.path.join(homedir, 'lib64')
if not os.path.exists(libpath):
libpath = os.path.join(homedir, 'lib')
os.symlink(libpath, 'liblink')
if os.path.isfile("Makefile.lammps.%s" % mode):
print("Creating Makefile.lammps")
plumedinc = os.path.join('liblink', 'plumed', 'src', 'lib', 'Plumed.inc.' + mode)
lines1 = open(plumedinc, 'r').readlines()
lines2 = open("Makefile.lammps.%s" % mode, 'r').readlines()
fp = open("Makefile.lammps", 'w')
  fp.write("PLUMED_LIBDIR=%s\n" % os.path.join(homedir, "lib"))
for line in lines1:
fp.write(line)
for line in lines2:
fp.write(line)
fp.close()
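For reference, a minimal sketch of a checksum helper along the lines of the checkmd5sum() imported from install_helpers above; the actual helper in lib/install_helpers.py may differ in details, this only illustrates the MD5 comparison the scripts rely on:

import hashlib

def checkmd5sum(md5sum, fname):
    """Return True if the MD5 digest of fname matches the expected hex string."""
    hasher = hashlib.md5()
    with open(fname, 'rb') as fp:
        # hash the tarball in chunks so large downloads need not fit in memory
        for chunk in iter(lambda: fp.read(1048576), b''):
            hasher.update(chunk)
    return hasher.hexdigest() == md5sum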

lib/pylint.rc (new file, 564 lines added)
View File

@ -0,0 +1,564 @@
# pylint configuration for checking Install.py files in the lib folder and its subdirs
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-whitelist=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
init-hook=sys.path.append('..')
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use.
jobs=1
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Specify a configuration file.
#rcfile=
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=print-statement,
parameter-unpacking,
unpacking-in-except,
old-raise-syntax,
backtick,
long-suffix,
old-ne-operator,
old-octal-literal,
import-star-module-level,
non-ascii-bytes-literal,
raw-checker-failed,
bad-inline-option,
locally-disabled,
locally-enabled,
file-ignored,
suppressed-message,
useless-suppression,
deprecated-pragma,
use-symbolic-message-instead,
apply-builtin,
basestring-builtin,
buffer-builtin,
cmp-builtin,
coerce-builtin,
execfile-builtin,
file-builtin,
long-builtin,
raw_input-builtin,
reduce-builtin,
standarderror-builtin,
unicode-builtin,
xrange-builtin,
coerce-method,
delslice-method,
getslice-method,
setslice-method,
no-absolute-import,
old-division,
dict-iter-method,
dict-view-method,
next-method-called,
metaclass-assignment,
indexing-exception,
raising-string,
reload-builtin,
oct-method,
hex-method,
nonzero-method,
cmp-method,
input-builtin,
round-builtin,
intern-builtin,
unichr-builtin,
map-builtin-not-iterating,
zip-builtin-not-iterating,
range-builtin-not-iterating,
filter-builtin-not-iterating,
using-cmp-argument,
eq-without-hash,
div-method,
idiv-method,
rdiv-method,
exception-message-attribute,
invalid-str-codec,
sys-max-int,
bad-python3-import,
deprecated-string-function,
deprecated-str-translate-call,
deprecated-itertools-function,
deprecated-types-field,
next-method-defined,
dict-items-not-iterating,
dict-keys-not-iterating,
dict-values-not-iterating,
deprecated-operator-function,
deprecated-urllib-function,
xreadlines-attribute,
deprecated-sys-function,
exception-escape,
comprehension-escape,
invalid-name,
bad-indentation,
multiple-imports,
line-too-long
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[REPORTS]
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=yes
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=100
# Maximum number of lines in a module.
max-module-lines=1000
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,
dict-separator
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored. Default to name
# with leading underscore.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
[SIMILARITIES]
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
# Minimum lines number of a similarity.
min-similarity-lines=4
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: none. To make it working
# install python-enchant package..
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX,
TODO
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style.
#class-attribute-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
j,
k,
ex,
Run,
_
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style.
#variable-rgx=
[IMPORTS]
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=optparse,tkinter.tix
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled).
ext-import-graph=
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled).
import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
[DESIGN]
# Maximum number of arguments for function / method.
max-args=5
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in an if statement.
max-bool-expr=5
# Maximum number of branch for function / method body.
max-branches=12
# Maximum number of locals for function / method body.
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body.
max-returns=6
# Maximum number of statements in function / method body.
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=cls
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception".
overgeneral-exceptions=Exception
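A hypothetical way to apply this configuration to the wrapper scripts (the exact paths and invocation below are an assumption, not part of this changeset) is to run pylint from the lib folder, for example via a short Python snippet:

import subprocess

# pylint exits with a non-zero status whenever it emits messages,
# so use call() rather than check_output() to avoid an exception
subprocess.call(['pylint', '--rcfile=pylint.rc', 'plumed/Install.py'])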

lib/scafacos/.gitignore
View File

@ -1,3 +1,4 @@
/scafacos*
/includelink
/liblink
/build

lib/scafacos/Install.py
View File

@ -1,116 +1,122 @@
#!/usr/bin/env python
# Install.py tool to download, unpack, build, and link to the Scafacos library
# used to automate the steps described in the README file in this dir
"""
Install.py tool to download, unpack, build, and link to the ScaFaCoS library
used to automate the steps described in the README file in this dir
"""
from __future__ import print_function
import sys,os,re,subprocess,shutil
import sys, os, subprocess, shutil, tarfile
from argparse import ArgumentParser
sys.path.append('..')
from install_helpers import error,fullpath,which,geturl
from install_helpers import fullpath, geturl, get_cpus, checkmd5sum
# help message
parser = ArgumentParser(prog='Install.py',
description="LAMMPS library build wrapper script")
help = """
# settings
version = "1.0.1"
url = "https://github.com/scafacos/scafacos/releases/download/v%s/scafacos-%s.tar.gz" % (version, version)
# known checksums for different ScaFaCoS versions. used to validate the download.
checksums = { \
'1.0.1' : 'bd46d74e3296bd8a444d731bb10c1738' \
}
# extra help message
HELP = """
Syntax from src dir: make lib-scafacos args="-b"
or: make lib-scafacos args="-p /usr/local/scafacos"
Syntax from lib dir: python Install.py -b
or: python Install.py -p /usr/local/scafacos
specify zero or more options, order does not matter
-b = download and build the Scafacos library
-p = specify folder of existing Scafacos installation
always creates includelink, liblink to Scafacos dirs
Example:
make lib-scafacos args="-b" # download/build in lib/scafacos/scafacos
make lib-scafacos args="-p $HOME/scafacos" # use existing Scafacos installation in $HOME
make lib-scafacos args="-p $HOME/scafacos" # use existing ScaFaCoS installation in $HOME
"""
# settings
# parse and process arguments
version = "scafacos-1.0.1"
url = "https://github.com/scafacos/scafacos/releases/download/v1.0.1/scafacos-1.0.1.tar.gz"
#url = "https://gigamove.rz.rwth-aachen.de/d/id/CTzyApN76MXMJ6/dd/100" % version
pgroup = parser.add_mutually_exclusive_group()
pgroup.add_argument("-b", "--build", action="store_true",
help="download and build the ScaFaCoS library")
pgroup.add_argument("-p", "--path",
help="specify folder of existing ScaFaCoS installation")
parser.add_argument("-v", "--version", default=version,
help="set version of ScaFaCoS to download and build (default: %s)" % version)
# parse args
args = parser.parse_args()
args = sys.argv[1:]
nargs = len(args)
# print help message and exit, if neither build nor path options are given
if not args.build and not args.path:
parser.print_help()
sys.exit(HELP)
homepath = "."
buildflag = args.build
pathflag = args.path is not None
version = args.version
buildflag = True
pathflag = False
linkflag = True
homepath = fullpath(".")
scafacospath = os.path.join(homepath, "scafacos-%s" % version)
iarg = 0
while iarg < nargs:
if args[iarg] == "-v":
if iarg+2 > nargs: error(help=help)
version = args[iarg+1]
iarg += 2
elif args[iarg] == "-p":
if iarg+2 > nargs: error(help=help)
scafacospath = fullpath(args[iarg+1])
pathflag = True
iarg += 2
elif args[iarg] == "-b":
buildflag = True
iarg += 1
else: error(help=help)
if pathflag:
scafacospath = args.path
if not os.path.isdir(os.path.join(scafacospath, "include")):
sys.exit("ScaFaCoS include path for %s does not exist" % scafacospath)
if (not os.path.isdir(os.path.join(scafacospath, "lib64"))) \
and (not os.path.isdir(os.path.join(scafacospath, "lib"))):
sys.exit("ScaFaCoS lib path for %s does not exist" % scafacospath)
scafacospath = fullpath(scafacospath)
homepath = fullpath(homepath)
homedir = "%s/%s" % (homepath,version)
if (pathflag):
if not os.path.isdir(scafacospath): error("Scafacos path does not exist")
homedir =scafacospath
if (buildflag and pathflag):
error("Cannot use -b and -p flag at the same time")
# download and unpack Scafacos tarball
# download and unpack ScaFaCoS tarball
if buildflag:
print("Downloading Scafacos ...")
geturl(url,"%s/%s.tar.gz" % (homepath,version))
print("Downloading ScaFaCoS ...")
geturl(url, "%s/scafacos-%s.tar.gz" % (homepath, version))
print("Unpacking Scafacos tarball ...")
if os.path.exists("%s/%s" % (homepath,version)):
shutil.rmtree("%s/%s" % (homepath,version))
cmd = 'cd "%s"; tar -xzvf %s.tar.gz' % (homepath,version)
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
os.remove("%s/%s.tar.gz" % (homepath,version))
if os.path.basename(homedir) != version:
if os.path.exists(homedir):
shutil.rmtree(homedir)
os.rename("%s/%s" % (homepath,version),homedir)
# verify downloaded archive integrity via md5 checksum, if known.
if version in checksums:
if not checkmd5sum(checksums[version], '%s/scafacos-%s.tar.gz' % (homepath, version)):
sys.exit("Checksum for ScaFaCoS library does not match")
# build Scafacos
print("Unpacking ScaFaCoS tarball ...")
if os.path.exists(scafacospath):
shutil.rmtree(scafacospath)
tarname = os.path.join(homepath, "%s.tar.gz" % scafacospath)
if tarfile.is_tarfile(tarname):
tgz = tarfile.open(tarname)
tgz.extractall(path=homepath)
os.remove(tarname)
else:
sys.exit("File %s is not a supported archive" % tarname)
if buildflag:
print("Building Scafacos ...")
n_cpu = get_gpus()
cmd = 'cd "%s"; ./configure --prefix="%s/build" --disable-doc --enable-fcs-solvers=fmm,p2nfft,direct,ewald,p3m --with-internal-fftw --with-internal-pfft --with-internal-pnfft CC=mpicc FC=mpif90 CXX=mpicxx F77=; make -j%d; make install' % (homedir,homedir,n_cpu)
# build ScaFaCoS
print("Building ScaFaCoS ...")
n_cpu = get_cpus()
cmd = 'cd "%s"; ./configure --prefix="%s" --disable-doc --enable-fcs-solvers=fmm,p2nfft,direct,ewald,p3m --with-internal-fftw --with-internal-pfft --with-internal-pnfft CC=mpicc FC=mpif90 CXX=mpicxx F77=; make -j%d; make install' % (scafacospath, os.path.join(homepath, 'build'), n_cpu)
try:
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
print(txt.decode('UTF-8'))
except subprocess.CalledProcessError as e:
print("Make failed with:\n %s" % e.output.decode('UTF-8'))
sys.exit(1)
sys.exit("Make failed with:\n %s" % e.output.decode('UTF-8'))
# create 2 links in lib/scafacos to Scafacos include/lib dirs
# create 2 links in lib/scafacos to ScaFaCoS include/lib dirs
if linkflag:
print("Creating links to Scafacos include and lib files")
if os.path.isfile("includelink") or os.path.islink("includelink"):
os.remove("includelink")
if os.path.isfile("liblink") or os.path.islink("liblink"):
os.remove("liblink")
cmd = 'ln -s "%s/build/include" includelink' % homedir
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
cmd = 'ln -s "%s/build/lib" liblink' % homedir
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
print("Creating links to ScaFaCoS include and lib files")
if os.path.isfile("includelink") or os.path.islink("includelink"):
os.remove("includelink")
if os.path.isfile("liblink") or os.path.islink("liblink"):
os.remove("liblink")
if buildflag:
os.symlink(os.path.join(homepath, 'build', 'include'), 'includelink')
os.symlink(os.path.join(homepath, 'build', 'lib'), 'liblink')
else:
os.symlink(os.path.join(scafacospath, 'include'), 'includelink')
if os.path.isdir(os.path.join(scafacospath, "lib64")):
os.symlink(os.path.join(scafacospath, 'lib64'), 'liblink')
else:
os.symlink(os.path.join(scafacospath, 'lib'), 'liblink')
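The parallel "make -j" above relies on get_cpus() from install_helpers; a minimal sketch of such a helper (the real implementation may differ, e.g. in how it handles detection failures) could be:

import multiprocessing

def get_cpus():
    """Best-effort detection of the number of available CPU cores."""
    try:
        return multiprocessing.cpu_count()
    except NotImplementedError:
        # fall back to a serial build when detection is unsupported
        return 1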

lib/smd/Install.py
View File

@ -1,16 +1,36 @@
#!/usr/bin/env python
# Install.py tool to download, unpack, and point to the Eigen library
# used to automate the steps described in the README file in this dir
"""
Install.py tool to download, unpack, and point to the Eigen library
used to automate the steps described in the README file in this dir
"""
from __future__ import print_function
import sys,os,re,glob,subprocess,shutil
import sys, os, glob, shutil, tarfile
from argparse import ArgumentParser
sys.path.append('..')
from install_helpers import error,get_cpus,fullpath,which,geturl
from install_helpers import fullpath, geturl, checkmd5sum
parser = ArgumentParser(prog='Install.py',
description="LAMMPS library build wrapper script")
# settings
version = '3.3.7'
tarball = "eigen.tar.gz"
# known checksums for different Eigen versions. used to validate the download.
checksums = { \
'3.3.4' : '1a47e78efe365a97de0c022d127607c3', \
'3.3.5' : 'ee48cafede2f51fe33984ff5c9f48026', \
'3.3.6' : 'd1be14064b50310b0eb2b49e402c64d7', \
'3.3.7' : 'f2a417d083fe8ca4b8ed2bc613d20f07' \
}
# help message
help = """
HELP = """
Syntax from src dir: make lib-smd args="-b"
or: make lib-smd args="-p /usr/include/eigen3"
@ -18,91 +38,77 @@ Syntax from lib dir: python Install.py -b
or: python Install.py -p /usr/include/eigen3"
or: python Install.py -v 3.3.4 -b
specify one or more options, order does not matter
-b = download and unpack/configure the Eigen library
-p = specify folder holding an existing installation of Eigen
-v = set version of Eigen library to download and set up (default = 3.3.4)
Example:
make lib-smd args="-b" # download/build in default lib/smd/eigen-eigen-*
make lib-smd args="-p /usr/include/eigen3" # use existing Eigen installation in /usr/include/eigen3
"""
# settings
pgroup = parser.add_mutually_exclusive_group()
pgroup.add_argument("-b", "--build", action="store_true",
help="download and build the Eigen3 library")
pgroup.add_argument("-p", "--path",
help="specify folder of existing Eigen installation")
parser.add_argument("-v", "--version", default=version,
help="set version of Eigen to download and build (default: %s)" % version)
version = '3.3.4'
tarball = "eigen.tar.gz"
args = parser.parse_args()
# print help message and exit, if neither build nor path options are given
if not args.build and not args.path:
parser.print_help()
sys.exit(HELP)
# parse args
homepath = fullpath(".")
eigenpath = os.path.join(homepath, "eigen3")
args = sys.argv[1:]
nargs = len(args)
if nargs == 0: error(help=help)
buildflag = args.build
pathflag = args.path is not None
version = args.version
homepath = "."
homedir = "eigen3"
buildflag = False
pathflag = False
linkflag = True
iarg = 0
while iarg < nargs:
if args[iarg] == "-v":
if iarg+2 > nargs: error(help=help)
version = args[iarg+1]
iarg += 2
elif args[iarg] == "-p":
if iarg+2 > nargs: error(help=help)
eigenpath = fullpath(args[iarg+1])
pathflag = True
iarg += 2
elif args[iarg] == "-b":
buildflag = True
iarg += 1
else: error(help=help)
homepath = fullpath(homepath)
if (pathflag):
if not os.path.isdir(eigenpath): error("Eigen path does not exist")
if (buildflag and pathflag):
error("Cannot use -b and -p flag at the same time")
if (not buildflag and not pathflag):
error("Have to use either -b or -p flag")
if pathflag:
eigenpath = args.path
if not os.path.isdir(eigenpath):
sys.exit("Eigen path %s does not exist" % eigenpath)
eigenpath = fullpath(eigenpath)
# download and unpack Eigen tarball
# use glob to find name of dir it unpacks to
if buildflag:
print("Downloading Eigen ...")
eigentar = os.path.join(homepath, tarball)
url = "http://bitbucket.org/eigen/eigen/get/%s.tar.gz" % version
geturl(url,"%s/%s" % (homepath,tarball))
geturl(url, eigentar)
print("Unpacking Eigen tarball ...")
edir = glob.glob("%s/eigen-eigen-*" % homepath)
# verify downloaded archive integrity via md5 checksum, if known.
if version in checksums:
print("checking version %s\n" % version)
if not checkmd5sum(checksums[version], eigentar):
sys.exit("Checksum for Eigen library does not match")
print("Cleaning up old folders ...")
edir = glob.glob(os.path.join(homepath, "eigen-eigen-*"))
edir.append(eigenpath)
for one in edir:
if os.path.isdir(one):
shutil.rmtree(one)
cmd = 'cd "%s"; tar -xzvf %s' % (homepath,tarball)
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
edir = glob.glob("%s/eigen-eigen-*" % homepath)
os.rename(edir[0],"%s/%s" % (homepath,homedir))
os.remove(tarball)
print("Unpacking Eigen tarball ...")
if tarfile.is_tarfile(eigentar):
tgz = tarfile.open(eigentar)
tgz.extractall(path=homepath)
os.remove(eigentar)
else:
sys.exit("File %s is not a supported archive" % eigentar)
edir = glob.glob(os.path.join(homepath, "eigen-eigen-*"))
os.rename(edir[0], eigenpath)
# create link in lib/smd to Eigen src dir
if linkflag:
print("Creating link to Eigen files")
if os.path.isfile("includelink") or os.path.islink("includelink"):
os.remove("includelink")
if pathflag: linkdir = eigenpath
else: linkdir = "%s/%s" % (homepath,homedir)
cmd = 'ln -s "%s" includelink' % linkdir
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
print("Creating link to Eigen include folder")
if os.path.isfile("includelink") or os.path.islink("includelink"):
os.remove("includelink")
linkdir = eigenpath
os.symlink(linkdir, 'includelink')
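The path handling above goes through fullpath() from install_helpers; a minimal sketch of what such a helper could look like (the actual one may add further normalization) is:

import os

def fullpath(path):
    """Expand a user path like ~/eigen3 into an absolute path."""
    return os.path.abspath(os.path.expanduser(path))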

lib/voronoi/Install.py
View File

@ -1,99 +1,102 @@
#!/usr/bin/env python
# Install.py tool to download, unpack, build, and link to the Voro++ library
# used to automate the steps described in the README file in this dir
"""
Install.py tool to download, unpack, build, and link to the Voro++ library
used to automate the steps described in the README file in this dir
"""
from __future__ import print_function
import sys,os,re,subprocess,shutil
import sys, os, subprocess, shutil, tarfile
from argparse import ArgumentParser
sys.path.append('..')
from install_helpers import error,get_cpus,fullpath,which,geturl
from install_helpers import fullpath, geturl, checkmd5sum
# help message
parser = ArgumentParser(prog='Install.py',
description="LAMMPS library build wrapper script")
help = """
# settings
version = "voro++-0.4.6"
url = "http://math.lbl.gov/voro++/download/dir/%s.tar.gz" % version
# known checksums for different Voro++ versions. used to validate the download.
checksums = { \
'voro++-0.4.6' : '2338b824c3b7b25590e18e8df5d68af9' \
}
# extra help message
HELP = """
Syntax from src dir: make lib-voronoi args="-b"
or: make lib-voronoi args="-p /usr/local/voro++-0.4.6"
or: make lib-voronoi args="-b -v voro++-0.4.6"
Syntax from lib dir: python Install.py -b -v voro++-0.4.6
or: python Install.py -b
or: python Install.py -p /usr/local/voro++-0.4.6
specify one or more options, order does not matter
-b = download and build the Voro++ library
-p = specify folder of existing Voro++ installation
-v = set version of Voro++ to download and build (default voro++-0.4.6)
Example:
make lib-voronoi args="-b" # download/build in lib/voronoi/voro++-0.4.6
make lib-voronoi args="-p $HOME/voro++-0.4.6" # use existing Voro++ installation in $HOME/voro++-0.4.6
"""
# settings
# parse and process arguments
version = "voro++-0.4.6"
url = "http://math.lbl.gov/voro++/download/dir/%s.tar.gz" % version
pgroup = parser.add_mutually_exclusive_group()
pgroup.add_argument("-b", "--build", action="store_true",
help="download and build the Voro++ library")
pgroup.add_argument("-p", "--path",
help="specify folder of existing Voro++ installation")
parser.add_argument("-v", "--version", default=version,
help="set version of Voro++ to download and build (default: %s)" % version)
args = parser.parse_args()
# parse args
# print help message and exit, if neither build nor path options are given
if not args.build and not args.path:
parser.print_help()
sys.exit(HELP)
args = sys.argv[1:]
nargs = len(args)
if nargs == 0: error(help=help)
buildflag = args.build
pathflag = args.path is not None
voropath = args.path
homepath = "."
homedir = version
homepath = fullpath(".")
homedir = os.path.join(homepath, version)
buildflag = False
pathflag = False
linkflag = True
iarg = 0
while iarg < nargs:
if args[iarg] == "-v":
if iarg+2 > nargs: error(help=help)
version = args[iarg+1]
iarg += 2
elif args[iarg] == "-p":
if iarg+2 > nargs: error(help=help)
voropath = fullpath(args[iarg+1])
pathflag = True
iarg += 2
elif args[iarg] == "-b":
buildflag = True
iarg += 1
else: error(help=help)
homepath = fullpath(homepath)
homedir = "%s/%s" % (homepath,version)
if (pathflag):
if not os.path.isdir(voropath): error("Voro++ path does not exist")
homedir = voropath
if (buildflag and pathflag):
error("Cannot use -b and -p flag at the same time")
if (not buildflag and not pathflag):
error("Have to use either -b or -p flag")
if pathflag:
if not os.path.isdir(voropath):
sys.exit("Voro++ path %s does not exist" % voropath)
homedir = fullpath(voropath)
# download and unpack Voro++ tarball
if buildflag:
print("Downloading Voro++ ...")
geturl(url,"%s/%s.tar.gz" % (homepath,version))
vorotar = os.path.join(homepath, version) + '.tar.gz'
geturl(url, vorotar)
# verify downloaded archive integrity via md5 checksum, if known.
if version in checksums:
if not checkmd5sum(checksums[version], vorotar):
sys.exit("Checksum for Voro++ library does not match")
print("Unpacking Voro++ tarball ...")
if os.path.exists("%s/%s" % (homepath,version)):
shutil.rmtree("%s/%s" % (homepath,version))
cmd = 'cd "%s"; tar -xzvf %s.tar.gz' % (homepath,version)
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
os.remove("%s/%s.tar.gz" % (homepath,version))
srcpath = os.path.join(homepath, version)
if os.path.exists(srcpath):
shutil.rmtree(srcpath)
if tarfile.is_tarfile(vorotar):
tgz = tarfile.open(vorotar)
tgz.extractall(path=homepath)
os.remove(vorotar)
else:
sys.exit("File %s is not a supported archive" % vorotar)
if os.path.basename(homedir) != version:
if os.path.exists(homedir):
shutil.rmtree(homedir)
os.rename("%s/%s" % (homepath,version),homedir)
os.rename(srcpath, homedir)
# build Voro++
@ -101,7 +104,7 @@ if buildflag:
print("Building Voro++ ...")
cmd = 'cd "%s"; make CXX=g++ CFLAGS="-fPIC -O3"' % homedir
try:
txt = subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
print(txt.decode('UTF-8'))
except subprocess.CalledProcessError as e:
print("Make failed with:\n %s" % e.output.decode('UTF-8'))
@ -109,13 +112,10 @@ if buildflag:
# create 2 links in lib/voronoi to Voro++ src dir
if linkflag:
print("Creating links to Voro++ include and lib files")
if os.path.isfile("includelink") or os.path.islink("includelink"):
os.remove("includelink")
if os.path.isfile("liblink") or os.path.islink("liblink"):
os.remove("liblink")
cmd = 'ln -s "%s/src" includelink' % homedir
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
cmd = 'ln -s "%s/src" liblink' % homedir
subprocess.check_output(cmd,stderr=subprocess.STDOUT,shell=True)
print("Creating links to Voro++ include and lib files")
if os.path.isfile("includelink") or os.path.islink("includelink"):
os.remove("includelink")
if os.path.isfile("liblink") or os.path.islink("liblink"):
os.remove("liblink")
os.symlink(os.path.join(homedir, 'src'), 'includelink')
os.symlink(os.path.join(homedir, 'src'), 'liblink')
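The remove-then-recreate symlink idiom used by all of these Install.py scripts could, as an illustration only (this helper is not part of the changeset), be factored into a small shared function:

import os

def refresh_symlink(target, linkname):
    """Replace linkname with a fresh symbolic link pointing at target."""
    if os.path.isfile(linkname) or os.path.islink(linkname):
        os.remove(linkname)
    os.symlink(target, linkname)

# usage sketch, mirroring the Voro++ case above:
# refresh_symlink(os.path.join(homedir, 'src'), 'includelink')
# refresh_symlink(os.path.join(homedir, 'src'), 'liblink')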