new examples and debugging code changes

Steve Plimpton
2022-06-14 17:29:01 -06:00
parent 34863c6c97
commit a5745d925a
19 changed files with 507 additions and 81 deletions

View File

@ -50,7 +50,7 @@ changed in the in.aimd.engine or in.aimd.engine.plugin scripts.
Run the entire calculation with a single instance of LAMMPS by itself
results should be identical to running this example with MDI
% lmp_mpi < in.aimd.alone
% lmp_mpi -log log.aimd.alone < in.aimd.alone
With MDI, the thermo output of the driver should match the thermo
output of the in.aimd.alone script.
@ -59,41 +59,157 @@ output of the in.aimd.alone script.
Run with TCP: 1 proc each
% lmp_mpi -mdi "-name aimd -role DRIVER -method TCP -port 8021" -log log.aimd.driver -in in.aimd.driver
% lmp_mpi -mdi "-name LMP1 -role DRIVER -method TCP -port 8021" -log log.aimd.driver.tcp -in in.aimd.driver
% lmp_mpi -mdi "-name LAMMPS -role ENGINE -method TCP -port 8021 -hostname localhost" -log log.aimd.engine -in in.aimd.engine
% lmp_mpi -mdi "-name LMP2 -role ENGINE -method TCP -port 8021 -hostname localhost" -log log.aimd.engine.tcp -in in.aimd.engine
---
Run with TCP: 3 procs + 4 procs
% mpirun -np 3 lmp_mpi -mdi "-name aimd -role DRIVER -method TCP -port 8021" -log log.aimd.driver -in in.aimd.driver
% mpirun -np 3 lmp_mpi -mdi "-name LMP1 -role DRIVER -method TCP -port 8021" -log log.aimd.driver.tcp -in in.aimd.driver
% mpirun -np 4 lmp_mpi -mdi "-name LAMMPS -role ENGINE -method TCP -port 8021 -hostname localhost" -log log.aimd.engine -in in.aimd.engine
% mpirun -np 4 lmp_mpi -mdi "-name LMP2 -role ENGINE -method TCP -port 8021 -hostname localhost" -log log.aimd.engine.tcp -in in.aimd.engine
---
Run with MPI: 1 proc each
% mpirun -np 1 lmp_mpi -mdi "-name aimd -role DRIVER -method MPI" -log log.aimd.driver -in in.aimd.driver : -np 1 lmp_mpi -mdi "-name LAMMPS -role ENGINE -method MPI" -log log.aimd.engine -in in.aimd.engine
% mpirun -np 1 lmp_mpi -mdi "-name LMP1 -role DRIVER -method MPI" -log log.aimd.driver.mpi -in in.aimd.driver : -np 1 lmp_mpi -mdi "-name LMP2 -role ENGINE -method MPI" -log log.aimd.engine.mpi -in in.aimd.engine
---
Run with MPI: 3 procs + 4 procs
% mpirun -np 3 lmp_mpi -mdi "-name aimd -role DRIVER -method MPI" -log log.aimd.driver -in in.aimd.driver : -np 4 lmp_mpi -mdi "-name LAMMPS -role ENGINE -method MPI" -log log.aimd.engine -in in.aimd.engine
% mpirun -np 3 lmp_mpi -mdi "-name LMP1 -role DRIVER -method MPI" -log log.aimd.driver.mpi -in in.aimd.driver : -np 4 lmp_mpi -mdi "-name LMP2 -role ENGINE -method MPI" -log log.aimd.engine.mpi -in in.aimd.engine
---
Run in plugin mode: 1 proc
% lmp_mpi -mdi "-name aimd -role DRIVER -method LINK -plugin_path /home/sjplimp/lammps/git/src" -log log.aimd.driver.plugin -in in.aimd.driver.plugin
% lmp_mpi -mdi "-name LMP1 -role DRIVER -method LINK -plugin_path /home/sjplimp/lammps/git/src" -log log.aimd.driver.plugin -in in.aimd.driver.plugin
---
Run in plugin mode: 3 procs
% mpirun -np 3 lmp_mpi -mdi "-name aimd -role DRIVER -method LINK -plugin_path /home/sjplimp/lammps/git/src" -log log.aimd.driver.plugin -in in.aimd.driver.plugin
% mpirun -np 3 lmp_mpi -mdi "-name LMP1 -role DRIVER -method LINK -plugin_path /home/sjplimp/lammps/git/src" -log log.aimd.driver.plugin -in in.aimd.driver.plugin
-------------------------------------------------
-------------------------------------------------
* Example #1b = run LAMMPS, compute QM forces on snapshots from a long run
Two instances of LAMMPS operate as a driver and engine
As an engine, LAMMPS is a surrogate for a quantum code
---
Run the entire calculation with a single instance of LAMMPS by itself
results should be identical to running this example with MDI
% lmp_mpi -log log.snapshot.alone < in.snapshot.alone
With MDI, the thermo output of the driver should match the thermo
output of the in.snapshot.alone script. Likewise the dump file written
by the driver should match dump.snapshot.alone.
---
Run with TCP: 1 proc each
% lmp_mpi -mdi "-name LMP1 -role DRIVER -method TCP -port 8021" -log log.snapshot.driver.tcp -in in.snapshot.driver
% lmp_mpi -mdi "-name LMP2 -role ENGINE -method TCP -port 8021 -hostname localhost" -log log.snapshot.engine.tcp -in in.snapshot.engine
---
Run with TCP: 3 procs + 4 procs
% mpirun -np 3 lmp_mpi -mdi "-name LMP1 -role DRIVER -method TCP -port 8021" -log log.snapshot.driver.tcp -in in.snapshot.driver
% mpirun -np 4 lmp_mpi -mdi "-name LMP2 -role ENGINE -method TCP -port 8021 -hostname localhost" -log log.snapshot.engine.tcp -in in.snapshot.engine
---
Run with MPI: 1 proc each
% mpirun -np 1 lmp_mpi -mdi "-name LMP1 -role DRIVER -method MPI" -log log.snapshot.driver.mpi -in in.snapshot.driver : -np 1 lmp_mpi -mdi "-name LMP2 -role ENGINE -method MPI" -log log.snapshot.engine.mpi -in in.snapshot.engine
---
Run with MPI: 3 procs + 4 procs
% mpirun -np 3 lmp_mpi -mdi "-name LMP1 -role DRIVER -method MPI" -log log.snapshot.driver.mpi -in in.snapshot.driver : -np 4 lmp_mpi -mdi "-name LMP2 -role ENGINE -method MPI" -log log.snapshot.engine.mpi -in in.snapshot.engine
---
Run in plugin mode: 1 proc
% lmp_mpi -mdi "-name LMP1 -role DRIVER -method LINK -plugin_path /home/sjplimp/lammps/git/src" -log log.snapshot.driver.plugin -in in.snapshot.driver.plugin
---
Run in plugin mode: 3 procs
% mpirun -np 3 lmp_mpi -mdi "-name LMP1 -role DRIVER -method LINK -plugin_path /home/sjplimp/lammps/git/src" -log log.snapshot.driver.plugin -in in.snapshot.driver.plugin
-------------------------------------------------
-------------------------------------------------
* Example #1c = run LAMMPS, compute QM forces on series of independent systems
Two instances of LAMMPS operate as a driver and engine
As an engine, LAMMPS is a surrogate for a quantum code
---
Run the entire calculation with a single instance of LAMMPS by itself
results should be identical to running this example with MDI
% lmp_mpi -log log.series.alone < in.series.alone
With MDI, the thermo output of the driver should match the thermo
output of the in.series.alone script. Likewise the dump files written
by the driver should match the dump.series.alone files.
---
Run with TCP: 1 proc each
% lmp_mpi -mdi "-name LMP1 -role DRIVER -method TCP -port 8021" -log log.series.driver.tcp -in in.series.driver
% lmp_mpi -mdi "-name LMP2 -role ENGINE -method TCP -port 8021 -hostname localhost" -log log.series.engine.tcp -in in.series.engine
---
Run with TCP: 3 procs + 4 procs
% mpirun -np 3 lmp_mpi -mdi "-name LMP1 -role DRIVER -method TCP -port 8021" -log log.series.driver.tcp -in in.series.driver
% mpirun -np 4 lmp_mpi -mdi "-name LMP2 -role ENGINE -method TCP -port 8021 -hostname localhost" -log log.series.engine.tcp -in in.series.engine
---
Run with MPI: 1 proc each
% mpirun -np 1 lmp_mpi -mdi "-name LMP1 -role DRIVER -method MPI" -log log.series.driver.mpi -in in.series.driver : -np 1 lmp_mpi -mdi "-name LMP2 -role ENGINE -method MPI" -log log.series.engine.mpi -in in.series.engine
---
Run with MPI: 3 procs + 4 procs
% mpirun -np 3 lmp_mpi -mdi "-name LMP1 -role DRIVER -method MPI" -log log.series.driver.mpi -in in.series.driver : -np 4 lmp_mpi -mdi "-name LMP2 -role ENGINE -method MPI" -log log.series.engine.mpi -in in.series.engine
---
Run in plugin mode: 1 proc
% lmp_mpi -mdi "-name LMP1 -role DRIVER -method LINK -plugin_path /home/sjplimp/lammps/git/src" -log log.series.driver.plugin -in in.series.driver.plugin
---
Run in plugin mode: 3 procs
% mpirun -np 3 lmp_mpi -mdi "-name LMP1 -role DRIVER -method LINK -plugin_path /home/sjplimp/lammps/git/src" -log log.series.driver.plugin -in in.series.driver.plugin
-------------------------------------------------
-------------------------------------------------
@ -134,7 +250,7 @@ Run with TCP: 1 proc each
% python3 sequence_driver.py -mdi "-role DRIVER -name sequence -method TCP -port 8021"
% lmp_mpi -mdi "-role ENGINE -name LAMMPS -method TCP -port 8021 -hostname localhost" -log log.sequence -in in.sequence
% lmp_mpi -mdi "-role ENGINE -name LMP -method TCP -port 8021 -hostname localhost" -log log.sequence -in in.sequence.python
---
@ -142,31 +258,31 @@ Run with TCP: 2 proc + 4 procs
% mpirun -np 2 python3 sequence_driver.py -mdi "-role DRIVER -name sequence -method TCP -port 8021"
% mpirun -np 4 lmp_mpi -mdi "-role ENGINE -name LAMMPS -method TCP -port 8021 -hostname localhost" -log log.sequence -in in.sequence
% mpirun -np 4 lmp_mpi -mdi "-role ENGINE -name LMP -method TCP -port 8021 -hostname localhost" -log log.sequence -in in.sequence.python
---
Run with MPI: 1 proc each
% mpirun -np 1 python3 sequence_driver.py -mdi "-role DRIVER -name sequence -method MPI" : -np 1 lmp_mpi -mdi "-role ENGINE -name LAMMPS -method MPI" -log log.sequence -in in.sequence
% mpirun -np 1 python3 sequence_driver.py -mdi "-role DRIVER -name sequence -method MPI" : -np 1 lmp_mpi -mdi "-role ENGINE -name LAMMPS -method MPI" -log log.sequence -in in.sequence.python
---
Run with MPI: 2 procs + 4 procs
% mpirun -np 2 python3 sequence_driver.py -mdi "-role DRIVER -name sequence -method MPI" : -np 4 lmp_mpi -mdi "-role ENGINE -name LAMMPS -method MPI" -log log.sequence -in in.sequence
% mpirun -np 2 python3 sequence_driver.py -mdi "-role DRIVER -name sequence -method MPI" : -np 4 lmp_mpi -mdi "-role ENGINE -name LMP -method MPI" -log log.sequence -in in.sequence.python
---
Run in plugin mode: 1 proc
% python3 sequence_driver.py -plugin lammps -mdi "-role DRIVER -name sequence -method LINK -plugin_path /home/sjplimp/lammps/git/src" -plugin_args "-log log.sequence -in in.sequence"
% python3 sequence_driver.py -plugin lammps -mdi "-role DRIVER -name sequence -method LINK -plugin_path /home/sjplimp/lammps/git/src" -plugin_args "-log log.sequence -in in.sequence.python"
---
Run in plugin mode: 3 procs
% mpirun -np 3 python3 sequence_driver.py -plugin lammps -mdi "-role DRIVER -name sequence -method LINK -plugin_path /home/sjplimp/lammps/git/src" -plugin_args "-log log.sequence -in in.sequence"
% mpirun -np 3 python3 sequence_driver.py -plugin lammps -mdi "-role DRIVER -name sequence -method LINK -plugin_path /home/sjplimp/lammps/git/src" -plugin_args "-log log.sequence -in in.sequence.python"
-------------------------------------------------
-------------------------------------------------
@ -190,7 +306,7 @@ here:
Run the entire calculation with a single instance of LAMMPS by itself
results should be identical to running this example with MDI
% lmp_mpi < in.aimd.alone
% lmp_mpi -log log.aimd.alone < in.aimd.alone
With MDI, the driver prints the QM and Total energies. These should
match the PotEng and TotEng output of the in.aimd.alone script.

View File

@ -23,8 +23,7 @@ fix 1 all nve
# NPT
#fix 1 all npt temp 1.0 1.0 0.1 iso 1.0 1.0 1.0
fix 2 all mdi/aimd
fix_modify 2 energy yes virial yes
fix 2 all mdi/qm virial yes
thermo_style custom step temp pe etotal press vol
thermo 1

View File

@ -23,12 +23,11 @@ fix 1 all nve
# NPT
#fix 1 all npt temp 1.0 1.0 0.1 iso 1.0 1.0 1.0
fix 2 all mdi/aimd
fix_modify 2 energy yes virial yes
fix 2 all mdi/qm virial yes
thermo_style custom step temp pe etotal press vol
thermo 1
mdi plugin lammps mdi "-role ENGINE -name lammps -method LINK" &
mdi plugin lammps mdi "-role ENGINE -name LMP2 -method LINK" &
infile in.aimd.engine extra "-log log.aimd.engine.plugin" &
command "run 5"

View File

@ -1,16 +1,11 @@
# 3d Lennard-Jones melt - MDI engine script
variable x index 5
variable y index 5
variable z index 5
units lj
atom_style atomic
lattice fcc 0.8442
region box block 0 $x 0 $y 0 $z
lattice fcc 1.0
region box block 0 1 0 1 0 1
create_box 1 box
create_atoms 1 box
mass 1 1.0
pair_style lj/cut 2.5

View File

@ -0,0 +1,43 @@
# 3d Lennard-Jones melt - MDI driver script
variable x index 5
variable y index 5
variable z index 5
variable ifile loop 3
variable rho index 0.7 0.8 0.9
label LOOP
units lj
atom_style atomic
lattice fcc ${rho}
region box block 0 $x 0 $y 0 $z
create_box 1 box
create_atoms 1 box
mass 1 1.0
displace_atoms all random 0.1 0.1 0.1 48294
pair_style lj/cut 2.5
pair_coeff 1 1 1.0 1.0 2.5
neighbor 0.3 bin
neigh_modify delay 0 every 1 check yes
compute 1 all pressure NULL virial
thermo_style custom step temp pe c_1 c_1[1] c_1[2] c_1[3]
run 0
write_dump all custom dump.series.alone.${ifile} &
id type x y z fx fy fz modify sort id
clear
next ifile
next rho
jump SELF LOOP

View File

@ -0,0 +1,48 @@
# 3d Lennard-Jones melt - MDI driver script
variable x index 5
variable y index 5
variable z index 5
variable ifile loop 3
variable rho index 0.7 0.8 0.9
mdi start
label LOOP
units lj
atom_style atomic
lattice fcc ${rho}
region box block 0 $x 0 $y 0 $z
create_box 1 box
create_atoms 1 box
mass 1 1.0
displace_atoms all random 0.1 0.1 0.1 48294
neighbor 0.3 bin
neigh_modify delay 0 every 1 check yes
fix 1 all mdi/qm add no virial yes external yes
variable epress equal (f_1[1]+f_1[2]+f_1[3])/3
thermo_style custom step temp f_1 v_epress f_1[1] f_1[2] f_1[3]
run 0
write_dump all custom dump.series.driver.${ifile} &
id type x y z f_1[1] f_1[2] f_1[3] modify sort id
# cannot do "clear" b/c will shutdown MDI engine
# have to
clear
next ifile
next rho
jump SELF LOOP
mdi stop

View File

@ -0,0 +1,44 @@
# 3d Lennard-Jones melt - MDI driver script
variable x index 5
variable y index 5
variable z index 5
variable ifile loop 3
variable rho index 0.7 0.8 0.9
label LOOP
units lj
atom_style atomic
lattice fcc ${rho}
region box block 0 $x 0 $y 0 $z
create_box 1 box
create_atoms 1 box
mass 1 1.0
displace_atoms all random 0.1 0.1 0.1 48294
neighbor 0.3 bin
neigh_modify delay 0 every 1 check yes
fix 1 all mdi/qm add no virial yes
variable epress equal (f_1[1]+f_1[2]+f_1[3])/3
thermo_style custom step temp f_1 v_epress f_1[1] f_1[2] f_1[3]
mdi plugin lammps mdi "-role ENGINE -name LMP2 -method LINK" &
infile in.series.engine &
extra "-log log.series.engine.plugin" &
command "run 0"
write_dump all custom dump.series.driver.${ifile} &
id type x y z f_1[1] f_1[2] f_1[3] modify sort id
clear
next ifile
next rho
jump SELF LOOP

View File

@ -0,0 +1,17 @@
# 3d Lennard-Jones melt - MDI engine script
units lj
atom_style atomic
lattice fcc 1.0
region box block 0 1 0 1 0 1
create_box 1 box
mass 1 1.0
pair_style lj/cut 2.5
pair_coeff 1 1 1.0 1.0 2.5
neighbor 0.3 bin
neigh_modify delay 0 every 1 check yes
mdi engine

View File

@ -0,0 +1,36 @@
# 3d Lennard-Jones melt - MDI driver script
variable x index 5
variable y index 5
variable z index 5
units lj
atom_style atomic
lattice fcc 0.8442
region box block 0 $x 0 $y 0 $z
create_box 1 box
create_atoms 1 box
mass 1 1.0
velocity all create 1.44 87287 loop geom
pair_style lj/cut 2.5
pair_coeff 1 1 1.0 1.0 2.5
neighbor 0.3 bin
neigh_modify delay 0 every 1 check yes
fix 1 all nve
compute 1 all pressure NULL virial
thermo_style custom step temp pe c_1 c_1[1] c_1[2] c_1[3]
thermo 100
dump 1 all custom 500 dump.snapshot.alone &
id type x y z fx fy fz
dump_modify 1 sort id
run 1500

View File

@ -0,0 +1,41 @@
# 3d Lennard-Jones melt - MDI driver script
variable x index 5
variable y index 5
variable z index 5
units lj
atom_style atomic
lattice fcc 0.8442
region box block 0 $x 0 $y 0 $z
create_box 1 box
create_atoms 1 box
mass 1 1.0
velocity all create 1.44 87287 loop geom
pair_style lj/cut 2.5
pair_coeff 1 1 1.0 1.0 2.5
neighbor 0.3 bin
neigh_modify delay 0 every 1 check yes
fix 1 all nve
fix 2 all mdi/qm add no every 500 virial yes
compute 1 all pressure NULL virial
variable epress equal (f_2[1]+f_2[2]+f_2[3])/3
thermo_style custom step temp pe c_1 c_1[1] c_1[2] c_1[3]
thermo 100
dump 1 all custom 500 dump.snapshot.driver &
id type x y z f_2[1] f_2[2] f_2[3]
dump_modify 1 sort id
run 1500 pre no post no every 500 &
"print 'QM eng = $(f_2/atoms)'" &
"print 'QM virial = $(v_epress) $(f_2[1]) $(f_2[2]) $(f_2[3])'"

View File

@ -0,0 +1,46 @@
# 3d Lennard-Jones melt - MDI driver script
variable x index 5
variable y index 5
variable z index 5
units lj
atom_style atomic
lattice fcc 0.8442
region box block 0 $x 0 $y 0 $z
create_box 1 box
create_atoms 1 box
mass 1 1.0
velocity all create 1.44 87287 loop geom
pair_style lj/cut 2.5
pair_coeff 1 1 1.0 1.0 2.5
neighbor 0.3 bin
neigh_modify delay 0 every 1 check yes
fix 1 all nve
fix 2 all mdi/qm add no every 500 virial yes
compute 1 all pressure NULL virial
variable epress equal (f_2[1]+f_2[2]+f_2[3])/3
thermo_style custom step temp pe c_1 c_1[1] c_1[2] c_1[3]
thermo 100
dump 1 all custom 500 dump.snapshot.driver &
id type x y z f_2[1] f_2[2] f_2[3]
dump_modify 1 sort id
mdi plugin lammps mdi "-role ENGINE -name LMP2 -method LINK" &
infile in.snapshot.engine &
extra "-log log.snapshot.engine.plugin" &
command """
run 1500 pre no post no every 500
"print 'QM eng = $(f_2/atoms)'"
"print 'QM virial = $(v_epress) $(f_2[1]) $(f_2[2]) $(f_2[3])'"
"""

View File

@ -0,0 +1,17 @@
# 3d Lennard-Jones melt - MDI engine script
units lj
atom_style atomic
lattice fcc 1.0
region box block 0 1 0 1 0 1
create_box 1 box
mass 1 1.0
pair_style lj/cut 2.5
pair_coeff 1 1 1.0 1.0 2.5
neighbor 0.3 bin
neigh_modify delay 0 every 1 check yes
mdi engine

View File

@ -303,5 +303,5 @@ if not plugin:
if plugin:
world = MPI.COMM_WORLD
plugin_args += " -mdi \"-role ENGINE -name lammps -method LINK\""
plugin_args += " -mdi \"-role ENGINE -name LMP -method LINK\""
mdi.MDI_Launch_plugin(plugin,plugin_args,world,perform_tasks,None)

View File

@ -48,19 +48,20 @@ FixMDIQM::FixMDIQM(LAMMPS *lmp, int narg, char **arg) : Fix(lmp, narg, arg)
virialflag = 0;
addflag = 1;
every = 1;
extflag = 0;
int iarg = 3;
while (iarg < narg) {
if (strcmp(arg[iarg],"virial") == 0) {
if (iarg+2 > narg) error->all(FLERR,"Illegal fix mdi/qm command");
if (strcmp(arg[iarg],"yes") == 0) virialflag = 1;
else if (strcmp(arg[iarg],"no") == 0) virialflag = 0;
if (strcmp(arg[iarg+1],"yes") == 0) virialflag = 1;
else if (strcmp(arg[iarg+1],"no") == 0) virialflag = 0;
else error->all(FLERR,"Illegal fix mdi/qm command");
iarg += 2;
} else if (strcmp(arg[iarg],"add") == 0) {
if (iarg+2 > narg) error->all(FLERR,"Illegal fix mdi/qm command");
if (strcmp(arg[iarg],"yes") == 0) addflag = 1;
else if (strcmp(arg[iarg],"no") == 0) addflag = 0;
if (strcmp(arg[iarg+1],"yes") == 0) addflag = 1;
else if (strcmp(arg[iarg+1],"no") == 0) addflag = 0;
else error->all(FLERR,"Illegal fix mdi/qm command");
iarg += 2;
} else if (strcmp(arg[iarg],"every") == 0) {
@ -68,6 +69,12 @@ FixMDIQM::FixMDIQM(LAMMPS *lmp, int narg, char **arg) : Fix(lmp, narg, arg)
every = utils::inumeric(FLERR,arg[iarg+1],false,lmp);
if (every <= 0) error->all(FLERR,"Illegal fix mdi/qm command");
iarg += 2;
} else if (strcmp(arg[iarg],"external") == 0) {
if (iarg+2 > narg) error->all(FLERR,"Illegal fix mdi/qm command");
if (strcmp(arg[iarg+1],"yes") == 0) extflag = 1;
else if (strcmp(arg[iarg+1],"no") == 0) extflag = 0;
else error->all(FLERR,"Illegal fix mdi/qm command");
iarg += 2;
} else error->all(FLERR,"Illegal fix mdi/qm command");
}
@ -77,10 +84,14 @@ FixMDIQM::FixMDIQM(LAMMPS *lmp, int narg, char **arg) : Fix(lmp, narg, arg)
global_freq = every;
extscalar = 1;
peratom_flag = 1;
size_peratom_cols = 3;
peratom_freq = every;
extvector = 0;
if (virialflag) {
vector_flag = 1;
size_vector = 6;
extvector = 1;
}
if (addflag) {
@ -135,7 +146,7 @@ FixMDIQM::~FixMDIQM()
// send exit command to engine if it is a stand-alone code
// for plugin, this happens in MDIPlugin::plugin_wrapper()
if (!plugin) {
if (!plugin && !extflag) {
int ierr = MDI_Send_command("EXIT", mdicomm);
if (ierr) error->all(FLERR, "MDI: EXIT command");
}
@ -169,6 +180,8 @@ void FixMDIQM::init()
// also initializes mdicomm
// set plugin = 0/1 for engine = stand-alone code vs plugin library
if (!extflag) {
if (mdicomm == MDI_COMM_NULL) {
MDI_Get_communicator(&mdicomm, 0);
if (mdicomm == MDI_COMM_NULL) {
@ -185,10 +198,17 @@ void FixMDIQM::init()
}
}
} else {
plugin = 0;
mdicomm = lmp->mdicomm;
}
// send natoms, atom types, and simulation box to engine
// this will trigger setup of a new system
// subsequent calls in post_force() will be for same system until new init()
reallocate();
int ierr = MDI_Send_command(">NATOMS", mdicomm);
if (ierr) error->all(FLERR, "MDI: >NATOMS command");
int n = static_cast<int> (atom->natoms);
@ -293,31 +313,25 @@ void FixMDIQM::post_force(int vflag)
}
// optionally request pressure tensor from MDI engine, convert to virial
// divide by nprocs so each proc stores a portion
// MPI_Allreduce is performed in compute_vector()
// qm_virial = fix output for global QM virial
if (virialflag) {
double ptensor[6];
ierr = MDI_Send_command("<STRESS", mdicomm);
if (ierr) error->all(FLERR, "MDI: <STRESS command");
ierr = MDI_Recv(ptensor, 6, MDI_DOUBLE, mdicomm);
ierr = MDI_Recv(qm_virial, 6, MDI_DOUBLE, mdicomm);
if (ierr) error->all(FLERR, "MDI: <STRESS data");
MPI_Bcast(ptensor, 6, MPI_DOUBLE, 0, world);
MPI_Bcast(qm_virial, 6, MPI_DOUBLE, 0, world);
double volume = domain->xprd * domain->yprd * domain->zprd;
for (int i = 0; i < 6; i++) {
ptensor[i] *= mdi2lmp_pressure;
qm_virial[i] = ptensor[i] * volume / force->nktv2p / nprocs;
}
sumflag = 0;
for (int i = 0; i < 6; i++)
qm_virial[i] *= mdi2lmp_pressure;
}
// optionally set fix->virial
// divide by nprocs so each proc stores a portion
if (virialflag && addflag) {
for (int i = 0; i < 6; i++)
virial[i] = qm_virial[i];
virial[i] = qm_virial[i]/nprocs;
}
}
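
A minimal stand-alone sketch of the reworked <STRESS handling (not part of the commit; the function name and the mdi2lmp_pressure argument are placeholders): rank 0 requests the pressure tensor from the engine, it is broadcast and converted to LAMMPS pressure units as qm_virial, and each proc stores 1/nprocs of it so the usual sum of per-proc fix virials recovers the full QM tensor.

#include <mpi.h>
#include <mdi.h>

// hedged sketch: assumes an established MDI communicator and a caller-supplied
// MDI->LAMMPS pressure conversion factor; returns non-zero on MDI error
static int fetch_qm_virial(MDI_Comm mdicomm, MPI_Comm world, int nprocs,
                           double mdi2lmp_pressure,
                           double qm_virial[6], double virial[6])
{
  if (MDI_Send_command("<STRESS", mdicomm)) return 1;   // request tensor
  if (MDI_Recv(qm_virial, 6, MDI_DOUBLE, mdicomm)) return 1;
  MPI_Bcast(qm_virial, 6, MPI_DOUBLE, 0, world);        // only rank 0 received it

  // convert units once, then store 1/nprocs per rank so the normal MPI sum
  // over procs reproduces the full QM virial
  for (int i = 0; i < 6; i++) qm_virial[i] *= mdi2lmp_pressure;
  for (int i = 0; i < 6; i++) virial[i] = qm_virial[i] / nprocs;
  return 0;
}
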
@ -343,14 +357,7 @@ double FixMDIQM::compute_scalar()
double FixMDIQM::compute_vector(int n)
{
// only sum across procs one time
if (sumflag == 0) {
MPI_Allreduce(qm_virial, qm_virial_all, 6, MPI_DOUBLE, MPI_SUM, world);
sumflag = 1;
}
return qm_virial_all[n];
return qm_virial[n];
}
/* ----------------------------------------------------------------------
@ -388,7 +395,8 @@ void FixMDIQM::reallocate()
void FixMDIQM::send_types()
{
memset(ibuf1, 0, atom->natoms * sizeof(int));
int n = static_cast<int> (atom->natoms);
memset(ibuf1, 0, n * sizeof(int));
// use local atomID to index into ordered ibuf1
@ -402,10 +410,11 @@ void FixMDIQM::send_types()
ibuf1[index] = type[i];
}
int n = static_cast<int> (atom->natoms);
MPI_Reduce(ibuf1, ibuf1all, n, MPI_INT, MPI_SUM, 0, world);
int ierr = MDI_Send(ibuf1all, n, MDI_INT, mdicomm);
int ierr = MDI_Send_command(">TYPES", mdicomm);
if (ierr) error->all(FLERR, "MDI: >TYPES command");
ierr = MDI_Send(ibuf1all, n, MDI_INT, mdicomm);
if (ierr) error->all(FLERR, "MDI: >TYPES data");
}
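
For reference, a minimal sketch of the ordered per-atom send used by send_types() (not part of the commit; send_ordered_ints and its buffer arguments are placeholder names): each rank scatters its local values into an atom-ID-ordered buffer of length natoms, MPI_Reduce sums the disjoint pieces onto rank 0, and the >TYPES command is issued before its data.

#include <mpi.h>
#include <mdi.h>
#include <cstring>

// hedged sketch: buf and bufall are assumed preallocated to natoms ints,
// as reallocate() does in the fix; atom IDs are assumed to run 1..natoms
static int send_ordered_ints(MDI_Comm mdicomm, MPI_Comm world, int natoms,
                             int nlocal, const long long *id, const int *value,
                             int *buf, int *bufall)
{
  memset(buf, 0, natoms * sizeof(int));
  for (int i = 0; i < nlocal; i++) buf[id[i] - 1] = value[i];   // ID-ordered
  MPI_Reduce(buf, bufall, natoms, MPI_INT, MPI_SUM, 0, world);

  if (MDI_Send_command(">TYPES", mdicomm)) return 1;   // command before data
  if (MDI_Send(bufall, natoms, MDI_INT, mdicomm)) return 1;
  return 0;
}
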

View File

@ -40,7 +40,7 @@ class FixMDIQM : public Fix {
private:
int nprocs;
int virialflag,addflag,every;
int virialflag,addflag,extflag,every;
int plugin;
int maxlocal;
int sumflag;

View File

@ -31,8 +31,26 @@ void MDICommand::command(int narg, char **arg)
if (strcmp(arg[0], "engine") == 0) {
MDIEngine(lmp, narg - 1, &arg[1]);
} else if (strcmp(arg[0], "plugin") == 0) {
MDIPlugin(lmp, narg - 1, &arg[1]);
} else
error->all(FLERR, "Illegal mdi command");
} else if (strcmp(arg[0], "start") == 0) {
MDI_Comm mdicomm;
MDI_Get_communicator(&mdicomm, 0);
if (mdicomm == MDI_COMM_NULL) {
MDI_Accept_communicator(&mdicomm);
if (mdicomm == MDI_COMM_NULL)
error->all(FLERR, "MDI unable to connect to stand-alone engine");
} else error->all(FLERR, "Cannot use mdi start in plugin mode");
lmp->mdicomm = mdicomm;
} else if (strcmp(arg[0], "stop") == 0) {
MDI_Comm mdicomm = lmp->mdicomm;
int ierr = MDI_Send_command("EXIT", mdicomm);
if (ierr) error->all(FLERR, "MDI: EXIT command");
lmp->mdicomm = MDI_COMM_NULL;
} else error->all(FLERR, "Illegal mdi command");
}
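
The new mdi start and mdi stop commands split the connect/EXIT lifecycle out of any single run, so a driver script can clear and rebuild systems while one engine stays alive. A minimal driver-side sketch of that lifecycle (not part of the commit; drive_series is a placeholder, and the >COORDS and <FORCES commands are assumed from the MDI standard, while >NATOMS and EXIT appear verbatim in this commit):

#include <mpi.h>
#include <mdi.h>

// hedged sketch: connect once, evaluate a series of independent systems,
// send EXIT only at the end (what mdi stop does)
static void drive_series(int nsystems, int natoms, double *coords, double *forces)
{
  MDI_Comm mdicomm;
  MDI_Get_communicator(&mdicomm, 0);        // non-NULL only in plugin mode
  if (mdicomm == MDI_COMM_NULL)
    MDI_Accept_communicator(&mdicomm);      // wait for a stand-alone engine

  for (int i = 0; i < nsystems; i++) {
    MDI_Send_command(">NATOMS", mdicomm);
    MDI_Send(&natoms, 1, MDI_INT, mdicomm);
    MDI_Send_command(">COORDS", mdicomm);   // assumed standard MDI command
    MDI_Send(coords, 3*natoms, MDI_DOUBLE, mdicomm);
    MDI_Send_command("<FORCES", mdicomm);   // assumed standard MDI command
    MDI_Recv(forces, 3*natoms, MDI_DOUBLE, mdicomm);
  }

  MDI_Send_command("EXIT", mdicomm);        // engine shuts down here
}
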

View File

@ -129,9 +129,8 @@ LAMMPS::LAMMPS(int narg, char **arg, MPI_Comm communicator) :
version = (const char *) LAMMPS_VERSION;
num_ver = utils::date2num(version);
clientserver = 0;
cslib = nullptr;
cscomm = 0;
external_comm = 0;
mdicomm = 0;
skiprunflag = 0;
@ -155,19 +154,16 @@ LAMMPS::LAMMPS(int narg, char **arg, MPI_Comm communicator) :
#endif
// check if -mpicolor is first arg
// if so, then 2 apps were launched with one mpirun command
// if so, then 2 or more apps were launched with one mpirun command
// this means passed communicator (e.g. MPI_COMM_WORLD) is bigger than LAMMPS
// e.g. for client/server coupling with another code
// in the future LAMMPS might leverage this in other ways
// universe communicator needs to shrink to be just LAMMPS
// syntax: -mpicolor color
// color = integer for this app, different than other app(s)
// color = integer for this app, different than any other app(s)
// do the following:
// perform an MPI_Comm_split() to create a new LAMMPS-only subcomm
// NOTE: this assumes other app(s) does same thing, else will hang!
// NOTE: this assumes other app(s) make same call, else will hang!
// re-create universe with subcomm
// store full multi-app comm in cscomm
// cscomm is used by CSLIB package to exchange messages w/ other app
// store comm that all apps belong to in external_comm
int iarg = 1;
if (narg-iarg >= 2 && (strcmp(arg[iarg],"-mpicolor") == 0 ||
@ -178,7 +174,7 @@ LAMMPS::LAMMPS(int narg, char **arg, MPI_Comm communicator) :
int color = atoi(arg[iarg+1]);
MPI_Comm subcomm;
MPI_Comm_split(communicator,color,me,&subcomm);
cscomm = communicator;
external_comm = communicator;
communicator = subcomm;
delete universe;
universe = new Universe(this,communicator);
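
The rewritten comments above describe the -mpicolor handshake; a small stand-alone sketch (not part of the commit) shows the same MPI_Comm_split pattern: every app launched under one mpirun splits the shared communicator with its own color, keeps the resulting private sub-communicator for itself, and retains the original communicator (stored by LAMMPS as external_comm) for cross-app messaging.

#include <mpi.h>
#include <cstdio>
#include <cstdlib>

// hedged sketch of the -mpicolor split; each app must pass a distinct color
int main(int argc, char **argv)
{
  MPI_Init(&argc, &argv);

  int color = (argc > 1) ? atoi(argv[1]) : 0;   // distinct integer per app
  int me;
  MPI_Comm_rank(MPI_COMM_WORLD, &me);

  MPI_Comm subcomm;                             // this app's private comm
  MPI_Comm_split(MPI_COMM_WORLD, color, me, &subcomm);

  int nsub;
  MPI_Comm_size(subcomm, &nsub);
  printf("world rank %d: color %d owns %d ranks\n", me, color, nsub);

  MPI_Comm_free(&subcomm);
  MPI_Finalize();
  return 0;
}

A launch such as "mpirun -np 3 ./appA 0 : -np 4 ./appB 1" then gives appA a 3-rank sub-communicator and appB a 4-rank one; if any app skips the split, all of them hang, as the NOTE above warns.
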
@ -290,7 +286,7 @@ LAMMPS::LAMMPS(int narg, char **arg, MPI_Comm communicator) :
logflag = iarg + 1;
iarg += 2;
} else if (strcmp(arg[iarg],"-mpi") == 0 ||
} else if (strcmp(arg[iarg],"-mpicolor") == 0 ||
strcmp(arg[iarg],"-m") == 0) {
if (iarg+2 > narg)
error->universe_all(FLERR,"Invalid command-line argument");
@ -762,13 +758,13 @@ LAMMPS::~LAMMPS()
delete [] suffix2;
delete [] suffixp;
// free the MPI comm created by -mpi command-line arg processed in constructor
// free the MPI comm created by -mpicolor cmdline arg processed in constructor
// it was passed to universe as if original universe world
// may have been split later by partitions, universe will free the splits
// free a copy of uorig here, so check in universe destructor will still work
MPI_Comm copy = universe->uorig;
if (cscomm) MPI_Comm_free(&copy);
if (external_comm) MPI_Comm_free(&copy);
delete input;
delete universe;

View File

@ -61,13 +61,15 @@ class LAMMPS {
char *suffix, *suffix2, *suffixp; // suffixes to add to input script style names
int suffix_enable; // 1 if suffixes are enabled, 0 if disabled
char *exename; // pointer to argv[0]
//
char ***packargs; // arguments for cmdline package commands
int num_package; // number of cmdline package commands
//
int clientserver; // 0 = neither, 1 = client, 2 = server
void *cslib; // client/server messaging via CSlib
MPI_Comm cscomm; // MPI comm for client+server in mpi/one mode
MPI_Comm external_comm; // MPI comm encompassing external programs
// when multiple programs launched by mpirun
// set by -mpicolor command line arg
int mdicomm; // for use with MDI code coupling library
const char *match_style(const char *style, const char *name);
static const char *installed_packages[];