Gromacs

Introduction

Website: http://www.gromacs.org/

Downloads: http://www.gromacs.org/Downloads

Online Manual: http://www.gromacs.org/Support/Online_Manual

Location

/sw/gromacs/4.5.5

Usage

module load gromacs/4.5.5-intel-mpi
OR
module load gromacs/4.5.5-intel
OR
module load gromacs/5.1.2-gcc-mpi-gpu
OR
module load gromacs/5.1.2-gcc-mpi
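
To see which GROMACS modules are installed and confirm the binaries resolve after loading one, a quick check using the standard environment-modules commands:

module avail gromacs                  # list all installed GROMACS modules
module load gromacs/4.5.5-intel-mpi
which mdrun                           # should resolve under /sw/gromacs/4.5.5-intel-mpi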
  
The GPU nodes are n020, n021, n022 and n023
 
For example, to run on one of the GPU nodes (n020, n021, n022 or n023), request the node by name with the resource named "host" in your PBS script.

The name of the execution host can be requested only inside a select statement, e.g.:
qsub -I -X -q workq -l walltime=100:00:00 -l select=1:ncpus=1:mem=12g:mpiprocs=1:host=n020
OR
qsub -I -q workq -l walltime=100:00:00 -l select=1:ncpus=1:mem=2g:mpiprocs=1:host=n021
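
Before pinning a job to a particular host, it is worth checking that the node is up and free; pbsnodes reports the state of a named execution host:

pbsnodes n020     # shows state (free, job-busy, offline), resources and jobs on the node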

Compilation and Installation

mkdir /sw/gromacs/4.5.5-intel
cd /sw/gromacs/4.5.5-intel
tar -zxvf /data1/gromacs-4.5.5.tar.gz
mv gromacs-4.5.5 source
cd /sw/gromacs/4.5.5-intel/source

export CC=icc
export CXX=icc
export F77=ifort
export CPPFLAGS="-I/sw/sdev/intel/Compiler/11.1/072/mkl/include"
export LDFLAGS="-L/sw/sdev/intel/Compiler/11.1/072/mkl/lib/em64t"
export LD_LIBRARY_PATH=/sw/sdev/intel/Compiler/11.1/072/lib/intel64:/sw/sdev/intel/Compiler/11.1/072/mkl/lib/em64t
module load intel-tools-11
module load intel-mpi
#module load intel-cc-11/11.1.072
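
A quick sanity check that the Intel toolchain is picked up before configuring (nothing GROMACS-specific, just confirming the environment set above):

which icc ifort    # both should resolve to the 11.1 compiler tree
icc --version
echo $LDFLAGS      # should show the MKL em64t library directory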


MPI version
===========
./configure --prefix=/sw/gromacs/4.5.5-intel-mpi --with-fft=mkl --enable-mpi 2>&1 | tee configure-mpi.log

###./configure --prefix=/sw/gromacs/4.5.5-intel-mpi --with-fft=mkl --enable-mpi --program-suffix=-mpi 2>&1 | tee configure-mpi.log

make 2>&1 | tee make-mpi.txt
make install 2>&1 | tee make_install-mpi.txt
make tests 2>&1 | tee make_test-mpi.txt
#make mdrun 2>&1 | tee make_mdrun.txt
#make install-mdrun 2>&1 | tee make_install-mdrun.txt
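
After installation, a minimal smoke test of the MPI build, assuming the binaries landed under the prefix given to configure; launching two ranks of mdrun just to print the version banner confirms that MPI start-up works:

module load intel-mpi
mpirun -np 2 /sw/gromacs/4.5.5-intel-mpi/bin/mdrun -version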

Non-MPI version
===============
>>>>>>>>>>>>>>>
The FFTW build below is not needed when using Intel's MKL; it is kept here for reference:

#export CC=icc
#export CXX=icc
#export F77=ifort
#module load intel-cmkl-11/11.1.072
#module load intel-tools-11
#Configured fftw with "--enable-float"
#cd /sw/FFTW/3.3.alpha-intel-float
# ./configure --enable-float --enable-threads --enable-fortran --prefix=/sw/gromacs/4.5.5-intel/fftw 2>&1 | tee configure_fftw_float.txt
#make 2>&1 |tee make_fftw_float.txt
#make install 2>&1 |tee make_install_fftw_float.txt

#export CPPFLAGS="-I/sw/sdev/intel/Compiler/11.1/072/mkl/include -I/sw/gromacs/4.5.5-intel/fftw/include"
#export LDFLAGS="-L/sw/sdev/intel/Compiler/11.1/072/mkl/lib/em64t -L/sw/gromacs/4.5.5-intel/fftw/lib"
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>


export CPPFLAGS="-I/sw/sdev/intel/Compiler/11.1/072/mkl/include "
export LDFLAGS="-L/sw/sdev/intel/Compiler/11.1/072/mkl/lib/em64t"
export LD_LIBRARY_PATH=/sw/sdev/intel/Compiler/11.1/072/lib/intel64:/sw/sdev/intel/Compiler/11.1/072/mkl/lib/em64t
export CC=icc
export CXX=icc
export F77=ifort
module load intel-cmkl-11/11.1.072
module load intel-tools-11
make distclean

./configure --with-fft=mkl --prefix=/sw/gromacs/4.5.5-intel 2>&1 | tee configure-nonmpi.log
make  2>&1 | tee make-nonmpi.log
make install  2>&1 | tee make_install-nonmpi.log
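
As with the MPI build, a quick check that the serial tools installed correctly (paths follow the prefix above):

/sw/gromacs/4.5.5-intel/bin/grompp -version
/sw/gromacs/4.5.5-intel/bin/mdrun -version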


Sample PBS script

#!/bin/bash
#PBS -m abe
#PBS -M <YourEmail>@griffith.edu.au
#PBS -N gromacs_intelMPI
#PBS -l select=5:ncpus=2:mem=4g:mpiprocs=2
source $HOME/.bashrc
module load gromacs/4.5.5-intel-mpi
## The number of nodes is given by select=<NUM> above
NODES=5

### $PBS_NODEFILE is a node-list file created by PBS from the select and mpiprocs options.
### The number of MPI processes available is mpiprocs * nodes (= NPROCS).
NPROCS=10



echo "Starting job"
echo Running on host `hostname`
echo Directory is `pwd`
# mdrun gets its rank count from mpirun; GROMACS 4.x mdrun no longer takes -np itself
mpirun -machinefile $PBS_NODEFILE -np $NPROCS env PATH=$PATH LD_LIBRARY_PATH=$LD_LIBRARY_PATH mdrun -v -nice 0 -s eq1.tpr
#
echo "Done with job"

 

Gromacs 5.1.2 Installation

cd /sw/gromacs/5.1.2/src/gromacs-5.1.2/build

module load fftw/3.3.4
module load cmake/3.5.0
module load boost/1.60.0

# GROMACS 5.x builds with CMake; the 4.x autotools flags do not apply. MPI is enabled with -DGMX_MPI=ON.
cmake -DCMAKE_INSTALL_PREFIX=/sw/gromacs/5.1.2/5.1.2-gcc-mpi-gpu -DGMX_MPI=ON -DGMX_GPU=ON -DGMX_BUILD_OWN_FFTW=ON .. 2>&1 | tee configure-mpi.log

make 2>&1 | tee makelog.txt

make install 2>&1 | tee makeInstall.txt

Usage: module load gromacs/5.1.2-gcc-mpi-gpu
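
To verify the build, print the version banner, which also reports whether MPI and GPU (CUDA) support were compiled in. Note that a -DGMX_MPI=ON build installs the driver binary as gmx_mpi by default unless the suffix was overridden at configure time, so try both names:

module load gromacs/5.1.2-gcc-mpi-gpu
gmx --version 2>/dev/null || gmx_mpi --version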


>>>>>>> Sample PBS script >>>>>>>
#!/bin/bash
#PBS -N GromacsCuda
#PBS -m abe
#PBS -M YourEmail@griffith.edu.au
#PBS -l select=1:ncpus=2:mem=4g:mpiprocs=2:ngpus=1
####PBS -l select=1:ncpus=2:mem=4g:mpiprocs=2:ngpus=1:host=n020
#####PBS -l select=1:ncpus=1:mem=12gb
#PBS -l walltime=1:00:00
### Uncomment group_list only if you are in the group named gpu. It is for accounting only and not currently used on gowonda.
###PBS -W group_list=gpu
#PBS -q gpu
source $HOME/.bashrc
## The number of chunks is given by select=<NUM> above

### $PBS_NODEFILE is a node-list file created by PBS from the select and mpiprocs options.
### The number of MPI processes available is mpiprocs * chunks (= NPROCS).
NPROCS=2

echo "Starting job"
echo Running on host `hostname`
echo Directory is `pwd`
module load gromacs/5.1.2-gcc-mpi-gpu
module load mpi/mpt/2.02
module list
nvcc --version
echo `cat $PBS_NODEFILE`
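# With SGI MPT, mpirun accepts the form "mpirun <nprocs> <program>" (the -np flag is optional);
# the line below runs a simple MPI ping-pong connectivity test rather than GROMACS itself.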
mpirun $NPROCS  "/sw/mpi/mpt/pingpong_mpt"
#
echo "Done with job"

>>>>>>> Another sample >>>>>>>
#!/bin/bash
#PBS -N GromacsCuda
#PBS -m abe
#PBS -e 5md_s10v.err
#PBS -o 5md_s10v.log
#PBS -M YourEmail@griffith.edu.au
#PBS -l select=1:ncpus=2:mem=4g:mpiprocs=2:ngpus=1:host=n020
#PBS -l walltime=1:00:00
#PBS -q gpu
source $HOME/.bashrc
## The number of chunks is given by select=<NUM> above

### $PBS_NODEFILE is a node-list file created by PBS from the select and mpiprocs options.
### The number of MPI processes available is mpiprocs * chunks (= NPROCS).
NPROCS=2

cd $PBS_O_WORKDIR
echo "Starting job"
echo Running on host `hostname`
echo Directory is `pwd`
module load gromacs/5.1.2-gcc-mpi-gpu
module load mpi/mpt/2.02
module list
nvcc --version
echo `cat $PBS_NODEFILE`

mpirun $NPROCS gmx mdrun -cpo s10v100hydrap01.cpt -s /sw/gromacs/5.1.2/5.1.2-gcc-mpi-gpu/examples/md100.tpr -o s10v100hydra.trr -c s10v100hydra.gro -g s10v100hydra.log -e s10v100hydra.edr -noappend -maxh 1

echo "Done with job"
>>>>>>>>>>>  
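
The run input file used above (md100.tpr) is produced beforehand with grompp. A minimal sketch, assuming your parameter, structure and topology files are named md.mdp, conf.gro and topol.top (illustrative names; substitute your own):

module load gromacs/5.1.2-gcc-mpi-gpu
gmx grompp -f md.mdp -c conf.gro -p topol.top -o md100.tpr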

 
