[wip] Using a Bramble as a Gramble

The Bramble was set up and brought online in a previous post.

1- Making sure MPI works fine

In the NFS folder, create an mpi_hello.c file with

#include <stdio.h>
#include <mpi.h>

int main(int argc, char** argv) {
 int myrank, nprocs;

 MPI_Init(&argc, &argv);                  /* start the MPI runtime */
 MPI_Comm_size(MPI_COMM_WORLD, &nprocs);  /* total number of ranks */
 MPI_Comm_rank(MPI_COMM_WORLD, &myrank);  /* rank of this process */

 printf("Hello from processor %d of %d\n", myrank, nprocs);

 MPI_Finalize();                          /* shut the runtime down */
 return 0;
}

and compile it with mpicc

mpicc mpi_hello.c -o mpi_hello
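Before involving the other nodes, it is worth a purely local smoke test (a minimal sketch; the process count of 4 is arbitrary):

mpirun -np 4 ./mpi_hello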

Then run it across the nodes with

mpirun -np <nprocs> -host <host1,host2,...> ./mpi_hello

and it should output something like

Hello from processor 2 of 20
Hello from processor 7 of 20

If you monitor the activity of each node, for example with htop -d 1, you will see a transient increase in CPU activity.
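Listing hosts on the command line gets tedious for larger runs; a hostfile is more convenient. A minimal sketch, assuming OpenMPI hostfile syntax and hypothetical node names node01..node04 (MPICH instead expects lines like node01:4 together with -f hosts.txt):

# hosts.txt lists each node and how many ranks it may take
cat > hosts.txt <<'EOF'
node01 slots=4
node02 slots=4
node03 slots=4
node04 slots=4
EOF
mpirun -np 16 -hostfile hosts.txt ./mpi_hello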

2- Compiling Gromacs with the correct options

We install Gromacs in the NFS-shared /apps folder. To work with MPI, the correct CMake flags are required:

cd /apps
wget ftp://ftp.gromacs.org/pub/gromacs/gromacs-5.1.4.tar.gz
tar xvf gromacs-5.1.4.tar.gz
cd gromacs-5.1.4
mkdir build
cd build
cmake .. -DCMAKE_C_COMPILER=gcc -DCMAKE_CXX_COMPILER=g++ -DGMX_MPI=on -DGMX_BUILD_MDRUN_ONLY=on -DCMAKE_INSTALL_PREFIX=/apps/gromacs-5.1.4-mpi -DBUILD_SHARED_LIBS=off -DGMX_BUILD_OWN_FFTW=ON -DGMX_DEFAULT_SUFFIX=ON
make -j 4
make check
make install

To add the programs to the environment, edit /home/ubuntu/.bashrc and add the line

source /apps/gromacs-5.1.4-mpi/bin/GMXRC

and restart the terminal or source the profile file.
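Assuming the mdrun-only MPI build installs its binary as mdrun_mpi, a quick sanity check after reloading the shell:

which mdrun_mpi    # should resolve under /apps/gromacs-5.1.4-mpi
mdrun_mpi -version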

3- Running a Gromacs job

Jobs can now be easily distributed over the nodes. Since the build above is mdrun-only, the installed binary is mdrun_mpi:

mpirun -np <nprocs> -host <hostname1,hostname2,...> mdrun_mpi
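For a concrete run, a sketch with hypothetical paths and node names (benchmark.tpr stands for any prepared input, kept on the NFS share so every node sees the same file):

cd /apps/jobs/benchmark    # hypothetical shared job folder
mpirun -np 16 -host node01,node02,node03,node04 \
    mdrun_mpi -s benchmark.tpr -deffnm benchmark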

4- Playing with options to speed things up

4.1- Compiling our own FFTW

cd /apps
wget http://www.fftw.org/fftw-3.3.6-pl1.tar.gz
tar xvf fftw-3.3.6-pl1.tar.gz
cd fftw-3.3.6-pl1
./configure CC=mpicc --enable-mpi
make -j 4
make install
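To confirm the build landed where GROMACS can find it, a sketch assuming FFTW's default /usr/local prefix:

ls /usr/local/lib/libfftw3*.a      # serial and MPI static libraries
ls /usr/local/include/fftw3*.h     # headers, including fftw3-mpi.h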

4.2- Enabling sub-cycle counters by recompiling against the FFTW libraries built in 4.1 and with GMX_CYCLE_SUBCOUNTERS:

cd /apps
wget ftp://ftp.gromacs.org/pub/gromacs/gromacs-5.1.4.tar.gz
tar xvf gromacs-5.1.4.tar.gz
mv gromacs-5.1.4 gromacs-5.1.4-dev
cd gromacs-5.1.4-dev
mkdir build
cd build
cmake .. -DCMAKE_C_COMPILER=gcc -DCMAKE_CXX_COMPILER=g++ -DGMX_MPI=on -DGMX_BUILD_MDRUN_ONLY=on -DCMAKE_INSTALL_PREFIX=/apps/gromacs-5.1.4-dev -DBUILD_SHARED_LIBS=off -DGMX_FFT_LIBRARY=fftw3 -DGMX_DEFAULT_SUFFIX=OFF -DGMX_BINARY_SUFFIX=_mpi_dev -DGMX_LIBS_SUFFIX=_mpi_dev -DGMX_CYCLE_SUBCOUNTERS=on
make -j 4
make check
make install
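The sub-counters do not change the simulation itself; they add rows to the cycle-accounting table at the end of mdrun's log. A sketch, assuming the _mpi_dev suffix from the build above and a placeholder input file:

mpirun -np 4 mdrun_mpi_dev -s benchmark.tpr   # benchmark.tpr: placeholder input
grep -A 20 "Computing:" md.log                # cycle table, now with sub-counters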

Compiling NWCHEM under Ubuntu with MPI support

Note: Some of the blocks below are written for the csh shell (setenv VAR value) but under bash should be written as export VAR=value.
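For example, the same variable in both syntaxes:

setenv USE_MPI y    # csh form
export USE_MPI=y    # equivalent bash form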

  1. Install required packages:
    sudo apt-get install python-dev gfortran libopenblas-dev libopenmpi-dev openmpi-bin tcsh make

  2. Set environment variables in a /home/$USER/.nwchem_login file containing
    export USE_MPI=y
    export NWCHEM_TARGET=LINUX64
    export USE_PYTHONCONFIG=y
    export PYTHONVERSION=2.7
    export PYTHONHOME=/usr
    export BLASOPT="-lopenblas -lpthread -lrt"
    export BLAS_SIZE=4
    export USE_64TO32=y

    export NWCHEM_TOP=/nwchem
    export NWCHEM_MODULES=all

Common environment variables for building with MPI (these need to be set when NWChem is compiled with MPI):
setenv USE_MPI y
setenv USE_MPIF y
setenv USE_MPIF4 y
setenv MPI_LOC /openmpi-1.4.3 (for example, if you are using OpenMPI)
setenv MPI_LIB /openmpi-1.4.3/lib
setenv MPI_INCLUDE /openmpi-1.4.3/include
setenv LIBMPI "-lmpi_f90 -lmpi_f77 -lmpi -lpthread"

adding one of the following blocks according to the MPI implementation in use:
MPICH:
setenv MPI_LOC /usr/local #location of mpich installation
setenv MPI_LIB $MPI_LOC/lib
setenv MPI_INCLUDE $MPI_LOC/include
setenv LIBMPI "-lfmpich -lmpich -lpmpich"

MPICH2:
setenv MPI_LOC /usr/local #location of mpich2 installation
setenv MPI_LIB $MPI_LOC/lib
setenv MPI_INCLUDE $MPI_LOC/include
setenv LIBMPI "-lmpich -lopa -lmpl -lrt -lpthread"

OpenMPI:
setenv MPI_LOC /usr/local #location of openmpi installation
setenv MPI_LIB $MPI_LOC/lib
setenv MPI_INCLUDE $MPI_LOC/include
setenv LIBMPI "-lmpi_f90 -lmpi_f77 -lmpi -ldl -Wl,--export-dynamic -lnsl -lutil"

  3. Source the .nwchem_login file from the .bashrc:
    source /home/$USER/.nwchem_login

  4. Compile the software:
    cd $NWCHEM_TOP/src
    make nwchem_config NWCHEM_MODULES="all python"
    make 64_to_32
    make

    or:
    cd $NWCHEM_TOP/src
    make nwchem_config
    make FC=gfortran >& make.log
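Once the build finishes, a quick smoke test (a sketch: h2o.nw stands for any small input file you have at hand; the binary path follows from NWCHEM_TOP and NWCHEM_TARGET=LINUX64):

mpirun -np 4 $NWCHEM_TOP/bin/LINUX64/nwchem h2o.nw > h2o.out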


Note:
Sometimes a default.nwchemrc file in the user's home directory is required, listing the directories NWChem should use:
nwchem_basis_library /usr/local/NWChem/data/libraries/
nwchem_nwpw_library /usr/local/NWChem/data/libraryps/
ffield amber
amber_1 /usr/local/NWChem/data/amber_s/
amber_2 /usr/local/NWChem/data/amber_q/
amber_3 /usr/local/NWChem/data/amber_x/
amber_4 /usr/local/NWChem/data/amber_u/
spce /usr/local/NWChem/data/solvents/spce.rst
charmm_s /usr/local/NWChem/data/charmm_s/
charmm_x /usr/local/NWChem/data/charmm_x/

NWChem reads this file automatically at startup, so nothing needs to be added to the .bashrc for it.


Compiling GROMACS 2016.1 in Ubuntu 16.04 with GPU support

  1. Install Intel Parallel Studio XE Cluster Edition from the binary installer with ALL components

This installs the Intel C, C++, and Fortran compilers, the MKL libraries, and Intel MPI. To run on a single machine, MPI is not required. To run across multiple machines, an MPI implementation is required: Intel MPI, MPICH, or OpenMPI.

  2. Set environment variables (optionally add them to .bashrc)

source /opt/intel/bin/compilervars.sh -arch intel64 -platform linux
export PATH=$PATH:"/opt/intel/bin"
export MKLROOT="/opt/intel/mkl"
export CC=icc
export CXX=icc
export F77=ifort
export CFLAGS="-O3 -ipo- -static -std=c99 -fPIC -DMKL_LP64 -DM_PI=3.1415926535897932384"
export CPPFLAGS="-I$MKLROOT/include -I$MKLROOT/include/fftw"
export LDFLAGS="-L$MKLROOT/lib/intel64 -L$MKLROOT/../compiler/lib/intel64"
export LD_LIBRARY_PATH="$MKLROOT/lib/intel64:$MKLROOT/../compiler/lib/intel64:$LD_LIBRARY_PATH"
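Before configuring, a quick check that the Intel toolchain is the one being picked up (a sketch):

which icc ifort    # both should resolve under /opt/intel
icc --version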

  3. Compile GROMACS
    tar xfz gromacs-2016.1.tar.gz
    cd gromacs-2016.1
    mkdir build
    cd build
    cmake .. -DCMAKE_C_COMPILER=icc -DCMAKE_CXX_COMPILER=icc -DGMX_MPI=on -DGMX_GPU=on -DGMX_USE_OPENCL=on -DGMX_SIMD=AVX2_256 -DCMAKE_INSTALL_PREFIX=/opt/gromacs-2016.1-mod -DGMX_FFT_LIBRARY=mkl -DGMX_DEFAULT_SUFFIX=OFF -DGMX_BINARY_SUFFIX=_mod -DGMX_LIBS_SUFFIX=_mod
    make
    #make -j 6
    make check
    sudo make install
    source /opt/gromacs-2016.1-mod/bin/GMXRC
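To confirm that GPU support made it into the build, a sketch (the gmx_mod name follows from the -DGMX_BINARY_SUFFIX=_mod flag above; topol.tpr is a placeholder input):

gmx_mod -version | grep -i "GPU support"    # should report OpenCL
gmx_mod mdrun -s topol.tpr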

Documentation

INTEL Parallel Studio XE Install Guide for Linux

NVIDIA CUDA Quick Start Guide

NVIDIA CUDA Installation Guide for Linux