[wip] Using a Bramble as a Gramble

The Bramble has been set up and is running, as described in a previous post.

1- Making sure MPI works fine

In the NFS folder create a mpi_hello.c file with


int main(int argc, char** argv) {
 int myrank, nprocs;

 MPI_Init(&argc, &argv);
 MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
 MPI_Comm_rank(MPI_COMM_WORLD, &myrank);

 printf("Hello from processor %d of %d\n", myrank, nprocs);

 return 0;

and compile it with mpicc

mpicc mpi_hello.c -o mpi_hello

Run with

mpirun -np <number_of_processes> -host <host1,host2,...> mpi_hello

and it should output something as

Hello from processor 2 of 20
Hello from processor 7 of 20

If you monitor the activity of each node, for example with htop -d 1, you will see a transient increase in CPU activity.

2- To use Gromacs, it has to be compiled with correct options.

We are installing Gromacs at the NFS /apps folder. To work with MPI, the correct MPI flags are required:

cd /apps
wget ftp://ftp.gromacs.org/pub/gromacs/gromacs-5.1.4.tar.gz
tar xvf gromacs-5.1.4.tar.gz
cd gromacs-5.1.4
mkdir build
cd build
# Configure step was missing: enable MPI and install under the NFS /apps
# tree so every node sees the same binaries (the GMXRC line below expects
# the /apps/gromacs/5.1.4 prefix).
cmake .. -DGMX_MPI=ON -DCMAKE_INSTALL_PREFIX=/apps/gromacs/5.1.4
make -j 4
make check
make install

To add the programs to the environment, edit /home/ubuntu/.bashrc and add the line

source /apps/gromacs/5.1.4/bin/GMXRC

and restart the terminal or source the profile file.

3- Run some Gromacs job.

Jobs can now be easily distributed over the nodes using

mpirun -np <number_of_processes> -host <hostname1,hostname2,...> gmx_mpi mdrun

4- Playing with options to speed-up

4.1- Compiling with own FFTW

cd /apps
wget http://www.fftw.org/fftw-3.3.6-pl1.tar.gz
# Extract step was missing before the cd.
tar xvf fftw-3.3.6-pl1.tar.gz
cd fftw-3.3.6-pl1
# The option is '--enable-mpi' (hyphen), not '--enable.mpi'.
# NOTE(review): without --prefix this installs to /usr/local on the build
# node only; pass --prefix under /apps if all nodes must see it via NFS.
./configure CC=mpicc --enable-mpi
make -j 4
make install

4.2- Enabling sub-cycle counters by recompiling with the new FFTW libraries and with GMX_CYCLE_SUBCOUNTERS:

cd /apps
wget ftp://ftp.gromacs.org/pub/gromacs/gromacs-5.1.4.tar.gz
tar xvf gromacs-5.1.4.tar.gz
mv gromacs-5.1.4 gromacs-5.1.4-dev
cd gromacs-5.1.4-dev
mkdir build
cd build
# Configure step was missing: enable MPI, use the FFTW built in 4.1, and
# turn on the sub-cycle counters this section is about. Install to a
# separate -dev prefix so it does not clobber the stock build.
cmake .. -DGMX_MPI=ON -DGMX_FFT_LIBRARY=fftw3 \
  -DGMX_CYCLE_SUBCOUNTERS=ON \
  -DCMAKE_INSTALL_PREFIX=/apps/gromacs/5.1.4-dev
make -j 4
make check
make install

Compiling NWCHEM under Ubuntu with MPICH support

Note: Some code below is written for the csh shell (setenv NAME value) but should be written in bash (export NAME="value").

1. Install required packages:

sudo apt-get install python-dev gfortran libopenblas-dev libopenmpi-dev openmpi-bin tcsh make 

2. Set environment variables at a /home/$USER/.nwchem_login file containing

export USE_MPI=y
export PYTHONHOME=/usr
export BLASOPT="-lopenblas -lpthread -lrt"
export BLAS_SIZE=4
export USE_64TO32=y
# 'setenv' is csh syntax; this file is sourced by bash, so use 'export'.
export NWCHEM_TOP=/nwchem

The following environment variables need to be set when NWChem is compiled with MPI:

# bash equivalents of the csh 'setenv' lines from the NWChem docs
# (see the note above: this file is sourced by bash).
export USE_MPI=y
export USE_MPIF=y
export USE_MPIF4=y
export MPI_LOC=/openmpi-1.4.3   # for example, if you are using OpenMPI
export MPI_LIB=$MPI_LOC/lib
export MPI_INCLUDE=$MPI_LOC/include
export LIBMPI="-lmpi_f90 -lmpi_f77 -lmpi -lpthread"

Adding one of the following blocks according to the implemented MPI:

# MPICH (bash 'export' instead of csh 'setenv' — see the note above)
export MPI_LOC=/usr/local   # location of mpich installation
export MPI_LIB=$MPI_LOC/lib
export MPI_INCLUDE=$MPI_LOC/include
export LIBMPI="-lfmpich -lmpich -lpmpich"


# MPICH2 (bash 'export' instead of csh 'setenv' — see the note above)
export MPI_LOC=/usr/local   # location of mpich2 installation
export MPI_LIB=$MPI_LOC/lib
export MPI_INCLUDE=$MPI_LOC/include
export LIBMPI="-lmpich -lopa -lmpl -lrt -lpthread"


# OpenMPI (bash 'export' instead of csh 'setenv' — see the note above)
export MPI_LOC=/usr/local   # location of openmpi installation
export MPI_LIB=$MPI_LOC/lib
export MPI_INCLUDE=$MPI_LOC/include
export LIBMPI="-lmpi_f90 -lmpi_f77 -lmpi -ldl -Wl,--export-dynamic -lnsl -lutil"

3. Source the .nwchem_login file at the .bashrc file:

source ~/.nwchem_login

4. Compile the software:

# Configure NWChem (include the python module), then — because BLAS_SIZE=4
# with USE_64TO32=y above — convert the code to 32-bit-integer BLAS calls.
make nwchem_config NWCHEM_MODULES="all python"
make 64_to_32


# NOTE(review): this re-runs nwchem_config from $NWCHEM_TOP/src without the
# module list; presumably the two snippets are alternative invocations from
# the upstream docs — confirm which applies. '>&' sends stdout+stderr to the
# log file (bash supports this redirection).
cd $NWCHEM_TOP/src
make nwchem_config
make FC=gfortran >& make.log

Sometimes a default.nwchemrc file at the user home directory is required to list the directories NWChem should use:

nwchem_basis_library /usr/local/NWChem/data/libraries/
nwchem_nwpw_library /usr/local/NWChem/data/libraryps/
ffield amber
amber_1 /usr/local/NWChem/data/amber_s/
amber_2 /usr/local/NWChem/data/amber_q/
amber_3 /usr/local/NWChem/data/amber_x/
amber_4 /usr/local/NWChem/data/amber_u/
spce    /usr/local/NWChem/data/solvents/spce.rst
charmm_s /usr/local/NWChem/data/charmm_s/
charmm_x /usr/local/NWChem/data/charmm_x/

In this case, make sure NWChem can find the file: NWChem reads it directly (it looks for $HOME/.nwchemrc), so it is not a shell script and should not be sourced from .bashrc. If you created it as ~/default.nwchemrc, link it with

ln -s ~/default.nwchemrc ~/.nwchemrc

Compiling GROMACS 2016.1 in Ubuntu 16.04 with GPU support

  1. Install Intel Parallel Studio XE Cluster Edition from the binary file with ALL components

This installs the Intel C, C++ and Fortran compilers, the MKL libraries and the Intel MPI implementation. To run on a single machine, MPI is not required. To run across multiple machines, an MPI implementation is required — Intel MPI, MPICH or OpenMPI.

  2. Set environment variables (optionally add to .bashrc)

# Load the Intel compiler environment (compiler paths, libraries) for 64-bit Linux.
source intel/bin/compilervars.sh -arch intel64 -platform linux
export PATH=$PATH:"/opt/intel"
# NOTE(review): MKLROOT is usually /opt/intel/mkl, not /opt/intel — the
# -I$MKLROOT/include/fftw path below only exists under the mkl subtree; confirm.
export MKLROOT="/opt/intel"
export CC=icc
export CXX=icc
export F77=ifort
# NOTE(review): '-ipo-' (trailing dash) disables interprocedural optimization
# in icc; drop the dash if IPO is actually wanted here.
export CFLAGS="-O3 -ipo- -static -std=c99 -fPIC -DMKL_LP64 -DM_PI=3.1415926535897932384"
# include/fftw provides MKL's FFTW-compatible wrapper headers.
export CPPFLAGS="-I$MKLROOT/include -I$MKLROOT/include/fftw"
export LDFLAGS="-L$MKLROOT/lib/intel64 -L$MKLROOT/../compiler/lib/intel64"
export LD_LIBRARY_PATH="$MKLROOT/lib/intel64:$MKLROOT/../compiler/lib/intel64:$LD_LIBRARY_PATH"

  3. Compile GROMACS
    tar xfz gromacs-2016.1.tar.gz
    cd gromacs-2016.1
    mkdir build
    cd build
    # Configure step was missing (and make was commented out): build with
    # GPU (CUDA) support, use MKL for FFTs via the Intel environment set
    # above, and install to the prefix that the GMXRC line below expects.
    cmake .. -DGMX_GPU=ON -DGMX_FFT_LIBRARY=mkl \
      -DCMAKE_INSTALL_PREFIX=/opt/gromacs-2016.1-mod
    make -j 6
    make check
    sudo make install
    source /opt/gromacs-2016.1-mod/bin/GMXRC


INTEL Parallel Studio XE Install Guide for Linux

NVIDIA CUDA Quick Start Guide

NVIDIA CUDA Installation Guide for Linux