#!/bin/bash
# Runs the NASA benchmark suite with various compilers and MPI implementations
# See README.bsc for more information
# best viewed with "set ts=2"
# Not done yet or tested
#   intel
#   pgi
#

# Script version string, printed at the top of the help output.
VERSION="Version 1.4.1"

# Restore the user's saved module list (if we are managing modules) and
# terminate the script.
# $1 - exit status; defaults to 0 if omitted (previously a bare "exit",
#      which returned the status of the last command instead).
clean_exit(){
if [ "$USINGMODULES" == "yes" ];then
	echo "$MRESTORE" > ~/.currentmodules
fi
# comment out if looking for problems
#/bin/rm -f tmp/*

exit "${1:-0}"
}




show_help() {
        echo ${VERSION}
        printf "Usage: $0 <OPTIONS>\n\n"
        printf "Options:\n"
        printf "  -v                  verbose output from make stage (default=make.log)\n"
	printf "  -c <compiler>       compiler (gnu/gnu4/pgi/intel/path)\n"
	printf "  -n <processors>     number of processors\n"
	printf "  -t <test>           test size (A,B,C,S)\n"
	printf "  -m <mpi>            mpi version(lam,mpich,mpichgm,mpich2,ompi,dummy)\n"
        printf "  -o                  only build programs\n"
        printf "  -s                  run through SGE\n"
        printf "  -h                  show this help\n"
        printf "To run on a single CPU use: '-c gnu -n 1 -t S -m dummy'\n"
        printf "\n"
        clean_exit 0
}

# Sanity-check the requested MPI and compiler before anything is built.
# Reads: $USINGMODULES, $MPI, $Compiler, MPI *_HOME variables.
# Writes: $Compiler (normalised for gnu), $INTELMOD, $ICCPATH, $IFCPATH.
# Exits via clean_exit 1 on any missing prerequisite.
check_stuff() {
# The environment-variable checks only make sense when modules are not
# managing the paths for us.
  if [ "$USINGMODULES" == "no" ];then
    case $MPI in 
	lam) 
	    if [ "$LAM_HOME" = "" ]
	    then
		echo "LAM_HOME is not defined. An environment variable"
		echo "called LAM_HOME needs to be defined. It should"
		echo "point to your LAM directory"
		clean_exit 1
	    else
		echo "LAM_HOME is defined as $LAM_HOME"
	    fi

	    if [ -e "$LAM_HOME/bin/mpirun" ]
	    then 
		echo "mpirun found for lam"
	    else
		echo "mpirun not found in $LAM_HOME/bin/mpirun"
		clean_exit 1
	    fi
	    ;;


	mpich)
	    if [ -n "$MPICH_HOME" ]
	    then
		echo "MPICH_HOME is defined as $MPICH_HOME"
	    else
		echo "MPICH_HOME is not defined"
		clean_exit 1
	    fi

	    if [ -e "$MPICH_HOME/bin/mpirun" ]
	    then 
		echo "mpirun found for mpich"
	    else
		echo "mpirun not found in $MPICH_HOME/bin/mpirun"
		clean_exit 1 
	    fi
	    ;;
	mpichgm)
	    # MPICH-GM shares MPICH_HOME with plain MPICH.
	    if [ -n "$MPICH_HOME" ]
	    then
		echo "MPICH_HOME is defined as $MPICH_HOME"
	    else
		echo "MPICH_HOME is not defined"
		clean_exit 1
	    fi

	    if [ -e "$MPICH_HOME/bin/mpirun" ]
	    then 
		echo "mpirun found for mpichgm"
	    else
		echo "mpirun not found in $MPICH_HOME/bin/mpirun"
		clean_exit 1 
	    fi
	    ;;
	mpich2)
	    if [ -n "$MPICH2_HOME" ]
	    then
		echo "MPICH2_HOME is defined as $MPICH2_HOME"
	    else
		echo "MPICH2_HOME is not defined"
		clean_exit 1
	    fi

	    if [ -e "$MPICH2_HOME/bin/mpiexec" ]
	    then 
		echo "mpiexec found for mpich2"
	    else
		# BUG FIX: this message previously reported $MPICH_HOME,
		# which is the wrong variable for MPICH2.
		echo "mpiexec not found in $MPICH2_HOME/bin/mpiexec"
		clean_exit 1
	    fi
	    ;;
	ompi)
	    if [ -n "$OMPI_HOME" ]
	    then
		echo "OMPI_HOME is defined as $OMPI_HOME"
	    else
		echo "OMPI_HOME is not defined"
		clean_exit 1
	    fi

	    if [ -e "$OMPI_HOME/bin/mpiexec" ]
	    then 
		echo "mpiexec found for ompi"
	    else
		echo "mpiexec not found in $OMPI_HOME/bin/mpiexec"
		clean_exit 1
	    fi
	    ;;
    esac
  fi
# Check that the compilers are present; for gnu, detect the gcc major
# version and normalise $Compiler to gnu (v3) or gnu4 (v4).
	if [ "$Compiler" = "gnu" ] || [ "$Compiler" = "gnu4" ]
	then
		GCC_VERSION=`gcc -v 2>&1|grep version|cut -d" " -f3|cut -d"." -f1`
		case $GCC_VERSION in
		4)		
			echo "gcc detected as version 4."
			Compiler="gnu4"
			;;
		3)
			echo "gcc detected as version 3."
			Compiler="gnu"
			;;
		*)
			# BUG FIX: was "default)", which only matches the
			# literal word "default" — any other gcc version fell
			# through silently. "*" is the shell's catch-all.
			echo "Unable to determine gcc major version."
			clean_exit 1
			;;
		esac
	fi
    case $Compiler in
	gnu) 
	    # NOTE(review): hard-coded /usr/bin paths; compilers installed
	    # elsewhere are not detected.
	    if [ -e "/usr/bin/g77" ]
	    then 
		echo "GNU G77  Compiler found"
	    else
		echo "GNU G77 Compiler Not found : exiting"
		clean_exit 1
	    fi
	    if [ -e "/usr/bin/gcc" ]
	    then 
		echo "GNU C Compiler found"
	    else
		echo "GNU C Compiler Not found : exiting"
		clean_exit 1
	    fi
	    ;;

	gnu4) 
	    if [ -e "/usr/bin/gfortran" ]
	    then 
		echo "GNU gfortran  Compiler found"
	    else
		echo "GNU gfortran Compiler Not found : exiting"
		clean_exit 1
	    fi
	    if [ -e "/usr/bin/gcc" ]
	    then 
		echo "GNU C Compiler found"
	    else
		echo "GNU Compiler Not found : exiting"
		clean_exit 1
	    fi
	    ;;

	pgi)
		if [ "$USINGMODULES" == "yes" ]; then
			module load  pgi
		fi
		if [ -n "$PGI" ]; then
			echo "PGI is defined"
		else
			echo "PGI is not defined"
			clean_exit 1
		fi

		if [ -e "$PGI/linux86/bin/pgcc" ]; then 
			echo "PGI Compiler found"
		else
			echo "PGI Compiler Not found : exiting"
			clean_exit 1
		fi
	  ;;

	intel)
		if [ "$USINGMODULES" == "yes" ]; then
			# Pick whichever intel module the system provides.
			INTELMOD=`module avail  intel 2>&1|grep intel|gawk '{print $1}'`
			module load $INTELMOD
		fi
		# If not using modules, ifc and icc may be in the user's PATH.
		ICCPATH=`which icc 2>/dev/null `
		if [ -z "$ICCPATH" ]
		then
			echo "Intel icc not found"
			clean_exit 1
		fi
		IFCPATH=`which ifc 2>/dev/null `
		if [ -z "$IFCPATH" ]
		then
			echo "Intel ifc not found"
			clean_exit 1
		fi
		echo "Intel ifc and icc found"
	;;
	esac
}



# Copy the benchmark results to the report directory.
# Uses $Report (set with -r); defaults to ~/npbresults when unset.
clean_up() {
    cd -
    echo "cleaning up"
    if [ -z "$Report" ]
    then
	# BUG FIX: was `$Report = "~/npbresults"`, which tried to execute
	# a command rather than assign (and a quoted "~" never expands).
	Report="$HOME/npbresults"
    fi

    # -p: do not fail if the directory already exists.
    mkdir -p "$Report"
    cp results/* "$Report"
}


username=`whoami` 
# Parse the command line.  BUG FIX: "r:" was missing from the optstring,
# so the r) arm below could never fire and -r <reportdir> was ignored.
while getopts "vc:n:t:m:r:ohs" opt
do
        case $opt in
                v) verbose=1 ;;
                c) Compiler=$OPTARG ;;
                n) Processors=$OPTARG ;;
                t) Test=$OPTARG ;;
                m) MPI=$OPTARG ;;
		r) Report=$OPTARG ;;
                o) OnlyBuild="1" ;;
		s) SGEscript="1";;
                h) show_help ;;
        esac
done
# Validate the test class here; everything else is checked later in the
# script. Processor counts are checked at build time: illegal CPU numbers
# will not produce binaries (the legal numbers depend on the test).
case $Test in
	A|B|C|S) ;;
	*) 
		echo "Invalid Test: $Test"
		clean_exit 1
		;;
esac

# The eight NPB kernels/pseudo-applications we build and run.
PROGRAMS="bt cg ep ft is lu mg sp"
# If the BSC modules system is installed, save the user's current module
# set (restored by clean_exit) and unload any loaded MPI module so we
# start from a clean slate.
export USINGMODULES=no
rpm -qf /opt/modules/bin/ >& /dev/null
if [ $? -eq 0 ]; then
  echo "Assuming BSC Modules"
  export MRESTORE=`cat ~/.currentmodules`
  export USINGMODULES=yes
  . /etc/profile.d/modules.sh
  module rm `module list 2>&1|grep mpi|cut -d")" -f2`
fi
#check some of the path names etc.
check_stuff
# Select the sed fix-up script matching the word size and gcc flavour.
ARCH=`uname -i`
# Quote $ARCH: `uname -i` can return an empty or multi-word string,
# which would break an unquoted [ ] test.
if [ "$ARCH" == "x86_64" ]; then
  if [ "$Compiler" = "gnu4" ]; then
   GNUSED4="gnu4-64.sed"
  else
   GNUSED="gnu-64.sed"
  fi
else
  if [ "$Compiler" = "gnu4" ]; then
   GNUSED4="gnu4.sed"
  else
   GNUSED="gnu.sed"
  fi
fi

# Remove artifacts from any previous build; show make's output only
# when the -v flag was given.
if [ -n "$verbose" ]; then
	make veryclean
else
	make veryclean >/dev/null 2>&1
fi

# Generate config/make.def and the run script for the selected MPI,
# then specialise them for the selected compiler.
case $MPI in
	lam)
		echo "Using LAM"
		sed -f ./config/$MPI.sed ./config/make.def.template> ./tmp/m1
		sed -f ./config/$MPI.run.sed ./run.template > ./tmp/r1
		sed "s/fill_in_PROCS/$Processors/" ./tmp/r1 > ./run.$MPI
		case $Compiler in
			gnu)
				echo "Using g77 and gcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/lam-gnu3
				fi
				# LAM's wrapper compilers honour these variables.
				export LAMHF77=g77
				export LAMHCC=gcc
				sed -f ./config/$GNUSED ./tmp/m1> ./tmp/m2
				;;
			gnu4)
				echo "Using gfortran and gcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/lam-gnu4
				fi
				export LAMHF77=gfortran
				export LAMHCC=gcc
				sed -f ./config/$GNUSED4 ./tmp/m1> ./tmp/m2
				;;
			pgi)
				echo "Using pgf77 and pgcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/lam-pgi
					module load pgi
				fi
				export LAMHF77=$PGI/linux86/bin/pgf77
				export LAMHCC=$PGI/linux86/bin/pgcc
				sed -f ./config/pgi.lam.sed ./tmp/m1> ./tmp/m2
				;;
			intel)
				echo "Using Intel ifc and icc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/lam-intel
					module load $INTELMOD 
				fi
				export LAMHF77=$IFCPATH
				export LAMHCC=$ICCPATH
				sed -f ./config/intel.lam.sed ./tmp/m1> ./tmp/m2
				;;
			*)
				# BUG FIX: was "default)", which never matches;
				# "*" is the shell's catch-all pattern.
				echo "Unsupported compiler: $Compiler"
				clean_exit 1
				;;
		esac
		# Real MPI: the dummy library placeholder is blanked out.
		sed 's/fill_in_dummy_library/ /' ./tmp/m2 >config/make.def
		;;
	mpich)
		echo "Using MPICH1"
		sed -f ./config/$MPI.sed ./config/make.def.template> ./tmp/m1
		sed -f ./config/$MPI.run.sed ./run.template > ./tmp/r1
		sed "s/fill_in_PROCS/$Processors/" ./tmp/r1 > ./run.$MPI
		case $Compiler in
			gnu)
				echo "Using g77 and gcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/mpich-gnu3
				fi
				# MPICH's wrapper compilers honour these variables.
				export MPICH_F77=g77
				export MPICH_F77LINKER=g77
				export MPICH_CC=gcc
				export MPICH_CLINKER=gcc
				sed -f ./config/$GNUSED ./tmp/m1> ./tmp/m2
				sed 's!fill_in_PGI! !' ./tmp/m2 >./tmp/m3
				;;
			gnu4)
				echo "Using gfortran and gcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/mpich-gnu4
				fi
				export MPICH_F77=gfortran
				export MPICH_F77LINKER=gfortran
				export MPICH_CC=gcc
				export MPICH_CLINKER=gcc
				sed -f ./config/$GNUSED4 ./tmp/m1> ./tmp/m2
				sed 's!fill_in_PGI! !' ./tmp/m2 >./tmp/m3
				;;
			pgi)
				echo "Using pgf77 and pgcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/mpich-pgi
					module load pgi 
				fi
				export MPICH_F77=$PGI/linux86/bin/pgf77
				export MPICH_F77LINKER=$PGI/linux86/bin/pgf77
				export MPICH_CC=$PGI/linux86/bin/pgcc
				export MPICH_CLINKER=$PGI/linux86/bin/pgcc
				sed -f ./config/pgi.sed ./tmp/m1> ./tmp/m2
				# PGI needs farg.o linked in for Fortran argument handling.
				cd pgi
				$PGI/linux86/bin/pgf77 -c farg.f -Msecond_underscore >&make.out
				cd ..
				sed 's!fill_in_PGI! ../pgi/farg.o !' ./tmp/m2 >./tmp/m3
				;;
			intel)
				echo "Using Intel ifc and icc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/mpich-intel
					module load $INTELMOD
				fi
				export MPICH_F77=$IFCPATH
				export MPICH_F77LINKER=$IFCPATH
				export MPICH_CC=$ICCPATH
				export MPICH_CLINKER=$ICCPATH
				sed -f ./config/intel.sed ./tmp/m1> ./tmp/m2
				cd intel
				$IFCPATH -c -nus farg.f  >&make.out
				cd ..
				sed 's!fill_in_PGI! ../intel/farg.o !' ./tmp/m2 >./tmp/m3
				;;
			*)
				# BUG FIX: was "default)", which never matches;
				# "*" is the shell's catch-all pattern.
				echo "Unsupported compiler: $Compiler"
				clean_exit 1
				;;
		esac
		sed 's/fill_in_dummy_library/ /' ./tmp/m3 >config/make.def
		;;

	mpichgm)
		echo "Using MPICHGM"
		sed -f ./config/$MPI.sed ./config/make.def.template> ./tmp/m1
		sed -f ./config/$MPI.run.sed ./run.template > ./tmp/r1
		sed "s/fill_in_PROCS/$Processors/" ./tmp/r1 > ./run.$MPI
		case $Compiler in
			gnu)
				echo "Using g77 and gcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/mpichgm-gnu3
				fi
				export MPICH_F77=g77
				export MPICH_F77LINKER=g77
				export MPICH_CC=gcc
				export MPICH_CLINKER=gcc
				sed -f ./config/$GNUSED ./tmp/m1> ./tmp/m2
				sed 's!fill_in_PGI! !' ./tmp/m2 >./tmp/m3
				;;
			gnu4)
				echo "Using gfortran and gcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/mpichgm-gnu4
				fi
				export MPICH_F77=gfortran
				export MPICH_F77LINKER=gfortran
				export MPICH_CC=gcc
				export MPICH_CLINKER=gcc
				sed -f ./config/$GNUSED4 ./tmp/m1> ./tmp/m2
				sed 's!fill_in_PGI! !' ./tmp/m2 >./tmp/m3
				;;
			pgi)
				echo "Using pgf77 and pgcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/mpichgm-pgi
					module load pgi 
				fi
				export MPICH_F77=$PGI/linux86/bin/pgf77
				export MPICH_F77LINKER=$PGI/linux86/bin/pgf77
				export MPICH_CC=$PGI/linux86/bin/pgcc
				export MPICH_CLINKER=$PGI/linux86/bin/pgcc
				sed -f ./config/pgi.sed ./tmp/m1> ./tmp/m2
				# PGI needs farg.o linked in for Fortran argument handling.
				cd pgi
				$PGI/linux86/bin/pgf77 -c farg.f -Msecond_underscore >&make.out
				cd ..
				sed 's!fill_in_PGI! ../pgi/farg.o !' ./tmp/m2 >./tmp/m3
				;;
			intel)
				echo "Using Intel ifc and icc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/mpichgm-intel
					module load $INTELMOD 
				fi
				export MPICH_F77=$IFCPATH
				export MPICH_F77LINKER=$IFCPATH
				export MPICH_CC=$ICCPATH
				export MPICH_CLINKER=$ICCPATH
				sed -f ./config/intel.sed ./tmp/m1> ./tmp/m2
				cd intel
				$IFCPATH -c -nus farg.f  >&make.out
				cd ..
				sed 's!fill_in_PGI! ../intel/farg.o !' ./tmp/m2 >./tmp/m3
				;;
			*)
				# BUG FIX: was "default)", which never matches;
				# "*" is the shell's catch-all pattern.
				echo "Unsupported compiler: $Compiler"
				clean_exit 1
				;;
		esac
		sed 's/fill_in_dummy_library/ /' ./tmp/m3 >config/make.def
		;;

	mpich2)
		echo "Using MPICH2"
		sed -f ./config/$MPI.sed ./config/make.def.template> ./tmp/m1
		sed -f ./config/$MPI.run.sed ./run.template > ./tmp/r1
		sed "s/fill_in_PROCS/$Processors/" ./tmp/r1 > ./run.$MPI
		case $Compiler in
			gnu)
				echo "Using g77 and gcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/mpich2-gnu3
				fi
				sed -f ./config/$GNUSED ./tmp/m1> ./tmp/m2
				sed 's!fill_in_PGI! !' ./tmp/m2 >./tmp/m3
				;;
			gnu4)
				echo "Using gfortran and gcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/mpich2-gnu4
				fi
				sed -f ./config/$GNUSED4 ./tmp/m1> ./tmp/m2
				sed 's!fill_in_PGI! !' ./tmp/m2 >./tmp/m3
				;;
			pgi)
				echo "Using pgf77 and pgcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/mpich2-pgi
					module load pgi 
				fi
				export MPICH_F77=$PGI/linux86/bin/pgf77
				export MPICH_F77LINKER=$PGI/linux86/bin/pgf77
				export MPICH_CC=$PGI/linux86/bin/pgcc
				export MPICH_CLINKER=$PGI/linux86/bin/pgcc
				sed -f ./config/pgi.sed ./tmp/m1> ./tmp/m2
				# PGI needs farg.o linked in for Fortran argument handling.
				cd pgi
				$PGI/linux86/bin/pgf77 -c farg.f -Msecond_underscore >&make.out
				cd ..
				sed 's!fill_in_PGI! ../pgi/farg.o !' ./tmp/m2 >./tmp/m3
				;;
			intel)
				echo "Using Intel ifc and icc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/mpich2-intel
					module load $INTELMOD
				fi
				export MPICH_F77=$IFCPATH
				export MPICH_F77LINKER=$IFCPATH
				export MPICH_CC=$ICCPATH
				export MPICH_CLINKER=$ICCPATH
				sed -f ./config/intel.sed ./tmp/m1> ./tmp/m2
				cd intel
				$IFCPATH -c -nus farg.f  >&make.out
				cd ..
				sed 's!fill_in_PGI! ../intel/farg.o !' ./tmp/m2 >./tmp/m3
				;;
			*)
				# BUG FIX: was "default)", which never matches;
				# "*" is the shell's catch-all pattern.
				echo "Unsupported compiler: $Compiler"
				clean_exit 1
				;;
		esac
		sed 's/fill_in_dummy_library/ /' ./tmp/m3 >config/make.def
		;;
	ompi) 
		echo "Using OpenMPI"
		sed -f ./config/$MPI.sed ./config/make.def.template> ./tmp/m1
		sed -f ./config/$MPI.run.sed ./run.template > ./tmp/r1
		sed "s/fill_in_PROCS/$Processors/" ./tmp/r1 > ./run.$MPI
		case $Compiler in
			gnu)
				echo "Using g77 and gcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/openmpi-gnu3
				fi
				sed -f ./config/$GNUSED ./tmp/m1> ./tmp/m2
				sed 's!fill_in_PGI! !' ./tmp/m2 >./tmp/m3
				;;
			gnu4)
				echo "Using gfortran and gcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/openmpi-gnu4
				fi
				sed -f ./config/$GNUSED4 ./tmp/m1> ./tmp/m2
				sed 's!fill_in_PGI! !' ./tmp/m2 >./tmp/m3
				;;
			pgi)
				echo "Using pgf77 and pgcc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/openmpi-pgi
					module load pgi 
				fi
				sed -f ./config/pgi.sed ./tmp/m1> ./tmp/m2
				# PGI needs farg.o linked in for Fortran argument handling.
				cd pgi
				$PGI/linux86/bin/pgf77 -c farg.f -Msecond_underscore >&make.out
				cd ..
				sed 's!fill_in_PGI! ../pgi/farg.o !' ./tmp/m2 >./tmp/m3
				;;
			intel)
				echo "Using Intel ifc and icc"
				if [ "$USINGMODULES" == "yes" ];then
					module load mpi/openmpi-intel
					# NOTE(review): hard-coded "intel91" — the other MPI
					# arms load $INTELMOD instead; confirm intentional.
					module load intel91
				fi
				sed -f ./config/intel.sed ./tmp/m1> ./tmp/m2
				cd intel
				$IFCPATH -c -nus farg.f  >&make.out
				cd ..
				sed 's!fill_in_PGI! ../intel/farg.o !' ./tmp/m2 >./tmp/m3
				;;
			*)
				# BUG FIX: this arm had no catch-all at all, so an
				# unknown compiler silently produced a broken config;
				# fail cleanly like the other MPI arms.
				echo "Unsupported compiler: $Compiler"
				clean_exit 1
				;;
		esac
		sed 's/fill_in_dummy_library/ /' ./tmp/m3 >config/make.def
		;;
	dummy)
		# Single-CPU build against a stub MPI library; force 1 processor.
		Processors="1"
		echo "Using dummy (single CPU) version"
		sed -f ./config/$MPI.sed ./config/make.def.template> ./tmp/m0
		sed -f ./config/$MPI.run.sed ./run.template > ./tmp/r1
		sed "s/fill_in_PROCS/$Processors/" ./tmp/r1 > ./run.$MPI
		#Selecting the dummy compiler is a bit different because 
		#we are not using MPI wrappers around the compilers.
		case $Compiler in
			gnu)
				echo "Using g77 and gcc"
				sed -f ./config/dummy.gnu.sed ./tmp/m0> ./tmp/m1
				sed -f ./config/$GNUSED ./tmp/m1> ./tmp/m2
				cp ./MPI_dummy/Makefile.gnu ./MPI_dummy/Makefile
				;;
			gnu4)
				echo "Using gfortran and gcc"
				sed -f ./config/dummy.gnu4.sed ./tmp/m0> ./tmp/m1
				sed -f ./config/$GNUSED4 ./tmp/m1> ./tmp/m2
				cp ./MPI_dummy/Makefile.gnu4 ./MPI_dummy/Makefile
				;;
			pgi)
				if [ "$USINGMODULES" == "yes" ];then
					module load pgi 
				fi
				echo "Using pgf77 and pgcc"
				sed -f ./config/dummy.pgi.sed ./tmp/m0> ./tmp/m1
				sed -f ./config/pgi.sed ./tmp/m1> ./tmp/m2
				cp ./MPI_dummy/Makefile.pgi ./MPI_dummy/Makefile
				;;
			intel)
				if [ "$USINGMODULES" == "yes" ];then
					module load $INTELMOD
				fi
				echo "Using ifc and icc"
				sed -f ./config/dummy.intel.sed ./tmp/m0> ./tmp/m1
				sed -f ./config/intel.sed ./tmp/m1> ./tmp/m2
				cp ./MPI_dummy/Makefile.intel ./MPI_dummy/Makefile
				;;
			*)
				# BUG FIX: was "default)", which never matches;
				# "*" is the shell's catch-all pattern.
				echo "Unsupported compiler: $Compiler"
				clean_exit 1
				;;
		esac
		# Dummy MPI: pull in the stub library's make fragment.
		sed 's!fill_in_dummy_library!include ../config/make.dummy!' ./tmp/m2 >config/make.def
		;;
	*)
		# BUG FIX: was "default)", which only matches the literal word
		# "default" — an unknown MPI selection fell straight through.
		# "*" makes this a real catch-all.
		echo "Unknown MPI version: $MPI"
		clean_exit 1
		;;
esac


# Generate config/suite.def, which tells the NPB make system which
# benchmarks to build, at what class and processor count.
echo "Generating suite.def"
echo "#Generated by BPS">config/suite.def
for prog in $PROGRAMS; do
	echo "$prog      $Test       $Processors">>config/suite.def
done
# Build the benchmarks, logging to make.log unless -v was given.
echo "Making test programs"
if [ "$USINGMODULES" == "yes" ];then
  module list
fi
if [ -n "$verbose" ]; then
	make suite
else
	make suite >& ./make.log
fi

# Run the tests: either print instructions (-o), submit through SGE (-s),
# or execute the generated run script directly.
if [ -n "$OnlyBuild" ]
then
	echo "Compilation complete, you may run the programs generated by executing: sh ./run.$MPI $Processors $Test $Compiler $MPI"
else
	if [ -n "$SGEscript" ]
	then
		# Build an SGE submission script in results/ from the template,
		# appending one launcher line per benchmark, then qsub it.
		case $MPI in
			lam)
				sed -f ./config/lam.sge.sed run.sge.template> ./tmp/m1
				sed "s!fill_in_NUMBER_OF_CPUS!$Processors!" ./tmp/m1 >./tmp/m2
				sed "s!fill_in_JOBNAME!npb.$Compiler.$MPI.$Test.$Processors!" ./tmp/m2 > results/npb.$Compiler.$MPI.$Test.$Processors.sh
				for P in $PROGRAMS 
				do
					echo "$LAM_HOME/bin/mpirun -np $Processors ../bin/$P.$Test.$Processors">>results/npb.$Compiler.$MPI.$Test.$Processors.sh
				done
				;;
			mpich)
				sed -f ./config/mpich.sge.sed run.sge.template> ./tmp/m1
				sed "s!fill_in_NUMBER_OF_CPUS!$Processors!" ./tmp/m1 >./tmp/m2
				sed "s!fill_in_JOBNAME!npb.$Compiler.$MPI.$Test.$Processors!" ./tmp/m2 > results/npb.$Compiler.$MPI.$Test.$Processors.sh
				for P in $PROGRAMS 
				do
					echo "$MPICH_HOME/bin/mpirun -np $Processors -machinefile \$TMPDIR/machines ../bin/$P.$Test.$Processors">>results/npb.$Compiler.$MPI.$Test.$Processors.sh
				done
				;;
			mpichgm)
				sed -f ./config/mpichgm.sge.sed run.sge.template> ./tmp/m1
				sed "s!fill_in_NUMBER_OF_CPUS!$Processors!" ./tmp/m1 >./tmp/m2
				sed "s!fill_in_JOBNAME!npb.$Compiler.$MPI.$Test.$Processors!" ./tmp/m2 > results/npb.$Compiler.$MPI.$Test.$Processors.sh
				for P in $PROGRAMS 
				do
					echo "$MPICH_HOME/bin/mpirun.ch_gm -np $Processors -machinefile \$TMPDIR/machines --gm-kill 20 ../bin/$P.$Test.$Processors">>results/npb.$Compiler.$MPI.$Test.$Processors.sh
				done
				;;
			mpich2)
				sed -f ./config/mpich2.sge.sed run.sge.template> ./tmp/m1
				sed "s!fill_in_NUMBER_OF_CPUS!$Processors!" ./tmp/m1 >./tmp/m2
				sed "s!fill_in_JOBNAME!npb.$Compiler.$MPI.$Test.$Processors!" ./tmp/m2 > results/npb.$Compiler.$MPI.$Test.$Processors.sh
				for P in $PROGRAMS 
				do
					echo "$MPICH2_HOME/bin/mpiexec -np $Processors -machinefile \$TMPDIR/machines -port \$port ../bin/$P.$Test.$Processors">>results/npb.$Compiler.$MPI.$Test.$Processors.sh
				done
				;;
			ompi)
				sed -f ./config/ompi.sge.sed run.sge.template> ./tmp/m1
				sed "s!fill_in_NUMBER_OF_CPUS!$Processors!" ./tmp/m1 >./tmp/m2
				sed "s!fill_in_JOBNAME!npb.$Compiler.$MPI.$Test.$Processors!" ./tmp/m2 > ./tmp/m3 
				sed "s!_OMPI_PATH_!$OMPI_HOME!g" ./tmp/m3 >results/npb.$Compiler.$MPI.$Test.$Processors.sh
				for P in $PROGRAMS 
				do
					echo "$OMPI_HOME/bin/mpiexec -np $Processors -machinefile \$TMPDIR/machines  ../bin/$P.$Test.$Processors">>results/npb.$Compiler.$MPI.$Test.$Processors.sh
				done
				;;
			dummy)
				sed -f ./config/dummy.sge.sed run.sge.template> ./tmp/m1
				sed "s!fill_in_NUMBER_OF_CPUS! !" ./tmp/m1 >./tmp/m2
				sed "s!fill_in_JOBNAME!npb.$Compiler.$MPI.$Test.$Processors!" ./tmp/m2 > ./tmp/m3 
				# Single CPU: strip the parallel-environment request.
				sed "s!#$ -pe!!g" ./tmp/m3 >results/npb.$Compiler.$MPI.$Test.$Processors.sh
				for P in $PROGRAMS 
				do
					echo "../bin/$P.$Test.$Processors">>results/npb.$Compiler.$MPI.$Test.$Processors.sh
				done
				;;
			*)
				# BUG FIX: was "default)", which never matches;
				# "*" is the shell's catch-all pattern.
				echo "not implemented yet"
				clean_exit 1
			;;
		esac
		cd results
		if [ "$USINGMODULES" == "yes" ];then
			module load gridengine
		fi
		qsub npb.$Compiler.$MPI.$Test.$Processors.sh
		cd ..

	else
		# Direct (non-SGE) run via the generated run script.
		echo "Running NASA Parallel Suite Class $Test using:"
		echo "    $Compiler Compilers"
		echo "    $MPI MPI library" 
		echo "    $Processors Processors"
		sh run.$MPI $Processors $Test $Compiler $MPI
	fi
fi



clean_exit 0
