#!/bin/bash
#
# Submission script for GridEngine (GE). Each job will be executed via
# jobScript.sh. The job script supports up to 12 parameters. Edit the
# user-specific part of the script according to your program.
#
# Input to the script is a file list with one file per line. For each
# file a job is started. With the parameter nFilesPerJob a comma-separated
# file list will be generated and handed to the job script. This feature
# is useful when running many small jobs. Each job has its own log file.
# All needed directories for the log files will be created if
# non-existing.
#
# IMPORTANT: the hera/prometheus cluster jobs will only see the /hera
# file system. All needed scripts, programs and parameters have to be
# located on /hera or the job will crash. This script syncs your working
# dir to the submission dir on /hera. Make sure your scripts use the
# submission dir! Software should be taken from
# /cvmfs/hades.gsi.de/install/
#
# Job log files will be named like the input files. If nFilesPerJob > 1
# the log files will contain the part number.
#
######################################################################
#   CONFIGURATION

user=$(whoami)
currentDir=$(pwd)

# Decay tables (arr_pluto, arr_dimuon, arr_dalitz, arr_dir, arr_decay)
# are provided by pluto_table.sh.
. /u/${user}/workspace/pluto/macros/pluto_table.sh

# Optional arguments: <reaction index into the decay tables> <beam energy [GeV]>
if [ "$#" -eq "2" ]
then
    pluto_reac=$1
    beamenergy=$2
else
    pluto_reac=17
    beamenergy=25
fi

ckt=cktB_T180
collision=auau

# Mother particle will decay either into a dielectron or dimuon pair
# plus possibly a third particle ('dalitz').
# Particle names must be as in PStdData.cc
mother=${arr_pluto[$pluto_reac]}
dimuon=${arr_dimuon[$pluto_reac]}   # 0: decay into e+/e-, 1: decay into mu+/mu-
dalitz=${arr_dalitz[$pluto_reac]}   # pluto particle name for Dalitz decay; "no" if there should be no Dalitz

# used to generate output directories and file names
type=${arr_dir[$pluto_reac]}
decay=${arr_decay[$pluto_reac]}
energy=${beamenergy}gev

exec=pluto_qa_thermal    # name of executable to evaluate Pluto files
cbmroot_version=mar13    # cbmroot release version
nfiles=5000              # number of generic file names to put into the created filelist

submmissionbase=/hera/hades/user/${user}/pluto/macros
submissiondir=${submmissionbase}/${exec}
nFilesPerJob=250                          # number of files to be analyzed by 1 job (default==1)
jobscript=${submissiondir}/jobScript.sh   # exec script (full path, call without dot, set it executable!)
inputdir=/hera/hades/user/ekrebs/cbm/pluto/${ckt}/${energy}/${type}/${decay}
outputdir=${inputdir}/qa                  # outputdir for files AND logFiles
pathoutputlog=${outputdir}/out            # protocol from batch farm for each file
filename=pluto.${collision}.${energy}.${type}.${decay}   # filename of log file if nFilesPerJob > 1 (partnumber will be appended)

par1=/cvmfs/fairroot.gsi.de/cbm/cbmroot/release/$cbmroot_version/build/config.sh   # optional par1 : environment script
#par1=/hera/hades/user/ekrebs/cbm/cbmroot/$cbmroot_version/build/config.sh         # optional par1 : environment script
par2=${submissiondir}/${exec}   # optional par2 : executable
par3=${outputdir}               # optional par3 : outputdir
par4=""                         # optional par4 : outputfile (from file list)
par5=${inputdir}                # optional par5 : directory with Pluto files
par6=${dimuon}                  # optional par6 : analyze dimuons or dielectrons
par7=
par8=
par9=
par10=
par11=
par12=
resources="-l h_rt=10:0:0,h_vmem=2G"   # runtime < 10h, mem < 2GB

filelist=${currentDir}/all_files.list  # file list in local dir! not in submissiondir!!!
createList=yes                         # (yes/no) use this to create a file list with generic
                                       # names (for simulation, testing); use "no" if you
                                       # have a filelist available
######################################################################
# for real data you will have a filelist with real filenames
#---------------------------------------------------------------------
# create a file list for submission (simulation, testing etc.)
if [ "$createList" == "yes" ]
then
    if [ -f "$filelist" ]
    then
        echo "===> REMOVING EXISTING FILELIST : $filelist"
        rm -f "$filelist"
    fi
    echo "===> CREATE FILELIST : $filelist"
    for ((ct=1; ct<=nfiles; ct++))
    do
        num=$(printf "%04i" "$ct")
        # generic input file names built from the configured base name
        # (was a garbled placeholder; ${filename} matches the naming
        # scheme documented in the configuration section)
        echo "${filename}.${num}.root" >> "$filelist"
    done
fi
#---------------------------------------------------------------------

nFiles=$(wc -l < "$filelist")

#---------------------------------------------------------------------
# create needed dirs
if [ ! -d "$submmissionbase" ]
then
    echo "===> CREATE SUBMISSIONBASEDIR : $submmissionbase"
    mkdir -p "$submmissionbase"
else
    echo "===> USE SUBMISSIONBASEDIR : $submmissionbase"
fi

if [ ! -d "$pathoutputlog" ]
then
    echo "===> CREATE LOGDIR : $pathoutputlog"
    mkdir -p "$pathoutputlog"
else
    echo "===> USE LOGDIR : $pathoutputlog"
fi
#---------------------------------------------------------------------

#---------------------------------------------------------------------
# sync the local modified stuff to the submission dir
echo "===> SYNC CURENTDIR TO SUBMISSIONDIR : rsync -vHaz $currentDir ${submmissionbase}"
rsync -vHaz "$currentDir" "${submmissionbase}/"

syncStat=$?

if [ ! "$syncStat" -eq 0 ]
then
    echo "===> ERROR : SYNCHRONIZATION ENCOUNTERED PROBLEMS"
fi
echo "-------------------------------------------------"
#---------------------------------------------------------------------
ctF=0          # counter for file number
ctJ=0          # counter for job number
partNumber=0   # counter for part number
#---------------------------------------------------------------------
# read the file list into a job array (line by line, preserving
# whitespace; avoids the word-splitting of "for f in $(cat ...)")
declare -a jobarray
ct1=0
while IFS= read -r file
do
    jobarray[$ct1]=$file
    ((ct1+=1))
done < "$filelist"
#---------------------------------------------------------------------

#---------------------------------------------------------------------
# loop over the job array and submit parts with nFilesPerJob to GE
while ((ctF<nFiles))
do
    #-----------------------------------------------------------------
    # build comma-separated file list per job
    if [ "$nFilesPerJob" -gt 1 ]
    then
        infileList=${jobarray[${ctF}]}
        ((ctF+=1))
        for (( ctList=1; ctList<nFilesPerJob; ctList++ ))
        do
            if [ "$ctF" -lt "${nFiles}" ]
            then
                infileList="${infileList},${jobarray[${ctF}]}"
                ((ctF+=1))
            fi
        done
    else
        infileList=${jobarray[${ctF}]}
        ((ctF+=1))
    fi
    #-----------------------------------------------------------------
    ((partNumber+=1))

    # log file carries the part number (was a garbled placeholder;
    # ${filename} is documented as "filename of log file if
    # nFilesPerJob > 1 (partnumber will be appended)")
    logfile="${pathoutputlog}/${filename}_${partNumber}.log"
    if [ "$nFilesPerJob" -eq 1 ]
    then
        # one file per job: name the log after the input file instead
        file=$(basename "${infileList}")
        logfile="${pathoutputlog}/${file}.log"
    fi

    if [ -f "${logfile}" ]
    then
        rm -f "${logfile}"
    fi

    echo "-----------------------------------------------------------------------------"
    echo "add part ${partNumber} last file ${ctF} of $nFiles ====> add new job ${infileList}"

    ######################################################################
    #   SEND NEW JOB (USER SPECIFIC)
    par4=${infileList}

    command="-j y -wd ${submissiondir} ${resources} -o ${logfile} \
    ${jobscript} ${par1} ${par2} ${par3} ${par4} ${par5} ${par6} ${par7} ${par8} ${par9} ${par10} ${par11} ${par12}"
    # jobscript.sh defall.sh prog outdir outfile nev
    echo qsub ${command}

    # only submit if the earlier rsync to the submission dir succeeded;
    # jobs must not run against a stale/incomplete submission dir
    if [ ! "$syncStat" -eq 0 ]
    then
        echo "===> ERROR : SYNCHRONIZATION ENCOUNTERED PROBLEMS"
    else
        #echo ${command}
        qsub ${command}
    fi
    ######################################################################
done
#---------------------------------------------------------------------