#!/usr/bin/env bash
# File indexing completed on 2026-04-03 08:12:35
0001
# Identify the submitting user and anchor HOME under the sPHENIX macros area.
export USER="$(id -u -n)"
export LOGNAME=${USER}
export HOME=/sphenix/u/${LOGNAME}/macros/detectors/sPHENIX/


# Defaults for the command-line options (see handle_options / --help).
# NOTE: the original assigned triggertype="MB" twice; the duplicate is removed.
runnumber=0                          # reserved; not currently used
outDir=""                            # -o: output dir; empty -> default set in set_out_dir
triggertype="MB"                     # -t: trigger/sample type (MB, Jet5..Jet60, PhotonJet5/10)
dosubmit=false                       # -s: actually condor_submit the generated jobs
prodtype="26"                        # CreateFileList.pl production type (derived from triggertype)
condor_testfile="condor_blank.job"   # 14-line template for the condor job files
verbose=false                        # -v: progress messages
superverbose=false                   # -V: per-line chatter (implies -v)
allsegments=false                    # reserved; not currently used
filedensity=25                       # -n: input segments per chunk/job
nfiles=100                           # -j: number of jobs to create/submit (0 = all chunks)
makedatalist=false                   # -g: regenerate the DST file lists first
forcechunk=false                     # -c: force re-chunking of the lists
0021
0022
0023
make_condor_jobs()
{
    # Generate one HTCondor .job file per chunk by filling the template
    # ($condor_testfile) with per-chunk arguments and log paths.
    # Globals read: nfiles, triggertype, filedensity, condor_testfile,
    #               outDir, MYINSTALL, USER, verbose
    # nfiles == 0 means "one job for every chunk in the density list".
    if [[ $nfiles -eq 0 ]]; then
        nfiles=$(wc -l < "${triggertype}_data/jet_density_${filedensity}.list")
    fi
    # The original assumed condor_file_dir already existed; create it so a
    # fresh checkout works too.
    mkdir -p "$(pwd)/condor_file_dir"
    # The template does not change between jobs: read it once, not per loop.
    IFS=$'\n' read -d '' -r -a blanklines < "$condor_testfile"
    local i j
    for (( i = 0; i < nfiles; i++ )); do
        j=$(( i + 1 ))   # density lists are addressed by 1-based line number
        condor_file="$(pwd)/condor_file_dir/condor_${triggertype}_seg_${i}.job"
        condor_out_file="$(pwd)/condor_file_dir/condor_${triggertype}_seg_${i}.out"
        condor_err_file="$(pwd)/condor_file_dir/condor_${triggertype}_seg_${i}.err"
        condor_log_file="$(pwd)/condor_file_dir/condor_${triggertype}_seg_${i}.log"
        # Line j of each density list names the chunk list for this job.
        global=$(sed "${j}q;d" "${triggertype}_data/global_density_${filedensity}.list")
        truth=$(sed "${j}q;d" "${triggertype}_data/truth_density_${filedensity}.list")
        jet=$(sed "${j}q;d" "${triggertype}_data/jet_density_${filedensity}.list")
        calo=$(sed "${j}q;d" "${triggertype}_data/calo_density_${filedensity}.list")

        # BUG FIX: the original tested "$vebose_mode" (typo), so this message
        # never printed even with -v.
        if [ "$verbose" = true ]; then
            echo "Producing condor job file " "$condor_file"
        fi
        echo "${blanklines[0]}" > "$condor_file"
        echo "${blanklines[1]}$(pwd)/run_VandySkimmerTruth.sh" >> "$condor_file"
        echo "${blanklines[2]}$calo $truth $jet $global $outDir $MYINSTALL $(pwd)" >> "$condor_file"
        echo "${blanklines[3]}$condor_out_file" >> "$condor_file"
        echo "${blanklines[4]}$condor_err_file" >> "$condor_file"
        echo "${blanklines[5]}$condor_log_file" >> "$condor_file"
        echo "${blanklines[6]} $outDir" >> "$condor_file"
        echo "${blanklines[7]}" >> "$condor_file"
        echo "${blanklines[8]}" >> "$condor_file"
        echo "${blanklines[9]}" " " "$USER" >> "$condor_file"
        echo "${blanklines[10]}" >> "$condor_file"
        echo "${blanklines[11]}" >> "$condor_file"
        echo "${blanklines[12]}" >> "$condor_file"
        echo "${blanklines[13]}" >> "$condor_file"
    done
}
make_home_dir()
{
    # Ensure the user's HOME work area exists, creating it on first use.
    if [[ -d "${HOME}" ]]; then
        return 0
    fi
    if [[ "$verbose" == true ]]; then
        echo "Home Directory doesn't exist. Creating now"
    fi
    mkdir -p "${HOME}"
}
0072
set_out_dir()
{
    # Default the output directory when none was given on the command
    # line, then make sure it exists on disk.
    [[ -n "$outDir" ]] || outDir=/sphenix/tg/tg01/jets/${USER}/VandyDSTs/
    if [[ ! -d "${outDir}" ]]; then
        if [[ "$verbose" == true ]]; then
            echo "Output Directory doesn't exist. Creating now"
        fi
        mkdir -p "${outDir}"
    fi
}
0085
get_dst_list()
{
    # Generate fresh DST file lists for the selected trigger type by running
    # CreateFileList.pl inside <triggertype>_data/, then rename the outputs
    # to the per-stream names the rest of this script expects.
    # Globals read: triggertype, verbose; writes: prodtype (via converttriggertype)
    base_dir=$(pwd)
    if [ "$verbose" = true ]; then
        echo "Checking if data directory exists for ${triggertype}"
    fi
    if [ ! -d "${triggertype}_data" ]; then
        if [ "$verbose" = true ]; then
            echo "data directory doesn't exist for ${triggertype}, fixing now"
        fi
        mkdir -p "${triggertype}_data"
    fi
    if [ "$verbose" = true ]; then
        echo "Create DST for ${triggertype}"
    fi
    # Guard the cd: running CreateFileList.pl in the wrong directory would
    # clobber list files elsewhere.
    cd "${triggertype}_data" || return 1
    converttriggertype   # maps triggertype -> numeric prodtype for CreateFileList.pl
    # NOTE(review): run number 28 is hard-coded here — confirm it is intended
    # for all trigger types.
    CreateFileList.pl -nopileup -type ${prodtype} -run 28 G4Hits DST_TRUTH_JET DST_CALO_CLUSTER DST_GLOBAL
    mv g4hits.list truth.list
    mv dst_truth_jet.list jet.list
    mv dst_calo_cluster.list calo.list
    mv dst_global.list global.list
    cd "${base_dir}" || return 1
}
chunk_dst_list()
{
    # Split the full per-stream lists (truth/jet/calo/global.list) into chunk
    # files of $filedensity lines each under lists_<density>_per_file/, and
    # record every chunk path in the <stream>_density_<density>.list lookup
    # files consumed by make_condor_jobs.  Re-chunks only when forced
    # (-c / -g) or when no chunking exists yet for this density.
    # Globals read: triggertype, filedensity, forcechunk, verbose, superverbose
    base_dir=$(pwd)
    cd ${triggertype}_data || return 1
    if [ "$verbose" = true ]; then
        echo "Checking if lookup file and file list exist for a per job density of ${filedensity} exists"
    fi
    listdir=lists_${filedensity}_per_file
    if [ ! -d ${listdir} ]; then
        mkdir -p $listdir
    fi
    # A missing lookup file means this density was never chunked: force it.
    if [ ! -f truth_density_${filedensity}.list ]; then
        forcechunk=true
    fi
    if [ "$forcechunk" = true ]; then
        if [ "$verbose" = true ]; then
            echo "Creating file lists and lookup files"
        fi
        # Start from a clean slate when re-chunking.
        if [ -f truth_density_${filedensity}.list ]; then
            rm truth_density_${filedensity}.list
            rm jet_density_${filedensity}.list
            rm calo_density_${filedensity}.list
            rm global_density_${filedensity}.list
            rm -f ${listdir}/*
        fi
        touch truth_density_${filedensity}.list
        touch jet_density_${filedensity}.list
        touch calo_density_${filedensity}.list
        touch global_density_${filedensity}.list

        nChunks=0
        Nseg=$(wc -l < truth.list)
        nSegsUsed=0
        while [ $nSegsUsed -le $Nseg ]; do
            # BUG FIX: segment labels are now derived from nChunks instead of
            # nSegsUsed (which overshoots by one per chunk), and the inner
            # break compares against nStop+1 (nStop is a 0-based segment
            # index, j a 1-based line number) so the last segment of each
            # chunk is no longer dropped — the original silently skipped
            # every filedensity-th input line (25, 50, ...).
            segStart=$(( nChunks * filedensity ))       # first 0-based segment of this chunk
            nStop=$(( segStart + filedensity - 1 ))     # last 0-based segment of this chunk
            truthChunk=$(pwd)/${listdir}/truth_seg_${segStart}_to_${nStop}.list
            jetChunk=$(pwd)/${listdir}/jet_seg_${segStart}_to_${nStop}.list
            caloChunk=$(pwd)/${listdir}/calo_seg_${segStart}_to_${nStop}.list
            globalChunk=$(pwd)/${listdir}/global_seg_${segStart}_to_${nStop}.list
            if [ "$superverbose" = true ]; then
                echo "Building the following files"
                echo " Truth data (g4Hits): " $truthChunk
                echo " Jet data : " $jetChunk
                echo " Calo data : " $caloChunk
                echo " Global data: " $globalChunk
            fi
            touch ${truthChunk}
            touch ${jetChunk}
            touch ${caloChunk}
            touch ${globalChunk}

            echo ${truthChunk} >> truth_density_${filedensity}.list
            echo ${jetChunk} >> jet_density_${filedensity}.list
            echo ${caloChunk} >> calo_density_${filedensity}.list
            echo ${globalChunk} >> global_density_${filedensity}.list
            line0=$(( nChunks * filedensity ))
            for i in $(seq 0 $filedensity); do
                j=$(( line0 + i + 1 ))        # 1-based line number in the stream lists
                nSegsUsed=$(( nSegsUsed + 1 ))
                # Stop at end of input or past this chunk's last line.
                if [[ $j -gt $Nseg || $j -gt $(( nStop + 1 )) ]]; then
                    break
                fi
                if [ "$superverbose" = true ]; then
                    echo "Looking at line " $j
                fi
                truth=$(sed "${j}q;d" truth.list)
                jet=$(sed "${j}q;d" jet.list)
                calo=$(sed "${j}q;d" calo.list)
                global=$(sed "${j}q;d" global.list)

                echo ${truth} >> ${truthChunk}
                echo ${jet} >> ${jetChunk}
                echo ${calo} >> ${caloChunk}
                echo ${global} >> ${globalChunk}
            done
            nChunks=$(( nChunks + 1 ))
            if [[ $nSegsUsed -gt $Nseg ]]; then
                break
            fi
        done
    fi
    cd ${base_dir} || return 1
}
submit_condor_jobs(){

# Submit every generated condor job file for this trigger type.
# Globals read: nfiles, triggertype
for (( n = 0; n < nfiles; n++ )); do
    job_path="$(pwd)/condor_file_dir/condor_${triggertype}_seg_${n}.job"
    condor_submit "$job_path"
done
}
0204
has_argument(){
# True when the current option carries a value: either inline
# ("--opt=value" with a non-empty value) or as the following word
# ("--opt value", where the next word does not itself look like an option).
# NOTE: the original line was truncated/corrupted; reconstructed from the
# standard bash option-parsing idiom it was clearly based on.
[[ ("$1" == *=* && -n ${1#*=}) || ( -n "$2" && "$2" != -* ) ]]
}
0208
extract_argument() {
    # Echo the value of an option: prefer a separate second word,
    # otherwise strip everything through the first '=' in the first word.
    local inline_value="${1#*=}"
    echo "${2:-$inline_value}"
}
0212
handle_options()
{
    # Parse command-line options into the script's global flags/settings.
    # Exits 0 after printing help, exits 1 on an unknown option.
    # NOTE: the original "while" line was truncated ("while [ $"); restored.
    while [ $# -gt 0 ]; do
        case $1 in
            -h | --help)
                echo "Options for MC running of Vandy Skimmer"
                echo "$0 [OPTIONS]"
                echo "This script run the vandy skimmer over MC files"
                echo " "
                echo " -h, --help Display this message"
                echo " -v, --verbose Enable verbose job creation (Default false) "
                echo " -s, --submit Submit condor jobs (default false)"
                echo " -g, --get Makes the filelist for selected sample before running (default false)"
                echo " -c, --chunk Forces the recreation of the data chunks (default off, on if -g is called"
                echo " "
                echo " -t, --type Which trigger type "
                echo " MB (default)"
                echo " Jets:Jet5, Jet15, Jet20, Jet30, Jet40, Jet50"
                echo " PhotonJets: PhotonJet5, PhotonJet10"
                echo " "
                echo " -G, --generator Which generator "
                echo " Pythia (default), Herwig "
                echo " "
                echo " -n, --number How many files per chunk (default 25, 0 is all) "
                echo " "
                echo " -j, -jobs How many chunks/jobs to submit/make (default 100, 0 is all)"
                echo " "
                exit 0
                ;;
            -v | --verbose)
                verbose=true
                shift
                ;;
            -V )
                superverbose=true
                verbose=true
                shift
                ;;
            -s | --submit)
                dosubmit=true
                shift
                ;;
            -g | --get)
                makedatalist=true
                forcechunk=true
                shift
                ;;
            -c | --chunk)
                forcechunk=true
                shift
                ;;
            -t | --type)
                triggertype=$(extract_argument "$@")
                # BUG FIX: "--opt=value" is one word, "--opt value" is two;
                # the original always shifted twice, silently eating the next
                # option whenever the "=" form was used.
                if [[ "$1" == *=* ]]; then shift; else shift; shift; fi
                ;;
            -n | --number)
                filedensity=$(extract_argument "$@")
                if [[ "$1" == *=* ]]; then shift; else shift; shift; fi
                ;;
            -G | --generator)
                gen=$(extract_argument "$@")
                if [[ "$1" == *=* ]]; then shift; else shift; shift; fi
                ;;
            -o | --outdir)
                outDir=$(extract_argument "$@")
                if [[ "$1" == *=* ]]; then shift; else shift; shift; fi
                ;;
            -j | --jobs)
                nfiles=$(extract_argument "$@")
                if [ "$verbose" = true ]; then
                    echo "Will submit ${nfiles} jobs with ${filedensity} segment(s) each"
                fi
                if [[ "$1" == *=* ]]; then shift; else shift; shift; fi
                ;;
            *)
                echo "Invalid option: $1 "
                exit 1
                ;;
        esac
    done
}
converttriggertype()
{
    # Map the human-readable trigger/sample name onto the numeric production
    # type code passed to CreateFileList.pl (-type).  An unknown name leaves
    # prodtype at its current value (default set at the top of the script).
    case "${triggertype}" in
        MB)          prodtype=26 ;;
        Jet5)        prodtype=36 ;;
        Jet10)       prodtype=12 ;;
        Jet12)       prodtype=39 ;;
        Jet15)       prodtype=33 ;;
        Jet20)       prodtype=21 ;;
        Jet30)       prodtype=11 ;;
        Jet40)       prodtype=19 ;;
        Jet50)       prodtype=34 ;;
        Jet60)       prodtype=38 ;;
        PhotonJet5)  prodtype=27 ;;
        PhotonJet10) prodtype=28 ;;
        # NOTE(review): the original had a second, unreachable "PhotonJet10"
        # branch setting prodtype=29 — presumably meant PhotonJet20.
        # Confirm against the production table before enabling:
        # PhotonJet20) prodtype=29 ;;
    esac
}
# ---- main driver -----------------------------------------------------------
handle_options "$@"
make_home_dir
set_out_dir
if [[ "$verbose" == true ]]; then
    echo "Running over ${nfiles} segement(s)"
fi
if [[ "$makedatalist" == true ]]; then
    get_dst_list
fi
chunk_dst_list
make_condor_jobs
if [[ "$dosubmit" == true ]]; then
    submit_condor_jobs
fi
0343