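# Helper script for the sPHENIX LEDTowerBuilder analysis workflow:
#   create  - build per-run file lists of prdf files found under the prdf directory
#   run     - count events with dpipe and print the Fun4All_LEDTowerBuilder commands for a file list
#   evtDisp - extract a single event from a prdf and build an event-display json
#
# Example usage (script name and run number are illustrative):
#   python led_analysis.py create -i 23038
#   python led_analysis.py run -i files/file-list-23038.txt -n 5000
#   python led_analysis.py evtDisp -i <prdf file> -r 23038 -n 1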
import numpy as np
import subprocess
import argparse
import os

parser = argparse.ArgumentParser()
subparser = parser.add_subparsers(dest='command')

create = subparser.add_parser('create', help='Create file lists.')
run = subparser.add_parser('run', help='Run LEDTowerBuilder on the given file list.')
evtDisp = subparser.add_parser('evtDisp', help='Create an event display (json) given a prdf and an event number.')

create.add_argument('-i', '--run-list', type=str, nargs='+', help='List of run numbers.')
create.add_argument('-p', '--prdf-dir', type=str, default='/direct/sphenix+lustre01/sphnxpro/rawdata/commissioning/emcal/calib', help='Directory containing the prdf files. Default: /direct/sphenix+lustre01/sphnxpro/rawdata/commissioning/emcal/calib')
create.add_argument('-o', '--output-dir', type=str, default='files', help='Directory to store the file lists. Default: files')

run.add_argument('-i', '--file-list', type=str, help='File list containing prdfs to analyze.', required=True)
run.add_argument('-n', '--nevents', type=int, default=-1, help='Number of events to analyze. Default: -1 (analyze all)')
run.add_argument('-s', '--skip', type=int, default=0, help='Number of events to skip. Default: 0 (no skip)')
run.add_argument('-o', '--output', type=str, default='data/LEDTowerBuilder.root', help='Output root file. Default: data/LEDTowerBuilder.root')
run.add_argument('-m', '--max', type=int, default=10000, help='Maximum number of events to analyze at once. Default: 10000')

evtDisp.add_argument('-i', '--prdf', type=str, help='Prdf to analyze.', required=True)
evtDisp.add_argument('-r', '--run', type=str, help='Run number.', required=True)
evtDisp.add_argument('-n', '--event', type=str, default='1', help='Event number to use. Default: 1.')
evtDisp.add_argument('-o', '--output', type=str, help='Output json file.')

args = parser.parse_args()

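# Build one file list per run by searching the prdf directory with fd and
# writing the matching paths to <output-dir>/file-list-<run>.txt.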
def create_file_list():
    run_list = args.run_list
    prdf_dir = os.path.abspath(args.prdf_dir)
    output_dir = os.path.abspath(args.output_dir)

    print(f'run list: {run_list}')
    print(f'prdf dir: {prdf_dir}')
    print(f'output dir: {output_dir}')

    # Make sure the output directory exists before writing the file lists.
    os.makedirs(output_dir, exist_ok=True)

    for run in run_list:
        print(f'run: {run}')

        # Find all prdf files whose path matches the run number.
        result = subprocess.run(['fd', run, prdf_dir], stdout=subprocess.PIPE, text=True)
        print(result.stdout)

        with open(f'{output_dir}/file-list-{run}.txt', mode='w') as fw:
            fw.write(result.stdout)

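# Count the events available in the prdfs with dpipe, then print the
# Fun4All_LEDTowerBuilder command(s) needed to analyze them, splitting the
# work into chunks of at most --max events and printing an hadd command to
# merge the partial outputs.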
def run_analysis():
    file_list = os.path.abspath(args.file_list)
    nevents = args.nevents
    output = args.output
    skip = args.skip
    max_events_per_run = args.max

    print(f'file list: {file_list}')
    print(f'output: {output}')

    total_events = 0

    # Count the events in each prdf by piping the dpipe event dump through wc -l.
    with open(file_list) as f:
        for line in f:
            line = line.strip()
            command = f'dpipe -d n -s f -i {line}'
            dpipe_process = subprocess.run(command.split(), stdout=subprocess.PIPE)
            events = subprocess.run(['wc', '-l'], input=dpipe_process.stdout, stdout=subprocess.PIPE)
            events = int(events.stdout.decode('utf-8'))

            total_events += events
            print(f'prdf: {line}, events: {events}')
            # Stop counting once enough events are available for the request.
            if nevents >= 0 and total_events > nevents + skip:
                break

    nevents = total_events if nevents == -1 else nevents
    print(f'total events: {total_events}')
    if nevents != total_events:
        print(f'events to analyze: {nevents}')
    print(f'skip: {skip}')

    # Split the work into chunks of at most max_events_per_run events.
    runs = int(np.ceil((nevents + skip) / max_events_per_run))
    max_events_per_run = min(max_events_per_run, nevents)

    log = os.path.basename(output).split('.')[0]
    output_dir = os.path.dirname(output)
    process_events = max_events_per_run

    if runs > 1:
        print(f'Runs: {runs}')
        print(f'Max events per run: {max_events_per_run}')

        merge_files = []
        for i in range(runs):
            print(f'Run: {i}')
            command = f'./bin/Fun4All_LEDTowerBuilder {process_events} {skip} {file_list} {output_dir}/test-{i}.root &> {output_dir}/log/log-test-{i}.txt &'
            print(command)
            skip += max_events_per_run
            process_events = min(max_events_per_run, nevents - (i + 1) * max_events_per_run)
            merge_files.append(f'{output_dir}/test-{i}.root')

        # Print the hadd command needed to merge the partial outputs.
        print('hadd command:')
        merge_files = ' '.join(merge_files)
        print(f'hadd -n 50 {output} {merge_files}')

    else:
        command = f'./bin/Fun4All_LEDTowerBuilder {process_events} {skip} {file_list} {output} &> {output_dir}/log/log-{log}.txt &'
        print(command)
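# Extract a single event from the prdf with eventcombiner, run
# Fun4All_LEDTowerBuilder on it, and convert the resulting calorimeter
# outputs into an event-display json with bin/event-display.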
def event_display():
    prdf_input = os.path.abspath(args.prdf)
    run = args.run
    event = args.event
    output = args.output

    if output is None:
        output = f'event-display/run-{run}-event-{event}.json'

    print(f'prdf: {prdf_input}')
    print(f'run: {run}')
    print(f'event: {event}')
    print(f'output: {output}')

    # Pull the requested event out of the prdf into a small temporary prdf.
    command = f'eventcombiner -v -i -e {event} -n 1 -f -p data/temp/test-{run}-{event}.prdf {prdf_input}'
    print(f'command: {command}')
    subprocess.run(command.split())

    # Write a one-line file list pointing at the temporary prdf.
    with open('files/test.txt', 'w') as f:
        f.write(f'data/temp/test-{run}-{event}.prdf\n')

    # Run the tower builder on the single extracted event.
    command = 'bin/Fun4All_LEDTowerBuilder 1 files/test.txt data/temp test'
    print(f'command: {command}')
    subprocess.run(command.split())

    # Build the event-display json from the CEMC and HCal outputs.
    command = f'bin/event-display {run} {event} data/temp/test-CEMC.root {output} data/temp/test-HCALIN.root data/temp/test-HCALOUT.root'
    print(f'command: {command}')
    subprocess.run(command.split())

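# Dispatch to the selected subcommand.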
if __name__ == '__main__':
    if args.command == 'create':
        create_file_list()
    elif args.command == 'run':
        run_analysis()
    elif args.command == 'evtDisp':
        event_display()
    else:
        parser.print_help()