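"""
Fetch per-run timestamps and an average GL1 scaler rate from the sPHENIX DAQ
database and write them to a CSV file.

For each run number listed in runList.txt (one per line), the begin and end
timestamps are read from the run table and the raw GL1 scaler count at
index 10 from the gl1_scalers table. The average rate is computed as the raw
count divided by the run duration in seconds, and the run number, begin
timestamp, and average rate are written to run_timestamps_with_rate.csv.
Runs with missing data fall back to zero values.
"""
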
import psycopg2
import csv

def get_run_data(run_number, conn):
    # Prepare a cursor
    cursor = conn.cursor()

    # Prepare the query to fetch the timestamps for a specific run_number
    query1 = """
    SELECT runnumber, brtimestamp, ertimestamp
    FROM run
    WHERE runnumber = %s
    ORDER BY runnumber DESC;
    """
    cursor.execute(query1, (run_number,))
    timestamp = cursor.fetchone()  # Get the run number and timestamp data

    # Prepare the query to fetch the raw GL1 scaler count (index 10) for the same run_number
    query2 = """
    SELECT runnumber, raw
    FROM gl1_scalers
    WHERE runnumber = %s
    AND index = 10
    ORDER BY runnumber DESC;
    """
    cursor.execute(query2, (run_number,))
    rate_value = cursor.fetchone()  # Get the run number and raw scaler count

    # Close the cursor
    cursor.close()

    return timestamp, rate_value

def main():
    # Read run numbers from runList.txt, skipping blank lines
    with open('runList.txt', 'r') as f:
        run_list = [line.strip() for line in f if line.strip()]

    # Connect to the PostgreSQL database
    conn = psycopg2.connect(
        host="sphnxdaqdbreplica",
        database="daq"
    )

    # Initialize an empty list to store the merged data
    merged_data = []

    # Loop through each run number in the run_list
    for run_number in run_list:
        # Get run data (timestamps and raw scaler count) for the current run_number
        timestamp, rate_value = get_run_data(run_number, conn)

        # Initialize default values
        begin_time = 0
        end_time = 0
        rate_avg_value = 0

        # Fall back to default values if the timestamp query returned nothing
        if timestamp is None:
            print(f"Run {run_number} has missing timestamp data. Setting default timestamp values.")
        else:
            begin_time, end_time = timestamp[1], timestamp[2]

        if begin_time is None or end_time is None:
            print(f"Run {run_number} has missing timestamp. Setting default timestamp values.")
            begin_time = 0
            end_time = 0

        # Calculate the duration (in seconds) as a float, handling invalid times
        if begin_time != 0 and end_time != 0:
            duration = (end_time - begin_time).total_seconds()
        else:
            duration = 0  # Set to 0 if there's an issue with timestamps

        # Average rate = raw scaler count divided by the run duration in seconds
        if rate_value is None or rate_value[1] is None:
            print(f"Run {run_number} has missing rate data. Setting default rate value to 0.")
            rate_avg_value = 0
        else:
            rate_avg_value = rate_value[1] / duration if duration > 0 else 0

        # Append the result for this run
        merged_data.append([run_number, begin_time, rate_avg_value])

    # Write the results to a CSV file
    with open('run_timestamps_with_rate.csv', 'w', newline='') as csvfile:
        csvwriter = csv.writer(csvfile)
        # Write the header (the timestamp column holds the run begin time)
        csvwriter.writerow(['runnumber', 'timestamp', 'rate_avg_value'])
        # Write the merged data
        for row in merged_data:
            csvwriter.writerow(row)

    # Close the database connection
    conn.close()

if __name__ == "__main__":
    main()