Skip to content

Commit

Permalink
Merge pull request #328 from wuctlby/tool
Browse files Browse the repository at this point in the history
download train output by run list and compute resolution
  • Loading branch information
stefanopolitano authored Dec 6, 2024
2 parents 4f1d84b + db7fc83 commit 0fd31ff
Show file tree
Hide file tree
Showing 4 changed files with 328 additions and 0 deletions.
23 changes: 23 additions & 0 deletions run3/tool/download.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import os
import argparse

# Set up argument parser
parser = argparse.ArgumentParser(description='Merge files from input directories.')
parser.add_argument('--target_dir', type=str, default='./inputs/', help='Target directory for merged files')
parser.add_argument('--input_dirs', nargs='+', required=True, help='List of input directories')
parser.add_argument('--suffix', nargs='+', required=True, help='List of suffixes for each input directory')
parser.add_argument('--file_to_merge', nargs='+', default=['AnalysisResults'], help='List of files to merge')

args = parser.parse_args()

# Extract arguments
target_dir = args.target_dir
input_dirs = args.input_dirs
suffix = args.suffix
file_to_merge = args.file_to_merge

# Loop over the input directories and suffixes
for i, (input_dir, suf) in enumerate(zip(input_dirs, suffix)):
train_number = input_dir.split('/')[-2]
for file in file_to_merge:
os.system(f'alien.py cp -T 64 alien://{input_dir}/{file}.root file:{target_dir}/{file}_{suf}.root')
113 changes: 113 additions & 0 deletions run3/tool/input_data_reso.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001556
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001555
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001554
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001553
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001552
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001551
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001550
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001549
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001548
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001547
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001546
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001545
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001544
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001543
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001542
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001540
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001539
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001538
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001537
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001536
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001534
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001533
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001532
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001531
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001530
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001529
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001528
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001527
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001526
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001525
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001524
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001523
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001522
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001521
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001520
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001519
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001518
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001517
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001516
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001515
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001514
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001513
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001512
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001511
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001510
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001509
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001507
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001505
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001503
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001501
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001457
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001456
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001455
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001453
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001452
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001451
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001450
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001449
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001448
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001447
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001446
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001445
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001444
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001443
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001442
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001441
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001440
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001439
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001437
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001436
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001435
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001434
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001433
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001432
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001431
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001430
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001429
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001428
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001427
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001426
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001425
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001424
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001423
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001422
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001421
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001420
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001419
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001418
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001417
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001416
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001415
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001414
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001413
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001412
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001411
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001410
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001409
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001408
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001407
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001406
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001405
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001404
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001403
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001402
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001401
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001400
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001399
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001398
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001397
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001396
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001395
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001394
/alice/cern.ch/user/a/alihyperloop/jobs/0100/hy_1001393
113 changes: 113 additions & 0 deletions run3/tool/run_label.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
545367
545345
545332
545312
545311
545296
545295
545294
545291
545289
545262
545249
545246
545223
545222
545210
545185
545184
545171
545117
545103
545086
545066
545064
545063
545062
545060
545047
545044
545042
545041
545009
545008
545004
544992
544991
544968
544964
544963
544961
544947
544931
544917
544914
544913
544911
544896
544887
544886
544868
544813
544797
544795
544794
544767
544754
544742
544739
544696
544694
544693
544692
544674
544672
544653
544652
544640
544614
544585
544583
544582
544580
544568
544567
544565
544564
544551
544550
544549
544548
544518
544515
544514
544512
544511
544510
544508
544492
544491
544490
544477
544476
544475
544474
544454
544451
544392
544391
544390
544389
544185
544184
544124
544123
544122
544121
544116
544098
544095
544091
544032
544028
544013
79 changes: 79 additions & 0 deletions run3/tool/run_reso_runspecific.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
#!/bin/bash

# Download hyperloop train outputs listed in input_data_reso.txt and compute
# the resolution for each one, labelling results with the run number taken
# from the same line of run_label.txt. Jobs run concurrently up to MAX_JOBS.

# Default parameters
TARGET_DIR="./inputs/"
OUTPUT_BASE_DIR="./output_reso"
INPUT_FILE="input_data_reso.txt" # File with input directories (one per line)
RUN_LABEL="run_label.txt" # File with corresponding suffixes for each input directory
FILES_TO_MERGE=("AnalysisResults")
CURRENT_DIR=$(pwd)
MAX_JOBS=4 # Limit the number of concurrent jobs

# Abort early if either list file is missing
if [[ ! -f "$INPUT_FILE" ]]; then
echo "Error: Input file $INPUT_FILE not found."
exit 1
fi
if [[ ! -f "$RUN_LABEL" ]]; then
echo "Error: Run label file $RUN_LABEL not found."
exit 1
fi

# Read both files into arrays. Strip CR characters (so CRLF-saved files do
# not corrupt the suffix/path) and drop blank lines (which would otherwise
# create empty entries and break the one-to-one pairing below).
mapfile -t INPUT_DIRS < <(tr -d '\r' < "$INPUT_FILE" | grep -v '^[[:space:]]*$')
mapfile -t RUN_SUFFIXES < <(tr -d '\r' < "$RUN_LABEL" | grep -v '^[[:space:]]*$')

# The two files must stay in lockstep: line i of one labels line i of the other
if [[ ${#INPUT_DIRS[@]} -ne ${#RUN_SUFFIXES[@]} ]]; then
echo "Error: Mismatch between number of input directories and run labels."
exit 1
fi

# Create the output directory
mkdir -p "$OUTPUT_BASE_DIR"

# Download and process a single input directory.
#   $1 = alien input directory
#   $2 = run-number suffix used to name the downloaded/output files
process_input_dir() {
local input_dir="$1"
local suffix="$2"

# Step 1: Download the file for the current input directory
python3 download.py \
--target_dir "$TARGET_DIR" \
--input_dirs "$input_dir" \
--suffix "$suffix" \
--file_to_merge "${FILES_TO_MERGE[@]}"

# Path to the downloaded AnalysisResults file
local analysis_results_path="$CURRENT_DIR/inputs/AnalysisResults_${suffix}.root"

# Skip the resolution step if the download did not produce the file,
# instead of handing compute_reso.py a nonexistent path.
if [[ ! -f "$analysis_results_path" ]]; then
echo "Warning: missing $analysis_results_path, skipping resolution for suffix $suffix."
return 1
fi

# Step 2: Run the resolution
python3 compute_reso.py "$analysis_results_path" -c "k0100" -o "$OUTPUT_BASE_DIR" -s "$suffix"

# Step 3: (Eventually) Delete the original AnalysisResults file after use
# rm "$analysis_results_path"
echo "Processed and saved output for suffix $suffix."

# Step 4: Process all reso together (TODO: not implemented yet)

}

# Fan out over all (directory, suffix) pairs, at most MAX_JOBS at a time
for i in "${!INPUT_DIRS[@]}"; do
input_dir="${INPUT_DIRS[$i]}"
suffix="${RUN_SUFFIXES[$i]}"

process_input_dir "$input_dir" "$suffix" & # Start the job in the background

# Throttle: wait while the number of running jobs is at the cap
while (( $(jobs -r -p | wc -l) >= MAX_JOBS )); do
sleep 1 # Wait for some jobs to finish
done
done

# Wait for all background jobs to finish
wait

echo "All outputs saved in $OUTPUT_BASE_DIR."

0 comments on commit 0fd31ff

Please sign in to comment.