From 72cdd357bcb52c469cad6ad5e59ed0858b53b288 Mon Sep 17 00:00:00 2001 From: Raniere Date: Tue, 26 Sep 2023 15:00:29 +0200 Subject: [PATCH 01/76] DL0 to DL1 reduction --- magicctapipe/scripts/lst1_magic/LST_runs.txt | 10 + .../scripts/lst1_magic/MAGIC_runs.txt | 30 ++ magicctapipe/scripts/lst1_magic/README.md | 268 ++++++++++- .../scripts/lst1_magic/coincident_events.py | 177 +++++++ .../scripts/lst1_magic/config_general.yaml | 26 + .../lst1_magic_event_coincidence.py | 312 +++++------- .../lst1_magic/lst1_magic_mc_dl0_to_dl1.py | 340 +++++++------ .../lst1_magic/lst1_magic_stereo_reco.py | 81 ++-- .../scripts/lst1_magic/magic_calib_to_dl1.py | 38 +- .../scripts/lst1_magic/merge_hdf_files.py | 9 +- ...ing_runs_and_splitting_training_samples.py | 269 +++++++++++ .../lst1_magic/setting_up_config_and_dir.py | 451 ++++++++++++++++++ .../scripts/lst1_magic/stereo_events.py | 183 +++++++ 13 files changed, 1800 insertions(+), 394 deletions(-) create mode 100644 magicctapipe/scripts/lst1_magic/LST_runs.txt create mode 100644 magicctapipe/scripts/lst1_magic/MAGIC_runs.txt create mode 100644 magicctapipe/scripts/lst1_magic/coincident_events.py create mode 100644 magicctapipe/scripts/lst1_magic/config_general.yaml create mode 100644 magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py create mode 100644 magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py create mode 100644 magicctapipe/scripts/lst1_magic/stereo_events.py diff --git a/magicctapipe/scripts/lst1_magic/LST_runs.txt b/magicctapipe/scripts/lst1_magic/LST_runs.txt new file mode 100644 index 000000000..53b010ff0 --- /dev/null +++ b/magicctapipe/scripts/lst1_magic/LST_runs.txt @@ -0,0 +1,10 @@ +2020_11_18,2923 +2020_11_18,2924 +2020_12_07,3093 +2020_12_07,3094 +2020_12_07,3095 +2020_12_07,3096 +2020_12_15,3265 +2020_12_15,3266 +2020_12_15,3267 +2020_12_15,3268 diff --git a/magicctapipe/scripts/lst1_magic/MAGIC_runs.txt b/magicctapipe/scripts/lst1_magic/MAGIC_runs.txt new file mode 
100644 index 000000000..ce49672b0 --- /dev/null +++ b/magicctapipe/scripts/lst1_magic/MAGIC_runs.txt @@ -0,0 +1,30 @@ +2020_11_19,5093174 +2020_11_19,5093175 +2020_12_08,5093491 +2020_12_08,5093492 +2020_12_16,5093711 +2020_12_16,5093712 +2020_12_16,5093713 +2020_12_16,5093714 +2021_02_14,5094483 +2021_02_14,5094484 +2021_02_14,5094485 +2021_02_14,5094486 +2021_02_14,5094487 +2021_02_14,5094488 +2021_03_16,5095265 +2021_03_16,5095266 +2021_03_16,5095267 +2021_03_16,5095268 +2021_03_16,5095271 +2021_03_16,5095272 +2021_03_16,5095273 +2021_03_16,5095277 +2021_03_16,5095278 +2021_03_16,5095281 +2021_03_18,5095376 +2021_03_18,5095377 +2021_03_18,5095380 +2021_03_18,5095381 +2021_03_18,5095382 +2021_03_18,5095383 diff --git a/magicctapipe/scripts/lst1_magic/README.md b/magicctapipe/scripts/lst1_magic/README.md index da9c695ca..3fb304f8c 100644 --- a/magicctapipe/scripts/lst1_magic/README.md +++ b/magicctapipe/scripts/lst1_magic/README.md @@ -1,12 +1,12 @@ -# Script for MAGIC and MAGIC+LST-1 analysis +# Script for MAGIC and MAGIC+LST analysis -This folder contains scripts to perform MAGIC-only or MAGIC+LST-1 analysis. +This folder contains scripts to perform MAGIC-only or MAGIC+LST analysis. Each script can be called from the command line from anywhere in your system (some console scripts are created during installation). Please run them with `-h` option for the first time to check what are the options available. ## MAGIC-only analysis -MAGIC-only analysis starts from MAGIC calibrated data (\_Y\_ files). The analysis flow is as following: +MAGIC-only analysis starts from MAGIC-calibrated data (\_Y\_ files). The analysis flow is as follows: - `magic_calib_to_dl1.py` on real and MC data (if you use MCs produced with MMCS), to convert them into DL1 format - if you use SimTelArray MCs, run `lst1_magic_mc_dl0_to_dl1.py` over them to convert them into DL1 format @@ -17,9 +17,9 @@ MAGIC-only analysis starts from MAGIC calibrated data (\_Y\_ files). 
The analysi - `lst1_magic_create_irf.py` to create the IRF (use `magic_stereo` as `irf_type` in the configuration file) - `lst1_magic_dl2_to_dl3.py` to create DL3 files, and `create_dl3_index_files.py` to create DL3 HDU and index files -## MAGIC+LST-1 analysis +## MAGIC+LST analysis: overview -MAGIC+LST-1 analysis starts from MAGIC calibrated data (\_Y\_ files), LST-1 DL1 data and SimTelArray DL0 data. The analysis flow is as following: +MAGIC+LST analysis starts from MAGIC calibrated data (\_Y\_ files), LST DL1 data and SimTelArray DL0 data. The analysis flow is as following: - `magic_calib_to_dl1.py` on real MAGIC data, to convert them into DL1 format - `lst1_magic_mc_dl0_to_dl1.py` over SimTelArray MCs to convert them into DL1 format @@ -31,6 +31,260 @@ MAGIC+LST-1 analysis starts from MAGIC calibrated data (\_Y\_ files), LST-1 DL1 - `lst1_magic_create_irf.py` to create the IRF - `lst1_magic_dl2_to_dl3.py` to create DL3 files, and `create_dl3_index_files.py` to create DL3 HDU and index files -## High level analysis +## MAGIC+LST analysis: data reduction tutorial (PRELIMINARY) -The folder [Notebooks](https://github.com/cta-observatory/magic-cta-pipe/tree/master/notebooks) contains Jupyter notebooks to perform checks on the IRF, to produce theta2 plots and SEDs. Note that the notebooks run with gammapy v0.20 or higher, therefore another conda environment is needed to run them, since the MAGIC+LST-1 pipeline at the moment depends on v0.19. +1) The very first step to reduce MAGIC-LST data is to have remote access/credentials to the IT Container, so provide one. Once you have it, the connection steps are the following: + +Authorized institute server (Client) → ssh connection to CTALaPalma → ssh connection to cp01/02 + +2) Once connected to the IT Container, install MAGIC-CTA-PIPE (e.g. 
in your home directory in the IT Container) following the tutorial here: https://github.com/ranieremenezes/magic-cta-pipe + +3) Do not forget to open the magic-lst environment with the command `conda activate magic-lst` before starting the analysis + +### DL0 to DL1 + +In this step, we will convert the MAGIC and Monte Carlo (MC) Data Level (DL) 0 to DL1 (our goal is to reach DL3). + +Now copy all the python scripts available here to your preferred directory (e.g. /fefs/aswg/workspace/yourname/yourprojectname) in the IT Container, as well as the files `config_general.yaml`, `MAGIC_runs.txt` and `LST_runs.txt`. + +The file `config_general.yaml` must contain the telescope IDs and the directories with the MC data, as shown below: +``` +mc_tel_ids: + LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 + MAGIC-I: 2 + MAGIC-II: 3 + +directories: + workspace_dir : "/fefs/aswg/workspace/yourname/yourprojectname/" + target_name : "CrabTeste" + MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" + MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" + MC_helium : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Helium/sim_telarray/" + MC_protons : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/Protons/dec_2276/sim_telarray" + MC_gammadiff : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/GammaDiffuse/dec_2276/sim_telarray/" + +general: + SimTel_version: "v1.4" + focal_length : "effective" #effective #nominal + MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" + LST_runs : "LST_runs.txt" + proton_train : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest + +``` + +The file `MAGIC_runs.txt` looks like that: +``` +2020_11_19,5093174 +2020_11_19,5093175 +2020_12_08,5093491 +2020_12_08,5093492 +2020_12_16,5093711 +2020_12_16,5093712 +2020_12_16,5093713 +2020_12_16,5093714 +2021_02_14,5094483 +2021_02_14,5094484 +2021_02_14,5094485 +2021_02_14,5094486 
+2021_02_14,5094487 +2021_02_14,5094488 +2021_03_16,5095265 +2021_03_16,5095266 +2021_03_16,5095267 +2021_03_16,5095268 +2021_03_16,5095271 +2021_03_16,5095272 +2021_03_16,5095273 +2021_03_16,5095277 +2021_03_16,5095278 +2021_03_16,5095281 +2021_03_18,5095376 +2021_03_18,5095377 +2021_03_18,5095380 +2021_03_18,5095381 +2021_03_18,5095382 +2021_03_18,5095383 +``` + + +The columns here represent the night and run in which you want to select data. Please do not add blank spaces in the rows, as these names will be used to i) find the MAGIC data in the IT Container and ii) create the subdirectories in your working directory. If there is no MAGIC data, please fill this file with "0,0". Similarly, the `LST_runs.txt` file looks like this: + +``` +2020_11_18,2923 +2020_11_18,2924 +2020_12_07,3093 +2020_12_15,3265 +2020_12_15,3266 +2020_12_15,3267 +2020_12_15,3268 +2021_02_13,3631 +2021_02_13,3633 +2021_02_13,3634 +2021_02_13,3635 +2021_02_13,3636 +2021_03_15,4069 +2021_03_15,4070 +2021_03_15,4071 +2021_03_17,4125 +``` +Note that the LST nights appear as being one day before MAGIC's!!! This is because LST saves the date at the beginning of the night, while MAGIC saves it at the end. If there is no LST data, please fill this file with "0,0". These files are the only ones we need to modify in order to convert DL0 into DL1 data. + +In this analysis, we use a wobble of 0.4°. 
+ +To convert the MAGIC and SimTelArray MCs data into DL1 format, you first do the following: +> $ python setting_up_config_and_dir.py + +``` +***** Linking MC paths - this may take a few minutes ****** +*** Reducing DL0 to DL1 data - this can take many hours *** +Process name: yourprojectnameCrabTeste +To check the jobs submitted to the cluster, type: squeue -n yourprojectnameCrabTeste +``` +Note that this script can be run as +> $ python setting_up_config_and_dir.py --partial-analysis onlyMAGIC + +or + +> $ python setting_up_config_and_dir.py --partial-analysis onlyMC + +if you want to convert only MAGIC or only MC DL0 files to DL1, respectively. + + +The script `setting_up_config_and_dir.py` does a series of things: +- Creates a directory with your source name within the directory `yourprojectname` and several subdirectories inside it that are necessary for the rest of the data reduction. +- Generates a configuration file called config_step1.yaml with and telescope ID information and adopted imaging/cleaning cuts, and puts it in the directory created in the previous step. +- Links the MAGIC and MC data addresses to their respective subdirectories defined in the previous steps. +- Runs the scripts `lst1_magic_mc_dl0_to_dl1.py` and `magic_calib_to_dl1.py` for each one of the linked data files. + +In the file `config_general.yaml`, the sequence of telescopes is always LST1, LST2, LST3, LST4, MAGIC-I, MAGIC-II. So in this tutorial, we have +LST-1 ID = 1 +LST-2 ID = 0 +LST-3 ID = 0 +LST-4 ID = 0 +MAGIC-I ID = 2 +MAGIC-II ID = 3 +If the telescope ID is set to 0, this means that the telescope is not used in the analysis. + +You can check if this process is done by typing +> $ squeue -n yourprojectnameCrabTeste +or +> $ squeue -u your_user_name + +in the terminal. 
Once it is done, all of the subdirectories in `/fefs/aswg/workspace/yourname/yourprojectname/CrabTeste/DL1/` will be filled with files of the type `dl1_[...]_LST1_MAGIC1_MAGIC2_runXXXXXX.h5` for the MCs and `dl1_MX.RunXXXXXX.0XX.h5` for the MAGIC runs. The next step of the conversion of DL0 to DL1 is to split the DL1 MC proton sample into "train" and "test" datasets (these will be used later in the Random Forest event classification and to do some diagnostic plots) and to merge all the MAGIC data files such that in the end, we have only one datafile per night. To do so, we run the following script: + +> $ python merging_runs_and_splitting_training_samples.py + +``` +***** Splitting protons into 'train' and 'test' datasets... +***** Generating merge bashscripts... +***** Running merge_hdf_files.py in the MAGIC data files... +Process name: merging_CrabTeste +To check the jobs submitted to the cluster, type: squeue -n merging_CrabTeste +``` + +This script will slice the proton MC sample according to the entry "proton_train" in the "config_general.yaml" file, and then it will merge the MAGIC data files in the following order: +- MAGIC subruns are merged into single runs. +- MAGIC I and II runs are merged (only if both telescopes are used, of course). +- All runs in specific nights are merged, such that in the end we have only one datafile per night. +- Proton MC training data is merged. +- Proton MC testing data is merged. +- Diffuse MC gammas are merged. +- MC gammas are merged. 
+ +### Coincident events and stereo parameters on DL1 + +To find coincident events between MAGIC and LST, starting from DL1 data, we run the following script: + +> $ python coincident_events.py + +This script creates the file config_coincidence.yaml containing the telescope IDs and the following parameters: +``` +event_coincidence: + timestamp_type_lst: "dragon_time" # select "dragon_time", "tib_time" or "ucts_time" + window_half_width: "300 ns" + time_offset: + start: "-10 us" + stop: "0 us" +``` + +It then links the real LST data files to the output directory [...]DL1/Observations/Coincident, and runs the script lst1_magic_event_coincidence.py in all of them. + +Once it is done, we add stereo parameters to the MAGIC+LST coincident DL1 data by running: + +> $ python stereo_events.py + +This script creates the file config_stereo.yaml with the follwoing parameters: +``` +stereo_reco: + quality_cuts: "(intensity > 50) & (width > 0)" + theta_uplim: "6 arcmin" +``` + +It then creates the output directories for the DL1 with stereo parameters [...]DL1/Observations/Coincident_stereo/SEVERALNIGHTS and [...]/DL1/MC/GAMMAorPROTON/Merged/StereoMerged, and then runs the script lst1_magic_stereo_reco.py in all of the coincident DL1 files. The stereo DL1 files for MC and real data are then saved in these directories. + +### Random forest + +Once we have the DL1 stereo parameters for all real and MC data, we can train the Random Forest: + +> $ python RF.py + +This script creates the file config_RF.yaml with several parameters related to the energy regressor, disp regressor, and event classifier, and then computes the RF (energy, disp, and classifier) based on the merged-stereo MC diffuse gammas and training proton samples by calling the script lst1_magic_train_rfs.py. The results are saved in [...]/DL1/MC/RFs. 
+ +Once it is done, we can finally convert our DL1 stereo data files into DL2 by running: + +> $ python DL1_to_DL2.py + +This script runs `lst1_magic_dl1_stereo_to_dl2.py` on all DL1 stereo files, which applies the RFs saved in [...]/DL1/MC/RFs to stereo DL1 data (real and test MCs) and produces DL2 real and MC data. The results are saved in [...]/DL2/Observations and [...]/DL2/MC. + +### Instrument response function and DL3 + +Once the previous step is done, we compute the IRF with + +> $ python IRF.py + +which creates the configuration file config_IRF.yaml with several parameters. The main of which are shown below: + +``` +[...] +quality_cuts: "disp_diff_mean < 0.22" +event_type: "software" # select "software", "software_only_3tel", "magic_only" or "hardware" +weight_type_dl2: "simple" # select "simple", "variance" or "intensity" +[...] +gammaness: + cut_type: "dynamic" # select "global" or "dynamic" + [...] + +theta: + cut_type: "global" # select "global" or "dynamic" + global_cut_value: "0.2 deg" # used for the global cut + [...] +``` + +It then runs the script lst1_magic_create_irf.py over the DL2 MC gammas, generating the IRF and saving it at [...]/IRF. + +Optionally, but recommended, we can run the "diagnostic.py" script with: + +> $ python diagnostic.py + +This will create several diagnostic plots (gammaness, effective area, angular resolution, energy resolution, migration matrix, energy bias, and gamma-hadron classification comparisons. All of these plots will be saved in the directory defined on "target_name" in the config_general.yaml file. + +After the IRF, we run the DL2-to-DL3 conversion by doing: + +> $ python DL2_to_DL3.py + +which will save the DL3 files in the directory [...]/DL3. Finally, the last script to run is `create_dl3_index_files.py`. 
Since it is very fast, we can simply run it directly in the interactive mode by doing (remember that we must be in the magic-lst environment): + +> $ python create_dl3_index_files.py --input-dir ./CrabTeste/DL3 + +That's it. Now you can play with the DL3 data using the high-level notebooks. + +## High-level analysis + +Since the DL3 may have only a few MBs, it is typically convenient to download it to your own machine at this point. It will be necessary to have astropy and gammapy (version > 0.20) installed before proceeding. + +We prepared a [Jupyter Notebook](https://github.com/ranieremenezes/magic-cta-pipe/blob/master/magicctapipe/scripts/lst1_magic/SED_and_LC_from_DL3.ipynb) that quickly creates a counts map, a significance curve, an SED, and a light curve. You can give it a try. + +The folder [Notebooks](https://github.com/cta-observatory/magic-cta-pipe/tree/master/notebooks) contains Jupyter notebooks to perform checks on the IRF, to produce theta2 plots and SEDs. Note that the notebooks run with gammapy v0.20 or higher, while the gammapy version adopted in the MAGIC+LST-1 pipeline is v0.19. diff --git a/magicctapipe/scripts/lst1_magic/coincident_events.py b/magicctapipe/scripts/lst1_magic/coincident_events.py new file mode 100644 index 000000000..2bc4749f5 --- /dev/null +++ b/magicctapipe/scripts/lst1_magic/coincident_events.py @@ -0,0 +1,177 @@ +""" +This scripts facilitates the usage of the script +"lst1_magic_event_coincidence.py". This script is +more like a "maneger" that organizes the analysis +process by: +1) Creating the bash scripts for looking for +coincidence events between MAGIC and LST in each +night. +2) Creating the subdirectories for the coincident +event files. 
+ + +Usage: +$ python coincident_events.py + +""" + +import os +import numpy as np +import glob +import yaml +import logging +from pathlib import Path + +logger = logging.getLogger(__name__) +logger.addHandler(logging.StreamHandler()) +logger.setLevel(logging.INFO) + +def configfile_coincidence(ids, target_dir): + + """ + This function creates the configuration file needed for the event coincidence step + + Parameters + ---------- + ids: list + list of telescope IDs + target_dir: str + Path to the working directory + """ + + f = open(target_dir+'/config_coincidence.yaml','w') + f.write("mc_tel_ids:\n LST-1: "+str(ids[0])+"\n LST-2: "+str(ids[1])+"\n LST-3: "+str(ids[2])+"\n LST-4: "+str(ids[3])+"\n MAGIC-I: "+str(ids[4])+"\n MAGIC-II: "+str(ids[5])+"\n\n") + f.write('event_coincidence:\n timestamp_type_lst: "dragon_time" # select "dragon_time", "tib_time" or "ucts_time"\n window_half_width: "300 ns"\n') + f.write(' time_offset:\n start: "-10 us"\n stop: "0 us"\n') + f.close() + + +def linking_lst(target_dir, LST_runs): + + """ + This function links the LST data paths to the working directory. 
This is a preparation step required for running lst1_magic_event_coincidence.py + + Parameters + ---------- + target_dir: str + Path to the working directory + LST_runs: matrix of strings + This matrix is imported from config_general.yaml and tells the function where to find the LST data and link them to our working directory + """ + + + coincidence_DL1_dir = target_dir+"/DL1/Observations" + if not os.path.exists(coincidence_DL1_dir+"/Coincident"): + os.mkdir(f"{coincidence_DL1_dir}/Coincident") + + for i in LST_runs: + lstObsDir = i[0].split("_")[0]+i[0].split("_")[1]+i[0].split("_")[2] + inputdir = f'/fefs/aswg/data/real/DL1/{lstObsDir}/v0.9/tailcut84' + outputdir = f'{coincidence_DL1_dir}/Coincident/{lstObsDir}' + list_of_subruns = np.sort(glob.glob(f"{inputdir}/dl1*Run*{i[1]}*.*.h5")) + if os.path.exists(f"{outputdir}/list_LST.txt"): + with open(f"{outputdir}/list_LST.txt", "a") as LSTdataPathFile: + for subrun in list_of_subruns: + LSTdataPathFile.write(subrun+"\n") #If this files already exists, simply append the new information + else: + os.mkdir(outputdir) + f = open(f"{outputdir}/list_LST.txt", "w") #If the file list_LST.txt does not exist, it will be created here + for subrun in list_of_subruns: + f.write(subrun+"\n") + f.close() + + +def bash_coincident(target_dir): + + """ + This function generates the bashscript for running the coincidence analysis. 
+ + Parameters + ---------- + target_dir: str + Path to the working directory + """ + + process_name = target_dir.split("/")[-2:][1] + + listOfNightsLST = np.sort(glob.glob(target_dir+"/DL1/Observations/Coincident/*")) + listOfNightsMAGIC = np.sort(glob.glob(target_dir+"/DL1/Observations/Merged/Merged*")) + + for nightMAGIC,nightLST in zip(listOfNightsMAGIC,listOfNightsLST): + process_size = len(np.genfromtxt(nightLST+"/list_LST.txt",dtype="str")) - 1 + + f = open(f"LST_coincident_{nightLST.split('/')[-1]}.sh","w") + f.write("#!/bin/sh\n\n") + f.write("#SBATCH -p short\n") + f.write("#SBATCH -J "+process_name+"_coincidence\n") + f.write(f"#SBATCH --array=0-{process_size}%50\n") + f.write("#SBATCH -N 1\n\n") + f.write("ulimit -l unlimited\n") + f.write("ulimit -s unlimited\n") + f.write("ulimit -a\n\n") + + f.write(f"export INM={nightMAGIC}\n") + f.write(f"export OUTPUTDIR={nightLST}\n") + f.write("SAMPLE_LIST=($(<$OUTPUTDIR/list_LST.txt))\n") + f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") + f.write("export LOG=$OUTPUTDIR/coincidence_${SLURM_ARRAY_TASK_ID}.log\n") + f.write(f"conda run -n magic-lst python lst1_magic_event_coincidence.py --input-file-lst $SAMPLE --input-dir-magic $INM --output-dir $OUTPUTDIR --config-file {target_dir}/config_coincidence.yaml >$LOG 2>&1") + f.close() + + + +def main(): + + """ + Here we read the config_general.yaml file and call the functions defined above. 
+ """ + + + with open("config_general.yaml", "rb") as f: # "rb" mode opens the file in binary format for reading + config = yaml.safe_load(f) + + telescope_ids = list(config["mc_tel_ids"].values()) + target_dir = str(Path(config["directories"]["workspace_dir"]))+"/"+config["directories"]["target_name"] + + LST_runs_and_dates = config["general"]["LST_runs"] + LST_runs = np.genfromtxt(LST_runs_and_dates,dtype=str,delimiter=',') + + print("***** Generating file config_coincidence.yaml...") + print("***** This file can be found in ",target_dir) + configfile_coincidence(telescope_ids,target_dir) + + + print("***** Linking the paths to LST data files...") + linking_lst(target_dir, LST_runs) #linking the data paths to current working directory + + + print("***** Generating the bashscript...") + bash_coincident(target_dir) + + + print("***** Submitting processess to the cluster...") + print("Process name: "+target_dir.split("/")[-2:][1]+"_coincidence") + print("To check the jobs submitted to the cluster, type: squeue -n "+target_dir.split("/")[-2:][1]+"_coincidence") + + #Below we run the bash scripts to find the coincident events + list_of_coincidence_scripts = np.sort(glob.glob("LST_coincident*.sh")) + + for n,run in enumerate(list_of_coincidence_scripts): + if n == 0: + launch_jobs = f"coincidence{n}=$(sbatch --parsable {run})" + else: + launch_jobs = launch_jobs + f" && coincidence{n}=$(sbatch --parsable --dependency=afterany:$coincidence{n-1} {run})" + + #print(launch_jobs) + os.system(launch_jobs) + +if __name__ == "__main__": + main() + + + + + + + + diff --git a/magicctapipe/scripts/lst1_magic/config_general.yaml b/magicctapipe/scripts/lst1_magic/config_general.yaml new file mode 100644 index 000000000..dd2cfadde --- /dev/null +++ b/magicctapipe/scripts/lst1_magic/config_general.yaml @@ -0,0 +1,26 @@ +mc_tel_ids: + LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 + MAGIC-I: 2 + MAGIC-II: 3 + +directories: + workspace_dir : "/fefs/aswg/workspace/raniere/" + target_name : 
"CrabTeste" + MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" + MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" + MC_helium : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Helium/sim_telarray/" + MC_protons : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/Protons/dec_2276/sim_telarray" + MC_gammadiff : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/GammaDiffuse/dec_2276/sim_telarray/" + +general: + target_RA_deg : 83.633083 #RA in degrees + target_Dec_deg: 22.0145 #Dec in degrees + SimTel_version: "v1.4" + focal_length : "effective" #effective #nominal + MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" + LST_runs : "LST_runs.txt" + proton_train : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest + diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index 8f3fbf771..2cf258a2a 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -2,24 +2,24 @@ # coding: utf-8 """ -This script searches for coincident events from LST-1 and MAGIC joint +This script searches for coincident events from LST and MAGIC joint observation data offline using their timestamps. It applies the -coincidence window to LST-1 events, and checks the coincidence within +coincidence window to LST events, and checks the coincidence within the time offset region specified in the configuration file. Since the optimal time offset changes depending on the telescope distance along the pointing direction, it is recommended to input one subrun file for -LST-1 data, whose observation time is usually around 10 seconds so the +LST data, whose observation time is usually around 10 seconds so the change of the distance is negligible. 
The MAGIC standard stereo analysis discards the events when one of the telescope images cannot survive the cleaning or fail to compute the DL1 parameters. However, it's possible to perform the stereo analysis if -LST-1 sees these events. Thus, it checks the coincidence for each -telescope combination (i.e., LST1 + M1 and LST1 + M2) and keeps the +LST sees these events. Thus, it checks the coincidence for each +telescope combination (e.g., LST1 + M1 and LST1 + M2) and keeps the MAGIC events even if they do not have their MAGIC-stereo counterparts. -The MAGIC-stereo events, observed during the LST-1 observation time -period but not coincident with any LST-1 events, are also saved in the +The MAGIC-stereo events, observed during the LST observation time +period but not coincident with any LST events, are also saved in the output file, but they are not yet used for the high level analysis. Unless there is any particular reason, please use the default half width @@ -27,25 +27,24 @@ accidental coincidence rate as much as possible by keeping the number of actual coincident events. -Please note that the time offset depends on the date of observations -as summarized below: -* before June 12 2021: -3.1 us -* June 13 2021 to Feb 28 2023: -6.5 us -* March 10 2023 to March 30 2023: -76039.3 us -* April 13 2023 to August 2023: -25.1 us -* after Sep 11 2023 : -6.2 us -By default, pre offset search is performed using large shower events. -The possible time offset is found among all possible combinations of -time offsets using those events. Finally, the time offset scan is performed -around the possible offset found by the pre offset search. Instead of that, -you can also define the offset scan range in the configuration file. - -Usage: +Please note that for the data taken before 12th June 2021, a coincidence +peak should be found around the time offset of -3.1 us, which can be +explained by the trigger time delays of both systems. 
For the data taken +after that date, however, there is an additional global offset appeared +and then the peak is shifted to the time offset of -6.5 us. Thus, it +would be needed to tune the offset scan region depending on the date +when data were taken. The reason of the shift is under investigation. + +Usage per single LST data file (indicated if you want to do tests): $ python lst1_magic_event_coincidence.py ---input-file-lst dl1/LST-1/dl1_LST-1.Run03265.0040.h5 +--input-file-lst dl1/LST/dl1_LST.Run03265.0040.h5 --input-dir-magic dl1/MAGIC (--output-dir dl1_coincidence) (--config-file config.yaml) + +Broader usage: +This script is called automatically from the script "coincident_events.py". +If you want to analyse a target, this is the way to go. See this other script for more details. """ import argparse @@ -61,14 +60,15 @@ from astropy import units as u from ctapipe.instrument import SubarrayDescription from magicctapipe.io import ( + format_object, get_stereo_events, load_lst_dl1_data_file, load_magic_dl1_data_files, save_pandas_data_in_table, + telescope_combinations, ) -from magicctapipe.io.io import TEL_NAMES -__all__ = ["event_coincidence"] +__all__ = ["telescope_positions","event_coincidence"] logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) @@ -80,44 +80,89 @@ # The final digit of timestamps TIME_ACCURACY = 100 * u.ns -# The telescope positions used in a simulation -TEL_POSITIONS = { - 1: [-8.09, 77.13, 0.78] * u.m, # LST-1 - 2: [39.3, -62.55, -0.97] * u.m, # MAGIC-I - 3: [-31.21, -14.57, 0.2] * u.m, # MAGIC-II -} + +def telescope_positions(config): + """ + This function computes the telescope positions with respect to the array baricenter. + The array can have any configuration, e.g.: M1+M2+LST1+LST2, the full MAGIC+LST array, etc. + + Parameters + ---------- + config: dict + dictionary generated from an yaml file with information about the telescope IDs. 
Typically evoked from "config_coincidence.yaml" in the main scripts. + + Returns + ------- + TEL_POSITIONS: dict + Dictionary with telescope positions in the baricenter reference frame of the adopted array. + """ + + #Telescope positions in meters in a generic reference frame: + RELATIVE_POSITIONS = { + "LST-1" : [-70.930, -52.070, 43.00], + "LST-2" : [-35.270, 66.140, 32.00], + "LST-3" : [75.280 , 50.490, 28.70], + "LST-4" : [30.910 , -64.540, 32.00], + "MAGIC-I" : [-23.540, -191.750, 41.25], + "MAGIC-II" : [-94.05, -143.770, 42.42] + } + + telescopes_in_use = {} + x = np.asarray([]) + y = np.asarray([]) + z = np.asarray([]) + for k, v in config["mc_tel_ids"].items(): + if v > 0: + telescopes_in_use[v] = RELATIVE_POSITIONS[k] + x = np.append(x,RELATIVE_POSITIONS[k][0]) + y = np.append(y,RELATIVE_POSITIONS[k][1]) + z = np.append(z,RELATIVE_POSITIONS[k][2]) + + average_xyz = np.asarray([np.mean(x), np.mean(y), np.mean(z)]) + + TEL_POSITIONS = {} + for k, v in telescopes_in_use.items(): + TEL_POSITIONS[k] = list(np.round(np.asarray(v)-average_xyz,2)) * u.m + + return TEL_POSITIONS + + def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): """ - Searches for coincident events from LST-1 and MAGIC joint + Searches for coincident events from LST and MAGIC joint observation data offline using their timestamps. 
Parameters ---------- input_file_lst: str - Path to an input LST-1 DL1 data file + Path to an input LST DL1 data file input_dir_magic: str Path to a directory where input MAGIC DL1 data files are stored output_dir: str Path to a directory where to save an output DL1 data file config: dict - Configuration for the LST-1 + MAGIC combined analysis + Configuration for the LST + MAGIC combined analysis """ config_coinc = config["event_coincidence"] - # Load the input LST-1 DL1 data file - logger.info(f"\nInput LST-1 DL1 data file: {input_file_lst}") + TEL_NAMES, _ = telescope_combinations(config) + + TEL_POSITIONS = telescope_positions(config) + + # Load the input LST DL1 data file + logger.info(f"\nInput LST DL1 data file: {input_file_lst}") event_data_lst, subarray_lst = load_lst_dl1_data_file(input_file_lst) # Load the input MAGIC DL1 data files logger.info(f"\nInput MAGIC directory: {input_dir_magic}") - event_data_magic, subarray_magic = load_magic_dl1_data_files(input_dir_magic) + event_data_magic, subarray_magic = load_magic_dl1_data_files(input_dir_magic, config) - # Exclude the parameters non-common to LST-1 and MAGIC data + # Exclude the parameters non-common to LST and MAGIC data timestamp_type_lst = config_coinc["timestamp_type_lst"] logger.info(f"\nLST timestamp type: {timestamp_type_lst}") @@ -137,21 +182,23 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): window_half_width = u.Quantity(window_half_width).to("ns") window_half_width = u.Quantity(window_half_width.round(), dtype=int) - pre_offset_search = False - if "pre_offset_search" in config_coinc: - pre_offset_search = config_coinc["pre_offset_search"] + logger.info("\nTime offsets:") + logger.info(format_object(config_coinc["time_offset"])) - if pre_offset_search: - logger.info("\nPre offset search will be performed.") - n_pre_offset_search_events = config_coinc["n_pre_offset_search_events"] - else: - logger.info("\noffset scan range defined in the config file will be 
used.") - offset_start = u.Quantity(config_coinc["time_offset"]["start"]) - offset_stop = u.Quantity(config_coinc["time_offset"]["stop"]) + offset_start = u.Quantity(config_coinc["time_offset"]["start"]) + offset_stop = u.Quantity(config_coinc["time_offset"]["stop"]) + + time_offsets = np.arange( + start=offset_start.to_value("ns").round(), + stop=offset_stop.to_value("ns").round(), + step=TIME_ACCURACY.to_value("ns").round(), + ) + + time_offsets = u.Quantity(time_offsets.round(), unit="ns", dtype=int) event_data = pd.DataFrame() features = pd.DataFrame() - profiles = pd.DataFrame(data={"time_offset": []}) + profiles = pd.DataFrame(data={"time_offset": time_offsets.to_value("us").round(1)}) # Arrange the LST timestamps. They are stored in the UNIX format in # units of seconds with 17 digits, 10 digits for the integral part @@ -171,10 +218,11 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): tel_ids = np.unique(event_data_magic.index.get_level_values("tel_id")) for tel_id in tel_ids: + tel_name = TEL_NAMES[tel_id] df_magic = event_data_magic.query(f"tel_id == {tel_id}").copy() - # Arrange the MAGIC timestamps as same as the LST-1 timestamps + # Arrange the MAGIC timestamps as same as the LST timestamps seconds = np.array([Decimal(str(time)) for time in df_magic["time_sec"]]) nseconds = np.array([Decimal(str(time)) for time in df_magic["time_nanosec"]]) @@ -184,123 +232,8 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): df_magic["timestamp"] = timestamps_magic.to_value("s") df_magic.drop(["time_sec", "time_nanosec"], axis=1, inplace=True) - # Pre offset search is performed to define the offset scan region. - # First, N events are extracted from largest intensity events for LST and - # MAGIC. Then, it counts the number of coincident events within a defined - # window after shifting all possible combinations (N x N) of time offsets. 
- if pre_offset_search: - logger.info( - "\nPre offset search using large-intensity shower events is ongoing..." - ) - - logger.info( - f"\nExtracting the {tel_name} events taken when LST-1 observed for pre offset search..." - ) - - time_lolim = timestamps_lst[0] - window_half_width - time_uplim = timestamps_lst[-1] + window_half_width - - cond_lolim = timestamps_magic >= time_lolim - cond_uplim = timestamps_magic <= time_uplim - - mask_lst_obs_window = np.logical_and(cond_lolim, cond_uplim) - n_events_magic = np.count_nonzero(mask_lst_obs_window) - - if n_events_magic == 0: - logger.info(f"--> No {tel_name} events are found. Skipping...") - continue - - logger.info(f"--> {n_events_magic} events are found.") - - # Extract indexes of MAGIC large shower events - index_large_intensity_magic = np.argsort( - df_magic["intensity"][mask_lst_obs_window] - )[::-1][:n_pre_offset_search_events] - - # If LST/MAGIC observations are not completely overlapped, only small - # numbers of MAGIC events are left for the pre offset search. - # To find large-intensity showers within the same time window, - # time cut around MAGIC observations is applied to the LST data set. - time_lolim = timestamps_magic[mask_lst_obs_window][0] - window_half_width - time_uplim = timestamps_magic[mask_lst_obs_window][-1] + window_half_width - - cond_lolim = timestamps_lst >= time_lolim - cond_uplim = timestamps_lst <= time_uplim - - mask_magic_obs_window = np.logical_and(cond_lolim, cond_uplim) - - if np.count_nonzero(mask_magic_obs_window) == 0: - logger.info( - f"\nNo LST events are found around {tel_name} events. Skipping..." 
- ) - continue - - # Extract indexes of LST large shower events - index_large_intensity_lst = np.argsort( - event_data_lst["intensity"][mask_magic_obs_window] - )[::-1][:n_pre_offset_search_events] - - # Crate an array of all combinations of [MAGIC timestamp, LST timestamp] - timestamps_magic_lst_combination = np.array( - np.meshgrid( - timestamps_magic[mask_lst_obs_window][ - index_large_intensity_magic - ].value, - timestamps_lst[mask_magic_obs_window][ - index_large_intensity_lst - ].value, - ) - ).reshape(2, -1) - - # Compute all combinations of time offset between MAGIC and LST - time_offsets_pre_search = ( - timestamps_magic_lst_combination[0] - - timestamps_magic_lst_combination[1] - ) - - time_offsets_pre_search = u.Quantity( - time_offsets_pre_search.round(), unit="ns", dtype=int - ) - - n_coincidences_pre_search = [ - np.sum( - np.abs(time_offsets_pre_search - time_offset).value - < window_half_width.value - ) - for time_offset in time_offsets_pre_search - ] - - n_coincidences_pre_search = np.array(n_coincidences_pre_search) - - offset_at_max_pre_search = time_offsets_pre_search[ - n_coincidences_pre_search == n_coincidences_pre_search.max() - ].mean() - offset_at_max_pre_search = offset_at_max_pre_search.to("us").round(1) - - logger.info( - f"\nPre offset search finds {offset_at_max_pre_search} as a possible offset" - ) - - # offset scan region is defined as 3 x half window width - # around the offset_at_max to cover "full window width" which will - # be used to compute weighted average of the time offset - offset_start = offset_at_max_pre_search - 3 * window_half_width - offset_stop = offset_at_max_pre_search + 3 * window_half_width - - logger.info("\nTime offsets scan region:") - logger.info(f" start: {offset_start.to('us').round(1)}") - logger.info(f" stop: {offset_stop.to('us').round(1)}") - - time_offsets = np.arange( - start=offset_start.to_value("ns").round(), - stop=offset_stop.to_value("ns").round(), - step=TIME_ACCURACY.to_value("ns").round(), 
- ) - - time_offsets = u.Quantity(time_offsets.round(), unit="ns", dtype=int) - - # Extract the MAGIC events taken when LST-1 observed - logger.info(f"\nExtracting the {tel_name} events taken when LST-1 observed...") + # Extract the MAGIC events taken when LST observed + logger.info(f"\nExtracting the {tel_name} events taken when LST observed...") time_lolim = timestamps_lst[0] + time_offsets[0] - window_half_width time_uplim = timestamps_lst[-1] + time_offsets[-1] + window_half_width @@ -321,7 +254,7 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): timestamps_magic = timestamps_magic[mask] # Start checking the event coincidence. The time offsets and the - # coincidence window are applied to the LST-1 events, and the + # coincidence window are applied to the LST events, and the # MAGIC events existing in the window, including the edges, are # recognized as the coincident events. At first, we scan the # number of coincident events in each time offset and find the @@ -335,6 +268,7 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): logger.info("\nChecking the event coincidence...") for time_offset in time_offsets: + times_lolim = timestamps_lst + time_offset - window_half_width times_uplim = timestamps_lst + time_offset + window_half_width @@ -392,7 +326,7 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): logger.info(f"--> Number of coincident events: {n_events_at_avg}") logger.info(f"--> Fraction over the {tel_name} events: {percentage:.1f}%") - # Keep only the LST-1 events coincident with the MAGIC events, + # Keep only the LST events coincident with the MAGIC events, # and assign the MAGIC observation and event IDs to them indices_lst, indices_magic = np.where(mask) @@ -406,8 +340,8 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): df_lst.reset_index(inplace=True) df_lst.set_index(["obs_id_magic", "event_id_magic", "tel_id"], inplace=True) - # Assign 
also the LST-1 observation and event IDs to the MAGIC - # events coincident with the LST-1 events + # Assign also the LST observation and event IDs to the MAGIC + # events coincident with the LST events obs_ids_lst = df_lst["obs_id_lst"].to_numpy() event_ids_lst = df_lst["event_id_lst"].to_numpy() @@ -441,8 +375,7 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): event_data = pd.concat([event_data, df_lst, df_magic]) features = pd.concat([features, df_feature]) - profiles = profiles.merge(df_profile, on="time_offset", how="outer") - profiles = profiles.sort_values("time_offset") + profiles = profiles.merge(df_profile) if event_data.empty: logger.info("\nNo coincident events are found. Exiting...") @@ -452,11 +385,11 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): event_data.drop_duplicates(inplace=True) # It sometimes happen that even if it is a MAGIC-stereo event, only - # M1 or M2 event is coincident with a LST-1 event. In that case we + # M1 or M2 event is coincident with a LST event. In that case we # keep both M1 and M2 events, since they are recognized as the same # shower event by the MAGIC-stereo hardware trigger. - # We also keep the MAGIC-stereo events not coincident with any LST-1 + # We also keep the MAGIC-stereo events not coincident with any LST # events, since the stereo reconstruction is still feasible, but not # yet used for the high level analysis. 
@@ -474,7 +407,7 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - event_data = get_stereo_events(event_data) + event_data = get_stereo_events(event_data, config) event_data.reset_index(inplace=True) event_data = event_data.astype({"obs_id": int, "event_id": int}) @@ -484,7 +417,7 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): input_file_name = Path(input_file_lst).name - output_file_name = input_file_name.replace("LST-1", "LST-1_MAGIC") + output_file_name = input_file_name.replace("LST", "MAGIC_LST") output_file = f"{output_dir}/{output_file_name}" save_pandas_data_in_table( @@ -500,24 +433,27 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): ) # Create the subarray description with the telescope coordinates - # relative to the center of the LST-1 and MAGIC positions - tel_descriptions = { - 1: subarray_lst.tel[1], # LST-1 - 2: subarray_magic.tel[2], # MAGIC-I - 3: subarray_magic.tel[3], # MAGIC-II - } - - subarray_lst1_magic = SubarrayDescription( - "LST1-MAGIC-Array", TEL_POSITIONS, tel_descriptions + # relative to the center of the LST and MAGIC positions + tel_descriptions = {} + for k, v in TEL_NAMES.items(): + if v[:3] == "LST": + tel_descriptions[k] = subarray_lst.tel[k] + else: + tel_descriptions[k] = subarray_magic.tel[k] + + + subarray_lst_magic = SubarrayDescription( + "LST-MAGIC-Array", TEL_POSITIONS, tel_descriptions ) # Save the subarray description - subarray_lst1_magic.to_hdf(output_file) + subarray_lst_magic.to_hdf(output_file) logger.info(f"\nOutput file: {output_file}") def main(): + start_time = time.time() parser = argparse.ArgumentParser() @@ -528,7 +464,7 @@ def main(): dest="input_file_lst", type=str, required=True, - help="Path to an input LST-1 DL1 data file", + help="Path to an input LST DL1 data file", ) parser.add_argument( @@ -575,4 +511,4 
@@ def main(): if __name__ == "__main__": - main() + main() \ No newline at end of file diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index 41cf519ee..2c9b43bfb 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -2,32 +2,31 @@ # coding: utf-8 """ -This script processes LST-1 and MAGIC events of simtel MC DL0 data +This script processes LST and MAGIC events of simtel MC DL0 data (*.simtel.gz) and computes the DL1 parameters, i.e., Hillas, timing and -leakage parameters. It saves only the events that all the DL1 parameters +leakage parameters. It saves only the events where all the DL1 parameters are successfully reconstructed. -Since it cannot identify the telescopes of the input file, please assign +Since it cannot identify the telescopes from the input file, please assign the correct telescope ID to each telescope in the configuration file. -When saving data to an output file, the telescope IDs will be reset to -the following ones to match with those of real data: - -LST-1: tel_id = 1, MAGIC-I: tel_id = 2, MAGIC-II: tel_id = 3 - -In addition, the telescope coordinate will be converted to the one -relative to the center of the LST-1 and MAGIC positions (including the +The telescope coordinates will be converted to those +relative to the center of the LST and MAGIC positions (including the altitude) for the convenience of the geometrical stereo reconstruction. -Usage: +Usage per single data file (indicated if you want to do tests): $ python lst1_magic_mc_dl0_to_dl1.py --input-file dl0/gamma_40deg_90deg_run1.simtel.gz (--output-dir dl1) -(--config-file config.yaml) +(--config-file config_step1.yaml) + +Broader usage: +This script is called automatically from the script "setting_up_config_and_dir.py". +If you want to analyse a target, this is the way to go. 
See this other script for more details. """ -import argparse -import logging +import argparse #Parser for command-line options, arguments etc +import logging #Used to manage the log file import re import time from pathlib import Path @@ -58,17 +57,101 @@ from magicctapipe.utils import calculate_disp, calculate_impact from traitlets.config import Config -__all__ = ["mc_dl0_to_dl1"] +__all__ = ["Calibrate_LST", "Calibrate_MAGIC","mc_dl0_to_dl1"] logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) -# The CORSIKA particle types +# The CORSIKA particle types #CORSIKA simulates Cherenkov light PARTICLE_TYPES = {1: "gamma", 3: "electron", 14: "proton", 402: "helium"} -def mc_dl0_to_dl1(input_file, output_dir, config): + +def Calibrate_LST(event, tel_id, rng, config_lst, camera_geoms, calibrator_lst, increase_nsb, use_time_delta_cleaning, use_dynamic_cleaning ): + + """ + This function computes and returns signal_pixels, image, and peak_time for LST + """ + + calibrator_lst._calibrate_dl0(event, tel_id) + calibrator_lst._calibrate_dl1(event, tel_id) + + image = event.dl1.tel[tel_id].image.astype(np.float64) + peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) + + increase_psf = config_lst["increase_psf"]["use"] + use_only_main_island = config_lst["use_only_main_island"] + + if increase_nsb: + # Add extra noise in pixels + image = add_noise_in_pixels(rng, image, **config_lst["increase_nsb"]) + + if increase_psf: + # Smear the image + image = random_psf_smearer( + image=image, + fraction=config_lst["increase_psf"]["fraction"], + indices=camera_geoms[tel_id].neighbor_matrix_sparse.indices, + indptr=camera_geoms[tel_id].neighbor_matrix_sparse.indptr, + ) + + # Apply the image cleaning + signal_pixels = tailcuts_clean( + camera_geoms[tel_id], image, **config_lst["tailcuts_clean"] + ) + + if use_time_delta_cleaning: + signal_pixels = apply_time_delta_cleaning( + geom=camera_geoms[tel_id], + mask=signal_pixels, 
+ arrival_times=peak_time, + **config_lst["time_delta_cleaning"], + ) + + if use_dynamic_cleaning: + signal_pixels = apply_dynamic_cleaning( + image, signal_pixels, **config_lst["dynamic_cleaning"] + ) + + if use_only_main_island: + _, island_labels = number_of_islands(camera_geoms[tel_id], signal_pixels) + n_pixels_on_island = np.bincount(island_labels.astype(np.int64)) + + # The first index means the pixels not surviving + # the cleaning, so should not be considered + n_pixels_on_island[0] = 0 + max_island_label = np.argmax(n_pixels_on_island) + signal_pixels[island_labels != max_island_label] = False + + return signal_pixels, image, peak_time + + +def Calibrate_MAGIC(event, tel_id, config_magic, magic_clean, calibrator_magic): + + """ + This function computes and returns signal_pixels, image, and peak_time for MAGIC + """ + + calibrator_magic._calibrate_dl0(event, tel_id) + calibrator_magic._calibrate_dl1(event, tel_id) + + image = event.dl1.tel[tel_id].image.astype(np.float64) + peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) + use_charge_correction = config_magic["charge_correction"]["use"] + + if use_charge_correction: + # Scale the charges by the correction factor + image *= config_magic["charge_correction"]["factor"] + + # Apply the image cleaning + signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( + event_image=image, event_pulse_time=peak_time + ) + return signal_pixels, image, peak_time + + +def mc_dl0_to_dl1(input_file, output_dir, config, focal_length): """ Processes LST-1 and MAGIC events of simtel MC DL0 data and computes the DL1 parameters. 
@@ -83,28 +166,24 @@ def mc_dl0_to_dl1(input_file, output_dir, config): Configuration for the LST-1 + MAGIC analysis """ - assigned_tel_ids = config["mc_tel_ids"] + assigned_tel_ids = config["mc_tel_ids"] #This variable becomes the dictionary {'LST-1': 1, 'MAGIC-I': 2, 'MAGIC-II': 3} - logger.info("\nAssigned telescope IDs:") + logger.info("\nAssigned telescope IDs:") #Here we are just adding infos to the log file logger.info(format_object(assigned_tel_ids)) - tel_id_lst1 = assigned_tel_ids["LST-1"] - tel_id_m1 = assigned_tel_ids["MAGIC-I"] - tel_id_m2 = assigned_tel_ids["MAGIC-II"] - # Load the input file logger.info(f"\nInput file: {input_file}") event_source = EventSource( input_file, - allowed_tels=list(assigned_tel_ids.values()), - focal_length_choice="effective", + allowed_tels=list(filter(lambda check_id: check_id > 0, assigned_tel_ids.values())), #Here we load the events for all telescopes with ID > 0. + focal_length_choice=focal_length, ) obs_id = event_source.obs_ids[0] - subarray = event_source.subarray + subarray = event_source.subarray - tel_descriptions = subarray.tel + tel_descriptions = subarray.tel tel_positions = subarray.positions logger.info("\nSubarray description:") @@ -136,7 +215,7 @@ def mc_dl0_to_dl1(input_file, output_dir, config): logger.info(format_object(config_lst["increase_psf"])) increase_nsb = config_lst["increase_nsb"].pop("use") - increase_psf = config_lst["increase_psf"].pop("use") + increase_psf = config_lst["increase_psf"]["use"] if increase_nsb: rng = np.random.default_rng(obs_id) @@ -177,8 +256,6 @@ def mc_dl0_to_dl1(input_file, output_dir, config): logger.info("\nMAGIC charge correction:") logger.info(format_object(config_magic["charge_correction"])) - use_charge_correction = config_magic["charge_correction"].pop("use") - if config_magic["magic_clean"]["find_hotpixels"]: logger.warning( "\nWARNING: Hot pixels do not exist in a simulation. 
" @@ -189,11 +266,6 @@ def mc_dl0_to_dl1(input_file, output_dir, config): logger.info("\nMAGIC image cleaning:") logger.info(format_object(config_magic["magic_clean"])) - magic_clean = { - tel_id_m1: MAGICClean(camera_geoms[tel_id_m1], config_magic["magic_clean"]), - tel_id_m2: MAGICClean(camera_geoms[tel_id_m2], config_magic["magic_clean"]), - } - # Prepare for saving data to an output file Path(output_dir).mkdir(exist_ok=True, parents=True) @@ -207,12 +279,34 @@ def mc_dl0_to_dl1(input_file, output_dir, config): zenith = 90 - sim_config["max_alt"].to_value("deg") azimuth = Angle(sim_config["max_az"]).wrap_at("360 deg").degree - + logger.info(np.asarray(list(assigned_tel_ids.values()))) + LSTs_IDs = np.asarray(list(assigned_tel_ids.values())[0:4]) + LSTs_in_use = np.where(LSTs_IDs > 0)[0] + 1 #Here we select which LSTs are/is in use + + if len(LSTs_in_use) == 0: + LSTs_in_use = ''.join(str(k) for k in LSTs_in_use) + elif len(LSTs_in_use) > 0: + LSTs_in_use = 'LST'+'_LST'.join(str(k) for k in LSTs_in_use) + print('lst',LSTs_in_use) + MAGICs_IDs = np.asarray(list(assigned_tel_ids.values())[4:6]) + MAGICs_in_use = np.where(MAGICs_IDs > 0)[0] + 1 #Here we select which MAGICs are/is in use + + if len(MAGICs_in_use) == 0: + MAGICs_in_use = ''.join(str(k) for k in MAGICs_in_use) + elif len(MAGICs_in_use) > 0: + MAGICs_in_use = 'MAGIC'+'_MAGIC'.join(str(k) for k in MAGICs_in_use) + magic_clean = {} + for k in MAGICs_IDs: + if k > 0: + magic_clean[k] = MAGICClean(camera_geoms[k], config_magic["magic_clean"]) + output_file = ( f"{output_dir}/dl1_{particle_type}_zd_{zenith.round(3)}deg_" - f"az_{azimuth.round(3)}deg_LST-1_MAGIC_run{obs_id}.h5" - ) + f"az_{azimuth.round(3)}deg_{LSTs_in_use}_{MAGICs_in_use}_run{obs_id}.h5" + ) #The files are saved with the names of all telescopes involved + + # Loop over every shower event logger.info("\nProcessing the events...") @@ -224,83 +318,24 @@ def mc_dl0_to_dl1(input_file, output_dir, config): tels_with_trigger = 
event.trigger.tels_with_trigger # Check if the event triggers both M1 and M2 or not - trigger_m1 = tel_id_m1 in tels_with_trigger - trigger_m2 = tel_id_m2 in tels_with_trigger + if((set(MAGICs_IDs).issubset(set(tels_with_trigger))) and (MAGICs_in_use=="MAGIC1_MAGIC2")): + magic_stereo = True #If both have trigger, then magic_stereo = True + else: + magic_stereo = False - magic_stereo = trigger_m1 and trigger_m2 + for tel_id in tels_with_trigger: - for tel_id in tels_with_trigger: - if tel_id == tel_id_lst1: + if tel_id in LSTs_IDs: ##If the ID is in the LST list, we call Calibrate_LST() # Calibrate the LST-1 event - calibrator_lst._calibrate_dl0(event, tel_id) - calibrator_lst._calibrate_dl1(event, tel_id) - - image = event.dl1.tel[tel_id].image.astype(np.float64) - peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - - if increase_nsb: - # Add extra noise in pixels - image = add_noise_in_pixels( - rng, image, **config_lst["increase_nsb"] - ) - - if increase_psf: - # Smear the image - image = random_psf_smearer( - image=image, - fraction=config_lst["increase_psf"]["fraction"], - indices=camera_geoms[tel_id].neighbor_matrix_sparse.indices, - indptr=camera_geoms[tel_id].neighbor_matrix_sparse.indptr, - ) - - # Apply the image cleaning - signal_pixels = tailcuts_clean( - camera_geoms[tel_id], image, **config_lst["tailcuts_clean"] - ) - - if use_time_delta_cleaning: - signal_pixels = apply_time_delta_cleaning( - geom=camera_geoms[tel_id], - mask=signal_pixels, - arrival_times=peak_time, - **config_lst["time_delta_cleaning"], - ) - - if use_dynamic_cleaning: - signal_pixels = apply_dynamic_cleaning( - image, signal_pixels, **config_lst["dynamic_cleaning"] - ) - - if use_only_main_island: - _, island_labels = number_of_islands( - camera_geoms[tel_id], signal_pixels - ) - n_pixels_on_island = np.bincount(island_labels.astype(np.int64)) - - # The first index means the pixels not surviving - # the cleaning, so should not be considered - n_pixels_on_island[0] = 0 
- max_island_label = np.argmax(n_pixels_on_island) - signal_pixels[island_labels != max_island_label] = False - - else: + signal_pixels, image, peak_time = Calibrate_LST(event, tel_id, rng, config_lst, camera_geoms, calibrator_lst, increase_nsb, use_time_delta_cleaning, use_dynamic_cleaning) + elif tel_id in MAGICs_IDs: # Calibrate the MAGIC event - calibrator_magic._calibrate_dl0(event, tel_id) - calibrator_magic._calibrate_dl1(event, tel_id) - - image = event.dl1.tel[tel_id].image.astype(np.float64) - peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - - if use_charge_correction: - # Scale the charges by the correction factor - image *= config_magic["charge_correction"]["factor"] - - # Apply the image cleaning - signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( - event_image=image, event_pulse_time=peak_time + signal_pixels, image, peak_time = Calibrate_MAGIC(event, tel_id, config_magic, magic_clean, calibrator_magic) + else: + logger.info( + f"--> Telescope ID {tel_id} not in LST list or MAGIC list. Please check if the IDs are OK in the configuration file" ) - - if not any(signal_pixels): + if not any(signal_pixels): #So: if there is no event, we skip it and go back to the loop in the next event logger.info( f"--> {event.count} event (event ID: {event.index.event_id}, " f"telescope {tel_id}) could not survive the image cleaning. " @@ -326,6 +361,15 @@ def mc_dl0_to_dl1(input_file, output_dir, config): # Parametrize the image hillas_params = hillas_parameters(camera_geom_masked, image_masked) + # + if any(np.isnan(value) for value in hillas_params.values()): + logger.info( + f"--> {event.count} event (event ID: {event.index.event_id}, " + f"telescope {tel_id}): non-valid Hillas parameters. Skipping..." 
+ ) + continue + + timing_params = timing_parameters( camera_geom_masked, image_masked, peak_time_masked, hillas_params ) @@ -388,17 +432,10 @@ def mc_dl0_to_dl1(input_file, output_dir, config): n_islands=n_islands, magic_stereo=magic_stereo, ) - + # Reset the telescope IDs - if tel_id == tel_id_lst1: - event_info.tel_id = 1 - - elif tel_id == tel_id_m1: - event_info.tel_id = 2 - - elif tel_id == tel_id_m2: - event_info.tel_id = 3 - + event_info.tel_id = tel_id + # Save the parameters to the output file writer.write( "parameters", @@ -409,28 +446,29 @@ def mc_dl0_to_dl1(input_file, output_dir, config): logger.info(f"\nIn total {n_events_processed} events are processed.") # Convert the telescope coordinate to the one relative to the center - # of the LST-1 and MAGIC positions, and reset the telescope IDs + # of the LST and MAGIC positions, and reset the telescope IDs position_mean = u.Quantity(list(tel_positions.values())).mean(axis=0) - tel_positions_lst1_magic = { - 1: tel_positions[tel_id_lst1] - position_mean, # LST-1 - 2: tel_positions[tel_id_m1] - position_mean, # MAGIC-I - 3: tel_positions[tel_id_m2] - position_mean, # MAGIC-II - } - - tel_descriptions_lst1_magic = { - 1: tel_descriptions[tel_id_lst1], # LST-1 - 2: tel_descriptions[tel_id_m1], # MAGIC-I - 3: tel_descriptions[tel_id_m2], # MAGIC-II - } - - subarray_lst1_magic = SubarrayDescription( - "LST1-MAGIC-Array", tel_positions_lst1_magic, tel_descriptions_lst1_magic + tel_positions_lst_magic = {} + tel_descriptions_lst_magic = {} + IDs_in_use = np.asarray(list(assigned_tel_ids.values())) + IDs_in_use = IDs_in_use[IDs_in_use > 0] + for k in IDs_in_use: + tel_positions_lst_magic[k] = tel_positions[k] - position_mean + tel_descriptions_lst_magic[k] = tel_descriptions[k] + + + subarray_lst_magic = SubarrayDescription( + "LST-MAGIC-Array", tel_positions_lst_magic, tel_descriptions_lst_magic ) + tel_positions = subarray_lst_magic.positions + logger.info("\nTelescope positions:") + 
logger.info(format_object(tel_positions)) + # Save the subarray description - subarray_lst1_magic.to_hdf(output_file) - + subarray_lst_magic.to_hdf(output_file) + # Save the simulation configuration with HDF5TableWriter(output_file, group_name="simulation", mode="a") as writer: writer.write("config", sim_config) @@ -439,10 +477,15 @@ def mc_dl0_to_dl1(input_file, output_dir, config): def main(): + + """ Here we collect the input parameters from the command line, load the configuration file and run mc_dl0_to_dl1()""" + start_time = time.time() parser = argparse.ArgumentParser() - + + + #Here we are simply collecting the parameters from the command line, as input file, output directory, and configuration file parser.add_argument( "--input-file", "-i", @@ -466,17 +509,26 @@ def main(): "-c", dest="config_file", type=str, - default="./config.yaml", + default="./config_step1.yaml", help="Path to a configuration file", ) + + parser.add_argument( + "--focal_length_choice", + "-f", + dest="focal_length_choice", + type=str, + default="effective", + help='Standard is "effective"', + ) - args = parser.parse_args() + args = parser.parse_args() #Here we select all 3 parameters collected above - with open(args.config_file, "rb") as f: - config = yaml.safe_load(f) + with open(args.config_file, "rb") as f: # "rb" mode opens the file in binary format for reading + config = yaml.safe_load(f) #Here we collect the inputs from the configuration file # Process the input data - mc_dl0_to_dl1(args.input_file, args.output_dir, config) + mc_dl0_to_dl1(args.input_file, args.output_dir, config, args.focal_length_choice) logger.info("\nDone.") @@ -485,4 +537,4 @@ def main(): if __name__ == "__main__": - main() + main() \ No newline at end of file diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py index accec1cea..cdf367a31 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py +++ 
b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py @@ -7,7 +7,7 @@ specified in the configuration file are applied to the events before the reconstruction. -When the input is real data containing LST-1 and MAGIC events, it checks +When the input is real data containing LST and MAGIC events, it checks the angular distances of their pointing directions and excludes the events taken with larger distances than the limit specified in the configuration file. This is in principle to avoid the reconstruction of @@ -24,6 +24,10 @@ (--output-dir dl1_stereo) (--config-file config.yaml) (--magic-only) + +Broader usage: +This script is called automatically from the script "stereo_events.py". +If you want to analyse a target, this is the way to go. See this other script for more details. """ import argparse @@ -54,7 +58,7 @@ logger.setLevel(logging.INFO) -def calculate_pointing_separation(event_data): +def calculate_pointing_separation(event_data, config): """ Calculates the angular distance of the LST-1 and MAGIC pointing directions. 
@@ -63,30 +67,36 @@ def calculate_pointing_separation(event_data): ---------- event_data: pandas.core.frame.DataFrame Data frame of LST-1 and MAGIC events - + config: dict + Configuration for the LST-1 + MAGIC analysis Returns ------- theta: pandas.core.series.Series - Angular distance of the LST-1 and MAGIC pointing directions - in the unit of degree + Angular distance of the LST array and MAGIC pointing directions + in units of degree """ - - # Extract LST-1 events - df_lst = event_data.query("tel_id == 1") - - # Extract the MAGIC events seen by also LST-1 - df_magic = event_data.query("tel_id == [2, 3]") + + assigned_tel_ids = config["mc_tel_ids"] #This variable becomes a dictionary, e.g.: {'LST-1': 1, 'LST-2': 0, 'LST-3': 0, 'LST-4': 0, 'MAGIC-I': 2, 'MAGIC-II': 3} + LSTs_IDs = np.asarray(list(assigned_tel_ids.values())[0:4]) + LSTs_IDs = list(LSTs_IDs[LSTs_IDs > 0]) #Here we list only the LSTs in use + MAGICs_IDs = np.asarray(list(assigned_tel_ids.values())[4:6]) + MAGICs_IDs = list(MAGICs_IDs[MAGICs_IDs > 0]) #Here we list only the MAGICs in use + + # Extract LST events + df_lst = event_data.query(f"tel_id == {LSTs_IDs}") + + # Extract the MAGIC events seen by also LST + df_magic = event_data.query(f"tel_id == {MAGICs_IDs}") df_magic = df_magic.loc[df_lst.index] - # Calculate the mean of the M1 and M2 pointing directions - pnt_az_magic, pnt_alt_magic = calculate_mean_direction( - lon=df_magic["pointing_az"], lat=df_magic["pointing_alt"], unit="rad" - ) + # Calculate the mean of the LSTs, and also of the M1 and M2 pointing directions + pnt_az_LST, pnt_alt_LST = calculate_mean_direction(lon=df_lst["pointing_az"], lat=df_lst["pointing_alt"], unit="rad") + pnt_az_magic, pnt_alt_magic = calculate_mean_direction(lon=df_magic["pointing_az"], lat=df_magic["pointing_alt"], unit="rad") # Calculate the angular distance of their pointing directions theta = angular_separation( - lon1=u.Quantity(df_lst["pointing_az"], unit="rad"), - lat1=u.Quantity(df_lst["pointing_alt"], 
unit="rad"), + lon1=u.Quantity(pnt_az_LST, unit="rad"), + lat1=u.Quantity(pnt_alt_LST, unit="rad"), lon2=u.Quantity(pnt_az_magic, unit="rad"), lat2=u.Quantity(pnt_alt_magic, unit="rad"), ) @@ -108,14 +118,15 @@ def stereo_reconstruction(input_file, output_dir, config, magic_only_analysis=Fa output_dir: str Path to a directory where to save an output DL1-stereo data file config: dict - Configuration for the LST-1 + MAGIC analysis + Configuration file for the stereo LST + MAGIC analysis, i.e. config_stereo.yaml magic_only_analysis: bool If `True`, it reconstructs the stereo parameters using only MAGIC events """ config_stereo = config["stereo_reco"] - + assigned_tel_ids = config["mc_tel_ids"] #This variable becomes a dictionary, e.g.: {'LST-1': 1, 'LST-2': 0, 'LST-3': 0, 'LST-4': 0, 'MAGIC-I': 2, 'MAGIC-II': 3} + # Load the input file logger.info(f"\nInput file: {input_file}") @@ -142,25 +153,36 @@ def stereo_reconstruction(input_file, output_dir, config, magic_only_analysis=Fa # Apply the event cuts logger.info(f"\nMAGIC-only analysis: {magic_only_analysis}") + LSTs_IDs = np.asarray(list(assigned_tel_ids.values())[0:4]) + if magic_only_analysis: - event_data.query("tel_id > 1", inplace=True) + event_data.query(f"tel_id > {LSTs_IDs.max()}", inplace=True) # Here we select only the events with the MAGIC tel_ids, i.e. 
above the maximum tel_id of the LSTs logger.info(f"\nQuality cuts: {config_stereo['quality_cuts']}") - event_data = get_stereo_events(event_data, config_stereo["quality_cuts"]) + event_data = get_stereo_events(event_data, config=config, quality_cuts=config_stereo["quality_cuts"]) - # Check the angular distance of the LST-1 and MAGIC pointing directions + # Check the angular distance of the LST and MAGIC pointing directions tel_ids = np.unique(event_data.index.get_level_values("tel_id")).tolist() - if (not is_simulation) and (tel_ids != [2, 3]): + Number_of_LSTs_in_use = len(LSTs_IDs[LSTs_IDs > 0]) + MAGICs_IDs = np.asarray(list(assigned_tel_ids.values())[4:6]) + Number_of_MAGICs_in_use = len(MAGICs_IDs[MAGICs_IDs > 0]) + if (Number_of_LSTs_in_use > 0) and (Number_of_MAGICs_in_use > 0): #If we use the two arrays, i.e. MAGIC and LST, then the "if" statement below will work (except for MC simulations) + Two_arrays_are_used = True + else: + Two_arrays_are_used = False + + if (not is_simulation) and (Two_arrays_are_used): + logger.info( "\nChecking the angular distances of " - "the LST-1 and MAGIC pointing directions..." + "the LST and MAGIC pointing directions..." 
) event_data.reset_index(level="tel_id", inplace=True) # Calculate the angular distance - theta = calculate_pointing_separation(event_data) + theta = calculate_pointing_separation(event_data, config) theta_uplim = u.Quantity(config_stereo["theta_uplim"]) mask = u.Quantity(theta, unit="deg") < theta_uplim @@ -201,6 +223,7 @@ def stereo_reconstruction(input_file, output_dir, config, magic_only_analysis=Fa multi_indices = event_data.groupby(["obs_id", "event_id"]).size().index for i_evt, (obs_id, event_id) in enumerate(multi_indices): + if i_evt % 100 == 0: logger.info(f"{i_evt} events") @@ -218,6 +241,7 @@ def stereo_reconstruction(input_file, output_dir, config, magic_only_analysis=Fa tel_ids = df_evt.index.get_level_values("tel_id") for tel_id in tel_ids: + df_tel = df_evt.loc[tel_id] # Assign the telescope information @@ -321,6 +345,7 @@ def stereo_reconstruction(input_file, output_dir, config, magic_only_analysis=Fa def main(): + start_time = time.time() parser = argparse.ArgumentParser() @@ -348,7 +373,7 @@ def main(): "-c", dest="config_file", type=str, - default="./config.yaml", + default="./config_general.yaml", help="Path to a configuration file", ) @@ -363,7 +388,7 @@ def main(): with open(args.config_file, "rb") as f: config = yaml.safe_load(f) - + # Process the input data stereo_reconstruction(args.input_file, args.output_dir, config, args.magic_only) @@ -374,4 +399,4 @@ def main(): if __name__ == "__main__": - main() + main() \ No newline at end of file diff --git a/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py b/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py index 645c10053..46b1067da 100644 --- a/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py @@ -7,11 +7,6 @@ Hillas, timing and leakage parameters. It saves only the events that all the DL1 parameters are successfully reconstructed. 
-When saving data to an output file, the telescope IDs will be reset to -the following ones for the convenience of the combined analysis with -LST-1, whose telescope ID is 1: - -MAGIC-I: tel_id = 2, MAGIC-II: tel_id = 3 When the input is real data, it searches for all the subrun files with the same observation ID and stored in the same directory as the input @@ -27,12 +22,16 @@ this script, but since the MaTaJu cleaning is not yet implemented in this pipeline, it applies the standard cleaning instead. -Usage: +Usage per single data file (indicated if you want to do tests): $ python magic_calib_to_dl1.py --input-file calib/20201216_M1_05093711.001_Y_CrabNebula-W0.40+035.root (--output-dir dl1) (--config-file config.yaml) (--process-run) + +Broader usage: +This script is called automatically from the script "setting_up_config_and_dir.py". +If you want to analyse a target, this is the way to go. See this other script for more details. """ import argparse @@ -72,7 +71,7 @@ PEDESTAL_TYPES = ["fundamental", "from_extractor", "from_extractor_rndm"] -def magic_calib_to_dl1(input_file, output_dir, config, max_events, process_run=False): +def magic_calib_to_dl1(input_file, output_dir, config, process_run=False): """ Processes the events of MAGIC calibrated data and computes the DL1 parameters. 
@@ -94,7 +93,7 @@ def magic_calib_to_dl1(input_file, output_dir, config, max_events, process_run=F # Load the input file logger.info(f"\nInput file: {input_file}") - event_source = MAGICEventSource(input_file, process_run=process_run, max_events=max_events) + event_source = MAGICEventSource(input_file, process_run=process_run) is_simulation = event_source.is_simulation logger.info(f"\nIs simulation: {is_simulation}") @@ -300,10 +299,10 @@ def magic_calib_to_dl1(input_file, output_dir, config, max_events, process_run=F # Reset the telescope IDs if tel_id == 1: - event_info.tel_id = 2 # MAGIC-I + event_info.tel_id = config["mc_tel_ids"]["MAGIC-I"] # MAGIC-I elif tel_id == 2: - event_info.tel_id = 3 # MAGIC-II + event_info.tel_id = config["mc_tel_ids"]["MAGIC-II"] # MAGIC-II # Save the parameters to the output file writer.write( @@ -315,13 +314,13 @@ def magic_calib_to_dl1(input_file, output_dir, config, max_events, process_run=F # Reset the telescope IDs of the subarray description tel_positions_magic = { - 2: subarray.positions[1], # MAGIC-I - 3: subarray.positions[2], # MAGIC-II + config["mc_tel_ids"]["MAGIC-I"]: subarray.positions[1], # MAGIC-I + config["mc_tel_ids"]["MAGIC-II"]: subarray.positions[2], # MAGIC-II } tel_descriptions_magic = { - 2: subarray.tel[1], # MAGIC-I - 3: subarray.tel[2], # MAGIC-II + config["mc_tel_ids"]["MAGIC-I"]: subarray.tel[1], # MAGIC-I + config["mc_tel_ids"]["MAGIC-II"]: subarray.tel[2], # MAGIC-II } subarray_magic = SubarrayDescription( @@ -371,15 +370,6 @@ def main(): help="Path to a configuration file", ) - parser.add_argument( - "--max-evt", - "-m", - dest="max_events", - type=int, - default=None, - help="Max. 
number of processed showers", - ) - + parser.add_argument( + "--process-run", dest="process_run", @@ -393,7 +383,7 @@ config = yaml.safe_load(f) # Process the input data - magic_calib_to_dl1(args.input_file, args.output_dir, config, args.max_events, args.process_run) + magic_calib_to_dl1(args.input_file, args.output_dir, config, args.process_run) logger.info("\nDone.") diff --git a/magicctapipe/scripts/lst1_magic/merge_hdf_files.py b/magicctapipe/scripts/lst1_magic/merge_hdf_files.py index c11c450d7..0e8f48414 100644 --- a/magicctapipe/scripts/lst1_magic/merge_hdf_files.py +++ b/magicctapipe/scripts/lst1_magic/merge_hdf_files.py @@ -2,7 +2,7 @@ # coding: utf-8 """ -This script merges the HDF files produced by the LST-1 + MAGIC combined +This script merges the HDF files produced by the LST + MAGIC combined analysis pipeline. It parses information from the file names, so they should follow the convention, i.e., *Run*.*.h5 or *run*.h5. @@ -13,8 +13,7 @@ If the `--run-wise` argument is given, it merges input files run-wise. It is applicable only to real data since MC data are already produced run-wise. The `--subrun-wise` argument can be also used to merge MAGIC -DL1 real data subrun-wise (for example, dl1_M1.Run05093711.001.h5 -+ dl1_M2.Run05093711.001.h5 -> dl1_MAGIC.Run05093711.001.h5). +DL1 real data subrun-wise. Usage: $ python merge_hdf_files.py @@ -22,6 +21,10 @@ (--output-dir dl1_merged) (--run-wise) (--subrun-wise) + +Broader usage: +This script is called automatically from the script "merging_runs_and_splitting_training_samples.py". +If you want to analyse a target, this is the way to go. See this other script for more details.
""" import argparse diff --git a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py new file mode 100644 index 000000000..621d112f4 --- /dev/null +++ b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py @@ -0,0 +1,269 @@ +""" +This script split the proton MC data sample into "train" +and "test", deletes possible failed runs (only those files +that end up with a size < 1 kB), and generates the bash +scripts to merge the data files calling the script "merge_hdf_files.py" +in the follwoing order: + +MAGIC: +1) Merge the subruns into runs for M1 and M2 individually. +2) Merge the runs of M1 and M2 into M1-M2 runs. +3) Merge all the M1-M2 runs for a given night. +Workingdir/DL1/Observations/Merged + +MC: +1) Merges all MC runs in a node and save them at +Workingdir/DL1/MC/PARTICLE/Merged + + +Usage: +$ python merging_runs_and_splitting_training_samples.py + +""" + +import os +import numpy as np +import glob +import yaml +import logging +from tqdm import tqdm +from pathlib import Path + +logger = logging.getLogger(__name__) +logger.addHandler(logging.StreamHandler()) +logger.setLevel(logging.INFO) + +def cleaning(list_of_nodes, target_dir): + + """ + This function looks for failed runs in each node and remove them. + + Parameters + ---------- + target_dir: str + Path to the target directory. + list_of_nodes: array of str + List of nodes where the function will look for failed runs. + """ + + for i in tqdm(range(len(list_of_nodes)), desc="Cleaning failed runs"): + os.chdir(list_of_nodes[i]) + os.system('find . -type f -name "*.h5" -size -1k -delete') + + os.chdir(target_dir+"/../") + print("Cleaning done.") + +def split_train_test(target_dir, train_fraction): + + """ + This function splits the MC proton sample in 2, i.e. the "test" and the "train" subsamples. 
+ It generates 2 subdirectories in the directory .../DL1/MC/protons named "test" and "train" and creates sub-sub-directories with the names of all nodes. + For each node sub-sub-directory we move 80% of the .h5 files (if it is in the "test" subdirectory) or 20% of the .h5 files (if it is in the "train" subdirectory). + + Parameters + ---------- + target_dir: str + Path to the working directory + train_fraction: float + Fraction of proton MC files to be used in the training RF dataset + """ + + proton_dir = target_dir+"/DL1/MC/protons" + + if not os.path.exists(proton_dir+"/train"): + os.mkdir(proton_dir+"/train") + if not os.path.exists(proton_dir+"/../protons_test"): + os.mkdir(proton_dir+"/../protons_test") + + list_of_dir = np.sort(glob.glob(proton_dir+'/node*' + os.path.sep)) + + for directory in tqdm(range(len(list_of_dir))): #tqdm allows us to print a progessbar in the terminal + if not os.path.exists(proton_dir+"/train/"+list_of_dir[directory].split("/")[-2]): + os.mkdir(proton_dir+"/train/"+list_of_dir[directory].split("/")[-2]) + if not os.path.exists(proton_dir+"/../protons_test/"+list_of_dir[directory].split("/")[-2]): + os.mkdir(proton_dir+"/../protons_test/"+list_of_dir[directory].split("/")[-2]) + list_of_runs = np.sort(glob.glob(proton_dir+"/"+list_of_dir[directory].split("/")[-2]+"/*.h5")) + split_percent = int(len(list_of_runs)*train_fraction) + for j in list_of_runs[0:split_percent]: + os.system(f"mv {j} {proton_dir}/train/"+list_of_dir[directory].split("/")[-2]) + + os.system(f"cp {list_of_dir[directory]}*.txt "+proton_dir+"/train/"+list_of_dir[directory].split("/")[-2]) + os.system(f"mv {list_of_dir[directory]}*.txt "+proton_dir+"/../protons_test/"+list_of_dir[directory].split("/")[-2]) + os.system(f"mv {list_of_dir[directory]}*.h5 "+proton_dir+"/../protons_test/"+list_of_dir[directory].split("/")[-2]) + os.system(f"rm -r {list_of_dir[directory]}") + +def merge(target_dir, identification, MAGIC_runs): + + """ + This function creates the bash 
scripts to run merge_hdf_files.py in all MAGIC subruns. + + Parameters + ---------- + target_dir: str + Path to the working directory + identification: str + Tells which batch to create. Options: subruns, M1M2, nights + MAGIC_runs: matrix of strings + This matrix is imported from config_general.yaml and tells the function where to find the data and where to put the merged files + """ + + process_name = "merging_"+target_dir.split("/")[-2:][1] + + MAGIC_DL1_dir = target_dir+"/DL1/Observations" + if os.path.exists(MAGIC_DL1_dir+"/M1") & os.path.exists(MAGIC_DL1_dir+"/M2"): + if not os.path.exists(MAGIC_DL1_dir+"/Merged"): + os.mkdir(MAGIC_DL1_dir+"/Merged") + + f = open(f"Merge_{identification}.sh","w") + f.write('#!/bin/sh\n\n') + f.write('#SBATCH -p short\n') + f.write('#SBATCH -J '+process_name+'\n') + f.write('#SBATCH -N 1\n\n') + f.write('ulimit -l unlimited\n') + f.write('ulimit -s unlimited\n') + f.write('ulimit -a\n\n') + + if identification == "0_subruns": + if os.path.exists(MAGIC_DL1_dir+"/M1"): + for i in MAGIC_runs: + if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}"): + os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night + if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/{i[1]}"): + os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run + f.write(f'conda run -n magic-lst python merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M1/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') + + if os.path.exists(MAGIC_DL1_dir+"/M2"): + for i in MAGIC_runs: + if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}"): + os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night + if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/{i[1]}"): + os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run + f.write(f'conda run -n magic-lst python merge_hdf_files.py --input-dir 
{MAGIC_DL1_dir}/M2/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') + + elif identification == "1_M1M2": + if os.path.exists(MAGIC_DL1_dir+"/M1") & os.path.exists(MAGIC_DL1_dir+"/M2"): + for i in MAGIC_runs: + if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/Merged"): + os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/Merged") + f.write(f'conda run -n magic-lst python merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --run-wise \n') + else: + for i in MAGIC_runs: + if not os.path.exists(MAGIC_DL1_dir+f"/Merged/Merged_{i[0]}"): + os.mkdir(f"{MAGIC_DL1_dir}/Merged/Merged_{i[0]}") #Creating a merged directory for each night + f.write(f'conda run -n magic-lst python merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --output-dir {MAGIC_DL1_dir}/Merged/Merged_{i[0]} \n') + + + f.close() + + +def mergeMC(target_dir, identification): + + """ + This function creates the bash scripts to run merge_hdf_files.py in all MC runs. + + Parameters + ---------- + target_dir: str + Path to the working directory + identification: str + Tells which batch to create. Options: protons, gammadiffuse + """ + + process_name = "merging_"+target_dir.split("/")[-2:][1] + + MC_DL1_dir = target_dir+"/DL1/MC" + if not os.path.exists(MC_DL1_dir+f"/{identification}/Merged"): + os.mkdir(MC_DL1_dir+f"/{identification}/Merged") + + if identification == "protons": + list_of_nodes = np.sort(glob.glob(MC_DL1_dir+f"/{identification}/train/node*")) + else: + list_of_nodes = np.sort(glob.glob(MC_DL1_dir+f"/{identification}/node*")) + + np.savetxt(MC_DL1_dir+f"/{identification}/list_of_nodes.txt",list_of_nodes, fmt='%s') + + + process_size = len(list_of_nodes) - 1 + + cleaning(list_of_nodes, target_dir) #This will delete the (possibly) failed runs. 
+ + f = open(f"Merge_{identification}.sh","w") + f.write('#!/bin/sh\n\n') + f.write('#SBATCH -p short\n') + f.write('#SBATCH -J '+process_name+'\n') + f.write(f"#SBATCH --array=0-{process_size}%50\n") + f.write('#SBATCH --mem=7g\n') + f.write('#SBATCH -N 1\n\n') + f.write('ulimit -l unlimited\n') + f.write('ulimit -s unlimited\n') + f.write('ulimit -a\n\n') + + f.write(f"SAMPLE_LIST=($(<{MC_DL1_dir}/{identification}/list_of_nodes.txt))\n") + f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") + f.write(f'export LOG={MC_DL1_dir}/{identification}/Merged'+'/merged_${SLURM_ARRAY_TASK_ID}.log\n') + f.write(f'conda run -n magic-lst python merge_hdf_files.py --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 2>&1\n') + + f.close() + + +def main(): + + """ + Here we read the config_general.yaml file, split the proton sample into "test" and "train", and merge the MAGIC files. + """ + + + with open("config_general.yaml", "rb") as f: # "rb" mode opens the file in binary format for reading + config = yaml.safe_load(f) + + + target_dir = str(Path(config["directories"]["workspace_dir"]))+"/"+config["directories"]["target_name"] + + MAGIC_runs_and_dates = config["general"]["MAGIC_runs"] + MAGIC_runs = np.genfromtxt(MAGIC_runs_and_dates,dtype=str,delimiter=',') + + train_fraction = float(config["general"]["proton_train"]) + + + #Here we slice the proton MC data into "train" and "test": + print("***** Splitting protons into 'train' and 'test' datasets...") + split_train_test(target_dir, train_fraction) + + print("***** Generating merge bashscripts...") + merge(target_dir, "0_subruns", MAGIC_runs) #generating the bash script to merge the subruns + merge(target_dir, "1_M1M2", MAGIC_runs) #generating the bash script to merge the M1 and M2 runs + merge(target_dir, "2_nights", MAGIC_runs) #generating the bash script to merge all runs per night + + print("***** Generating mergeMC bashscripts...") + mergeMC(target_dir, "protons") #generating the bash
script to merge the files + mergeMC(target_dir, "gammadiffuse") #generating the bash script to merge the files + mergeMC(target_dir, "gammas") #generating the bash script to merge the files + mergeMC(target_dir, "protons_test") + + + print("***** Running merge_hdf_files.py in the MAGIC data files...") + print("Process name: merging_"+target_dir.split("/")[-2:][1]) + print("To check the jobs submitted to the cluster, type: squeue -n merging_"+target_dir.split("/")[-2:][1]) + + #Below we run the bash scripts to merge the MAGIC files + list_of_merging_scripts = np.sort(glob.glob("Merge_*.sh")) + + for n,run in enumerate(list_of_merging_scripts): + if n == 0: + launch_jobs = f"merging{n}=$(sbatch --parsable {run})" + else: + launch_jobs = launch_jobs + f" && merging{n}=$(sbatch --parsable --dependency=afterany:$merging{n-1} {run})" + + #print(launch_jobs) + os.system(launch_jobs) + +if __name__ == "__main__": + main() + + + + + + + + diff --git a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py new file mode 100644 index 000000000..c93f0cc7e --- /dev/null +++ b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py @@ -0,0 +1,451 @@ +""" +This script facilitates the usage of other two scripts +of the MCP, i.e. "lst1_magic_mc_dl0_to_dl1.py" and +"magic_calib_to_dl1.py". This script is more like a +"manager" that organizes the analysis process by: +1) Creating the necessary directories and subdirectories. +2) Generatign all the bash script files that convert the +MAGIC and MC files from DL0 to DL1. +3) Launching these jobs in the IT container. + +Notice that in this stage we only use MAGIC + MC data. +No LST data is used here. 
+ +Standard usage: +$ python setting_up_config_and_dir.py + +If you want to run only the MAGIC or only the MC conversion, +you can do as follows: + +Only MAGIC: +$ python setting_up_config_and_dir.py --partial-analysis onlyMAGIC + +Only MC: +$ python setting_up_config_and_dir.py --partial-analysis onlyMC + +""" + +import os +import numpy as np +import argparse +import glob +import time +import yaml +from pathlib import Path + +def config_file_gen(ids, target_dir): + + """ + Here we create the configuration file needed for transforming DL0 into DL1 + """ + + f = open(target_dir+'/config_DL0_to_DL1.yaml','w') + #f.write("directories:\n target: "+target_dir+"\n\n") + lines_of_config_file = [ + "mc_tel_ids:", + "\n LST-1: "+str(ids[0]), + "\n LST-2: "+str(ids[1]), + "\n LST-3: "+str(ids[2]), + "\n LST-4: "+str(ids[3]), + "\n MAGIC-I: "+str(ids[4]), + "\n MAGIC-II: "+str(ids[5]), + "\n", + "\nLST:", + "\n image_extractor:", + '\n type: "LocalPeakWindowSum"', + "\n window_shift: 4", + "\n window_width: 8", + "\n", + "\n increase_nsb:", + "\n use: true", + "\n extra_noise_in_dim_pixels: 1.27", + "\n extra_bias_in_dim_pixels: 0.665", + "\n transition_charge: 8", + "\n extra_noise_in_bright_pixels: 2.08", + "\n", + "\n increase_psf:", + "\n use: false", + "\n fraction: null", + "\n", + "\n tailcuts_clean:", + "\n picture_thresh: 8", + "\n boundary_thresh: 4", + "\n keep_isolated_pixels: false", + "\n min_number_picture_neighbors: 2", + "\n", + "\n time_delta_cleaning:", + "\n use: true", + "\n min_number_neighbors: 1", + "\n time_limit: 2", + "\n", + "\n dynamic_cleaning:", + "\n use: true", + "\n threshold: 267", + "\n fraction: 0.03", + "\n", + "\n use_only_main_island: false", + "\n", + "\nMAGIC:", + "\n image_extractor:", + '\n type: "SlidingWindowMaxSum"', + "\n window_width: 5", + "\n apply_integration_correction: false", + "\n", + "\n charge_correction:", + "\n use: true", + "\n factor: 1.143", + "\n", + "\n magic_clean:", + "\n use_time: true", + "\n use_sum: true", 
+ "\n picture_thresh: 6", + "\n boundary_thresh: 3.5", + "\n max_time_off: 4.5", + "\n max_time_diff: 1.5", + "\n find_hotpixels: true", + '\n pedestal_type: "from_extractor_rndm"', + "\n", + "\n muon_ring:", + "\n thr_low: 25", + "\n tailcut: [12, 8]", + "\n ring_completeness_threshold: 25", + "\n"] + + f.writelines(lines_of_config_file) + f.close() + + +def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, telescope_ids, focal_length): + + """ + This function creates the lists list_nodes_gamma_complete.txt and list_folder_gamma.txt with the MC file paths. + After that, it generates a few bash scripts to link the MC paths to each subdirectory. + These bash scripts will be called later in the main() function below. + """ + + process_name = target_dir.split("/")[-2:][1] + + list_of_nodes = glob.glob(MC_path+"/node*") + f = open(target_dir+f"/list_nodes_{particle_type}_complete.txt","w") # creating list_nodes_gammas_complete.txt + for i in list_of_nodes: + f.write(i+"/output_"+SimTel_version+"\n") + + f.close() + + f = open(target_dir+f"/list_folder_{particle_type}.txt","w") # creating list_folder_gammas.txt + for i in list_of_nodes: + f.write(i.split("/")[-1]+"\n") + + f.close() + + #################################################################################### + ############ bash scripts that link the MC paths to each subdirectory. 
+ #################################################################################### + + f = open(f"linking_MC_{particle_type}_paths.sh","w") + lines_of_config_file = [ + "#!/bin/sh\n\n", + "#SBATCH -p short\n", + "#SBATCH -J "+process_name+"\n\n", + "#SBATCH -N 1\n\n", + "ulimit -l unlimited\n", + "ulimit -s unlimited\n", + "ulimit -a\n\n", + "while read -r -u 3 lineA && read -r -u 4 lineB\n", + "do\n", + " cd "+target_dir+f"/DL1/MC/{particle_type}\n", + " mkdir $lineB\n", + " cd $lineA\n", + " ls -lR *.gz |wc -l\n", + " ls *.gz > "+target_dir+f"/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n", + ' string=$lineA"/"\n', + " export file="+target_dir+f"/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n\n", + " cat $file | while read line; do echo $string${line} >>"+target_dir+f"/DL1/MC/{particle_type}/$lineB/list_dl0_ok.txt; done\n\n", + ' echo "folder $lineB and node $lineA"\n', + 'done 3<"'+target_dir+f'/list_nodes_{particle_type}_complete.txt" 4<"'+target_dir+f'/list_folder_{particle_type}.txt"\n', + ""] + f.writelines(lines_of_config_file) + f.close() + + + ################################################################################################################ + ############################ bash script that applies lst1_magic_mc_dl0_to_dl1.py to all MC data files. 
+ ################################################################################################################ + + number_of_nodes = glob.glob(MC_path+"/node*") + number_of_nodes = len(number_of_nodes) -1 + + f = open(f"linking_MC_{particle_type}_paths_r.sh","w") + lines_of_config_file = [ + '#!/bin/sh\n\n', + '#SBATCH -p xxl\n', + '#SBATCH -J '+process_name+'\n', + '#SBATCH --array=0-'+str(number_of_nodes)+'%50\n', + '#SBATCH --mem=10g\n', + '#SBATCH -N 1\n\n', + 'ulimit -l unlimited\n', + 'ulimit -s unlimited\n', + 'ulimit -a\n', + 'cd '+target_dir+f'/DL1/MC/{particle_type}\n\n', + 'export INF='+target_dir+'\n', + f'SAMPLE_LIST=($(<$INF/list_folder_{particle_type}.txt))\n', + 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n', + 'cd $SAMPLE\n\n', + 'export LOG='+target_dir+f'/DL1/MC/{particle_type}'+'/simtel_{$SAMPLE}_all.log\n', + 'cat list_dl0_ok.txt | while read line\n', + 'do\n', + ' cd '+target_dir+'/../\n', + ' conda run -n magic-lst python lst1_magic_mc_dl0_to_dl1.py --input-file $line --output-dir '+target_dir+f'/DL1/MC/{particle_type}/$SAMPLE --config-file '+target_dir+'/config_DL0_to_DL1.yaml >>$LOG 2>&1 --focal_length_choice '+focal_length+'\n\n', + 'done\n', + ""] + f.writelines(lines_of_config_file) + f.close() + + + + +def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs): + + """ + Below we create a bash script that links the the MAGIC data paths to each subdirectory. 
+ """ + + process_name = target_dir.split("/")[-2:][1] + + f = open("linking_MAGIC_data_paths.sh","w") + f.write('#!/bin/sh\n\n') + f.write('#SBATCH -p short\n') + f.write('#SBATCH -J '+process_name+'\n') + f.write('#SBATCH -N 1\n\n') + f.write('ulimit -l unlimited\n') + f.write('ulimit -s unlimited\n') + f.write('ulimit -a\n') + + if telescope_ids[-1] > 0: + for i in MAGIC_runs: + f.write('export IN1=/fefs/onsite/common/MAGIC/data/M2/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+'\n') + f.write('export OUT1='+target_dir+'/DL1/Observations/M2/'+i[0]+'/'+i[1]+'\n') + f.write('ls $IN1/*'+i[1][-2:]+'.*_Y_*.root > $OUT1/list_dl0.txt\n') + + f.write('\n') + if telescope_ids[-2] > 0: + for i in MAGIC_runs: + f.write('export IN1=/fefs/onsite/common/MAGIC/data/M1/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+'\n') + f.write('export OUT1='+target_dir+'/DL1/Observations/M1/'+i[0]+'/'+i[1]+'\n') + f.write('ls $IN1/*'+i[1][-2:]+'.*_Y_*.root > $OUT1/list_dl0.txt\n') + + f.close() + + if (telescope_ids[-2] > 0) or (telescope_ids[-1] > 0): + for i in MAGIC_runs: + if telescope_ids[-1] > 0: + + number_of_nodes = glob.glob('/fefs/onsite/common/MAGIC/data/M2/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+f'/*{i[1]}.*_Y_*.root') + number_of_nodes = len(number_of_nodes) - 1 + + f = open(f"MAGIC-II_dl0_to_dl1_run_{i[1]}.sh","w") + lines_of_config_file = [ + '#!/bin/sh\n\n', + '#SBATCH -p long\n', + '#SBATCH -J '+process_name+'\n', + '#SBATCH --array=0-'+str(number_of_nodes)+'\n', + '#SBATCH -N 1\n\n', + 'ulimit -l unlimited\n', + 'ulimit -s unlimited\n', + 'ulimit -a\n\n', + 'export OUTPUTDIR='+target_dir+'/DL1/Observations/M2/'+i[0]+'/'+i[1]+'\n', + 'cd '+target_dir+'/../\n', + 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', + 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', + 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', + 'conda run -n magic-lst 
python magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', + ""] + f.writelines(lines_of_config_file) + f.close() + + if telescope_ids[-2] > 0: + + number_of_nodes = glob.glob('/fefs/onsite/common/MAGIC/data/M1/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+f'/*{i[1]}.*_Y_*.root') + number_of_nodes = len(number_of_nodes) - 1 + + f = open(f"MAGIC-I_dl0_to_dl1_run_{i[1]}.sh","w") + lines_of_config_file = [ + '#!/bin/sh\n\n', + '#SBATCH -p long\n', + '#SBATCH -J '+process_name+'\n', + '#SBATCH --array=0-'+str(number_of_nodes)+'\n', + '#SBATCH -N 1\n\n', + 'ulimit -l unlimited\n', + 'ulimit -s unlimited\n', + 'ulimit -a\n\n', + 'export OUTPUTDIR='+target_dir+'/DL1/Observations/M1/'+i[0]+'/'+i[1]+'\n', + 'cd '+target_dir+'/../\n', + 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', + 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', + 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', + 'conda run -n magic-lst python magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', + ""] + f.writelines(lines_of_config_file) + f.close() + + +def directories_generator(target_dir, telescope_ids,MAGIC_runs): + + """ + Here we create all subdirectories for a given workspace and target name. + """ + + ########################################### + ##################### MC + ########################################### + + if not os.path.exists(target_dir): + os.mkdir(target_dir) + os.mkdir(target_dir+"/DL1") + os.mkdir(target_dir+"/DL1/Observations") + os.mkdir(target_dir+"/DL1/MC") + os.mkdir(target_dir+"/DL1/MC/gammas") + os.mkdir(target_dir+"/DL1/MC/gammadiffuse") + os.mkdir(target_dir+"/DL1/MC/electrons") + os.mkdir(target_dir+"/DL1/MC/protons") + os.mkdir(target_dir+"/DL1/MC/helium") + else: + overwrite = input("MC directory for "+target_dir.split("/")[-1]+" already exists. 
Would you like to overwrite it? [only 'y' or 'n']: ") + if overwrite == "y": + os.system("rm -r "+target_dir) + os.mkdir(target_dir) + os.mkdir(target_dir+"/DL1") + os.mkdir(target_dir+"/DL1/Observations") + os.mkdir(target_dir+"/DL1/MC") + os.mkdir(target_dir+"/DL1/MC/gammas") + os.mkdir(target_dir+"/DL1/MC/gammadiffuse") + os.mkdir(target_dir+"/DL1/MC/electrons") + os.mkdir(target_dir+"/DL1/MC/protons") + os.mkdir(target_dir+"/DL1/MC/helium") + else: + print("Directory not modified.") + + + + ########################################### + ##################### MAGIC + ########################################### + + if telescope_ids[-1] > 0: + if not os.path.exists(target_dir+"/DL1/Observations/M2"): + os.mkdir(target_dir+"/DL1/Observations/M2") + for i in MAGIC_runs: + if not os.path.exists(target_dir+"/DL1/Observations/M2/"+i[0]): + os.mkdir(target_dir+"/DL1/Observations/M2/"+i[0]) + os.mkdir(target_dir+"/DL1/Observations/M2/"+i[0]+"/"+i[1]) + else: + os.mkdir(target_dir+"/DL1/Observations/M2/"+i[0]+"/"+i[1]) + + if telescope_ids[-2] > 0: + if not os.path.exists(target_dir+"/DL1/Observations/M1"): + os.mkdir(target_dir+"/DL1/Observations/M1") + for i in MAGIC_runs: + if not os.path.exists(target_dir+"/DL1/Observations/M1/"+i[0]): + os.mkdir(target_dir+"/DL1/Observations/M1/"+i[0]) + os.mkdir(target_dir+"/DL1/Observations/M1/"+i[0]+"/"+i[1]) + else: + os.mkdir(target_dir+"/DL1/Observations/M1/"+i[0]+"/"+i[1]) + + + + + +def main(): + + """ Here we read the config_general.yaml file and call the functions to generate the necessary directories, bash scripts and launching the jobs.""" + + parser = argparse.ArgumentParser() + + #Here we are simply collecting the parameters from the command line, as input file, output directory, and configuration file + parser.add_argument( + "--partial-analysis", + "-p", + dest="partial_analysis", + type=str, + default="doEverything", + help="You can type 'onlyMAGIC' or 'onlyMC' to run this script only on MAGIC or MC data, 
respectively.", + ) + + args = parser.parse_args() + + + + with open("config_general.yaml", "rb") as f: # "rb" mode opens the file in binary format for reading + config = yaml.safe_load(f) + + + #Below we read the telescope IDs and runs + telescope_ids = list(config["mc_tel_ids"].values()) + SimTel_version = config["general"]["SimTel_version"] + MAGIC_runs_and_dates = config["general"]["MAGIC_runs"] + MAGIC_runs = np.genfromtxt(MAGIC_runs_and_dates,dtype=str,delimiter=',') #READ LIST OF DATES AND RUNS: format table in a way that each line looks like "2020_11_19,5093174" + focal_length = config["general"]["focal_length"] + + #Below we read the data paths + target_dir = str(Path(config["directories"]["workspace_dir"]))+"/"+config["directories"]["target_name"] + MC_gammas = str(Path(config["directories"]["MC_gammas"])) + MC_electrons = str(Path(config["directories"]["MC_electrons"])) + MC_helium = str(Path(config["directories"]["MC_helium"])) + MC_protons = str(Path(config["directories"]["MC_protons"])) + MC_gammadiff = str(Path(config["directories"]["MC_gammadiff"])) + + + print("***** Linking MC paths - this may take a few minutes ******") + print("*** Reducing DL0 to DL1 data - this can take many hours ***") + print("Process name: ",target_dir.split('/')[-2:][1]) + print("To check the jobs submitted to the cluster, type: squeue -n",target_dir.split('/')[-2:][1]) + + directories_generator(target_dir, telescope_ids, MAGIC_runs) #Here we create all the necessary directories in the given workspace and collect the main directory of the target + config_file_gen(telescope_ids,target_dir) + + #Below we run the analysis on the MC data + if not args.partial_analysis=='onlyMAGIC': + lists_and_bash_generator("gammas", target_dir, MC_gammas, SimTel_version, telescope_ids, focal_length) #gammas + #lists_and_bash_generator("electrons", target_dir, MC_electrons, SimTel_version, telescope_ids, focal_length) #electrons + #lists_and_bash_generator("helium", target_dir, MC_helium, 
SimTel_version, telescope_ids, focal_length) #helium + lists_and_bash_generator("protons", target_dir, MC_protons, SimTel_version, telescope_ids, focal_length) #protons + lists_and_bash_generator("gammadiffuse", target_dir, MC_gammadiff, SimTel_version, telescope_ids, focal_length) #gammadiffuse + + #Here we do the MC DL0 to DL1 conversion: + list_of_MC = glob.glob("linking_MC_*s.sh") + + #os.system("RES=$(sbatch --parsable linking_MC_gammas_paths.sh) && sbatch --dependency=afterok:$RES MC_dl0_to_dl1.sh") + + for n,run in enumerate(list_of_MC): + if n == 0: + launch_jobs_MC = f"linking{n}=$(sbatch --parsable {run}) && running{n}=$(sbatch --parsable --dependency=afterany:$linking{n} {run[0:-3]}_r.sh)" + else: + launch_jobs_MC = launch_jobs_MC + f" && linking{n}=$(sbatch --parsable --dependency=afterany:$running{n-1} {run}) && running{n}=$(sbatch --parsable --dependency=afterany:$linking{n} {run[0:-3]}_r.sh)" + + + os.system(launch_jobs_MC) + + #Below we run the analysis on the MAGIC data + if not args.partial_analysis=='onlyMC': + lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs) #MAGIC real data + if (telescope_ids[-2] > 0) or (telescope_ids[-1] > 0): + + list_of_MAGIC_runs = glob.glob("MAGIC-*.sh") + + for n,run in enumerate(list_of_MAGIC_runs): + if n == 0: + launch_jobs = f"linking=$(sbatch --parsable linking_MAGIC_data_paths.sh) && RES{n}=$(sbatch --parsable --dependency=afterany:$linking {run})" + else: + launch_jobs = launch_jobs + f" && RES{n}=$(sbatch --parsable --dependency=afterany:$RES{n-1} {run})" + + os.system(launch_jobs) + +if __name__ == "__main__": + main() + + + + + + + diff --git a/magicctapipe/scripts/lst1_magic/stereo_events.py b/magicctapipe/scripts/lst1_magic/stereo_events.py new file mode 100644 index 000000000..4227a857c --- /dev/null +++ b/magicctapipe/scripts/lst1_magic/stereo_events.py @@ -0,0 +1,183 @@ +""" +This scripts generates and runs the bashscripts +to compute the stereo parameters of DL1 MC and +Coincident 
MAGIC+LST data files. + +Usage: +$ python stereo_events.py + +""" + +import os +import numpy as np +import glob +import yaml +import logging +from pathlib import Path + +logger = logging.getLogger(__name__) +logger.addHandler(logging.StreamHandler()) +logger.setLevel(logging.INFO) + +def configfile_stereo(ids, target_dir): + + """ + This function creates the configuration file needed for the event stereo step + + Parameters + ---------- + ids: list + list of telescope IDs + target_dir: str + Path to the working directory + """ + + f = open(target_dir+'/config_stereo.yaml','w') + f.write("mc_tel_ids:\n LST-1: "+str(ids[0])+"\n LST-2: "+str(ids[1])+"\n LST-3: "+str(ids[2])+"\n LST-4: "+str(ids[3])+"\n MAGIC-I: "+str(ids[4])+"\n MAGIC-II: "+str(ids[5])+"\n\n") + f.write('stereo_reco:\n quality_cuts: "(intensity > 50) & (width > 0)"\n theta_uplim: "6 arcmin"\n') + f.close() + + +def bash_stereo(target_dir): + + """ + This function generates the bashscript for running the stereo analysis. + + Parameters + ---------- + target_dir: str + Path to the working directory + """ + + process_name = target_dir.split("/")[-2:][1] + + if not os.path.exists(target_dir+"/DL1/Observations/Coincident_stereo"): + os.mkdir(target_dir+"/DL1/Observations/Coincident_stereo") + + listOfNightsLST = np.sort(glob.glob(target_dir+"/DL1/Observations/Coincident/*")) + + for nightLST in listOfNightsLST: + stereoDir = target_dir+"/DL1/Observations/Coincident_stereo/"+nightLST.split('/')[-1] + if not os.path.exists(stereoDir): + os.mkdir(stereoDir) + + os.system(f"ls {nightLST}/*LST*.h5 > {nightLST}/list_coin.txt") #generating a list with the DL1 coincident data files. 
+ process_size = len(np.genfromtxt(nightLST+"/list_coin.txt",dtype="str")) - 1 + + f = open(f"StereoEvents_{nightLST.split('/')[-1]}.sh","w") + f.write("#!/bin/sh\n\n") + f.write("#SBATCH -p short\n") + f.write("#SBATCH -J "+process_name+"_stereo\n") + f.write(f"#SBATCH --array=0-{process_size}%100\n") + f.write("#SBATCH -N 1\n\n") + f.write("ulimit -l unlimited\n") + f.write("ulimit -s unlimited\n") + f.write("ulimit -a\n\n") + + f.write(f"export INPUTDIR={nightLST}\n") + f.write(f"export OUTPUTDIR={stereoDir}\n") + f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") + f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") + f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") + f.write(f"conda run -n magic-lst python lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") + f.close() + +def bash_stereoMC(target_dir, identification): + + """ + This function generates the bashscript for running the stereo analysis. + + Parameters + ---------- + target_dir: str + Path to the working directory + identification: str + Particle name. Options: protons, gammadiffuse + """ + + process_name = target_dir.split("/")[-2:][1] + + if not os.path.exists(target_dir+f"/DL1/MC/{identification}/Merged/StereoMerged"): + os.mkdir(target_dir+f"/DL1/MC/{identification}/Merged/StereoMerged") + + inputdir = target_dir+f"/DL1/MC/{identification}/Merged" + + os.system(f"ls {inputdir}/dl1*.h5 > {inputdir}/list_coin.txt") #generating a list with the DL1 coincident data files. 
+ process_size = len(np.genfromtxt(inputdir+"/list_coin.txt",dtype="str")) - 1 + + f = open(f"StereoEvents_{identification}.sh","w") + f.write("#!/bin/sh\n\n") + f.write("#SBATCH -p xxl\n") + f.write("#SBATCH -J "+process_name+"_stereo\n") + f.write(f"#SBATCH --array=0-{process_size}%100\n") + f.write('#SBATCH --mem=30g\n') + f.write("#SBATCH -N 1\n\n") + f.write("ulimit -l unlimited\n") + f.write("ulimit -s unlimited\n") + f.write("ulimit -a\n\n") + + f.write(f"export INPUTDIR={inputdir}\n") + f.write(f"export OUTPUTDIR={inputdir}/StereoMerged\n") + f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") + f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") + f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") + f.write(f"conda run -n magic-lst python lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") + f.close() + + + + + +def main(): + + """ + Here we read the config_general.yaml file and call the functions defined above. 
+ """ + + + with open("config_general.yaml", "rb") as f: # "rb" mode opens the file in binary format for reading + config = yaml.safe_load(f) + + + target_dir = str(Path(config["directories"]["workspace_dir"]))+"/"+config["directories"]["target_name"] + telescope_ids = list(config["mc_tel_ids"].values()) + + print("***** Generating file config_stereo.yaml...") + print("***** This file can be found in ",target_dir) + configfile_stereo(telescope_ids, target_dir) + + print("***** Generating the bashscript...") + bash_stereo(target_dir) + + print("***** Generating the bashscript for MCs...") + bash_stereoMC(target_dir,"gammadiffuse") + bash_stereoMC(target_dir,"gammas") + bash_stereoMC(target_dir,"protons") + bash_stereoMC(target_dir,"protons_test") + + print("***** Submitting processes to the cluster...") + print("Process name: "+target_dir.split("/")[-2:][1]+"_stereo") + print("To check the jobs submitted to the cluster, type: squeue -n "+target_dir.split("/")[-2:][1]+"_stereo") + + #Below we run the bash scripts to find the stereo events + list_of_stereo_scripts = np.sort(glob.glob("StereoEvents_*.sh")) + + for n,run in enumerate(list_of_stereo_scripts): + if n == 0: + launch_jobs = f"stereo{n}=$(sbatch --parsable {run})" + else: + launch_jobs = launch_jobs + f" && stereo{n}=$(sbatch --parsable --dependency=afterany:$stereo{n-1} {run})" + + #print(launch_jobs) + os.system(launch_jobs) + +if __name__ == "__main__": + main() + + + + + + + + From f93a73a56503a8c40155de721d8c281b9db3cb31 Mon Sep 17 00:00:00 2001 From: Raniere Date: Tue, 26 Sep 2023 15:02:46 +0200 Subject: [PATCH 02/76] Add files via upload --- environment.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/environment.yml b/environment.yml index fd30be377..92b36356f 100644 --- a/environment.yml +++ b/environment.yml @@ -1,14 +1,13 @@ # A conda environment with all useful package for ctapipe developers -name: magic-lst1 +name: magic-lst channels: - default - conda-forge 
dependencies: - - python=3.8 + - python - pip - black - nbsphinx - - numpy=1.21 - ctapipe=0.12 - gammapy=0.19.0 - cython From a22c8723dfacc831dcd84ca9641dbfdcdade2e59 Mon Sep 17 00:00:00 2001 From: Raniere Date: Tue, 26 Sep 2023 15:03:36 +0200 Subject: [PATCH 03/76] Add files via upload --- magicctapipe/io/__init__.py | 4 + magicctapipe/io/gadf.py | 58 +++++++- magicctapipe/io/io.py | 281 +++++++++++++++++++++++++++++++----- 3 files changed, 303 insertions(+), 40 deletions(-) diff --git a/magicctapipe/io/__init__.py b/magicctapipe/io/__init__.py index fa5fb40f7..a2d0a5491 100644 --- a/magicctapipe/io/__init__.py +++ b/magicctapipe/io/__init__.py @@ -13,6 +13,7 @@ create_pointing_hdu, ) from .io import ( + telescope_combinations, format_object, get_dl2_mean, get_stereo_events, @@ -22,6 +23,7 @@ load_magic_dl1_data_files, load_mc_dl2_data_file, load_train_data_files, + load_train_data_files_tel, save_pandas_data_in_table, ) @@ -33,6 +35,7 @@ "create_gh_cuts_hdu", "create_gti_hdu", "create_pointing_hdu", + "telescope_combinations", "format_object", "get_dl2_mean", "get_stereo_events", @@ -42,5 +45,6 @@ "load_magic_dl1_data_files", "load_mc_dl2_data_file", "load_train_data_files", + "load_train_data_files_tel", "save_pandas_data_in_table", ] diff --git a/magicctapipe/io/gadf.py b/magicctapipe/io/gadf.py index f51b8807d..218e4b344 100644 --- a/magicctapipe/io/gadf.py +++ b/magicctapipe/io/gadf.py @@ -10,7 +10,6 @@ from astropy.table import QTable from astropy.time import Time from magicctapipe import __version__ -from magicctapipe.io.io import TEL_COMBINATIONS from magicctapipe.utils.functions import HEIGHT_ORM, LAT_ORM, LON_ORM from pyirf.binning import split_bin_lo_hi @@ -21,6 +20,7 @@ "create_pointing_hdu", ] + logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) @@ -29,6 +29,55 @@ MJDREF = Time(0, format="unix", scale="utc") +def telescope_combinations(config): + """ + Generates all possible telescope 
combinations without repetition. E.g.: "LST1_M1", "LST2_LST4_M2", "LST1_LST2_LST3_M1" and so on. + + Parameters + ---------- + config: dict + yaml file with information about the telescope IDs. Typically evoked from "config_general.yaml" in the main scripts. + + Returns + ------- + TEL_NAMES: dict + Dictionary with telescope IDs and names. + TEL_COMBINATIONS: dict + Dictionary with all telescope combinations with no repetions. + """ + + + TEL_NAMES = {} + for k, v in config["mc_tel_ids"].items(): #Here we swap the dictionary keys and values just for convenience. + if v > 0: + TEL_NAMES[v] = k + + TEL_COMBINATIONS = {} + keys = list(TEL_NAMES.keys()) + + def recursive_solution(current_tel, current_comb): + + if current_tel == len(keys): #The function stops once we reach the last telescope + return + + current_comb_name = current_comb[0] + '_' + TEL_NAMES[keys[current_tel]] #Name of the combo (at this point it can even be a single telescope) + current_comb_list = current_comb[1] + [keys[current_tel]] #List of telescopes (including individual telescopes) + + if len(current_comb_list) > 1: #We save them in the new dictionary excluding the single-telescope values + TEL_COMBINATIONS[current_comb_name[1:]] = current_comb_list; + + current_comb = [current_comb_name, current_comb_list] #We save the current results in this varible to recal the function recursively ("for" loop below) + + for i in range(1, len(keys)-current_tel): + recursive_solution(current_tel+i, current_comb) + + + for key in range(len(keys)): + recursive_solution(key, ['',[]]) + + + return TEL_NAMES, TEL_COMBINATIONS + @u.quantity_input def create_gh_cuts_hdu( gh_cuts, reco_energy_bins: u.TeV, fov_offset_bins: u.deg, **header_cards @@ -90,7 +139,7 @@ def create_gh_cuts_hdu( def create_event_hdu( - event_table, on_time, deadc, source_name, source_ra=None, source_dec=None + event_table, config, on_time, deadc, source_name, source_ra=None, source_dec=None ): """ Creates a fits binary table HDU for shower 
events. @@ -99,6 +148,8 @@ def create_event_hdu( ---------- event_table: astropy.table.table.QTable Table of the DL2 events surviving gammaness cuts + config: dict + yaml file with information about the telescope IDs. Typically called evoked from "config_DL3.yaml" in the main scripts. on_time: astropy.table.table.QTable ON time of the input data deadc: float @@ -125,7 +176,8 @@ def create_event_hdu( If the source name cannot be resolved and also either or both of source RA/Dec coordinate is set to None """ - + _, TEL_COMBINATIONS = telescope_combinations(config) + mjdreff, mjdrefi = np.modf(MJDREF.mjd) time_start = Time(event_table["timestamp"][0], format="unix", scale="utc") diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index 72d496795..b4bd0ddd2 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -23,12 +23,15 @@ from pyirf.utils import calculate_source_fov_offset, calculate_theta __all__ = [ + "telescope_combinations", "format_object", + "get_stereo", "get_stereo_events", "get_dl2_mean", "load_lst_dl1_data_file", "load_magic_dl1_data_files", "load_train_data_files", + "load_train_data_files_tel", "load_mc_dl2_data_file", "load_dl2_data_file", "load_irf_files", @@ -39,17 +42,6 @@ logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) -# The telescope IDs and names -TEL_NAMES = {1: "LST-1", 2: "MAGIC-I", 3: "MAGIC-II"} - -# The telescope combination types -TEL_COMBINATIONS = { - "M1_M2": [2, 3], # combo_type = 0 - "LST1_M1": [1, 2], # combo_type = 1 - "LST1_M2": [1, 3], # combo_type = 2 - "LST1_M1_M2": [1, 2, 3], # combo_type = 3 -} - # The pandas multi index to classify the events simulated by different # telescope pointing directions but have the same observation ID GROUP_INDEX_TRAIN = ["obs_id", "event_id", "true_alt", "true_az"] @@ -60,12 +52,61 @@ # The upper limit of the trigger time differences of consecutive events, # used when calculating the ON time and dead time correction factor -TIME_DIFF_UPLIM = 1.0 * u.s 
+TIME_DIFF_UPLIM = 0.1 * u.s # The LST-1 and MAGIC readout dead times DEAD_TIME_LST = 7.6 * u.us DEAD_TIME_MAGIC = 26 * u.us +def telescope_combinations(config): + """ + Generates all possible telescope combinations without repetition. E.g.: "LST1_M1", "LST2_LST4_M2", "LST1_LST2_LST3_M1" and so on. + + Parameters + ---------- + config: dict + yaml file with information about the telescope IDs. Typically evoked from "config_general.yaml" in the main scripts. + + Returns + ------- + TEL_NAMES: dict + Dictionary with telescope IDs and names. + TEL_COMBINATIONS: dict + Dictionary with all telescope combinations with no repetions. + """ + + + TEL_NAMES = {} + for k, v in config["mc_tel_ids"].items(): #Here we swap the dictionary keys and values just for convenience. + if v > 0: + TEL_NAMES[v] = k + + TEL_COMBINATIONS = {} + keys = list(TEL_NAMES.keys()) + + def recursive_solution(current_tel, current_comb): + + if current_tel == len(keys): #The function stops once we reach the last telescope + return + + current_comb_name = current_comb[0] + '_' + TEL_NAMES[keys[current_tel]] #Name of the combo (at this point it can even be a single telescope) + current_comb_list = current_comb[1] + [keys[current_tel]] #List of telescopes (including individual telescopes) + + if len(current_comb_list) > 1: #We save them in the new dictionary excluding the single-telescope values + TEL_COMBINATIONS[current_comb_name[1:]] = current_comb_list; + + current_comb = [current_comb_name, current_comb_list] #We save the current results in this varible to recal the function recursively ("for" loop below) + + for i in range(1, len(keys)-current_tel): + recursive_solution(current_tel+i, current_comb) + + + for key in range(len(keys)): + recursive_solution(key, ['',[]]) + + + return TEL_NAMES, TEL_COMBINATIONS + def format_object(input_object): """ @@ -92,10 +133,51 @@ def format_object(input_object): string = string.replace("'", "").replace(",", "") return string + +def get_stereo( + event_data, 
config, quality_cuts=None, group_index=["obs_id", "event_id"] +): + + """ + Gets the stereo events surviving specified quality cuts without overwriting the + multiplicity and combo type. This function is useful when loading the DL2 data + when we apply one RF per telescope (and not per combo type). + + Parameters + ---------- + event_data: pandas.core.frame.DataFrame + Data frame of shower events + config: dict + Read from the yaml file with information about the telescope IDs. Typically called "config_general.yaml" + quality_cuts: str + Quality cuts applied to the input data + group_index: list + Index to group telescope events + + + Returns + ------- + event_data_stereo: pandas.core.frame.DataFrame + Data frame of the stereo events surviving the quality cuts + """ + + TEL_NAMES, TEL_COMBINATIONS = telescope_combinations(config) + + event_data_stereo = event_data.copy() + + # Apply the quality cuts + if quality_cuts is not None: + event_data_stereo.query(quality_cuts, inplace=True) + max_multiplicity=len(TEL_NAMES.keys()) + # Extract stereo events + event_data_stereo["multiplicity"] = event_data_stereo.groupby(group_index).size() + event_data_stereo.query(f"multiplicity >1 & multiplicity <= {max_multiplicity}", inplace=True) + + return event_data_stereo def get_stereo_events( - event_data, quality_cuts=None, group_index=["obs_id", "event_id"] + event_data, config, quality_cuts=None, group_index=["obs_id", "event_id"] ): """ Gets the stereo events surviving specified quality cuts. @@ -107,26 +189,32 @@ def get_stereo_events( ---------- event_data: pandas.core.frame.DataFrame Data frame of shower events + config: dict + Read from the yaml file with information about the telescope IDs. 
Typically called "config_general.yaml" quality_cuts: str Quality cuts applied to the input data group_index: list Index to group telescope events + Returns ------- event_data_stereo: pandas.core.frame.DataFrame Data frame of the stereo events surviving the quality cuts """ - + + TEL_NAMES, TEL_COMBINATIONS = telescope_combinations(config) + event_data_stereo = event_data.copy() # Apply the quality cuts if quality_cuts is not None: event_data_stereo.query(quality_cuts, inplace=True) - + + max_multiplicity=len(TEL_NAMES.keys()) # Extract stereo events event_data_stereo["multiplicity"] = event_data_stereo.groupby(group_index).size() - event_data_stereo.query("multiplicity == [2, 3]", inplace=True) + event_data_stereo.query(f"multiplicity >1 & multiplicity <= {max_multiplicity}", inplace=True) # Check the total number of events n_events_total = len(event_data_stereo.groupby(group_index).size()) @@ -373,7 +461,7 @@ def load_lst_dl1_data_file(input_file): return event_data, subarray -def load_magic_dl1_data_files(input_dir): +def load_magic_dl1_data_files(input_dir, config): """ Loads MAGIC DL1 data files for the event coincidence with LST-1. @@ -381,6 +469,8 @@ def load_magic_dl1_data_files(input_dir): ---------- input_dir: str Path to a directory where input MAGIC DL1 data files are stored + config: dict + yaml file with information about the telescope IDs. 
Typically called "config_general.yaml" Returns ------- @@ -394,7 +484,9 @@ def load_magic_dl1_data_files(input_dir): FileNotFoundError If any DL1 data files are not found in the input directory """ - + + TEL_NAMES, _ = telescope_combinations(config) + # Find the input files file_mask = f"{input_dir}/dl1_*.h5" @@ -446,7 +538,7 @@ def load_magic_dl1_data_files(input_dir): def load_train_data_files( - input_dir, offaxis_min=None, offaxis_max=None, true_event_class=None + input_dir, config, offaxis_min=None, offaxis_max=None, true_event_class=None ): """ Loads DL1-stereo data files and separates the shower events per @@ -456,6 +548,8 @@ def load_train_data_files( ---------- input_dir: str Path to a directory where input DL1-stereo files are stored + config: dict + yaml file with information about the telescope IDs. Typically called "config_general.yaml" offaxis_min: str Minimum shower off-axis angle allowed, whose format should be acceptable by `astropy.units.quantity.Quantity` @@ -464,6 +558,7 @@ def load_train_data_files( acceptable by `astropy.units.quantity.Quantity` true_event_class: int True event class of the input events + Returns ------- @@ -477,6 +572,8 @@ def load_train_data_files( If any DL1-stereo data files are not found in the input directory """ + + _, TEL_COMBINATIONS = telescope_combinations(config) # Find the input files file_mask = f"{input_dir}/dl1_stereo_*.h5" @@ -515,7 +612,7 @@ def load_train_data_files( if true_event_class is not None: event_data["true_event_class"] = true_event_class - event_data = get_stereo_events(event_data, group_index=GROUP_INDEX_TRAIN) + event_data = get_stereo_events(event_data, config, group_index=GROUP_INDEX_TRAIN) data_train = {} @@ -529,12 +626,101 @@ def load_train_data_files( return data_train -def load_mc_dl2_data_file(input_file, quality_cuts, event_type, weight_type_dl2): +def load_train_data_files_tel(input_dir, config, offaxis_min=None, offaxis_max=None, true_event_class=None): + """ + Loads DL1-stereo data 
files and separates the shower events per + telescope combination type for training RFs. + + Parameters + ---------- + input_dir: str + Path to a directory where input DL1-stereo files are stored + config: dict + yaml file with information about the telescope IDs. Typically called "config_general.yaml" + offaxis_min: str + Minimum shower off-axis angle allowed, whose format should be + acceptable by `astropy.units.quantity.Quantity` + offaxis_max: str + Maximum shower off-axis angle allowed, whose format should be + acceptable by `astropy.units.quantity.Quantity` + true_event_class: int + True event class of the input events + + + Returns + ------- + data_train: dict + Data frames of the shower events separated telescope-wise + + + Raises + ------ + FileNotFoundError + If any DL1-stereo data files are not found in the input + directory + """ + + TEL_NAMES, _ = telescope_combinations(config) + + # Find the input files + file_mask = f"{input_dir}/dl1_stereo_*.h5" + + input_files = glob.glob(file_mask) + input_files.sort() + + if len(input_files) == 0: + raise FileNotFoundError( + "Could not find any DL1-stereo data files in the input directory." 
+ ) + + # Load the input files + logger.info("\nThe following DL1-stereo data files are found:") + + data_list = [] + + for input_file in input_files: + logger.info(input_file) + + df_events = pd.read_hdf(input_file, key="events/parameters") + data_list.append(df_events) + + event_data = pd.concat(data_list) + event_data.set_index(GROUP_INDEX_TRAIN, inplace=True) + event_data.sort_index(inplace=True) + + if offaxis_min is not None: + offaxis_min = u.Quantity(offaxis_min).to_value("deg") + event_data.query(f"off_axis >= {offaxis_min}", inplace=True) + + if offaxis_max is not None: + offaxis_max = u.Quantity(offaxis_max).to_value("deg") + event_data.query(f"off_axis <= {offaxis_max}", inplace=True) + + if true_event_class is not None: + event_data["true_event_class"] = true_event_class + + event_data = get_stereo_events(event_data, config, group_index=GROUP_INDEX_TRAIN) + + data_train = {} + + # Loop over every telescope + for tel_id in TEL_NAMES.keys(): + df_events = event_data.query(f"tel_id == {tel_id}") + + if not df_events.empty: + data_train[tel_id] = df_events + + return data_train + + +def load_mc_dl2_data_file(config, input_file, quality_cuts, event_type, weight_type_dl2): """ Loads a MC DL2 data file for creating the IRFs. Parameters ---------- + config: dict + evoked from an yaml file with information about the telescope IDs. 
Typically called "config_RF.yaml" input_file: str Path to an input MC DL2 data file quality_cuts: str @@ -564,28 +750,38 @@ def load_mc_dl2_data_file(input_file, quality_cuts, event_type, weight_type_dl2) If the input event type is not known """ + TEL_NAMES, TEL_COMBINATIONS = telescope_combinations(config) + combo_types = np.asarray(range(len(TEL_COMBINATIONS))) + three_or_more = [] + for n,combination in enumerate(TEL_COMBINATIONS.values()): + if len(combination) >= 3: + three_or_more.append(n) + + # Load the input file df_events = pd.read_hdf(input_file, key="events/parameters") df_events.set_index(["obs_id", "event_id", "tel_id"], inplace=True) df_events.sort_index(inplace=True) - df_events = get_stereo_events(df_events, quality_cuts) + df_events = get_stereo(df_events, config, quality_cuts) logger.info(f"\nExtracting the events of the '{event_type}' type...") if event_type == "software": # The events of the MAGIC-stereo combination are excluded - df_events.query("(combo_type > 0) & (magic_stereo == True)", inplace=True) + df_events.query(f"(combo_type < {combo_types[-1]}) & (magic_stereo == True)", inplace=True) + + elif event_type == "software_3tels_or_more": + df_events.query(f"combo_type == {three_or_more}", inplace=True) - elif event_type == "software_only_3tel": - df_events.query("combo_type == 3", inplace=True) + elif event_type == "software_6_tel": + df_events.query(f"combo_type < {combo_types[-1]}", inplace=True) elif event_type == "magic_only": - df_events.query("combo_type == 0", inplace=True) + df_events.query(f"combo_type == {combo_types[-1]}", inplace=True) elif event_type != "hardware": raise ValueError(f"Unknown event type '{event_type}'.") - n_events = len(df_events.groupby(["obs_id", "event_id"]).size()) logger.info(f"--> {n_events} stereo events") @@ -653,12 +849,14 @@ def load_mc_dl2_data_file(input_file, quality_cuts, event_type, weight_type_dl2) return event_table, pointing, sim_info -def load_dl2_data_file(input_file, quality_cuts, 
event_type, weight_type_dl2): +def load_dl2_data_file(config, input_file, quality_cuts, event_type, weight_type_dl2): """ Loads a DL2 data file for processing to DL3. Parameters ---------- + config: dict + evoked from an yaml file with information about the telescope IDs. Typically called "config_DL3.yaml" input_file: str Path to an input DL2 data file quality_cuts: str @@ -687,25 +885,36 @@ def load_dl2_data_file(input_file, quality_cuts, event_type, weight_type_dl2): ValueError If the input event type is not known """ - + + TEL_NAMES, TEL_COMBINATIONS = telescope_combinations(config) + combo_types = np.asarray(range(len(TEL_COMBINATIONS))) + three_or_more = [] + for n,combination in enumerate(TEL_COMBINATIONS.values()): + if len(combination) >= 3: + three_or_more.append(n) + + # Load the input file event_data = pd.read_hdf(input_file, key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - event_data = get_stereo_events(event_data, quality_cuts) + event_data = get_stereo(event_data, config, quality_cuts) logger.info(f"\nExtracting the events of the '{event_type}' type...") if event_type == "software": # The events of the MAGIC-stereo combination are excluded - event_data.query("combo_type > 0", inplace=True) + event_data.query(f"combo_type < {combo_types[-1]}", inplace=True) - elif event_type == "software_only_3tel": - event_data.query("combo_type == 3", inplace=True) + elif event_type == "software_3tels_or_more": + event_data.query(f"combo_type == {three_or_more}", inplace=True) + elif event_type == "software_6_tel": + df_events.query(f"combo_type < {combo_types[-1]}", inplace=True) + elif event_type == "magic_only": - event_data.query("combo_type == 0", inplace=True) + event_data.query(f"combo_type == {combo_types[-1]}", inplace=True) elif event_type == "hardware": logger.warning( @@ -840,7 +1049,6 @@ def load_irf_files(input_dir_irf): "migration_bins": [], "source_offset_bins": [], 
"bkg_fov_offset_bins": [], - "file_names": [], } # Find the input files @@ -862,7 +1070,7 @@ def load_irf_files(input_dir_irf): for input_file in input_files_irf: logger.info(input_file) irf_hdus = fits.open(input_file) - irf_data["file_names"].append(input_file) + # Read the header header = irf_hdus["EFFECTIVE AREA"].header @@ -980,7 +1188,6 @@ def load_irf_files(input_dir_irf): irf_data["grid_points"] = np.array(irf_data["grid_points"]) irf_data["energy_dispersion"] = np.array(irf_data["energy_dispersion"]) irf_data["migration_bins"] = np.array(irf_data["migration_bins"]) - irf_data["file_names"] = np.array(irf_data["file_names"]) if "gh_cuts" in irf_data: irf_data["gh_cuts"] = np.array(irf_data["gh_cuts"]) From d9c69d8e43cb1c1301fb0beb10a1419deedd45f5 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Tue, 26 Sep 2023 16:07:36 +0200 Subject: [PATCH 04/76] "Merge" with master --- environment.yml | 3 +- magicctapipe/io/io.py | 7 +- .../lst1_magic_event_coincidence.py | 175 +++++++++++++++--- .../scripts/lst1_magic/magic_calib_to_dl1.py | 18 +- .../lst1_magic/setting_up_config_and_dir.py | 5 +- 5 files changed, 171 insertions(+), 37 deletions(-) diff --git a/environment.yml b/environment.yml index 92b36356f..6ee574701 100644 --- a/environment.yml +++ b/environment.yml @@ -4,10 +4,11 @@ channels: - default - conda-forge dependencies: - - python + - python=3.8 - pip - black - nbsphinx + - numpy=1.21 - ctapipe=0.12 - gammapy=0.19.0 - cython diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index b4bd0ddd2..331bbda95 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -52,7 +52,7 @@ # The upper limit of the trigger time differences of consecutive events, # used when calculating the ON time and dead time correction factor -TIME_DIFF_UPLIM = 0.1 * u.s +TIME_DIFF_UPLIM = 1.0 * u.s # The LST-1 and MAGIC readout dead times DEAD_TIME_LST = 7.6 * u.us @@ -911,7 +911,7 @@ def load_dl2_data_file(config, input_file, quality_cuts, event_type, weight_type 
event_data.query(f"combo_type == {three_or_more}", inplace=True) elif event_type == "software_6_tel": - df_events.query(f"combo_type < {combo_types[-1]}", inplace=True) + event_data.query(f"combo_type < {combo_types[-1]}", inplace=True) elif event_type == "magic_only": event_data.query(f"combo_type == {combo_types[-1]}", inplace=True) @@ -1049,6 +1049,7 @@ def load_irf_files(input_dir_irf): "migration_bins": [], "source_offset_bins": [], "bkg_fov_offset_bins": [], + "file_names": [], } # Find the input files @@ -1070,6 +1071,7 @@ def load_irf_files(input_dir_irf): for input_file in input_files_irf: logger.info(input_file) irf_hdus = fits.open(input_file) + irf_data["file_names"].append(input_file) # Read the header header = irf_hdus["EFFECTIVE AREA"].header @@ -1099,6 +1101,7 @@ def load_irf_files(input_dir_irf): irf_data["energy_bins"].append(energy_bins) irf_data["fov_offset_bins"].append(fov_offset_bins) irf_data["migration_bins"].append(migration_bins) + irf_data["file_names"] = np.array(irf_data["file_names"]) # Read additional IRF data and bins if they exist if "PSF" in irf_hdus: diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index 2cf258a2a..779d5cdb0 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -27,13 +27,18 @@ accidental coincidence rate as much as possible by keeping the number of actual coincident events. -Please note that for the data taken before 12th June 2021, a coincidence -peak should be found around the time offset of -3.1 us, which can be -explained by the trigger time delays of both systems. For the data taken -after that date, however, there is an additional global offset appeared -and then the peak is shifted to the time offset of -6.5 us. Thus, it -would be needed to tune the offset scan region depending on the date -when data were taken. 
The reason of the shift is under investigation. +Please note that the time offset depends on the date of observations +as summarized below: +* before June 12 2021: -3.1 us +* June 13 2021 to Feb 28 2023: -6.5 us +* March 10 2023 to March 30 2023: -76039.3 us +* April 13 2023 to August 2023: -25.1 us +* after Sep 11 2023 : -6.2 us +By default, pre offset search is performed using large shower events. +The possible time offset is found among all possible combinations of +time offsets using those events. Finally, the time offset scan is performed +around the possible offset found by the pre offset search. Instead of that, +you can also define the offset scan range in the configuration file. Usage per single LST data file (indicated if you want to do tests): $ python lst1_magic_event_coincidence.py @@ -181,24 +186,23 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): window_half_width = u.Quantity(window_half_width).to("ns") window_half_width = u.Quantity(window_half_width.round(), dtype=int) + pre_offset_search = False + if "pre_offset_search" in config_coinc: + pre_offset_search = config_coinc["pre_offset_search"] + + if pre_offset_search: + logger.info("\nPre offset search will be performed.") + n_pre_offset_search_events = config_coinc["n_pre_offset_search_events"] + else: + logger.info("\noffset scan range defined in the config file will be used.") + offset_start = u.Quantity(config_coinc["time_offset"]["start"]) + offset_stop = u.Quantity(config_coinc["time_offset"]["stop"]) - logger.info("\nTime offsets:") - logger.info(format_object(config_coinc["time_offset"])) - - offset_start = u.Quantity(config_coinc["time_offset"]["start"]) - offset_stop = u.Quantity(config_coinc["time_offset"]["stop"]) - - time_offsets = np.arange( - start=offset_start.to_value("ns").round(), - stop=offset_stop.to_value("ns").round(), - step=TIME_ACCURACY.to_value("ns").round(), - ) - - time_offsets = u.Quantity(time_offsets.round(), unit="ns", dtype=int) - + 
event_data = pd.DataFrame() features = pd.DataFrame() - profiles = pd.DataFrame(data={"time_offset": time_offsets.to_value("us").round(1)}) + + profiles = pd.DataFrame(data={"time_offset": []}) # Arrange the LST timestamps. They are stored in the UNIX format in # units of seconds with 17 digits, 10 digits for the integral part @@ -232,12 +236,125 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): df_magic["timestamp"] = timestamps_magic.to_value("s") df_magic.drop(["time_sec", "time_nanosec"], axis=1, inplace=True) - # Extract the MAGIC events taken when LST observed - logger.info(f"\nExtracting the {tel_name} events taken when LST observed...") + # Pre offset search is performed to define the offset scan region. + # First, N events are extracted from largest intensity events for LST and + # MAGIC. Then, it counts the number of coincident events within a defined + # window after shifting all possible combinations (N x N) of time offsets. + if pre_offset_search: + logger.info( + "\nPre offset search using large-intensity shower events is ongoing..." + ) + + logger.info( + f"\nExtracting the {tel_name} events taken when LST-1 observed for pre offset search..." + ) + + time_lolim = timestamps_lst[0] - window_half_width + time_uplim = timestamps_lst[-1] + window_half_width + cond_lolim = timestamps_magic >= time_lolim + cond_uplim = timestamps_magic <= time_uplim + + mask_lst_obs_window = np.logical_and(cond_lolim, cond_uplim) + n_events_magic = np.count_nonzero(mask_lst_obs_window) + + if n_events_magic == 0: + logger.info(f"--> No {tel_name} events are found. 
Skipping...") + continue + + logger.info(f"--> {n_events_magic} events are found.") + + # Extract indexes of MAGIC large shower events + index_large_intensity_magic = np.argsort( + df_magic["intensity"][mask_lst_obs_window] + )[::-1][:n_pre_offset_search_events] + + # If LST/MAGIC observations are not completely overlapped, only small + # numbers of MAGIC events are left for the pre offset search. + # To find large-intensity showers within the same time window, + # time cut around MAGIC observations is applied to the LST data set. + time_lolim = timestamps_magic[mask_lst_obs_window][0] - window_half_width + time_uplim = timestamps_magic[mask_lst_obs_window][-1] + window_half_width + + cond_lolim = timestamps_lst >= time_lolim + cond_uplim = timestamps_lst <= time_uplim + + mask_magic_obs_window = np.logical_and(cond_lolim, cond_uplim) + + if np.count_nonzero(mask_magic_obs_window) == 0: + logger.info( + f"\nNo LST events are found around {tel_name} events. Skipping..." + ) + continue + + # Extract indexes of LST large shower events + index_large_intensity_lst = np.argsort( + event_data_lst["intensity"][mask_magic_obs_window] + )[::-1][:n_pre_offset_search_events] + + # Crate an array of all combinations of [MAGIC timestamp, LST timestamp] + timestamps_magic_lst_combination = np.array( + np.meshgrid( + timestamps_magic[mask_lst_obs_window][ + index_large_intensity_magic + ].value, + timestamps_lst[mask_magic_obs_window][ + index_large_intensity_lst + ].value, + ) + ).reshape(2, -1) + + # Compute all combinations of time offset between MAGIC and LST + time_offsets_pre_search = ( + timestamps_magic_lst_combination[0] + - timestamps_magic_lst_combination[1] + ) + + time_offsets_pre_search = u.Quantity( + time_offsets_pre_search.round(), unit="ns", dtype=int + ) + + n_coincidences_pre_search = [ + np.sum( + np.abs(time_offsets_pre_search - time_offset).value + < window_half_width.value + ) + for time_offset in time_offsets_pre_search + ] + + n_coincidences_pre_search = 
np.array(n_coincidences_pre_search) + + offset_at_max_pre_search = time_offsets_pre_search[ + n_coincidences_pre_search == n_coincidences_pre_search.max() + ].mean() + offset_at_max_pre_search = offset_at_max_pre_search.to("us").round(1) + + logger.info( + f"\nPre offset search finds {offset_at_max_pre_search} as a possible offset" + ) + + # offset scan region is defined as 3 x half window width + # around the offset_at_max to cover "full window width" which will + # be used to compute weighted average of the time offset + offset_start = offset_at_max_pre_search - 3 * window_half_width + offset_stop = offset_at_max_pre_search + 3 * window_half_width + + logger.info("\nTime offsets scan region:") + logger.info(f" start: {offset_start.to('us').round(1)}") + logger.info(f" stop: {offset_stop.to('us').round(1)}") + + time_offsets = np.arange( + start=offset_start.to_value("ns").round(), + stop=offset_stop.to_value("ns").round(), + step=TIME_ACCURACY.to_value("ns").round(), + ) + + time_offsets = u.Quantity(time_offsets.round(), unit="ns", dtype=int) + + # Extract the MAGIC events taken when LST-1 observed + logger.info(f"\nExtracting the {tel_name} events taken when LST-1 observed...") time_lolim = timestamps_lst[0] + time_offsets[0] - window_half_width time_uplim = timestamps_lst[-1] + time_offsets[-1] + window_half_width - cond_lolim = timestamps_magic >= time_lolim cond_uplim = timestamps_magic <= time_uplim @@ -268,7 +385,6 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): logger.info("\nChecking the event coincidence...") for time_offset in time_offsets: - times_lolim = timestamps_lst + time_offset - window_half_width times_uplim = timestamps_lst + time_offset + window_half_width @@ -283,12 +399,16 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): ) n_coincidences.append(n_coincidence) + + + if not any(n_coincidences): logger.info("\nNo coincident events are found. 
Skipping...") continue n_coincidences = np.array(n_coincidences) + # Sometimes there are more than one time offset maximizing the # number of coincidences, so here we calculate the mean of them @@ -375,7 +495,8 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): event_data = pd.concat([event_data, df_lst, df_magic]) features = pd.concat([features, df_feature]) - profiles = profiles.merge(df_profile) + profiles = profiles.merge(df_profile, on="time_offset", how="outer") + profiles = profiles.sort_values("time_offset") if event_data.empty: logger.info("\nNo coincident events are found. Exiting...") diff --git a/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py b/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py index 46b1067da..2e575b28b 100644 --- a/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py @@ -71,7 +71,7 @@ PEDESTAL_TYPES = ["fundamental", "from_extractor", "from_extractor_rndm"] -def magic_calib_to_dl1(input_file, output_dir, config, process_run=False): +def magic_calib_to_dl1(input_file, output_dir, config, max_events, process_run=False): """ Processes the events of MAGIC calibrated data and computes the DL1 parameters. @@ -93,7 +93,7 @@ def magic_calib_to_dl1(input_file, output_dir, config, process_run=False): # Load the input file logger.info(f"\nInput file: {input_file}") - event_source = MAGICEventSource(input_file, process_run=process_run) + event_source = MAGICEventSource(input_file, process_run=process_run, max_events=max_events) is_simulation = event_source.is_simulation logger.info(f"\nIs simulation: {is_simulation}") @@ -370,6 +370,17 @@ def main(): help="Path to a configuration file", ) + parser.add_argument( + "--max-evt", + "-m", + dest="max_events", + type=int, + default=None, + help="Max. 
number of processed showers", + ) + + + parser.add_argument( "--process-run", dest="process_run", @@ -383,8 +394,7 @@ def main(): config = yaml.safe_load(f) # Process the input data - magic_calib_to_dl1(args.input_file, args.output_dir, config, args.process_run) - + magic_calib_to_dl1(args.input_file, args.output_dir, config, args.max_events, args.process_run) logger.info("\nDone.") process_time = time.time() - start_time diff --git a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py index c93f0cc7e..47876d7f0 100644 --- a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py +++ b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py @@ -29,7 +29,6 @@ import numpy as np import argparse import glob -import time import yaml from pathlib import Path @@ -389,8 +388,8 @@ def main(): #Below we read the data paths target_dir = str(Path(config["directories"]["workspace_dir"]))+"/"+config["directories"]["target_name"] MC_gammas = str(Path(config["directories"]["MC_gammas"])) - MC_electrons = str(Path(config["directories"]["MC_electrons"])) - MC_helium = str(Path(config["directories"]["MC_helium"])) + #MC_electrons = str(Path(config["directories"]["MC_electrons"])) + #MC_helium = str(Path(config["directories"]["MC_helium"])) MC_protons = str(Path(config["directories"]["MC_protons"])) MC_gammadiff = str(Path(config["directories"]["MC_gammadiff"])) From 493ad1cfffefe4bfaec5c13fb6615ff30a23ee90 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 27 Sep 2023 08:19:23 +0200 Subject: [PATCH 05/76] remove duplicate function (io, gadf) --- magicctapipe/io/__init__.py | 13 +++++----- magicctapipe/io/gadf.py | 50 +------------------------------------ 2 files changed, 8 insertions(+), 55 deletions(-) diff --git a/magicctapipe/io/__init__.py b/magicctapipe/io/__init__.py index a2d0a5491..91d472d01 100644 --- a/magicctapipe/io/__init__.py +++ b/magicctapipe/io/__init__.py @@ -6,12 +6,6 @@ 
RealEventInfoContainer, SimEventInfoContainer, ) -from .gadf import ( - create_event_hdu, - create_gh_cuts_hdu, - create_gti_hdu, - create_pointing_hdu, -) from .io import ( telescope_combinations, format_object, @@ -26,6 +20,13 @@ load_train_data_files_tel, save_pandas_data_in_table, ) +from .gadf import ( + create_event_hdu, + create_gh_cuts_hdu, + create_gti_hdu, + create_pointing_hdu, +) + __all__ = [ "BaseEventInfoContainer", diff --git a/magicctapipe/io/gadf.py b/magicctapipe/io/gadf.py index 218e4b344..9c0703614 100644 --- a/magicctapipe/io/gadf.py +++ b/magicctapipe/io/gadf.py @@ -12,7 +12,7 @@ from magicctapipe import __version__ from magicctapipe.utils.functions import HEIGHT_ORM, LAT_ORM, LON_ORM from pyirf.binning import split_bin_lo_hi - +from .io import telescope_combinations __all__ = [ "create_gh_cuts_hdu", "create_event_hdu", @@ -29,54 +29,6 @@ MJDREF = Time(0, format="unix", scale="utc") -def telescope_combinations(config): - """ - Generates all possible telescope combinations without repetition. E.g.: "LST1_M1", "LST2_LST4_M2", "LST1_LST2_LST3_M1" and so on. - - Parameters - ---------- - config: dict - yaml file with information about the telescope IDs. Typically evoked from "config_general.yaml" in the main scripts. - - Returns - ------- - TEL_NAMES: dict - Dictionary with telescope IDs and names. - TEL_COMBINATIONS: dict - Dictionary with all telescope combinations with no repetions. - """ - - - TEL_NAMES = {} - for k, v in config["mc_tel_ids"].items(): #Here we swap the dictionary keys and values just for convenience. 
- if v > 0: - TEL_NAMES[v] = k - - TEL_COMBINATIONS = {} - keys = list(TEL_NAMES.keys()) - - def recursive_solution(current_tel, current_comb): - - if current_tel == len(keys): #The function stops once we reach the last telescope - return - - current_comb_name = current_comb[0] + '_' + TEL_NAMES[keys[current_tel]] #Name of the combo (at this point it can even be a single telescope) - current_comb_list = current_comb[1] + [keys[current_tel]] #List of telescopes (including individual telescopes) - - if len(current_comb_list) > 1: #We save them in the new dictionary excluding the single-telescope values - TEL_COMBINATIONS[current_comb_name[1:]] = current_comb_list; - - current_comb = [current_comb_name, current_comb_list] #We save the current results in this varible to recal the function recursively ("for" loop below) - - for i in range(1, len(keys)-current_tel): - recursive_solution(current_tel+i, current_comb) - - - for key in range(len(keys)): - recursive_solution(key, ['',[]]) - - - return TEL_NAMES, TEL_COMBINATIONS @u.quantity_input def create_gh_cuts_hdu( From 37027e38e588d34c55ed1e3fe97c90ef44b262aa Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 27 Sep 2023 08:22:08 +0200 Subject: [PATCH 06/76] pyflakes --- .../scripts/lst1_magic/lst1_magic_event_coincidence.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index 779d5cdb0..6c8f4dd41 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -64,8 +64,7 @@ import yaml from astropy import units as u from ctapipe.instrument import SubarrayDescription -from magicctapipe.io import ( - format_object, +from magicctapipe.io import ( get_stereo_events, load_lst_dl1_data_file, load_magic_dl1_data_files, From 89cac19505d9c55b1a16310bf26173efc9e7e03c Mon Sep 17 00:00:00 
2001 From: Elisa-Visentin Date: Wed, 27 Sep 2023 08:34:33 +0200 Subject: [PATCH 07/76] trying to fix tests --- magicctapipe/reco/estimators.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/magicctapipe/reco/estimators.py b/magicctapipe/reco/estimators.py index 5bd02a4c3..44d4a4f0b 100644 --- a/magicctapipe/reco/estimators.py +++ b/magicctapipe/reco/estimators.py @@ -7,13 +7,14 @@ import numpy as np import pandas as pd import sklearn.ensemble -from magicctapipe.io.io import TEL_NAMES + __all__ = ["EnergyRegressor", "DispRegressor", "EventClassifier"] logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) +TEL_NAMES = {1: "LST-1", 2: "MAGIC-I", 3: "MAGIC-II"} #####TO BE REMOVED WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! class EnergyRegressor: @@ -60,7 +61,7 @@ def fit(self, event_data): event_data: pandas.core.frame.DataFrame Data frame of shower events """ - + self.telescope_rfs.clear() # Loop over every telescope From ea94f715397827e45ad5593730d2fc4b219273a9 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 27 Sep 2023 08:43:05 +0200 Subject: [PATCH 08/76] Try to fix tests --- .../scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py | 7 ++++++- magicctapipe/scripts/lst1_magic/lst1_magic_train_rfs.py | 4 +++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py b/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py index d3d129ecb..2bed34eaa 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py @@ -38,7 +38,12 @@ logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) - +TEL_COMBINATIONS = { + "M1_M2": [2, 3], # combo_type = 0 + "LST1_M1": [1, 2], # combo_type = 1 + "LST1_M2": [1, 3], # 
combo_type = 2 + "LST1_M1_M2": [1, 2, 3], # combo_type = 3 +} #####TO BE REMOVED WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! def apply_rfs(event_data, estimator): """ diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_train_rfs.py b/magicctapipe/scripts/lst1_magic/lst1_magic_train_rfs.py index 8afaaf3b2..a63a130f6 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_train_rfs.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_train_rfs.py @@ -41,7 +41,7 @@ import pandas as pd import yaml from magicctapipe.io import format_object, load_train_data_files -from magicctapipe.io.io import GROUP_INDEX_TRAIN, TEL_NAMES +from magicctapipe.io.io import GROUP_INDEX_TRAIN from magicctapipe.reco import DispRegressor, EnergyRegressor, EventClassifier __all__ = [ @@ -54,6 +54,8 @@ logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) +TEL_NAMES = {1: "LST-1", 2: "MAGIC-I", 3: "MAGIC-II"} #####TO BE REMOVED WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
+ # True event class of gamma and proton MCs EVENT_CLASS_GAMMA = 0 From 7fb3dc38b8034f2d22b8b83877c1f0dd9d57bbcc Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 27 Sep 2023 08:43:44 +0200 Subject: [PATCH 09/76] tests --- magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py | 1 - 1 file changed, 1 deletion(-) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py b/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py index 2bed34eaa..aa528152b 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py @@ -30,7 +30,6 @@ from ctapipe.coordinates import TelescopeFrame from ctapipe.instrument import SubarrayDescription from magicctapipe.io import get_stereo_events, save_pandas_data_in_table -from magicctapipe.io.io import TEL_COMBINATIONS from magicctapipe.reco import DispRegressor, EnergyRegressor, EventClassifier __all__ = ["apply_rfs", "reconstruct_arrival_direction", "dl1_stereo_to_dl2"] From 8d8849ff593fb7e56183f7de4139b63d36e79caf Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 27 Sep 2023 10:14:02 +0200 Subject: [PATCH 10/76] Fix test functions --- magicctapipe/conftest.py | 5 + magicctapipe/io/__init__.py | 4 + magicctapipe/io/gadf.py | 143 ++++++- magicctapipe/io/io.py | 419 ++++++++++++++++++++- magicctapipe/io/tests/test_io.py | 24 +- magicctapipe/io/tests/test_io_monly.py | 24 +- magicctapipe/resources/config_general.yaml | 26 ++ 7 files changed, 618 insertions(+), 27 deletions(-) create mode 100644 magicctapipe/resources/config_general.yaml diff --git a/magicctapipe/conftest.py b/magicctapipe/conftest.py index c8c2c51ea..c98dcc9ce 100644 --- a/magicctapipe/conftest.py +++ b/magicctapipe/conftest.py @@ -384,6 +384,11 @@ def config_monly(): config_path = resource_file("config_monly.yaml") return config_path +@pytest.fixture(scope="session") +def config_gen(): + config_path = resource_file("config_general.yaml") 
+ return config_path + """ Data processing diff --git a/magicctapipe/io/__init__.py b/magicctapipe/io/__init__.py index 91d472d01..7a72c7e57 100644 --- a/magicctapipe/io/__init__.py +++ b/magicctapipe/io/__init__.py @@ -10,7 +10,9 @@ telescope_combinations, format_object, get_dl2_mean, + get_stereo, get_stereo_events, + get_stereo_events_old, load_dl2_data_file, load_irf_files, load_lst_dl1_data_file, @@ -39,7 +41,9 @@ "telescope_combinations", "format_object", "get_dl2_mean", + "get_stereo", "get_stereo_events", + "get_stereo_events_old", "load_dl2_data_file", "load_irf_files", "load_lst_dl1_data_file", diff --git a/magicctapipe/io/gadf.py b/magicctapipe/io/gadf.py index 9c0703614..5e162244d 100644 --- a/magicctapipe/io/gadf.py +++ b/magicctapipe/io/gadf.py @@ -89,6 +89,147 @@ def create_gh_cuts_hdu( return gh_cuts_hdu +def create_event_hdu( + event_table, on_time, deadc, source_name, source_ra=None, source_dec=None +): + """ + Creates a fits binary table HDU for shower events. + + Parameters + ---------- + event_table: astropy.table.table.QTable + Table of the DL2 events surviving gammaness cuts + on_time: astropy.table.table.QTable + ON time of the input data + deadc: float + Dead time correction factor + source_name: str + Name of the observed source + source_ra: str + Right ascension of the observed source, whose format should be + acceptable by `astropy.coordinates.sky_coordinate.SkyCoord` + (Used only when the source name cannot be resolved) + source_dec: str + Declination of the observed source, whose format should be + acceptable by `astropy.coordinates.sky_coordinate.SkyCoord` + (Used only when the source name cannot be resolved) + + Returns + ------- + event_hdu: astropy.io.fits.hdu.table.BinTableHDU + Event HDU + + Raises + ------ + ValueError + If the source name cannot be resolved and also either or both of + source RA/Dec coordinate is set to None + """ + + mjdreff, mjdrefi = np.modf(MJDREF.mjd) + + time_start = Time(event_table["timestamp"][0], 
format="unix", scale="utc") + time_start_iso = time_start.to_value("iso", "date_hms") + + time_end = Time(event_table["timestamp"][-1], format="unix", scale="utc") + time_end_iso = time_end.to_value("iso", "date_hms") + + # Calculate the elapsed and effective time + elapsed_time = time_end - time_start + effective_time = on_time * deadc + + # Get the instruments used for the observation + combo_types_unique = np.unique(event_table["combo_type"]) + tel_combos = np.array(list(TEL_COMBINATIONS.keys()))[combo_types_unique] + + tel_list = [tel_combo.split("_") for tel_combo in tel_combos] + tel_list_unique = np.unique(sum(tel_list, [])) + + instruments = "_".join(tel_list_unique) + + # Transfer the RA/Dec directions to the galactic coordinate + event_coords = SkyCoord( + ra=event_table["reco_ra"], dec=event_table["reco_dec"], frame="icrs" + ) + + event_coords = event_coords.galactic + + try: + # Try to get the source coordinate from the input name + source_coord = SkyCoord.from_name(source_name, frame="icrs") + + except Exception: + logger.warning( + f"WARNING: The source name '{source_name}' could not be resolved. " + f"Setting the input RA/Dec coordinate ({source_ra}, {source_dec})..." 
+ ) + + if (source_ra is None) or (source_dec is None): + raise ValueError("The input RA/Dec coordinate is set to `None`.") + + source_coord = SkyCoord(ra=source_ra, dec=source_dec, frame="icrs") + + # Create a table + qtable = QTable( + data={ + "EVENT_ID": event_table["event_id"], + "TIME": event_table["timestamp"], + "RA": event_table["reco_ra"], + "DEC": event_table["reco_dec"], + "ENERGY": event_table["reco_energy"], + "GAMMANESS": event_table["gammaness"], + "MULTIP": event_table["multiplicity"], + "GLON": event_coords.l.to("deg"), + "GLAT": event_coords.b.to("deg"), + "ALT": event_table["reco_alt"].to("deg"), + "AZ": event_table["reco_az"].to("deg"), + } + ) + + # Create a header + header = fits.Header( + cards=[ + ("CREATED", Time.now().utc.iso), + ("HDUCLAS1", "EVENTS"), + ("OBS_ID", np.unique(event_table["obs_id"])[0]), + ("DATE-OBS", time_start_iso[:10]), + ("TIME-OBS", time_start_iso[11:]), + ("DATE-END", time_end_iso[:10]), + ("TIME-END", time_end_iso[11:]), + ("TSTART", time_start.value), + ("TSTOP", time_end.value), + ("MJDREFI", mjdrefi), + ("MJDREFF", mjdreff), + ("TIMEUNIT", "s"), + ("TIMESYS", "UTC"), + ("TIMEREF", "TOPOCENTER"), + ("ONTIME", on_time.value), + ("TELAPSE", elapsed_time.to_value("s")), + ("DEADC", deadc), + ("LIVETIME", effective_time.value), + ("OBJECT", source_name), + ("OBS_MODE", "WOBBLE"), + ("N_TELS", np.max(event_table["multiplicity"])), + ("TELLIST", instruments), + ("INSTRUME", instruments), + ("RA_PNT", event_table["pointing_ra"][0].value, "deg"), + ("DEC_PNT", event_table["pointing_dec"][0].value, "deg"), + ("ALT_PNT", event_table["pointing_alt"][0].to_value("deg"), "deg"), + ("AZ_PNT", event_table["pointing_az"][0].to_value("deg"), "deg"), + ("RA_OBJ", source_coord.ra.to_value("deg"), "deg"), + ("DEC_OBJ", source_coord.dec.to_value("deg"), "deg"), + ("FOVALIGN", "RADEC"), + ] + ) + + # Create a HDU + event_hdu = fits.BinTableHDU(qtable, header=header, name="EVENTS") + + return event_hdu + + + +''' def create_event_hdu( 
event_table, config, on_time, deadc, source_name, source_ra=None, source_dec=None @@ -230,7 +371,7 @@ def create_event_hdu( event_hdu = fits.BinTableHDU(qtable, header=header, name="EVENTS") return event_hdu - +''' def create_gti_hdu(event_table): """ diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index 331bbda95..69f97edfb 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -27,6 +27,7 @@ "format_object", "get_stereo", "get_stereo_events", + "get_stereo_events_old", "get_dl2_mean", "load_lst_dl1_data_file", "load_magic_dl1_data_files", @@ -175,6 +176,84 @@ def get_stereo( event_data_stereo.query(f"multiplicity >1 & multiplicity <= {max_multiplicity}", inplace=True) return event_data_stereo +def get_stereo_events_old( + event_data, quality_cuts=None, group_index=["obs_id", "event_id"] +): + """ + Gets the stereo events surviving specified quality cuts. + + It also adds the telescope multiplicity `multiplicity` and + combination types `combo_type` to the output data frame. 
+ + Parameters + ---------- + event_data: pandas.core.frame.DataFrame + Data frame of shower events + quality_cuts: str + Quality cuts applied to the input data + group_index: list + Index to group telescope events + + Returns + ------- + event_data_stereo: pandas.core.frame.DataFrame + Data frame of the stereo events surviving the quality cuts + """ + + event_data_stereo = event_data.copy() + + # Apply the quality cuts + if quality_cuts is not None: + event_data_stereo.query(quality_cuts, inplace=True) + + # Extract stereo events + event_data_stereo["multiplicity"] = event_data_stereo.groupby(group_index).size() + event_data_stereo.query("multiplicity == [2, 3]", inplace=True) + + # Check the total number of events + n_events_total = len(event_data_stereo.groupby(group_index).size()) + logger.info(f"\nIn total {n_events_total} stereo events are found:") + + n_events_per_combo = {} + + # Loop over every telescope combination type + for combo_type, (tel_combo, tel_ids) in enumerate(TEL_COMBINATIONS.items()): + multiplicity = len(tel_ids) + + df_events = event_data_stereo.query( + f"(tel_id == {tel_ids}) & (multiplicity == {multiplicity})" + ).copy() + + # Here we recalculate the multiplicity and apply the cut again, + # since with the above cut the events belonging to other + # combination types are also extracted. For example, in case of + # tel_id = [1, 2], the tel 1 events of the combination [1, 3] + # and the tel 2 events of the combination [2, 3] remain in the + # data frame, whose multiplicity will be recalculated as 1 and + # so will be removed with the following cuts. 
+ + df_events["multiplicity"] = df_events.groupby(group_index).size() + df_events.query(f"multiplicity == {multiplicity}", inplace=True) + + # Assign the combination type + event_data_stereo.loc[df_events.index, "combo_type"] = combo_type + + n_events = len(df_events.groupby(group_index).size()) + percentage = 100 * n_events / n_events_total + + key = f"{tel_combo} (type {combo_type})" + value = f"{n_events:.0f} events ({percentage:.1f}%)" + + n_events_per_combo[key] = value + + event_data_stereo = event_data_stereo.astype({"combo_type": int}) + + # Show the number of events per combination type + logger.info(format_object(n_events_per_combo)) + + return event_data_stereo + + def get_stereo_events( event_data, config, quality_cuts=None, group_index=["obs_id", "event_id"] @@ -536,7 +615,7 @@ def load_magic_dl1_data_files(input_dir, config): return event_data, subarray - +''' def load_train_data_files( input_dir, config, offaxis_min=None, offaxis_max=None, true_event_class=None ): @@ -625,6 +704,89 @@ def load_train_data_files( return data_train +''' +def load_train_data_files( + input_dir, offaxis_min=None, offaxis_max=None, true_event_class=None +): + """ + Loads DL1-stereo data files and separates the shower events per + telescope combination type for training RFs. 
+ + Parameters + ---------- + input_dir: str + Path to a directory where input DL1-stereo files are stored + offaxis_min: str + Minimum shower off-axis angle allowed, whose format should be + acceptable by `astropy.units.quantity.Quantity` + offaxis_max: str + Maximum shower off-axis angle allowed, whose format should be + acceptable by `astropy.units.quantity.Quantity` + true_event_class: int + True event class of the input events + + Returns + ------- + data_train: dict + Data frames of the shower events separated by the telescope + combination types + + Raises + ------ + FileNotFoundError + If any DL1-stereo data files are not found in the input + directory + """ + + # Find the input files + file_mask = f"{input_dir}/dl1_stereo_*.h5" + + input_files = glob.glob(file_mask) + input_files.sort() + + if len(input_files) == 0: + raise FileNotFoundError( + "Could not find any DL1-stereo data files in the input directory." + ) + + # Load the input files + logger.info("\nThe following DL1-stereo data files are found:") + + data_list = [] + + for input_file in input_files: + logger.info(input_file) + + df_events = pd.read_hdf(input_file, key="events/parameters") + data_list.append(df_events) + + event_data = pd.concat(data_list) + event_data.set_index(GROUP_INDEX_TRAIN, inplace=True) + event_data.sort_index(inplace=True) + + if offaxis_min is not None: + offaxis_min = u.Quantity(offaxis_min).to_value("deg") + event_data.query(f"off_axis >= {offaxis_min}", inplace=True) + + if offaxis_max is not None: + offaxis_max = u.Quantity(offaxis_max).to_value("deg") + event_data.query(f"off_axis <= {offaxis_max}", inplace=True) + + if true_event_class is not None: + event_data["true_event_class"] = true_event_class + + event_data = get_stereo_events_old(event_data, group_index=GROUP_INDEX_TRAIN) + + data_train = {} + + # Loop over every telescope combination type + for combo_type, tel_combo in enumerate(TEL_COMBINATIONS.keys()): + df_events = event_data.query(f"combo_type == 
{combo_type}") + + if not df_events.empty: + data_train[tel_combo] = df_events + + return data_train def load_train_data_files_tel(input_dir, config, offaxis_min=None, offaxis_max=None, true_event_class=None): """ @@ -712,7 +874,7 @@ def load_train_data_files_tel(input_dir, config, offaxis_min=None, offaxis_max=N return data_train - +''' def load_mc_dl2_data_file(config, input_file, quality_cuts, event_type, weight_type_dl2): """ Loads a MC DL2 data file for creating the IRFs. @@ -991,7 +1153,260 @@ def load_dl2_data_file(config, input_file, quality_cuts, event_type, weight_type logger.info(f"--> Total correction factor: {deadc.round(3)}") return event_table, on_time, deadc +''' +def load_mc_dl2_data_file(input_file, quality_cuts, event_type, weight_type_dl2): + """ + Loads a MC DL2 data file for creating the IRFs. + + Parameters + ---------- + input_file: str + Path to an input MC DL2 data file + quality_cuts: str + Quality cuts applied to the input events + event_type: str + Type of the events which will be used - + "software" uses software coincident events, + "software_only_3tel" uses only 3-tel combination events, + "magic_only" uses only MAGIC-stereo combination events, and + "hardware" uses all the telescope combination events + weight_type_dl2: str + Type of the weight for averaging telescope-wise DL2 parameters - + "simple", "variance" or "intensity" are allowed + Returns + ------- + event_table: astropy.table.table.QTable + Table of the MC DL2 events surviving the cuts + pointing: numpy.ndarray + Telescope pointing direction (zd, az) in the unit of degree + sim_info: pyirf.simulations.SimulatedEventsInfo + Container of the simulation information + + Raises + ------ + ValueError + If the input event type is not known + """ + + # Load the input file + df_events = pd.read_hdf(input_file, key="events/parameters") + df_events.set_index(["obs_id", "event_id", "tel_id"], inplace=True) + df_events.sort_index(inplace=True) + + df_events = 
get_stereo_events_old(df_events, quality_cuts) + + logger.info(f"\nExtracting the events of the '{event_type}' type...") + + if event_type == "software": + # The events of the MAGIC-stereo combination are excluded + df_events.query("(combo_type > 0) & (magic_stereo == True)", inplace=True) + + elif event_type == "software_only_3tel": + df_events.query("combo_type == 3", inplace=True) + + elif event_type == "magic_only": + df_events.query("combo_type == 0", inplace=True) + + elif event_type != "hardware": + raise ValueError(f"Unknown event type '{event_type}'.") + + n_events = len(df_events.groupby(["obs_id", "event_id"]).size()) + logger.info(f"--> {n_events} stereo events") + + # Get the mean DL2 parameters + df_dl2_mean = get_dl2_mean(df_events, weight_type_dl2) + df_dl2_mean.reset_index(inplace=True) + + # Convert the pandas data frame to the astropy QTable + event_table = QTable.from_pandas(df_dl2_mean) + + event_table["pointing_alt"] *= u.rad + event_table["pointing_az"] *= u.rad + event_table["true_alt"] *= u.deg + event_table["true_az"] *= u.deg + event_table["reco_alt"] *= u.deg + event_table["reco_az"] *= u.deg + event_table["true_energy"] *= u.TeV + event_table["reco_energy"] *= u.TeV + + # Calculate some angular distances + event_table["theta"] = calculate_theta( + event_table, event_table["true_az"], event_table["true_alt"] + ) + + event_table["true_source_fov_offset"] = calculate_source_fov_offset(event_table) + event_table["reco_source_fov_offset"] = calculate_source_fov_offset( + event_table, prefix="reco" + ) + + # Get the telescope pointing direction + pointing_zd = 90 - event_table["pointing_alt"].mean().to_value("deg") + pointing_az = event_table["pointing_az"].mean().to_value("deg") + + pointing = np.array([pointing_zd, pointing_az]).round(3) + + # Get the simulation configuration + sim_config = pd.read_hdf(input_file, key="simulation/config") + + n_total_showers = ( + sim_config["num_showers"][0] + * sim_config["shower_reuse"][0] + * 
len(np.unique(event_table["obs_id"])) + ) + + min_viewcone_radius = sim_config["min_viewcone_radius"][0] * u.deg + max_viewcone_radius = sim_config["max_viewcone_radius"][0] * u.deg + + viewcone_diff = max_viewcone_radius - min_viewcone_radius + + if viewcone_diff < u.Quantity(0.001, unit="deg"): + # Handle ring-wobble MCs as same as point-like MCs + viewcone = 0 * u.deg + else: + viewcone = max_viewcone_radius + + sim_info = SimulatedEventsInfo( + n_showers=n_total_showers, + energy_min=u.Quantity(sim_config["energy_range_min"][0], unit="TeV"), + energy_max=u.Quantity(sim_config["energy_range_max"][0], unit="TeV"), + max_impact=u.Quantity(sim_config["max_scatter_range"][0], unit="m"), + spectral_index=sim_config["spectral_index"][0], + viewcone=viewcone, + ) + + return event_table, pointing, sim_info + + +def load_dl2_data_file(input_file, quality_cuts, event_type, weight_type_dl2): + """ + Loads a DL2 data file for processing to DL3. + + Parameters + ---------- + input_file: str + Path to an input DL2 data file + quality_cuts: str + Quality cuts applied to the input events + event_type: str + Type of the events which will be used - + "software" uses software coincident events, + "software_only_3tel" uses only 3-tel combination events, + "magic_only" uses only MAGIC-stereo combination events, and + "hardware" uses all the telescope combination events + weight_type_dl2: str + Type of the weight for averaging telescope-wise DL2 parameters - + "simple", "variance" or "intensity" are allowed + + Returns + ------- + event_table: astropy.table.table.QTable + Table of the MC DL2 events surviving the cuts + on_time: astropy.units.quantity.Quantity + ON time of the input data + deadc: float + Dead time correction factor + + Raises + ------ + ValueError + If the input event type is not known + """ + + # Load the input file + event_data = pd.read_hdf(input_file, key="events/parameters") + event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) + 
event_data.sort_index(inplace=True) + + event_data = get_stereo_events_old(event_data, quality_cuts) + + logger.info(f"\nExtracting the events of the '{event_type}' type...") + + if event_type == "software": + # The events of the MAGIC-stereo combination are excluded + event_data.query("combo_type > 0", inplace=True) + + elif event_type == "software_only_3tel": + event_data.query("combo_type == 3", inplace=True) + + elif event_type == "magic_only": + event_data.query("combo_type == 0", inplace=True) + + elif event_type == "hardware": + logger.warning( + "WARNING: Please confirm that this type is correct for the input data, " + "since the hardware trigger between LST-1 and MAGIC may NOT be used." + ) + + else: + raise ValueError(f"Unknown event type '{event_type}'.") + + n_events = len(event_data.groupby(["obs_id", "event_id"]).size()) + logger.info(f"--> {n_events} stereo events") + + # Get the mean DL2 parameters + df_dl2_mean = get_dl2_mean(event_data, weight_type_dl2) + df_dl2_mean.reset_index(inplace=True) + + # Convert the pandas data frame to astropy QTable + event_table = QTable.from_pandas(df_dl2_mean) + + event_table["pointing_alt"] *= u.rad + event_table["pointing_az"] *= u.rad + event_table["pointing_ra"] *= u.deg + event_table["pointing_dec"] *= u.deg + event_table["reco_alt"] *= u.deg + event_table["reco_az"] *= u.deg + event_table["reco_ra"] *= u.deg + event_table["reco_dec"] *= u.deg + event_table["reco_energy"] *= u.TeV + event_table["timestamp"] *= u.s + + # Calculate the ON time + time_diffs = np.diff(event_table["timestamp"]) + on_time = time_diffs[time_diffs < TIME_DIFF_UPLIM].sum() + + # Calculate the dead time correction factor. 
Here we use the + # following equations to get the correction factor `deadc`: + + # rate = 1 / ( - dead_time) + # deadc = 1 / (1 + rate * dead_time) = 1 - dead_time / + + logger.info("\nCalculating the dead time correction factor...") + + event_data.query(f"0 < time_diff < {TIME_DIFF_UPLIM.to_value('s')}", inplace=True) + + deadc_list = [] + + # Calculate the LST-1 correction factor + time_diffs_lst = event_data.query("tel_id == 1")["time_diff"] + + if len(time_diffs_lst) > 0: + deadc_lst = 1 - DEAD_TIME_LST.to_value("s") / time_diffs_lst.mean() + logger.info(f"LST-1: {deadc_lst.round(3)}") + + deadc_list.append(deadc_lst) + + # Calculate the MAGIC correction factor with one of the telescopes + # whose number of events is larger than the other + time_diffs_m1 = event_data.query("tel_id == 2")["time_diff"] + time_diffs_m2 = event_data.query("tel_id == 3")["time_diff"] + + if len(time_diffs_m1) > len(time_diffs_m2): + deadc_magic = 1 - DEAD_TIME_MAGIC.to_value("s") / time_diffs_m1.mean() + logger.info(f"MAGIC(-I): {deadc_magic.round(3)}") + else: + deadc_magic = 1 - DEAD_TIME_MAGIC.to_value("s") / time_diffs_m2.mean() + logger.info(f"MAGIC(-II): {deadc_magic.round(3)}") + + deadc_list.append(deadc_magic) + + # Calculate the total correction factor as the multiplicity of the + # telescope-wise correction factors + deadc = np.prod(deadc_list) + logger.info(f"--> Total correction factor: {deadc.round(3)}") + + return event_table, on_time, deadc def load_irf_files(input_dir_irf): """ diff --git a/magicctapipe/io/tests/test_io.py b/magicctapipe/io/tests/test_io.py index e6cdfe9a3..41dbb5405 100644 --- a/magicctapipe/io/tests/test_io.py +++ b/magicctapipe/io/tests/test_io.py @@ -36,7 +36,7 @@ def test_save_pandas_data_in_table(temp_pandas, pd_test): assert df.equals(df1) -def test_get_stereo_events_mc(gamma_stereo, p_stereo): +def test_get_stereo_events_mc(gamma_stereo, p_stereo, config_gen): """ Check on stereo data reading """ @@ -52,12 +52,12 @@ def 
test_get_stereo_events_mc(gamma_stereo, p_stereo): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data) + data = get_stereo_events(event_data, config_gen) assert np.all(data["multiplicity"] > 1) assert np.all(data["combo_type"] >= 0) -def test_get_stereo_events_mc_cut(gamma_stereo, p_stereo): +def test_get_stereo_events_mc_cut(gamma_stereo, p_stereo, config_gen): """ Check on quality cuts """ @@ -71,7 +71,7 @@ def test_get_stereo_events_mc_cut(gamma_stereo, p_stereo): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data, "intensity>50") + data = get_stereo_events(event_data, config_gen, "intensity>50") assert np.all(data["intensity"] > 50) @@ -296,12 +296,12 @@ def test_load_lst_dl1_data_file(dl1_lst): assert s1.all() -def test_load_magic_dl1_data_files(merge_magic): +def test_load_magic_dl1_data_files(merge_magic, config_gen): """ Check on MAGIC DL1 """ - events, _ = load_magic_dl1_data_files(str(merge_magic)) + events, _ = load_magic_dl1_data_files(str(merge_magic), config_gen) assert list(events.index.names) == ["obs_id_magic", "event_id_magic", "tel_id"] assert "event_id" not in events.columns events = events.reset_index() @@ -310,7 +310,7 @@ def test_load_magic_dl1_data_files(merge_magic): assert s1.all() -def test_load_magic_dl1_data_files_exc(temp_DL1_M_exc): +def test_load_magic_dl1_data_files_exc(temp_DL1_M_exc, config_gen): """ Check on MAGIC DL1: exceptions (no DL1 files) """ @@ -318,10 +318,10 @@ def test_load_magic_dl1_data_files_exc(temp_DL1_M_exc): FileNotFoundError, match="Could not find any DL1 data files in the input directory.", ): - _, _ = load_magic_dl1_data_files(str(temp_DL1_M_exc)) + _, _ = load_magic_dl1_data_files(str(temp_DL1_M_exc), 
config_gen) -def test_get_stereo_events_data(coincidence_stereo): +def test_get_stereo_events_data(coincidence_stereo, config_gen): """ Check on stereo data reading """ @@ -330,12 +330,12 @@ def test_get_stereo_events_data(coincidence_stereo): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data) + data = get_stereo_events(event_data, config_gen) assert np.all(data["multiplicity"] > 1) assert np.all(data["combo_type"] >= 0) -def test_get_stereo_events_data_cut(coincidence_stereo): +def test_get_stereo_events_data_cut(coincidence_stereo, config_gen): """ Check on quality cuts """ @@ -344,7 +344,7 @@ def test_get_stereo_events_data_cut(coincidence_stereo): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data, "intensity>50") + data = get_stereo_events(event_data, config_gen, "intensity>50") assert np.all(data["intensity"] > 50) diff --git a/magicctapipe/io/tests/test_io_monly.py b/magicctapipe/io/tests/test_io_monly.py index c498b87c4..5666e5b6c 100644 --- a/magicctapipe/io/tests/test_io_monly.py +++ b/magicctapipe/io/tests/test_io_monly.py @@ -36,7 +36,7 @@ def test_save_pandas_data_in_table(temp_pandas, pd_test): assert df.equals(df1) -def test_get_stereo_events_mc(gamma_stereo_monly, p_stereo_monly): +def test_get_stereo_events_mc(gamma_stereo_monly, p_stereo_monly, config_gen): """ Check on stereo data reading """ @@ -52,12 +52,12 @@ def test_get_stereo_events_mc(gamma_stereo_monly, p_stereo_monly): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data) + data = get_stereo_events(event_data, config_gen) assert 
np.all(data["multiplicity"] == 2) assert np.all(data["combo_type"] == 0) -def test_get_stereo_events_mc_cut(gamma_stereo_monly, p_stereo_monly): +def test_get_stereo_events_mc_cut(gamma_stereo_monly, p_stereo_monly, config_gen): """ Check on quality cuts """ @@ -71,7 +71,7 @@ def test_get_stereo_events_mc_cut(gamma_stereo_monly, p_stereo_monly): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data, "intensity>50") + data = get_stereo_events(event_data, config_gen, "intensity>50") assert np.all(data["intensity"] > 50) @@ -299,12 +299,12 @@ def test_load_lst_dl1_data_file(dl1_lst): assert s1.all() -def test_load_magic_dl1_data_files(merge_magic_monly): +def test_load_magic_dl1_data_files(merge_magic_monly, config_gen): """ Check on MAGIC DL1 """ - events, _ = load_magic_dl1_data_files(str(merge_magic_monly)) + events, _ = load_magic_dl1_data_files(str(merge_magic_monly), config_gen) assert list(events.index.names) == ["obs_id_magic", "event_id_magic", "tel_id"] assert "event_id" not in events.columns events = events.reset_index() @@ -313,7 +313,7 @@ def test_load_magic_dl1_data_files(merge_magic_monly): assert s1.all() -def test_load_magic_dl1_data_files_exc(temp_DL1_M_exc): +def test_load_magic_dl1_data_files_exc(temp_DL1_M_exc, config_gen): """ Check on MAGIC DL1: exceptions (no DL1 files) """ @@ -321,10 +321,10 @@ def test_load_magic_dl1_data_files_exc(temp_DL1_M_exc): FileNotFoundError, match="Could not find any DL1 data files in the input directory.", ): - _, _ = load_magic_dl1_data_files(str(temp_DL1_M_exc)) + _, _ = load_magic_dl1_data_files(str(temp_DL1_M_exc), config_gen) -def test_get_stereo_events_data(stereo_monly): +def test_get_stereo_events_data(stereo_monly, config_gen): """ Check on stereo data reading """ @@ -333,12 +333,12 @@ def test_get_stereo_events_data(stereo_monly): event_data = 
pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data) + data = get_stereo_events(event_data, config_gen) assert np.all(data["multiplicity"] == 2) assert np.all(data["combo_type"] == 0) -def test_get_stereo_events_data_cut(stereo_monly): +def test_get_stereo_events_data_cut(stereo_monly, config_gen): """ Check on quality cuts """ @@ -347,7 +347,7 @@ def test_get_stereo_events_data_cut(stereo_monly): event_data = pd.read_hdf(str(file), key="events/parameters") event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) - data = get_stereo_events(event_data, "intensity>50") + data = get_stereo_events(event_data, config_gen, "intensity>50") assert np.all(data["intensity"] > 50) diff --git a/magicctapipe/resources/config_general.yaml b/magicctapipe/resources/config_general.yaml new file mode 100644 index 000000000..dd2cfadde --- /dev/null +++ b/magicctapipe/resources/config_general.yaml @@ -0,0 +1,26 @@ +mc_tel_ids: + LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 + MAGIC-I: 2 + MAGIC-II: 3 + +directories: + workspace_dir : "/fefs/aswg/workspace/raniere/" + target_name : "CrabTeste" + MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" + MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" + MC_helium : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Helium/sim_telarray/" + MC_protons : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/Protons/dec_2276/sim_telarray" + MC_gammadiff : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/GammaDiffuse/dec_2276/sim_telarray/" + +general: + target_RA_deg : 83.633083 #RA in degrees + target_Dec_deg: 22.0145 #Dec in degrees + SimTel_version: "v1.4" + focal_length : "effective" #effective #nominal + MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" + LST_runs : 
"LST_runs.txt" + proton_train : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest + From 14786b97e0b3ecaf53313d1cea04fa370e8e75b6 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 27 Sep 2023 10:20:49 +0200 Subject: [PATCH 11/76] pyflakes --- magicctapipe/io/gadf.py | 8 +++++++- magicctapipe/io/io.py | 15 ++++++++++++++- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/magicctapipe/io/gadf.py b/magicctapipe/io/gadf.py index 5e162244d..8a87567e6 100644 --- a/magicctapipe/io/gadf.py +++ b/magicctapipe/io/gadf.py @@ -12,7 +12,7 @@ from magicctapipe import __version__ from magicctapipe.utils.functions import HEIGHT_ORM, LAT_ORM, LON_ORM from pyirf.binning import split_bin_lo_hi -from .io import telescope_combinations +#from .io import telescope_combinations __all__ = [ "create_gh_cuts_hdu", "create_event_hdu", @@ -125,6 +125,12 @@ def create_event_hdu( If the source name cannot be resolved and also either or both of source RA/Dec coordinate is set to None """ + TEL_COMBINATIONS = { + "M1_M2": [2, 3], # combo_type = 0 + "LST1_M1": [1, 2], # combo_type = 1 + "LST1_M2": [1, 3], # combo_type = 2 + "LST1_M1_M2": [1, 2, 3], # combo_type = 3 + } #####TO BE REMOVED WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
mjdreff, mjdrefi = np.modf(MJDREF.mjd) diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index 69f97edfb..57c76c4cd 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -199,6 +199,12 @@ def get_stereo_events_old( event_data_stereo: pandas.core.frame.DataFrame Data frame of the stereo events surviving the quality cuts """ + TEL_COMBINATIONS = { + "M1_M2": [2, 3], # combo_type = 0 + "LST1_M1": [1, 2], # combo_type = 1 + "LST1_M2": [1, 3], # combo_type = 2 + "LST1_M1_M2": [1, 2, 3], # combo_type = 3 + } #####TO BE REMOVED WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! event_data_stereo = event_data.copy() @@ -737,6 +743,12 @@ def load_train_data_files( If any DL1-stereo data files are not found in the input directory """ + TEL_COMBINATIONS = { + "M1_M2": [2, 3], # combo_type = 0 + "LST1_M1": [1, 2], # combo_type = 1 + "LST1_M2": [1, 3], # combo_type = 2 + "LST1_M1_M2": [1, 2, 3], # combo_type = 3 + } #####TO BE REMOVED WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
# Find the input files file_mask = f"{input_dir}/dl1_stereo_*.h5" @@ -1188,7 +1200,7 @@ def load_mc_dl2_data_file(input_file, quality_cuts, event_type, weight_type_dl2) ValueError If the input event type is not known """ - + # Load the input file df_events = pd.read_hdf(input_file, key="events/parameters") df_events.set_index(["obs_id", "event_id", "tel_id"], inplace=True) @@ -1312,6 +1324,7 @@ def load_dl2_data_file(input_file, quality_cuts, event_type, weight_type_dl2): ValueError If the input event type is not known """ + # Load the input file event_data = pd.read_hdf(input_file, key="events/parameters") From c0395e503e1f6dbade2df6686afbab1c8b168535 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 27 Sep 2023 11:14:05 +0200 Subject: [PATCH 12/76] readme + bug --- magicctapipe/conftest.py | 6 ++- magicctapipe/scripts/lst1_magic/README.md | 63 ----------------------- 2 files changed, 5 insertions(+), 64 deletions(-) diff --git a/magicctapipe/conftest.py b/magicctapipe/conftest.py index c98dcc9ce..8d0cf4455 100644 --- a/magicctapipe/conftest.py +++ b/magicctapipe/conftest.py @@ -7,6 +7,7 @@ from math import trunc from ctapipe.utils.download import download_file_cached from magicctapipe.utils import resource_file +import yaml maxjoint = 13000 maxmonly = 500 @@ -50,6 +51,7 @@ @pytest.fixture(scope="session") def temp_DL1_gamma(tmp_path_factory): return tmp_path_factory.mktemp("DL1_gammas") + @pytest.fixture(scope="session") @@ -387,7 +389,9 @@ def config_monly(): @pytest.fixture(scope="session") def config_gen(): config_path = resource_file("config_general.yaml") - return config_path + with open(config_path, "rb") as f: + config = yaml.safe_load(f) + return config """ diff --git a/magicctapipe/scripts/lst1_magic/README.md b/magicctapipe/scripts/lst1_magic/README.md index 3fb304f8c..58c88d93a 100644 --- a/magicctapipe/scripts/lst1_magic/README.md +++ b/magicctapipe/scripts/lst1_magic/README.md @@ -225,66 +225,3 @@ stereo_reco: It then creates the output 
directories for the DL1 with stereo parameters [...]DL1/Observations/Coincident_stereo/SEVERALNIGHTS and [...]/DL1/MC/GAMMAorPROTON/Merged/StereoMerged, and then runs the script lst1_magic_stereo_reco.py in all of the coincident DL1 files. The stereo DL1 files for MC and real data are then saved in these directories. -### Random forest - -Once we have the DL1 stereo parameters for all real and MC data, we can train the Random Forest: - -> $ python RF.py - -This script creates the file config_RF.yaml with several parameters related to the energy regressor, disp regressor, and event classifier, and then computes the RF (energy, disp, and classifier) based on the merged-stereo MC diffuse gammas and training proton samples by calling the script lst1_magic_train_rfs.py. The results are saved in [...]/DL1/MC/RFs. - -Once it is done, we can finally convert our DL1 stereo data files into DL2 by running: - -> $ python DL1_to_DL2.py - -This script runs `lst1_magic_dl1_stereo_to_dl2.py` on all DL1 stereo files, which applies the RFs saved in [...]/DL1/MC/RFs to stereo DL1 data (real and test MCs) and produces DL2 real and MC data. The results are saved in [...]/DL2/Observations and [...]/DL2/MC. - -### Instrument response function and DL3 - -Once the previous step is done, we compute the IRF with - -> $ python IRF.py - -which creates the configuration file config_IRF.yaml with several parameters. The main of which are shown below: - -``` -[...] -quality_cuts: "disp_diff_mean < 0.22" -event_type: "software" # select "software", "software_only_3tel", "magic_only" or "hardware" -weight_type_dl2: "simple" # select "simple", "variance" or "intensity" -[...] -gammaness: - cut_type: "dynamic" # select "global" or "dynamic" - [...] - -theta: - cut_type: "global" # select "global" or "dynamic" - global_cut_value: "0.2 deg" # used for the global cut - [...] -``` - -It then runs the script lst1_magic_create_irf.py over the DL2 MC gammas, generating the IRF and saving it at [...]/IRF. 
- -Optionally, but recommended, we can run the "diagnostic.py" script with: - -> $ python diagnostic.py - -This will create several diagnostic plots (gammaness, effective area, angular resolution, energy resolution, migration matrix, energy bias, and gamma-hadron classification comparisons. All of these plots will be saved in the directory defined on "target_name" in the config_general.yaml file. - -After the IRF, we run the DL2-to-DL3 conversion by doing: - -> $ python DL2_to_DL3.py - -which will save the DL3 files in the directory [...]/DL3. Finally, the last script to run is `create_dl3_index_files.py`. Since it is very fast, we can simply run it directly in the interactive mode by doing (remember that we must be in the magic-lst environment): - -> $ python create_dl3_index_files.py --input-dir ./CrabTeste/DL3 - -That's it. Now you can play with the DL3 data using the high-level notebooks. - -## High-level analysis - -Since the DL3 may have only a few MBs, it is typically convenient to download it to your own machine at this point. It will be necessary to have astropy and gammapy (version > 0.20) installed before proceeding. - -We prepared a [Jupyter Notebook](https://github.com/ranieremenezes/magic-cta-pipe/blob/master/magicctapipe/scripts/lst1_magic/SED_and_LC_from_DL3.ipynb) that quickly creates a counts map, a significance curve, an SED, and a light curve. You can give it a try. - -The folder [Notebooks](https://github.com/cta-observatory/magic-cta-pipe/tree/master/notebooks) contains Jupyter notebooks to perform checks on the IRF, to produce theta2 plots and SEDs. Note that the notebooks run with gammapy v0.20 or higher, while the gammapy version adopted in the MAGIC+LST-1 pipeline is v0.19. 
From ef7822bd6972367c69d617247e172da88cf39db8 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 27 Sep 2023 11:57:13 +0200 Subject: [PATCH 13/76] test (gamma combo_types) --- magicctapipe/io/tests/test_io.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/magicctapipe/io/tests/test_io.py b/magicctapipe/io/tests/test_io.py index 41dbb5405..9d9a6c720 100644 --- a/magicctapipe/io/tests/test_io.py +++ b/magicctapipe/io/tests/test_io.py @@ -94,7 +94,7 @@ def test_load_train_data_files_g(gamma_stereo): """ events = load_train_data_files(str(gamma_stereo[0])) - assert list(events.keys()) == ["M1_M2", "LST1_M1", "LST1_M2", "LST1_M1_M2"] + assert list(events.keys()) == ["LST1_M1", "LST1_M2", "LST1_M1_M2"] data = events["LST1_M1"] assert np.all(data["combo_type"]) == 1 assert "off_axis" in data.columns From 31aa48c3f60deab8c84edf18ab9d8685e60c6025 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 27 Sep 2023 13:56:27 +0200 Subject: [PATCH 14/76] use pre-offset search in automatic script --- magicctapipe/scripts/lst1_magic/coincident_events.py | 2 ++ .../scripts/lst1_magic/lst1_magic_event_coincidence.py | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/coincident_events.py b/magicctapipe/scripts/lst1_magic/coincident_events.py index 2bc4749f5..1fcb29ce5 100644 --- a/magicctapipe/scripts/lst1_magic/coincident_events.py +++ b/magicctapipe/scripts/lst1_magic/coincident_events.py @@ -42,6 +42,8 @@ def configfile_coincidence(ids, target_dir): f = open(target_dir+'/config_coincidence.yaml','w') f.write("mc_tel_ids:\n LST-1: "+str(ids[0])+"\n LST-2: "+str(ids[1])+"\n LST-3: "+str(ids[2])+"\n LST-4: "+str(ids[3])+"\n MAGIC-I: "+str(ids[4])+"\n MAGIC-II: "+str(ids[5])+"\n\n") f.write('event_coincidence:\n timestamp_type_lst: "dragon_time" # select "dragon_time", "tib_time" or "ucts_time"\n window_half_width: "300 ns"\n') + f.write(' pre_offset_search: true\n') + f.write(' n_pre_offset_search_events: 
100\n') f.write(' time_offset:\n start: "-10 us"\n stop: "0 us"\n') f.close() diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index 6c8f4dd41..8f9c5631f 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -186,8 +186,8 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): window_half_width = u.Quantity(window_half_width).to("ns") window_half_width = u.Quantity(window_half_width.round(), dtype=int) pre_offset_search = False - if "pre_offset_search" in config_coinc: - pre_offset_search = config_coinc["pre_offset_search"] + if "pre_offset_search" in config_coinc: #looking for the boolean value of pre_offset_search in the configuration file + pre_offset_search = config_coinc["pre_offset_search"] if pre_offset_search: logger.info("\nPre offset search will be performed.") From 19760ac5deade45b8e9db4a64c95f16c79bb9d09 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 27 Sep 2023 15:29:26 +0200 Subject: [PATCH 15/76] Fixed tests (now they don't fail, locally) --- magicctapipe/io/tests/test_io.py | 5 +++-- magicctapipe/io/tests/test_io_monly.py | 6 +++--- magicctapipe/resources/config.yaml | 6 +++++- magicctapipe/resources/config_monly.yaml | 5 +++++ .../scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py | 4 ++-- .../scripts/lst1_magic/lst1_magic_event_coincidence.py | 8 ++++---- .../scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py | 2 +- 7 files changed, 23 insertions(+), 13 deletions(-) diff --git a/magicctapipe/io/tests/test_io.py b/magicctapipe/io/tests/test_io.py index 9d9a6c720..5852c88f3 100644 --- a/magicctapipe/io/tests/test_io.py +++ b/magicctapipe/io/tests/test_io.py @@ -16,6 +16,7 @@ import pandas as pd + def test_format_object(): """ Simple check on a string @@ -94,7 +95,7 @@ def test_load_train_data_files_g(gamma_stereo): """ events = 
load_train_data_files(str(gamma_stereo[0])) - assert list(events.keys()) == ["LST1_M1", "LST1_M2", "LST1_M1_M2"] + assert list(events.keys()) == ["M1_M2", "LST1_M1", "LST1_M2", "LST1_M1_M2"] data = events["LST1_M1"] assert np.all(data["combo_type"]) == 1 assert "off_axis" in data.columns @@ -221,7 +222,7 @@ def test_load_irf_files(IRF): """ Check on IRF dictionaries """ - + irf, header = load_irf_files(str(IRF)) assert set(list(irf.keys())).issubset( set( diff --git a/magicctapipe/io/tests/test_io_monly.py b/magicctapipe/io/tests/test_io_monly.py index 5666e5b6c..0823b3f54 100644 --- a/magicctapipe/io/tests/test_io_monly.py +++ b/magicctapipe/io/tests/test_io_monly.py @@ -54,7 +54,7 @@ def test_get_stereo_events_mc(gamma_stereo_monly, p_stereo_monly, config_gen): event_data.sort_index(inplace=True) data = get_stereo_events(event_data, config_gen) assert np.all(data["multiplicity"] == 2) - assert np.all(data["combo_type"] == 0) + assert np.all(data["combo_type"] == 3) def test_get_stereo_events_mc_cut(gamma_stereo_monly, p_stereo_monly, config_gen): @@ -334,8 +334,8 @@ def test_get_stereo_events_data(stereo_monly, config_gen): event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) event_data.sort_index(inplace=True) data = get_stereo_events(event_data, config_gen) - assert np.all(data["multiplicity"] == 2) - assert np.all(data["combo_type"] == 0) + assert np.all(data["multiplicity"] == 2) + assert np.all(data["combo_type"] == 3) def test_get_stereo_events_data_cut(stereo_monly, config_gen): diff --git a/magicctapipe/resources/config.yaml b/magicctapipe/resources/config.yaml index f6067ee2c..8394a031d 100644 --- a/magicctapipe/resources/config.yaml +++ b/magicctapipe/resources/config.yaml @@ -1,9 +1,11 @@ mc_tel_ids: LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 MAGIC-I: 2 MAGIC-II: 3 - LST: image_extractor: type: "LocalPeakWindowSum" @@ -240,6 +242,8 @@ create_irf: dl2_to_dl3: interpolation_method: "nearest" # select "nearest", "linear" or "cubic" + 
interpolation_scheme: "cosZdAz" # select "cosZdAz" or "cosZd" + max_distance: "45. deg" # angle type Quantity, or comment out to remove the cut source_name: "Crab" source_ra: null # used when the source name cannot be resolved source_dec: null # used when the source name cannot be resolved diff --git a/magicctapipe/resources/config_monly.yaml b/magicctapipe/resources/config_monly.yaml index 200673f2f..5cc7a085f 100644 --- a/magicctapipe/resources/config_monly.yaml +++ b/magicctapipe/resources/config_monly.yaml @@ -1,5 +1,8 @@ mc_tel_ids: LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 MAGIC-I: 2 MAGIC-II: 3 @@ -240,6 +243,8 @@ create_irf: dl2_to_dl3: interpolation_method: "nearest" # select "nearest", "linear" or "cubic" + interpolation_scheme: "cosZdAz" # select "cosZdAz" or "cosZd" + max_distance: "45. deg" # angle type Quantity, or comment out to remove the cut source_name: "Crab" source_ra: null # used when the source name cannot be resolved source_dec: null # used when the source name cannot be resolved diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py b/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py index aa528152b..8a4c5631d 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py @@ -29,7 +29,7 @@ from astropy.coordinates import AltAz, SkyCoord, angular_separation from ctapipe.coordinates import TelescopeFrame from ctapipe.instrument import SubarrayDescription -from magicctapipe.io import get_stereo_events, save_pandas_data_in_table +from magicctapipe.io import get_stereo_events_old, save_pandas_data_in_table from magicctapipe.reco import DispRegressor, EnergyRegressor, EventClassifier __all__ = ["apply_rfs", "reconstruct_arrival_direction", "dl1_stereo_to_dl2"] @@ -266,7 +266,7 @@ def dl1_stereo_to_dl2(input_file_dl1, input_dir_rfs, output_dir): is_simulation = "true_energy" in event_data.columns logger.info(f"\nIs simulation: 
{is_simulation}") - event_data = get_stereo_events(event_data) + event_data = get_stereo_events_old(event_data) subarray = SubarrayDescription.from_hdf(input_file_dl1) tel_descriptions = subarray.tel diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index 8f9c5631f..f795f8efb 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -191,7 +191,7 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): if pre_offset_search: logger.info("\nPre offset search will be performed.") - n_pre_offset_search_events = config_coinc["n_pre_offset_search_events"] + n_pre_offset_search_events = config_coinc["n_pre_offset_search_events"] #n_pre_offset_search_events is the number of events used to estimate the time offset. Around 100 events may be enough else: logger.info("\noffset scan range defined in the config file will be used.") offset_start = u.Quantity(config_coinc["time_offset"]["start"]) @@ -245,7 +245,7 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): ) logger.info( - f"\nExtracting the {tel_name} events taken when LST-1 observed for pre offset search..." + f"\nExtracting the {tel_name} events taken when LST observed for pre offset search..." 
) time_lolim = timestamps_lst[0] - window_half_width @@ -349,8 +349,8 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): time_offsets = u.Quantity(time_offsets.round(), unit="ns", dtype=int) - # Extract the MAGIC events taken when LST-1 observed - logger.info(f"\nExtracting the {tel_name} events taken when LST-1 observed...") + # Extract the MAGIC events taken when LST observed + logger.info(f"\nExtracting the {tel_name} events taken when LST observed...") time_lolim = timestamps_lst[0] + time_offsets[0] - window_half_width time_uplim = timestamps_lst[-1] + time_offsets[-1] + window_half_width diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index 2c9b43bfb..70b2683a0 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -287,7 +287,7 @@ def mc_dl0_to_dl1(input_file, output_dir, config, focal_length): LSTs_in_use = ''.join(str(k) for k in LSTs_in_use) elif len(LSTs_in_use) > 0: LSTs_in_use = 'LST'+'_LST'.join(str(k) for k in LSTs_in_use) - print('lst',LSTs_in_use) + MAGICs_IDs = np.asarray(list(assigned_tel_ids.values())[4:6]) MAGICs_in_use = np.where(MAGICs_IDs > 0)[0] + 1 #Here we select which MAGICs are/is in use From f2f634dfc0bd591ef526456e1e0fdee7c463b0ca Mon Sep 17 00:00:00 2001 From: Raniere Date: Wed, 27 Sep 2023 15:56:22 +0200 Subject: [PATCH 16/76] Update README.md Updated README --- magicctapipe/scripts/lst1_magic/README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/magicctapipe/scripts/lst1_magic/README.md b/magicctapipe/scripts/lst1_magic/README.md index 58c88d93a..ec581bbc3 100644 --- a/magicctapipe/scripts/lst1_magic/README.md +++ b/magicctapipe/scripts/lst1_magic/README.md @@ -202,9 +202,19 @@ To find coincident events between MAGIC and LST, starting from DL1 data, we run This script creates the file config_coincidence.yaml 
containing the telescope IDs and the following parameters: ``` +mc_tel_ids: + LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 + MAGIC-I: 2 + MAGIC-II: 3 + event_coincidence: timestamp_type_lst: "dragon_time" # select "dragon_time", "tib_time" or "ucts_time" window_half_width: "300 ns" + pre_offset_search: true + n_pre_offset_search_events: 100 time_offset: start: "-10 us" stop: "0 us" From 332500ddda376262d7002c1cf21d20a6b77a82f7 Mon Sep 17 00:00:00 2001 From: Raniere Date: Wed, 27 Sep 2023 15:58:05 +0200 Subject: [PATCH 17/76] Updated README --- magicctapipe/scripts/lst1_magic/README.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/magicctapipe/scripts/lst1_magic/README.md b/magicctapipe/scripts/lst1_magic/README.md index ec581bbc3..0ac74d2ae 100644 --- a/magicctapipe/scripts/lst1_magic/README.md +++ b/magicctapipe/scripts/lst1_magic/README.md @@ -228,6 +228,14 @@ Once it is done, we add stereo parameters to the MAGIC+LST coincident DL1 data b This script creates the file config_stereo.yaml with the follwoing parameters: ``` +mc_tel_ids: + LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 + MAGIC-I: 2 + MAGIC-II: 3 + stereo_reco: quality_cuts: "(intensity > 50) & (width > 0)" theta_uplim: "6 arcmin" From e2c6c31723b48fa0be41bdaea20406e33f595ee3 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 27 Sep 2023 16:44:22 +0200 Subject: [PATCH 18/76] failed git test --- magicctapipe/io/tests/test_io.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/magicctapipe/io/tests/test_io.py b/magicctapipe/io/tests/test_io.py index 5852c88f3..ce8cb05d8 100644 --- a/magicctapipe/io/tests/test_io.py +++ b/magicctapipe/io/tests/test_io.py @@ -109,7 +109,7 @@ def test_load_train_data_files_off(gamma_stereo): events = load_train_data_files( str(gamma_stereo[0]), offaxis_min="0.2 deg", offaxis_max="0.5 deg" ) - data = events["LST1_M2"] + data = events["LST1_M1"] assert np.all(data["off_axis"] >= 0.2) assert np.all(data["off_axis"] <= 0.5) From 
24cbdf658d7011b5036738bb4f50df0efa92deed Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Thu, 28 Sep 2023 08:59:19 +0200 Subject: [PATCH 19/76] Remove commented functions + remove get_stereo() --- magicctapipe/io/__init__.py | 2 - magicctapipe/io/gadf.py | 141 ----------- magicctapipe/io/io.py | 473 +++--------------------------------- 3 files changed, 32 insertions(+), 584 deletions(-) diff --git a/magicctapipe/io/__init__.py b/magicctapipe/io/__init__.py index 7a72c7e57..fb116d1ef 100644 --- a/magicctapipe/io/__init__.py +++ b/magicctapipe/io/__init__.py @@ -10,7 +10,6 @@ telescope_combinations, format_object, get_dl2_mean, - get_stereo, get_stereo_events, get_stereo_events_old, load_dl2_data_file, @@ -41,7 +40,6 @@ "telescope_combinations", "format_object", "get_dl2_mean", - "get_stereo", "get_stereo_events", "get_stereo_events_old", "load_dl2_data_file", diff --git a/magicctapipe/io/gadf.py b/magicctapipe/io/gadf.py index 8a87567e6..48501259f 100644 --- a/magicctapipe/io/gadf.py +++ b/magicctapipe/io/gadf.py @@ -235,149 +235,8 @@ def create_event_hdu( -''' -def create_event_hdu( - event_table, config, on_time, deadc, source_name, source_ra=None, source_dec=None -): - """ - Creates a fits binary table HDU for shower events. - - Parameters - ---------- - event_table: astropy.table.table.QTable - Table of the DL2 events surviving gammaness cuts - config: dict - yaml file with information about the telescope IDs. Typically called evoked from "config_DL3.yaml" in the main scripts. 
- on_time: astropy.table.table.QTable - ON time of the input data - deadc: float - Dead time correction factor - source_name: str - Name of the observed source - source_ra: str - Right ascension of the observed source, whose format should be - acceptable by `astropy.coordinates.sky_coordinate.SkyCoord` - (Used only when the source name cannot be resolved) - source_dec: str - Declination of the observed source, whose format should be - acceptable by `astropy.coordinates.sky_coordinate.SkyCoord` - (Used only when the source name cannot be resolved) - - Returns - ------- - event_hdu: astropy.io.fits.hdu.table.BinTableHDU - Event HDU - - Raises - ------ - ValueError - If the source name cannot be resolved and also either or both of - source RA/Dec coordinate is set to None - """ - _, TEL_COMBINATIONS = telescope_combinations(config) - - mjdreff, mjdrefi = np.modf(MJDREF.mjd) - - time_start = Time(event_table["timestamp"][0], format="unix", scale="utc") - time_start_iso = time_start.to_value("iso", "date_hms") - - time_end = Time(event_table["timestamp"][-1], format="unix", scale="utc") - time_end_iso = time_end.to_value("iso", "date_hms") - - # Calculate the elapsed and effective time - elapsed_time = time_end - time_start - effective_time = on_time * deadc - - # Get the instruments used for the observation - combo_types_unique = np.unique(event_table["combo_type"]) - tel_combos = np.array(list(TEL_COMBINATIONS.keys()))[combo_types_unique] - - tel_list = [tel_combo.split("_") for tel_combo in tel_combos] - tel_list_unique = np.unique(sum(tel_list, [])) - - instruments = "_".join(tel_list_unique) - - # Transfer the RA/Dec directions to the galactic coordinate - event_coords = SkyCoord( - ra=event_table["reco_ra"], dec=event_table["reco_dec"], frame="icrs" - ) - - event_coords = event_coords.galactic - - try: - # Try to get the source coordinate from the input name - source_coord = SkyCoord.from_name(source_name, frame="icrs") - - except Exception: - logger.warning( - 
f"WARNING: The source name '{source_name}' could not be resolved. " - f"Setting the input RA/Dec coordinate ({source_ra}, {source_dec})..." - ) - - if (source_ra is None) or (source_dec is None): - raise ValueError("The input RA/Dec coordinate is set to `None`.") - - source_coord = SkyCoord(ra=source_ra, dec=source_dec, frame="icrs") - - # Create a table - qtable = QTable( - data={ - "EVENT_ID": event_table["event_id"], - "TIME": event_table["timestamp"], - "RA": event_table["reco_ra"], - "DEC": event_table["reco_dec"], - "ENERGY": event_table["reco_energy"], - "GAMMANESS": event_table["gammaness"], - "MULTIP": event_table["multiplicity"], - "GLON": event_coords.l.to("deg"), - "GLAT": event_coords.b.to("deg"), - "ALT": event_table["reco_alt"].to("deg"), - "AZ": event_table["reco_az"].to("deg"), - } - ) - # Create a header - header = fits.Header( - cards=[ - ("CREATED", Time.now().utc.iso), - ("HDUCLAS1", "EVENTS"), - ("OBS_ID", np.unique(event_table["obs_id"])[0]), - ("DATE-OBS", time_start_iso[:10]), - ("TIME-OBS", time_start_iso[11:]), - ("DATE-END", time_end_iso[:10]), - ("TIME-END", time_end_iso[11:]), - ("TSTART", time_start.value), - ("TSTOP", time_end.value), - ("MJDREFI", mjdrefi), - ("MJDREFF", mjdreff), - ("TIMEUNIT", "s"), - ("TIMESYS", "UTC"), - ("TIMEREF", "TOPOCENTER"), - ("ONTIME", on_time.value), - ("TELAPSE", elapsed_time.to_value("s")), - ("DEADC", deadc), - ("LIVETIME", effective_time.value), - ("OBJECT", source_name), - ("OBS_MODE", "WOBBLE"), - ("N_TELS", np.max(event_table["multiplicity"])), - ("TELLIST", instruments), - ("INSTRUME", instruments), - ("RA_PNT", event_table["pointing_ra"][0].value, "deg"), - ("DEC_PNT", event_table["pointing_dec"][0].value, "deg"), - ("ALT_PNT", event_table["pointing_alt"][0].to_value("deg"), "deg"), - ("AZ_PNT", event_table["pointing_az"][0].to_value("deg"), "deg"), - ("RA_OBJ", source_coord.ra.to_value("deg"), "deg"), - ("DEC_OBJ", source_coord.dec.to_value("deg"), "deg"), - ("FOVALIGN", "RADEC"), - ] - ) - - 
# Create a HDU - event_hdu = fits.BinTableHDU(qtable, header=header, name="EVENTS") - - return event_hdu -''' def create_gti_hdu(event_table): """ diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index 57c76c4cd..2c5028b08 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -24,8 +24,7 @@ __all__ = [ "telescope_combinations", - "format_object", - "get_stereo", + "format_object", "get_stereo_events", "get_stereo_events_old", "get_dl2_mean", @@ -135,47 +134,7 @@ def format_object(input_object): return string -def get_stereo( - event_data, config, quality_cuts=None, group_index=["obs_id", "event_id"] -): - - """ - Gets the stereo events surviving specified quality cuts without overwriting the - multiplicity and combo type. This function is useful when loading the DL2 data - when we apply one RF per telescope (and not per combo type). - - Parameters - ---------- - event_data: pandas.core.frame.DataFrame - Data frame of shower events - config: dict - Read from the yaml file with information about the telescope IDs. 
Typically called "config_general.yaml" - quality_cuts: str - Quality cuts applied to the input data - group_index: list - Index to group telescope events - - - Returns - ------- - event_data_stereo: pandas.core.frame.DataFrame - Data frame of the stereo events surviving the quality cuts - """ - - TEL_NAMES, TEL_COMBINATIONS = telescope_combinations(config) - - event_data_stereo = event_data.copy() - # Apply the quality cuts - if quality_cuts is not None: - event_data_stereo.query(quality_cuts, inplace=True) - - max_multiplicity=len(TEL_NAMES.keys()) - # Extract stereo events - event_data_stereo["multiplicity"] = event_data_stereo.groupby(group_index).size() - event_data_stereo.query(f"multiplicity >1 & multiplicity <= {max_multiplicity}", inplace=True) - - return event_data_stereo def get_stereo_events_old( event_data, quality_cuts=None, group_index=["obs_id", "event_id"] ): @@ -262,7 +221,7 @@ def get_stereo_events_old( def get_stereo_events( - event_data, config, quality_cuts=None, group_index=["obs_id", "event_id"] + event_data, config, quality_cuts=None, group_index=["obs_id", "event_id"], eval_multi_combo=True ): """ Gets the stereo events surviving specified quality cuts. 
@@ -300,47 +259,47 @@ def get_stereo_events( # Extract stereo events event_data_stereo["multiplicity"] = event_data_stereo.groupby(group_index).size() event_data_stereo.query(f"multiplicity >1 & multiplicity <= {max_multiplicity}", inplace=True) + if eval_multi_combo==True: + # Check the total number of events + n_events_total = len(event_data_stereo.groupby(group_index).size()) + logger.info(f"\nIn total {n_events_total} stereo events are found:") - # Check the total number of events - n_events_total = len(event_data_stereo.groupby(group_index).size()) - logger.info(f"\nIn total {n_events_total} stereo events are found:") + n_events_per_combo = {} - n_events_per_combo = {} + # Loop over every telescope combination type + for combo_type, (tel_combo, tel_ids) in enumerate(TEL_COMBINATIONS.items()): + multiplicity = len(tel_ids) - # Loop over every telescope combination type - for combo_type, (tel_combo, tel_ids) in enumerate(TEL_COMBINATIONS.items()): - multiplicity = len(tel_ids) + df_events = event_data_stereo.query( + f"(tel_id == {tel_ids}) & (multiplicity == {multiplicity})" + ).copy() - df_events = event_data_stereo.query( - f"(tel_id == {tel_ids}) & (multiplicity == {multiplicity})" - ).copy() + # Here we recalculate the multiplicity and apply the cut again, + # since with the above cut the events belonging to other + # combination types are also extracted. For example, in case of + # tel_id = [1, 2], the tel 1 events of the combination [1, 3] + # and the tel 2 events of the combination [2, 3] remain in the + # data frame, whose multiplicity will be recalculated as 1 and + # so will be removed with the following cuts. - # Here we recalculate the multiplicity and apply the cut again, - # since with the above cut the events belonging to other - # combination types are also extracted. 
For example, in case of - # tel_id = [1, 2], the tel 1 events of the combination [1, 3] - # and the tel 2 events of the combination [2, 3] remain in the - # data frame, whose multiplicity will be recalculated as 1 and - # so will be removed with the following cuts. + df_events["multiplicity"] = df_events.groupby(group_index).size() + df_events.query(f"multiplicity == {multiplicity}", inplace=True) - df_events["multiplicity"] = df_events.groupby(group_index).size() - df_events.query(f"multiplicity == {multiplicity}", inplace=True) + # Assign the combination type + event_data_stereo.loc[df_events.index, "combo_type"] = combo_type - # Assign the combination type - event_data_stereo.loc[df_events.index, "combo_type"] = combo_type + n_events = len(df_events.groupby(group_index).size()) + percentage = 100 * n_events / n_events_total - n_events = len(df_events.groupby(group_index).size()) - percentage = 100 * n_events / n_events_total + key = f"{tel_combo} (type {combo_type})" + value = f"{n_events:.0f} events ({percentage:.1f}%)" - key = f"{tel_combo} (type {combo_type})" - value = f"{n_events:.0f} events ({percentage:.1f}%)" - - n_events_per_combo[key] = value + n_events_per_combo[key] = value - event_data_stereo = event_data_stereo.astype({"combo_type": int}) + event_data_stereo = event_data_stereo.astype({"combo_type": int}) - # Show the number of events per combination type - logger.info(format_object(n_events_per_combo)) + # Show the number of events per combination type + logger.info(format_object(n_events_per_combo)) return event_data_stereo @@ -621,96 +580,7 @@ def load_magic_dl1_data_files(input_dir, config): return event_data, subarray -''' -def load_train_data_files( - input_dir, config, offaxis_min=None, offaxis_max=None, true_event_class=None -): - """ - Loads DL1-stereo data files and separates the shower events per - telescope combination type for training RFs. 
- Parameters - ---------- - input_dir: str - Path to a directory where input DL1-stereo files are stored - config: dict - yaml file with information about the telescope IDs. Typically called "config_general.yaml" - offaxis_min: str - Minimum shower off-axis angle allowed, whose format should be - acceptable by `astropy.units.quantity.Quantity` - offaxis_max: str - Maximum shower off-axis angle allowed, whose format should be - acceptable by `astropy.units.quantity.Quantity` - true_event_class: int - True event class of the input events - - - Returns - ------- - data_train: dict - Data frames of the shower events separated by the telescope - combination types - - Raises - ------ - FileNotFoundError - If any DL1-stereo data files are not found in the input - directory - """ - - _, TEL_COMBINATIONS = telescope_combinations(config) - - # Find the input files - file_mask = f"{input_dir}/dl1_stereo_*.h5" - - input_files = glob.glob(file_mask) - input_files.sort() - - if len(input_files) == 0: - raise FileNotFoundError( - "Could not find any DL1-stereo data files in the input directory." 
- ) - - # Load the input files - logger.info("\nThe following DL1-stereo data files are found:") - - data_list = [] - - for input_file in input_files: - logger.info(input_file) - - df_events = pd.read_hdf(input_file, key="events/parameters") - data_list.append(df_events) - - event_data = pd.concat(data_list) - event_data.set_index(GROUP_INDEX_TRAIN, inplace=True) - event_data.sort_index(inplace=True) - - if offaxis_min is not None: - offaxis_min = u.Quantity(offaxis_min).to_value("deg") - event_data.query(f"off_axis >= {offaxis_min}", inplace=True) - - if offaxis_max is not None: - offaxis_max = u.Quantity(offaxis_max).to_value("deg") - event_data.query(f"off_axis <= {offaxis_max}", inplace=True) - - if true_event_class is not None: - event_data["true_event_class"] = true_event_class - - event_data = get_stereo_events(event_data, config, group_index=GROUP_INDEX_TRAIN) - - data_train = {} - - # Loop over every telescope combination type - for combo_type, tel_combo in enumerate(TEL_COMBINATIONS.keys()): - df_events = event_data.query(f"combo_type == {combo_type}") - - if not df_events.empty: - data_train[tel_combo] = df_events - - return data_train - -''' def load_train_data_files( input_dir, offaxis_min=None, offaxis_max=None, true_event_class=None ): @@ -886,286 +756,7 @@ def load_train_data_files_tel(input_dir, config, offaxis_min=None, offaxis_max=N return data_train -''' -def load_mc_dl2_data_file(config, input_file, quality_cuts, event_type, weight_type_dl2): - """ - Loads a MC DL2 data file for creating the IRFs. - - Parameters - ---------- - config: dict - evoked from an yaml file with information about the telescope IDs. 
Typically called "config_RF.yaml" - input_file: str - Path to an input MC DL2 data file - quality_cuts: str - Quality cuts applied to the input events - event_type: str - Type of the events which will be used - - "software" uses software coincident events, - "software_only_3tel" uses only 3-tel combination events, - "magic_only" uses only MAGIC-stereo combination events, and - "hardware" uses all the telescope combination events - weight_type_dl2: str - Type of the weight for averaging telescope-wise DL2 parameters - - "simple", "variance" or "intensity" are allowed - - Returns - ------- - event_table: astropy.table.table.QTable - Table of the MC DL2 events surviving the cuts - pointing: numpy.ndarray - Telescope pointing direction (zd, az) in the unit of degree - sim_info: pyirf.simulations.SimulatedEventsInfo - Container of the simulation information - - Raises - ------ - ValueError - If the input event type is not known - """ - - TEL_NAMES, TEL_COMBINATIONS = telescope_combinations(config) - combo_types = np.asarray(range(len(TEL_COMBINATIONS))) - three_or_more = [] - for n,combination in enumerate(TEL_COMBINATIONS.values()): - if len(combination) >= 3: - three_or_more.append(n) - - - # Load the input file - df_events = pd.read_hdf(input_file, key="events/parameters") - df_events.set_index(["obs_id", "event_id", "tel_id"], inplace=True) - df_events.sort_index(inplace=True) - - df_events = get_stereo(df_events, config, quality_cuts) - - logger.info(f"\nExtracting the events of the '{event_type}' type...") - - if event_type == "software": - # The events of the MAGIC-stereo combination are excluded - df_events.query(f"(combo_type < {combo_types[-1]}) & (magic_stereo == True)", inplace=True) - - elif event_type == "software_3tels_or_more": - df_events.query(f"combo_type == {three_or_more}", inplace=True) - - elif event_type == "software_6_tel": - df_events.query(f"combo_type < {combo_types[-1]}", inplace=True) - elif event_type == "magic_only": - 
df_events.query(f"combo_type == {combo_types[-1]}", inplace=True) - - elif event_type != "hardware": - raise ValueError(f"Unknown event type '{event_type}'.") - n_events = len(df_events.groupby(["obs_id", "event_id"]).size()) - logger.info(f"--> {n_events} stereo events") - - # Get the mean DL2 parameters - df_dl2_mean = get_dl2_mean(df_events, weight_type_dl2) - df_dl2_mean.reset_index(inplace=True) - - # Convert the pandas data frame to the astropy QTable - event_table = QTable.from_pandas(df_dl2_mean) - - event_table["pointing_alt"] *= u.rad - event_table["pointing_az"] *= u.rad - event_table["true_alt"] *= u.deg - event_table["true_az"] *= u.deg - event_table["reco_alt"] *= u.deg - event_table["reco_az"] *= u.deg - event_table["true_energy"] *= u.TeV - event_table["reco_energy"] *= u.TeV - - # Calculate some angular distances - event_table["theta"] = calculate_theta( - event_table, event_table["true_az"], event_table["true_alt"] - ) - - event_table["true_source_fov_offset"] = calculate_source_fov_offset(event_table) - event_table["reco_source_fov_offset"] = calculate_source_fov_offset( - event_table, prefix="reco" - ) - - # Get the telescope pointing direction - pointing_zd = 90 - event_table["pointing_alt"].mean().to_value("deg") - pointing_az = event_table["pointing_az"].mean().to_value("deg") - - pointing = np.array([pointing_zd, pointing_az]).round(3) - - # Get the simulation configuration - sim_config = pd.read_hdf(input_file, key="simulation/config") - - n_total_showers = ( - sim_config["num_showers"][0] - * sim_config["shower_reuse"][0] - * len(np.unique(event_table["obs_id"])) - ) - - min_viewcone_radius = sim_config["min_viewcone_radius"][0] * u.deg - max_viewcone_radius = sim_config["max_viewcone_radius"][0] * u.deg - - viewcone_diff = max_viewcone_radius - min_viewcone_radius - - if viewcone_diff < u.Quantity(0.001, unit="deg"): - # Handle ring-wobble MCs as same as point-like MCs - viewcone = 0 * u.deg - else: - viewcone = max_viewcone_radius - - 
sim_info = SimulatedEventsInfo( - n_showers=n_total_showers, - energy_min=u.Quantity(sim_config["energy_range_min"][0], unit="TeV"), - energy_max=u.Quantity(sim_config["energy_range_max"][0], unit="TeV"), - max_impact=u.Quantity(sim_config["max_scatter_range"][0], unit="m"), - spectral_index=sim_config["spectral_index"][0], - viewcone=viewcone, - ) - - return event_table, pointing, sim_info - - -def load_dl2_data_file(config, input_file, quality_cuts, event_type, weight_type_dl2): - """ - Loads a DL2 data file for processing to DL3. - - Parameters - ---------- - config: dict - evoked from an yaml file with information about the telescope IDs. Typically called "config_DL3.yaml" - input_file: str - Path to an input DL2 data file - quality_cuts: str - Quality cuts applied to the input events - event_type: str - Type of the events which will be used - - "software" uses software coincident events, - "software_only_3tel" uses only 3-tel combination events, - "magic_only" uses only MAGIC-stereo combination events, and - "hardware" uses all the telescope combination events - weight_type_dl2: str - Type of the weight for averaging telescope-wise DL2 parameters - - "simple", "variance" or "intensity" are allowed - - Returns - ------- - event_table: astropy.table.table.QTable - Table of the MC DL2 events surviving the cuts - on_time: astropy.units.quantity.Quantity - ON time of the input data - deadc: float - Dead time correction factor - - Raises - ------ - ValueError - If the input event type is not known - """ - - TEL_NAMES, TEL_COMBINATIONS = telescope_combinations(config) - combo_types = np.asarray(range(len(TEL_COMBINATIONS))) - three_or_more = [] - for n,combination in enumerate(TEL_COMBINATIONS.values()): - if len(combination) >= 3: - three_or_more.append(n) - - - # Load the input file - event_data = pd.read_hdf(input_file, key="events/parameters") - event_data.set_index(["obs_id", "event_id", "tel_id"], inplace=True) - event_data.sort_index(inplace=True) - - 
event_data = get_stereo(event_data, config, quality_cuts) - - logger.info(f"\nExtracting the events of the '{event_type}' type...") - - if event_type == "software": - # The events of the MAGIC-stereo combination are excluded - event_data.query(f"combo_type < {combo_types[-1]}", inplace=True) - - elif event_type == "software_3tels_or_more": - event_data.query(f"combo_type == {three_or_more}", inplace=True) - - elif event_type == "software_6_tel": - event_data.query(f"combo_type < {combo_types[-1]}", inplace=True) - - elif event_type == "magic_only": - event_data.query(f"combo_type == {combo_types[-1]}", inplace=True) - - elif event_type == "hardware": - logger.warning( - "WARNING: Please confirm that this type is correct for the input data, " - "since the hardware trigger between LST-1 and MAGIC may NOT be used." - ) - - else: - raise ValueError(f"Unknown event type '{event_type}'.") - - n_events = len(event_data.groupby(["obs_id", "event_id"]).size()) - logger.info(f"--> {n_events} stereo events") - - # Get the mean DL2 parameters - df_dl2_mean = get_dl2_mean(event_data, weight_type_dl2) - df_dl2_mean.reset_index(inplace=True) - - # Convert the pandas data frame to astropy QTable - event_table = QTable.from_pandas(df_dl2_mean) - - event_table["pointing_alt"] *= u.rad - event_table["pointing_az"] *= u.rad - event_table["pointing_ra"] *= u.deg - event_table["pointing_dec"] *= u.deg - event_table["reco_alt"] *= u.deg - event_table["reco_az"] *= u.deg - event_table["reco_ra"] *= u.deg - event_table["reco_dec"] *= u.deg - event_table["reco_energy"] *= u.TeV - event_table["timestamp"] *= u.s - - # Calculate the ON time - time_diffs = np.diff(event_table["timestamp"]) - on_time = time_diffs[time_diffs < TIME_DIFF_UPLIM].sum() - - # Calculate the dead time correction factor. 
Here we use the - # following equations to get the correction factor `deadc`: - - # rate = 1 / ( - dead_time) - # deadc = 1 / (1 + rate * dead_time) = 1 - dead_time / - - logger.info("\nCalculating the dead time correction factor...") - - event_data.query(f"0 < time_diff < {TIME_DIFF_UPLIM.to_value('s')}", inplace=True) - - deadc_list = [] - - # Calculate the LST-1 correction factor - time_diffs_lst = event_data.query("tel_id == 1")["time_diff"] - - if len(time_diffs_lst) > 0: - deadc_lst = 1 - DEAD_TIME_LST.to_value("s") / time_diffs_lst.mean() - logger.info(f"LST-1: {deadc_lst.round(3)}") - - deadc_list.append(deadc_lst) - - # Calculate the MAGIC correction factor with one of the telescopes - # whose number of events is larger than the other - time_diffs_m1 = event_data.query("tel_id == 2")["time_diff"] - time_diffs_m2 = event_data.query("tel_id == 3")["time_diff"] - - if len(time_diffs_m1) > len(time_diffs_m2): - deadc_magic = 1 - DEAD_TIME_MAGIC.to_value("s") / time_diffs_m1.mean() - logger.info(f"MAGIC(-I): {deadc_magic.round(3)}") - else: - deadc_magic = 1 - DEAD_TIME_MAGIC.to_value("s") / time_diffs_m2.mean() - logger.info(f"MAGIC(-II): {deadc_magic.round(3)}") - - deadc_list.append(deadc_magic) - - # Calculate the total correction factor as the multiplicity of the - # telescope-wise correction factors - deadc = np.prod(deadc_list) - logger.info(f"--> Total correction factor: {deadc.round(3)}") - - return event_table, on_time, deadc -''' def load_mc_dl2_data_file(input_file, quality_cuts, event_type, weight_type_dl2): """ Loads a MC DL2 data file for creating the IRFs. 
From 93d729afb9d83b663690ccdfa628ab163b4be0fa Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Thu, 28 Sep 2023 09:05:47 +0200 Subject: [PATCH 20/76] config.yaml default --- magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py | 2 +- magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index 70b2683a0..e358931d4 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -509,7 +509,7 @@ def main(): "-c", dest="config_file", type=str, - default="./config_step1.yaml", + default="./config.yaml", help="Path to a configuration file", ) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py index cdf367a31..d967598fe 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py @@ -373,7 +373,7 @@ def main(): "-c", dest="config_file", type=str, - default="./config_general.yaml", + default="./config.yaml", help="Path to a configuration file", ) From fe694208ece5ee2eba4518768db6ec2d43b9cb40 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Thu, 28 Sep 2023 09:11:38 +0200 Subject: [PATCH 21/76] LST_version --- magicctapipe/scripts/lst1_magic/coincident_events.py | 7 ++++--- magicctapipe/scripts/lst1_magic/config_general.yaml | 1 + 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/coincident_events.py b/magicctapipe/scripts/lst1_magic/coincident_events.py index 1fcb29ce5..40e82bda9 100644 --- a/magicctapipe/scripts/lst1_magic/coincident_events.py +++ b/magicctapipe/scripts/lst1_magic/coincident_events.py @@ -48,7 +48,7 @@ def configfile_coincidence(ids, target_dir): f.close() -def 
linking_lst(target_dir, LST_runs): +def linking_lst(target_dir, LST_runs, LST_version): """ This function links the LST data paths to the working directory. This is a preparation step required for running lst1_magic_event_coincidence.py @@ -68,7 +68,7 @@ def linking_lst(target_dir, LST_runs): for i in LST_runs: lstObsDir = i[0].split("_")[0]+i[0].split("_")[1]+i[0].split("_")[2] - inputdir = f'/fefs/aswg/data/real/DL1/{lstObsDir}/v0.9/tailcut84' + inputdir = f'/fefs/aswg/data/real/DL1/{lstObsDir}/{LST_version}/tailcut84' outputdir = f'{coincidence_DL1_dir}/Coincident/{lstObsDir}' list_of_subruns = np.sort(glob.glob(f"{inputdir}/dl1*Run*{i[1]}*.*.h5")) if os.path.exists(f"{outputdir}/list_LST.txt"): @@ -137,6 +137,7 @@ def main(): LST_runs_and_dates = config["general"]["LST_runs"] LST_runs = np.genfromtxt(LST_runs_and_dates,dtype=str,delimiter=',') + LST_version = config["general"]["LST_version"] print("***** Generating file config_coincidence.yaml...") print("***** This file can be found in ",target_dir) @@ -144,7 +145,7 @@ def main(): print("***** Linking the paths to LST data files...") - linking_lst(target_dir, LST_runs) #linking the data paths to current working directory + linking_lst(target_dir, LST_runs, LST_version) #linking the data paths to current working directory print("***** Generating the bashscript...") diff --git a/magicctapipe/scripts/lst1_magic/config_general.yaml b/magicctapipe/scripts/lst1_magic/config_general.yaml index dd2cfadde..93b9347fb 100644 --- a/magicctapipe/scripts/lst1_magic/config_general.yaml +++ b/magicctapipe/scripts/lst1_magic/config_general.yaml @@ -19,6 +19,7 @@ general: target_RA_deg : 83.633083 #RA in degrees target_Dec_deg: 22.0145 #Dec in degrees SimTel_version: "v1.4" + LST_version: "v0.9" focal_length : "effective" #effective #nominal MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" LST_runs : "LST_runs.txt" From 517a0460d4418d3e66724e2a34d48424a5bff24d Mon Sep 17 00:00:00 2001 
From: Elisa-Visentin Date: Thu, 28 Sep 2023 09:18:37 +0200 Subject: [PATCH 22/76] calib module for Calibration functions --- magicctapipe/image/__init__.py | 7 ++ magicctapipe/image/calib.py | 102 ++++++++++++++++++ .../lst1_magic/lst1_magic_mc_dl0_to_dl1.py | 91 +--------------- 3 files changed, 112 insertions(+), 88 deletions(-) create mode 100644 magicctapipe/image/calib.py diff --git a/magicctapipe/image/__init__.py b/magicctapipe/image/__init__.py index 34fa8194d..99bd24db3 100644 --- a/magicctapipe/image/__init__.py +++ b/magicctapipe/image/__init__.py @@ -9,10 +9,17 @@ get_leakage, ) +from .calib import ( + Calibrate_LST, + Calibrate_MAGIC +) + __all__ = [ "MAGICClean", "PixelTreatment", "get_num_islands_MAGIC", "clean_image_params", "get_leakage", + "Calibrate_LST", + "Calibrate_MAGIC" ] diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py new file mode 100644 index 000000000..ecd2d4ca1 --- /dev/null +++ b/magicctapipe/image/calib.py @@ -0,0 +1,102 @@ + + +import numpy as np + + +from ctapipe.image import ( + apply_time_delta_cleaning, + number_of_islands, + tailcuts_clean, +) + +from lstchain.image.cleaning import apply_dynamic_cleaning +from lstchain.image.modifier import ( + add_noise_in_pixels, + random_psf_smearer, +) + +__all__ = [ + "Calibrate_LST", "Calibrate_MAGIC" +] + +def Calibrate_LST(event, tel_id, rng, config_lst, camera_geoms, calibrator_lst, increase_nsb, use_time_delta_cleaning, use_dynamic_cleaning ): + + """ + This function computes and returns signal_pixels, image, and peak_time for LST + """ + + calibrator_lst._calibrate_dl0(event, tel_id) + calibrator_lst._calibrate_dl1(event, tel_id) + + image = event.dl1.tel[tel_id].image.astype(np.float64) + peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) + + increase_psf = config_lst["increase_psf"]["use"] + use_only_main_island = config_lst["use_only_main_island"] + + if increase_nsb: + # Add extra noise in pixels + image = add_noise_in_pixels(rng, image, 
**config_lst["increase_nsb"]) + + if increase_psf: + # Smear the image + image = random_psf_smearer( + image=image, + fraction=config_lst["increase_psf"]["fraction"], + indices=camera_geoms[tel_id].neighbor_matrix_sparse.indices, + indptr=camera_geoms[tel_id].neighbor_matrix_sparse.indptr, + ) + + # Apply the image cleaning + signal_pixels = tailcuts_clean( + camera_geoms[tel_id], image, **config_lst["tailcuts_clean"] + ) + + if use_time_delta_cleaning: + signal_pixels = apply_time_delta_cleaning( + geom=camera_geoms[tel_id], + mask=signal_pixels, + arrival_times=peak_time, + **config_lst["time_delta_cleaning"], + ) + + if use_dynamic_cleaning: + signal_pixels = apply_dynamic_cleaning( + image, signal_pixels, **config_lst["dynamic_cleaning"] + ) + + if use_only_main_island: + _, island_labels = number_of_islands(camera_geoms[tel_id], signal_pixels) + n_pixels_on_island = np.bincount(island_labels.astype(np.int64)) + + # The first index means the pixels not surviving + # the cleaning, so should not be considered + n_pixels_on_island[0] = 0 + max_island_label = np.argmax(n_pixels_on_island) + signal_pixels[island_labels != max_island_label] = False + + return signal_pixels, image, peak_time + + +def Calibrate_MAGIC(event, tel_id, config_magic, magic_clean, calibrator_magic): + + """ + This function computes and returns signal_pixels, image, and peak_time for MAGIC + """ + + calibrator_magic._calibrate_dl0(event, tel_id) + calibrator_magic._calibrate_dl1(event, tel_id) + + image = event.dl1.tel[tel_id].image.astype(np.float64) + peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) + use_charge_correction = config_magic["charge_correction"]["use"] + + if use_charge_correction: + # Scale the charges by the correction factor + image *= config_magic["charge_correction"]["factor"] + + # Apply the image cleaning + signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( + event_image=image, event_pulse_time=peak_time + ) + return signal_pixels, image, 
peak_time diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index e358931d4..cc2fa9b05 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -37,27 +37,23 @@ from astropy.coordinates import Angle, angular_separation from ctapipe.calib import CameraCalibrator from ctapipe.image import ( - apply_time_delta_cleaning, hillas_parameters, leakage_parameters, number_of_islands, - tailcuts_clean, timing_parameters, ) from ctapipe.instrument import SubarrayDescription from ctapipe.io import EventSource, HDF5TableWriter -from lstchain.image.cleaning import apply_dynamic_cleaning -from lstchain.image.modifier import ( - add_noise_in_pixels, - random_psf_smearer, +from lstchain.image.modifier import ( set_numba_seed, ) from magicctapipe.image import MAGICClean +from magicctapipe.image.calib import Calibrate_LST, Calibrate_MAGIC from magicctapipe.io import SimEventInfoContainer, format_object from magicctapipe.utils import calculate_disp, calculate_impact from traitlets.config import Config -__all__ = ["Calibrate_LST", "Calibrate_MAGIC","mc_dl0_to_dl1"] +__all__ = ["mc_dl0_to_dl1"] logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) @@ -68,87 +64,6 @@ -def Calibrate_LST(event, tel_id, rng, config_lst, camera_geoms, calibrator_lst, increase_nsb, use_time_delta_cleaning, use_dynamic_cleaning ): - - """ - This function computes and returns signal_pixels, image, and peak_time for LST - """ - - calibrator_lst._calibrate_dl0(event, tel_id) - calibrator_lst._calibrate_dl1(event, tel_id) - - image = event.dl1.tel[tel_id].image.astype(np.float64) - peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - - increase_psf = config_lst["increase_psf"]["use"] - use_only_main_island = config_lst["use_only_main_island"] - - if increase_nsb: - # Add extra noise in pixels - image = 
add_noise_in_pixels(rng, image, **config_lst["increase_nsb"]) - - if increase_psf: - # Smear the image - image = random_psf_smearer( - image=image, - fraction=config_lst["increase_psf"]["fraction"], - indices=camera_geoms[tel_id].neighbor_matrix_sparse.indices, - indptr=camera_geoms[tel_id].neighbor_matrix_sparse.indptr, - ) - - # Apply the image cleaning - signal_pixels = tailcuts_clean( - camera_geoms[tel_id], image, **config_lst["tailcuts_clean"] - ) - - if use_time_delta_cleaning: - signal_pixels = apply_time_delta_cleaning( - geom=camera_geoms[tel_id], - mask=signal_pixels, - arrival_times=peak_time, - **config_lst["time_delta_cleaning"], - ) - - if use_dynamic_cleaning: - signal_pixels = apply_dynamic_cleaning( - image, signal_pixels, **config_lst["dynamic_cleaning"] - ) - - if use_only_main_island: - _, island_labels = number_of_islands(camera_geoms[tel_id], signal_pixels) - n_pixels_on_island = np.bincount(island_labels.astype(np.int64)) - - # The first index means the pixels not surviving - # the cleaning, so should not be considered - n_pixels_on_island[0] = 0 - max_island_label = np.argmax(n_pixels_on_island) - signal_pixels[island_labels != max_island_label] = False - - return signal_pixels, image, peak_time - - -def Calibrate_MAGIC(event, tel_id, config_magic, magic_clean, calibrator_magic): - - """ - This function computes and returns signal_pixels, image, and peak_time for MAGIC - """ - - calibrator_magic._calibrate_dl0(event, tel_id) - calibrator_magic._calibrate_dl1(event, tel_id) - - image = event.dl1.tel[tel_id].image.astype(np.float64) - peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - use_charge_correction = config_magic["charge_correction"]["use"] - - if use_charge_correction: - # Scale the charges by the correction factor - image *= config_magic["charge_correction"]["factor"] - - # Apply the image cleaning - signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( - event_image=image, event_pulse_time=peak_time - ) - 
return signal_pixels, image, peak_time def mc_dl0_to_dl1(input_file, output_dir, config, focal_length): From 75093a62eb45ff201712c443aff44e0c80487e97 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Thu, 28 Sep 2023 09:51:50 +0200 Subject: [PATCH 23/76] Focal length (MCs) --- magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index cc2fa9b05..6906d0392 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -433,8 +433,9 @@ def main(): "-f", dest="focal_length_choice", type=str, + choices = ["nominal", "effective"], default="effective", - help='Standard is "effective"', + help='Choice of focal length, either "effective" or "nominal". The default (and standard) value is "effective"', ) args = parser.parse_args() #Here we select all 3 parameters collected above From 756782e22f8e79301d63bf1358fae8206cfd206e Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Thu, 28 Sep 2023 10:23:34 +0200 Subject: [PATCH 24/76] Config and scripts paths + setup.py --- .../scripts/lst1_magic/coincident_events.py | 27 ++++++++--- .../scripts/lst1_magic/config_general.yaml | 1 + ...ing_runs_and_splitting_training_samples.py | 48 ++++++++++++------- .../lst1_magic/setting_up_config_and_dir.py | 39 ++++++++++----- .../scripts/lst1_magic/stereo_events.py | 39 ++++++++++----- setup.py | 6 +++ 6 files changed, 113 insertions(+), 47 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/coincident_events.py b/magicctapipe/scripts/lst1_magic/coincident_events.py index 40e82bda9..94cf1d726 100644 --- a/magicctapipe/scripts/lst1_magic/coincident_events.py +++ b/magicctapipe/scripts/lst1_magic/coincident_events.py @@ -21,6 +21,7 @@ import yaml import logging from pathlib import Path +import argparse logger = 
logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) @@ -83,7 +84,7 @@ def linking_lst(target_dir, LST_runs, LST_version): f.close() -def bash_coincident(target_dir): +def bash_coincident(target_dir, scripts_dir): """ This function generates the bashscript for running the coincidence analysis. @@ -117,7 +118,7 @@ def bash_coincident(target_dir): f.write("SAMPLE_LIST=($(<$OUTPUTDIR/list_LST.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") f.write("export LOG=$OUTPUTDIR/coincidence_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n magic-lst python lst1_magic_event_coincidence.py --input-file-lst $SAMPLE --input-dir-magic $INM --output-dir $OUTPUTDIR --config-file {target_dir}/config_coincidence.yaml >$LOG 2>&1") + f.write(f"conda run -n magic-lst python {scripts_dir}/lst1_magic_event_coincidence.py --input-file-lst $SAMPLE --input-dir-magic $INM --output-dir $OUTPUTDIR --config-file {target_dir}/config_coincidence.yaml >$LOG 2>&1") f.close() @@ -127,13 +128,27 @@ def main(): """ Here we read the config_general.yaml file and call the functions defined above. 
""" - - - with open("config_general.yaml", "rb") as f: # "rb" mode opens the file in binary format for reading + parser = argparse.ArgumentParser() + parser.add_argument( + "--config-file", + "-c", + dest="config_file", + type=str, + default="./config_general.yaml", + help="Path to a configuration file", + ) + + args = parser.parse_args() + with open( + args.config_file, "rb" + ) as f: # "rb" mode opens the file in binary format for reading config = yaml.safe_load(f) + telescope_ids = list(config["mc_tel_ids"].values()) target_dir = str(Path(config["directories"]["workspace_dir"]))+"/"+config["directories"]["target_name"] + + scripts_dir = str(Path(config["directories"]["scripts_dir"])) LST_runs_and_dates = config["general"]["LST_runs"] LST_runs = np.genfromtxt(LST_runs_and_dates,dtype=str,delimiter=',') @@ -149,7 +164,7 @@ def main(): print("***** Generating the bashscript...") - bash_coincident(target_dir) + bash_coincident(target_dir, scripts_dir) print("***** Submitting processess to the cluster...") diff --git a/magicctapipe/scripts/lst1_magic/config_general.yaml b/magicctapipe/scripts/lst1_magic/config_general.yaml index 93b9347fb..76780074c 100644 --- a/magicctapipe/scripts/lst1_magic/config_general.yaml +++ b/magicctapipe/scripts/lst1_magic/config_general.yaml @@ -8,6 +8,7 @@ mc_tel_ids: directories: workspace_dir : "/fefs/aswg/workspace/raniere/" + scripts_dir : "/fefs/aswg/workspace/raniere/" target_name : "CrabTeste" MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" diff --git a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py index 621d112f4..e2a69cade 100644 --- a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py +++ b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py @@ -28,6 +28,7 
@@ import logging from tqdm import tqdm from pathlib import Path +import argparse logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) @@ -92,7 +93,7 @@ def split_train_test(target_dir, train_fraction): os.system(f"mv {list_of_dir[directory]}*.h5 "+proton_dir+"/../protons_test/"+list_of_dir[directory].split("/")[-2]) os.system(f"rm -r {list_of_dir[directory]}") -def merge(target_dir, identification, MAGIC_runs): +def merge(target_dir, identification, MAGIC_runs, scripts_dir): """ This function creates the bash scripts to run merge_hdf_files.py in all MAGIC subruns. @@ -130,7 +131,7 @@ def merge(target_dir, identification, MAGIC_runs): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/{i[1]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run - f.write(f'conda run -n magic-lst python merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M1/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') + f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M1/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') if os.path.exists(MAGIC_DL1_dir+"/M2"): for i in MAGIC_runs: @@ -138,25 +139,25 @@ def merge(target_dir, identification, MAGIC_runs): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/{i[1]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run - f.write(f'conda run -n magic-lst python merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M2/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') + f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M2/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n')
elif identification == "1_M1M2": if os.path.exists(MAGIC_DL1_dir+"/M1") & os.path.exists(MAGIC_DL1_dir+"/M2"): for i in MAGIC_runs: if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/Merged"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/Merged") - f.write(f'conda run -n magic-lst python merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --run-wise \n') + f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --run-wise \n') else: for i in MAGIC_runs: if not os.path.exists(MAGIC_DL1_dir+f"/Merged/Merged_{i[0]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/Merged_{i[0]}") #Creating a merged directory for each night - f.write(f'conda run -n magic-lst python merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --output-dir {MAGIC_DL1_dir}/Merged/Merged_{i[0]} \n') + f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --output-dir {MAGIC_DL1_dir}/Merged/Merged_{i[0]} \n') f.close() -def mergeMC(target_dir, identification): +def mergeMC(target_dir, identification, scripts_dir): """ This function creates the bash scripts to run merge_hdf_files.py in all MC runs. 
@@ -201,7 +202,7 @@ def mergeMC(target_dir, identification): f.write(f"SAMPLE_LIST=($(<{MC_DL1_dir}/{identification}/list_of_nodes.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") f.write(f'export LOG={MC_DL1_dir}/{identification}/Merged'+'/merged_${SLURM_ARRAY_TASK_ID}.log\n') - f.write(f'conda run -n magic-lst python merge_hdf_files.py --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 2>&1\n') + f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 2>&1\n') f.close() @@ -211,12 +212,24 @@ def main(): """ Here we read the config_general.yaml file, split the pronton sample into "test" and "train", and merge the MAGIC files. """ - - - with open("config_general.yaml", "rb") as f: # "rb" mode opens the file in binary format for reading + parser = argparse.ArgumentParser() + parser.add_argument( + "--config-file", + "-c", + dest="config_file", + type=str, + default="./config_general.yaml", + help="Path to a configuration file", + ) + + args = parser.parse_args() + with open( + args.config_file, "rb" + ) as f: # "rb" mode opens the file in binary format for reading config = yaml.safe_load(f) + target_dir = str(Path(config["directories"]["workspace_dir"]))+"/"+config["directories"]["target_name"] MAGIC_runs_and_dates = config["general"]["MAGIC_runs"] @@ -224,21 +237,22 @@ def main(): train_fraction = float(config["general"]["proton_train"]) + scripts_dir = str(Path(config["directories"]["scripts_dir"])) #Here we slice the proton MC data into "train" and "test": print("***** Splitting protons into 'train' and 'test' datasets...") split_train_test(target_dir, train_fraction) print("***** Generating merge bashscripts...") - merge(target_dir, "0_subruns", MAGIC_runs) #generating the bash script to merge the subruns - merge(target_dir, "1_M1M2", MAGIC_runs) #generating the bash script to merge the M1 and M2 runs - merge(target_dir, 
"2_nights", MAGIC_runs) #generating the bash script to merge all runs per night + merge(target_dir, "0_subruns", MAGIC_runs, scripts_dir) #generating the bash script to merge the subruns + merge(target_dir, "1_M1M2", MAGIC_runs, scripts_dir) #generating the bash script to merge the M1 and M2 runs + merge(target_dir, "2_nights", MAGIC_runs, scripts_dir) #generating the bash script to merge all runs per night print("***** Generating mergeMC bashscripts...") - mergeMC(target_dir, "protons") #generating the bash script to merge the files - mergeMC(target_dir, "gammadiffuse") #generating the bash script to merge the files - mergeMC(target_dir, "gammas") #generating the bash script to merge the files - mergeMC(target_dir, "protons_test") + mergeMC(target_dir, "protons", scripts_dir) #generating the bash script to merge the files + mergeMC(target_dir, "gammadiffuse", scripts_dir) #generating the bash script to merge the files + mergeMC(target_dir, "gammas", scripts_dir) #generating the bash script to merge the files + mergeMC(target_dir, "protons_test", scripts_dir) print("***** Running merge_hdf_files.py in the MAGIC data files...") diff --git a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py index 47876d7f0..e1ee6360c 100644 --- a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py +++ b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py @@ -114,7 +114,7 @@ def config_file_gen(ids, target_dir): f.close() -def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, telescope_ids, focal_length): +def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, focal_length, scripts_dir): """ This function creates the lists list_nodes_gamma_complete.txt and list_folder_gamma.txt with the MC file paths. 
@@ -194,7 +194,7 @@ def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, 'cat list_dl0_ok.txt | while read line\n', 'do\n', ' cd '+target_dir+'/../\n', - ' conda run -n magic-lst python lst1_magic_mc_dl0_to_dl1.py --input-file $line --output-dir '+target_dir+f'/DL1/MC/{particle_type}/$SAMPLE --config-file '+target_dir+'/config_DL0_to_DL1.yaml >>$LOG 2>&1 --focal_length_choice '+focal_length+'\n\n', + f' conda run -n magic-lst python {scripts_dir}/lst1_magic_mc_dl0_to_dl1.py --input-file $line --output-dir '+target_dir+f'/DL1/MC/{particle_type}/$SAMPLE --config-file '+target_dir+'/config_DL0_to_DL1.yaml >>$LOG 2>&1 --focal_length_choice '+focal_length+'\n\n', 'done\n', ""] f.writelines(lines_of_config_file) @@ -203,7 +203,7 @@ def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, -def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs): +def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir): """ Below we create a bash script that links the the MAGIC data paths to each subdirectory. 
@@ -257,7 +257,7 @@ def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs): 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', - 'conda run -n magic-lst python magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', + f'conda run -n magic-lst python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', ""] f.writelines(lines_of_config_file) f.close() @@ -282,7 +282,7 @@ def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs): 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', - 'conda run -n magic-lst python magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', + f'conda run -n magic-lst python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', ""] f.writelines(lines_of_config_file) f.close() @@ -370,11 +370,24 @@ def main(): help="You can type 'onlyMAGIC' or 'onlyMC' to run this script only on MAGIC or MC data, respectively.", ) + parser.add_argument( + "--config-file", + "-c", + dest="config_file", + type=str, + default="./config_general.yaml", + help="Path to a configuration file", + ) + + + args = parser.parse_args() - with open("config_general.yaml", "rb") as f: # "rb" mode opens the file in binary format for reading + with open( + args.config_file, "rb" + ) as f: # "rb" mode opens the file in binary format for reading config = yaml.safe_load(f) @@ -392,6 +405,8 @@ def main(): #MC_helium = str(Path(config["directories"]["MC_helium"])) MC_protons = 
str(Path(config["directories"]["MC_protons"])) MC_gammadiff = str(Path(config["directories"]["MC_gammadiff"])) + + scripts_dir = str(Path(config["directories"]["scripts_dir"])) print("***** Linking MC paths - this may take a few minutes ******") @@ -404,11 +419,11 @@ def main(): #Below we run the analysis on the MC data if not args.partial_analysis=='onlyMAGIC': - lists_and_bash_generator("gammas", target_dir, MC_gammas, SimTel_version, telescope_ids, focal_length) #gammas - #lists_and_bash_generator("electrons", target_dir, MC_electrons, SimTel_version, telescope_ids, focal_length) #electrons - #lists_and_bash_generator("helium", target_dir, MC_helium, SimTel_version, telescope_ids, focal_length) #helium - lists_and_bash_generator("protons", target_dir, MC_protons, SimTel_version, telescope_ids, focal_length) #protons - lists_and_bash_generator("gammadiffuse", target_dir, MC_gammadiff, SimTel_version, telescope_ids, focal_length) #gammadiffuse + lists_and_bash_generator("gammas", target_dir, MC_gammas, SimTel_version, focal_length, scripts_dir) #gammas + #lists_and_bash_generator("electrons", target_dir, MC_electrons, SimTel_version, focal_length, scripts_dir) #electrons + #lists_and_bash_generator("helium", target_dir, MC_helium, SimTel_version, focal_length, scripts_dir) #helium + lists_and_bash_generator("protons", target_dir, MC_protons, SimTel_version, focal_length, scripts_dir) #protons + lists_and_bash_generator("gammadiffuse", target_dir, MC_gammadiff, SimTel_version, focal_length, scripts_dir) #gammadiffuse #Here we do the MC DL0 to DL1 conversion: list_of_MC = glob.glob("linking_MC_*s.sh") @@ -426,7 +441,7 @@ def main(): #Below we run the analysis on the MAGIC data if not args.partial_analysis=='onlyMC': - lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs) #MAGIC real data + lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir) #MAGIC real data if (telescope_ids[-2] > 0) or (telescope_ids[-1] > 0): list_of_MAGIC_runs = 
glob.glob("MAGIC-*.sh") diff --git a/magicctapipe/scripts/lst1_magic/stereo_events.py b/magicctapipe/scripts/lst1_magic/stereo_events.py index 4227a857c..2c97091ea 100644 --- a/magicctapipe/scripts/lst1_magic/stereo_events.py +++ b/magicctapipe/scripts/lst1_magic/stereo_events.py @@ -14,6 +14,7 @@ import yaml import logging from pathlib import Path +import argparse logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) @@ -38,7 +39,7 @@ def configfile_stereo(ids, target_dir): f.close() -def bash_stereo(target_dir): +def bash_stereo(target_dir, scripts_dir): """ This function generates the bashscript for running the stereo analysis. @@ -79,10 +80,10 @@ def bash_stereo(target_dir): f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n magic-lst python lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") + f.write(f"conda run -n magic-lst python {scripts_dir}/lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") f.close() -def bash_stereoMC(target_dir, identification): +def bash_stereoMC(target_dir, identification, scripts_dir): """ This function generates the bashscript for running the stereo analysis. 
@@ -121,7 +122,7 @@ def bash_stereoMC(target_dir, identification): f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n magic-lst python lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") + f.write(f"conda run -n magic-lst python {scripts_dir}/lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") f.close() @@ -134,12 +135,26 @@ def main(): Here we read the config_general.yaml file and call the functions defined above. """ - - with open("config_general.yaml", "rb") as f: # "rb" mode opens the file in binary format for reading + parser = argparse.ArgumentParser() + parser.add_argument( + "--config-file", + "-c", + dest="config_file", + type=str, + default="./config_general.yaml", + help="Path to a configuration file", + ) + + args = parser.parse_args() + with open( + args.config_file, "rb" + ) as f: # "rb" mode opens the file in binary format for reading config = yaml.safe_load(f) - target_dir = str(Path(config["directories"]["workspace_dir"]))+"/"+config["directories"]["target_name"] + + scripts_dir = str(Path(config["directories"]["scripts_dir"])) + telescope_ids = list(config["mc_tel_ids"].values()) print("***** Generating file config_stereo.yaml...") @@ -147,13 +162,13 @@ def main(): configfile_stereo(telescope_ids, target_dir) print("***** Generating the bashscript...") - bash_stereo(target_dir) + bash_stereo(target_dir, scripts_dir) print("***** Generating the bashscript for MCs...") - bash_stereoMC(target_dir,"gammadiffuse") - bash_stereoMC(target_dir,"gammas") - bash_stereoMC(target_dir,"protons") - bash_stereoMC(target_dir,"protons_test") + bash_stereoMC(target_dir,"gammadiffuse", scripts_dir) + bash_stereoMC(target_dir,"gammas", scripts_dir) + 
bash_stereoMC(target_dir,"protons", scripts_dir) + bash_stereoMC(target_dir,"protons_test", scripts_dir) print("***** Submitting processes to the cluster...") print("Process name: "+target_dir.split("/")[-2:][1]+"_stereo") diff --git a/setup.py b/setup.py index f8c4a145e..565f090e8 100644 --- a/setup.py +++ b/setup.py @@ -16,6 +16,12 @@ "lst1_magic_train_rfs = magicctapipe.scripts.lst1_magic.lst1_magic_train_rfs:main", "magic_calib_to_dl1 = magicctapipe.scripts.lst1_magic.magic_calib_to_dl1:main", "merge_hdf_files = magicctapipe.scripts.lst1_magic.merge_hdf_files:main", + "setting_up_config_and_dir = magicctapipe.scripts.lst1_magic.setting_up_config_and_dir:main", + "merging_runs_and_splitting_training_samples = magicctapipe.scripts.lst1_magic.merging_runs_and_splitting_training_samples:main", + "coincident_events = magicctapipe.scripts.lst1_magic.coincident_events:main", + "stereo_events = magicctapipe.scripts.lst1_magic.stereo_events:main", + + ] tests_require = ["pytest", "pandas>=0.24.0", "importlib_resources;python_version<'3.9'"] From cded8f5e95781bee92ae603ff4078bd553018564 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Thu, 28 Sep 2023 12:46:33 +0200 Subject: [PATCH 25/76] partial analysis --- .../scripts/lst1_magic/setting_up_config_and_dir.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py index e1ee6360c..f7b406af7 100644 --- a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py +++ b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py @@ -362,9 +362,10 @@ def main(): #Here we are simply collecting the parameters from the command line, as input file, output directory, and configuration file parser.add_argument( - "--partial-analysis", - "-p", - dest="partial_analysis", + "--analysis-type", + "-t", + choices=['onlyMAGIC', 'onlyMC'] + dest="analysis_type", type=str, 
default="doEverything", help="You can type 'onlyMAGIC' or 'onlyMC' to run this script only on MAGIC or MC data, respectively.", @@ -418,7 +419,7 @@ def main(): config_file_gen(telescope_ids,target_dir) #Below we run the analysis on the MC data - if not args.partial_analysis=='onlyMAGIC': + if not args.analysis_type=='onlyMAGIC': lists_and_bash_generator("gammas", target_dir, MC_gammas, SimTel_version, focal_length, scripts_dir) #gammas #lists_and_bash_generator("electrons", target_dir, MC_electrons, SimTel_version, focal_length, scripts_dir) #electrons #lists_and_bash_generator("helium", target_dir, MC_helium, SimTel_version, focal_length, scripts_dir) #helium @@ -440,7 +441,7 @@ def main(): os.system(launch_jobs_MC) #Below we run the analysis on the MAGIC data - if not args.partial_analysis=='onlyMC': + if not args.analysis_type=='onlyMC': lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir) #MAGIC real data if (telescope_ids[-2] > 0) or (telescope_ids[-1] > 0): From 53135993911f23d46298ae4a76d770b6d204d326 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Thu, 28 Sep 2023 12:50:10 +0200 Subject: [PATCH 26/76] Bug --- magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py index f7b406af7..c21cb56a4 100644 --- a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py +++ b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py @@ -364,7 +364,7 @@ def main(): parser.add_argument( "--analysis-type", "-t", - choices=['onlyMAGIC', 'onlyMC'] + choices=['onlyMAGIC', 'onlyMC'], dest="analysis_type", type=str, default="doEverything", From 09134580f957e7bd1526e18f0b9e35b7ab0466a7 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Thu, 28 Sep 2023 12:52:48 +0200 Subject: [PATCH 27/76] config file in resources --- 
magicctapipe/resources/config_general.yaml | 2 ++ .../scripts/lst1_magic/config_general.yaml | 28 ------------------- 2 files changed, 2 insertions(+), 28 deletions(-) delete mode 100644 magicctapipe/scripts/lst1_magic/config_general.yaml diff --git a/magicctapipe/resources/config_general.yaml b/magicctapipe/resources/config_general.yaml index dd2cfadde..0a694d326 100644 --- a/magicctapipe/resources/config_general.yaml +++ b/magicctapipe/resources/config_general.yaml @@ -8,6 +8,7 @@ mc_tel_ids: directories: workspace_dir : "/fefs/aswg/workspace/raniere/" + scripts_dir : "/fefs/aswg/workspace/raniere/" target_name : "CrabTeste" MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" @@ -19,6 +20,7 @@ general: target_RA_deg : 83.633083 #RA in degrees target_Dec_deg: 22.0145 #Dec in degrees SimTel_version: "v1.4" + LST_version : "v0.9" focal_length : "effective" #effective #nominal MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" LST_runs : "LST_runs.txt" diff --git a/magicctapipe/scripts/lst1_magic/config_general.yaml b/magicctapipe/scripts/lst1_magic/config_general.yaml deleted file mode 100644 index 76780074c..000000000 --- a/magicctapipe/scripts/lst1_magic/config_general.yaml +++ /dev/null @@ -1,28 +0,0 @@ -mc_tel_ids: - LST-1: 1 - LST-2: 0 - LST-3: 0 - LST-4: 0 - MAGIC-I: 2 - MAGIC-II: 3 - -directories: - workspace_dir : "/fefs/aswg/workspace/raniere/" - scripts_dir : "/fefs/aswg/workspace/raniere/" - target_name : "CrabTeste" - MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" - MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" - MC_helium : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Helium/sim_telarray/" - MC_protons : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/Protons/dec_2276/sim_telarray" - MC_gammadiff : 
"/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/GammaDiffuse/dec_2276/sim_telarray/" - -general: - target_RA_deg : 83.633083 #RA in degrees - target_Dec_deg: 22.0145 #Dec in degrees - SimTel_version: "v1.4" - LST_version: "v0.9" - focal_length : "effective" #effective #nominal - MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" - LST_runs : "LST_runs.txt" - proton_train : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest - From c487e0d6a5da8ee204f91ce169c84f377bb41b72 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Thu, 28 Sep 2023 14:02:28 +0200 Subject: [PATCH 28/76] readme and resources --- magicctapipe/{scripts/lst1_magic => resources}/LST_runs.txt | 0 magicctapipe/{scripts/lst1_magic => resources}/MAGIC_runs.txt | 0 magicctapipe/scripts/lst1_magic/README.md | 4 ++-- 3 files changed, 2 insertions(+), 2 deletions(-) rename magicctapipe/{scripts/lst1_magic => resources}/LST_runs.txt (100%) rename magicctapipe/{scripts/lst1_magic => resources}/MAGIC_runs.txt (100%) diff --git a/magicctapipe/scripts/lst1_magic/LST_runs.txt b/magicctapipe/resources/LST_runs.txt similarity index 100% rename from magicctapipe/scripts/lst1_magic/LST_runs.txt rename to magicctapipe/resources/LST_runs.txt diff --git a/magicctapipe/scripts/lst1_magic/MAGIC_runs.txt b/magicctapipe/resources/MAGIC_runs.txt similarity index 100% rename from magicctapipe/scripts/lst1_magic/MAGIC_runs.txt rename to magicctapipe/resources/MAGIC_runs.txt diff --git a/magicctapipe/scripts/lst1_magic/README.md b/magicctapipe/scripts/lst1_magic/README.md index 0ac74d2ae..b9bc397bc 100644 --- a/magicctapipe/scripts/lst1_magic/README.md +++ b/magicctapipe/scripts/lst1_magic/README.md @@ -144,11 +144,11 @@ Process name: yourprojectnameCrabTeste To check the jobs submitted to the cluster, type: squeue -n yourprojectnameCrabTeste ``` Note that this script can be run as -> $ python setting_up_config_and_dir.py --partial-analysis 
onlyMAGIC +> $ python setting_up_config_and_dir.py --analysis-type onlyMAGIC or -> $ python setting_up_config_and_dir.py --partial-analysis onlyMC +> $ python setting_up_config_and_dir.py --analysis-type onlyMC if you want to convert only MAGIC or only MC DL0 files to DL1, respectively. From 2bdf63b590735eedc123439f323f793b0aff3e7a Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Mon, 2 Oct 2023 09:00:53 +0200 Subject: [PATCH 29/76] README + analysis_type + 'with open...' --- magicctapipe/scripts/lst1_magic/README.md | 8 +- .../scripts/lst1_magic/coincident_events.py | 56 +-- ...ing_runs_and_splitting_training_samples.py | 110 ++--- .../lst1_magic/setting_up_config_and_dir.py | 390 +++++++++--------- .../scripts/lst1_magic/stereo_events.py | 78 ++-- 5 files changed, 323 insertions(+), 319 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/README.md b/magicctapipe/scripts/lst1_magic/README.md index b9bc397bc..5004ffcb6 100644 --- a/magicctapipe/scripts/lst1_magic/README.md +++ b/magicctapipe/scripts/lst1_magic/README.md @@ -37,7 +37,7 @@ MAGIC+LST analysis starts from MAGIC calibrated data (\_Y\_ files), LST DL1 data Authorized institute server (Client) → ssh connection to CTALaPalma → ssh connection to cp01/02 -2) Once connected to the IT Container, install MAGIC-CTA-PIPE (e.g. in your home directory in the IT Container) following the tutorial here: https://github.com/ranieremenezes/magic-cta-pipe +2) Once connected to the IT Container, install MAGIC-CTA-PIPE (e.g. 
in your home directory in the IT Container) following the tutorial here: https://github.com/cta-observatory/magic-cta-pipe 3) Do not forget to open the magic-lst environment with the command `conda activate magic-lst` before starting the analysis @@ -59,6 +59,7 @@ mc_tel_ids: directories: workspace_dir : "/fefs/aswg/workspace/yourname/yourprojectname/" + scripts_dir : "/fefs/aswg/workspace/yourname/yourprojectname/" target_name : "CrabTeste" MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" @@ -67,12 +68,15 @@ directories: MC_gammadiff : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/GammaDiffuse/dec_2276/sim_telarray/" general: + target_RA_deg : 83.633083 #RA in degrees + target_Dec_deg: 22.0145 #Dec in degrees SimTel_version: "v1.4" + LST_version : "v0.9" focal_length : "effective" #effective #nominal MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" LST_runs : "LST_runs.txt" proton_train : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest - + ``` The file `MAGIC_runs.txt` looks like that: diff --git a/magicctapipe/scripts/lst1_magic/coincident_events.py b/magicctapipe/scripts/lst1_magic/coincident_events.py index 94cf1d726..3512f9592 100644 --- a/magicctapipe/scripts/lst1_magic/coincident_events.py +++ b/magicctapipe/scripts/lst1_magic/coincident_events.py @@ -40,13 +40,13 @@ def configfile_coincidence(ids, target_dir): Path to the working directory """ - f = open(target_dir+'/config_coincidence.yaml','w') - f.write("mc_tel_ids:\n LST-1: "+str(ids[0])+"\n LST-2: "+str(ids[1])+"\n LST-3: "+str(ids[2])+"\n LST-4: "+str(ids[3])+"\n MAGIC-I: "+str(ids[4])+"\n MAGIC-II: "+str(ids[5])+"\n\n") - f.write('event_coincidence:\n timestamp_type_lst: "dragon_time" # select "dragon_time", "tib_time" or "ucts_time"\n window_half_width: "300 ns"\n') - f.write(' pre_offset_search: true\n') - 
f.write(' n_pre_offset_search_events: 100\n') - f.write(' time_offset:\n start: "-10 us"\n stop: "0 us"\n') - f.close() + with open(target_dir+'/config_coincidence.yaml','w') as f: + f.write("mc_tel_ids:\n LST-1: "+str(ids[0])+"\n LST-2: "+str(ids[1])+"\n LST-3: "+str(ids[2])+"\n LST-4: "+str(ids[3])+"\n MAGIC-I: "+str(ids[4])+"\n MAGIC-II: "+str(ids[5])+"\n\n") + f.write('event_coincidence:\n timestamp_type_lst: "dragon_time" # select "dragon_time", "tib_time" or "ucts_time"\n window_half_width: "300 ns"\n') + f.write(' pre_offset_search: true\n') + f.write(' n_pre_offset_search_events: 100\n') + f.write(' time_offset:\n start: "-10 us"\n stop: "0 us"\n') + def linking_lst(target_dir, LST_runs, LST_version): @@ -78,10 +78,10 @@ def linking_lst(target_dir, LST_runs, LST_version): LSTdataPathFile.write(subrun+"\n") #If this files already exists, simply append the new information else: os.mkdir(outputdir) - f = open(f"{outputdir}/list_LST.txt", "w") #If the file list_LST.txt does not exist, it will be created here - for subrun in list_of_subruns: - f.write(subrun+"\n") - f.close() + with open(f"{outputdir}/list_LST.txt", "w") as f: #If the file list_LST.txt does not exist, it will be created here + for subrun in list_of_subruns: + f.write(subrun+"\n") + def bash_coincident(target_dir, scripts_dir): @@ -103,23 +103,23 @@ def bash_coincident(target_dir, scripts_dir): for nightMAGIC,nightLST in zip(listOfNightsMAGIC,listOfNightsLST): process_size = len(np.genfromtxt(nightLST+"/list_LST.txt",dtype="str")) - 1 - f = open(f"LST_coincident_{nightLST.split('/')[-1]}.sh","w") - f.write("#!/bin/sh\n\n") - f.write("#SBATCH -p short\n") - f.write("#SBATCH -J "+process_name+"_coincidence\n") - f.write(f"#SBATCH --array=0-{process_size}%50\n") - f.write("#SBATCH -N 1\n\n") - f.write("ulimit -l unlimited\n") - f.write("ulimit -s unlimited\n") - f.write("ulimit -a\n\n") - - f.write(f"export INM={nightMAGIC}\n") - f.write(f"export OUTPUTDIR={nightLST}\n") - 
f.write("SAMPLE_LIST=($(<$OUTPUTDIR/list_LST.txt))\n") - f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") - f.write("export LOG=$OUTPUTDIR/coincidence_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n magic-lst python {scripts_dir}/lst1_magic_event_coincidence.py --input-file-lst $SAMPLE --input-dir-magic $INM --output-dir $OUTPUTDIR --config-file {target_dir}/config_coincidence.yaml >$LOG 2>&1") - f.close() + with open(f"LST_coincident_{nightLST.split('/')[-1]}.sh","w") as f: + f.write("#!/bin/sh\n\n") + f.write("#SBATCH -p short\n") + f.write("#SBATCH -J "+process_name+"_coincidence\n") + f.write(f"#SBATCH --array=0-{process_size}%50\n") + f.write("#SBATCH -N 1\n\n") + f.write("ulimit -l unlimited\n") + f.write("ulimit -s unlimited\n") + f.write("ulimit -a\n\n") + + f.write(f"export INM={nightMAGIC}\n") + f.write(f"export OUTPUTDIR={nightLST}\n") + f.write("SAMPLE_LIST=($(<$OUTPUTDIR/list_LST.txt))\n") + f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") + f.write("export LOG=$OUTPUTDIR/coincidence_${SLURM_ARRAY_TASK_ID}.log\n") + f.write(f"conda run -n magic-lst python {scripts_dir}/lst1_magic_event_coincidence.py --input-file-lst $SAMPLE --input-dir-magic $INM --output-dir $OUTPUTDIR --config-file {target_dir}/config_coincidence.yaml >$LOG 2>&1") + diff --git a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py index e2a69cade..3122c971d 100644 --- a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py +++ b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py @@ -115,46 +115,46 @@ def merge(target_dir, identification, MAGIC_runs, scripts_dir): if not os.path.exists(MAGIC_DL1_dir+"/Merged"): os.mkdir(MAGIC_DL1_dir+"/Merged") - f = open(f"Merge_{identification}.sh","w") - f.write('#!/bin/sh\n\n') - f.write('#SBATCH -p short\n') - f.write('#SBATCH -J '+process_name+'\n') - 
f.write('#SBATCH -N 1\n\n') - f.write('ulimit -l unlimited\n') - f.write('ulimit -s unlimited\n') - f.write('ulimit -a\n\n') - - if identification == "0_subruns": - if os.path.exists(MAGIC_DL1_dir+"/M1"): - for i in MAGIC_runs: - if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}"): - os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night - if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/{i[1]}"): - os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run - f.write(f'conda run -n magic-lst {scripts_dir}/python merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M1/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') - - if os.path.exists(MAGIC_DL1_dir+"/M2"): - for i in MAGIC_runs: - if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}"): - os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night - if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/{i[1]}"): - os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run - f.write(f'conda run -n magic-lst {scripts_dir}/python merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M2/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') - - elif identification == "1_M1M2": - if os.path.exists(MAGIC_DL1_dir+"/M1") & os.path.exists(MAGIC_DL1_dir+"/M2"): + with open(f"Merge_{identification}.sh","w") as f: + f.write('#!/bin/sh\n\n') + f.write('#SBATCH -p short\n') + f.write('#SBATCH -J '+process_name+'\n') + f.write('#SBATCH -N 1\n\n') + f.write('ulimit -l unlimited\n') + f.write('ulimit -s unlimited\n') + f.write('ulimit -a\n\n') + + if identification == "0_subruns": + if os.path.exists(MAGIC_DL1_dir+"/M1"): + for i in MAGIC_runs: + if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}"): + os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night + if not 
os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/{i[1]}"): + os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run + f.write(f'conda run -n magic-lst {scripts_dir}/python merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M1/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') + + if os.path.exists(MAGIC_DL1_dir+"/M2"): + for i in MAGIC_runs: + if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}"): + os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night + if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/{i[1]}"): + os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run + f.write(f'conda run -n magic-lst {scripts_dir}/python merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M2/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') + + elif identification == "1_M1M2": + if os.path.exists(MAGIC_DL1_dir+"/M1") & os.path.exists(MAGIC_DL1_dir+"/M2"): + for i in MAGIC_runs: + if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/Merged"): + os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/Merged") + f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --run-wise \n') + else: for i in MAGIC_runs: - if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/Merged"): - os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/Merged") - f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --run-wise \n') - else: - for i in MAGIC_runs: - if not os.path.exists(MAGIC_DL1_dir+f"/Merged/Merged_{i[0]}"): - os.mkdir(f"{MAGIC_DL1_dir}/Merged/Merged_{i[0]}") #Creating a merged directory for each night - f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir 
{MAGIC_DL1_dir}/Merged/{i[0]}/Merged --output-dir {MAGIC_DL1_dir}/Merged/Merged_{i[0]} \n') + if not os.path.exists(MAGIC_DL1_dir+f"/Merged/Merged_{i[0]}"): + os.mkdir(f"{MAGIC_DL1_dir}/Merged/Merged_{i[0]}") #Creating a merged directory for each night + f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --output-dir {MAGIC_DL1_dir}/Merged/Merged_{i[0]} \n') + - f.close() def mergeMC(target_dir, identification, scripts_dir): @@ -188,24 +188,24 @@ def mergeMC(target_dir, identification, scripts_dir): cleaning(list_of_nodes, target_dir) #This will delete the (possibly) failed runs. - f = open(f"Merge_{identification}.sh","w") - f.write('#!/bin/sh\n\n') - f.write('#SBATCH -p short\n') - f.write('#SBATCH -J '+process_name+'\n') - f.write(f"#SBATCH --array=0-{process_size}%50\n") - f.write('#SBATCH --mem=7g\n') - f.write('#SBATCH -N 1\n\n') - f.write('ulimit -l unlimited\n') - f.write('ulimit -s unlimited\n') - f.write('ulimit -a\n\n') - - f.write(f"SAMPLE_LIST=($(<{MC_DL1_dir}/{identification}/list_of_nodes.txt))\n") - f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") - f.write(f'export LOG={MC_DL1_dir}/{identification}/Merged'+'/merged_${SLURM_ARRAY_TASK_ID}.log\n') - f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 2>&1\n') - - f.close() - + with open(f"Merge_{identification}.sh","w") as f: + f.write('#!/bin/sh\n\n') + f.write('#SBATCH -p short\n') + f.write('#SBATCH -J '+process_name+'\n') + f.write(f"#SBATCH --array=0-{process_size}%50\n") + f.write('#SBATCH --mem=7g\n') + f.write('#SBATCH -N 1\n\n') + f.write('ulimit -l unlimited\n') + f.write('ulimit -s unlimited\n') + f.write('ulimit -a\n\n') + + f.write(f"SAMPLE_LIST=($(<{MC_DL1_dir}/{identification}/list_of_nodes.txt))\n") + f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") + f.write(f'export 
LOG={MC_DL1_dir}/{identification}/Merged'+'/merged_${SLURM_ARRAY_TASK_ID}.log\n') + f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 2>&1\n') + + + def main(): diff --git a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py index c21cb56a4..9af69728d 100644 --- a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py +++ b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py @@ -37,81 +37,81 @@ def config_file_gen(ids, target_dir): """ Here we create the configuration file needed for transforming DL0 into DL1 """ - - f = open(target_dir+'/config_DL0_to_DL1.yaml','w') - #f.write("directories:\n target: "+target_dir+"\n\n") - lines_of_config_file = [ - "mc_tel_ids:", - "\n LST-1: "+str(ids[0]), - "\n LST-2: "+str(ids[1]), - "\n LST-3: "+str(ids[2]), - "\n LST-4: "+str(ids[3]), - "\n MAGIC-I: "+str(ids[4]), - "\n MAGIC-II: "+str(ids[5]), - "\n", - "\nLST:", - "\n image_extractor:", - '\n type: "LocalPeakWindowSum"', - "\n window_shift: 4", - "\n window_width: 8", - "\n", - "\n increase_nsb:", - "\n use: true", - "\n extra_noise_in_dim_pixels: 1.27", - "\n extra_bias_in_dim_pixels: 0.665", - "\n transition_charge: 8", - "\n extra_noise_in_bright_pixels: 2.08", - "\n", - "\n increase_psf:", - "\n use: false", - "\n fraction: null", - "\n", - "\n tailcuts_clean:", - "\n picture_thresh: 8", - "\n boundary_thresh: 4", - "\n keep_isolated_pixels: false", - "\n min_number_picture_neighbors: 2", - "\n", - "\n time_delta_cleaning:", - "\n use: true", - "\n min_number_neighbors: 1", - "\n time_limit: 2", - "\n", - "\n dynamic_cleaning:", - "\n use: true", - "\n threshold: 267", - "\n fraction: 0.03", - "\n", - "\n use_only_main_island: false", - "\n", - "\nMAGIC:", - "\n image_extractor:", - '\n type: "SlidingWindowMaxSum"', - "\n window_width: 5", - "\n apply_integration_correction: false", - 
"\n", - "\n charge_correction:", - "\n use: true", - "\n factor: 1.143", - "\n", - "\n magic_clean:", - "\n use_time: true", - "\n use_sum: true", - "\n picture_thresh: 6", - "\n boundary_thresh: 3.5", - "\n max_time_off: 4.5", - "\n max_time_diff: 1.5", - "\n find_hotpixels: true", - '\n pedestal_type: "from_extractor_rndm"', - "\n", - "\n muon_ring:", - "\n thr_low: 25", - "\n tailcut: [12, 8]", - "\n ring_completeness_threshold: 25", - "\n"] - - f.writelines(lines_of_config_file) - f.close() + with open(target_dir+'/config_DL0_to_DL1.yaml','w') as f: + + #f.write("directories:\n target: "+target_dir+"\n\n") + lines_of_config_file = [ + "mc_tel_ids:", + "\n LST-1: "+str(ids[0]), + "\n LST-2: "+str(ids[1]), + "\n LST-3: "+str(ids[2]), + "\n LST-4: "+str(ids[3]), + "\n MAGIC-I: "+str(ids[4]), + "\n MAGIC-II: "+str(ids[5]), + "\n", + "\nLST:", + "\n image_extractor:", + '\n type: "LocalPeakWindowSum"', + "\n window_shift: 4", + "\n window_width: 8", + "\n", + "\n increase_nsb:", + "\n use: true", + "\n extra_noise_in_dim_pixels: 1.27", + "\n extra_bias_in_dim_pixels: 0.665", + "\n transition_charge: 8", + "\n extra_noise_in_bright_pixels: 2.08", + "\n", + "\n increase_psf:", + "\n use: false", + "\n fraction: null", + "\n", + "\n tailcuts_clean:", + "\n picture_thresh: 8", + "\n boundary_thresh: 4", + "\n keep_isolated_pixels: false", + "\n min_number_picture_neighbors: 2", + "\n", + "\n time_delta_cleaning:", + "\n use: true", + "\n min_number_neighbors: 1", + "\n time_limit: 2", + "\n", + "\n dynamic_cleaning:", + "\n use: true", + "\n threshold: 267", + "\n fraction: 0.03", + "\n", + "\n use_only_main_island: false", + "\n", + "\nMAGIC:", + "\n image_extractor:", + '\n type: "SlidingWindowMaxSum"', + "\n window_width: 5", + "\n apply_integration_correction: false", + "\n", + "\n charge_correction:", + "\n use: true", + "\n factor: 1.143", + "\n", + "\n magic_clean:", + "\n use_time: true", + "\n use_sum: true", + "\n picture_thresh: 6", + "\n boundary_thresh: 
3.5", + "\n max_time_off: 4.5", + "\n max_time_diff: 1.5", + "\n find_hotpixels: true", + '\n pedestal_type: "from_extractor_rndm"', + "\n", + "\n muon_ring:", + "\n thr_low: 25", + "\n tailcut: [12, 8]", + "\n ring_completeness_threshold: 25", + "\n"] + + f.writelines(lines_of_config_file) + def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, focal_length, scripts_dir): @@ -125,46 +125,46 @@ def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, process_name = target_dir.split("/")[-2:][1] list_of_nodes = glob.glob(MC_path+"/node*") - f = open(target_dir+f"/list_nodes_{particle_type}_complete.txt","w") # creating list_nodes_gammas_complete.txt - for i in list_of_nodes: - f.write(i+"/output_"+SimTel_version+"\n") + with open(target_dir+f"/list_nodes_{particle_type}_complete.txt","w") as f:# creating list_nodes_gammas_complete.txt + for i in list_of_nodes: + f.write(i+"/output_"+SimTel_version+"\n") - f.close() - f = open(target_dir+f"/list_folder_{particle_type}.txt","w") # creating list_folder_gammas.txt - for i in list_of_nodes: - f.write(i.split("/")[-1]+"\n") - f.close() + with open(target_dir+f"/list_folder_{particle_type}.txt","w") as f:# creating list_folder_gammas.txt + for i in list_of_nodes: + f.write(i.split("/")[-1]+"\n") + + #################################################################################### ############ bash scripts that link the MC paths to each subdirectory. 
#################################################################################### - f = open(f"linking_MC_{particle_type}_paths.sh","w") - lines_of_config_file = [ - "#!/bin/sh\n\n", - "#SBATCH -p short\n", - "#SBATCH -J "+process_name+"\n\n", - "#SBATCH -N 1\n\n", - "ulimit -l unlimited\n", - "ulimit -s unlimited\n", - "ulimit -a\n\n", - "while read -r -u 3 lineA && read -r -u 4 lineB\n", - "do\n", - " cd "+target_dir+f"/DL1/MC/{particle_type}\n", - " mkdir $lineB\n", - " cd $lineA\n", - " ls -lR *.gz |wc -l\n", - " ls *.gz > "+target_dir+f"/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n", - ' string=$lineA"/"\n', - " export file="+target_dir+f"/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n\n", - " cat $file | while read line; do echo $string${line} >>"+target_dir+f"/DL1/MC/{particle_type}/$lineB/list_dl0_ok.txt; done\n\n", - ' echo "folder $lineB and node $lineA"\n', - 'done 3<"'+target_dir+f'/list_nodes_{particle_type}_complete.txt" 4<"'+target_dir+f'/list_folder_{particle_type}.txt"\n', - ""] - f.writelines(lines_of_config_file) - f.close() + with open(f"linking_MC_{particle_type}_paths.sh","w") as f: + lines_of_config_file = [ + "#!/bin/sh\n\n", + "#SBATCH -p short\n", + "#SBATCH -J "+process_name+"\n\n", + "#SBATCH -N 1\n\n", + "ulimit -l unlimited\n", + "ulimit -s unlimited\n", + "ulimit -a\n\n", + "while read -r -u 3 lineA && read -r -u 4 lineB\n", + "do\n", + " cd "+target_dir+f"/DL1/MC/{particle_type}\n", + " mkdir $lineB\n", + " cd $lineA\n", + " ls -lR *.gz |wc -l\n", + " ls *.gz > "+target_dir+f"/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n", + ' string=$lineA"/"\n', + " export file="+target_dir+f"/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n\n", + " cat $file | while read line; do echo $string${line} >>"+target_dir+f"/DL1/MC/{particle_type}/$lineB/list_dl0_ok.txt; done\n\n", + ' echo "folder $lineB and node $lineA"\n', + 'done 3<"'+target_dir+f'/list_nodes_{particle_type}_complete.txt" 4<"'+target_dir+f'/list_folder_{particle_type}.txt"\n', + ""] + 
f.writelines(lines_of_config_file) + ################################################################################################################ @@ -174,31 +174,31 @@ def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, number_of_nodes = glob.glob(MC_path+"/node*") number_of_nodes = len(number_of_nodes) -1 - f = open(f"linking_MC_{particle_type}_paths_r.sh","w") - lines_of_config_file = [ - '#!/bin/sh\n\n', - '#SBATCH -p xxl\n', - '#SBATCH -J '+process_name+'\n', - '#SBATCH --array=0-'+str(number_of_nodes)+'%50\n', - '#SBATCH --mem=10g\n', - '#SBATCH -N 1\n\n', - 'ulimit -l unlimited\n', - 'ulimit -s unlimited\n', - 'ulimit -a\n', - 'cd '+target_dir+f'/DL1/MC/{particle_type}\n\n', - 'export INF='+target_dir+'\n', - f'SAMPLE_LIST=($(<$INF/list_folder_{particle_type}.txt))\n', - 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n', - 'cd $SAMPLE\n\n', - 'export LOG='+target_dir+f'/DL1/MC/{particle_type}'+'/simtel_{$SAMPLE}_all.log\n', - 'cat list_dl0_ok.txt | while read line\n', - 'do\n', - ' cd '+target_dir+'/../\n', - f' conda run -n magic-lst python {scripts_dir}/lst1_magic_mc_dl0_to_dl1.py --input-file $line --output-dir '+target_dir+f'/DL1/MC/{particle_type}/$SAMPLE --config-file '+target_dir+'/config_DL0_to_DL1.yaml >>$LOG 2>&1 --focal_length_choice '+focal_length+'\n\n', - 'done\n', - ""] - f.writelines(lines_of_config_file) - f.close() + with open(f"linking_MC_{particle_type}_paths_r.sh","w") as f: + lines_of_config_file = [ + '#!/bin/sh\n\n', + '#SBATCH -p xxl\n', + '#SBATCH -J '+process_name+'\n', + '#SBATCH --array=0-'+str(number_of_nodes)+'%50\n', + '#SBATCH --mem=10g\n', + '#SBATCH -N 1\n\n', + 'ulimit -l unlimited\n', + 'ulimit -s unlimited\n', + 'ulimit -a\n', + 'cd '+target_dir+f'/DL1/MC/{particle_type}\n\n', + 'export INF='+target_dir+'\n', + f'SAMPLE_LIST=($(<$INF/list_folder_{particle_type}.txt))\n', + 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n', + 'cd $SAMPLE\n\n', + 'export 
LOG='+target_dir+f'/DL1/MC/{particle_type}'+'/simtel_{$SAMPLE}_all.log\n', + 'cat list_dl0_ok.txt | while read line\n', + 'do\n', + ' cd '+target_dir+'/../\n', + f' conda run -n magic-lst python {scripts_dir}/lst1_magic_mc_dl0_to_dl1.py --input-file $line --output-dir '+target_dir+f'/DL1/MC/{particle_type}/$SAMPLE --config-file '+target_dir+'/config_DL0_to_DL1.yaml >>$LOG 2>&1 --focal_length_choice '+focal_length+'\n\n', + 'done\n', + ""] + f.writelines(lines_of_config_file) + @@ -211,29 +211,29 @@ def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir) process_name = target_dir.split("/")[-2:][1] - f = open("linking_MAGIC_data_paths.sh","w") - f.write('#!/bin/sh\n\n') - f.write('#SBATCH -p short\n') - f.write('#SBATCH -J '+process_name+'\n') - f.write('#SBATCH -N 1\n\n') - f.write('ulimit -l unlimited\n') - f.write('ulimit -s unlimited\n') - f.write('ulimit -a\n') - - if telescope_ids[-1] > 0: - for i in MAGIC_runs: - f.write('export IN1=/fefs/onsite/common/MAGIC/data/M2/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+'\n') - f.write('export OUT1='+target_dir+'/DL1/Observations/M2/'+i[0]+'/'+i[1]+'\n') - f.write('ls $IN1/*'+i[1][-2:]+'.*_Y_*.root > $OUT1/list_dl0.txt\n') - - f.write('\n') - if telescope_ids[-2] > 0: - for i in MAGIC_runs: - f.write('export IN1=/fefs/onsite/common/MAGIC/data/M1/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+'\n') - f.write('export OUT1='+target_dir+'/DL1/Observations/M1/'+i[0]+'/'+i[1]+'\n') - f.write('ls $IN1/*'+i[1][-2:]+'.*_Y_*.root > $OUT1/list_dl0.txt\n') - - f.close() + with open("linking_MAGIC_data_paths.sh","w") as f: + f.write('#!/bin/sh\n\n') + f.write('#SBATCH -p short\n') + f.write('#SBATCH -J '+process_name+'\n') + f.write('#SBATCH -N 1\n\n') + f.write('ulimit -l unlimited\n') + f.write('ulimit -s unlimited\n') + f.write('ulimit -a\n') + + if telescope_ids[-1] > 0: + for i in MAGIC_runs: + f.write('export 
IN1=/fefs/onsite/common/MAGIC/data/M2/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+'\n') + f.write('export OUT1='+target_dir+'/DL1/Observations/M2/'+i[0]+'/'+i[1]+'\n') + f.write('ls $IN1/*'+i[1][-2:]+'.*_Y_*.root > $OUT1/list_dl0.txt\n') + + f.write('\n') + if telescope_ids[-2] > 0: + for i in MAGIC_runs: + f.write('export IN1=/fefs/onsite/common/MAGIC/data/M1/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+'\n') + f.write('export OUT1='+target_dir+'/DL1/Observations/M1/'+i[0]+'/'+i[1]+'\n') + f.write('ls $IN1/*'+i[1][-2:]+'.*_Y_*.root > $OUT1/list_dl0.txt\n') + + if (telescope_ids[-2] > 0) or (telescope_ids[-1] > 0): for i in MAGIC_runs: @@ -242,50 +242,50 @@ def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir) number_of_nodes = glob.glob('/fefs/onsite/common/MAGIC/data/M2/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+f'/*{i[1]}.*_Y_*.root') number_of_nodes = len(number_of_nodes) - 1 - f = open(f"MAGIC-II_dl0_to_dl1_run_{i[1]}.sh","w") - lines_of_config_file = [ - '#!/bin/sh\n\n', - '#SBATCH -p long\n', - '#SBATCH -J '+process_name+'\n', - '#SBATCH --array=0-'+str(number_of_nodes)+'\n', - '#SBATCH -N 1\n\n', - 'ulimit -l unlimited\n', - 'ulimit -s unlimited\n', - 'ulimit -a\n\n', - 'export OUTPUTDIR='+target_dir+'/DL1/Observations/M2/'+i[0]+'/'+i[1]+'\n', - 'cd '+target_dir+'/../\n', - 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', - 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', - 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', - f'conda run -n magic-lst python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', - ""] - f.writelines(lines_of_config_file) - f.close() + with open(f"MAGIC-II_dl0_to_dl1_run_{i[1]}.sh","w") as f: + lines_of_config_file = [ + '#!/bin/sh\n\n', + '#SBATCH -p long\n', + '#SBATCH 
-J '+process_name+'\n', + '#SBATCH --array=0-'+str(number_of_nodes)+'\n', + '#SBATCH -N 1\n\n', + 'ulimit -l unlimited\n', + 'ulimit -s unlimited\n', + 'ulimit -a\n\n', + 'export OUTPUTDIR='+target_dir+'/DL1/Observations/M2/'+i[0]+'/'+i[1]+'\n', + 'cd '+target_dir+'/../\n', + 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', + 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', + 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', + f'conda run -n magic-lst python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', + ""] + f.writelines(lines_of_config_file) + if telescope_ids[-2] > 0: number_of_nodes = glob.glob('/fefs/onsite/common/MAGIC/data/M1/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+f'/*{i[1]}.*_Y_*.root') number_of_nodes = len(number_of_nodes) - 1 - f = open(f"MAGIC-I_dl0_to_dl1_run_{i[1]}.sh","w") - lines_of_config_file = [ - '#!/bin/sh\n\n', - '#SBATCH -p long\n', - '#SBATCH -J '+process_name+'\n', - '#SBATCH --array=0-'+str(number_of_nodes)+'\n', - '#SBATCH -N 1\n\n', - 'ulimit -l unlimited\n', - 'ulimit -s unlimited\n', - 'ulimit -a\n\n', - 'export OUTPUTDIR='+target_dir+'/DL1/Observations/M1/'+i[0]+'/'+i[1]+'\n', - 'cd '+target_dir+'/../\n', - 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', - 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', - 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', - f'conda run -n magic-lst python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', - ""] - f.writelines(lines_of_config_file) - f.close() + with open(f"MAGIC-I_dl0_to_dl1_run_{i[1]}.sh","w") as f: + lines_of_config_file = [ + '#!/bin/sh\n\n', + '#SBATCH -p long\n', + '#SBATCH -J '+process_name+'\n', + '#SBATCH --array=0-'+str(number_of_nodes)+'\n', + '#SBATCH -N 1\n\n', + 'ulimit -l unlimited\n', + 
'ulimit -s unlimited\n', + 'ulimit -a\n\n', + 'export OUTPUTDIR='+target_dir+'/DL1/Observations/M1/'+i[0]+'/'+i[1]+'\n', + 'cd '+target_dir+'/../\n', + 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', + 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', + 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', + f'conda run -n magic-lst python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', + ""] + f.writelines(lines_of_config_file) + def directories_generator(target_dir, telescope_ids,MAGIC_runs): @@ -419,7 +419,7 @@ def main(): config_file_gen(telescope_ids,target_dir) #Below we run the analysis on the MC data - if not args.analysis_type=='onlyMAGIC': + if (args.analysis_type=='onlyMC') or (args.analysis_type=='doEverything'): lists_and_bash_generator("gammas", target_dir, MC_gammas, SimTel_version, focal_length, scripts_dir) #gammas #lists_and_bash_generator("electrons", target_dir, MC_electrons, SimTel_version, focal_length, scripts_dir) #electrons #lists_and_bash_generator("helium", target_dir, MC_helium, SimTel_version, focal_length, scripts_dir) #helium @@ -441,7 +441,7 @@ def main(): os.system(launch_jobs_MC) #Below we run the analysis on the MAGIC data - if not args.analysis_type=='onlyMC': + if (args.analysis_type=='onlyMAGIC') or (args.analysis_type=='doEverything'): lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir) #MAGIC real data if (telescope_ids[-2] > 0) or (telescope_ids[-1] > 0): diff --git a/magicctapipe/scripts/lst1_magic/stereo_events.py b/magicctapipe/scripts/lst1_magic/stereo_events.py index 2c97091ea..bc69f8bac 100644 --- a/magicctapipe/scripts/lst1_magic/stereo_events.py +++ b/magicctapipe/scripts/lst1_magic/stereo_events.py @@ -33,10 +33,10 @@ def configfile_stereo(ids, target_dir): Path to the working directory """ - f = open(target_dir+'/config_stereo.yaml','w') - f.write("mc_tel_ids:\n LST-1: 
"+str(ids[0])+"\n LST-2: "+str(ids[1])+"\n LST-3: "+str(ids[2])+"\n LST-4: "+str(ids[3])+"\n MAGIC-I: "+str(ids[4])+"\n MAGIC-II: "+str(ids[5])+"\n\n") - f.write('stereo_reco:\n quality_cuts: "(intensity > 50) & (width > 0)"\n theta_uplim: "6 arcmin"\n') - f.close() + with open(target_dir+'/config_stereo.yaml','w') as f: + f.write("mc_tel_ids:\n LST-1: "+str(ids[0])+"\n LST-2: "+str(ids[1])+"\n LST-3: "+str(ids[2])+"\n LST-4: "+str(ids[3])+"\n MAGIC-I: "+str(ids[4])+"\n MAGIC-II: "+str(ids[5])+"\n\n") + f.write('stereo_reco:\n quality_cuts: "(intensity > 50) & (width > 0)"\n theta_uplim: "6 arcmin"\n') + def bash_stereo(target_dir, scripts_dir): @@ -65,23 +65,23 @@ def bash_stereo(target_dir, scripts_dir): os.system(f"ls {nightLST}/*LST*.h5 > {nightLST}/list_coin.txt") #generating a list with the DL1 coincident data files. process_size = len(np.genfromtxt(nightLST+"/list_coin.txt",dtype="str")) - 1 - f = open(f"StereoEvents_{nightLST.split('/')[-1]}.sh","w") - f.write("#!/bin/sh\n\n") - f.write("#SBATCH -p short\n") - f.write("#SBATCH -J "+process_name+"_stereo\n") - f.write(f"#SBATCH --array=0-{process_size}%100\n") - f.write("#SBATCH -N 1\n\n") - f.write("ulimit -l unlimited\n") - f.write("ulimit -s unlimited\n") - f.write("ulimit -a\n\n") - - f.write(f"export INPUTDIR={nightLST}\n") - f.write(f"export OUTPUTDIR={stereoDir}\n") - f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") - f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") - f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n magic-lst python {scripts_dir}/lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") - f.close() + with open(f"StereoEvents_{nightLST.split('/')[-1]}.sh","w") as f: + f.write("#!/bin/sh\n\n") + f.write("#SBATCH -p short\n") + f.write("#SBATCH -J "+process_name+"_stereo\n") + f.write(f"#SBATCH --array=0-{process_size}%100\n") + f.write("#SBATCH -N 1\n\n") 
+ f.write("ulimit -l unlimited\n") + f.write("ulimit -s unlimited\n") + f.write("ulimit -a\n\n") + + f.write(f"export INPUTDIR={nightLST}\n") + f.write(f"export OUTPUTDIR={stereoDir}\n") + f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") + f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") + f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") + f.write(f"conda run -n magic-lst python {scripts_dir}/lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") + def bash_stereoMC(target_dir, identification, scripts_dir): @@ -106,24 +106,24 @@ def bash_stereoMC(target_dir, identification, scripts_dir): os.system(f"ls {inputdir}/dl1*.h5 > {inputdir}/list_coin.txt") #generating a list with the DL1 coincident data files. process_size = len(np.genfromtxt(inputdir+"/list_coin.txt",dtype="str")) - 1 - f = open(f"StereoEvents_{identification}.sh","w") - f.write("#!/bin/sh\n\n") - f.write("#SBATCH -p xxl\n") - f.write("#SBATCH -J "+process_name+"_stereo\n") - f.write(f"#SBATCH --array=0-{process_size}%100\n") - f.write('#SBATCH --mem=30g\n') - f.write("#SBATCH -N 1\n\n") - f.write("ulimit -l unlimited\n") - f.write("ulimit -s unlimited\n") - f.write("ulimit -a\n\n") - - f.write(f"export INPUTDIR={inputdir}\n") - f.write(f"export OUTPUTDIR={inputdir}/StereoMerged\n") - f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") - f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") - f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n magic-lst python {scripts_dir}/lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") - f.close() + with open(f"StereoEvents_{identification}.sh","w") as f: + f.write("#!/bin/sh\n\n") + f.write("#SBATCH -p xxl\n") + f.write("#SBATCH -J "+process_name+"_stereo\n") + f.write(f"#SBATCH --array=0-{process_size}%100\n") + f.write('#SBATCH 
--mem=30g\n') + f.write("#SBATCH -N 1\n\n") + f.write("ulimit -l unlimited\n") + f.write("ulimit -s unlimited\n") + f.write("ulimit -a\n\n") + + f.write(f"export INPUTDIR={inputdir}\n") + f.write(f"export OUTPUTDIR={inputdir}/StereoMerged\n") + f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") + f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") + f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") + f.write(f"conda run -n magic-lst python {scripts_dir}/lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") + From 10e88496e91f6041d135d08c78fe25b0c168c9a2 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Mon, 2 Oct 2023 09:15:19 +0200 Subject: [PATCH 30/76] env name, minor fixes --- magicctapipe/resources/config_general.yaml | 1 + .../scripts/lst1_magic/coincident_events.py | 7 +++-- ...ing_runs_and_splitting_training_samples.py | 29 ++++++++++--------- .../lst1_magic/setting_up_config_and_dir.py | 27 ++++++++--------- .../scripts/lst1_magic/stereo_events.py | 20 +++++++------ 5 files changed, 45 insertions(+), 39 deletions(-) diff --git a/magicctapipe/resources/config_general.yaml b/magicctapipe/resources/config_general.yaml index 0a694d326..c0aa53bca 100644 --- a/magicctapipe/resources/config_general.yaml +++ b/magicctapipe/resources/config_general.yaml @@ -25,4 +25,5 @@ general: MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" LST_runs : "LST_runs.txt" proton_train : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest + env_name : magic-lst diff --git a/magicctapipe/scripts/lst1_magic/coincident_events.py b/magicctapipe/scripts/lst1_magic/coincident_events.py index 3512f9592..945dee861 100644 --- a/magicctapipe/scripts/lst1_magic/coincident_events.py +++ b/magicctapipe/scripts/lst1_magic/coincident_events.py @@ -84,7 +84,7 @@ def linking_lst(target_dir, LST_runs, LST_version): 
-def bash_coincident(target_dir, scripts_dir): +def bash_coincident(target_dir, scripts_dir, env_name): """ This function generates the bashscript for running the coincidence analysis. @@ -118,7 +118,7 @@ def bash_coincident(target_dir, scripts_dir): f.write("SAMPLE_LIST=($(<$OUTPUTDIR/list_LST.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") f.write("export LOG=$OUTPUTDIR/coincidence_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n magic-lst python {scripts_dir}/lst1_magic_event_coincidence.py --input-file-lst $SAMPLE --input-dir-magic $INM --output-dir $OUTPUTDIR --config-file {target_dir}/config_coincidence.yaml >$LOG 2>&1") + f.write(f"conda run -n {env_name} python {scripts_dir}/lst1_magic_event_coincidence.py --input-file-lst $SAMPLE --input-dir-magic $INM --output-dir $OUTPUTDIR --config-file {target_dir}/config_coincidence.yaml >$LOG 2>&1") @@ -149,6 +149,7 @@ def main(): target_dir = str(Path(config["directories"]["workspace_dir"]))+"/"+config["directories"]["target_name"] scripts_dir = str(Path(config["directories"]["scripts_dir"])) + env_name = config["general"]["env_name"] LST_runs_and_dates = config["general"]["LST_runs"] LST_runs = np.genfromtxt(LST_runs_and_dates,dtype=str,delimiter=',') @@ -164,7 +165,7 @@ def main(): print("***** Generating the bashscript...") - bash_coincident(target_dir, scripts_dir) + bash_coincident(target_dir, scripts_dir, env_name) print("***** Submitting processess to the cluster...") diff --git a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py index 3122c971d..0909d324a 100644 --- a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py +++ b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py @@ -93,7 +93,7 @@ def split_train_test(target_dir, train_fraction): os.system(f"mv {list_of_dir[directory]}*.h5 
"+proton_dir+"/../protons_test/"+list_of_dir[directory].split("/")[-2]) os.system(f"rm -r {list_of_dir[directory]}") -def merge(target_dir, identification, MAGIC_runs, scripts_dir): +def merge(target_dir, identification, MAGIC_runs, scripts_dir, env_name): """ This function creates the bash scripts to run merge_hdf_files.py in all MAGIC subruns. @@ -131,7 +131,7 @@ def merge(target_dir, identification, MAGIC_runs, scripts_dir): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/{i[1]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run - f.write(f'conda run -n magic-lst {scripts_dir}/python merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M1/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') + f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M1/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') if os.path.exists(MAGIC_DL1_dir+"/M2"): for i in MAGIC_runs: @@ -139,25 +139,25 @@ def merge(target_dir, identification, MAGIC_runs, scripts_dir): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/{i[1]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run - f.write(f'conda run -n magic-lst {scripts_dir}/python merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M2/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') + f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M2/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') elif identification == "1_M1M2": if os.path.exists(MAGIC_DL1_dir+"/M1") & os.path.exists(MAGIC_DL1_dir+"/M2"): for i in MAGIC_runs: if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/Merged"): 
os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/Merged") - f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --run-wise \n') + f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --run-wise \n') else: for i in MAGIC_runs: if not os.path.exists(MAGIC_DL1_dir+f"/Merged/Merged_{i[0]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/Merged_{i[0]}") #Creating a merged directory for each night - f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --output-dir {MAGIC_DL1_dir}/Merged/Merged_{i[0]} \n') + f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --output-dir {MAGIC_DL1_dir}/Merged/Merged_{i[0]} \n') -def mergeMC(target_dir, identification, scripts_dir): +def mergeMC(target_dir, identification, scripts_dir, env_name): """ This function creates the bash scripts to run merge_hdf_files.py in all MC runs. 
@@ -202,7 +202,7 @@ def mergeMC(target_dir, identification, scripts_dir): f.write(f"SAMPLE_LIST=($(<{MC_DL1_dir}/{identification}/list_of_nodes.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") f.write(f'export LOG={MC_DL1_dir}/{identification}/Merged'+'/merged_${SLURM_ARRAY_TASK_ID}.log\n') - f.write(f'conda run -n magic-lst python {scripts_dir}/merge_hdf_files.py --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 2>&1\n') + f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 2>&1\n') @@ -238,21 +238,22 @@ def main(): train_fraction = float(config["general"]["proton_train"]) scripts_dir = str(Path(config["directories"]["scripts_dir"])) + env_name = config["general"]["env_name"] #Here we slice the proton MC data into "train" and "test": print("***** Splitting protons into 'train' and 'test' datasets...") split_train_test(target_dir, train_fraction) print("***** Generating merge bashscripts...") - merge(target_dir, "0_subruns", MAGIC_runs, scripts_dir) #generating the bash script to merge the subruns - merge(target_dir, "1_M1M2", MAGIC_runs, scripts_dir) #generating the bash script to merge the M1 and M2 runs - merge(target_dir, "2_nights", MAGIC_runs, scripts_dir) #generating the bash script to merge all runs per night + merge(target_dir, "0_subruns", MAGIC_runs, scripts_dir, env_name) #generating the bash script to merge the subruns + merge(target_dir, "1_M1M2", MAGIC_runs, scripts_dir, env_name) #generating the bash script to merge the M1 and M2 runs + merge(target_dir, "2_nights", MAGIC_runs, scripts_dir, env_name) #generating the bash script to merge all runs per night print("***** Generating mergeMC bashscripts...") - mergeMC(target_dir, "protons", scripts_dir) #generating the bash script to merge the files - mergeMC(target_dir, "gammadiffuse", scripts_dir) #generating the bash script to merge the files - 
mergeMC(target_dir, "gammas", scripts_dir) #generating the bash script to merge the files - mergeMC(target_dir, "protons_test", scripts_dir) + mergeMC(target_dir, "protons", scripts_dir, env_name) #generating the bash script to merge the files + mergeMC(target_dir, "gammadiffuse", scripts_dir, env_name) #generating the bash script to merge the files + mergeMC(target_dir, "gammas", scripts_dir, env_name) #generating the bash script to merge the files + mergeMC(target_dir, "protons_test", scripts_dir, env_name) print("***** Running merge_hdf_files.py in the MAGIC data files...") diff --git a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py index 9af69728d..2afe3c100 100644 --- a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py +++ b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py @@ -18,10 +18,10 @@ you can do as follows: Only MAGIC: -$ python setting_up_config_and_dir.py --partial-analysis onlyMAGIC +$ python setting_up_config_and_dir.py --analysis type onlyMAGIC Only MC: -$ python setting_up_config_and_dir.py --partial-analysis onlyMC +$ python setting_up_config_and_dir.py --analysis type onlyMC """ @@ -114,7 +114,7 @@ def config_file_gen(ids, target_dir): -def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, focal_length, scripts_dir): +def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, focal_length, scripts_dir, env_name): """ This function creates the lists list_nodes_gamma_complete.txt and list_folder_gamma.txt with the MC file paths. 
@@ -194,7 +194,7 @@ def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, 'cat list_dl0_ok.txt | while read line\n', 'do\n', ' cd '+target_dir+'/../\n', - f' conda run -n magic-lst python {scripts_dir}/lst1_magic_mc_dl0_to_dl1.py --input-file $line --output-dir '+target_dir+f'/DL1/MC/{particle_type}/$SAMPLE --config-file '+target_dir+'/config_DL0_to_DL1.yaml >>$LOG 2>&1 --focal_length_choice '+focal_length+'\n\n', + f' conda run -n {env_name} python {scripts_dir}/lst1_magic_mc_dl0_to_dl1.py --input-file $line --output-dir '+target_dir+f'/DL1/MC/{particle_type}/$SAMPLE --config-file '+target_dir+'/config_DL0_to_DL1.yaml --focal_length_choice '+focal_length+ '>>$LOG 2>&1\n\n', 'done\n', ""] f.writelines(lines_of_config_file) @@ -203,7 +203,7 @@ def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, -def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir): +def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir, env_name): """ Below we create a bash script that links the the MAGIC data paths to each subdirectory. 
@@ -257,7 +257,7 @@ def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir) 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', - f'conda run -n magic-lst python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', + f'conda run -n {env_name} python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', ""] f.writelines(lines_of_config_file) @@ -282,7 +282,7 @@ def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir) 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', - f'conda run -n magic-lst python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', + f'conda run -n {env_name} python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', ""] f.writelines(lines_of_config_file) @@ -408,6 +408,7 @@ def main(): MC_gammadiff = str(Path(config["directories"]["MC_gammadiff"])) scripts_dir = str(Path(config["directories"]["scripts_dir"])) + env_name = config["general"]["env_name"] print("***** Linking MC paths - this may take a few minutes ******") @@ -420,11 +421,11 @@ def main(): #Below we run the analysis on the MC data if (args.analysis_type=='onlyMC') or (args.analysis_type=='doEverything'): - lists_and_bash_generator("gammas", target_dir, MC_gammas, SimTel_version, focal_length, scripts_dir) #gammas - #lists_and_bash_generator("electrons", target_dir, MC_electrons, SimTel_version, focal_length, scripts_dir) #electrons - 
#lists_and_bash_generator("helium", target_dir, MC_helium, SimTel_version, focal_length, scripts_dir) #helium - lists_and_bash_generator("protons", target_dir, MC_protons, SimTel_version, focal_length, scripts_dir) #protons - lists_and_bash_generator("gammadiffuse", target_dir, MC_gammadiff, SimTel_version, focal_length, scripts_dir) #gammadiffuse + lists_and_bash_generator("gammas", target_dir, MC_gammas, SimTel_version, focal_length, scripts_dir, env_name) #gammas + #lists_and_bash_generator("electrons", target_dir, MC_electrons, SimTel_version, focal_length, scripts_dir, env_name) #electrons + #lists_and_bash_generator("helium", target_dir, MC_helium, SimTel_version, focal_length, scripts_dir, env_name) #helium + lists_and_bash_generator("protons", target_dir, MC_protons, SimTel_version, focal_length, scripts_dir, env_name) #protons + lists_and_bash_generator("gammadiffuse", target_dir, MC_gammadiff, SimTel_version, focal_length, scripts_dir, env_name) #gammadiffuse #Here we do the MC DL0 to DL1 conversion: list_of_MC = glob.glob("linking_MC_*s.sh") @@ -442,7 +443,7 @@ def main(): #Below we run the analysis on the MAGIC data if (args.analysis_type=='onlyMAGIC') or (args.analysis_type=='doEverything'): - lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir) #MAGIC real data + lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir, env_name) #MAGIC real data if (telescope_ids[-2] > 0) or (telescope_ids[-1] > 0): list_of_MAGIC_runs = glob.glob("MAGIC-*.sh") diff --git a/magicctapipe/scripts/lst1_magic/stereo_events.py b/magicctapipe/scripts/lst1_magic/stereo_events.py index bc69f8bac..b722d7f1e 100644 --- a/magicctapipe/scripts/lst1_magic/stereo_events.py +++ b/magicctapipe/scripts/lst1_magic/stereo_events.py @@ -39,7 +39,7 @@ def configfile_stereo(ids, target_dir): -def bash_stereo(target_dir, scripts_dir): +def bash_stereo(target_dir, scripts_dir, env_name): """ This function generates the bashscript for running the 
stereo analysis. @@ -80,10 +80,10 @@ def bash_stereo(target_dir, scripts_dir): f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n magic-lst python {scripts_dir}/lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") + f.write(f"conda run -n {env_name} python {scripts_dir}/lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") -def bash_stereoMC(target_dir, identification, scripts_dir): +def bash_stereoMC(target_dir, identification, scripts_dir, env_name): """ This function generates the bashscript for running the stereo analysis. @@ -122,7 +122,7 @@ def bash_stereoMC(target_dir, identification, scripts_dir): f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n magic-lst python {scripts_dir}/lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") + f.write(f"conda run -n {env_name} python {scripts_dir}/lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") @@ -155,6 +155,8 @@ def main(): scripts_dir = str(Path(config["directories"]["scripts_dir"])) + env_name = config["general"]["env_name"] + telescope_ids = list(config["mc_tel_ids"].values()) print("***** Generating file config_stereo.yaml...") @@ -162,13 +164,13 @@ def main(): configfile_stereo(telescope_ids, target_dir) print("***** Generating the bashscript...") - bash_stereo(target_dir, scripts_dir) + bash_stereo(target_dir, scripts_dir, env_name) print("***** Generating the bashscript for MCs...") - 
bash_stereoMC(target_dir,"gammadiffuse", scripts_dir) - bash_stereoMC(target_dir,"gammas", scripts_dir) - bash_stereoMC(target_dir,"protons", scripts_dir) - bash_stereoMC(target_dir,"protons_test", scripts_dir) + bash_stereoMC(target_dir,"gammadiffuse", scripts_dir, env_name) + bash_stereoMC(target_dir,"gammas", scripts_dir, env_name) + bash_stereoMC(target_dir,"protons", scripts_dir, env_name) + bash_stereoMC(target_dir,"protons_test", scripts_dir, env_name) print("***** Submitting processes to the cluster...") print("Process name: "+target_dir.split("/")[-2:][1]+"_stereo") From 85dbbb65c0fc18fd8003ee807a4462ee021e9c38 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Mon, 2 Oct 2023 09:58:46 +0200 Subject: [PATCH 31/76] minor fixes --- magicctapipe/conftest.py | 10 ++-- magicctapipe/image/calib.py | 5 -- magicctapipe/io/gadf.py | 10 +--- magicctapipe/io/io.py | 15 ++--- magicctapipe/reco/estimators.py | 2 +- .../{config.yaml => test_config.yaml} | 0 ..._general.yaml => test_config_general.yaml} | 18 +++--- ...nfig_monly.yaml => test_config_monly.yaml} | 0 magicctapipe/scripts/lst1_magic/README.md | 60 ++++--------------- .../lst1_magic_dl1_stereo_to_dl2.py | 2 +- .../lst1_magic_event_coincidence.py | 2 +- .../lst1_magic/lst1_magic_stereo_reco.py | 6 +- .../lst1_magic/lst1_magic_train_rfs.py | 2 +- ...ing_runs_and_splitting_training_samples.py | 6 +- 14 files changed, 44 insertions(+), 94 deletions(-) rename magicctapipe/resources/{config.yaml => test_config.yaml} (100%) rename magicctapipe/resources/{config_general.yaml => test_config_general.yaml} (56%) rename magicctapipe/resources/{config_monly.yaml => test_config_monly.yaml} (100%) diff --git a/magicctapipe/conftest.py b/magicctapipe/conftest.py index 8d0cf4455..bf9539bf7 100644 --- a/magicctapipe/conftest.py +++ b/magicctapipe/conftest.py @@ -47,11 +47,9 @@ Temporary paths """ - @pytest.fixture(scope="session") def temp_DL1_gamma(tmp_path_factory): - return tmp_path_factory.mktemp("DL1_gammas") - + return 
tmp_path_factory.mktemp("DL1_gammas") @pytest.fixture(scope="session") @@ -377,18 +375,18 @@ def dl1_lst(base_url, env_prefix): @pytest.fixture(scope="session") def config(): - config_path = resource_file("config.yaml") + config_path = resource_file("test_config.yaml") return config_path @pytest.fixture(scope="session") def config_monly(): - config_path = resource_file("config_monly.yaml") + config_path = resource_file("test_config_monly.yaml") return config_path @pytest.fixture(scope="session") def config_gen(): - config_path = resource_file("config_general.yaml") + config_path = resource_file("test_config_general.yaml") with open(config_path, "rb") as f: config = yaml.safe_load(f) return config diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index ecd2d4ca1..20866cac1 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -1,14 +1,9 @@ - - import numpy as np - - from ctapipe.image import ( apply_time_delta_cleaning, number_of_islands, tailcuts_clean, ) - from lstchain.image.cleaning import apply_dynamic_cleaning from lstchain.image.modifier import ( add_noise_in_pixels, diff --git a/magicctapipe/io/gadf.py b/magicctapipe/io/gadf.py index 48501259f..861550c9e 100644 --- a/magicctapipe/io/gadf.py +++ b/magicctapipe/io/gadf.py @@ -20,7 +20,6 @@ "create_pointing_hdu", ] - logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) @@ -28,8 +27,6 @@ # The MJD reference time MJDREF = Time(0, format="unix", scale="utc") - - @u.quantity_input def create_gh_cuts_hdu( gh_cuts, reco_energy_bins: u.TeV, fov_offset_bins: u.deg, **header_cards @@ -130,7 +127,7 @@ def create_event_hdu( "LST1_M1": [1, 2], # combo_type = 1 "LST1_M2": [1, 3], # combo_type = 2 "LST1_M1_M2": [1, 2, 3], # combo_type = 3 - } #####TO BE REMOVED WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
+ } #TODO: REMOVE WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) mjdreff, mjdrefi = np.modf(MJDREF.mjd) @@ -233,11 +230,6 @@ def create_event_hdu( return event_hdu - - - - - def create_gti_hdu(event_table): """ Creates a fits binary table HDU for Good Time Interval (GTI). diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index 2c5028b08..42216b5d3 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -65,7 +65,7 @@ def telescope_combinations(config): Parameters ---------- config: dict - yaml file with information about the telescope IDs. Typically evoked from "config_general.yaml" in the main scripts. + yaml file with information about the telescope IDs. Returns ------- @@ -163,8 +163,7 @@ def get_stereo_events_old( "LST1_M1": [1, 2], # combo_type = 1 "LST1_M2": [1, 3], # combo_type = 2 "LST1_M1_M2": [1, 2, 3], # combo_type = 3 - } #####TO BE REMOVED WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - + } #TODO: REMOVE WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) event_data_stereo = event_data.copy() # Apply the quality cuts @@ -234,11 +233,13 @@ def get_stereo_events( event_data: pandas.core.frame.DataFrame Data frame of shower events config: dict - Read from the yaml file with information about the telescope IDs. Typically called "config_general.yaml" + Read from the yaml file with information about the telescope IDs. quality_cuts: str Quality cuts applied to the input data group_index: list Index to group telescope events + eval_multi_combo: bool + If True, multiplicity is recalculated, combination type is assigned to each event and the fraction of events per combination type is shown Returns @@ -514,7 +515,7 @@ def load_magic_dl1_data_files(input_dir, config): input_dir: str Path to a directory where input MAGIC DL1 data files are stored config: dict - yaml file with information about the telescope IDs. 
Typically called "config_general.yaml" + yaml file with information about the telescope IDs. Returns ------- @@ -618,7 +619,7 @@ def load_train_data_files( "LST1_M1": [1, 2], # combo_type = 1 "LST1_M2": [1, 3], # combo_type = 2 "LST1_M1_M2": [1, 2, 3], # combo_type = 3 - } #####TO BE REMOVED WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + } #TODO: REMOVE WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) # Find the input files file_mask = f"{input_dir}/dl1_stereo_*.h5" @@ -680,7 +681,7 @@ def load_train_data_files_tel(input_dir, config, offaxis_min=None, offaxis_max=N input_dir: str Path to a directory where input DL1-stereo files are stored config: dict - yaml file with information about the telescope IDs. Typically called "config_general.yaml" + yaml file with information about the telescope IDs. offaxis_min: str Minimum shower off-axis angle allowed, whose format should be acceptable by `astropy.units.quantity.Quantity` diff --git a/magicctapipe/reco/estimators.py b/magicctapipe/reco/estimators.py index 44d4a4f0b..97caed90b 100644 --- a/magicctapipe/reco/estimators.py +++ b/magicctapipe/reco/estimators.py @@ -14,7 +14,7 @@ logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) -TEL_NAMES = {1: "LST-1", 2: "MAGIC-I", 3: "MAGIC-II"} #####TO BE REMOVED WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
+TEL_NAMES = {1: "LST-1", 2: "MAGIC-I", 3: "MAGIC-II"} #TODO: REMOVE WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) class EnergyRegressor: diff --git a/magicctapipe/resources/config.yaml b/magicctapipe/resources/test_config.yaml similarity index 100% rename from magicctapipe/resources/config.yaml rename to magicctapipe/resources/test_config.yaml diff --git a/magicctapipe/resources/config_general.yaml b/magicctapipe/resources/test_config_general.yaml similarity index 56% rename from magicctapipe/resources/config_general.yaml rename to magicctapipe/resources/test_config_general.yaml index c0aa53bca..3f30ae127 100644 --- a/magicctapipe/resources/config_general.yaml +++ b/magicctapipe/resources/test_config_general.yaml @@ -17,13 +17,13 @@ directories: MC_gammadiff : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/GammaDiffuse/dec_2276/sim_telarray/" general: - target_RA_deg : 83.633083 #RA in degrees - target_Dec_deg: 22.0145 #Dec in degrees - SimTel_version: "v1.4" - LST_version : "v0.9" - focal_length : "effective" #effective #nominal - MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" - LST_runs : "LST_runs.txt" - proton_train : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest - env_name : magic-lst + target_RA_deg : 83.633083 #RA in degrees + target_Dec_deg : 22.0145 #Dec in degrees + SimTel_version : "v1.4" + LST_version : "v0.9" + focal_length : "effective" #effective #nominal + MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" + LST_runs : "LST_runs.txt" + proton_train_fraction : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest + env_name : magic-lst diff --git a/magicctapipe/resources/config_monly.yaml b/magicctapipe/resources/test_config_monly.yaml similarity index 100% rename from magicctapipe/resources/config_monly.yaml rename to magicctapipe/resources/test_config_monly.yaml 
diff --git a/magicctapipe/scripts/lst1_magic/README.md b/magicctapipe/scripts/lst1_magic/README.md index 5004ffcb6..81c14a6a3 100644 --- a/magicctapipe/scripts/lst1_magic/README.md +++ b/magicctapipe/scripts/lst1_magic/README.md @@ -68,14 +68,15 @@ directories: MC_gammadiff : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/GammaDiffuse/dec_2276/sim_telarray/" general: - target_RA_deg : 83.633083 #RA in degrees - target_Dec_deg: 22.0145 #Dec in degrees - SimTel_version: "v1.4" - LST_version : "v0.9" - focal_length : "effective" #effective #nominal - MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" - LST_runs : "LST_runs.txt" - proton_train : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest + target_RA_deg : 83.633083 #RA in degrees + target_Dec_deg : 22.0145 #Dec in degrees + SimTel_version : "v1.4" + LST_version : "v0.9" + focal_length : "effective" #effective #nominal + MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" + LST_runs : "LST_runs.txt" + proton_train_fraction : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest + env_name : magic-lst ``` @@ -85,32 +86,7 @@ The file `MAGIC_runs.txt` looks like that: 2020_11_19,5093175 2020_12_08,5093491 2020_12_08,5093492 -2020_12_16,5093711 -2020_12_16,5093712 -2020_12_16,5093713 -2020_12_16,5093714 -2021_02_14,5094483 -2021_02_14,5094484 -2021_02_14,5094485 -2021_02_14,5094486 -2021_02_14,5094487 -2021_02_14,5094488 -2021_03_16,5095265 -2021_03_16,5095266 -2021_03_16,5095267 -2021_03_16,5095268 -2021_03_16,5095271 -2021_03_16,5095272 -2021_03_16,5095273 -2021_03_16,5095277 -2021_03_16,5095278 -2021_03_16,5095281 -2021_03_18,5095376 -2021_03_18,5095377 -2021_03_18,5095380 -2021_03_18,5095381 -2021_03_18,5095382 -2021_03_18,5095383 + ``` @@ -120,19 +96,7 @@ The columns here represent the night and run in which you want to select data. 
P 2020_11_18,2923 2020_11_18,2924 2020_12_07,3093 -2020_12_15,3265 -2020_12_15,3266 -2020_12_15,3267 -2020_12_15,3268 -2021_02_13,3631 -2021_02_13,3633 -2021_02_13,3634 -2021_02_13,3635 -2021_02_13,3636 -2021_03_15,4069 -2021_03_15,4070 -2021_03_15,4071 -2021_03_17,4125 + ``` Note that the LST nights appear as being one day before MAGIC's!!! This is because LST saves the date at the beginning of the night, while MAGIC saves it at the end. If there is no LST data, please fill this file with "0,0". These files are the only ones we need to modify in order to convert DL0 into DL1 data. @@ -189,7 +153,7 @@ Process name: merging_CrabTeste To check the jobs submitted to the cluster, type: squeue -n merging_CrabTeste ``` -This script will slice the proton MC sample according to the entry "proton_train" in the "config_general.yaml" file, and then it will merge the MAGIC data files in the following order: +This script will slice the proton MC sample according to the entry "proton_train_fraction" in the "config_general.yaml" file, and then it will merge the MAGIC data files in the following order: - MAGIC subruns are merged into single runs. - MAGIC I and II runs are merged (only if both telescopes are used, of course). - All runs in specific nights are merged, such that in the end we have only one datafile per night. diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py b/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py index 8a4c5631d..70d21c2ff 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_dl1_stereo_to_dl2.py @@ -42,7 +42,7 @@ "LST1_M1": [1, 2], # combo_type = 1 "LST1_M2": [1, 3], # combo_type = 2 "LST1_M1_M2": [1, 2, 3], # combo_type = 3 -} #####TO BE REMOVED WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
+} #TODO: REMOVE WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) def apply_rfs(event_data, estimator): """ diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index f795f8efb..8d627ec18 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -93,7 +93,7 @@ def telescope_positions(config): Parameters ---------- config: dict - dictionary generated from an yaml file with information about the telescope IDs. Typically evoked from "config_coincidence.yaml" in the main scripts. + dictionary generated from an yaml file with information about the telescope IDs. Returns ------- diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py index d967598fe..0330fb4c6 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py @@ -60,15 +60,15 @@ def calculate_pointing_separation(event_data, config): """ - Calculates the angular distance of the LST-1 and MAGIC pointing + Calculates the angular distance of the LST and MAGIC pointing directions. 
Parameters ---------- event_data: pandas.core.frame.DataFrame - Data frame of LST-1 and MAGIC events + Data frame of LST and MAGIC events config: dict - Configuration for the LST-1 + MAGIC analysis + Configuration for the LST + MAGIC analysis Returns ------- theta: pandas.core.series.Series diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_train_rfs.py b/magicctapipe/scripts/lst1_magic/lst1_magic_train_rfs.py index a63a130f6..d5bc2fbf8 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_train_rfs.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_train_rfs.py @@ -54,7 +54,7 @@ logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) -TEL_NAMES = {1: "LST-1", 2: "MAGIC-I", 3: "MAGIC-II"} #####TO BE REMOVED WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +TEL_NAMES = {1: "LST-1", 2: "MAGIC-I", 3: "MAGIC-II"} #TODO: REMOVE WHEN SWITCHING TO THE NEW RFs IMPLEMENTTATION (1 RF PER TELESCOPE) # True event class of gamma and proton MCs diff --git a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py index 0909d324a..dfb952b63 100644 --- a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py +++ b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py @@ -1,9 +1,9 @@ """ -This script split the proton MC data sample into "train" +This script splits the proton MC data sample into "train" and "test", deletes possible failed runs (only those files that end up with a size < 1 kB), and generates the bash scripts to merge the data files calling the script "merge_hdf_files.py" -in the follwoing order: +in the following order: MAGIC: 1) Merge the subruns into runs for M1 and M2 individually. 
@@ -235,7 +235,7 @@ def main(): MAGIC_runs_and_dates = config["general"]["MAGIC_runs"] MAGIC_runs = np.genfromtxt(MAGIC_runs_and_dates,dtype=str,delimiter=',') - train_fraction = float(config["general"]["proton_train"]) + train_fraction = float(config["general"]["proton_train_fraction"]) scripts_dir = str(Path(config["directories"]["scripts_dir"])) env_name = config["general"]["env_name"] From 76fb3fdafa6074c24b15d0b07bb753b32132d794 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Mon, 2 Oct 2023 11:27:40 +0200 Subject: [PATCH 32/76] f_string --- .../scripts/lst1_magic/coincident_events.py | 29 ++++++++-------- .../scripts/lst1_magic/stereo_events.py | 34 +++++++++---------- 2 files changed, 31 insertions(+), 32 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/coincident_events.py b/magicctapipe/scripts/lst1_magic/coincident_events.py index 945dee861..448acdc4f 100644 --- a/magicctapipe/scripts/lst1_magic/coincident_events.py +++ b/magicctapipe/scripts/lst1_magic/coincident_events.py @@ -40,8 +40,8 @@ def configfile_coincidence(ids, target_dir): Path to the working directory """ - with open(target_dir+'/config_coincidence.yaml','w') as f: - f.write("mc_tel_ids:\n LST-1: "+str(ids[0])+"\n LST-2: "+str(ids[1])+"\n LST-3: "+str(ids[2])+"\n LST-4: "+str(ids[3])+"\n MAGIC-I: "+str(ids[4])+"\n MAGIC-II: "+str(ids[5])+"\n\n") + with open(f'{target_dir}/config_coincidence.yaml','w') as f: + f.write(f"mc_tel_ids:\n LST-1: {ids[0]}\n LST-2: {ids[1]}\n LST-3: {ids[2]}\n LST-4: {ids[3]}\n MAGIC-I: {ids[4]}\n MAGIC-II: {ids[5]}\n\n") f.write('event_coincidence:\n timestamp_type_lst: "dragon_time" # select "dragon_time", "tib_time" or "ucts_time"\n window_half_width: "300 ns"\n') f.write(' pre_offset_search: true\n') f.write(' n_pre_offset_search_events: 100\n') @@ -63,8 +63,8 @@ def linking_lst(target_dir, LST_runs, LST_version): """ - coincidence_DL1_dir = target_dir+"/DL1/Observations" - if not os.path.exists(coincidence_DL1_dir+"/Coincident"): + 
coincidence_DL1_dir = f"{target_dir}/DL1/Observations" + if not os.path.exists(f"{coincidence_DL1_dir}/Coincident"): os.mkdir(f"{coincidence_DL1_dir}/Coincident") for i in LST_runs: @@ -75,12 +75,12 @@ def linking_lst(target_dir, LST_runs, LST_version): if os.path.exists(f"{outputdir}/list_LST.txt"): with open(f"{outputdir}/list_LST.txt", "a") as LSTdataPathFile: for subrun in list_of_subruns: - LSTdataPathFile.write(subrun+"\n") #If this files already exists, simply append the new information + LSTdataPathFile.write(f"{subrun}\n") #If this files already exists, simply append the new information else: os.mkdir(outputdir) with open(f"{outputdir}/list_LST.txt", "w") as f: #If the file list_LST.txt does not exist, it will be created here for subrun in list_of_subruns: - f.write(subrun+"\n") + f.write(f"{subrun}\n") @@ -97,16 +97,16 @@ def bash_coincident(target_dir, scripts_dir, env_name): process_name = target_dir.split("/")[-2:][1] - listOfNightsLST = np.sort(glob.glob(target_dir+"/DL1/Observations/Coincident/*")) - listOfNightsMAGIC = np.sort(glob.glob(target_dir+"/DL1/Observations/Merged/Merged*")) + listOfNightsLST = np.sort(glob.glob(f"{target_dir}/DL1/Observations/Coincident/*")) + listOfNightsMAGIC = np.sort(glob.glob(f"{target_dir}/DL1/Observations/Merged/Merged*")) for nightMAGIC,nightLST in zip(listOfNightsMAGIC,listOfNightsLST): - process_size = len(np.genfromtxt(nightLST+"/list_LST.txt",dtype="str")) - 1 + process_size = len(np.genfromtxt(f"{nightLST}/list_LST.txt",dtype="str")) - 1 with open(f"LST_coincident_{nightLST.split('/')[-1]}.sh","w") as f: f.write("#!/bin/sh\n\n") f.write("#SBATCH -p short\n") - f.write("#SBATCH -J "+process_name+"_coincidence\n") + f.write(f"#SBATCH -J {process_name}_coincidence\n") f.write(f"#SBATCH --array=0-{process_size}%50\n") f.write("#SBATCH -N 1\n\n") f.write("ulimit -l unlimited\n") @@ -146,8 +146,7 @@ def main(): telescope_ids = list(config["mc_tel_ids"].values()) - target_dir = 
str(Path(config["directories"]["workspace_dir"]))+"/"+config["directories"]["target_name"] - + target_dir = f'{Path(config["directories"]["workspace_dir"])}/{config["directories"]["target_name"]}"' scripts_dir = str(Path(config["directories"]["scripts_dir"])) env_name = config["general"]["env_name"] @@ -169,8 +168,8 @@ def main(): print("***** Submitting processess to the cluster...") - print("Process name: "+target_dir.split("/")[-2:][1]+"_coincidence") - print("To check the jobs submitted to the cluster, type: squeue -n "+target_dir.split("/")[-2:][1]+"_coincidence") + print(f"Process name: {target_dir.split('/')[-2:][1]}_coincidence") + print(f"To check the jobs submitted to the cluster, type: squeue -n {target_dir.split('/')[-2:][1]}_coincidence") #Below we run the bash scripts to find the coincident events list_of_coincidence_scripts = np.sort(glob.glob("LST_coincident*.sh")) @@ -179,7 +178,7 @@ def main(): if n == 0: launch_jobs = f"coincidence{n}=$(sbatch --parsable {run})" else: - launch_jobs = launch_jobs + f" && coincidence{n}=$(sbatch --parsable --dependency=afterany:$coincidence{n-1} {run})" + launch_jobs = f"{launch_jobs} && coincidence{n}=$(sbatch --parsable --dependency=afterany:$coincidence{n-1} {run})" #print(launch_jobs) os.system(launch_jobs) diff --git a/magicctapipe/scripts/lst1_magic/stereo_events.py b/magicctapipe/scripts/lst1_magic/stereo_events.py index b722d7f1e..c431588bb 100644 --- a/magicctapipe/scripts/lst1_magic/stereo_events.py +++ b/magicctapipe/scripts/lst1_magic/stereo_events.py @@ -33,8 +33,8 @@ def configfile_stereo(ids, target_dir): Path to the working directory """ - with open(target_dir+'/config_stereo.yaml','w') as f: - f.write("mc_tel_ids:\n LST-1: "+str(ids[0])+"\n LST-2: "+str(ids[1])+"\n LST-3: "+str(ids[2])+"\n LST-4: "+str(ids[3])+"\n MAGIC-I: "+str(ids[4])+"\n MAGIC-II: "+str(ids[5])+"\n\n") + with open(f'{target_dir}/config_stereo.yaml','w') as f: + f.write(f"mc_tel_ids:\n LST-1: {ids[0]}\n LST-2: {ids[1]}\n LST-3: 
{ids[2]}\n LST-4: {ids[3]}\n MAGIC-I: {ids[4]}\n MAGIC-II: {ids[5]}\n\n") f.write('stereo_reco:\n quality_cuts: "(intensity > 50) & (width > 0)"\n theta_uplim: "6 arcmin"\n') @@ -52,23 +52,23 @@ def bash_stereo(target_dir, scripts_dir, env_name): process_name = target_dir.split("/")[-2:][1] - if not os.path.exists(target_dir+"/DL1/Observations/Coincident_stereo"): - os.mkdir(target_dir+"/DL1/Observations/Coincident_stereo") + if not os.path.exists(f"{target_dir}/DL1/Observations/Coincident_stereo"): + os.mkdir(f"{target_dir}/DL1/Observations/Coincident_stereo") - listOfNightsLST = np.sort(glob.glob(target_dir+"/DL1/Observations/Coincident/*")) + listOfNightsLST = np.sort(glob.glob(f"{target_dir}/DL1/Observations/Coincident/*")) for nightLST in listOfNightsLST: - stereoDir = target_dir+"/DL1/Observations/Coincident_stereo/"+nightLST.split('/')[-1] + stereoDir = f"{target_dir}/DL1/Observations/Coincident_stereo/{nightLST.split('/')[-1]}" if not os.path.exists(stereoDir): os.mkdir(stereoDir) os.system(f"ls {nightLST}/*LST*.h5 > {nightLST}/list_coin.txt") #generating a list with the DL1 coincident data files. 
- process_size = len(np.genfromtxt(nightLST+"/list_coin.txt",dtype="str")) - 1 + process_size = len(np.genfromtxt(f"{nightLST}/list_coin.txt",dtype="str")) - 1 with open(f"StereoEvents_{nightLST.split('/')[-1]}.sh","w") as f: f.write("#!/bin/sh\n\n") f.write("#SBATCH -p short\n") - f.write("#SBATCH -J "+process_name+"_stereo\n") + f.write(f"#SBATCH -J {process_name}_stereo\n") f.write(f"#SBATCH --array=0-{process_size}%100\n") f.write("#SBATCH -N 1\n\n") f.write("ulimit -l unlimited\n") @@ -98,18 +98,18 @@ def bash_stereoMC(target_dir, identification, scripts_dir, env_name): process_name = target_dir.split("/")[-2:][1] - if not os.path.exists(target_dir+f"/DL1/MC/{identification}/Merged/StereoMerged"): - os.mkdir(target_dir+f"/DL1/MC/{identification}/Merged/StereoMerged") + if not os.path.exists(f"{target_dir}/DL1/MC/{identification}/Merged/StereoMerged"): + os.mkdir(f"{target_dir}/DL1/MC/{identification}/Merged/StereoMerged") - inputdir = target_dir+f"/DL1/MC/{identification}/Merged" + inputdir = f"{target_dir}/DL1/MC/{identification}/Merged" os.system(f"ls {inputdir}/dl1*.h5 > {inputdir}/list_coin.txt") #generating a list with the DL1 coincident data files. 
- process_size = len(np.genfromtxt(inputdir+"/list_coin.txt",dtype="str")) - 1 + process_size = len(np.genfromtxt(f"{inputdir}/list_coin.txt",dtype="str")) - 1 with open(f"StereoEvents_{identification}.sh","w") as f: f.write("#!/bin/sh\n\n") f.write("#SBATCH -p xxl\n") - f.write("#SBATCH -J "+process_name+"_stereo\n") + f.write(f"#SBATCH -J {process_name}_stereo\n") f.write(f"#SBATCH --array=0-{process_size}%100\n") f.write('#SBATCH --mem=30g\n') f.write("#SBATCH -N 1\n\n") @@ -151,7 +151,7 @@ def main(): ) as f: # "rb" mode opens the file in binary format for reading config = yaml.safe_load(f) - target_dir = str(Path(config["directories"]["workspace_dir"]))+"/"+config["directories"]["target_name"] + target_dir = f'{Path(config["directories"]["workspace_dir"])}/{config["directories"]["target_name"]}' scripts_dir = str(Path(config["directories"]["scripts_dir"])) @@ -173,8 +173,8 @@ def main(): bash_stereoMC(target_dir,"protons_test", scripts_dir, env_name) print("***** Submitting processes to the cluster...") - print("Process name: "+target_dir.split("/")[-2:][1]+"_stereo") - print("To check the jobs submitted to the cluster, type: squeue -n "+target_dir.split("/")[-2:][1]+"_stereo") + print(f"Process name: {target_dir.split('/')[-2:][1]}_stereo") + print(f"To check the jobs submitted to the cluster, type: squeue -n {target_dir.split('/')[-2:][1]}_stereo") #Below we run the bash scripts to find the stereo events list_of_stereo_scripts = np.sort(glob.glob("StereoEvents_*.sh")) @@ -183,7 +183,7 @@ def main(): if n == 0: launch_jobs = f"stereo{n}=$(sbatch --parsable {run})" else: - launch_jobs = launch_jobs + f" && stereo{n}=$(sbatch --parsable --dependency=afterany:$stereo{n-1} {run})" + launch_jobs = f"{launch_jobs} && stereo{n}=$(sbatch --parsable --dependency=afterany:$stereo{n-1} {run})" #print(launch_jobs) os.system(launch_jobs) From 09118072012effb0eb59d0065dc89d2f3099b917 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Mon, 2 Oct 2023 12:10:25 +0200 
Subject: [PATCH 33/76] f-string --- ...ing_runs_and_splitting_training_samples.py | 88 +++++----- .../lst1_magic/setting_up_config_and_dir.py | 158 +++++++++--------- 2 files changed, 123 insertions(+), 123 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py index dfb952b63..4ac9de277 100644 --- a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py +++ b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py @@ -51,7 +51,7 @@ def cleaning(list_of_nodes, target_dir): os.chdir(list_of_nodes[i]) os.system('find . -type f -name "*.h5" -size -1k -delete') - os.chdir(target_dir+"/../") + os.chdir(f"{target_dir}/../") print("Cleaning done.") def split_train_test(target_dir, train_fraction): @@ -69,28 +69,28 @@ def split_train_test(target_dir, train_fraction): Fraction of proton MC files to be used in the training RF dataset """ - proton_dir = target_dir+"/DL1/MC/protons" + proton_dir = f"{target_dir}/DL1/MC/protons" - if not os.path.exists(proton_dir+"/train"): - os.mkdir(proton_dir+"/train") - if not os.path.exists(proton_dir+"/../protons_test"): - os.mkdir(proton_dir+"/../protons_test") + if not os.path.exists(f"{proton_dir}/train"): + os.mkdir(f"{proton_dir}/train") + if not os.path.exists(f"{proton_dir}/../protons_test"): + os.mkdir(f"{proton_dir}/../protons_test") - list_of_dir = np.sort(glob.glob(proton_dir+'/node*' + os.path.sep)) + list_of_dir = np.sort(glob.glob(f'{proton_dir}/node*{os.path.sep}')) for directory in tqdm(range(len(list_of_dir))): #tqdm allows us to print a progessbar in the terminal - if not os.path.exists(proton_dir+"/train/"+list_of_dir[directory].split("/")[-2]): - os.mkdir(proton_dir+"/train/"+list_of_dir[directory].split("/")[-2]) - if not os.path.exists(proton_dir+"/../protons_test/"+list_of_dir[directory].split("/")[-2]): - 
os.mkdir(proton_dir+"/../protons_test/"+list_of_dir[directory].split("/")[-2]) - list_of_runs = np.sort(glob.glob(proton_dir+"/"+list_of_dir[directory].split("/")[-2]+"/*.h5")) + if not os.path.exists(f"{proton_dir}/train/{list_of_dir[directory].split('/')[-2]}"): + os.mkdir(f"{proton_dir}/train/{list_of_dir[directory].split('/')[-2]}") + if not os.path.exists(f"{proton_dir}/../protons_test/{list_of_dir[directory].split('/')[-2]}"): + os.mkdir(f'{proton_dir}/../protons_test/{list_of_dir[directory].split("/")[-2]}') + list_of_runs = np.sort(glob.glob(f'{proton_dir}/{list_of_dir[directory].split("/")[-2]}/*.h5')) split_percent = int(len(list_of_runs)*train_fraction) for j in list_of_runs[0:split_percent]: - os.system(f"mv {j} {proton_dir}/train/"+list_of_dir[directory].split("/")[-2]) + os.system(f"mv {j} {proton_dir}/train/{list_of_dir[directory].split('/')[-2]}") - os.system(f"cp {list_of_dir[directory]}*.txt "+proton_dir+"/train/"+list_of_dir[directory].split("/")[-2]) - os.system(f"mv {list_of_dir[directory]}*.txt "+proton_dir+"/../protons_test/"+list_of_dir[directory].split("/")[-2]) - os.system(f"mv {list_of_dir[directory]}*.h5 "+proton_dir+"/../protons_test/"+list_of_dir[directory].split("/")[-2]) + os.system(f"cp {list_of_dir[directory]}*.txt {proton_dir}/train/{list_of_dir[directory].split('/')[-2]}") + os.system(f"mv {list_of_dir[directory]}*.txt {proton_dir}/../protons_test/{list_of_dir[directory].split('/')[-2]}") + os.system(f"mv {list_of_dir[directory]}*.h5 {proton_dir}/../protons_test/{list_of_dir[directory].split('/')[-2]}") os.system(f"rm -r {list_of_dir[directory]}") def merge(target_dir, identification, MAGIC_runs, scripts_dir, env_name): @@ -108,48 +108,48 @@ def merge(target_dir, identification, MAGIC_runs, scripts_dir, env_name): This matrix is imported from config_general.yaml and tells the function where to find the data and where to put the merged files """ - process_name = "merging_"+target_dir.split("/")[-2:][1] + process_name = 
f"merging_{target_dir.split('/')[-2:][1]}" - MAGIC_DL1_dir = target_dir+"/DL1/Observations" - if os.path.exists(MAGIC_DL1_dir+"/M1") & os.path.exists(MAGIC_DL1_dir+"/M2"): - if not os.path.exists(MAGIC_DL1_dir+"/Merged"): - os.mkdir(MAGIC_DL1_dir+"/Merged") + MAGIC_DL1_dir = f"{target_dir}/DL1/Observations" + if os.path.exists(f"{MAGIC_DL1_dir}/M1") & os.path.exists(f"{MAGIC_DL1_dir}/M2"): + if not os.path.exists(f"{MAGIC_DL1_dir}/Merged"): + os.mkdir(f"{MAGIC_DL1_dir}/Merged") with open(f"Merge_{identification}.sh","w") as f: f.write('#!/bin/sh\n\n') f.write('#SBATCH -p short\n') - f.write('#SBATCH -J '+process_name+'\n') + f.write(f'#SBATCH -J {process_name}\n') f.write('#SBATCH -N 1\n\n') f.write('ulimit -l unlimited\n') f.write('ulimit -s unlimited\n') f.write('ulimit -a\n\n') if identification == "0_subruns": - if os.path.exists(MAGIC_DL1_dir+"/M1"): + if os.path.exists(f"{MAGIC_DL1_dir}/M1"): for i in MAGIC_runs: - if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}"): + if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/{i[0]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night - if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/{i[1]}"): + if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M1/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') - if os.path.exists(MAGIC_DL1_dir+"/M2"): + if os.path.exists(f"{MAGIC_DL1_dir}/M2"): for i in MAGIC_runs: - if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}"): + if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/{i[0]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night - if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/{i[1]}"): + if not 
os.path.exists(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M2/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') elif identification == "1_M1M2": - if os.path.exists(MAGIC_DL1_dir+"/M1") & os.path.exists(MAGIC_DL1_dir+"/M2"): + if os.path.exists(f"{MAGIC_DL1_dir}/M1") & os.path.exists(f"{MAGIC_DL1_dir}/M2"): for i in MAGIC_runs: - if not os.path.exists(MAGIC_DL1_dir+f"/Merged/{i[0]}/Merged"): + if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/{i[0]}/Merged"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/Merged") f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --run-wise \n') else: for i in MAGIC_runs: - if not os.path.exists(MAGIC_DL1_dir+f"/Merged/Merged_{i[0]}"): + if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/Merged_{i[0]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/Merged_{i[0]}") #Creating a merged directory for each night f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --output-dir {MAGIC_DL1_dir}/Merged/Merged_{i[0]} \n') @@ -170,18 +170,18 @@ def mergeMC(target_dir, identification, scripts_dir, env_name): Tells which batch to create. 
Options: protons, gammadiffuse """ - process_name = "merging_"+target_dir.split("/")[-2:][1] + process_name = f"merging_{target_dir.split('/')[-2:][1]}" - MC_DL1_dir = target_dir+"/DL1/MC" - if not os.path.exists(MC_DL1_dir+f"/{identification}/Merged"): - os.mkdir(MC_DL1_dir+f"/{identification}/Merged") + MC_DL1_dir = f"{target_dir}/DL1/MC" + if not os.path.exists(f"{MC_DL1_dir}/{identification}/Merged"): + os.mkdir(f"{MC_DL1_dir}/{identification}/Merged") if identification == "protons": - list_of_nodes = np.sort(glob.glob(MC_DL1_dir+f"/{identification}/train/node*")) + list_of_nodes = np.sort(glob.glob(f"{MC_DL1_dir}/{identification}/train/node*")) else: - list_of_nodes = np.sort(glob.glob(MC_DL1_dir+f"/{identification}/node*")) + list_of_nodes = np.sort(glob.glob(f"{MC_DL1_dir}/{identification}/node*")) - np.savetxt(MC_DL1_dir+f"/{identification}/list_of_nodes.txt",list_of_nodes, fmt='%s') + np.savetxt(f"{MC_DL1_dir}/{identification}/list_of_nodes.txt",list_of_nodes, fmt='%s') process_size = len(list_of_nodes) - 1 @@ -191,7 +191,7 @@ def mergeMC(target_dir, identification, scripts_dir, env_name): with open(f"Merge_{identification}.sh","w") as f: f.write('#!/bin/sh\n\n') f.write('#SBATCH -p short\n') - f.write('#SBATCH -J '+process_name+'\n') + f.write(f'#SBATCH -J {process_name}n') f.write(f"#SBATCH --array=0-{process_size}%50\n") f.write('#SBATCH --mem=7g\n') f.write('#SBATCH -N 1\n\n') @@ -201,7 +201,7 @@ def mergeMC(target_dir, identification, scripts_dir, env_name): f.write(f"SAMPLE_LIST=($(<{MC_DL1_dir}/{identification}/list_of_nodes.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") - f.write(f'export LOG={MC_DL1_dir}/{identification}/Merged'+'/merged_${SLURM_ARRAY_TASK_ID}.log\n') + f.write(f'export LOG={MC_DL1_dir}/{identification}/Merged'.join('/merged_${SLURM_ARRAY_TASK_ID}.log\n')) f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 
2>&1\n') @@ -230,7 +230,7 @@ def main(): - target_dir = str(Path(config["directories"]["workspace_dir"]))+"/"+config["directories"]["target_name"] + target_dir = f'{Path(config["directories"]["workspace_dir"])}/{config["directories"]["target_name"]}' MAGIC_runs_and_dates = config["general"]["MAGIC_runs"] MAGIC_runs = np.genfromtxt(MAGIC_runs_and_dates,dtype=str,delimiter=',') @@ -257,8 +257,8 @@ def main(): print("***** Running merge_hdf_files.py in the MAGIC data files...") - print("Process name: merging_"+target_dir.split("/")[-2:][1]) - print("To check the jobs submitted to the cluster, type: squeue -n merging_"+target_dir.split("/")[-2:][1]) + print(f"Process name: merging_{target_dir.split('/')[-2:][1]}") + print(f"To check the jobs submitted to the cluster, type: squeue -n merging_{target_dir.split('/')[-2:][1]}") #Below we run the bash scripts to merge the MAGIC files list_of_merging_scripts = np.sort(glob.glob("Merge_*.sh")) @@ -267,7 +267,7 @@ def main(): if n == 0: launch_jobs = f"merging{n}=$(sbatch --parsable {run})" else: - launch_jobs = launch_jobs + f" && merging{n}=$(sbatch --parsable --dependency=afterany:$merging{n-1} {run})" + launch_jobs = f"{launch_jobs} && merging{n}=$(sbatch --parsable --dependency=afterany:$merging{n-1} {run})" #print(launch_jobs) os.system(launch_jobs) diff --git a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py index 2afe3c100..a3d74499c 100644 --- a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py +++ b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py @@ -37,17 +37,17 @@ def config_file_gen(ids, target_dir): """ Here we create the configuration file needed for transforming DL0 into DL1 """ - with open(target_dir+'/config_DL0_to_DL1.yaml','w') as f: + with open(f'{target_dir}/config_DL0_to_DL1.yaml','w') as f: - #f.write("directories:\n target: "+target_dir+"\n\n") + #f.write(f"directories:\n target: {target_dir}\n\n") 
lines_of_config_file = [ "mc_tel_ids:", - "\n LST-1: "+str(ids[0]), - "\n LST-2: "+str(ids[1]), - "\n LST-3: "+str(ids[2]), - "\n LST-4: "+str(ids[3]), - "\n MAGIC-I: "+str(ids[4]), - "\n MAGIC-II: "+str(ids[5]), + f"\n LST-1: {ids[0]}", + f"\n LST-2: {ids[1]}", + f"\n LST-3: {ids[2]}", + f"\n LST-4: {ids[3]}", + f"\n MAGIC-I: {ids[4]}", + f"\n MAGIC-II: {ids[5]}", "\n", "\nLST:", "\n image_extractor:", @@ -124,16 +124,16 @@ def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, process_name = target_dir.split("/")[-2:][1] - list_of_nodes = glob.glob(MC_path+"/node*") - with open(target_dir+f"/list_nodes_{particle_type}_complete.txt","w") as f:# creating list_nodes_gammas_complete.txt + list_of_nodes = glob.glob(f"{MC_path}/node*") + with open(f"{target_dir}/list_nodes_{particle_type}_complete.txt","w") as f:# creating list_nodes_gammas_complete.txt for i in list_of_nodes: - f.write(i+"/output_"+SimTel_version+"\n") + f.write(f"{i}/output_{SimTel_version}\n") - with open(target_dir+f"/list_folder_{particle_type}.txt","w") as f:# creating list_folder_gammas.txt + with open(f"{target_dir}/list_folder_{particle_type}.txt","w") as f:# creating list_folder_gammas.txt for i in list_of_nodes: - f.write(i.split("/")[-1]+"\n") + f.write(f'{i.split("/")[-1]}\n') @@ -145,23 +145,23 @@ def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, lines_of_config_file = [ "#!/bin/sh\n\n", "#SBATCH -p short\n", - "#SBATCH -J "+process_name+"\n\n", + f"#SBATCH -J {process_name}\n\n", "#SBATCH -N 1\n\n", "ulimit -l unlimited\n", "ulimit -s unlimited\n", "ulimit -a\n\n", "while read -r -u 3 lineA && read -r -u 4 lineB\n", "do\n", - " cd "+target_dir+f"/DL1/MC/{particle_type}\n", + f" cd {target_dir}/DL1/MC/{particle_type}\n", " mkdir $lineB\n", " cd $lineA\n", " ls -lR *.gz |wc -l\n", - " ls *.gz > "+target_dir+f"/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n", + f" ls *.gz > {target_dir}/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n", ' 
string=$lineA"/"\n', - " export file="+target_dir+f"/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n\n", - " cat $file | while read line; do echo $string${line} >>"+target_dir+f"/DL1/MC/{particle_type}/$lineB/list_dl0_ok.txt; done\n\n", + f" export file={target_dir}/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n\n", + " cat $file | while read line; do echo $string${line}"+f" >>{target_dir}/DL1/MC/{particle_type}/$lineB/list_dl0_ok.txt; done\n\n", ' echo "folder $lineB and node $lineA"\n', - 'done 3<"'+target_dir+f'/list_nodes_{particle_type}_complete.txt" 4<"'+target_dir+f'/list_folder_{particle_type}.txt"\n', + f'done 3<"{target_dir}/list_nodes_{particle_type}_complete.txt" 4<"{target_dir}/list_folder_{particle_type}.txt"\n', ""] f.writelines(lines_of_config_file) @@ -171,30 +171,30 @@ def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, ############################ bash script that applies lst1_magic_mc_dl0_to_dl1.py to all MC data files. ################################################################################################################ - number_of_nodes = glob.glob(MC_path+"/node*") + number_of_nodes = glob.glob(f"{MC_path}/node*") number_of_nodes = len(number_of_nodes) -1 with open(f"linking_MC_{particle_type}_paths_r.sh","w") as f: lines_of_config_file = [ '#!/bin/sh\n\n', '#SBATCH -p xxl\n', - '#SBATCH -J '+process_name+'\n', - '#SBATCH --array=0-'+str(number_of_nodes)+'%50\n', + f'#SBATCH -J {process_name}\n', + f'#SBATCH --array=0-{number_of_nodes}%50\n', '#SBATCH --mem=10g\n', '#SBATCH -N 1\n\n', 'ulimit -l unlimited\n', 'ulimit -s unlimited\n', 'ulimit -a\n', - 'cd '+target_dir+f'/DL1/MC/{particle_type}\n\n', - 'export INF='+target_dir+'\n', + f'cd {target_dir}/DL1/MC/{particle_type}\n\n', + f'export INF={target_dir}\n', f'SAMPLE_LIST=($(<$INF/list_folder_{particle_type}.txt))\n', 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n', 'cd $SAMPLE\n\n', 'export 
LOG='+target_dir+f'/DL1/MC/{particle_type}'+'/simtel_{$SAMPLE}_all.log\n', + f'export LOG={target_dir}/DL1/MC/{particle_type}'+'/simtel_{$SAMPLE}_all.log\n', 'cat list_dl0_ok.txt | while read line\n', 'do\n', - ' cd '+target_dir+'/../\n', - f' conda run -n {env_name} python {scripts_dir}/lst1_magic_mc_dl0_to_dl1.py --input-file $line --output-dir '+target_dir+f'/DL1/MC/{particle_type}/$SAMPLE --config-file '+target_dir+'/config_DL0_to_DL1.yaml --focal_length_choice '+focal_length+ '>>$LOG 2>&1\n\n', + f' cd {target_dir}/../\n', + f' conda run -n {env_name} python {scripts_dir}/lst1_magic_mc_dl0_to_dl1.py --input-file $line --output-dir {target_dir}/DL1/MC/{particle_type}/$SAMPLE --config-file {target_dir}/config_DL0_to_DL1.yaml --focal_length_choice {focal_length}>>$LOG 2>&1\n\n', 'done\n', ""] f.writelines(lines_of_config_file) @@ -214,7 +214,7 @@ def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir, with open("linking_MAGIC_data_paths.sh","w") as f: f.write('#!/bin/sh\n\n') f.write('#SBATCH -p short\n') - f.write('#SBATCH -J '+process_name+'\n') + f.write(f'#SBATCH -J {process_name}\n') f.write('#SBATCH -N 1\n\n') f.write('ulimit -l unlimited\n') f.write('ulimit -s unlimited\n') @@ -222,16 +222,16 @@ def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir, if telescope_ids[-1] > 0: for i in MAGIC_runs: - f.write('export IN1=/fefs/onsite/common/MAGIC/data/M2/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+'\n') - f.write('export OUT1='+target_dir+'/DL1/Observations/M2/'+i[0]+'/'+i[1]+'\n') - f.write('ls $IN1/*'+i[1][-2:]+'.*_Y_*.root > $OUT1/list_dl0.txt\n') + f.write(f'export IN1=/fefs/onsite/common/MAGIC/data/M2/event/Calibrated/{i[0].split("_")[0]}/{i[0].split("_")[1]}/{i[0].split("_")[2]}\n') + f.write(f'export OUT1={target_dir}/DL1/Observations/M2/{i[0]}/{i[1]}\n') + f.write(f'ls $IN1/*{i[1][-2:]}.*_Y_*.root > $OUT1/list_dl0.txt\n') f.write('\n') if telescope_ids[-2] 
> 0: for i in MAGIC_runs: - f.write('export IN1=/fefs/onsite/common/MAGIC/data/M1/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+'\n') - f.write('export OUT1='+target_dir+'/DL1/Observations/M1/'+i[0]+'/'+i[1]+'\n') - f.write('ls $IN1/*'+i[1][-2:]+'.*_Y_*.root > $OUT1/list_dl0.txt\n') + f.write(f'export IN1=/fefs/onsite/common/MAGIC/data/M1/event/Calibrated/{i[0].split("_")[0]}/{i[0].split("_")[1]}/{i[0].split("_")[2]}\n') + f.write(f'export OUT1={target_dir}/DL1/Observations/M1/{i[0]}/{i[1]}\n') + f.write(f'ls $IN1/*{i[1][-2:]}.*_Y_*.root > $OUT1/list_dl0.txt\n') @@ -239,50 +239,50 @@ def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir, for i in MAGIC_runs: if telescope_ids[-1] > 0: - number_of_nodes = glob.glob('/fefs/onsite/common/MAGIC/data/M2/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+f'/*{i[1]}.*_Y_*.root') + number_of_nodes = glob.glob(f'/fefs/onsite/common/MAGIC/data/M2/event/Calibrated/{i[0].split("_")[0]}/{i[0].split("_")[1]}/{i[0].split("_")[2]}/*{i[1]}.*_Y_*.root') number_of_nodes = len(number_of_nodes) - 1 with open(f"MAGIC-II_dl0_to_dl1_run_{i[1]}.sh","w") as f: lines_of_config_file = [ '#!/bin/sh\n\n', '#SBATCH -p long\n', - '#SBATCH -J '+process_name+'\n', - '#SBATCH --array=0-'+str(number_of_nodes)+'\n', + f'#SBATCH -J {process_name}\n', + f'#SBATCH --array=0-{number_of_nodes}\n', '#SBATCH -N 1\n\n', 'ulimit -l unlimited\n', 'ulimit -s unlimited\n', 'ulimit -a\n\n', - 'export OUTPUTDIR='+target_dir+'/DL1/Observations/M2/'+i[0]+'/'+i[1]+'\n', - 'cd '+target_dir+'/../\n', + f'export OUTPUTDIR={target_dir}/DL1/Observations/M2/{i[0]}/{i[1]}\n', + f'cd {target_dir}/../\n', 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', - f'conda run -n {env_name} python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR 
--config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', + f'conda run -n {env_name} python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_DL0_to_DL1.yaml >$LOG 2>&1\n', ""] f.writelines(lines_of_config_file) if telescope_ids[-2] > 0: - number_of_nodes = glob.glob('/fefs/onsite/common/MAGIC/data/M1/event/Calibrated/'+i[0].split("_")[0]+"/"+i[0].split("_")[1]+"/"+i[0].split("_")[2]+f'/*{i[1]}.*_Y_*.root') + number_of_nodes = glob.glob(f'/fefs/onsite/common/MAGIC/data/M1/event/Calibrated/{i[0].split("_")[0]}/{i[0].split("_")[1]}/{i[0].split("_")[2]}/*{i[1]}.*_Y_*.root') number_of_nodes = len(number_of_nodes) - 1 with open(f"MAGIC-I_dl0_to_dl1_run_{i[1]}.sh","w") as f: lines_of_config_file = [ '#!/bin/sh\n\n', '#SBATCH -p long\n', - '#SBATCH -J '+process_name+'\n', - '#SBATCH --array=0-'+str(number_of_nodes)+'\n', + f'#SBATCH -J {process_name}\n', + f'#SBATCH --array=0-{number_of_nodes}\n', '#SBATCH -N 1\n\n', 'ulimit -l unlimited\n', 'ulimit -s unlimited\n', 'ulimit -a\n\n', - 'export OUTPUTDIR='+target_dir+'/DL1/Observations/M1/'+i[0]+'/'+i[1]+'\n', - 'cd '+target_dir+'/../\n', + f'export OUTPUTDIR={target_dir}/DL1/Observations/M1/{i[0]}/{i[1]}\n', + f'cd {target_dir}/../\n', 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', - f'conda run -n {env_name} python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file '+target_dir+'/config_DL0_to_DL1.yaml >$LOG 2>&1\n', + f'conda run -n {env_name} python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_DL0_to_DL1.yaml >$LOG 2>&1\n', ""] f.writelines(lines_of_config_file) @@ -300,27 +300,27 @@ def directories_generator(target_dir, telescope_ids,MAGIC_runs): if not os.path.exists(target_dir): os.mkdir(target_dir) - 
os.mkdir(target_dir+"/DL1") - os.mkdir(target_dir+"/DL1/Observations") - os.mkdir(target_dir+"/DL1/MC") - os.mkdir(target_dir+"/DL1/MC/gammas") - os.mkdir(target_dir+"/DL1/MC/gammadiffuse") - os.mkdir(target_dir+"/DL1/MC/electrons") - os.mkdir(target_dir+"/DL1/MC/protons") - os.mkdir(target_dir+"/DL1/MC/helium") + os.mkdir(f"{target_dir}/DL1") + os.mkdir(f"{target_dir}/DL1/Observations") + os.mkdir(f"{target_dir}/DL1/MC") + os.mkdir(f"{target_dir}/DL1/MC/gammas") + os.mkdir(f"{target_dir}/DL1/MC/gammadiffuse") + os.mkdir(f"{target_dir}/DL1/MC/electrons") + os.mkdir(f"{target_dir}/DL1/MC/protons") + os.mkdir(f"{target_dir}/DL1/MC/helium") else: - overwrite = input("MC directory for "+target_dir.split("/")[-1]+" already exists. Would you like to overwrite it? [only 'y' or 'n']: ") + overwrite = input(f'MC directory for {target_dir.split("/")[-1]} already exists. Would you like to overwrite it? [only "y" or "n"]: ') if overwrite == "y": - os.system("rm -r "+target_dir) + os.system(f"rm -r {target_dir}") os.mkdir(target_dir) - os.mkdir(target_dir+"/DL1") - os.mkdir(target_dir+"/DL1/Observations") - os.mkdir(target_dir+"/DL1/MC") - os.mkdir(target_dir+"/DL1/MC/gammas") - os.mkdir(target_dir+"/DL1/MC/gammadiffuse") - os.mkdir(target_dir+"/DL1/MC/electrons") - os.mkdir(target_dir+"/DL1/MC/protons") - os.mkdir(target_dir+"/DL1/MC/helium") + os.mkdir(f"{target_dir}/DL1") + os.mkdir(f"{target_dir}/DL1/Observations") + os.mkdir(f"{target_dir}/DL1/MC") + os.mkdir(f"{target_dir}/DL1/MC/gammas") + os.mkdir(f"{target_dir}/DL1/MC/gammadiffuse") + os.mkdir(f"{target_dir}/DL1/MC/electrons") + os.mkdir(f"{target_dir}/DL1/MC/protons") + os.mkdir(f"{target_dir}/DL1/MC/helium") else: print("Directory not modified.") @@ -331,24 +331,24 @@ def directories_generator(target_dir, telescope_ids,MAGIC_runs): ########################################### if telescope_ids[-1] > 0: - if not os.path.exists(target_dir+"/DL1/Observations/M2"): - os.mkdir(target_dir+"/DL1/Observations/M2") + if not 
os.path.exists(f"{target_dir}/DL1/Observations/M2"): + os.mkdir(f"{target_dir}/DL1/Observations/M2") for i in MAGIC_runs: - if not os.path.exists(target_dir+"/DL1/Observations/M2/"+i[0]): - os.mkdir(target_dir+"/DL1/Observations/M2/"+i[0]) - os.mkdir(target_dir+"/DL1/Observations/M2/"+i[0]+"/"+i[1]) + if not os.path.exists(f"{target_dir}/DL1/Observations/M2/{i[0]}"): + os.mkdir(f"{target_dir}/DL1/Observations/M2/{i[0]}") + os.mkdir(f"{target_dir}/DL1/Observations/M2/{i[0]}/{i[1]}") else: - os.mkdir(target_dir+"/DL1/Observations/M2/"+i[0]+"/"+i[1]) + os.mkdir(f"{target_dir}/DL1/Observations/M2/{i[0]}/{i[1]}") if telescope_ids[-2] > 0: - if not os.path.exists(target_dir+"/DL1/Observations/M1"): - os.mkdir(target_dir+"/DL1/Observations/M1") + if not os.path.exists(f"{target_dir}/DL1/Observations/M1"): + os.mkdir(f"{target_dir}/DL1/Observations/M1") for i in MAGIC_runs: - if not os.path.exists(target_dir+"/DL1/Observations/M1/"+i[0]): - os.mkdir(target_dir+"/DL1/Observations/M1/"+i[0]) - os.mkdir(target_dir+"/DL1/Observations/M1/"+i[0]+"/"+i[1]) + if not os.path.exists(f"{target_dir}/DL1/Observations/M1/{i[0]}"): + os.mkdir(f"{target_dir}/DL1/Observations/M1/{i[0]}") + os.mkdir(f"{target_dir}/DL1/Observations/M1/{i[0]}/{i[1]}") else: - os.mkdir(target_dir+"/DL1/Observations/M1/"+i[0]+"/"+i[1]) + os.mkdir(f"{target_dir}/DL1/Observations/M1/{i[0]}/{i[1]}") @@ -400,7 +400,7 @@ def main(): focal_length = config["general"]["focal_length"] #Below we read the data paths - target_dir = str(Path(config["directories"]["workspace_dir"]))+"/"+config["directories"]["target_name"] + target_dir = f'{Path(config["directories"]["workspace_dir"])}/{config["directories"]["target_name"]}' MC_gammas = str(Path(config["directories"]["MC_gammas"])) #MC_electrons = str(Path(config["directories"]["MC_electrons"])) #MC_helium = str(Path(config["directories"]["MC_helium"])) @@ -436,7 +436,7 @@ def main(): if n == 0: launch_jobs_MC = f"linking{n}=$(sbatch --parsable {run}) && running{n}=$(sbatch 
--parsable --dependency=afterany:$linking{n} {run[0:-3]}_r.sh)" else: - launch_jobs_MC = launch_jobs_MC + f" && linking{n}=$(sbatch --parsable --dependency=afterany:$running{n-1} {run}) && running{n}=$(sbatch --parsable --dependency=afterany:$linking{n} {run[0:-3]}_r.sh)" + launch_jobs_MC = f"{launch_jobs_MC} && linking{n}=$(sbatch --parsable --dependency=afterany:$running{n-1} {run}) && running{n}=$(sbatch --parsable --dependency=afterany:$linking{n} {run[0:-3]}_r.sh)" os.system(launch_jobs_MC) @@ -452,7 +452,7 @@ def main(): if n == 0: launch_jobs = f"linking=$(sbatch --parsable linking_MAGIC_data_paths.sh) && RES{n}=$(sbatch --parsable --dependency=afterany:$linking {run})" else: - launch_jobs = launch_jobs + f" && RES{n}=$(sbatch --parsable --dependency=afterany:$RES{n-1} {run})" + launch_jobs = f"{launch_jobs} && RES{n}=$(sbatch --parsable --dependency=afterany:$RES{n-1} {run})" os.system(launch_jobs) From 273690bef075d2ac76089882afd70173770240e4 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Mon, 2 Oct 2023 13:37:27 +0200 Subject: [PATCH 34/76] Console scripts in bash --- .../resources/test_config_general.yaml | 3 +- magicctapipe/scripts/lst1_magic/README.md | 1 - .../scripts/lst1_magic/coincident_events.py | 8 ++--- ...ing_runs_and_splitting_training_samples.py | 31 +++++++++---------- .../lst1_magic/setting_up_config_and_dir.py | 24 +++++++------- .../scripts/lst1_magic/stereo_events.py | 19 ++++++------ 6 files changed, 41 insertions(+), 45 deletions(-) diff --git a/magicctapipe/resources/test_config_general.yaml b/magicctapipe/resources/test_config_general.yaml index 3f30ae127..7e96f5ce4 100644 --- a/magicctapipe/resources/test_config_general.yaml +++ b/magicctapipe/resources/test_config_general.yaml @@ -7,8 +7,7 @@ mc_tel_ids: MAGIC-II: 3 directories: - workspace_dir : "/fefs/aswg/workspace/raniere/" - scripts_dir : "/fefs/aswg/workspace/raniere/" + workspace_dir : "/fefs/aswg/workspace/raniere/" target_name : "CrabTeste" MC_gammas : 
"/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" diff --git a/magicctapipe/scripts/lst1_magic/README.md b/magicctapipe/scripts/lst1_magic/README.md index 81c14a6a3..6ed4afecc 100644 --- a/magicctapipe/scripts/lst1_magic/README.md +++ b/magicctapipe/scripts/lst1_magic/README.md @@ -59,7 +59,6 @@ mc_tel_ids: directories: workspace_dir : "/fefs/aswg/workspace/yourname/yourprojectname/" - scripts_dir : "/fefs/aswg/workspace/yourname/yourprojectname/" target_name : "CrabTeste" MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" diff --git a/magicctapipe/scripts/lst1_magic/coincident_events.py b/magicctapipe/scripts/lst1_magic/coincident_events.py index 448acdc4f..60eea4090 100644 --- a/magicctapipe/scripts/lst1_magic/coincident_events.py +++ b/magicctapipe/scripts/lst1_magic/coincident_events.py @@ -84,7 +84,7 @@ def linking_lst(target_dir, LST_runs, LST_version): -def bash_coincident(target_dir, scripts_dir, env_name): +def bash_coincident(target_dir, env_name): """ This function generates the bashscript for running the coincidence analysis. 
@@ -118,7 +118,7 @@ def bash_coincident(target_dir, scripts_dir, env_name): f.write("SAMPLE_LIST=($(<$OUTPUTDIR/list_LST.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") f.write("export LOG=$OUTPUTDIR/coincidence_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n {env_name} python {scripts_dir}/lst1_magic_event_coincidence.py --input-file-lst $SAMPLE --input-dir-magic $INM --output-dir $OUTPUTDIR --config-file {target_dir}/config_coincidence.yaml >$LOG 2>&1") + f.write(f"conda run -n {env_name} lst1_magic_event_coincidence --input-file-lst $SAMPLE --input-dir-magic $INM --output-dir $OUTPUTDIR --config-file {target_dir}/config_coincidence.yaml >$LOG 2>&1") @@ -147,7 +147,7 @@ def main(): telescope_ids = list(config["mc_tel_ids"].values()) target_dir = f'{Path(config["directories"]["workspace_dir"])}/{config["directories"]["target_name"]}' - scripts_dir = str(Path(config["directories"]["scripts_dir"])) + env_name = config["general"]["env_name"] LST_runs_and_dates = config["general"]["LST_runs"] @@ -164,7 +164,7 @@ def main(): print("***** Generating the bashscript...") - bash_coincident(target_dir, scripts_dir, env_name) + bash_coincident(target_dir, env_name) print("***** Submitting processess to the cluster...") diff --git a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py index 4ac9de277..a16fa3ab2 100644 --- a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py +++ b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py @@ -93,7 +93,7 @@ def split_train_test(target_dir, train_fraction): os.system(f"mv {list_of_dir[directory]}*.h5 {proton_dir}/../protons_test/{list_of_dir[directory].split('/')[-2]}") os.system(f"rm -r {list_of_dir[directory]}") -def merge(target_dir, identification, MAGIC_runs, scripts_dir, env_name): +def merge(target_dir, identification, MAGIC_runs, 
env_name): """ This function creates the bash scripts to run merge_hdf_files.py in all MAGIC subruns. @@ -131,7 +131,7 @@ def merge(target_dir, identification, MAGIC_runs, scripts_dir, env_name): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run - f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M1/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') + f.write(f'conda run -n {env_name} merge_hdf_files --input-dir {MAGIC_DL1_dir}/M1/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') if os.path.exists(f"{MAGIC_DL1_dir}/M2"): for i in MAGIC_runs: @@ -139,25 +139,25 @@ def merge(target_dir, identification, MAGIC_runs, scripts_dir, env_name): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run - f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/M2/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') + f.write(f'conda run -n {env_name} merge_hdf_files --input-dir {MAGIC_DL1_dir}/M2/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') elif identification == "1_M1M2": if os.path.exists(f"{MAGIC_DL1_dir}/M1") & os.path.exists(f"{MAGIC_DL1_dir}/M2"): for i in MAGIC_runs: if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/{i[0]}/Merged"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/Merged") - f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --run-wise \n') + f.write(f'conda run 
-n {env_name} merge_hdf_files --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --run-wise \n') else: for i in MAGIC_runs: if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/Merged_{i[0]}"): os.mkdir(f"{MAGIC_DL1_dir}/Merged/Merged_{i[0]}") #Creating a merged directory for each night - f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --output-dir {MAGIC_DL1_dir}/Merged/Merged_{i[0]} \n') + f.write(f'conda run -n {env_name} merge_hdf_files --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --output-dir {MAGIC_DL1_dir}/Merged/Merged_{i[0]} \n') -def mergeMC(target_dir, identification, scripts_dir, env_name): +def mergeMC(target_dir, identification, env_name): """ This function creates the bash scripts to run merge_hdf_files.py in all MC runs. @@ -202,7 +202,7 @@ def mergeMC(target_dir, identification, scripts_dir, env_name): f.write(f"SAMPLE_LIST=($(<{MC_DL1_dir}/{identification}/list_of_nodes.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") f.write(f'export LOG={MC_DL1_dir}/{identification}/Merged'+'/merged_${SLURM_ARRAY_TASK_ID}.log\n') - f.write(f'conda run -n {env_name} python {scripts_dir}/merge_hdf_files.py --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 2>&1\n') + f.write(f'conda run -n {env_name} merge_hdf_files --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 2>&1\n') @@ -236,8 +236,7 @@ def main(): MAGIC_runs = np.genfromtxt(MAGIC_runs_and_dates,dtype=str,delimiter=',') train_fraction = float(config["general"]["proton_train_fraction"]) - - scripts_dir = str(Path(config["directories"]["scripts_dir"])) + env_name = config["general"]["env_name"] #Here we slice the proton MC data into "train" and "test": @@ -245,15 +244,15 @@ def main(): split_train_test(target_dir, train_fraction) print("***** Generating merge bashscripts...") - merge(target_dir, "0_subruns", 
MAGIC_runs, scripts_dir, env_name) #generating the bash script to merge the subruns - merge(target_dir, "1_M1M2", MAGIC_runs, scripts_dir, env_name) #generating the bash script to merge the M1 and M2 runs - merge(target_dir, "2_nights", MAGIC_runs, scripts_dir, env_name) #generating the bash script to merge all runs per night + merge(target_dir, "0_subruns", MAGIC_runs, env_name) #generating the bash script to merge the subruns + merge(target_dir, "1_M1M2", MAGIC_runs, env_name) #generating the bash script to merge the M1 and M2 runs + merge(target_dir, "2_nights", MAGIC_runs, env_name) #generating the bash script to merge all runs per night print("***** Generating mergeMC bashscripts...") - mergeMC(target_dir, "protons", scripts_dir, env_name) #generating the bash script to merge the files - mergeMC(target_dir, "gammadiffuse", scripts_dir, env_name) #generating the bash script to merge the files - mergeMC(target_dir, "gammas", scripts_dir, env_name) #generating the bash script to merge the files - mergeMC(target_dir, "protons_test", scripts_dir, env_name) + mergeMC(target_dir, "protons", env_name) #generating the bash script to merge the files + mergeMC(target_dir, "gammadiffuse", env_name) #generating the bash script to merge the files + mergeMC(target_dir, "gammas", env_name) #generating the bash script to merge the files + mergeMC(target_dir, "protons_test", env_name) print("***** Running merge_hdf_files.py in the MAGIC data files...") diff --git a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py index a3d74499c..956ca6a15 100644 --- a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py +++ b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py @@ -114,7 +114,7 @@ def config_file_gen(ids, target_dir): -def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, focal_length, scripts_dir, env_name): +def lists_and_bash_generator(particle_type, 
target_dir, MC_path, SimTel_version, focal_length, env_name): """ This function creates the lists list_nodes_gamma_complete.txt and list_folder_gamma.txt with the MC file paths. @@ -194,7 +194,7 @@ def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, 'cat list_dl0_ok.txt | while read line\n', 'do\n', f' cd {target_dir}/../\n', - f' conda run -n {env_name} python {scripts_dir}/lst1_magic_mc_dl0_to_dl1.py --input-file $line --output-dir {target_dir}/DL1/MC/{particle_type}/$SAMPLE --config-file {target_dir}/config_DL0_to_DL1.yaml --focal_length_choice {focal_length}>>$LOG 2>&1\n\n', + f' conda run -n {env_name} lst1_magic_mc_dl0_to_dl1 --input-file $line --output-dir {target_dir}/DL1/MC/{particle_type}/$SAMPLE --config-file {target_dir}/config_DL0_to_DL1.yaml --focal_length_choice {focal_length}>>$LOG 2>&1\n\n', 'done\n', ""] f.writelines(lines_of_config_file) @@ -203,7 +203,7 @@ def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, -def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir, env_name): +def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, env_name): """ Below we create a bash script that links the the MAGIC data paths to each subdirectory. 
@@ -257,7 +257,7 @@ def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir, 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', - f'conda run -n {env_name} python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_DL0_to_DL1.yaml >$LOG 2>&1\n', + f'conda run -n {env_name} magic_calib_to_dl1 --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_DL0_to_DL1.yaml >$LOG 2>&1\n', ""] f.writelines(lines_of_config_file) @@ -282,7 +282,7 @@ def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir, 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', - f'conda run -n {env_name} python {scripts_dir}/magic_calib_to_dl1.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_DL0_to_DL1.yaml >$LOG 2>&1\n', + f'conda run -n {env_name} magic_calib_to_dl1 --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_DL0_to_DL1.yaml >$LOG 2>&1\n', ""] f.writelines(lines_of_config_file) @@ -407,7 +407,7 @@ def main(): MC_protons = str(Path(config["directories"]["MC_protons"])) MC_gammadiff = str(Path(config["directories"]["MC_gammadiff"])) - scripts_dir = str(Path(config["directories"]["scripts_dir"])) + env_name = config["general"]["env_name"] @@ -421,11 +421,11 @@ def main(): #Below we run the analysis on the MC data if (args.analysis_type=='onlyMC') or (args.analysis_type=='doEverything'): - lists_and_bash_generator("gammas", target_dir, MC_gammas, SimTel_version, focal_length, scripts_dir, env_name) #gammas - #lists_and_bash_generator("electrons", target_dir, MC_electrons, SimTel_version, focal_length, scripts_dir, env_name) #electrons - #lists_and_bash_generator("helium", 
target_dir, MC_helium, SimTel_version, focal_length, scripts_dir, env_name) #helium - lists_and_bash_generator("protons", target_dir, MC_protons, SimTel_version, focal_length, scripts_dir, env_name) #protons - lists_and_bash_generator("gammadiffuse", target_dir, MC_gammadiff, SimTel_version, focal_length, scripts_dir, env_name) #gammadiffuse + lists_and_bash_generator("gammas", target_dir, MC_gammas, SimTel_version, focal_length, env_name) #gammas + #lists_and_bash_generator("electrons", target_dir, MC_electrons, SimTel_version, focal_length, env_name) #electrons + #lists_and_bash_generator("helium", target_dir, MC_helium, SimTel_version, focal_length, env_name) #helium + lists_and_bash_generator("protons", target_dir, MC_protons, SimTel_version, focal_length, env_name) #protons + lists_and_bash_generator("gammadiffuse", target_dir, MC_gammadiff, SimTel_version, focal_length, env_name) #gammadiffuse #Here we do the MC DL0 to DL1 conversion: list_of_MC = glob.glob("linking_MC_*s.sh") @@ -443,7 +443,7 @@ def main(): #Below we run the analysis on the MAGIC data if (args.analysis_type=='onlyMAGIC') or (args.analysis_type=='doEverything'): - lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, scripts_dir, env_name) #MAGIC real data + lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, env_name) #MAGIC real data if (telescope_ids[-2] > 0) or (telescope_ids[-1] > 0): list_of_MAGIC_runs = glob.glob("MAGIC-*.sh") diff --git a/magicctapipe/scripts/lst1_magic/stereo_events.py b/magicctapipe/scripts/lst1_magic/stereo_events.py index c431588bb..3f531d8c7 100644 --- a/magicctapipe/scripts/lst1_magic/stereo_events.py +++ b/magicctapipe/scripts/lst1_magic/stereo_events.py @@ -39,7 +39,7 @@ def configfile_stereo(ids, target_dir): -def bash_stereo(target_dir, scripts_dir, env_name): +def bash_stereo(target_dir, env_name): """ This function generates the bashscript for running the stereo analysis. 
@@ -80,10 +80,10 @@ def bash_stereo(target_dir, scripts_dir, env_name): f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n {env_name} python {scripts_dir}/lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") + f.write(f"conda run -n {env_name} lst1_magic_stereo_reco --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") -def bash_stereoMC(target_dir, identification, scripts_dir, env_name): +def bash_stereoMC(target_dir, identification, env_name): """ This function generates the bashscript for running the stereo analysis. @@ -122,7 +122,7 @@ def bash_stereoMC(target_dir, identification, scripts_dir, env_name): f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n {env_name} python {scripts_dir}/lst1_magic_stereo_reco.py --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") + f.write(f"conda run -n {env_name} lst1_magic_stereo_reco --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") @@ -153,7 +153,6 @@ def main(): target_dir = f'{Path(config["directories"]["workspace_dir"])}/{config["directories"]["target_name"]}' - scripts_dir = str(Path(config["directories"]["scripts_dir"])) env_name = config["general"]["env_name"] @@ -164,13 +163,13 @@ def main(): configfile_stereo(telescope_ids, target_dir) print("***** Generating the bashscript...") - bash_stereo(target_dir, scripts_dir, env_name) + bash_stereo(target_dir, env_name) print("***** Generating the bashscript for MCs...") - bash_stereoMC(target_dir,"gammadiffuse", scripts_dir, env_name) - 
bash_stereoMC(target_dir,"gammas", scripts_dir, env_name) - bash_stereoMC(target_dir,"protons", scripts_dir, env_name) - bash_stereoMC(target_dir,"protons_test", scripts_dir, env_name) + bash_stereoMC(target_dir,"gammadiffuse", env_name) + bash_stereoMC(target_dir,"gammas", env_name) + bash_stereoMC(target_dir,"protons", env_name) + bash_stereoMC(target_dir,"protons_test", env_name) print("***** Submitting processes to the cluster...") print(f"Process name: {target_dir.split('/')[-2:][1]}_stereo") From 5f96f5b62b451c66b18018326afdfd12b31b2e59 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Mon, 2 Oct 2023 14:06:16 +0200 Subject: [PATCH 35/76] Calibration docstring --- magicctapipe/image/calib.py | 52 +++++++++++++++++++++++++++++++++++-- 1 file changed, 50 insertions(+), 2 deletions(-) diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index 20866cac1..677480442 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -17,7 +17,35 @@ def Calibrate_LST(event, tel_id, rng, config_lst, camera_geoms, calibrator_lst, increase_nsb, use_time_delta_cleaning, use_dynamic_cleaning ): """ - This function computes and returns signal_pixels, image, and peak_time for LST + This function computes and returns some information for a single event of a telescope of LST type + + Parameters + ---------- + event: event + From an EventSource + tel_id: int + telescope ID + rng: numpy Random Generator + If increase_nsb=True, used to add noise in camera pixels + config_lst: dictionary + Parameters for image extraction and calibration + camera_geoms: telescope.camera.geometry + Camera geometry + calibrator_lst: CameraCalibrator (ctapipe.calib) + ctapipe object needed to calibrate the camera + increase_nsb: bool + Whether to add noise in camera pixels + use_time_delta_cleaning: bool + Whether to use this kind of cleaning (cf. ctapipe) + use_dynamic_cleaning: bool + Whether to use this kind of cleaning (cf. 
lstchain) + + Returns + ------- + signal_pixels: Mask of the pixels selected by the cleaning + image: Array of number of p.e. in the camera pixels + peak_time: Array of the signal peak time in the camera pixels + """ calibrator_lst._calibrate_dl0(event, tel_id) @@ -76,7 +104,27 @@ def Calibrate_LST(event, tel_id, rng, config_lst, camera_geoms, calibrator_lst, def Calibrate_MAGIC(event, tel_id, config_magic, magic_clean, calibrator_magic): """ - This function computes and returns signal_pixels, image, and peak_time for MAGIC + This function computes and returns some information for a single event of a telescope of MAGIC type + + Parameters + ---------- + event: event + From an EventSource + tel_id: int + telescope ID + config_magic: dictionary + Parameters for image extraction and calibration + magic_clean: dictionary (1 entry per MAGIC telescope) + Each entry is a MAGICClean object using the telescope camera geometry + calibrator_magic: CameraCalibrator (ctapipe.calib) + ctapipe object needed to calibrate the camera + + + Returns + ------- + signal_pixels: Mask of the pixels selected by the cleaning + image: Array of number of p.e. 
in the camera pixels + peak_time: Array of the signal peak time in the camera pixels """ calibrator_magic._calibrate_dl0(event, tel_id) From 174e37710e998e1d070c1e7abc2aecea82cbfc1c Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Mon, 2 Oct 2023 14:14:34 +0200 Subject: [PATCH 36/76] Fixed Calibrate_LST (input) --- magicctapipe/image/calib.py | 26 +++++++++---------- .../lst1_magic/lst1_magic_mc_dl0_to_dl1.py | 18 +++---------- 2 files changed, 17 insertions(+), 27 deletions(-) diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index 677480442..e401fec53 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -7,14 +7,16 @@ from lstchain.image.cleaning import apply_dynamic_cleaning from lstchain.image.modifier import ( add_noise_in_pixels, - random_psf_smearer, + random_psf_smearer, + set_numba_seed ) + __all__ = [ "Calibrate_LST", "Calibrate_MAGIC" ] -def Calibrate_LST(event, tel_id, rng, config_lst, camera_geoms, calibrator_lst, increase_nsb, use_time_delta_cleaning, use_dynamic_cleaning ): +def Calibrate_LST(event, tel_id, obs_id, config_lst, camera_geoms, calibrator_lst): """ This function computes and returns some information for a single event of a telescope of LST type @@ -24,21 +26,16 @@ def Calibrate_LST(event, tel_id, rng, config_lst, camera_geoms, calibrator_lst, event: event From an EventSource tel_id: int - telescope ID - rng: numpy Random Generator - If increase_nsb=True, used to add noise in camera pixels + Telescope ID + obs_id: int + Observation ID config_lst: dictionary Parameters for image extraction and calibration camera_geoms: telescope.camera.geometry Camera geometry calibrator_lst: CameraCalibrator (ctapipe.calib) ctapipe object needed to calibrate the camera - increase_nsb: bool - Whether to add noise in camera pixels - use_time_delta_cleaning: bool - Whether to use this kind of cleaning (cf. ctapipe) - use_dynamic_cleaning: bool - Whether to use this kind of cleaning (cf. 
lstchain) + Returns ------- @@ -53,15 +50,18 @@ def Calibrate_LST(event, tel_id, rng, config_lst, camera_geoms, calibrator_lst, image = event.dl1.tel[tel_id].image.astype(np.float64) peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - + increase_nsb = config_lst["increase_nsb"].pop("use") increase_psf = config_lst["increase_psf"]["use"] - use_only_main_island = config_lst["use_only_main_island"] + use_time_delta_cleaning = config_lst["time_delta_cleaning"].pop("use") + use_dynamic_cleaning = config_lst["dynamic_cleaning"].pop("use") if increase_nsb: + rng = np.random.default_rng(obs_id) # Add extra noise in pixels image = add_noise_in_pixels(rng, image, **config_lst["increase_nsb"]) if increase_psf: + set_numba_seed(obs_id) # Smear the image image = random_psf_smearer( image=image, diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index 6906d0392..c802a3ecf 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -44,9 +44,7 @@ ) from ctapipe.instrument import SubarrayDescription from ctapipe.io import EventSource, HDF5TableWriter -from lstchain.image.modifier import ( - set_numba_seed, -) + from magicctapipe.image import MAGICClean from magicctapipe.image.calib import Calibrate_LST, Calibrate_MAGIC from magicctapipe.io import SimEventInfoContainer, format_object @@ -129,14 +127,7 @@ def mc_dl0_to_dl1(input_file, output_dir, config, focal_length): logger.info("\nLST PSF modifier:") logger.info(format_object(config_lst["increase_psf"])) - increase_nsb = config_lst["increase_nsb"].pop("use") - increase_psf = config_lst["increase_psf"]["use"] - - if increase_nsb: - rng = np.random.default_rng(obs_id) - - if increase_psf: - set_numba_seed(obs_id) + logger.info("\nLST tailcuts cleaning:") logger.info(format_object(config_lst["tailcuts_clean"])) @@ -147,8 +138,7 @@ def 
mc_dl0_to_dl1(input_file, output_dir, config, focal_length): logger.info("\nLST dynamic cleaning:") logger.info(format_object(config_lst["dynamic_cleaning"])) - use_time_delta_cleaning = config_lst["time_delta_cleaning"].pop("use") - use_dynamic_cleaning = config_lst["dynamic_cleaning"].pop("use") + use_only_main_island = config_lst["use_only_main_island"] logger.info(f"\nLST use only main island: {use_only_main_island}") @@ -242,7 +232,7 @@ def mc_dl0_to_dl1(input_file, output_dir, config, focal_length): if tel_id in LSTs_IDs: ##If the ID is in the LST list, we call Calibrate_LST() # Calibrate the LST-1 event - signal_pixels, image, peak_time = Calibrate_LST(event, tel_id, rng, config_lst, camera_geoms, calibrator_lst, increase_nsb, use_time_delta_cleaning, use_dynamic_cleaning) + signal_pixels, image, peak_time = Calibrate_LST(event, tel_id, obs_id, config_lst, camera_geoms, calibrator_lst) elif tel_id in MAGICs_IDs: # Calibrate the MAGIC event signal_pixels, image, peak_time = Calibrate_MAGIC(event, tel_id, config_magic, magic_clean, calibrator_magic) From 9923e480acdcc73fa9723524ff26e555894c8f80 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Mon, 2 Oct 2023 14:19:02 +0200 Subject: [PATCH 37/76] Bug --- magicctapipe/image/calib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index e401fec53..d38de1340 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -54,6 +54,7 @@ def Calibrate_LST(event, tel_id, obs_id, config_lst, camera_geoms, calibrator_ls increase_psf = config_lst["increase_psf"]["use"] use_time_delta_cleaning = config_lst["time_delta_cleaning"].pop("use") use_dynamic_cleaning = config_lst["dynamic_cleaning"].pop("use") + use_only_main_island = config_lst["use_only_main_island"] if increase_nsb: rng = np.random.default_rng(obs_id) From 972ee8ddbf2fcffa3980d92bac7ba884b018c460 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Mon, 2 Oct 2023 14:36:03 +0200 
Subject: [PATCH 38/76] one single Calibrate function --- magicctapipe/image/__init__.py | 3 +- magicctapipe/image/calib.py | 106 ++++++++++++++++++++++++++++++++- 2 files changed, 107 insertions(+), 2 deletions(-) diff --git a/magicctapipe/image/__init__.py b/magicctapipe/image/__init__.py index 99bd24db3..830acf510 100644 --- a/magicctapipe/image/__init__.py +++ b/magicctapipe/image/__init__.py @@ -21,5 +21,6 @@ "clean_image_params", "get_leakage", "Calibrate_LST", - "Calibrate_MAGIC" + "Calibrate_MAGIC", + "Calibrate" ] diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index d38de1340..26492aa26 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -13,7 +13,7 @@ __all__ = [ - "Calibrate_LST", "Calibrate_MAGIC" + "Calibrate_LST", "Calibrate_MAGIC", "Calibrate" ] def Calibrate_LST(event, tel_id, obs_id, config_lst, camera_geoms, calibrator_lst): @@ -144,3 +144,107 @@ def Calibrate_MAGIC(event, tel_id, config_magic, magic_clean, calibrator_magic): event_image=image, event_pulse_time=peak_time ) return signal_pixels, image, peak_time + + + + + +def Calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_geoms=None, magic_clean=None): + + """ + This function computes and returns some information for a single event of a telescope + + Parameters + ---------- + event: event + From an EventSource + tel_id: int + Telescope ID + config: dictionary + Parameters for image extraction and calibration + calibrator: CameraCalibrator (ctapipe.calib) + ctapipe object needed to calibrate the camera + LST_bool: bool + Whether the telescope is a LST + obs_id: int + Observation ID. Used in case of LST telescope + camera_geoms: telescope.camera.geometry + Camera geometry. Used in case of LST telescope + magic_clean: dictionary (1 entry per MAGIC telescope) + Each entry is a MAGICClean object using the telescope camera geometry.
Used in case of MAGIC telescope + + + Returns + ------- + signal_pixels: Mask of the pixels selected by the cleaning + image: Array of number of p.e. in the camera pixels + peak_time: Array of the signal peak time in the camera pixels + + """ + + calibrator._calibrate_dl0(event, tel_id) + calibrator._calibrate_dl1(event, tel_id) + + image = event.dl1.tel[tel_id].image.astype(np.float64) + peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) + if LST_bool==False: + use_charge_correction = config["charge_correction"]["use"] + + if use_charge_correction: + # Scale the charges by the correction factor + image *= config["charge_correction"]["factor"] + # Apply the image cleaning + signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( + event_image=image, event_pulse_time=peak_time + ) + else: + increase_nsb = config["increase_nsb"].pop("use") + increase_psf = config["increase_psf"]["use"] + use_time_delta_cleaning = config["time_delta_cleaning"].pop("use") + use_dynamic_cleaning = config["dynamic_cleaning"].pop("use") + use_only_main_island = config["use_only_main_island"] + + if increase_nsb: + rng = np.random.default_rng(obs_id) + # Add extra noise in pixels + image = add_noise_in_pixels(rng, image, **config["increase_nsb"]) + + if increase_psf: + set_numba_seed(obs_id) + # Smear the image + image = random_psf_smearer( + image=image, + fraction=config["increase_psf"]["fraction"], + indices=camera_geoms[tel_id].neighbor_matrix_sparse.indices, + indptr=camera_geoms[tel_id].neighbor_matrix_sparse.indptr, + ) + + # Apply the image cleaning + signal_pixels = tailcuts_clean( + camera_geoms[tel_id], image, **config["tailcuts_clean"] + ) + + if use_time_delta_cleaning: + signal_pixels = apply_time_delta_cleaning( + geom=camera_geoms[tel_id], + mask=signal_pixels, + arrival_times=peak_time, + **config["time_delta_cleaning"], + ) + + if use_dynamic_cleaning: + signal_pixels = apply_dynamic_cleaning( + image, signal_pixels, **config["dynamic_cleaning"] + ) 
+ + if use_only_main_island: + _, island_labels = number_of_islands(camera_geoms[tel_id], signal_pixels) + n_pixels_on_island = np.bincount(island_labels.astype(np.int64)) + + # The first index means the pixels not surviving + # the cleaning, so should not be considered + n_pixels_on_island[0] = 0 + max_island_label = np.argmax(n_pixels_on_island) + signal_pixels[island_labels != max_island_label] = False + + return signal_pixels, image, peak_time From 1a486e48a39f6b3c8daf534a0384532c603b308f Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Tue, 3 Oct 2023 09:21:44 +0200 Subject: [PATCH 39/76] Bug (bash scripts) --- .../lst1_magic/merging_runs_and_splitting_training_samples.py | 2 +- magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py index a16fa3ab2..e3cde17be 100644 --- a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py +++ b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py @@ -201,7 +201,7 @@ def mergeMC(target_dir, identification, env_name): f.write(f"SAMPLE_LIST=($(<{MC_DL1_dir}/{identification}/list_of_nodes.txt))\n") f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") - f.write(f'export LOG={MC_DL1_dir}/{identification}/Merged'.join('/merged_${SLURM_ARRAY_TASK_ID}.log\n')) + f.write(f'export LOG={MC_DL1_dir}/{identification}/Merged'+'/merged_${SLURM_ARRAY_TASK_ID}.log\n') f.write(f'conda run -n {env_name} merge_hdf_files --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 2>&1\n') diff --git a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py index 956ca6a15..7b1ba62d6 100644 --- a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py +++ 
b/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py @@ -159,7 +159,7 @@ def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, f" ls *.gz > {target_dir}/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n", ' string=$lineA"/"\n', f" export file={target_dir}/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n\n", - " cat $file | while read line; do echo $string${line}".join(f" >>{target_dir}/DL1/MC/{particle_type}/$lineB/list_dl0_ok.txt; done\n\n"), + " cat $file | while read line; do echo $string${line}"+f" >>{target_dir}/DL1/MC/{particle_type}/$lineB/list_dl0_ok.txt; done\n\n", ' echo "folder $lineB and node $lineA"\n', f'done 3<"{target_dir}/list_nodes_{particle_type}_complete.txt" 4<"{target_dir}/list_folder_{particle_type}.txt"\n', ""] @@ -190,7 +190,7 @@ def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, f'SAMPLE_LIST=($(<$INF/list_folder_{particle_type}.txt))\n', 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n', 'cd $SAMPLE\n\n', - f'export LOG={target_dir}/DL1/MC/{particle_type}'.join('/simtel_{$SAMPLE}_all.log\n'), + f'export LOG={target_dir}/DL1/MC/{particle_type}'+'/simtel_{$SAMPLE}_all.log\n', 'cat list_dl0_ok.txt | while read line\n', 'do\n', f' cd {target_dir}/../\n', From 733afad0ec1cb2faed127ee48765ab488b95c05b Mon Sep 17 00:00:00 2001 From: Raniere Date: Tue, 3 Oct 2023 10:57:43 +0200 Subject: [PATCH 40/76] Writing bash script with writelines() --- ...ing_runs_and_splitting_training_samples.py | 31 ++++++++++--------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py index e3cde17be..cb3e58c74 100644 --- a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py +++ b/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py @@ -189,20 +189,23 @@ def mergeMC(target_dir, 
identification, env_name): cleaning(list_of_nodes, target_dir) #This will delete the (possibly) failed runs. with open(f"Merge_{identification}.sh","w") as f: - f.write('#!/bin/sh\n\n') - f.write('#SBATCH -p short\n') - f.write(f'#SBATCH -J {process_name}n') - f.write(f"#SBATCH --array=0-{process_size}%50\n") - f.write('#SBATCH --mem=7g\n') - f.write('#SBATCH -N 1\n\n') - f.write('ulimit -l unlimited\n') - f.write('ulimit -s unlimited\n') - f.write('ulimit -a\n\n') - - f.write(f"SAMPLE_LIST=($(<{MC_DL1_dir}/{identification}/list_of_nodes.txt))\n") - f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") - f.write(f'export LOG={MC_DL1_dir}/{identification}/Merged'+'/merged_${SLURM_ARRAY_TASK_ID}.log\n') - f.write(f'conda run -n {env_name} merge_hdf_files --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 2>&1\n') + lines_bash_file = [ + '#!/bin/sh\n\n', + '#SBATCH -p short\n', + f'#SBATCH -J {process_name}\n', + f"#SBATCH --array=0-{process_size}%50\n", + '#SBATCH --mem=7g\n', + '#SBATCH -N 1\n\n', + 'ulimit -l unlimited\n', + 'ulimit -s unlimited\n', + 'ulimit -a\n\n', + f"SAMPLE_LIST=($(<{MC_DL1_dir}/{identification}/list_of_nodes.txt))\n", + "SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n", + f'export LOG={MC_DL1_dir}/{identification}/Merged'+'/merged_${SLURM_ARRAY_TASK_ID}.log\n', + f'conda run -n {env_name} merge_hdf_files --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 2>&1\n' + ] + f.writelines(lines_bash_file) + f.close() From aaaa867f891dd931352d6c970b6fd3b5adcf3f85 Mon Sep 17 00:00:00 2001 From: Raniere Date: Tue, 3 Oct 2023 11:06:07 +0200 Subject: [PATCH 41/76] List of functions in alphabetical order --- magicctapipe/io/io.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index 42216b5d3..d943672f1 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -23,7 +23,6 @@ from pyirf.utils import calculate_source_fov_offset,
calculate_theta __all__ = [ - "telescope_combinations", "format_object", "get_stereo_events", "get_stereo_events_old", @@ -36,6 +35,7 @@ "load_dl2_data_file", "load_irf_files", "save_pandas_data_in_table", + "telescope_combinations", ] logger = logging.getLogger(__name__) From 06e3f6cd638a99457070b42b442a87a3e9881c0a Mon Sep 17 00:00:00 2001 From: Raniere Date: Tue, 3 Oct 2023 11:09:20 +0200 Subject: [PATCH 42/76] List of functions in alphabetical order --- magicctapipe/io/io.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index d943672f1..6c26fb09f 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -23,17 +23,17 @@ from pyirf.utils import calculate_source_fov_offset, calculate_theta __all__ = [ - "format_object", + "format_object", + "get_dl2_mean", "get_stereo_events", "get_stereo_events_old", - "get_dl2_mean", + "load_dl2_data_file", + "load_irf_files", "load_lst_dl1_data_file", "load_magic_dl1_data_files", + "load_mc_dl2_data_file", "load_train_data_files", "load_train_data_files_tel", - "load_mc_dl2_data_file", - "load_dl2_data_file", - "load_irf_files", "save_pandas_data_in_table", "telescope_combinations", ] From 5d2432273afd8d0908b745e41fb0a0f54e7793a0 Mon Sep 17 00:00:00 2001 From: Raniere Date: Tue, 3 Oct 2023 11:18:16 +0200 Subject: [PATCH 43/76] List of functions in alphabetical order --- .../scripts/lst1_magic/lst1_magic_event_coincidence.py | 4 ++-- magicctapipe/scripts/lst1_magic/merge_hdf_files.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index 8d627ec18..307bcdea8 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -72,7 +72,7 @@ telescope_combinations, ) -__all__ = 
["telescope_positions","event_coincidence"] +__all__ = ["event_coincidence","telescope_positions"] logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) @@ -631,4 +631,4 @@ def main(): if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/magicctapipe/scripts/lst1_magic/merge_hdf_files.py b/magicctapipe/scripts/lst1_magic/merge_hdf_files.py index 0e8f48414..7b747935d 100644 --- a/magicctapipe/scripts/lst1_magic/merge_hdf_files.py +++ b/magicctapipe/scripts/lst1_magic/merge_hdf_files.py @@ -38,7 +38,7 @@ import tables from ctapipe.instrument import SubarrayDescription -__all__ = ["write_data_to_table", "merge_hdf_files"] +__all__ = ["merge_hdf_files","write_data_to_table"] logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) From 9a56f9eb9cc95ae7948efa457fd5746eea62f190 Mon Sep 17 00:00:00 2001 From: Raniere Date: Tue, 3 Oct 2023 11:28:14 +0200 Subject: [PATCH 44/76] List of functions in alphabetical order --- magicctapipe/io/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/magicctapipe/io/__init__.py b/magicctapipe/io/__init__.py index fb116d1ef..0b44e3700 100644 --- a/magicctapipe/io/__init__.py +++ b/magicctapipe/io/__init__.py @@ -7,7 +7,6 @@ SimEventInfoContainer, ) from .io import ( - telescope_combinations, format_object, get_dl2_mean, get_stereo_events, @@ -20,6 +19,7 @@ load_train_data_files, load_train_data_files_tel, save_pandas_data_in_table, + telescope_combinations, ) from .gadf import ( create_event_hdu, @@ -37,7 +37,6 @@ "create_gh_cuts_hdu", "create_gti_hdu", "create_pointing_hdu", - "telescope_combinations", "format_object", "get_dl2_mean", "get_stereo_events", @@ -50,4 +49,5 @@ "load_train_data_files", "load_train_data_files_tel", "save_pandas_data_in_table", + "telescope_combinations", ] From 937f0252e4ead4b2955ef20ebbb6f13b34a21240 Mon Sep 17 00:00:00 2001 From: Raniere Date: Tue, 3 Oct 2023 11:38:18 +0200 Subject: [PATCH 45/76] 
Standardized inline comments --- magicctapipe/io/io.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index 6c26fb09f..ab3f08795 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -77,7 +77,7 @@ def telescope_combinations(config): TEL_NAMES = {} - for k, v in config["mc_tel_ids"].items(): #Here we swap the dictionary keys and values just for convenience. + for k, v in config["mc_tel_ids"].items(): # Here we swap the dictionary keys and values just for convenience. if v > 0: TEL_NAMES[v] = k @@ -86,16 +86,16 @@ def telescope_combinations(config): def recursive_solution(current_tel, current_comb): - if current_tel == len(keys): #The function stops once we reach the last telescope + if current_tel == len(keys): # The function stops once we reach the last telescope return - current_comb_name = current_comb[0] + '_' + TEL_NAMES[keys[current_tel]] #Name of the combo (at this point it can even be a single telescope) - current_comb_list = current_comb[1] + [keys[current_tel]] #List of telescopes (including individual telescopes) + current_comb_name = current_comb[0] + '_' + TEL_NAMES[keys[current_tel]] # Name of the combo (at this point it can even be a single telescope) + current_comb_list = current_comb[1] + [keys[current_tel]] # List of telescopes (including individual telescopes) - if len(current_comb_list) > 1: #We save them in the new dictionary excluding the single-telescope values + if len(current_comb_list) > 1: # We save them in the new dictionary excluding the single-telescope values TEL_COMBINATIONS[current_comb_name[1:]] = current_comb_list; - current_comb = [current_comb_name, current_comb_list] #We save the current results in this varible to recal the function recursively ("for" loop below) + current_comb = [current_comb_name, current_comb_list] # We save the current results in this variable to re-call the function recursively ("for" loop below) for i in range(1,
len(keys)-current_tel): recursive_solution(current_tel+i, current_comb) From 69e7b6f1ea0b37dcbb473defc91db3bf3a66ad7c Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Tue, 3 Oct 2023 12:02:36 +0200 Subject: [PATCH 46/76] Moved IT scripts to a different folder --- .../{ => IT_container_data_MC_bash_scripts}/coincident_events.py | 0 .../merging_runs_and_splitting_training_samples.py | 0 .../setting_up_config_and_dir.py | 0 .../{ => IT_container_data_MC_bash_scripts}/stereo_events.py | 0 4 files changed, 0 insertions(+), 0 deletions(-) rename magicctapipe/scripts/lst1_magic/{ => IT_container_data_MC_bash_scripts}/coincident_events.py (100%) rename magicctapipe/scripts/lst1_magic/{ => IT_container_data_MC_bash_scripts}/merging_runs_and_splitting_training_samples.py (100%) rename magicctapipe/scripts/lst1_magic/{ => IT_container_data_MC_bash_scripts}/setting_up_config_and_dir.py (100%) rename magicctapipe/scripts/lst1_magic/{ => IT_container_data_MC_bash_scripts}/stereo_events.py (100%) diff --git a/magicctapipe/scripts/lst1_magic/coincident_events.py b/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/coincident_events.py similarity index 100% rename from magicctapipe/scripts/lst1_magic/coincident_events.py rename to magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/coincident_events.py diff --git a/magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py b/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/merging_runs_and_splitting_training_samples.py similarity index 100% rename from magicctapipe/scripts/lst1_magic/merging_runs_and_splitting_training_samples.py rename to magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/merging_runs_and_splitting_training_samples.py diff --git a/magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py b/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/setting_up_config_and_dir.py similarity index 100% rename from 
magicctapipe/scripts/lst1_magic/setting_up_config_and_dir.py rename to magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/setting_up_config_and_dir.py diff --git a/magicctapipe/scripts/lst1_magic/stereo_events.py b/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/stereo_events.py similarity index 100% rename from magicctapipe/scripts/lst1_magic/stereo_events.py rename to magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/stereo_events.py From c5cfff6fea780cd857de334a46cf11ffeaf236d9 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Tue, 3 Oct 2023 12:07:59 +0200 Subject: [PATCH 47/76] Moving tutorial --- .../README.md | 212 ++++++++++++++++++ 1 file changed, 212 insertions(+) create mode 100644 magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/README.md diff --git a/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/README.md b/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/README.md new file mode 100644 index 000000000..6ed4afecc --- /dev/null +++ b/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/README.md @@ -0,0 +1,212 @@ +# Script for MAGIC and MAGIC+LST analysis + +This folder contains scripts to perform MAGIC-only or MAGIC+LST analysis. + +Each script can be called from the command line from anywhere in your system (some console scripts are created during installation). Please run them with `-h` option for the first time to check what are the options available. + +## MAGIC-only analysis + +MAGIC-only analysis starts from MAGIC-calibrated data (\_Y\_ files). 
The analysis flow is as follows: + +- `magic_calib_to_dl1.py` on real and MC data (if you use MCs produced with MMCS), to convert them into DL1 format +- if you use SimTelArray MCs, run `lst1_magic_mc_dl0_to_dl1.py` over them to convert them into DL1 format +- optionally, but recommended, `merge_hdf_files.py` to merge subruns and/or runs together +- `lst1_magic_stereo_reco.py` to add stereo parameters to the DL1 data (use `--magic-only` argument if the MC DL1 data contains LST-1 events) +- `lst1_magic_train_rfs.py` to train the RFs (energy, direction, classification) on train gamma MCs and protons +- `lst1_magic_dl1_stereo_to_dl2.py` to apply the RFs to stereo DL1 data (real and test MCs) and produce DL2 data +- `lst1_magic_create_irf.py` to create the IRF (use `magic_stereo` as `irf_type` in the configuration file) +- `lst1_magic_dl2_to_dl3.py` to create DL3 files, and `create_dl3_index_files.py` to create DL3 HDU and index files + +## MAGIC+LST analysis: overview + +MAGIC+LST analysis starts from MAGIC calibrated data (\_Y\_ files), LST DL1 data and SimTelArray DL0 data. 
The analysis flow is as following: + +- `magic_calib_to_dl1.py` on real MAGIC data, to convert them into DL1 format +- `lst1_magic_mc_dl0_to_dl1.py` over SimTelArray MCs to convert them into DL1 format +- optionally, but recommended, `merge_hdf_files.py` on MAGIC data to merge subruns and/or runs together +- `lst1_magic_event_coincidence.py` to find coincident events between MAGIC and LST-1, starting from DL1 data +- `lst1_magic_stereo_reco.py` to add stereo parameters to the DL1 data +- `lst1_magic_train_rfs.py` to train the RFs (energy, direction, classification) on train gamma MCs and protons +- `lst1_magic_dl1_stereo_to_dl2.py` to apply the RFs to stereo DL1 data (real and test MCs) and produce DL2 data +- `lst1_magic_create_irf.py` to create the IRF +- `lst1_magic_dl2_to_dl3.py` to create DL3 files, and `create_dl3_index_files.py` to create DL3 HDU and index files + +## MAGIC+LST analysis: data reduction tutorial (PRELIMINARY) + +1) The very first step to reduce MAGIC-LST data is to have remote access/credentials to the IT Container, so provide one. Once you have it, the connection steps are the following: + +Authorized institute server (Client) → ssh connection to CTALaPalma → ssh connection to cp01/02 + +2) Once connected to the IT Container, install MAGIC-CTA-PIPE (e.g. in your home directory in the IT Container) following the tutorial here: https://github.com/cta-observatory/magic-cta-pipe + +3) Do not forget to open the magic-lst environment with the command `conda activate magic-lst` before starting the analysis + +### DL0 to DL1 + +In this step, we will convert the MAGIC and Monte Carlo (MC) Data Level (DL) 0 to DL1 (our goal is to reach DL3). + +Now copy all the python scripts available here to your preferred directory (e.g. /fefs/aswg/workspace/yourname/yourprojectname) in the IT Container, as well as the files `config_general.yaml`, `MAGIC_runs.txt` and `LST_runs.txt`. 
+ +The file `config_general.yaml` must contain the telescope IDs and the directories with the MC data, as shown below: +``` +mc_tel_ids: + LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 + MAGIC-I: 2 + MAGIC-II: 3 + +directories: + workspace_dir : "/fefs/aswg/workspace/yourname/yourprojectname/" + target_name : "CrabTeste" + MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" + MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" + MC_helium : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Helium/sim_telarray/" + MC_protons : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/Protons/dec_2276/sim_telarray" + MC_gammadiff : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/GammaDiffuse/dec_2276/sim_telarray/" + +general: + target_RA_deg : 83.633083 #RA in degrees + target_Dec_deg : 22.0145 #Dec in degrees + SimTel_version : "v1.4" + LST_version : "v0.9" + focal_length : "effective" #effective #nominal + MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" + LST_runs : "LST_runs.txt" + proton_train_fraction : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest + env_name : magic-lst + +``` + +The file `MAGIC_runs.txt` looks like that: +``` +2020_11_19,5093174 +2020_11_19,5093175 +2020_12_08,5093491 +2020_12_08,5093492 + +``` + + +The columns here represent the night and run in which you want to select data. Please do not add blank spaces in the rows, as these names will be used to i) find the MAGIC data in the IT Container and ii) create the subdirectories in your working directory. If there is no MAGIC data, please fill this file with "0,0". Similarly, the `LST_runs.txt` file looks like this: + +``` +2020_11_18,2923 +2020_11_18,2924 +2020_12_07,3093 + +``` +Note that the LST nights appear as being one day before MAGIC's!!! This is because LST saves the date at the beginning of the night, while MAGIC saves it at the end. 
If there is no LST data, please fill this file with "0,0". These files are the only ones we need to modify in order to convert DL0 into DL1 data. + +In this analysis, we use a wobble of 0.4°. + +To convert the MAGIC and SimTelArray MCs data into DL1 format, you first do the following: +> $ python setting_up_config_and_dir.py + +``` +***** Linking MC paths - this may take a few minutes ****** +*** Reducing DL0 to DL1 data - this can take many hours *** +Process name: yourprojectnameCrabTeste +To check the jobs submitted to the cluster, type: squeue -n yourprojectnameCrabTeste +``` +Note that this script can be run as +> $ python setting_up_config_and_dir.py --analysis-type onlyMAGIC + +or + +> $ python setting_up_config_and_dir.py --analysis-type onlyMC + +if you want to convert only MAGIC or only MC DL0 files to DL1, respectively. + + +The script `setting_up_config_and_dir.py` does a series of things: +- Creates a directory with your source name within the directory `yourprojectname` and several subdirectories inside it that are necessary for the rest of the data reduction. +- Generates a configuration file called config_step1.yaml with and telescope ID information and adopted imaging/cleaning cuts, and puts it in the directory created in the previous step. +- Links the MAGIC and MC data addresses to their respective subdirectories defined in the previous steps. +- Runs the scripts `lst1_magic_mc_dl0_to_dl1.py` and `magic_calib_to_dl1.py` for each one of the linked data files. + +In the file `config_general.yaml`, the sequence of telescopes is always LST1, LST2, LST3, LST4, MAGIC-I, MAGIC-II. So in this tutorial, we have +LST-1 ID = 1 +LST-2 ID = 0 +LST-3 ID = 0 +LST-4 ID = 0 +MAGIC-I ID = 2 +MAGIC-II ID = 3 +If the telescope ID is set to 0, this means that the telescope is not used in the analysis. + +You can check if this process is done by typing +> $ squeue -n yourprojectnameCrabTeste +or +> $ squeue -u your_user_name + +in the terminal. 
Once it is done, all of the subdirectories in `/fefs/aswg/workspace/yourname/yourprojectname/CrabTeste/DL1/` will be filled with files of the type `dl1_[...]_LST1_MAGIC1_MAGIC2_runXXXXXX.h5` for the MCs and `dl1_MX.RunXXXXXX.0XX.h5` for the MAGIC runs. The next step of the conversion of DL0 to DL1 is to split the DL1 MC proton sample into "train" and "test" datasets (these will be used later in the Random Forest event classification and to do some diagnostic plots) and to merge all the MAGIC data files such that in the end, we have only one datafile per night. To do so, we run the following script: + +> $ python merging_runs_and_splitting_training_samples.py + +``` +***** Splitting protons into 'train' and 'test' datasets... +***** Generating merge bashscripts... +***** Running merge_hdf_files.py in the MAGIC data files... +Process name: merging_CrabTeste +To check the jobs submitted to the cluster, type: squeue -n merging_CrabTeste +``` + +This script will slice the proton MC sample according to the entry "proton_train_fraction" in the "config_general.yaml" file, and then it will merge the MAGIC data files in the following order: +- MAGIC subruns are merged into single runs. +- MAGIC I and II runs are merged (only if both telescopes are used, of course). +- All runs in specific nights are merged, such that in the end we have only one datafile per night. +- Proton MC training data is merged. +- Proton MC testing data is merged. +- Diffuse MC gammas are merged. +- MC gammas are merged. 
+ +### Coincident events and stereo parameters on DL1 + +To find coincident events between MAGIC and LST, starting from DL1 data, we run the following script: + +> $ python coincident_events.py + +This script creates the file config_coincidence.yaml containing the telescope IDs and the following parameters: +``` +mc_tel_ids: + LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 + MAGIC-I: 2 + MAGIC-II: 3 + +event_coincidence: + timestamp_type_lst: "dragon_time" # select "dragon_time", "tib_time" or "ucts_time" + window_half_width: "300 ns" + pre_offset_search: true + n_pre_offset_search_events: 100 + time_offset: + start: "-10 us" + stop: "0 us" +``` + +It then links the real LST data files to the output directory [...]DL1/Observations/Coincident, and runs the script lst1_magic_event_coincidence.py in all of them. + +Once it is done, we add stereo parameters to the MAGIC+LST coincident DL1 data by running: + +> $ python stereo_events.py + +This script creates the file config_stereo.yaml with the follwoing parameters: +``` +mc_tel_ids: + LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 + MAGIC-I: 2 + MAGIC-II: 3 + +stereo_reco: + quality_cuts: "(intensity > 50) & (width > 0)" + theta_uplim: "6 arcmin" +``` + +It then creates the output directories for the DL1 with stereo parameters [...]DL1/Observations/Coincident_stereo/SEVERALNIGHTS and [...]/DL1/MC/GAMMAorPROTON/Merged/StereoMerged, and then runs the script lst1_magic_stereo_reco.py in all of the coincident DL1 files. The stereo DL1 files for MC and real data are then saved in these directories. 
+ From da9913244bd19ed01788e04bd1654dc9d40f1999 Mon Sep 17 00:00:00 2001 From: Raniere Date: Tue, 3 Oct 2023 12:08:40 +0200 Subject: [PATCH 48/76] Update README.md --- magicctapipe/scripts/lst1_magic/README.md | 179 ---------------------- 1 file changed, 179 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/README.md b/magicctapipe/scripts/lst1_magic/README.md index 6ed4afecc..0f5f39b54 100644 --- a/magicctapipe/scripts/lst1_magic/README.md +++ b/magicctapipe/scripts/lst1_magic/README.md @@ -31,182 +31,3 @@ MAGIC+LST analysis starts from MAGIC calibrated data (\_Y\_ files), LST DL1 data - `lst1_magic_create_irf.py` to create the IRF - `lst1_magic_dl2_to_dl3.py` to create DL3 files, and `create_dl3_index_files.py` to create DL3 HDU and index files -## MAGIC+LST analysis: data reduction tutorial (PRELIMINARY) - -1) The very first step to reduce MAGIC-LST data is to have remote access/credentials to the IT Container, so provide one. Once you have it, the connection steps are the following: - -Authorized institute server (Client) → ssh connection to CTALaPalma → ssh connection to cp01/02 - -2) Once connected to the IT Container, install MAGIC-CTA-PIPE (e.g. in your home directory in the IT Container) following the tutorial here: https://github.com/cta-observatory/magic-cta-pipe - -3) Do not forget to open the magic-lst environment with the command `conda activate magic-lst` before starting the analysis - -### DL0 to DL1 - -In this step, we will convert the MAGIC and Monte Carlo (MC) Data Level (DL) 0 to DL1 (our goal is to reach DL3). - -Now copy all the python scripts available here to your preferred directory (e.g. /fefs/aswg/workspace/yourname/yourprojectname) in the IT Container, as well as the files `config_general.yaml`, `MAGIC_runs.txt` and `LST_runs.txt`. 
- -The file `config_general.yaml` must contain the telescope IDs and the directories with the MC data, as shown below: -``` -mc_tel_ids: - LST-1: 1 - LST-2: 0 - LST-3: 0 - LST-4: 0 - MAGIC-I: 2 - MAGIC-II: 3 - -directories: - workspace_dir : "/fefs/aswg/workspace/yourname/yourprojectname/" - target_name : "CrabTeste" - MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" - MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" - MC_helium : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Helium/sim_telarray/" - MC_protons : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/Protons/dec_2276/sim_telarray" - MC_gammadiff : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/GammaDiffuse/dec_2276/sim_telarray/" - -general: - target_RA_deg : 83.633083 #RA in degrees - target_Dec_deg : 22.0145 #Dec in degrees - SimTel_version : "v1.4" - LST_version : "v0.9" - focal_length : "effective" #effective #nominal - MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" - LST_runs : "LST_runs.txt" - proton_train_fraction : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest - env_name : magic-lst - -``` - -The file `MAGIC_runs.txt` looks like that: -``` -2020_11_19,5093174 -2020_11_19,5093175 -2020_12_08,5093491 -2020_12_08,5093492 - -``` - - -The columns here represent the night and run in which you want to select data. Please do not add blank spaces in the rows, as these names will be used to i) find the MAGIC data in the IT Container and ii) create the subdirectories in your working directory. If there is no MAGIC data, please fill this file with "0,0". Similarly, the `LST_runs.txt` file looks like this: - -``` -2020_11_18,2923 -2020_11_18,2924 -2020_12_07,3093 - -``` -Note that the LST nights appear as being one day before MAGIC's!!! This is because LST saves the date at the beginning of the night, while MAGIC saves it at the end. 
If there is no LST data, please fill this file with "0,0". These files are the only ones we need to modify in order to convert DL0 into DL1 data. - -In this analysis, we use a wobble of 0.4°. - -To convert the MAGIC and SimTelArray MCs data into DL1 format, you first do the following: -> $ python setting_up_config_and_dir.py - -``` -***** Linking MC paths - this may take a few minutes ****** -*** Reducing DL0 to DL1 data - this can take many hours *** -Process name: yourprojectnameCrabTeste -To check the jobs submitted to the cluster, type: squeue -n yourprojectnameCrabTeste -``` -Note that this script can be run as -> $ python setting_up_config_and_dir.py --analysis-type onlyMAGIC - -or - -> $ python setting_up_config_and_dir.py --analysis-type onlyMC - -if you want to convert only MAGIC or only MC DL0 files to DL1, respectively. - - -The script `setting_up_config_and_dir.py` does a series of things: -- Creates a directory with your source name within the directory `yourprojectname` and several subdirectories inside it that are necessary for the rest of the data reduction. -- Generates a configuration file called config_step1.yaml with and telescope ID information and adopted imaging/cleaning cuts, and puts it in the directory created in the previous step. -- Links the MAGIC and MC data addresses to their respective subdirectories defined in the previous steps. -- Runs the scripts `lst1_magic_mc_dl0_to_dl1.py` and `magic_calib_to_dl1.py` for each one of the linked data files. - -In the file `config_general.yaml`, the sequence of telescopes is always LST1, LST2, LST3, LST4, MAGIC-I, MAGIC-II. So in this tutorial, we have -LST-1 ID = 1 -LST-2 ID = 0 -LST-3 ID = 0 -LST-4 ID = 0 -MAGIC-I ID = 2 -MAGIC-II ID = 3 -If the telescope ID is set to 0, this means that the telescope is not used in the analysis. - -You can check if this process is done by typing -> $ squeue -n yourprojectnameCrabTeste -or -> $ squeue -u your_user_name - -in the terminal. 
Once it is done, all of the subdirectories in `/fefs/aswg/workspace/yourname/yourprojectname/CrabTeste/DL1/` will be filled with files of the type `dl1_[...]_LST1_MAGIC1_MAGIC2_runXXXXXX.h5` for the MCs and `dl1_MX.RunXXXXXX.0XX.h5` for the MAGIC runs. The next step of the conversion of DL0 to DL1 is to split the DL1 MC proton sample into "train" and "test" datasets (these will be used later in the Random Forest event classification and to do some diagnostic plots) and to merge all the MAGIC data files such that in the end, we have only one datafile per night. To do so, we run the following script: - -> $ python merging_runs_and_splitting_training_samples.py - -``` -***** Splitting protons into 'train' and 'test' datasets... -***** Generating merge bashscripts... -***** Running merge_hdf_files.py in the MAGIC data files... -Process name: merging_CrabTeste -To check the jobs submitted to the cluster, type: squeue -n merging_CrabTeste -``` - -This script will slice the proton MC sample according to the entry "proton_train_fraction" in the "config_general.yaml" file, and then it will merge the MAGIC data files in the following order: -- MAGIC subruns are merged into single runs. -- MAGIC I and II runs are merged (only if both telescopes are used, of course). -- All runs in specific nights are merged, such that in the end we have only one datafile per night. -- Proton MC training data is merged. -- Proton MC testing data is merged. -- Diffuse MC gammas are merged. -- MC gammas are merged. 
- -### Coincident events and stereo parameters on DL1 - -To find coincident events between MAGIC and LST, starting from DL1 data, we run the following script: - -> $ python coincident_events.py - -This script creates the file config_coincidence.yaml containing the telescope IDs and the following parameters: -``` -mc_tel_ids: - LST-1: 1 - LST-2: 0 - LST-3: 0 - LST-4: 0 - MAGIC-I: 2 - MAGIC-II: 3 - -event_coincidence: - timestamp_type_lst: "dragon_time" # select "dragon_time", "tib_time" or "ucts_time" - window_half_width: "300 ns" - pre_offset_search: true - n_pre_offset_search_events: 100 - time_offset: - start: "-10 us" - stop: "0 us" -``` - -It then links the real LST data files to the output directory [...]DL1/Observations/Coincident, and runs the script lst1_magic_event_coincidence.py in all of them. - -Once it is done, we add stereo parameters to the MAGIC+LST coincident DL1 data by running: - -> $ python stereo_events.py - -This script creates the file config_stereo.yaml with the follwoing parameters: -``` -mc_tel_ids: - LST-1: 1 - LST-2: 0 - LST-3: 0 - LST-4: 0 - MAGIC-I: 2 - MAGIC-II: 3 - -stereo_reco: - quality_cuts: "(intensity > 50) & (width > 0)" - theta_uplim: "6 arcmin" -``` - -It then creates the output directories for the DL1 with stereo parameters [...]DL1/Observations/Coincident_stereo/SEVERALNIGHTS and [...]/DL1/MC/GAMMAorPROTON/Merged/StereoMerged, and then runs the script lst1_magic_stereo_reco.py in all of the coincident DL1 files. The stereo DL1 files for MC and real data are then saved in these directories. 
- From cd9b39652a96a1ca680aee14b8e39965e2fcbbc0 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 4 Oct 2023 11:46:24 +0200 Subject: [PATCH 49/76] Remove IT scripts --- .../README.md | 212 -------- .../coincident_events.py | 195 -------- ...ing_runs_and_splitting_training_samples.py | 286 ----------- .../setting_up_config_and_dir.py | 467 ------------------ .../stereo_events.py | 199 -------- 5 files changed, 1359 deletions(-) delete mode 100644 magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/README.md delete mode 100644 magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/coincident_events.py delete mode 100644 magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/merging_runs_and_splitting_training_samples.py delete mode 100644 magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/setting_up_config_and_dir.py delete mode 100644 magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/stereo_events.py diff --git a/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/README.md b/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/README.md deleted file mode 100644 index 6ed4afecc..000000000 --- a/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/README.md +++ /dev/null @@ -1,212 +0,0 @@ -# Script for MAGIC and MAGIC+LST analysis - -This folder contains scripts to perform MAGIC-only or MAGIC+LST analysis. - -Each script can be called from the command line from anywhere in your system (some console scripts are created during installation). Please run them with `-h` option for the first time to check what are the options available. - -## MAGIC-only analysis - -MAGIC-only analysis starts from MAGIC-calibrated data (\_Y\_ files). 
The analysis flow is as follows: - -- `magic_calib_to_dl1.py` on real and MC data (if you use MCs produced with MMCS), to convert them into DL1 format -- if you use SimTelArray MCs, run `lst1_magic_mc_dl0_to_dl1.py` over them to convert them into DL1 format -- optionally, but recommended, `merge_hdf_files.py` to merge subruns and/or runs together -- `lst1_magic_stereo_reco.py` to add stereo parameters to the DL1 data (use `--magic-only` argument if the MC DL1 data contains LST-1 events) -- `lst1_magic_train_rfs.py` to train the RFs (energy, direction, classification) on train gamma MCs and protons -- `lst1_magic_dl1_stereo_to_dl2.py` to apply the RFs to stereo DL1 data (real and test MCs) and produce DL2 data -- `lst1_magic_create_irf.py` to create the IRF (use `magic_stereo` as `irf_type` in the configuration file) -- `lst1_magic_dl2_to_dl3.py` to create DL3 files, and `create_dl3_index_files.py` to create DL3 HDU and index files - -## MAGIC+LST analysis: overview - -MAGIC+LST analysis starts from MAGIC calibrated data (\_Y\_ files), LST DL1 data and SimTelArray DL0 data. 
The analysis flow is as following: - -- `magic_calib_to_dl1.py` on real MAGIC data, to convert them into DL1 format -- `lst1_magic_mc_dl0_to_dl1.py` over SimTelArray MCs to convert them into DL1 format -- optionally, but recommended, `merge_hdf_files.py` on MAGIC data to merge subruns and/or runs together -- `lst1_magic_event_coincidence.py` to find coincident events between MAGIC and LST-1, starting from DL1 data -- `lst1_magic_stereo_reco.py` to add stereo parameters to the DL1 data -- `lst1_magic_train_rfs.py` to train the RFs (energy, direction, classification) on train gamma MCs and protons -- `lst1_magic_dl1_stereo_to_dl2.py` to apply the RFs to stereo DL1 data (real and test MCs) and produce DL2 data -- `lst1_magic_create_irf.py` to create the IRF -- `lst1_magic_dl2_to_dl3.py` to create DL3 files, and `create_dl3_index_files.py` to create DL3 HDU and index files - -## MAGIC+LST analysis: data reduction tutorial (PRELIMINARY) - -1) The very first step to reduce MAGIC-LST data is to have remote access/credentials to the IT Container, so provide one. Once you have it, the connection steps are the following: - -Authorized institute server (Client) → ssh connection to CTALaPalma → ssh connection to cp01/02 - -2) Once connected to the IT Container, install MAGIC-CTA-PIPE (e.g. in your home directory in the IT Container) following the tutorial here: https://github.com/cta-observatory/magic-cta-pipe - -3) Do not forget to open the magic-lst environment with the command `conda activate magic-lst` before starting the analysis - -### DL0 to DL1 - -In this step, we will convert the MAGIC and Monte Carlo (MC) Data Level (DL) 0 to DL1 (our goal is to reach DL3). - -Now copy all the python scripts available here to your preferred directory (e.g. /fefs/aswg/workspace/yourname/yourprojectname) in the IT Container, as well as the files `config_general.yaml`, `MAGIC_runs.txt` and `LST_runs.txt`. 
- -The file `config_general.yaml` must contain the telescope IDs and the directories with the MC data, as shown below: -``` -mc_tel_ids: - LST-1: 1 - LST-2: 0 - LST-3: 0 - LST-4: 0 - MAGIC-I: 2 - MAGIC-II: 3 - -directories: - workspace_dir : "/fefs/aswg/workspace/yourname/yourprojectname/" - target_name : "CrabTeste" - MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" - MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" - MC_helium : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Helium/sim_telarray/" - MC_protons : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/Protons/dec_2276/sim_telarray" - MC_gammadiff : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/GammaDiffuse/dec_2276/sim_telarray/" - -general: - target_RA_deg : 83.633083 #RA in degrees - target_Dec_deg : 22.0145 #Dec in degrees - SimTel_version : "v1.4" - LST_version : "v0.9" - focal_length : "effective" #effective #nominal - MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" - LST_runs : "LST_runs.txt" - proton_train_fraction : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest - env_name : magic-lst - -``` - -The file `MAGIC_runs.txt` looks like that: -``` -2020_11_19,5093174 -2020_11_19,5093175 -2020_12_08,5093491 -2020_12_08,5093492 - -``` - - -The columns here represent the night and run in which you want to select data. Please do not add blank spaces in the rows, as these names will be used to i) find the MAGIC data in the IT Container and ii) create the subdirectories in your working directory. If there is no MAGIC data, please fill this file with "0,0". Similarly, the `LST_runs.txt` file looks like this: - -``` -2020_11_18,2923 -2020_11_18,2924 -2020_12_07,3093 - -``` -Note that the LST nights appear as being one day before MAGIC's!!! This is because LST saves the date at the beginning of the night, while MAGIC saves it at the end. 
If there is no LST data, please fill this file with "0,0". These files are the only ones we need to modify in order to convert DL0 into DL1 data. - -In this analysis, we use a wobble of 0.4°. - -To convert the MAGIC and SimTelArray MCs data into DL1 format, you first do the following: -> $ python setting_up_config_and_dir.py - -``` -***** Linking MC paths - this may take a few minutes ****** -*** Reducing DL0 to DL1 data - this can take many hours *** -Process name: yourprojectnameCrabTeste -To check the jobs submitted to the cluster, type: squeue -n yourprojectnameCrabTeste -``` -Note that this script can be run as -> $ python setting_up_config_and_dir.py --analysis-type onlyMAGIC - -or - -> $ python setting_up_config_and_dir.py --analysis-type onlyMC - -if you want to convert only MAGIC or only MC DL0 files to DL1, respectively. - - -The script `setting_up_config_and_dir.py` does a series of things: -- Creates a directory with your source name within the directory `yourprojectname` and several subdirectories inside it that are necessary for the rest of the data reduction. -- Generates a configuration file called config_step1.yaml with and telescope ID information and adopted imaging/cleaning cuts, and puts it in the directory created in the previous step. -- Links the MAGIC and MC data addresses to their respective subdirectories defined in the previous steps. -- Runs the scripts `lst1_magic_mc_dl0_to_dl1.py` and `magic_calib_to_dl1.py` for each one of the linked data files. - -In the file `config_general.yaml`, the sequence of telescopes is always LST1, LST2, LST3, LST4, MAGIC-I, MAGIC-II. So in this tutorial, we have -LST-1 ID = 1 -LST-2 ID = 0 -LST-3 ID = 0 -LST-4 ID = 0 -MAGIC-I ID = 2 -MAGIC-II ID = 3 -If the telescope ID is set to 0, this means that the telescope is not used in the analysis. - -You can check if this process is done by typing -> $ squeue -n yourprojectnameCrabTeste -or -> $ squeue -u your_user_name - -in the terminal. 
Once it is done, all of the subdirectories in `/fefs/aswg/workspace/yourname/yourprojectname/CrabTeste/DL1/` will be filled with files of the type `dl1_[...]_LST1_MAGIC1_MAGIC2_runXXXXXX.h5` for the MCs and `dl1_MX.RunXXXXXX.0XX.h5` for the MAGIC runs. The next step of the conversion of DL0 to DL1 is to split the DL1 MC proton sample into "train" and "test" datasets (these will be used later in the Random Forest event classification and to do some diagnostic plots) and to merge all the MAGIC data files such that in the end, we have only one datafile per night. To do so, we run the following script: - -> $ python merging_runs_and_splitting_training_samples.py - -``` -***** Splitting protons into 'train' and 'test' datasets... -***** Generating merge bashscripts... -***** Running merge_hdf_files.py in the MAGIC data files... -Process name: merging_CrabTeste -To check the jobs submitted to the cluster, type: squeue -n merging_CrabTeste -``` - -This script will slice the proton MC sample according to the entry "proton_train_fraction" in the "config_general.yaml" file, and then it will merge the MAGIC data files in the following order: -- MAGIC subruns are merged into single runs. -- MAGIC I and II runs are merged (only if both telescopes are used, of course). -- All runs in specific nights are merged, such that in the end we have only one datafile per night. -- Proton MC training data is merged. -- Proton MC testing data is merged. -- Diffuse MC gammas are merged. -- MC gammas are merged. 
- -### Coincident events and stereo parameters on DL1 - -To find coincident events between MAGIC and LST, starting from DL1 data, we run the following script: - -> $ python coincident_events.py - -This script creates the file config_coincidence.yaml containing the telescope IDs and the following parameters: -``` -mc_tel_ids: - LST-1: 1 - LST-2: 0 - LST-3: 0 - LST-4: 0 - MAGIC-I: 2 - MAGIC-II: 3 - -event_coincidence: - timestamp_type_lst: "dragon_time" # select "dragon_time", "tib_time" or "ucts_time" - window_half_width: "300 ns" - pre_offset_search: true - n_pre_offset_search_events: 100 - time_offset: - start: "-10 us" - stop: "0 us" -``` - -It then links the real LST data files to the output directory [...]DL1/Observations/Coincident, and runs the script lst1_magic_event_coincidence.py in all of them. - -Once it is done, we add stereo parameters to the MAGIC+LST coincident DL1 data by running: - -> $ python stereo_events.py - -This script creates the file config_stereo.yaml with the follwoing parameters: -``` -mc_tel_ids: - LST-1: 1 - LST-2: 0 - LST-3: 0 - LST-4: 0 - MAGIC-I: 2 - MAGIC-II: 3 - -stereo_reco: - quality_cuts: "(intensity > 50) & (width > 0)" - theta_uplim: "6 arcmin" -``` - -It then creates the output directories for the DL1 with stereo parameters [...]DL1/Observations/Coincident_stereo/SEVERALNIGHTS and [...]/DL1/MC/GAMMAorPROTON/Merged/StereoMerged, and then runs the script lst1_magic_stereo_reco.py in all of the coincident DL1 files. The stereo DL1 files for MC and real data are then saved in these directories. 
- diff --git a/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/coincident_events.py b/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/coincident_events.py deleted file mode 100644 index 60eea4090..000000000 --- a/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/coincident_events.py +++ /dev/null @@ -1,195 +0,0 @@ -""" -This scripts facilitates the usage of the script -"lst1_magic_event_coincidence.py". This script is -more like a "maneger" that organizes the analysis -process by: -1) Creating the bash scripts for looking for -coincidence events between MAGIC and LST in each -night. -2) Creating the subdirectories for the coincident -event files. - - -Usage: -$ python coincident_events.py - -""" - -import os -import numpy as np -import glob -import yaml -import logging -from pathlib import Path -import argparse - -logger = logging.getLogger(__name__) -logger.addHandler(logging.StreamHandler()) -logger.setLevel(logging.INFO) - -def configfile_coincidence(ids, target_dir): - - """ - This function creates the configuration file needed for the event coincidence step - - Parameters - ---------- - ids: list - list of telescope IDs - target_dir: str - Path to the working directory - """ - - with open(f'{target_dir}/config_coincidence.yaml','w') as f: - f.write(f"mc_tel_ids:\n LST-1: {ids[0]}\n LST-2: {ids[1]}\n LST-3: {ids[2]}\n LST-4: {ids[3]}\n MAGIC-I: {ids[4]}\n MAGIC-II: {ids[5]}\n\n") - f.write('event_coincidence:\n timestamp_type_lst: "dragon_time" # select "dragon_time", "tib_time" or "ucts_time"\n window_half_width: "300 ns"\n') - f.write(' pre_offset_search: true\n') - f.write(' n_pre_offset_search_events: 100\n') - f.write(' time_offset:\n start: "-10 us"\n stop: "0 us"\n') - - - -def linking_lst(target_dir, LST_runs, LST_version): - - """ - This function links the LST data paths to the working directory. 
This is a preparation step required for running lst1_magic_event_coincidence.py - - Parameters - ---------- - target_dir: str - Path to the working directory - LST_runs: matrix of strings - This matrix is imported from config_general.yaml and tells the function where to find the LST data and link them to our working directory - """ - - - coincidence_DL1_dir = f"{target_dir}/DL1/Observations" - if not os.path.exists(f"{coincidence_DL1_dir}/Coincident"): - os.mkdir(f"{coincidence_DL1_dir}/Coincident") - - for i in LST_runs: - lstObsDir = i[0].split("_")[0]+i[0].split("_")[1]+i[0].split("_")[2] - inputdir = f'/fefs/aswg/data/real/DL1/{lstObsDir}/{LST_version}/tailcut84' - outputdir = f'{coincidence_DL1_dir}/Coincident/{lstObsDir}' - list_of_subruns = np.sort(glob.glob(f"{inputdir}/dl1*Run*{i[1]}*.*.h5")) - if os.path.exists(f"{outputdir}/list_LST.txt"): - with open(f"{outputdir}/list_LST.txt", "a") as LSTdataPathFile: - for subrun in list_of_subruns: - LSTdataPathFile.write(f"{subrun}\n") #If this files already exists, simply append the new information - else: - os.mkdir(outputdir) - with open(f"{outputdir}/list_LST.txt", "w") as f: #If the file list_LST.txt does not exist, it will be created here - for subrun in list_of_subruns: - f.write(f"{subrun}\n") - - - -def bash_coincident(target_dir, env_name): - - """ - This function generates the bashscript for running the coincidence analysis. 
- - Parameters - ---------- - target_dir: str - Path to the working directory - """ - - process_name = target_dir.split("/")[-2:][1] - - listOfNightsLST = np.sort(glob.glob(f"{target_dir}/DL1/Observations/Coincident/*")) - listOfNightsMAGIC = np.sort(glob.glob(f"{target_dir}/DL1/Observations/Merged/Merged*")) - - for nightMAGIC,nightLST in zip(listOfNightsMAGIC,listOfNightsLST): - process_size = len(np.genfromtxt(f"{nightLST}/list_LST.txt",dtype="str")) - 1 - - with open(f"LST_coincident_{nightLST.split('/')[-1]}.sh","w") as f: - f.write("#!/bin/sh\n\n") - f.write("#SBATCH -p short\n") - f.write(f"#SBATCH -J {process_name}_coincidence\n") - f.write(f"#SBATCH --array=0-{process_size}%50\n") - f.write("#SBATCH -N 1\n\n") - f.write("ulimit -l unlimited\n") - f.write("ulimit -s unlimited\n") - f.write("ulimit -a\n\n") - - f.write(f"export INM={nightMAGIC}\n") - f.write(f"export OUTPUTDIR={nightLST}\n") - f.write("SAMPLE_LIST=($(<$OUTPUTDIR/list_LST.txt))\n") - f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") - f.write("export LOG=$OUTPUTDIR/coincidence_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n {env_name} lst1_magic_event_coincidence --input-file-lst $SAMPLE --input-dir-magic $INM --output-dir $OUTPUTDIR --config-file {target_dir}/config_coincidence.yaml >$LOG 2>&1") - - - - -def main(): - - """ - Here we read the config_general.yaml file and call the functions defined above. 
- """ - parser = argparse.ArgumentParser() - parser.add_argument( - "--config-file", - "-c", - dest="config_file", - type=str, - default="./config_general.yaml", - help="Path to a configuration file", - ) - - args = parser.parse_args() - with open( - args.config_file, "rb" - ) as f: # "rb" mode opens the file in binary format for reading - config = yaml.safe_load(f) - - - telescope_ids = list(config["mc_tel_ids"].values()) - target_dir = f'{Path(config["directories"]["workspace_dir"])}/{config["directories"]["target_name"]}"' - - env_name = config["general"]["env_name"] - - LST_runs_and_dates = config["general"]["LST_runs"] - LST_runs = np.genfromtxt(LST_runs_and_dates,dtype=str,delimiter=',') - LST_version = config["general"]["LST_version"] - - print("***** Generating file config_coincidence.yaml...") - print("***** This file can be found in ",target_dir) - configfile_coincidence(telescope_ids,target_dir) - - - print("***** Linking the paths to LST data files...") - linking_lst(target_dir, LST_runs, LST_version) #linking the data paths to current working directory - - - print("***** Generating the bashscript...") - bash_coincident(target_dir, env_name) - - - print("***** Submitting processess to the cluster...") - print(f"Process name: {target_dir.split('/')[-2:][1]}_coincidence") - print(f"To check the jobs submitted to the cluster, type: squeue -n {target_dir.split('/')[-2:][1]}_coincidence") - - #Below we run the bash scripts to find the coincident events - list_of_coincidence_scripts = np.sort(glob.glob("LST_coincident*.sh")) - - for n,run in enumerate(list_of_coincidence_scripts): - if n == 0: - launch_jobs = f"coincidence{n}=$(sbatch --parsable {run})" - else: - launch_jobs = f"{launch_jobs} && coincidence{n}=$(sbatch --parsable --dependency=afterany:$coincidence{n-1} {run})" - - #print(launch_jobs) - os.system(launch_jobs) - -if __name__ == "__main__": - main() - - - - - - - - diff --git 
a/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/merging_runs_and_splitting_training_samples.py b/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/merging_runs_and_splitting_training_samples.py deleted file mode 100644 index cb3e58c74..000000000 --- a/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/merging_runs_and_splitting_training_samples.py +++ /dev/null @@ -1,286 +0,0 @@ -""" -This script splits the proton MC data sample into "train" -and "test", deletes possible failed runs (only those files -that end up with a size < 1 kB), and generates the bash -scripts to merge the data files calling the script "merge_hdf_files.py" -in the following order: - -MAGIC: -1) Merge the subruns into runs for M1 and M2 individually. -2) Merge the runs of M1 and M2 into M1-M2 runs. -3) Merge all the M1-M2 runs for a given night. -Workingdir/DL1/Observations/Merged - -MC: -1) Merges all MC runs in a node and save them at -Workingdir/DL1/MC/PARTICLE/Merged - - -Usage: -$ python merging_runs_and_splitting_training_samples.py - -""" - -import os -import numpy as np -import glob -import yaml -import logging -from tqdm import tqdm -from pathlib import Path -import argparse - -logger = logging.getLogger(__name__) -logger.addHandler(logging.StreamHandler()) -logger.setLevel(logging.INFO) - -def cleaning(list_of_nodes, target_dir): - - """ - This function looks for failed runs in each node and remove them. - - Parameters - ---------- - target_dir: str - Path to the target directory. - list_of_nodes: array of str - List of nodes where the function will look for failed runs. - """ - - for i in tqdm(range(len(list_of_nodes)), desc="Cleaning failed runs"): - os.chdir(list_of_nodes[i]) - os.system('find . -type f -name "*.h5" -size -1k -delete') - - os.chdir(f"{target_dir}/../") - print("Cleaning done.") - -def split_train_test(target_dir, train_fraction): - - """ - This function splits the MC proton sample in 2, i.e. 
the "test" and the "train" subsamples. - It generates 2 subdirectories in the directory .../DL1/MC/protons named "test" and "train" and creates sub-sub-directories with the names of all nodes. - For each node sub-sub-directory we move 80% of the .h5 files (if it is in the "test" subdirectory) or 20% of the .h5 files (if it is in the "train" subdirectory). - - Parameters - ---------- - target_dir: str - Path to the working directory - train_fraction: float - Fraction of proton MC files to be used in the training RF dataset - """ - - proton_dir = f"{target_dir}/DL1/MC/protons" - - if not os.path.exists(f"{proton_dir}/train"): - os.mkdir(f"{proton_dir}/train") - if not os.path.exists(f"{proton_dir}/../protons_test"): - os.mkdir(f"{proton_dir}/../protons_test") - - list_of_dir = np.sort(glob.glob(f'{proton_dir}/node*{os.path.sep}')) - - for directory in tqdm(range(len(list_of_dir))): #tqdm allows us to print a progessbar in the terminal - if not os.path.exists(f"{proton_dir}/train/{list_of_dir[directory].split('/')[-2]}"): - os.mkdir(f"{proton_dir}/train/{list_of_dir[directory].split('/')[-2]}") - if not os.path.exists(f"{proton_dir}/../protons_test/{list_of_dir[directory].split('/')[-2]}"): - os.mkdir(f'{proton_dir}/../protons_test/{list_of_dir[directory].split("/")[-2]}') - list_of_runs = np.sort(glob.glob(f'{proton_dir}/{list_of_dir[directory].split("/")[-2]}/*.h5')) - split_percent = int(len(list_of_runs)*train_fraction) - for j in list_of_runs[0:split_percent]: - os.system(f"mv {j} {proton_dir}/train/{list_of_dir[directory].split('/')[-2]}") - - os.system(f"cp {list_of_dir[directory]}*.txt {proton_dir}/train/{list_of_dir[directory].split('/')[-2]}") - os.system(f"mv {list_of_dir[directory]}*.txt {proton_dir}/../protons_test/{list_of_dir[directory].split('/')[-2]}") - os.system(f"mv {list_of_dir[directory]}*.h5 {proton_dir}/../protons_test/{list_of_dir[directory].split('/')[-2]}") - os.system(f"rm -r {list_of_dir[directory]}") - -def merge(target_dir, 
identification, MAGIC_runs, env_name): - - """ - This function creates the bash scripts to run merge_hdf_files.py in all MAGIC subruns. - - Parameters - ---------- - target_dir: str - Path to the working directory - identification: str - Tells which batch to create. Options: subruns, M1M2, nights - MAGIC_runs: matrix of strings - This matrix is imported from config_general.yaml and tells the function where to find the data and where to put the merged files - """ - - process_name = f"merging_{target_dir.split('/')[-2:][1]}" - - MAGIC_DL1_dir = f"{target_dir}/DL1/Observations" - if os.path.exists(f"{MAGIC_DL1_dir}/M1") & os.path.exists(f"{MAGIC_DL1_dir}/M2"): - if not os.path.exists(f"{MAGIC_DL1_dir}/Merged"): - os.mkdir(f"{MAGIC_DL1_dir}/Merged") - - with open(f"Merge_{identification}.sh","w") as f: - f.write('#!/bin/sh\n\n') - f.write('#SBATCH -p short\n') - f.write(f'#SBATCH -J {process_name}\n') - f.write('#SBATCH -N 1\n\n') - f.write('ulimit -l unlimited\n') - f.write('ulimit -s unlimited\n') - f.write('ulimit -a\n\n') - - if identification == "0_subruns": - if os.path.exists(f"{MAGIC_DL1_dir}/M1"): - for i in MAGIC_runs: - if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/{i[0]}"): - os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night - if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}"): - os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory for the respective run - f.write(f'conda run -n {env_name} merge_hdf_files --input-dir {MAGIC_DL1_dir}/M1/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') - - if os.path.exists(f"{MAGIC_DL1_dir}/M2"): - for i in MAGIC_runs: - if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/{i[0]}"): - os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}") #Creating a merged directory for the respective night - if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}"): - os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]}") #Creating a merged directory 
for the respective run - f.write(f'conda run -n {env_name} merge_hdf_files --input-dir {MAGIC_DL1_dir}/M2/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} \n') - - elif identification == "1_M1M2": - if os.path.exists(f"{MAGIC_DL1_dir}/M1") & os.path.exists(f"{MAGIC_DL1_dir}/M2"): - for i in MAGIC_runs: - if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/{i[0]}/Merged"): - os.mkdir(f"{MAGIC_DL1_dir}/Merged/{i[0]}/Merged") - f.write(f'conda run -n {env_name} merge_hdf_files --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/{i[1]} --output-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --run-wise \n') - else: - for i in MAGIC_runs: - if not os.path.exists(f"{MAGIC_DL1_dir}/Merged/Merged_{i[0]}"): - os.mkdir(f"{MAGIC_DL1_dir}/Merged/Merged_{i[0]}") #Creating a merged directory for each night - f.write(f'conda run -n {env_name} merge_hdf_files --input-dir {MAGIC_DL1_dir}/Merged/{i[0]}/Merged --output-dir {MAGIC_DL1_dir}/Merged/Merged_{i[0]} \n') - - - - - -def mergeMC(target_dir, identification, env_name): - - """ - This function creates the bash scripts to run merge_hdf_files.py in all MC runs. - - Parameters - ---------- - target_dir: str - Path to the working directory - identification: str - Tells which batch to create. Options: protons, gammadiffuse - """ - - process_name = f"merging_{target_dir.split('/')[-2:][1]}" - - MC_DL1_dir = f"{target_dir}/DL1/MC" - if not os.path.exists(f"{MC_DL1_dir}/{identification}/Merged"): - os.mkdir(f"{MC_DL1_dir}/{identification}/Merged") - - if identification == "protons": - list_of_nodes = np.sort(glob.glob(f"{MC_DL1_dir}/{identification}/train/node*")) - else: - list_of_nodes = np.sort(glob.glob(f"{MC_DL1_dir}/{identification}/node*")) - - np.savetxt(f"{MC_DL1_dir}/{identification}/list_of_nodes.txt",list_of_nodes, fmt='%s') - - - process_size = len(list_of_nodes) - 1 - - cleaning(list_of_nodes, target_dir) #This will delete the (possibly) failed runs. 
- - with open(f"Merge_{identification}.sh","w") as f: - lines_bash_file = [ - '#!/bin/sh\n\n', - '#SBATCH -p short\n', - f'#SBATCH -J {process_name}n', - f"#SBATCH --array=0-{process_size}%50\n", - '#SBATCH --mem=7g\n', - '#SBATCH -N 1\n\n', - 'ulimit -l unlimited\n', - 'ulimit -s unlimited\n', - 'ulimit -a\n\n', - f"SAMPLE_LIST=($(<{MC_DL1_dir}/{identification}/list_of_nodes.txt))\n", - "SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n", - f'export LOG={MC_DL1_dir}/{identification}/Merged'+'/merged_${SLURM_ARRAY_TASK_ID}.log\n', - f'conda run -n {env_name} merge_hdf_files --input-dir $SAMPLE --output-dir {MC_DL1_dir}/{identification}/Merged >$LOG 2>&1\n' - ] - f.writelines(lines_bash_file) - f.close() - - - - -def main(): - - """ - Here we read the config_general.yaml file, split the pronton sample into "test" and "train", and merge the MAGIC files. - """ - parser = argparse.ArgumentParser() - parser.add_argument( - "--config-file", - "-c", - dest="config_file", - type=str, - default="./config_general.yaml", - help="Path to a configuration file", - ) - - args = parser.parse_args() - with open( - args.config_file, "rb" - ) as f: # "rb" mode opens the file in binary format for reading - config = yaml.safe_load(f) - - - - target_dir = f'{Path(config["directories"]["workspace_dir"])}/{config["directories"]["target_name"]}' - - MAGIC_runs_and_dates = config["general"]["MAGIC_runs"] - MAGIC_runs = np.genfromtxt(MAGIC_runs_and_dates,dtype=str,delimiter=',') - - train_fraction = float(config["general"]["proton_train_fraction"]) - - env_name = config["general"]["env_name"] - - #Here we slice the proton MC data into "train" and "test": - print("***** Splitting protons into 'train' and 'test' datasets...") - split_train_test(target_dir, train_fraction) - - print("***** Generating merge bashscripts...") - merge(target_dir, "0_subruns", MAGIC_runs, env_name) #generating the bash script to merge the subruns - merge(target_dir, "1_M1M2", MAGIC_runs, env_name) #generating the bash 
script to merge the M1 and M2 runs - merge(target_dir, "2_nights", MAGIC_runs, env_name) #generating the bash script to merge all runs per night - - print("***** Generating mergeMC bashscripts...") - mergeMC(target_dir, "protons", env_name) #generating the bash script to merge the files - mergeMC(target_dir, "gammadiffuse", env_name) #generating the bash script to merge the files - mergeMC(target_dir, "gammas", env_name) #generating the bash script to merge the files - mergeMC(target_dir, "protons_test", env_name) - - - print("***** Running merge_hdf_files.py in the MAGIC data files...") - print(f"Process name: merging_{target_dir.split('/')[-2:][1]}") - print(f"To check the jobs submitted to the cluster, type: squeue -n merging_{target_dir.split('/')[-2:][1]}") - - #Below we run the bash scripts to merge the MAGIC files - list_of_merging_scripts = np.sort(glob.glob("Merge_*.sh")) - - for n,run in enumerate(list_of_merging_scripts): - if n == 0: - launch_jobs = f"merging{n}=$(sbatch --parsable {run})" - else: - launch_jobs = f"{launch_jobs} && merging{n}=$(sbatch --parsable --dependency=afterany:$merging{n-1} {run})" - - #print(launch_jobs) - os.system(launch_jobs) - -if __name__ == "__main__": - main() - - - - - - - - diff --git a/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/setting_up_config_and_dir.py b/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/setting_up_config_and_dir.py deleted file mode 100644 index 7b1ba62d6..000000000 --- a/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/setting_up_config_and_dir.py +++ /dev/null @@ -1,467 +0,0 @@ -""" -This script facilitates the usage of other two scripts -of the MCP, i.e. "lst1_magic_mc_dl0_to_dl1.py" and -"magic_calib_to_dl1.py". This script is more like a -"manager" that organizes the analysis process by: -1) Creating the necessary directories and subdirectories. -2) Generatign all the bash script files that convert the -MAGIC and MC files from DL0 to DL1. 
-3) Launching these jobs in the IT container. - -Notice that in this stage we only use MAGIC + MC data. -No LST data is used here. - -Standard usage: -$ python setting_up_config_and_dir.py - -If you want to run only the MAGIC or only the MC conversion, -you can do as follows: - -Only MAGIC: -$ python setting_up_config_and_dir.py --analysis type onlyMAGIC - -Only MC: -$ python setting_up_config_and_dir.py --analysis type onlyMC - -""" - -import os -import numpy as np -import argparse -import glob -import yaml -from pathlib import Path - -def config_file_gen(ids, target_dir): - - """ - Here we create the configuration file needed for transforming DL0 into DL1 - """ - with open(f'{target_dir}/config_DL0_to_DL1.yaml','w') as f: - - #f.write(f"directories:\n target: {target_dir}\n\n") - lines_of_config_file = [ - "mc_tel_ids:", - f"\n LST-1: {ids[0]}", - f"\n LST-2: {ids[1]}", - f"\n LST-3: {ids[2]}", - f"\n LST-4: {ids[3]}", - f"\n MAGIC-I: {ids[4]}", - f"\n MAGIC-II: {ids[5]}", - "\n", - "\nLST:", - "\n image_extractor:", - '\n type: "LocalPeakWindowSum"', - "\n window_shift: 4", - "\n window_width: 8", - "\n", - "\n increase_nsb:", - "\n use: true", - "\n extra_noise_in_dim_pixels: 1.27", - "\n extra_bias_in_dim_pixels: 0.665", - "\n transition_charge: 8", - "\n extra_noise_in_bright_pixels: 2.08", - "\n", - "\n increase_psf:", - "\n use: false", - "\n fraction: null", - "\n", - "\n tailcuts_clean:", - "\n picture_thresh: 8", - "\n boundary_thresh: 4", - "\n keep_isolated_pixels: false", - "\n min_number_picture_neighbors: 2", - "\n", - "\n time_delta_cleaning:", - "\n use: true", - "\n min_number_neighbors: 1", - "\n time_limit: 2", - "\n", - "\n dynamic_cleaning:", - "\n use: true", - "\n threshold: 267", - "\n fraction: 0.03", - "\n", - "\n use_only_main_island: false", - "\n", - "\nMAGIC:", - "\n image_extractor:", - '\n type: "SlidingWindowMaxSum"', - "\n window_width: 5", - "\n apply_integration_correction: false", - "\n", - "\n charge_correction:", - "\n use: 
true", - "\n factor: 1.143", - "\n", - "\n magic_clean:", - "\n use_time: true", - "\n use_sum: true", - "\n picture_thresh: 6", - "\n boundary_thresh: 3.5", - "\n max_time_off: 4.5", - "\n max_time_diff: 1.5", - "\n find_hotpixels: true", - '\n pedestal_type: "from_extractor_rndm"', - "\n", - "\n muon_ring:", - "\n thr_low: 25", - "\n tailcut: [12, 8]", - "\n ring_completeness_threshold: 25", - "\n"] - - f.writelines(lines_of_config_file) - - - -def lists_and_bash_generator(particle_type, target_dir, MC_path, SimTel_version, focal_length, env_name): - - """ - This function creates the lists list_nodes_gamma_complete.txt and list_folder_gamma.txt with the MC file paths. - After that, it generates a few bash scripts to link the MC paths to each subdirectory. - These bash scripts will be called later in the main() function below. - """ - - process_name = target_dir.split("/")[-2:][1] - - list_of_nodes = glob.glob(f"{MC_path}/node*") - with open(f"{target_dir}/list_nodes_{particle_type}_complete.txt","w") as f:# creating list_nodes_gammas_complete.txt - for i in list_of_nodes: - f.write(f"{i}/output_{SimTel_version}\n") - - - - with open(f"{target_dir}/list_folder_{particle_type}.txt","w") as f:# creating list_folder_gammas.txt - for i in list_of_nodes: - f.write(f'{i.split("/")[-1]}\n') - - - - #################################################################################### - ############ bash scripts that link the MC paths to each subdirectory. 
- #################################################################################### - - with open(f"linking_MC_{particle_type}_paths.sh","w") as f: - lines_of_config_file = [ - "#!/bin/sh\n\n", - "#SBATCH -p short\n", - f"#SBATCH -J {process_name}\n\n", - "#SBATCH -N 1\n\n", - "ulimit -l unlimited\n", - "ulimit -s unlimited\n", - "ulimit -a\n\n", - "while read -r -u 3 lineA && read -r -u 4 lineB\n", - "do\n", - f" cd {target_dir}/DL1/MC/{particle_type}\n", - " mkdir $lineB\n", - " cd $lineA\n", - " ls -lR *.gz |wc -l\n", - f" ls *.gz > {target_dir}/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n", - ' string=$lineA"/"\n', - f" export file={target_dir}/DL1/MC/{particle_type}/$lineB/list_dl0.txt\n\n", - " cat $file | while read line; do echo $string${line}"+f" >>{target_dir}/DL1/MC/{particle_type}/$lineB/list_dl0_ok.txt; done\n\n", - ' echo "folder $lineB and node $lineA"\n', - f'done 3<"{target_dir}/list_nodes_{particle_type}_complete.txt" 4<"{target_dir}/list_folder_{particle_type}.txt"\n', - ""] - f.writelines(lines_of_config_file) - - - - ################################################################################################################ - ############################ bash script that applies lst1_magic_mc_dl0_to_dl1.py to all MC data files. 
- ################################################################################################################ - - number_of_nodes = glob.glob(f"{MC_path}/node*") - number_of_nodes = len(number_of_nodes) -1 - - with open(f"linking_MC_{particle_type}_paths_r.sh","w") as f: - lines_of_config_file = [ - '#!/bin/sh\n\n', - '#SBATCH -p xxl\n', - f'#SBATCH -J {process_name}\n', - f'#SBATCH --array=0-{number_of_nodes}%50\n', - '#SBATCH --mem=10g\n', - '#SBATCH -N 1\n\n', - 'ulimit -l unlimited\n', - 'ulimit -s unlimited\n', - 'ulimit -a\n', - f'cd {target_dir}/DL1/MC/{particle_type}\n\n', - f'export INF={target_dir}\n', - f'SAMPLE_LIST=($(<$INF/list_folder_{particle_type}.txt))\n', - 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n', - 'cd $SAMPLE\n\n', - f'export LOG={target_dir}/DL1/MC/{particle_type}'+'/simtel_{$SAMPLE}_all.log\n', - 'cat list_dl0_ok.txt | while read line\n', - 'do\n', - f' cd {target_dir}/../\n', - f' conda run -n {env_name} lst1_magic_mc_dl0_to_dl1 --input-file $line --output-dir {target_dir}/DL1/MC/{particle_type}/$SAMPLE --config-file {target_dir}/config_DL0_to_DL1.yaml --focal_length_choice {focal_length}>>$LOG 2>&1\n\n', - 'done\n', - ""] - f.writelines(lines_of_config_file) - - - - - -def lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, env_name): - - """ - Below we create a bash script that links the the MAGIC data paths to each subdirectory. 
- """ - - process_name = target_dir.split("/")[-2:][1] - - with open("linking_MAGIC_data_paths.sh","w") as f: - f.write('#!/bin/sh\n\n') - f.write('#SBATCH -p short\n') - f.write(f'#SBATCH -J {process_name}\n') - f.write('#SBATCH -N 1\n\n') - f.write('ulimit -l unlimited\n') - f.write('ulimit -s unlimited\n') - f.write('ulimit -a\n') - - if telescope_ids[-1] > 0: - for i in MAGIC_runs: - f.write(f'export IN1=/fefs/onsite/common/MAGIC/data/M2/event/Calibrated/{i[0].split("_")[0]}/{i[0].split("_")[1]}/{i[0].split("_")[2]}\n') - f.write(f'export OUT1={target_dir}/DL1/Observations/M2/{i[0]}/{i[1]}\n') - f.write(f'ls $IN1/*{i[1][-2:]}.*_Y_*.root > $OUT1/list_dl0.txt\n') - - f.write('\n') - if telescope_ids[-2] > 0: - for i in MAGIC_runs: - f.write(f'export IN1=/fefs/onsite/common/MAGIC/data/M1/event/Calibrated/{i[0].split("_")[0]}/{i[0].split("_")[1]}/{i[0].split("_")[2]}\n') - f.write(f'export OUT1={target_dir}/DL1/Observations/M1/{i[0]}/{i[1]}\n') - f.write(f'ls $IN1/*{i[1][-2:]}.*_Y_*.root > $OUT1/list_dl0.txt\n') - - - - if (telescope_ids[-2] > 0) or (telescope_ids[-1] > 0): - for i in MAGIC_runs: - if telescope_ids[-1] > 0: - - number_of_nodes = glob.glob(f'/fefs/onsite/common/MAGIC/data/M2/event/Calibrated/{i[0].split("_")[0]}/{i[0].split("_")[1]}/{i[0].split("_")[2]}/*{i[1]}.*_Y_*.root') - number_of_nodes = len(number_of_nodes) - 1 - - with open(f"MAGIC-II_dl0_to_dl1_run_{i[1]}.sh","w") as f: - lines_of_config_file = [ - '#!/bin/sh\n\n', - '#SBATCH -p long\n', - f'#SBATCH -J {process_name}\n', - f'#SBATCH --array=0-{number_of_nodes}\n', - '#SBATCH -N 1\n\n', - 'ulimit -l unlimited\n', - 'ulimit -s unlimited\n', - 'ulimit -a\n\n', - f'export OUTPUTDIR={target_dir}/DL1/Observations/M2/{i[0]}/{i[1]}\n', - f'cd {target_dir}/../\n', - 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', - 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', - 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', - f'conda run -n {env_name} magic_calib_to_dl1 --input-file 
$SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_DL0_to_DL1.yaml >$LOG 2>&1\n', - ""] - f.writelines(lines_of_config_file) - - - if telescope_ids[-2] > 0: - - number_of_nodes = glob.glob(f'/fefs/onsite/common/MAGIC/data/M1/event/Calibrated/{i[0].split("_")[0]}/{i[0].split("_")[1]}/{i[0].split("_")[2]}/*{i[1]}.*_Y_*.root') - number_of_nodes = len(number_of_nodes) - 1 - - with open(f"MAGIC-I_dl0_to_dl1_run_{i[1]}.sh","w") as f: - lines_of_config_file = [ - '#!/bin/sh\n\n', - '#SBATCH -p long\n', - f'#SBATCH -J {process_name}\n', - f'#SBATCH --array=0-{number_of_nodes}\n', - '#SBATCH -N 1\n\n', - 'ulimit -l unlimited\n', - 'ulimit -s unlimited\n', - 'ulimit -a\n\n', - f'export OUTPUTDIR={target_dir}/DL1/Observations/M1/{i[0]}/{i[1]}\n', - f'cd {target_dir}/../\n', - 'SAMPLE_LIST=($(<$OUTPUTDIR/list_dl0.txt))\n', - 'SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n\n', - 'export LOG=$OUTPUTDIR/real_0_1_task${SLURM_ARRAY_TASK_ID}.log\n', - f'conda run -n {env_name} magic_calib_to_dl1 --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_DL0_to_DL1.yaml >$LOG 2>&1\n', - ""] - f.writelines(lines_of_config_file) - - - -def directories_generator(target_dir, telescope_ids,MAGIC_runs): - - """ - Here we create all subdirectories for a given workspace and target name. - """ - - ########################################### - ##################### MC - ########################################### - - if not os.path.exists(target_dir): - os.mkdir(target_dir) - os.mkdir(f"{target_dir}/DL1") - os.mkdir(f"{target_dir}/DL1/Observations") - os.mkdir(f"{target_dir}/DL1/MC") - os.mkdir(f"{target_dir}/DL1/MC/gammas") - os.mkdir(f"{target_dir}/DL1/MC/gammadiffuse") - os.mkdir(f"{target_dir}/DL1/MC/electrons") - os.mkdir(f"{target_dir}/DL1/MC/protons") - os.mkdir(f"{target_dir}/DL1/MC/helium") - else: - overwrite = input(f'MC directory for {target_dir.split("/")[-1]} already exists. Would you like to overwrite it? 
[only "y" or "n"]: ') - if overwrite == "y": - os.system(f"rm -r {target_dir}") - os.mkdir(target_dir) - os.mkdir(f"{target_dir}/DL1") - os.mkdir(f"{target_dir}/DL1/Observations") - os.mkdir(f"{target_dir}/DL1/MC") - os.mkdir(f"{target_dir}/DL1/MC/gammas") - os.mkdir(f"{target_dir}/DL1/MC/gammadiffuse") - os.mkdir(f"{target_dir}/DL1/MC/electrons") - os.mkdir(f"{target_dir}/DL1/MC/protons") - os.mkdir(f"{target_dir}/DL1/MC/helium") - else: - print("Directory not modified.") - - - - ########################################### - ##################### MAGIC - ########################################### - - if telescope_ids[-1] > 0: - if not os.path.exists(f"{target_dir}/DL1/Observations/M2"): - os.mkdir(f"{target_dir}/DL1/Observations/M2") - for i in MAGIC_runs: - if not os.path.exists(f"{target_dir}/DL1/Observations/M2/{i[0]}"): - os.mkdir(f"{target_dir}/DL1/Observations/M2/{i[0]}") - os.mkdir(f"{target_dir}/DL1/Observations/M2/{i[0]}/{i[1]}") - else: - os.mkdir(f"{target_dir}/DL1/Observations/M2/{i[0]}/{i[1]}") - - if telescope_ids[-2] > 0: - if not os.path.exists(f"{target_dir}/DL1/Observations/M1"): - os.mkdir(f"{target_dir}/DL1/Observations/M1") - for i in MAGIC_runs: - if not os.path.exists(f"{target_dir}/DL1/Observations/M1/{i[0]}"): - os.mkdir(f"{target_dir}/DL1/Observations/M1/{i[0]}") - os.mkdir(f"{target_dir}/DL1/Observations/M1/{i[0]}/{i[1]}") - else: - os.mkdir(f"{target_dir}/DL1/Observations/M1/{i[0]}/{i[1]}") - - - - - -def main(): - - """ Here we read the config_general.yaml file and call the functions to generate the necessary directories, bash scripts and launching the jobs.""" - - parser = argparse.ArgumentParser() - - #Here we are simply collecting the parameters from the command line, as input file, output directory, and configuration file - parser.add_argument( - "--analysis-type", - "-t", - choices=['onlyMAGIC', 'onlyMC'], - dest="analysis_type", - type=str, - default="doEverything", - help="You can type 'onlyMAGIC' or 'onlyMC' to run this script 
only on MAGIC or MC data, respectively.", - ) - - parser.add_argument( - "--config-file", - "-c", - dest="config_file", - type=str, - default="./config_general.yaml", - help="Path to a configuration file", - ) - - - - args = parser.parse_args() - - - - with open( - args.config_file, "rb" - ) as f: # "rb" mode opens the file in binary format for reading - config = yaml.safe_load(f) - - - #Below we read the telescope IDs and runs - telescope_ids = list(config["mc_tel_ids"].values()) - SimTel_version = config["general"]["SimTel_version"] - MAGIC_runs_and_dates = config["general"]["MAGIC_runs"] - MAGIC_runs = np.genfromtxt(MAGIC_runs_and_dates,dtype=str,delimiter=',') #READ LIST OF DATES AND RUNS: format table in a way that each line looks like "2020_11_19,5093174" - focal_length = config["general"]["focal_length"] - - #Below we read the data paths - target_dir = f'{Path(config["directories"]["workspace_dir"])}/{config["directories"]["target_name"]}' - MC_gammas = str(Path(config["directories"]["MC_gammas"])) - #MC_electrons = str(Path(config["directories"]["MC_electrons"])) - #MC_helium = str(Path(config["directories"]["MC_helium"])) - MC_protons = str(Path(config["directories"]["MC_protons"])) - MC_gammadiff = str(Path(config["directories"]["MC_gammadiff"])) - - - env_name = config["general"]["env_name"] - - - print("***** Linking MC paths - this may take a few minutes ******") - print("*** Reducing DL0 to DL1 data - this can take many hours ***") - print("Process name: ",target_dir.split('/')[-2:][1]) - print("To check the jobs submitted to the cluster, type: squeue -n",target_dir.split('/')[-2:][1]) - - directories_generator(target_dir, telescope_ids, MAGIC_runs) #Here we create all the necessary directories in the given workspace and collect the main directory of the target - config_file_gen(telescope_ids,target_dir) - - #Below we run the analysis on the MC data - if (args.analysis_type=='onlyMC') or (args.analysis_type=='doEverything'): - 
lists_and_bash_generator("gammas", target_dir, MC_gammas, SimTel_version, focal_length, env_name) #gammas - #lists_and_bash_generator("electrons", target_dir, MC_electrons, SimTel_version, focal_length, env_name) #electrons - #lists_and_bash_generator("helium", target_dir, MC_helium, SimTel_version, focal_length, env_name) #helium - lists_and_bash_generator("protons", target_dir, MC_protons, SimTel_version, focal_length, env_name) #protons - lists_and_bash_generator("gammadiffuse", target_dir, MC_gammadiff, SimTel_version, focal_length, env_name) #gammadiffuse - - #Here we do the MC DL0 to DL1 conversion: - list_of_MC = glob.glob("linking_MC_*s.sh") - - #os.system("RES=$(sbatch --parsable linking_MC_gammas_paths.sh) && sbatch --dependency=afterok:$RES MC_dl0_to_dl1.sh") - - for n,run in enumerate(list_of_MC): - if n == 0: - launch_jobs_MC = f"linking{n}=$(sbatch --parsable {run}) && running{n}=$(sbatch --parsable --dependency=afterany:$linking{n} {run[0:-3]}_r.sh)" - else: - launch_jobs_MC = f"{launch_jobs_MC} && linking{n}=$(sbatch --parsable --dependency=afterany:$running{n-1} {run}) && running{n}=$(sbatch --parsable --dependency=afterany:$linking{n} {run[0:-3]}_r.sh)" - - - os.system(launch_jobs_MC) - - #Below we run the analysis on the MAGIC data - if (args.analysis_type=='onlyMAGIC') or (args.analysis_type=='doEverything'): - lists_and_bash_gen_MAGIC(target_dir, telescope_ids, MAGIC_runs, env_name) #MAGIC real data - if (telescope_ids[-2] > 0) or (telescope_ids[-1] > 0): - - list_of_MAGIC_runs = glob.glob("MAGIC-*.sh") - - for n,run in enumerate(list_of_MAGIC_runs): - if n == 0: - launch_jobs = f"linking=$(sbatch --parsable linking_MAGIC_data_paths.sh) && RES{n}=$(sbatch --parsable --dependency=afterany:$linking {run})" - else: - launch_jobs = f"{launch_jobs} && RES{n}=$(sbatch --parsable --dependency=afterany:$RES{n-1} {run})" - - os.system(launch_jobs) - -if __name__ == "__main__": - main() - - - - - - - diff --git 
a/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/stereo_events.py b/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/stereo_events.py deleted file mode 100644 index 3f531d8c7..000000000 --- a/magicctapipe/scripts/lst1_magic/IT_container_data_MC_bash_scripts/stereo_events.py +++ /dev/null @@ -1,199 +0,0 @@ -""" -This scripts generates and runs the bashscripts -to compute the stereo parameters of DL1 MC and -Coincident MAGIC+LST data files. - -Usage: -$ python stereo_events.py - -""" - -import os -import numpy as np -import glob -import yaml -import logging -from pathlib import Path -import argparse - -logger = logging.getLogger(__name__) -logger.addHandler(logging.StreamHandler()) -logger.setLevel(logging.INFO) - -def configfile_stereo(ids, target_dir): - - """ - This function creates the configuration file needed for the event stereo step - - Parameters - ---------- - ids: list - list of telescope IDs - target_dir: str - Path to the working directory - """ - - with open(f'{target_dir}/config_stereo.yaml','w') as f: - f.write(f"mc_tel_ids:\n LST-1: {ids[0]}\n LST-2: {ids[1]}\n LST-3: {ids[2]}\n LST-4: {ids[3]}\n MAGIC-I: {ids[4]}\n MAGIC-II: {ids[5]}\n\n") - f.write('stereo_reco:\n quality_cuts: "(intensity > 50) & (width > 0)"\n theta_uplim: "6 arcmin"\n') - - - -def bash_stereo(target_dir, env_name): - - """ - This function generates the bashscript for running the stereo analysis. 
- - Parameters - ---------- - target_dir: str - Path to the working directory - """ - - process_name = target_dir.split("/")[-2:][1] - - if not os.path.exists(f"{target_dir}/DL1/Observations/Coincident_stereo"): - os.mkdir(f"{target_dir}/DL1/Observations/Coincident_stereo") - - listOfNightsLST = np.sort(glob.glob(f"{target_dir}/DL1/Observations/Coincident/*")) - - for nightLST in listOfNightsLST: - stereoDir = f"{target_dir}/DL1/Observations/Coincident_stereo/{nightLST.split('/')[-1]}" - if not os.path.exists(stereoDir): - os.mkdir(stereoDir) - - os.system(f"ls {nightLST}/*LST*.h5 > {nightLST}/list_coin.txt") #generating a list with the DL1 coincident data files. - process_size = len(np.genfromtxt(f"{nightLST}/list_coin.txt",dtype="str")) - 1 - - with open(f"StereoEvents_{nightLST.split('/')[-1]}.sh","w") as f: - f.write("#!/bin/sh\n\n") - f.write("#SBATCH -p short\n") - f.write(f"#SBATCH -J {process_name}_stereo\n") - f.write(f"#SBATCH --array=0-{process_size}%100\n") - f.write("#SBATCH -N 1\n\n") - f.write("ulimit -l unlimited\n") - f.write("ulimit -s unlimited\n") - f.write("ulimit -a\n\n") - - f.write(f"export INPUTDIR={nightLST}\n") - f.write(f"export OUTPUTDIR={stereoDir}\n") - f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") - f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") - f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n {env_name} lst1_magic_stereo_reco --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") - - -def bash_stereoMC(target_dir, identification, env_name): - - """ - This function generates the bashscript for running the stereo analysis. - - Parameters - ---------- - target_dir: str - Path to the working directory - identification: str - Particle name. 
Options: protons, gammadiffuse - """ - - process_name = target_dir.split("/")[-2:][1] - - if not os.path.exists(f"{target_dir}/DL1/MC/{identification}/Merged/StereoMerged"): - os.mkdir(f"{target_dir}/DL1/MC/{identification}/Merged/StereoMerged") - - inputdir = f"{target_dir}/DL1/MC/{identification}/Merged" - - os.system(f"ls {inputdir}/dl1*.h5 > {inputdir}/list_coin.txt") #generating a list with the DL1 coincident data files. - process_size = len(np.genfromtxt(f"{inputdir}/list_coin.txt",dtype="str")) - 1 - - with open(f"StereoEvents_{identification}.sh","w") as f: - f.write("#!/bin/sh\n\n") - f.write("#SBATCH -p xxl\n") - f.write(f"#SBATCH -J {process_name}_stereo\n") - f.write(f"#SBATCH --array=0-{process_size}%100\n") - f.write('#SBATCH --mem=30g\n') - f.write("#SBATCH -N 1\n\n") - f.write("ulimit -l unlimited\n") - f.write("ulimit -s unlimited\n") - f.write("ulimit -a\n\n") - - f.write(f"export INPUTDIR={inputdir}\n") - f.write(f"export OUTPUTDIR={inputdir}/StereoMerged\n") - f.write("SAMPLE_LIST=($(<$INPUTDIR/list_coin.txt))\n") - f.write("SAMPLE=${SAMPLE_LIST[${SLURM_ARRAY_TASK_ID}]}\n") - f.write("export LOG=$OUTPUTDIR/stereo_${SLURM_ARRAY_TASK_ID}.log\n") - f.write(f"conda run -n {env_name} lst1_magic_stereo_reco --input-file $SAMPLE --output-dir $OUTPUTDIR --config-file {target_dir}/config_stereo.yaml >$LOG 2>&1") - - - - - - -def main(): - - """ - Here we read the config_general.yaml file and call the functions defined above. 
- """ - - parser = argparse.ArgumentParser() - parser.add_argument( - "--config-file", - "-c", - dest="config_file", - type=str, - default="./config_general.yaml", - help="Path to a configuration file", - ) - - args = parser.parse_args() - with open( - args.config_file, "rb" - ) as f: # "rb" mode opens the file in binary format for reading - config = yaml.safe_load(f) - - target_dir = f'{Path(config["directories"]["workspace_dir"])}/{config["directories"]["target_name"]}' - - - env_name = config["general"]["env_name"] - - telescope_ids = list(config["mc_tel_ids"].values()) - - print("***** Generating file config_stereo.yaml...") - print("***** This file can be found in ",target_dir) - configfile_stereo(telescope_ids, target_dir) - - print("***** Generating the bashscript...") - bash_stereo(target_dir, env_name) - - print("***** Generating the bashscript for MCs...") - bash_stereoMC(target_dir,"gammadiffuse", env_name) - bash_stereoMC(target_dir,"gammas", env_name) - bash_stereoMC(target_dir,"protons", env_name) - bash_stereoMC(target_dir,"protons_test", env_name) - - print("***** Submitting processes to the cluster...") - print(f"Process name: {target_dir.split('/')[-2:][1]}_stereo") - print(f"To check the jobs submitted to the cluster, type: squeue -n {target_dir.split('/')[-2:][1]}_stereo") - - #Below we run the bash scripts to find the stereo events - list_of_stereo_scripts = np.sort(glob.glob("StereoEvents_*.sh")) - - for n,run in enumerate(list_of_stereo_scripts): - if n == 0: - launch_jobs = f"stereo{n}=$(sbatch --parsable {run})" - else: - launch_jobs = f"{launch_jobs} && stereo{n}=$(sbatch --parsable --dependency=afterany:$stereo{n-1} {run})" - - #print(launch_jobs) - os.system(launch_jobs) - -if __name__ == "__main__": - main() - - - - - - - - From e0756c51f68d433102e9894a303ad63e41ce96e1 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 4 Oct 2023 15:46:11 +0200 Subject: [PATCH 50/76] Calibrate (MAGIC & LST) --- magicctapipe/image/__init__.py | 9 
+- magicctapipe/image/calib.py | 143 +----------------- .../lst1_magic/lst1_magic_mc_dl0_to_dl1.py | 8 +- 3 files changed, 14 insertions(+), 146 deletions(-) diff --git a/magicctapipe/image/__init__.py b/magicctapipe/image/__init__.py index 830acf510..8f80d6617 100644 --- a/magicctapipe/image/__init__.py +++ b/magicctapipe/image/__init__.py @@ -10,17 +10,14 @@ ) from .calib import ( - Calibrate_LST, - Calibrate_MAGIC + calibrate, ) __all__ = [ "MAGICClean", "PixelTreatment", "get_num_islands_MAGIC", + "calibrate", "clean_image_params", - "get_leakage", - "Calibrate_LST", - "Calibrate_MAGIC", - "Calibrate" + "get_leakage", ] diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index 26492aa26..19a14c260 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -13,143 +13,11 @@ __all__ = [ - "Calibrate_LST", "Calibrate_MAGIC", "Calibrate" + "Calibrate" ] -def Calibrate_LST(event, tel_id, obs_id, config_lst, camera_geoms, calibrator_lst): - """ - This function computes and returns some information for a single event of a telescope of LST type - - Parameters - ---------- - event: event - From an EventSource - tel_id: int - Telescope ID - obs_id: int - Observation ID - config_lst: dictionary - Parameters for image extraction and calibration - camera_geoms: telescope.camera.geometry - Camera geometry - calibrator_lst: CameraCalibrator (ctapipe.calib) - ctapipe object needed to calibrate the camera - - - Returns - ------- - signal_pixels: Mask of the pixels selected by the cleaning - image: Array of number of p.e. 
in the camera pixels - peak_time: Array of the signal peak time in the camera pixels - - """ - - calibrator_lst._calibrate_dl0(event, tel_id) - calibrator_lst._calibrate_dl1(event, tel_id) - - image = event.dl1.tel[tel_id].image.astype(np.float64) - peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - increase_nsb = config_lst["increase_nsb"].pop("use") - increase_psf = config_lst["increase_psf"]["use"] - use_time_delta_cleaning = config_lst["time_delta_cleaning"].pop("use") - use_dynamic_cleaning = config_lst["dynamic_cleaning"].pop("use") - use_only_main_island = config_lst["use_only_main_island"] - - if increase_nsb: - rng = np.random.default_rng(obs_id) - # Add extra noise in pixels - image = add_noise_in_pixels(rng, image, **config_lst["increase_nsb"]) - - if increase_psf: - set_numba_seed(obs_id) - # Smear the image - image = random_psf_smearer( - image=image, - fraction=config_lst["increase_psf"]["fraction"], - indices=camera_geoms[tel_id].neighbor_matrix_sparse.indices, - indptr=camera_geoms[tel_id].neighbor_matrix_sparse.indptr, - ) - - # Apply the image cleaning - signal_pixels = tailcuts_clean( - camera_geoms[tel_id], image, **config_lst["tailcuts_clean"] - ) - - if use_time_delta_cleaning: - signal_pixels = apply_time_delta_cleaning( - geom=camera_geoms[tel_id], - mask=signal_pixels, - arrival_times=peak_time, - **config_lst["time_delta_cleaning"], - ) - - if use_dynamic_cleaning: - signal_pixels = apply_dynamic_cleaning( - image, signal_pixels, **config_lst["dynamic_cleaning"] - ) - - if use_only_main_island: - _, island_labels = number_of_islands(camera_geoms[tel_id], signal_pixels) - n_pixels_on_island = np.bincount(island_labels.astype(np.int64)) - - # The first index means the pixels not surviving - # the cleaning, so should not be considered - n_pixels_on_island[0] = 0 - max_island_label = np.argmax(n_pixels_on_island) - signal_pixels[island_labels != max_island_label] = False - - return signal_pixels, image, peak_time - - -def 
Calibrate_MAGIC(event, tel_id, config_magic, magic_clean, calibrator_magic): - - """ - This function computes and returns some information for a single event of a telescope of MAGIC type - - Parameters - ---------- - event: event - From an EventSource - tel_id: int - telescope ID - config_magic: dictionary - Parameters for image extraction and calibration - magic_clean: dictionary (1 entry per MAGIC telescope) - Each entry is a MAGICClean object using the telescope camera geometry - calibrator_magic: CameraCalibrator (ctapipe.calib) - ctapipe object needed to calibrate the camera - - - Returns - ------- - signal_pixels: Mask of the pixels selected by the cleaning - image: Array of number of p.e. in the camera pixels - peak_time: Array of the signal peak time in the camera pixels - """ - - calibrator_magic._calibrate_dl0(event, tel_id) - calibrator_magic._calibrate_dl1(event, tel_id) - - image = event.dl1.tel[tel_id].image.astype(np.float64) - peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - use_charge_correction = config_magic["charge_correction"]["use"] - - if use_charge_correction: - # Scale the charges by the correction factor - image *= config_magic["charge_correction"]["factor"] - - # Apply the image cleaning - signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( - event_image=image, event_pulse_time=peak_time - ) - return signal_pixels, image, peak_time - - - - - -def Calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_geoms=None, magic_clean=None): +def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_geoms=None, magic_clean=None): """ This function computes and returns some information for a single event of a telescope @@ -187,7 +55,7 @@ def Calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g image = event.dl1.tel[tel_id].image.astype(np.float64) peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - if LST_bool==False: + if (LST_bool==False) 
and (magic_clean!=None): use_charge_correction = config["charge_correction"]["use"] if use_charge_correction: @@ -197,7 +65,7 @@ def Calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( event_image=image, event_pulse_time=peak_time ) - else: + elif (LST_bool==True) and (obs_id!=None) and (camera_geoms!=None): increase_nsb = config["increase_nsb"].pop("use") increase_psf = config["increase_psf"]["use"] use_time_delta_cleaning = config["time_delta_cleaning"].pop("use") @@ -247,4 +115,7 @@ def Calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g max_island_label = np.argmax(n_pixels_on_island) signal_pixels[island_labels != max_island_label] = False + else: + print("Check the provided parameters and the telescope type; calibration was not possible") + return return signal_pixels, image, peak_time diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index c802a3ecf..3b6e8b365 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -46,7 +46,7 @@ from ctapipe.io import EventSource, HDF5TableWriter from magicctapipe.image import MAGICClean -from magicctapipe.image.calib import Calibrate_LST, Calibrate_MAGIC +from magicctapipe.image.calib import calibrate from magicctapipe.io import SimEventInfoContainer, format_object from magicctapipe.utils import calculate_disp, calculate_impact from traitlets.config import Config @@ -230,12 +230,12 @@ def mc_dl0_to_dl1(input_file, output_dir, config, focal_length): for tel_id in tels_with_trigger: - if tel_id in LSTs_IDs: ##If the ID is in the LST list, we call Calibrate_LST() + if tel_id in LSTs_IDs: ##If the ID is in the LST list, we call calibrate on the LST() # Calibrate the LST-1 event - signal_pixels, image, peak_time = Calibrate_LST(event, tel_id, 
obs_id, config_lst, camera_geoms, calibrator_lst) + signal_pixels, image, peak_time = calibrate(event=event, tel_id=tel_id, obs_id=obs_id, config=config_lst, camera_geoms=camera_geoms, calibrator=calibrator_lst, LST_bool=True) elif tel_id in MAGICs_IDs: # Calibrate the MAGIC event - signal_pixels, image, peak_time = Calibrate_MAGIC(event, tel_id, config_magic, magic_clean, calibrator_magic) + signal_pixels, image, peak_time = calibrate(event=event, tel_id=tel_id, config=config_magic, magic_clean=magic_clean, calibrator=calibrator_magic, LST_bool=False) else: logger.info( f"--> Telescope ID {tel_id} not in LST list or MAGIC list. Please check if the IDs are OK in the configuration file" From ece33003287b298a7dde3c4a4d8ace439dc7aedc Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 4 Oct 2023 16:15:08 +0200 Subject: [PATCH 51/76] Minor fixes + calib Docstring --- magicctapipe/conftest.py | 11 ++--------- magicctapipe/image/__init__.py | 3 +-- magicctapipe/image/calib.py | 11 +++++++---- magicctapipe/io/gadf.py | 5 ++++- .../lst1_magic/lst1_magic_event_coincidence.py | 8 ++------ 5 files changed, 16 insertions(+), 22 deletions(-) diff --git a/magicctapipe/conftest.py b/magicctapipe/conftest.py index bf9539bf7..d9fe3db27 100644 --- a/magicctapipe/conftest.py +++ b/magicctapipe/conftest.py @@ -43,6 +43,7 @@ "20201216_M2_05093711.014_Y_CrabNebula-W0.40+035.root", ] DL1_LST_data = ["dl1_LST-1.Run03265.0094.h5"] + """ Temporary paths """ @@ -231,12 +232,10 @@ def temp_DL2_real_monly(tmp_path_factory): def temp_DL3_monly(tmp_path_factory): return tmp_path_factory.mktemp("DL3_monly") - """ Custom data """ - @pytest.fixture(scope="session") def dl2_test(temp_DL2_test): """ @@ -270,12 +269,10 @@ def pd_test(): df = pd.DataFrame(np.array([[1, 2], [3, 4], [5, 6]]), columns=["a", "b"]) return df - """ Remote paths (to download test files) """ - @pytest.fixture(scope="session") def base_url(): return "http://www.magic.iac.es/mcp-testdata" @@ -286,12 +283,10 @@ def 
env_prefix(): # ENVIRONMENT VARIABLES TO BE CREATED return "MAGIC_CTA_DATA_" - """ Downloads: files """ - @pytest.fixture(scope="session") def dl0_gamma(base_url, env_prefix): gamma_dl0 = [] @@ -382,7 +377,7 @@ def config(): @pytest.fixture(scope="session") def config_monly(): config_path = resource_file("test_config_monly.yaml") - return config_path + return config_path @pytest.fixture(scope="session") def config_gen(): @@ -391,12 +386,10 @@ def config_gen(): config = yaml.safe_load(f) return config - """ Data processing """ - @pytest.fixture(scope="session") def gamma_l1(temp_DL1_gamma, dl0_gamma, config): """ diff --git a/magicctapipe/image/__init__.py b/magicctapipe/image/__init__.py index 8f80d6617..f3a7e1859 100644 --- a/magicctapipe/image/__init__.py +++ b/magicctapipe/image/__init__.py @@ -4,15 +4,14 @@ get_num_islands_MAGIC, clean_image_params, ) - from .leakage import ( get_leakage, ) - from .calib import ( calibrate, ) + __all__ = [ "MAGICClean", "PixelTreatment", diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index 19a14c260..ebfffd0f4 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -20,7 +20,7 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_geoms=None, magic_clean=None): """ - This function computes and returns some information for a single event of a telescope + This function calibrates the camera image for a single event of a telescope Parameters ---------- @@ -44,9 +44,12 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g Returns ------- - signal_pixels: Mask of the pixels selected by the cleaning - image: Array of number of p.e. in the camera pixels - peak_time: Array of the signal peak time in the camera pixels + signal_pixels: boolean mask + Mask of the pixels selected by the cleaning + image: numpy array + Array of number of p.e. 
in the camera pixels + peak_time: numpy array + Array of the signal peak time in the camera pixels """ diff --git a/magicctapipe/io/gadf.py b/magicctapipe/io/gadf.py index 861550c9e..77b9664eb 100644 --- a/magicctapipe/io/gadf.py +++ b/magicctapipe/io/gadf.py @@ -2,7 +2,6 @@ # coding: utf-8 import logging - import numpy as np from astropy import units as u from astropy.coordinates import SkyCoord @@ -13,6 +12,7 @@ from magicctapipe.utils.functions import HEIGHT_ORM, LAT_ORM, LON_ORM from pyirf.binning import split_bin_lo_hi #from .io import telescope_combinations + __all__ = [ "create_gh_cuts_hdu", "create_event_hdu", @@ -27,6 +27,7 @@ # The MJD reference time MJDREF = Time(0, format="unix", scale="utc") + @u.quantity_input def create_gh_cuts_hdu( gh_cuts, reco_energy_bins: u.TeV, fov_offset_bins: u.deg, **header_cards @@ -86,6 +87,7 @@ def create_gh_cuts_hdu( return gh_cuts_hdu + def create_event_hdu( event_table, on_time, deadc, source_name, source_ra=None, source_dec=None ): @@ -230,6 +232,7 @@ def create_event_hdu( return event_hdu + def create_gti_hdu(event_table): """ Creates a fits binary table HDU for Good Time Interval (GTI). 
diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index 307bcdea8..359d9bd61 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -58,7 +58,6 @@ import time from decimal import Decimal from pathlib import Path - import numpy as np import pandas as pd import yaml @@ -128,9 +127,7 @@ def telescope_positions(config): for k, v in telescopes_in_use.items(): TEL_POSITIONS[k] = list(np.round(np.asarray(v)-average_xyz,2)) * u.m - return TEL_POSITIONS - - + return TEL_POSITIONS def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): @@ -187,7 +184,7 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): window_half_width = u.Quantity(window_half_width.round(), dtype=int) pre_offset_search = False if "pre_offset_search" in config_coinc: #looking for the boolean value of pre_offset_search in the configuration file - pre_offset_search = config_coinc["pre_offset_search"] + pre_offset_search = config_coinc["pre_offset_search"] if pre_offset_search: logger.info("\nPre offset search will be performed.") @@ -196,7 +193,6 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): logger.info("\noffset scan range defined in the config file will be used.") offset_start = u.Quantity(config_coinc["time_offset"]["start"]) offset_stop = u.Quantity(config_coinc["time_offset"]["stop"]) - event_data = pd.DataFrame() features = pd.DataFrame() From 69ca7c155615ac936f6a43330070413eb985c8df Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 4 Oct 2023 16:17:57 +0200 Subject: [PATCH 52/76] calib exception (+ minor fixes) --- magicctapipe/image/__init__.py | 8 ++------ magicctapipe/image/calib.py | 3 +-- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/magicctapipe/image/__init__.py b/magicctapipe/image/__init__.py index 
f3a7e1859..7d062fc80 100644 --- a/magicctapipe/image/__init__.py +++ b/magicctapipe/image/__init__.py @@ -4,12 +4,8 @@ get_num_islands_MAGIC, clean_image_params, ) -from .leakage import ( - get_leakage, -) -from .calib import ( - calibrate, -) +from .leakage import get_leakage +from .calib import calibrate __all__ = [ diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index ebfffd0f4..87414f7ec 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -119,6 +119,5 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g signal_pixels[island_labels != max_island_label] = False else: - print("Check the provided parameters and the telescope type; calibration was not possible") - return + raise ValueError("Check the provided parameters and the telescope type; calibration was not possible") return signal_pixels, image, peak_time From ee00aaaadb3b25f6758d73608f82ff98142565a9 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Wed, 4 Oct 2023 16:30:03 +0200 Subject: [PATCH 53/76] Bug --- magicctapipe/image/__init__.py | 2 +- magicctapipe/image/calib.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/magicctapipe/image/__init__.py b/magicctapipe/image/__init__.py index 7d062fc80..d7ec912f9 100644 --- a/magicctapipe/image/__init__.py +++ b/magicctapipe/image/__init__.py @@ -14,5 +14,5 @@ "get_num_islands_MAGIC", "calibrate", "clean_image_params", - "get_leakage", + "get_leakage", ] diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index 87414f7ec..a79b447a5 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -13,7 +13,7 @@ __all__ = [ - "Calibrate" + "calibrate" ] From 8c9a75cf465b25ee351137c01501f051ca8370a4 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin <121040436+Elisa-Visentin@users.noreply.github.com> Date: Thu, 5 Oct 2023 13:41:08 +0200 Subject: [PATCH 54/76] Update ci.yml --- .github/workflows/ci.yml | 8 +++++++- 1 file changed, 7 
insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 90ea27050..4ad50d7ef 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,6 +1,12 @@ name: CI -on: [push, pull_request] +on: + pull_request: + branches: + -master + push: + branches-ignore: + - master env: NUMBA_NUM_THREADS: 1 From b47ee022daa6df9277ef4ee65f571d2efce1595e Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Thu, 5 Oct 2023 15:50:31 +0200 Subject: [PATCH 55/76] Minor fixes + test tel. combinations --- magicctapipe/conftest.py | 12 +++++++- magicctapipe/io/tests/test_io.py | 11 ++++++++ .../resources/test_config_general_4LST.yaml | 28 +++++++++++++++++++ .../lst1_magic/lst1_magic_mc_dl0_to_dl1.py | 12 ++------ .../lst1_magic/lst1_magic_stereo_reco.py | 8 ++---- .../scripts/lst1_magic/magic_calib_to_dl1.py | 8 +----- setup.py | 8 +----- 7 files changed, 57 insertions(+), 30 deletions(-) create mode 100644 magicctapipe/resources/test_config_general_4LST.yaml diff --git a/magicctapipe/conftest.py b/magicctapipe/conftest.py index d9fe3db27..ae7a4073c 100644 --- a/magicctapipe/conftest.py +++ b/magicctapipe/conftest.py @@ -50,7 +50,7 @@ @pytest.fixture(scope="session") def temp_DL1_gamma(tmp_path_factory): - return tmp_path_factory.mktemp("DL1_gammas") + return tmp_path_factory.mktemp("DL1_gammas") @pytest.fixture(scope="session") @@ -379,6 +379,7 @@ def config_monly(): config_path = resource_file("test_config_monly.yaml") return config_path + @pytest.fixture(scope="session") def config_gen(): config_path = resource_file("test_config_general.yaml") @@ -386,6 +387,15 @@ def config_gen(): config = yaml.safe_load(f) return config + +@pytest.fixture(scope="session") +def config_gen_4lst(): + config_path = resource_file("test_config_general_4LST.yaml") + with open(config_path, "rb") as f: + config = yaml.safe_load(f) + return config + + """ Data processing """ diff --git a/magicctapipe/io/tests/test_io.py b/magicctapipe/io/tests/test_io.py 
index ce8cb05d8..abd793ffc 100644 --- a/magicctapipe/io/tests/test_io.py +++ b/magicctapipe/io/tests/test_io.py @@ -9,6 +9,7 @@ load_magic_dl1_data_files, load_lst_dl1_data_file, load_dl2_data_file, + telescope_combinations, ) import pytest @@ -16,6 +17,16 @@ import pandas as pd +def test_telescope_combinations(config_gen, config_gen_4lst): + """ + Simple check on telescope combinations + """ + M_LST, M_LST_comb = telescope_combinations(config_gen) + LSTs, LSTs_comb = telescope_combinations(config_gen_4lst) + assert M_LST == {1: 'LST-1', 2: 'MAGIC-I', 3: 'MAGIC-II'} + assert M_LST_comb == {'LST-1_MAGIC-I': [1, 2], 'LST-1_MAGIC-I_MAGIC-II': [1, 2, 3], 'LST-1_MAGIC-II': [1, 3], 'MAGIC-I_MAGIC-II': [2, 3]} + assert LSTs == {1: 'LST-1', 3: 'LST-2', 2: 'LST-3', 5: 'LST-4'} + assert LSTs_comb == {'LST-1_LST-2': [1, 3], 'LST-1_LST-2_LST-3': [1, 3, 2], 'LST-1_LST-2_LST-3_LST-4': [1, 3, 2, 5], 'LST-1_LST-2_LST-4': [1, 3, 5], 'LST-1_LST-3': [1, 2], 'LST-1_LST-3_LST-4': [1, 2, 5], 'LST-1_LST-4': [1, 5], 'LST-2_LST-3': [3, 2], 'LST-2_LST-3_LST-4': [3, 2, 5], 'LST-2_LST-4': [3, 5], 'LST-3_LST-4': [2, 5]} def test_format_object(): """ diff --git a/magicctapipe/resources/test_config_general_4LST.yaml b/magicctapipe/resources/test_config_general_4LST.yaml new file mode 100644 index 000000000..715b2fd4c --- /dev/null +++ b/magicctapipe/resources/test_config_general_4LST.yaml @@ -0,0 +1,28 @@ +mc_tel_ids: + LST-1: 1 + LST-2: 3 + LST-3: 2 + LST-4: 5 + MAGIC-I: 0 + MAGIC-II: 0 + +directories: + workspace_dir : "/fefs/aswg/workspace/raniere/" + target_name : "CrabTeste" + MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" + MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" + MC_helium : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Helium/sim_telarray/" + MC_protons : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/Protons/dec_2276/sim_telarray" + MC_gammadiff : 
"/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/GammaDiffuse/dec_2276/sim_telarray/" + +general: + target_RA_deg : 83.633083 #RA in degrees + target_Dec_deg : 22.0145 #Dec in degrees + SimTel_version : "v1.4" + LST_version : "v0.9" + focal_length : "effective" #effective #nominal + MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" + LST_runs : "LST_runs.txt" + proton_train_fraction : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest + env_name : magic-lst + diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index 3b6e8b365..e3dfc4320 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -14,15 +14,11 @@ relative to the center of the LST and MAGIC positions (including the altitude) for the convenience of the geometrical stereo reconstruction. -Usage per single data file (indicated if you want to do tests): +Usage: $ python lst1_magic_mc_dl0_to_dl1.py --input-file dl0/gamma_40deg_90deg_run1.simtel.gz (--output-dir dl1) (--config-file config_step1.yaml) - -Broader usage: -This script is called automatically from the script "setting_up_config_and_dir.py". -If you want to analyse a target, this is the way to go. See this other script for more details. 
""" import argparse #Parser for command-line options, arguments etc @@ -96,7 +92,7 @@ def mc_dl0_to_dl1(input_file, output_dir, config, focal_length): obs_id = event_source.obs_ids[0] subarray = event_source.subarray - tel_descriptions = subarray.tel + tel_descriptions = subarray.tel tel_positions = subarray.positions logger.info("\nSubarray description:") @@ -208,10 +204,8 @@ def mc_dl0_to_dl1(input_file, output_dir, config, focal_length): output_file = ( f"{output_dir}/dl1_{particle_type}_zd_{zenith.round(3)}deg_" f"az_{azimuth.round(3)}deg_{LSTs_in_use}_{MAGICs_in_use}_run{obs_id}.h5" - ) #The files are saved with the names of all telescopes involved + ) #The files are saved with the names of all telescopes involved - - # Loop over every shower event logger.info("\nProcessing the events...") diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py index 0330fb4c6..99b6c5d41 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py @@ -24,10 +24,6 @@ (--output-dir dl1_stereo) (--config-file config.yaml) (--magic-only) - -Broader usage: -This script is called automatically from the script "stereo_events.py". -If you want to analyse a target, this is the way to go. See this other script for more details. 
""" import argparse @@ -85,7 +81,7 @@ def calculate_pointing_separation(event_data, config): # Extract LST events df_lst = event_data.query(f"tel_id == {LSTs_IDs}") - # Extract the MAGIC events seen by also LST + # Extract the coincident events observed by both MAGICs and LSTs df_magic = event_data.query(f"tel_id == {MAGICs_IDs}") df_magic = df_magic.loc[df_lst.index] @@ -156,7 +152,7 @@ def stereo_reconstruction(input_file, output_dir, config, magic_only_analysis=Fa LSTs_IDs = np.asarray(list(assigned_tel_ids.values())[0:4]) if magic_only_analysis: - event_data.query(f"tel_id > {LSTs_IDs.max()}", inplace=True) # Here we select only the events with the MAGIC tel_ids, i.e. above the maximum tel_id of the LSTs + event_data.query(f"tel_id not in {LSTs_IDs}", inplace=True) # Here we select only the events with the MAGIC tel_ids, i.e. above the maximum tel_id of the LSTs logger.info(f"\nQuality cuts: {config_stereo['quality_cuts']}") event_data = get_stereo_events(event_data, config=config, quality_cuts=config_stereo["quality_cuts"]) diff --git a/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py b/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py index 2e575b28b..f23988281 100644 --- a/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py @@ -22,16 +22,12 @@ this script, but since the MaTaJu cleaning is not yet implemented in this pipeline, it applies the standard cleaning instead. -Usage per single data file (indicated if you want to do tests): +Usage: $ python magic_calib_to_dl1.py --input-file calib/20201216_M1_05093711.001_Y_CrabNebula-W0.40+035.root (--output-dir dl1) (--config-file config.yaml) (--process-run) - -Broader usage: -This script is called automatically from the script "setting_up_config_and_dir.py". -If you want to analyse a target, this is the way to go. See this other script for more details. """ import argparse @@ -379,8 +375,6 @@ def main(): help="Max. 
number of processed showers", ) - - parser.add_argument( "--process-run", dest="process_run", diff --git a/setup.py b/setup.py index 565f090e8..a9653b4cb 100644 --- a/setup.py +++ b/setup.py @@ -15,13 +15,7 @@ "lst1_magic_stereo_reco = magicctapipe.scripts.lst1_magic.lst1_magic_stereo_reco:main", "lst1_magic_train_rfs = magicctapipe.scripts.lst1_magic.lst1_magic_train_rfs:main", "magic_calib_to_dl1 = magicctapipe.scripts.lst1_magic.magic_calib_to_dl1:main", - "merge_hdf_files = magicctapipe.scripts.lst1_magic.merge_hdf_files:main", - "setting_up_config_and_dir = magicctapipe.scripts.lst1_magic.setting_up_config_and_dir:main", - "merging_runs_and_splitting_training_samples = magicctapipe.scripts.lst1_magic.merging_runs_and_splitting_training_samples:main", - "coincident_events = magicctapipe.scripts.lst1_magic.coincident_events:main", - "stereo_events = magicctapipe.scripts.lst1_magic.stereo_events:main", - - + "merge_hdf_files = magicctapipe.scripts.lst1_magic.merge_hdf_files:main", ] tests_require = ["pytest", "pandas>=0.24.0", "importlib_resources;python_version<'3.9'"] From 03d5cbbe2b5b7a0e4c277644bd1e4196bf166d17 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Thu, 5 Oct 2023 16:15:08 +0200 Subject: [PATCH 56/76] removed max_multiplicity in get_stereo --- magicctapipe/io/io.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index ab3f08795..75549539c 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -256,10 +256,9 @@ def get_stereo_events( if quality_cuts is not None: event_data_stereo.query(quality_cuts, inplace=True) - max_multiplicity=len(TEL_NAMES.keys()) # Extract stereo events event_data_stereo["multiplicity"] = event_data_stereo.groupby(group_index).size() - event_data_stereo.query(f"multiplicity >1 & multiplicity <= {max_multiplicity}", inplace=True) + event_data_stereo.query(f"multiplicity >1", inplace=True) if eval_multi_combo==True: # Check the total number of 
events n_events_total = len(event_data_stereo.groupby(group_index).size()) From 334ed6d446920053ca1d1234ac5198e0855b80f1 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Fri, 6 Oct 2023 09:29:53 +0200 Subject: [PATCH 57/76] tests --- magicctapipe/conftest.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/magicctapipe/conftest.py b/magicctapipe/conftest.py index ae7a4073c..5d86f4d33 100644 --- a/magicctapipe/conftest.py +++ b/magicctapipe/conftest.py @@ -540,6 +540,7 @@ def p_stereo(temp_DL1_p_train, temp_DL1_p_test, p_l1, config): f"-c{str(config)}", ] ) + print(len(temp_DL1_p_train.glob('*.h5'))) return (temp_DL1_p_train, temp_DL1_p_test) @@ -565,6 +566,7 @@ def p_stereo_monly( "--magic-only", ] ) + print(len(temp_DL1_p_train_monly.glob('*.h5'))) return (temp_DL1_p_train_monly, temp_DL1_p_test_monly) From bb44aff12283c65aa1703c5747912e3e3140c529 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Fri, 6 Oct 2023 10:50:29 +0200 Subject: [PATCH 58/76] minor fixes --- .github/workflows/ci.yml | 2 +- magicctapipe/io/io.py | 2 +- .../scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py | 7 ++----- magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py | 2 +- 4 files changed, 5 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4ad50d7ef..b178c0878 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -81,7 +81,7 @@ jobs: # activate our env again source $CONDA/etc/profile.d/conda.sh conda activate ci - coverage run -m pytest -v + coverage run -m pytest -v -s coverage xml - uses: codecov/codecov-action@v3 diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index 75549539c..c4f185d99 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -258,7 +258,7 @@ def get_stereo_events( # Extract stereo events event_data_stereo["multiplicity"] = event_data_stereo.groupby(group_index).size() - event_data_stereo.query(f"multiplicity >1", inplace=True) + event_data_stereo.query("multiplicity > 
1", inplace=True) if eval_multi_combo==True: # Check the total number of events n_events_total = len(event_data_stereo.groupby(group_index).size()) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index e3dfc4320..eda0daec3 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -217,11 +217,8 @@ def mc_dl0_to_dl1(input_file, output_dir, config, focal_length): tels_with_trigger = event.trigger.tels_with_trigger # Check if the event triggers both M1 and M2 or not - if((set(MAGICs_IDs).issubset(set(tels_with_trigger))) and (MAGICs_in_use=="MAGIC1_MAGIC2")): - magic_stereo = True #If both have trigger, then magic_stereo = True - else: - magic_stereo = False - + magic_stereo=(set(MAGICs_IDs).issubset(set(tels_with_trigger))) and (MAGICs_in_use=="MAGIC1_MAGIC2") #If both have trigger, then magic_stereo = True + for tel_id in tels_with_trigger: if tel_id in LSTs_IDs: ##If the ID is in the LST list, we call calibrate on the LST() diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py index 99b6c5d41..5f2de6672 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py @@ -163,7 +163,7 @@ def stereo_reconstruction(input_file, output_dir, config, magic_only_analysis=Fa Number_of_LSTs_in_use = len(LSTs_IDs[LSTs_IDs > 0]) MAGICs_IDs = np.asarray(list(assigned_tel_ids.values())[4:6]) Number_of_MAGICs_in_use = len(MAGICs_IDs[MAGICs_IDs > 0]) - if (Number_of_LSTs_in_use > 0) and (Number_of_MAGICs_in_use > 0): #If we use the two arrays, i.e. MAGIC and LST, then the "if" statement below will work (except for MC simulations) + if Number_of_LSTs_in_use*Number_of_MAGICs_in_use > 0: #If we use the two arrays, i.e. 
MAGIC and LST, then the "if" statement below will work (except for MC simulations) Two_arrays_are_used = True else: Two_arrays_are_used = False From db0f298109ba614574a7a5ce495e8a418ffb9383 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Fri, 6 Oct 2023 11:01:32 +0200 Subject: [PATCH 59/76] bug --- magicctapipe/image/calib.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index a79b447a5..f60b92ad1 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -58,6 +58,7 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g image = event.dl1.tel[tel_id].image.astype(np.float64) peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) + if (LST_bool==False) and (magic_clean!=None): use_charge_correction = config["charge_correction"]["use"] @@ -68,11 +69,14 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( event_image=image, event_pulse_time=peak_time ) - elif (LST_bool==True) and (obs_id!=None) and (camera_geoms!=None): - increase_nsb = config["increase_nsb"].pop("use") + elif (LST_bool==True) and (obs_id!=None) and (camera_geoms!=None): + nsb=config["increase_nsb"] + time_delta=config["time_delta_cleaning"] + dynamic=config["dynamic_cleaning"] + increase_nsb = nsb.pop("use") increase_psf = config["increase_psf"]["use"] - use_time_delta_cleaning = config["time_delta_cleaning"].pop("use") - use_dynamic_cleaning = config["dynamic_cleaning"].pop("use") + use_time_delta_cleaning = time_delta.pop("use") + use_dynamic_cleaning = dynamic.pop("use") use_only_main_island = config["use_only_main_island"] if increase_nsb: From 6f8223c3c56b1adc6161fa42c9fca22cedfc13d3 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Fri, 6 Oct 2023 11:27:09 +0200 Subject: [PATCH 60/76] Bug calib function --- .github/workflows/ci.yml | 2 +- 
magicctapipe/conftest.py | 2 -- magicctapipe/image/calib.py | 16 ++++++++-------- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b178c0878..7ee85c979 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -81,7 +81,7 @@ jobs: # activate our env again source $CONDA/etc/profile.d/conda.sh conda activate ci - coverage run -m pytest -v -s + coverage run -m pytest -v coverage xml - uses: codecov/codecov-action@v3 diff --git a/magicctapipe/conftest.py b/magicctapipe/conftest.py index 5d86f4d33..ae7a4073c 100644 --- a/magicctapipe/conftest.py +++ b/magicctapipe/conftest.py @@ -540,7 +540,6 @@ def p_stereo(temp_DL1_p_train, temp_DL1_p_test, p_l1, config): f"-c{str(config)}", ] ) - print(len(temp_DL1_p_train.glob('*.h5'))) return (temp_DL1_p_train, temp_DL1_p_test) @@ -566,7 +565,6 @@ def p_stereo_monly( "--magic-only", ] ) - print(len(temp_DL1_p_train_monly.glob('*.h5'))) return (temp_DL1_p_train_monly, temp_DL1_p_test_monly) diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index f60b92ad1..ab064b605 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -58,7 +58,7 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g image = event.dl1.tel[tel_id].image.astype(np.float64) peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - + if (LST_bool==False) and (magic_clean!=None): use_charge_correction = config["charge_correction"]["use"] @@ -69,14 +69,11 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( event_image=image, event_pulse_time=peak_time ) - elif (LST_bool==True) and (obs_id!=None) and (camera_geoms!=None): - nsb=config["increase_nsb"] - time_delta=config["time_delta_cleaning"] - dynamic=config["dynamic_cleaning"] - increase_nsb = nsb.pop("use") + elif (LST_bool==True) and (obs_id!=None) and 
(camera_geoms!=None): + increase_nsb = config["increase_nsb"].pop("use") increase_psf = config["increase_psf"]["use"] - use_time_delta_cleaning = time_delta.pop("use") - use_dynamic_cleaning = dynamic.pop("use") + use_time_delta_cleaning = config["time_delta_cleaning"].pop("use") + use_dynamic_cleaning = config["dynamic_cleaning"].pop("use") use_only_main_island = config["use_only_main_island"] if increase_nsb: @@ -121,6 +118,9 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g n_pixels_on_island[0] = 0 max_island_label = np.argmax(n_pixels_on_island) signal_pixels[island_labels != max_island_label] = False + config["increase_nsb"]["use"]=increase_nsb + config["time_delta_cleaning"]["use"]=use_time_delta_cleaning + config["dynamic_cleaning"]["use"]=use_dynamic_cleaning else: raise ValueError("Check the provided parameters and the telescope type; calibration was not possible") From d6ab37ff136b8e6c1951e85fdb06303bb313197d Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Fri, 6 Oct 2023 12:11:29 +0200 Subject: [PATCH 61/76] Bug magic-only --- magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py index 5f2de6672..79bc46345 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py @@ -152,7 +152,10 @@ def stereo_reconstruction(input_file, output_dir, config, magic_only_analysis=Fa LSTs_IDs = np.asarray(list(assigned_tel_ids.values())[0:4]) if magic_only_analysis: - event_data.query(f"tel_id not in {LSTs_IDs}", inplace=True) # Here we select only the events with the MAGIC tel_ids, i.e. 
above the maximum tel_id of the LSTs + tel_id=np.asarray(list(assigned_tel_ids.values())[:]) + used_id=tel_id[tel_id!=0] + magic_ids=[item for item in used_id if item not in LSTs_IDs] + event_data.query(f"tel_id in {magic_ids}", inplace=True) # Here we select only the events with the MAGIC tel_ids, i.e. above the maximum tel_id of the LSTs logger.info(f"\nQuality cuts: {config_stereo['quality_cuts']}") event_data = get_stereo_events(event_data, config=config, quality_cuts=config_stereo["quality_cuts"]) From 52d62222f6e916fe0162d74ab5630eec388f8d4c Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Fri, 6 Oct 2023 12:46:47 +0200 Subject: [PATCH 62/76] fixed test --- magicctapipe/io/tests/test_io.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/magicctapipe/io/tests/test_io.py b/magicctapipe/io/tests/test_io.py index abd793ffc..c44cc8b31 100644 --- a/magicctapipe/io/tests/test_io.py +++ b/magicctapipe/io/tests/test_io.py @@ -106,7 +106,7 @@ def test_load_train_data_files_g(gamma_stereo): """ events = load_train_data_files(str(gamma_stereo[0])) - assert list(events.keys()) == ["M1_M2", "LST1_M1", "LST1_M2", "LST1_M1_M2"] + assert list(events.keys()) == ["LST1_M1", "LST1_M2", "LST1_M1_M2"] data = events["LST1_M1"] assert np.all(data["combo_type"]) == 1 assert "off_axis" in data.columns From 03f6e982a42dc46b5a000f1c9b590606b54d8c68 Mon Sep 17 00:00:00 2001 From: ranieremenezes Date: Fri, 6 Oct 2023 13:04:30 +0200 Subject: [PATCH 63/76] updated config files --- .../resources/test_config_general.yaml | 19 +------------------ .../resources/test_config_general_4LST.yaml | 17 ----------------- 2 files changed, 1 insertion(+), 35 deletions(-) diff --git a/magicctapipe/resources/test_config_general.yaml b/magicctapipe/resources/test_config_general.yaml index 7e96f5ce4..0a15083c4 100644 --- a/magicctapipe/resources/test_config_general.yaml +++ b/magicctapipe/resources/test_config_general.yaml @@ -5,24 +5,7 @@ mc_tel_ids: LST-4: 0 MAGIC-I: 2 MAGIC-II: 3 - 
-directories: - workspace_dir : "/fefs/aswg/workspace/raniere/" - target_name : "CrabTeste" - MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" - MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" - MC_helium : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Helium/sim_telarray/" - MC_protons : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/Protons/dec_2276/sim_telarray" - MC_gammadiff : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/GammaDiffuse/dec_2276/sim_telarray/" -general: - target_RA_deg : 83.633083 #RA in degrees - target_Dec_deg : 22.0145 #Dec in degrees - SimTel_version : "v1.4" - LST_version : "v0.9" +general: focal_length : "effective" #effective #nominal - MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" - LST_runs : "LST_runs.txt" - proton_train_fraction : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest - env_name : magic-lst diff --git a/magicctapipe/resources/test_config_general_4LST.yaml b/magicctapipe/resources/test_config_general_4LST.yaml index 715b2fd4c..bdca64804 100644 --- a/magicctapipe/resources/test_config_general_4LST.yaml +++ b/magicctapipe/resources/test_config_general_4LST.yaml @@ -5,24 +5,7 @@ mc_tel_ids: LST-4: 5 MAGIC-I: 0 MAGIC-II: 0 - -directories: - workspace_dir : "/fefs/aswg/workspace/raniere/" - target_name : "CrabTeste" - MC_gammas : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/sim_telarray" - MC_electrons : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Electrons/sim_telarray/" - MC_helium : "/fefs/aswg/data/mc/DL0/LSTProd2/TestDataset/Helium/sim_telarray/" - MC_protons : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/Protons/dec_2276/sim_telarray" - MC_gammadiff : "/fefs/aswg/data/mc/DL0/LSTProd2/TrainingDataset/GammaDiffuse/dec_2276/sim_telarray/" general: - target_RA_deg : 83.633083 #RA in degrees - target_Dec_deg : 22.0145 #Dec in degrees - SimTel_version : "v1.4" - LST_version : 
"v0.9" focal_length : "effective" #effective #nominal - MAGIC_runs : "MAGIC_runs.txt" #If there is no MAGIC data, please fill this file with "0, 0" - LST_runs : "LST_runs.txt" - proton_train_fraction : 0.8 # 0.8 means that 80% of the DL1 protons will be used for training the Random Forest - env_name : magic-lst From 8de7c678c800334ca231281d01a47c925e9f44e8 Mon Sep 17 00:00:00 2001 From: ranieremenezes Date: Fri, 6 Oct 2023 13:23:50 +0200 Subject: [PATCH 64/76] Info on high level analysis in README --- magicctapipe/scripts/lst1_magic/README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/magicctapipe/scripts/lst1_magic/README.md b/magicctapipe/scripts/lst1_magic/README.md index 0f5f39b54..6259c02f5 100644 --- a/magicctapipe/scripts/lst1_magic/README.md +++ b/magicctapipe/scripts/lst1_magic/README.md @@ -31,3 +31,6 @@ MAGIC+LST analysis starts from MAGIC calibrated data (\_Y\_ files), LST DL1 data - `lst1_magic_create_irf.py` to create the IRF - `lst1_magic_dl2_to_dl3.py` to create DL3 files, and `create_dl3_index_files.py` to create DL3 HDU and index files +## High level analysis + +The folder [Notebooks](https://github.com/cta-observatory/magic-cta-pipe/tree/master/notebooks) contains Jupyter notebooks to perform checks on the IRF, to produce theta2 plots and SEDs. Note that the notebooks run with gammapy v0.20 or higher, therefore another conda environment is needed to run them, since the MAGIC+LST-1 pipeline at the moment depends on v0.19. From 183da71cce4e5b64fa0bf5262c73ffeaca39b5e5 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Fri, 6 Oct 2023 14:37:52 +0200 Subject: [PATCH 65/76] Tel. 
name exception --- .../scripts/lst1_magic/lst1_magic_event_coincidence.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index 359d9bd61..ebe711880 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -550,13 +550,14 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): # Create the subarray description with the telescope coordinates # relative to the center of the LST and MAGIC positions - tel_descriptions = {} - for k, v in TEL_NAMES.items(): + tel_descriptions = {} + for k, v in TEL_NAMES.items(): if v[:3] == "LST": tel_descriptions[k] = subarray_lst.tel[k] - else: + elif v[:5] == "MAGIC": tel_descriptions[k] = subarray_magic.tel[k] - + else: + raise Exception(f"{v} is not a valid telescope name (check the config file). 
Only MAGIC and LST telescopes can be analyzed --> Valid telescope names are LST-[1-4] and MAGIC-[I-II] ") subarray_lst_magic = SubarrayDescription( "LST-MAGIC-Array", TEL_POSITIONS, tel_descriptions From 7dce9bada2db956d1989fd533386e456fd2206ac Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Fri, 6 Oct 2023 16:37:46 +0200 Subject: [PATCH 66/76] calib: test + bug fix --- magicctapipe/conftest.py | 8 + magicctapipe/image/calib.py | 129 ++++--- magicctapipe/image/tests/test_calib.py | 352 ++++++++++++++++++ magicctapipe/resources/test_config_calib.yaml | 60 +++ 4 files changed, 488 insertions(+), 61 deletions(-) create mode 100644 magicctapipe/image/tests/test_calib.py create mode 100644 magicctapipe/resources/test_config_calib.yaml diff --git a/magicctapipe/conftest.py b/magicctapipe/conftest.py index ae7a4073c..df4a3ebc0 100644 --- a/magicctapipe/conftest.py +++ b/magicctapipe/conftest.py @@ -396,6 +396,14 @@ def config_gen_4lst(): return config +@pytest.fixture(scope="session") +def config_calib(): + config_path = resource_file("test_config_calib.yaml") + with open(config_path, "rb") as f: + config = yaml.safe_load(f) + return config + + """ Data processing """ diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index ab064b605..774e41757 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -4,12 +4,14 @@ number_of_islands, tailcuts_clean, ) +from ctapipe.instrument import CameraGeometry from lstchain.image.cleaning import apply_dynamic_cleaning from lstchain.image.modifier import ( add_noise_in_pixels, random_psf_smearer, set_numba_seed ) +from magicctapipe.image import MAGICClean __all__ = [ @@ -35,7 +37,7 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g LST_bool: bool Whether the telescope is a LST obs_id: int - Observation ID. Unsed in case of LSt telescope + Observation ID. Unsed in case of LST telescope camera_geoms: telescope.camera.geometry Camera geometry. 
Used in case of LST telescope magic_clean: dictionary (1 entry per MAGIC telescope) @@ -59,69 +61,74 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g image = event.dl1.tel[tel_id].image.astype(np.float64) peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - if (LST_bool==False) and (magic_clean!=None): - use_charge_correction = config["charge_correction"]["use"] - - if use_charge_correction: - # Scale the charges by the correction factor - image *= config["charge_correction"]["factor"] - # Apply the image cleaning - signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( - event_image=image, event_pulse_time=peak_time - ) - elif (LST_bool==True) and (obs_id!=None) and (camera_geoms!=None): - increase_nsb = config["increase_nsb"].pop("use") - increase_psf = config["increase_psf"]["use"] - use_time_delta_cleaning = config["time_delta_cleaning"].pop("use") - use_dynamic_cleaning = config["dynamic_cleaning"].pop("use") - use_only_main_island = config["use_only_main_island"] - - if increase_nsb: - rng = np.random.default_rng(obs_id) - # Add extra noise in pixels - image = add_noise_in_pixels(rng, image, **config["increase_nsb"]) - - if increase_psf: - set_numba_seed(obs_id) - # Smear the image - image = random_psf_smearer( - image=image, - fraction=config["increase_psf"]["fraction"], - indices=camera_geoms[tel_id].neighbor_matrix_sparse.indices, - indptr=camera_geoms[tel_id].neighbor_matrix_sparse.indptr, + if (LST_bool==False) and (magic_clean!=None): + if type(magic_clean[tel_id])==MAGICClean: + use_charge_correction = config["charge_correction"]["use"] + + if use_charge_correction: + # Scale the charges by the correction factor + image *= config["charge_correction"]["factor"] + # Apply the image cleaning + signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( + event_image=image, event_pulse_time=peak_time ) - - # Apply the image cleaning - signal_pixels = tailcuts_clean( - camera_geoms[tel_id], 
image, **config["tailcuts_clean"] - ) - - if use_time_delta_cleaning: - signal_pixels = apply_time_delta_cleaning( - geom=camera_geoms[tel_id], - mask=signal_pixels, - arrival_times=peak_time, - **config["time_delta_cleaning"], + else: + raise ValueError("Check the provided magic_clean parameter; calibration was not possible") + elif (LST_bool==True) and (obs_id!=None) and (camera_geoms!=None): + if type(camera_geoms[tel_id])==CameraGeometry: + increase_nsb = config["increase_nsb"].pop("use") + increase_psf = config["increase_psf"]["use"] + use_time_delta_cleaning = config["time_delta_cleaning"].pop("use") + use_dynamic_cleaning = config["dynamic_cleaning"].pop("use") + use_only_main_island = config["use_only_main_island"] + + if increase_nsb: + rng = np.random.default_rng(obs_id) + # Add extra noise in pixels + image = add_noise_in_pixels(rng, image, **config["increase_nsb"]) + + if increase_psf: + set_numba_seed(obs_id) + # Smear the image + image = random_psf_smearer( + image=image, + fraction=config["increase_psf"]["fraction"], + indices=camera_geoms[tel_id].neighbor_matrix_sparse.indices, + indptr=camera_geoms[tel_id].neighbor_matrix_sparse.indptr, + ) + + # Apply the image cleaning + signal_pixels = tailcuts_clean( + camera_geoms[tel_id], image, **config["tailcuts_clean"] ) - if use_dynamic_cleaning: - signal_pixels = apply_dynamic_cleaning( - image, signal_pixels, **config["dynamic_cleaning"] - ) - - if use_only_main_island: - _, island_labels = number_of_islands(camera_geoms[tel_id], signal_pixels) - n_pixels_on_island = np.bincount(island_labels.astype(np.int64)) - - # The first index means the pixels not surviving - # the cleaning, so should not be considered - n_pixels_on_island[0] = 0 - max_island_label = np.argmax(n_pixels_on_island) - signal_pixels[island_labels != max_island_label] = False - config["increase_nsb"]["use"]=increase_nsb - config["time_delta_cleaning"]["use"]=use_time_delta_cleaning - 
config["dynamic_cleaning"]["use"]=use_dynamic_cleaning - + if use_time_delta_cleaning: + signal_pixels = apply_time_delta_cleaning( + geom=camera_geoms[tel_id], + mask=signal_pixels, + arrival_times=peak_time, + **config["time_delta_cleaning"], + ) + + if use_dynamic_cleaning: + signal_pixels = apply_dynamic_cleaning( + image, signal_pixels, **config["dynamic_cleaning"] + ) + + if use_only_main_island: + _, island_labels = number_of_islands(camera_geoms[tel_id], signal_pixels) + n_pixels_on_island = np.bincount(island_labels.astype(np.int64)) + + # The first index means the pixels not surviving + # the cleaning, so should not be considered + n_pixels_on_island[0] = 0 + max_island_label = np.argmax(n_pixels_on_island) + signal_pixels[island_labels != max_island_label] = False + config["increase_nsb"]["use"]=increase_nsb + config["time_delta_cleaning"]["use"]=use_time_delta_cleaning + config["dynamic_cleaning"]["use"]=use_dynamic_cleaning + else: + raise ValueError("Check the provided camera_geoms parameter; calibration was not possible") else: raise ValueError("Check the provided parameters and the telescope type; calibration was not possible") return signal_pixels, image, peak_time diff --git a/magicctapipe/image/tests/test_calib.py b/magicctapipe/image/tests/test_calib.py new file mode 100644 index 000000000..c93d5d34a --- /dev/null +++ b/magicctapipe/image/tests/test_calib.py @@ -0,0 +1,352 @@ +from magicctapipe.image.calib import calibrate +import pytest + + + +from ctapipe.calib import CameraCalibrator + +from ctapipe.io import EventSource + +from magicctapipe.image import MAGICClean +from traitlets.config import Config + + +@pytest.fixture(scope="session") +def tel_id_LST(): + return 1 + + +@pytest.fixture(scope="session") +def tel_id_MAGIC(): + return 2 + + +def test_calibrate_LST(dl0_gamma, config_calib, tel_id_LST): + + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + 
focal_length_choice='effective' + ) + + + obs_id = event_source.obs_ids[0] + + subarray = event_source.subarray + + tel_descriptions = subarray.tel + camera_geoms={} + + for tel_id, telescope in tel_descriptions.items(): + camera_geoms[tel_id]= telescope.camera.geometry + + config_lst = config_calib["LST"] + + extractor_type_lst = config_lst["image_extractor"].pop("type") + config_extractor_lst = {extractor_type_lst: config_lst["image_extractor"]} + + calibrator_lst = CameraCalibrator( + image_extractor_type=extractor_type_lst, + config=Config(config_extractor_lst), + subarray=subarray, + ) + + LST_bool=True + for event in event_source: + if (event.count <200) and (tel_id_LST in event.trigger.tels_with_trigger): + signal_pixels, image, peak_time = calibrate( + event=event, + tel_id=tel_id_LST, + obs_id=obs_id, + config=config_lst, + camera_geoms=camera_geoms, + calibrator=calibrator_lst, + LST_bool=LST_bool + ) + + assert len(signal_pixels)==1855 + assert signal_pixels.dtype==bool + assert len(image)==1855 + assert len(peak_time)==1855 + + config_lst["image_extractor"]["type"]=extractor_type_lst + + +def test_calibrate_MAGIC(dl0_gamma, config_calib, tel_id_MAGIC): + + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + focal_length_choice='effective' + ) + + + + subarray = event_source.subarray + + + tel_descriptions = subarray.tel + camera_geoms={} + + for tel_id, telescope in tel_descriptions.items(): + camera_geoms[tel_id]= telescope.camera.geometry + + + config_magic = config_calib["MAGIC"] + config_magic["magic_clean"].update({"find_hotpixels": False}) + + extractor_type_magic = config_magic["image_extractor"].pop("type") + config_extractor_magic = {extractor_type_magic: config_magic["image_extractor"]} + magic_clean = {} + for k in [1,2]: + + magic_clean[k] = MAGICClean(camera_geoms[k], config_magic["magic_clean"]) + calibrator_magic = CameraCalibrator( + 
image_extractor_type=extractor_type_magic, + config=Config(config_extractor_magic), + subarray=subarray, + ) + LST_bool=False + for event in event_source: + if (event.count <200) and (tel_id_MAGIC in event.trigger.tels_with_trigger): + signal_pixels, image, peak_time = calibrate( + event=event, + tel_id=tel_id_MAGIC, + config=config_magic, + magic_clean=magic_clean, + calibrator=calibrator_magic, + LST_bool=LST_bool + ) + + assert len(signal_pixels)==1039 + assert signal_pixels.dtype==bool + assert len(image)==1039 + assert len(peak_time)==1039 + + config_magic["image_extractor"]["type"]=extractor_type_magic + + +def test_calibrate_exc_1(dl0_gamma, config_calib, tel_id_MAGIC): + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + focal_length_choice='effective' + ) + subarray = event_source.subarray + config_magic = config_calib["MAGIC"] + config_magic["magic_clean"].update({"find_hotpixels": False}) + extractor_type_magic = config_magic["image_extractor"].pop("type") + config_extractor_magic = {extractor_type_magic: config_magic["image_extractor"]} + calibrator_magic = CameraCalibrator( + image_extractor_type=extractor_type_magic, + config=Config(config_extractor_magic), + subarray=subarray, + ) + LST_bool=False + for event in event_source: + if (event.count <200) and (tel_id_MAGIC in event.trigger.tels_with_trigger): + with pytest.raises( + ValueError, + match="Check the provided parameters and the telescope type; calibration was not possible", + ): + _,_,_ = calibrate( + event=event, + tel_id=tel_id_MAGIC, + config=config_magic, + calibrator=calibrator_magic, + LST_bool=LST_bool + ) + config_magic["image_extractor"]["type"]=extractor_type_magic + + +def test_calibrate_exc_2(dl0_gamma, config_calib, tel_id_LST): + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + focal_length_choice='effective' 
+ ) + + + + + subarray = event_source.subarray + + tel_descriptions = subarray.tel + camera_geoms={} + + for tel_id, telescope in tel_descriptions.items(): + camera_geoms[tel_id]= telescope.camera.geometry + + config_lst = config_calib["LST"] + + extractor_type_lst = config_lst["image_extractor"].pop("type") + config_extractor_lst = {extractor_type_lst: config_lst["image_extractor"]} + + calibrator_lst = CameraCalibrator( + image_extractor_type=extractor_type_lst, + config=Config(config_extractor_lst), + subarray=subarray, + ) + + LST_bool=True + for event in event_source: + if (event.count <200) and (tel_id_LST in event.trigger.tels_with_trigger): + with pytest.raises( + ValueError, + match="Check the provided parameters and the telescope type; calibration was not possible", + ): + _,_,_ = calibrate( + event=event, + tel_id=tel_id_LST, + config=config_lst, + camera_geoms=camera_geoms, + calibrator=calibrator_lst, + LST_bool=LST_bool + ) + config_lst["image_extractor"]["type"]=extractor_type_lst + + +def test_calibrate_exc_3(dl0_gamma, config_calib, tel_id_LST): + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + focal_length_choice='effective' + ) + + + obs_id = event_source.obs_ids[0] + + subarray = event_source.subarray + + + + config_lst = config_calib["LST"] + + extractor_type_lst = config_lst["image_extractor"].pop("type") + config_extractor_lst = {extractor_type_lst: config_lst["image_extractor"]} + + calibrator_lst = CameraCalibrator( + image_extractor_type=extractor_type_lst, + config=Config(config_extractor_lst), + subarray=subarray, + ) + + LST_bool=True + for event in event_source: + if (event.count <200) and (tel_id_LST in event.trigger.tels_with_trigger): + with pytest.raises( + ValueError, + match="Check the provided parameters and the telescope type; calibration was not possible", + ): + signal_pixels, image, peak_time = calibrate( + event=event, + 
tel_id=tel_id_LST, + obs_id=obs_id, + config=config_lst, + calibrator=calibrator_lst, + LST_bool=LST_bool + ) + config_lst["image_extractor"]["type"]=extractor_type_lst + + +def test_calibrate_exc_4(dl0_gamma, config_calib, tel_id_MAGIC): + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + focal_length_choice='effective' + ) + subarray = event_source.subarray + tel_descriptions = subarray.tel + magic_clean={} + + for tel_id in range(len(tel_descriptions.items())): + magic_clean[tel_id]= f"camera {tel_id}" + config_magic = config_calib["MAGIC"] + config_magic["magic_clean"].update({"find_hotpixels": False}) + extractor_type_magic = config_magic["image_extractor"].pop("type") + config_extractor_magic = {extractor_type_magic: config_magic["image_extractor"]} + calibrator_magic = CameraCalibrator( + image_extractor_type=extractor_type_magic, + config=Config(config_extractor_magic), + subarray=subarray, + ) + + LST_bool=False + for event in event_source: + if (event.count <200) and (tel_id_MAGIC in event.trigger.tels_with_trigger): + with pytest.raises( + ValueError, + match="Check the provided magic_clean parameter; calibration was not possible", + ): + _,_,_ = calibrate( + event=event, + tel_id=tel_id_MAGIC, + config=config_magic, + calibrator=calibrator_magic, + magic_clean=magic_clean, + LST_bool=LST_bool + ) + config_magic["image_extractor"]["type"]=extractor_type_magic + + +def test_calibrate_exc_5(dl0_gamma, config_calib, tel_id_LST): + assigned_tel_ids = [1,2,3] + for input_file in dl0_gamma: + event_source = EventSource( + input_file, + allowed_tels=assigned_tel_ids, + focal_length_choice='effective' + ) + + + obs_id = event_source.obs_ids[0] + + subarray = event_source.subarray + + tel_descriptions = subarray.tel + camera_geoms={} + + for tel_id in range(len(tel_descriptions.items())): + camera_geoms[tel_id]= f"camera {tel_id}" + + config_lst = config_calib["LST"] + + 
extractor_type_lst = config_lst["image_extractor"].pop("type") + config_extractor_lst = {extractor_type_lst: config_lst["image_extractor"]} + + calibrator_lst = CameraCalibrator( + image_extractor_type=extractor_type_lst, + config=Config(config_extractor_lst), + subarray=subarray, + ) + + LST_bool=True + for event in event_source: + if (event.count <200) and (tel_id_LST in event.trigger.tels_with_trigger): + with pytest.raises( + ValueError, + match="Check the provided camera_geoms parameter; calibration was not possible", + ): + _,_,_ = calibrate( + event=event, + tel_id=tel_id_LST, + obs_id=obs_id, + config=config_lst, + camera_geoms=camera_geoms, + calibrator=calibrator_lst, + LST_bool=LST_bool, + ) + config_lst["image_extractor"]["type"]=extractor_type_lst + diff --git a/magicctapipe/resources/test_config_calib.yaml b/magicctapipe/resources/test_config_calib.yaml new file mode 100644 index 000000000..22d8b3952 --- /dev/null +++ b/magicctapipe/resources/test_config_calib.yaml @@ -0,0 +1,60 @@ +LST: + image_extractor: + type: "LocalPeakWindowSum" + window_shift: 4 + window_width: 8 + + increase_nsb: + use: true + extra_noise_in_dim_pixels: 1.27 + extra_bias_in_dim_pixels: 0.665 + transition_charge: 8 + extra_noise_in_bright_pixels: 2.08 + + increase_psf: + use: false + fraction: null + + tailcuts_clean: + picture_thresh: 8 + boundary_thresh: 4 + keep_isolated_pixels: false + min_number_picture_neighbors: 2 + + time_delta_cleaning: + use: true + min_number_neighbors: 1 + time_limit: 2 + + dynamic_cleaning: + use: true + threshold: 267 + fraction: 0.03 + + use_only_main_island: false + + +MAGIC: + image_extractor: + type: "SlidingWindowMaxSum" + window_width: 5 + apply_integration_correction: false + + charge_correction: + use: true + factor: 1.143 + + magic_clean: + use_time: true + use_sum: true + picture_thresh: 6 + boundary_thresh: 3.5 + max_time_off: 4.5 + max_time_diff: 1.5 + find_hotpixels: true + pedestal_type: "from_extractor_rndm" + + muon_ring: + 
thr_low: 25 + tailcut: [12, 8] + ring_completeness_threshold: 25 \ No newline at end of file From 15376a50de0c9fb3873f165628302474d6eed41a Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Sat, 7 Oct 2023 20:14:11 +0200 Subject: [PATCH 67/76] tel_positions --- .../lst1_magic_event_coincidence.py | 21 +++++++------------ 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index ebe711880..eb55e0778 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -109,24 +109,20 @@ def telescope_positions(config): "MAGIC-I" : [-23.540, -191.750, 41.25], "MAGIC-II" : [-94.05, -143.770, 42.42] } - + telescopes_in_use = {} - x = np.asarray([]) - y = np.asarray([]) - z = np.asarray([]) - for k, v in config["mc_tel_ids"].items(): - if v > 0: + tels=config["mc_tel_ids"] + tel_cp=tels.copy() + for k, v in tel_cp.copy().items(): + if v <= 0: + tel_cp.pop(k) + else: telescopes_in_use[v] = RELATIVE_POSITIONS[k] - x = np.append(x,RELATIVE_POSITIONS[k][0]) - y = np.append(y,RELATIVE_POSITIONS[k][1]) - z = np.append(z,RELATIVE_POSITIONS[k][2]) - - average_xyz = np.asarray([np.mean(x), np.mean(y), np.mean(z)]) + average_xyz=np.array([RELATIVE_POSITIONS[k] for k in tel_cp.keys()]).mean(axis=0) TEL_POSITIONS = {} for k, v in telescopes_in_use.items(): TEL_POSITIONS[k] = list(np.round(np.asarray(v)-average_xyz,2)) * u.m - return TEL_POSITIONS @@ -152,7 +148,6 @@ def event_coincidence(input_file_lst, input_dir_magic, output_dir, config): TEL_NAMES, _ = telescope_combinations(config) TEL_POSITIONS = telescope_positions(config) - # Load the input LST DL1 data file logger.info(f"\nInput LST DL1 data file: {input_file_lst}") From ff8ee4a9842691f185a8f1fd9cf6a57505af02a0 Mon Sep 17 00:00:00 2001 From: ranieremenezes Date: Mon, 9 Oct 2023 14:52:04 +0200 Subject: 
[PATCH 68/76] Check if the input telescope list is fine --- .../scripts/lst1_magic/lst1_magic_event_coincidence.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index eb55e0778..65b1ce24a 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -115,9 +115,14 @@ def telescope_positions(config): tel_cp=tels.copy() for k, v in tel_cp.copy().items(): if v <= 0: + # Here we remove the telescopes with ID (i.e. "v") <= 0 from the dictionary: tel_cp.pop(k) else: - telescopes_in_use[v] = RELATIVE_POSITIONS[k] + # Here we check if the telescopes "k" listed in the configuration file are indeed LSTs or MAGICs: + if k in RELATIVE_POSITIONS.keys(): + telescopes_in_use[v] = RELATIVE_POSITIONS[k] + else: + raise Exception(f"Telescope {k} not allowed in analysis. 
The telescopes allowed are LST-1, LST-2, LST-3, LST-4, MAGIC-I, and MAGIC-II.") average_xyz=np.array([RELATIVE_POSITIONS[k] for k in tel_cp.keys()]).mean(axis=0) TEL_POSITIONS = {} From b60476815de884e8aad4d770366b1121f0a3d31a Mon Sep 17 00:00:00 2001 From: ranieremenezes Date: Mon, 9 Oct 2023 16:21:43 +0200 Subject: [PATCH 69/76] New function to check telescope list --- magicctapipe/io/__init__.py | 2 + magicctapipe/io/io.py | 65 +++++++++++++++++++ .../lst1_magic_event_coincidence.py | 4 ++ .../lst1_magic/lst1_magic_mc_dl0_to_dl1.py | 5 +- .../lst1_magic/lst1_magic_stereo_reco.py | 5 +- .../scripts/lst1_magic/magic_calib_to_dl1.py | 5 +- 6 files changed, 83 insertions(+), 3 deletions(-) diff --git a/magicctapipe/io/__init__.py b/magicctapipe/io/__init__.py index 0b44e3700..4931f1717 100644 --- a/magicctapipe/io/__init__.py +++ b/magicctapipe/io/__init__.py @@ -7,6 +7,7 @@ SimEventInfoContainer, ) from .io import ( + check_input_list, format_object, get_dl2_mean, get_stereo_events, @@ -33,6 +34,7 @@ "BaseEventInfoContainer", "RealEventInfoContainer", "SimEventInfoContainer", + "check_input_list", "create_event_hdu", "create_gh_cuts_hdu", "create_gti_hdu", diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index c4f185d99..8c4fdffd5 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -23,6 +23,7 @@ from pyirf.utils import calculate_source_fov_offset, calculate_theta __all__ = [ + "check_input_list", "format_object", "get_dl2_mean", "get_stereo_events", @@ -58,6 +59,70 @@ DEAD_TIME_LST = 7.6 * u.us DEAD_TIME_MAGIC = 26 * u.us +def check_input_list(config): + """ + This function checks if the input telescope list is organized as follows: + 1) All 4 LSTs and 2 MAGICs must be listed + 2) All 4 LSTs must come before the MAGICs + And it rises an exception in case these rules are not satisfied. 
+ + Below we give two examples of valid lists: + i) + mc_tel_ids: + LST-1: 1 + LST-2: 0 + LST-3: 0 + LST-4: 0 + MAGIC-I: 2 + MAGIC-II: 3 + ii) + mc_tel_ids: + LST-4: 1 + LST-2: 7 + LST-3: 9 + LST-1: 0 + MAGIC-II: 2 + MAGIC-I: 3 + + And here one example of an unvalid list: + iii) + mc_tel_ids: + LST-4: 1 + LST-1: 0 + MAGIC-II: 2 + LST-3: 9 + MAGIC-I: 3 + + Parameters + ---------- + config: dict + dictionary imported from the yaml configuration file with information about the telescope IDs. + + Returns + ------- + This function will rise an exception if the input list is not properly organized. + """ + + list_of_tel_names = list(config["mc_tel_ids"].keys()) + standard_list_of_tels = ["LST-1", "LST-2", "LST-3", "LST-4", "MAGIC-I", "MAGIC-II"] + + if len(list_of_tel_names) != 6: + raise Exception(f"Number of telescopes found in the configuration file is {len(list_of_tel_names)}. It must be 6, i.e.: LST-1, LST-2, LST-3, LST-4, MAGIC-I, and MAGIC-II.") + else: + for tel_name in list_of_tel_names[0:4]: + if tel_name in standard_list_of_tels[0:4]: + pass + else: + raise Exception(f"Entry '{tel_name}' not accepted as an LST. Please make sure that the first four telescopes are LSTs, e.g.: 'LST-1', 'LST-2', 'LST-3', and 'LST-4'") + + for tel_name in list_of_tel_names[4:6]: + if tel_name in standard_list_of_tels[4:6]: + pass + else: + raise Exception(f"Entry '{tel_name}' not accepted as a MAGIC. Please make sure that the last two telescopes are MAGICs, e.g.: 'MAGIC-I', and 'MAGIC-II'") + return + + def telescope_combinations(config): """ Generates all possible telescope combinations without repetition. E.g.: "LST1_M1", "LST2_LST4_M2", "LST1_LST2_LST3_M1" and so on. 
diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index 65b1ce24a..7d962dcbd 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -69,6 +69,7 @@ load_magic_dl1_data_files, save_pandas_data_in_table, telescope_combinations, + check_input_list, ) __all__ = ["event_coincidence","telescope_positions"] @@ -616,6 +617,9 @@ def main(): with open(args.config_file, "rb") as f: config = yaml.safe_load(f) + # Checking if the input telescope list is properly organized: + check_input_list(config) + # Check the event coincidence event_coincidence( args.input_file_lst, args.input_dir_magic, args.output_dir, config diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index eda0daec3..5a393850c 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -43,7 +43,7 @@ from magicctapipe.image import MAGICClean from magicctapipe.image.calib import calibrate -from magicctapipe.io import SimEventInfoContainer, format_object +from magicctapipe.io import SimEventInfoContainer, format_object, check_input_list from magicctapipe.utils import calculate_disp, calculate_impact from traitlets.config import Config @@ -424,6 +424,9 @@ def main(): with open(args.config_file, "rb") as f: # "rb" mode opens the file in binary format for reading config = yaml.safe_load(f) #Here we collect the inputs from the configuration file + # Checking if the input telescope list is properly organized: + check_input_list(config) + # Process the input data mc_dl0_to_dl1(args.input_file, args.output_dir, config, args.focal_length_choice) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py 
index 79bc46345..38dcf9d99 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py @@ -44,7 +44,7 @@ ) from ctapipe.instrument import SubarrayDescription from ctapipe.reco import HillasReconstructor -from magicctapipe.io import format_object, get_stereo_events, save_pandas_data_in_table +from magicctapipe.io import format_object, get_stereo_events, save_pandas_data_in_table, check_input_list from magicctapipe.utils import calculate_impact, calculate_mean_direction __all__ = ["calculate_pointing_separation", "stereo_reconstruction"] @@ -388,6 +388,9 @@ def main(): with open(args.config_file, "rb") as f: config = yaml.safe_load(f) + # Checking if the input telescope list is properly organized: + check_input_list(config) + # Process the input data stereo_reconstruction(args.input_file, args.output_dir, config, args.magic_only) diff --git a/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py b/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py index f23988281..ed56cd2b0 100644 --- a/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/magic_calib_to_dl1.py @@ -51,7 +51,7 @@ from ctapipe.io import HDF5TableWriter from ctapipe_io_magic import MAGICEventSource from magicctapipe.image import MAGICClean -from magicctapipe.io import RealEventInfoContainer, SimEventInfoContainer, format_object +from magicctapipe.io import RealEventInfoContainer, SimEventInfoContainer, format_object, check_input_list from magicctapipe.utils import calculate_disp, calculate_impact __all__ = ["magic_calib_to_dl1"] @@ -387,6 +387,9 @@ def main(): with open(args.config_file, "rb") as f: config = yaml.safe_load(f) + # Checking if the input telescope list is properly organized: + check_input_list(config) + # Process the input data magic_calib_to_dl1(args.input_file, args.output_dir, config, args.max_events, args.process_run) logger.info("\nDone.") From 
cf92a25882959288f79c40b41632e9a18012d68a Mon Sep 17 00:00:00 2001 From: ranieremenezes Date: Mon, 9 Oct 2023 16:40:45 +0200 Subject: [PATCH 70/76] Just fixing a typo --- magicctapipe/io/io.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/magicctapipe/io/io.py b/magicctapipe/io/io.py index 8c4fdffd5..66f00865f 100644 --- a/magicctapipe/io/io.py +++ b/magicctapipe/io/io.py @@ -64,7 +64,7 @@ def check_input_list(config): This function checks if the input telescope list is organized as follows: 1) All 4 LSTs and 2 MAGICs must be listed 2) All 4 LSTs must come before the MAGICs - And it rises an exception in case these rules are not satisfied. + And it raises an exception in case these rules are not satisfied. Below we give two examples of valid lists: i) @@ -100,7 +100,7 @@ def check_input_list(config): Returns ------- - This function will rise an exception if the input list is not properly organized. + This function will raise an exception if the input list is not properly organized. 
""" list_of_tel_names = list(config["mc_tel_ids"].keys()) From 48ae5766740417a9a5ffbfdd7baf405c19b45124 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Thu, 12 Oct 2023 09:24:37 +0200 Subject: [PATCH 71/76] Minor fixes --- .github/workflows/ci.yml | 2 +- magicctapipe/image/calib.py | 142 +++++++++--------- .../lst1_magic_event_coincidence.py | 3 +- .../lst1_magic/lst1_magic_mc_dl0_to_dl1.py | 2 +- setup.py | 2 +- 5 files changed, 78 insertions(+), 73 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7ee85c979..4ad50d7ef 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -81,7 +81,7 @@ jobs: # activate our env again source $CONDA/etc/profile.d/conda.sh conda activate ci - coverage run -m pytest -v + coverage run -m pytest -v coverage xml - uses: codecov/codecov-action@v3 diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index 774e41757..a98a5928e 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -54,81 +54,87 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g Array of the signal peak time in the camera pixels """ - + if (LST_bool==False) and (magic_clean==None): + raise ValueError("Check the provided parameters and the telescope type; calibration was not possible") + if (LST_bool==True) and (obs_id==None): + raise ValueError("Check the provided parameters and the telescope type; calibration was not possible") + if (LST_bool==True) and (camera_geoms==None): + raise ValueError("Check the provided parameters and the telescope type; calibration was not possible") + if (LST_bool==False) and (type(magic_clean[tel_id])!=MAGICClean): + raise ValueError("Check the provided magic_clean parameter; calibration was not possible") + if (LST_bool==True) and (type(camera_geoms[tel_id])!=CameraGeometry): + raise ValueError("Check the provided camera_geoms parameter; calibration was not possible") + calibrator._calibrate_dl0(event, tel_id) 
calibrator._calibrate_dl1(event, tel_id) image = event.dl1.tel[tel_id].image.astype(np.float64) peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - if (LST_bool==False) and (magic_clean!=None): - if type(magic_clean[tel_id])==MAGICClean: - use_charge_correction = config["charge_correction"]["use"] - - if use_charge_correction: - # Scale the charges by the correction factor - image *= config["charge_correction"]["factor"] - # Apply the image cleaning - signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( - event_image=image, event_pulse_time=peak_time + if LST_bool==False: + use_charge_correction = config["charge_correction"]["use"] + + if use_charge_correction: + # Scale the charges by the correction factor + image *= config["charge_correction"]["factor"] + + # Apply the image cleaning + signal_pixels, image, peak_time = magic_clean[tel_id].clean_image( + event_image=image, event_pulse_time=peak_time + ) + + elif LST_bool==True: + increase_nsb = config["increase_nsb"].pop("use") + increase_psf = config["increase_psf"]["use"] + use_time_delta_cleaning = config["time_delta_cleaning"].pop("use") + use_dynamic_cleaning = config["dynamic_cleaning"].pop("use") + use_only_main_island = config["use_only_main_island"] + + if increase_nsb: + rng = np.random.default_rng(obs_id) + # Add extra noise in pixels + image = add_noise_in_pixels(rng, image, **config["increase_nsb"]) + + if increase_psf: + set_numba_seed(obs_id) + # Smear the image + image = random_psf_smearer( + image=image, + fraction=config["increase_psf"]["fraction"], + indices=camera_geoms[tel_id].neighbor_matrix_sparse.indices, + indptr=camera_geoms[tel_id].neighbor_matrix_sparse.indptr, ) - else: - raise ValueError("Check the provided magic_clean parameter; calibration was not possible") - elif (LST_bool==True) and (obs_id!=None) and (camera_geoms!=None): - if type(camera_geoms[tel_id])==CameraGeometry: - increase_nsb = config["increase_nsb"].pop("use") - increase_psf = 
config["increase_psf"]["use"] - use_time_delta_cleaning = config["time_delta_cleaning"].pop("use") - use_dynamic_cleaning = config["dynamic_cleaning"].pop("use") - use_only_main_island = config["use_only_main_island"] - - if increase_nsb: - rng = np.random.default_rng(obs_id) - # Add extra noise in pixels - image = add_noise_in_pixels(rng, image, **config["increase_nsb"]) - - if increase_psf: - set_numba_seed(obs_id) - # Smear the image - image = random_psf_smearer( - image=image, - fraction=config["increase_psf"]["fraction"], - indices=camera_geoms[tel_id].neighbor_matrix_sparse.indices, - indptr=camera_geoms[tel_id].neighbor_matrix_sparse.indptr, - ) - - # Apply the image cleaning - signal_pixels = tailcuts_clean( - camera_geoms[tel_id], image, **config["tailcuts_clean"] + + # Apply the image cleaning + signal_pixels = tailcuts_clean( + camera_geoms[tel_id], image, **config["tailcuts_clean"] + ) + + if use_time_delta_cleaning: + signal_pixels = apply_time_delta_cleaning( + geom=camera_geoms[tel_id], + mask=signal_pixels, + arrival_times=peak_time, + **config["time_delta_cleaning"], ) - if use_time_delta_cleaning: - signal_pixels = apply_time_delta_cleaning( - geom=camera_geoms[tel_id], - mask=signal_pixels, - arrival_times=peak_time, - **config["time_delta_cleaning"], - ) - - if use_dynamic_cleaning: - signal_pixels = apply_dynamic_cleaning( - image, signal_pixels, **config["dynamic_cleaning"] - ) - - if use_only_main_island: - _, island_labels = number_of_islands(camera_geoms[tel_id], signal_pixels) - n_pixels_on_island = np.bincount(island_labels.astype(np.int64)) - - # The first index means the pixels not surviving - # the cleaning, so should not be considered - n_pixels_on_island[0] = 0 - max_island_label = np.argmax(n_pixels_on_island) - signal_pixels[island_labels != max_island_label] = False - config["increase_nsb"]["use"]=increase_nsb - config["time_delta_cleaning"]["use"]=use_time_delta_cleaning - config["dynamic_cleaning"]["use"]=use_dynamic_cleaning - 
else: - raise ValueError("Check the provided camera_geoms parameter; calibration was not possible") - else: - raise ValueError("Check the provided parameters and the telescope type; calibration was not possible") + if use_dynamic_cleaning: + signal_pixels = apply_dynamic_cleaning( + image, signal_pixels, **config["dynamic_cleaning"] + ) + + if use_only_main_island: + _, island_labels = number_of_islands(camera_geoms[tel_id], signal_pixels) + n_pixels_on_island = np.bincount(island_labels.astype(np.int64)) + + # The first index means the pixels not surviving + # the cleaning, so should not be considered + n_pixels_on_island[0] = 0 + max_island_label = np.argmax(n_pixels_on_island) + signal_pixels[island_labels != max_island_label] = False + + config["increase_nsb"]["use"]=increase_nsb + config["time_delta_cleaning"]["use"]=use_time_delta_cleaning + config["dynamic_cleaning"]["use"]=use_dynamic_cleaning + return signal_pixels, image, peak_time diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py index 7d962dcbd..62c67d561 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_event_coincidence.py @@ -112,8 +112,7 @@ def telescope_positions(config): } telescopes_in_use = {} - tels=config["mc_tel_ids"] - tel_cp=tels.copy() + tel_cp=config["mc_tel_ids"].copy() for k, v in tel_cp.copy().items(): if v <= 0: # Here we remove the telescopes with ID (i.e. 
"v") <= 0 from the dictionary: diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index 5a393850c..1ad3618d5 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -90,7 +90,7 @@ def mc_dl0_to_dl1(input_file, output_dir, config, focal_length): ) obs_id = event_source.obs_ids[0] - subarray = event_source.subarray + subarray = event_source.subarray tel_descriptions = subarray.tel tel_positions = subarray.positions diff --git a/setup.py b/setup.py index a9653b4cb..f8c4a145e 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ "lst1_magic_stereo_reco = magicctapipe.scripts.lst1_magic.lst1_magic_stereo_reco:main", "lst1_magic_train_rfs = magicctapipe.scripts.lst1_magic.lst1_magic_train_rfs:main", "magic_calib_to_dl1 = magicctapipe.scripts.lst1_magic.magic_calib_to_dl1:main", - "merge_hdf_files = magicctapipe.scripts.lst1_magic.merge_hdf_files:main", + "merge_hdf_files = magicctapipe.scripts.lst1_magic.merge_hdf_files:main", ] tests_require = ["pytest", "pandas>=0.24.0", "importlib_resources;python_version<'3.9'"] From 93c6dd781381c7b5b5dd6a77ad813373eb35e966 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Fri, 13 Oct 2023 16:58:37 +0200 Subject: [PATCH 72/76] Minor fixes+tests (check_list, tel-wise training) --- magicctapipe/io/tests/test_io.py | 107 ++++++++++++++++++ magicctapipe/io/tests/test_io_monly.py | 48 ++++++++ .../lst1_magic/lst1_magic_stereo_reco.py | 9 +- 3 files changed, 158 insertions(+), 6 deletions(-) diff --git a/magicctapipe/io/tests/test_io.py b/magicctapipe/io/tests/test_io.py index c44cc8b31..dfef43a1d 100644 --- a/magicctapipe/io/tests/test_io.py +++ b/magicctapipe/io/tests/test_io.py @@ -1,8 +1,10 @@ from magicctapipe.io.io import ( + check_input_list, format_object, get_dl2_mean, get_stereo_events, load_train_data_files, + load_train_data_files_tel, load_mc_dl2_data_file, 
load_irf_files, save_pandas_data_in_table, @@ -17,6 +19,63 @@ import pandas as pd +def test_check_input_list(): + """ + Test on different dictionaries + """ + + try: + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':2, 'LST-3':3, 'LST-4':4, 'MAGIC-I':5, 'MAGIC-II':6}}) + except Exception: + assert False + + try: + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':3, 'LST-3':0, 'LST-4':0, 'MAGIC-I':2, 'MAGIC-II':6}}) + except Exception: + assert False + + try: + check_input_list({'mc_tel_ids':{'LST-2':1, 'LST-1':3, 'LST-4':0, 'LST-3':0, 'MAGIC-II':2, 'MAGIC-I':6}}) + except Exception: + assert False + + with pytest.raises( + Exception, + match="Number of telescopes found in the configuration file is 5. It must be 6, i.e.: LST-1, LST-2, LST-3, LST-4, MAGIC-I, and MAGIC-II.", + ): + check_input_list({'mc_tel_ids':{'LST-2':1, 'LST-2':3, 'LST-4':0, 'LST-3':0, 'MAGIC-II':2, 'MAGIC-I':6}}) + + with pytest.raises( + Exception, + match="Number of telescopes found in the configuration file is 5. It must be 6, i.e.: LST-1, LST-2, LST-3, LST-4, MAGIC-I, and MAGIC-II.", + ): + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':2, 'LST-3':3, 'MAGIC-I':4, 'MAGIC-II':5}}) + + with pytest.raises( + Exception, + match="Number of telescopes found in the configuration file is 7. It must be 6, i.e.: LST-1, LST-2, LST-3, LST-4, MAGIC-I, and MAGIC-II.", + ): + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':2, 'LST-3':3, 'LST-4':6, 'LST-5':7, 'MAGIC-I':4, 'MAGIC-II':5}}) + + with pytest.raises( + Exception, + match="Entry 'LSTT-1' not accepted as an LST. Please make sure that the first four telescopes are LSTs, e.g.: 'LST-1', 'LST-2', 'LST-3', and 'LST-4'", + ): + check_input_list({'mc_tel_ids':{'LSTT-1':1, 'LST-2':2, 'LST-3':3, 'LST-4':6, 'MAGIC-I':4, 'MAGIC-II':5}}) + + with pytest.raises( + Exception, + match="Entry 'MAGIC-III' not accepted as a MAGIC. 
Please make sure that the last two telescopes are MAGICs, e.g.: 'MAGIC-I', and 'MAGIC-II'", + ): + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':2, 'LST-3':3, 'LST-4':6, 'MAGIC-I':4, 'MAGIC-III':5}}) + + with pytest.raises( + Exception, + match="Entry 'MAGIC-I' not accepted as an LST. Please make sure that the first four telescopes are LSTs, e.g.: 'LST-1', 'LST-2', 'LST-3', and 'LST-4'", + ): + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':2, 'MAGIC-I':4, 'LST-3':3, 'LST-4':6, 'MAGIC-II':5}}) + + def test_telescope_combinations(config_gen, config_gen_4lst): """ Simple check on telescope combinations @@ -28,6 +87,7 @@ def test_telescope_combinations(config_gen, config_gen_4lst): assert LSTs == {1: 'LST-1', 3: 'LST-2', 2: 'LST-3', 5: 'LST-4'} assert LSTs_comb == {'LST-1_LST-2': [1, 3], 'LST-1_LST-2_LST-3': [1, 3, 2], 'LST-1_LST-2_LST-3_LST-4': [1, 3, 2, 5], 'LST-1_LST-2_LST-4': [1, 3, 5], 'LST-1_LST-3': [1, 2], 'LST-1_LST-3_LST-4': [1, 2, 5], 'LST-1_LST-4': [1, 5], 'LST-2_LST-3': [3, 2], 'LST-2_LST-3_LST-4': [3, 2, 5], 'LST-2_LST-4': [3, 5], 'LST-3_LST-4': [2, 5]} + def test_format_object(): """ Simple check on a string @@ -136,6 +196,53 @@ def test_load_train_data_files_exc(temp_train_exc): _ = load_train_data_files(str(temp_train_exc)) +def test_load_train_data_files_tel_p(p_stereo, config_gen): + """ + Check dictionary + """ + + events = load_train_data_files_tel(str(p_stereo[0]),config_gen) + assert list(events.keys()) == [1,2,3] + data = events[2] + assert "off_axis" in data.columns + assert "true_event_class" not in data.columns + + +def test_load_train_data_files_tel_g(gamma_stereo, config_gen): + """ + Check dictionary + """ + + events = load_train_data_files_tel(str(gamma_stereo[0]), config_gen) + assert list(events.keys()) == [1,2,3] + data = events[1] + assert "off_axis" in data.columns + assert "true_event_class" not in data.columns + + +def test_load_train_data_files_tel_off(gamma_stereo, config_gen): + """ + Check off-axis cut + """ + events = 
load_train_data_files_tel( + str(gamma_stereo[0]), config=config_gen, offaxis_min="0.2 deg", offaxis_max="0.5 deg" + ) + data = events[1] + assert np.all(data["off_axis"] >= 0.2) + assert np.all(data["off_axis"] <= 0.5) + + +def test_load_train_data_files_tel_exc(temp_train_exc, config_gen): + """ + Check on exceptions + """ + with pytest.raises( + FileNotFoundError, + match="Could not find any DL1-stereo data files in the input directory.", + ): + _ = load_train_data_files(str(temp_train_exc), config_gen) + + def test_load_mc_dl2_data_file(p_dl2, gamma_dl2): """ Checks on default loading diff --git a/magicctapipe/io/tests/test_io_monly.py b/magicctapipe/io/tests/test_io_monly.py index 0823b3f54..135b782db 100644 --- a/magicctapipe/io/tests/test_io_monly.py +++ b/magicctapipe/io/tests/test_io_monly.py @@ -3,6 +3,7 @@ get_dl2_mean, get_stereo_events, load_train_data_files, + load_train_data_files_tel, load_mc_dl2_data_file, load_irf_files, save_pandas_data_in_table, @@ -124,6 +125,53 @@ def test_load_train_data_files_exc(temp_train_exc): _ = load_train_data_files(str(temp_train_exc)) +def test_load_train_data_files_tel_p(p_stereo_monly, config_gen): + """ + Check dictionary + """ + + events = load_train_data_files_tel(str(p_stereo_monly[0]),config_gen) + assert list(events.keys()) == [2,3] + data = events[2] + assert "off_axis" in data.columns + assert "true_event_class" not in data.columns + + +def test_load_train_data_files_tel_g(gamma_stereo_monly, config_gen): + """ + Check dictionary + """ + + events = load_train_data_files_tel(str(gamma_stereo_monly[0]), config_gen) + assert list(events.keys()) == [2,3] + data = events[3] + assert "off_axis" in data.columns + assert "true_event_class" not in data.columns + + +def test_load_train_data_files_tel_off(gamma_stereo_monly, config_gen): + """ + Check off-axis cut + """ + events = load_train_data_files_tel( + str(gamma_stereo_monly[0]), config=config_gen, offaxis_min="0.2 deg", offaxis_max="0.5 deg" + ) + data = 
events[2] + assert np.all(data["off_axis"] >= 0.2) + assert np.all(data["off_axis"] <= 0.5) + + +def test_load_train_data_files_tel_exc(temp_train_exc, config_gen): + """ + Check on exceptions + """ + with pytest.raises( + FileNotFoundError, + match="Could not find any DL1-stereo data files in the input directory.", + ): + _ = load_train_data_files(str(temp_train_exc), config_gen) + + def test_load_mc_dl2_data_file(p_dl2_monly, gamma_dl2_monly): """ Checks on default loading diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py index 38dcf9d99..277de8d43 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_stereo_reco.py @@ -155,7 +155,7 @@ def stereo_reconstruction(input_file, output_dir, config, magic_only_analysis=Fa tel_id=np.asarray(list(assigned_tel_ids.values())[:]) used_id=tel_id[tel_id!=0] magic_ids=[item for item in used_id if item not in LSTs_IDs] - event_data.query(f"tel_id in {magic_ids}", inplace=True) # Here we select only the events with the MAGIC tel_ids, i.e. above the maximum tel_id of the LSTs + event_data.query(f"tel_id in {magic_ids}", inplace=True) # Here we select only the events with the MAGIC tel_ids logger.info(f"\nQuality cuts: {config_stereo['quality_cuts']}") event_data = get_stereo_events(event_data, config=config, quality_cuts=config_stereo["quality_cuts"]) @@ -166,11 +166,8 @@ def stereo_reconstruction(input_file, output_dir, config, magic_only_analysis=Fa Number_of_LSTs_in_use = len(LSTs_IDs[LSTs_IDs > 0]) MAGICs_IDs = np.asarray(list(assigned_tel_ids.values())[4:6]) Number_of_MAGICs_in_use = len(MAGICs_IDs[MAGICs_IDs > 0]) - if Number_of_LSTs_in_use*Number_of_MAGICs_in_use > 0: #If we use the two arrays, i.e. 
MAGIC and LST, then the "if" statement below will work (except for MC simulations) - Two_arrays_are_used = True - else: - Two_arrays_are_used = False - + Two_arrays_are_used = (Number_of_LSTs_in_use*Number_of_MAGICs_in_use > 0) + if (not is_simulation) and (Two_arrays_are_used): logger.info( From f4fde0fd32d986700d0988e91aa9c560953ed888 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Fri, 13 Oct 2023 17:11:08 +0200 Subject: [PATCH 73/76] Output file name --- magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index 1ad3618d5..0d0eded0c 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -427,6 +427,8 @@ def main(): # Checking if the input telescope list is properly organized: check_input_list(config) + config['mc_tel_ids']=dict.sorted(config['mc_tel_ids'].items()) #Sorting needed to correctly name the output file + # Process the input data mc_dl0_to_dl1(args.input_file, args.output_dir, config, args.focal_length_choice) From 4ff0885a51d1396f423961f8b16569c50de002b1 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Fri, 13 Oct 2023 17:27:42 +0200 Subject: [PATCH 74/76] Pyflakes error --- magicctapipe/conftest.py | 8 ++++++++ magicctapipe/io/tests/test_io.py | 4 ++-- magicctapipe/resources/test_check_list.yaml | 7 +++++++ 3 files changed, 17 insertions(+), 2 deletions(-) create mode 100644 magicctapipe/resources/test_check_list.yaml diff --git a/magicctapipe/conftest.py b/magicctapipe/conftest.py index df4a3ebc0..d9e151564 100644 --- a/magicctapipe/conftest.py +++ b/magicctapipe/conftest.py @@ -404,6 +404,14 @@ def config_calib(): return config +@pytest.fixture(scope="session") +def config_check(): + config_path = resource_file("test_check_list.yaml") + with open(config_path, "rb") as f: + 
config = yaml.safe_load(f) + return config + + """ Data processing """ diff --git a/magicctapipe/io/tests/test_io.py b/magicctapipe/io/tests/test_io.py index dfef43a1d..40cd9f23d 100644 --- a/magicctapipe/io/tests/test_io.py +++ b/magicctapipe/io/tests/test_io.py @@ -19,7 +19,7 @@ import pandas as pd -def test_check_input_list(): +def test_check_input_list(config_check): """ Test on different dictionaries """ @@ -43,7 +43,7 @@ def test_check_input_list(): Exception, match="Number of telescopes found in the configuration file is 5. It must be 6, i.e.: LST-1, LST-2, LST-3, LST-4, MAGIC-I, and MAGIC-II.", ): - check_input_list({'mc_tel_ids':{'LST-2':1, 'LST-2':3, 'LST-4':0, 'LST-3':0, 'MAGIC-II':2, 'MAGIC-I':6}}) + check_input_list(config_check) with pytest.raises( Exception, diff --git a/magicctapipe/resources/test_check_list.yaml b/magicctapipe/resources/test_check_list.yaml new file mode 100644 index 000000000..15b4296de --- /dev/null +++ b/magicctapipe/resources/test_check_list.yaml @@ -0,0 +1,7 @@ +mc_tel_ids: + LST-2: 1 + LST-2: 3 + LST-4: 0 + LST-3: 0 + MAGIC-II: 2 + MAGIC-I: 6 \ No newline at end of file From fff4a8428ccaf88c97337c863aa1d8de2346d165 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Fri, 13 Oct 2023 19:16:02 +0200 Subject: [PATCH 75/76] calib fixes --- magicctapipe/image/calib.py | 28 +++++++-------- magicctapipe/image/tests/test_calib.py | 35 ++++++++----------- .../lst1_magic/lst1_magic_mc_dl0_to_dl1.py | 6 ++-- 3 files changed, 32 insertions(+), 37 deletions(-) diff --git a/magicctapipe/image/calib.py b/magicctapipe/image/calib.py index a98a5928e..95ae4410d 100644 --- a/magicctapipe/image/calib.py +++ b/magicctapipe/image/calib.py @@ -19,7 +19,7 @@ ] -def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_geoms=None, magic_clean=None): +def calibrate(event, tel_id, config, calibrator, is_lst, obs_id=None, camera_geoms=None, magic_clean=None): """ This function calibrates the camera image for a single event of a 
telescope @@ -34,7 +34,7 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g Parameters for image extraction and calibration calibrator: CameraCalibrator (ctapipe.calib) ctapipe object needed to calibrate the camera - LST_bool: bool + is_lst: bool Whether the telescope is a LST obs_id: int Observation ID. Unsed in case of LST telescope @@ -54,16 +54,16 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g Array of the signal peak time in the camera pixels """ - if (LST_bool==False) and (magic_clean==None): - raise ValueError("Check the provided parameters and the telescope type; calibration was not possible") - if (LST_bool==True) and (obs_id==None): - raise ValueError("Check the provided parameters and the telescope type; calibration was not possible") - if (LST_bool==True) and (camera_geoms==None): - raise ValueError("Check the provided parameters and the telescope type; calibration was not possible") - if (LST_bool==False) and (type(magic_clean[tel_id])!=MAGICClean): - raise ValueError("Check the provided magic_clean parameter; calibration was not possible") - if (LST_bool==True) and (type(camera_geoms[tel_id])!=CameraGeometry): - raise ValueError("Check the provided camera_geoms parameter; calibration was not possible") + if (not is_lst) and (magic_clean==None): + raise ValueError("Check the provided parameters and the telescope type; MAGIC calibration not possible if magic_clean not provided") + if (is_lst) and (obs_id==None): + raise ValueError("Check the provided parameters and the telescope type; LST calibration not possible if obs_id not provided") + if (is_lst) and (camera_geoms==None): + raise ValueError("Check the provided parameters and the telescope type; LST calibration not possible if camera_geoms not provided") + if (not is_lst) and (type(magic_clean[tel_id])!=MAGICClean): + raise ValueError("Check the provided magic_clean parameter; MAGIC calibration not possible if magic_clean not a
dictionary of MAGICClean objects") + if (is_lst) and (type(camera_geoms[tel_id])!=CameraGeometry): + raise ValueError("Check the provided camera_geoms parameter; LST calibration not possible if camera_geoms not a dictionary of CameraGeometry objects") calibrator._calibrate_dl0(event, tel_id) calibrator._calibrate_dl1(event, tel_id) @@ -71,7 +71,7 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g image = event.dl1.tel[tel_id].image.astype(np.float64) peak_time = event.dl1.tel[tel_id].peak_time.astype(np.float64) - if LST_bool==False: + if not is_lst: use_charge_correction = config["charge_correction"]["use"] if use_charge_correction: @@ -83,7 +83,7 @@ def calibrate(event, tel_id, config, calibrator, LST_bool, obs_id=None, camera_g event_image=image, event_pulse_time=peak_time ) - elif LST_bool==True: + else: increase_nsb = config["increase_nsb"].pop("use") increase_psf = config["increase_psf"]["use"] use_time_delta_cleaning = config["time_delta_cleaning"].pop("use") diff --git a/magicctapipe/image/tests/test_calib.py b/magicctapipe/image/tests/test_calib.py index c93d5d34a..7229686e8 100644 --- a/magicctapipe/image/tests/test_calib.py +++ b/magicctapipe/image/tests/test_calib.py @@ -53,7 +53,6 @@ def test_calibrate_LST(dl0_gamma, config_calib, tel_id_LST): subarray=subarray, ) - LST_bool=True for event in event_source: if (event.count <200) and (tel_id_LST in event.trigger.tels_with_trigger): signal_pixels, image, peak_time = calibrate( @@ -63,7 +62,7 @@ def test_calibrate_LST(dl0_gamma, config_calib, tel_id_LST): config=config_lst, camera_geoms=camera_geoms, calibrator=calibrator_lst, - LST_bool=LST_bool + is_lst=True, ) assert len(signal_pixels)==1855 @@ -110,7 +109,7 @@ def test_calibrate_MAGIC(dl0_gamma, config_calib, tel_id_MAGIC): config=Config(config_extractor_magic), subarray=subarray, ) - LST_bool=False + for event in event_source: if (event.count <200) and (tel_id_MAGIC in event.trigger.tels_with_trigger): signal_pixels, 
image, peak_time = calibrate( @@ -119,7 +118,7 @@ def test_calibrate_MAGIC(dl0_gamma, config_calib, tel_id_MAGIC): config=config_magic, magic_clean=magic_clean, calibrator=calibrator_magic, - LST_bool=LST_bool + is_lst=False, ) assert len(signal_pixels)==1039 @@ -148,19 +147,19 @@ def test_calibrate_exc_1(dl0_gamma, config_calib, tel_id_MAGIC): config=Config(config_extractor_magic), subarray=subarray, ) - LST_bool=False + for event in event_source: if (event.count <200) and (tel_id_MAGIC in event.trigger.tels_with_trigger): with pytest.raises( ValueError, - match="Check the provided parameters and the telescope type; calibration was not possible", + match="Check the provided parameters and the telescope type; MAGIC calibration not possible if magic_clean not provided", ): _,_,_ = calibrate( event=event, tel_id=tel_id_MAGIC, config=config_magic, calibrator=calibrator_magic, - LST_bool=LST_bool + is_lst=False, ) config_magic["image_extractor"]["type"]=extractor_type_magic @@ -196,12 +195,11 @@ def test_calibrate_exc_2(dl0_gamma, config_calib, tel_id_LST): subarray=subarray, ) - LST_bool=True for event in event_source: if (event.count <200) and (tel_id_LST in event.trigger.tels_with_trigger): with pytest.raises( ValueError, - match="Check the provided parameters and the telescope type; calibration was not possible", + match="Check the provided parameters and the telescope type; LST calibration not possible if obs_id not provided", ): _,_,_ = calibrate( event=event, @@ -209,7 +207,7 @@ def test_calibrate_exc_2(dl0_gamma, config_calib, tel_id_LST): config=config_lst, camera_geoms=camera_geoms, calibrator=calibrator_lst, - LST_bool=LST_bool + is_lst=True, ) config_lst["image_extractor"]["type"]=extractor_type_lst @@ -241,12 +239,11 @@ def test_calibrate_exc_3(dl0_gamma, config_calib, tel_id_LST): subarray=subarray, ) - LST_bool=True for event in event_source: if (event.count <200) and (tel_id_LST in event.trigger.tels_with_trigger): with pytest.raises( ValueError, - 
match="Check the provided parameters and the telescope type; calibration was not possible", + match="Check the provided parameters and the telescope type; LST calibration not possible if camera_geoms not provided", ): signal_pixels, image, peak_time = calibrate( event=event, @@ -254,7 +251,7 @@ def test_calibrate_exc_3(dl0_gamma, config_calib, tel_id_LST): obs_id=obs_id, config=config_lst, calibrator=calibrator_lst, - LST_bool=LST_bool + is_lst=True, ) config_lst["image_extractor"]["type"]=extractor_type_lst @@ -281,14 +278,13 @@ def test_calibrate_exc_4(dl0_gamma, config_calib, tel_id_MAGIC): image_extractor_type=extractor_type_magic, config=Config(config_extractor_magic), subarray=subarray, - ) + ) - LST_bool=False for event in event_source: if (event.count <200) and (tel_id_MAGIC in event.trigger.tels_with_trigger): with pytest.raises( ValueError, - match="Check the provided magic_clean parameter; calibration was not possible", + match="Check the provided magic_clean parameter; MAGIC calibration not possible if magic_clean not a dictionary of MAGICClean objects", ): _,_,_ = calibrate( event=event, @@ -296,7 +292,7 @@ def test_calibrate_exc_4(dl0_gamma, config_calib, tel_id_MAGIC): config=config_magic, calibrator=calibrator_magic, magic_clean=magic_clean, - LST_bool=LST_bool + is_lst=False, ) config_magic["image_extractor"]["type"]=extractor_type_magic @@ -332,12 +328,11 @@ def test_calibrate_exc_5(dl0_gamma, config_calib, tel_id_LST): subarray=subarray, ) - LST_bool=True for event in event_source: if (event.count <200) and (tel_id_LST in event.trigger.tels_with_trigger): with pytest.raises( ValueError, - match="Check the provided camera_geoms parameter; calibration was not possible", + match="Check the provided camera_geoms parameter; LST calibration not possible if camera_geoms not a dictionary of CameraGeometry objects", ): _,_,_ = calibrate( event=event, @@ -346,7 +341,7 @@ def test_calibrate_exc_5(dl0_gamma, config_calib, tel_id_LST): config=config_lst,
camera_geoms=camera_geoms, calibrator=calibrator_lst, - LST_bool=LST_bool, + is_lst=True, ) config_lst["image_extractor"]["type"]=extractor_type_lst diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py index 0d0eded0c..e4e0ba006 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_mc_dl0_to_dl1.py @@ -223,10 +223,10 @@ def mc_dl0_to_dl1(input_file, output_dir, config, focal_length): if tel_id in LSTs_IDs: ##If the ID is in the LST list, we call calibrate on the LST() # Calibrate the LST-1 event - signal_pixels, image, peak_time = calibrate(event=event, tel_id=tel_id, obs_id=obs_id, config=config_lst, camera_geoms=camera_geoms, calibrator=calibrator_lst, LST_bool=True) + signal_pixels, image, peak_time = calibrate(event=event, tel_id=tel_id, obs_id=obs_id, config=config_lst, camera_geoms=camera_geoms, calibrator=calibrator_lst, is_lst=True) elif tel_id in MAGICs_IDs: # Calibrate the MAGIC event - signal_pixels, image, peak_time = calibrate(event=event, tel_id=tel_id, config=config_magic, magic_clean=magic_clean, calibrator=calibrator_magic, LST_bool=False) + signal_pixels, image, peak_time = calibrate(event=event, tel_id=tel_id, config=config_magic, magic_clean=magic_clean, calibrator=calibrator_magic, is_lst=False) else: logger.info( f"--> Telescope ID {tel_id} not in LST list or MAGIC list. 
Please check if the IDs are OK in the configuration file" @@ -427,7 +427,7 @@ def main(): # Checking if the input telescope list is properly organized: check_input_list(config) - config['mc_tel_ids']=dict.sorted(config['mc_tel_ids'].items()) #Sorting needed to correctly name the output file + config['mc_tel_ids']=dict(sorted(config['mc_tel_ids'].items())) #Sorting needed to correctly name the output file # Process the input data mc_dl0_to_dl1(args.input_file, args.output_dir, config, args.focal_length_choice) From df96e743f2847f1e48314107515e2036c40c6a39 Mon Sep 17 00:00:00 2001 From: Elisa-Visentin Date: Mon, 16 Oct 2023 11:29:04 +0200 Subject: [PATCH 76/76] Check_input_list test --- magicctapipe/io/tests/test_io.py | 21 ++++++--------------- 1 file changed, 6 insertions(+), 15 deletions(-) diff --git a/magicctapipe/io/tests/test_io.py b/magicctapipe/io/tests/test_io.py index 40cd9f23d..4951b93ad 100644 --- a/magicctapipe/io/tests/test_io.py +++ b/magicctapipe/io/tests/test_io.py @@ -24,21 +24,12 @@ def test_check_input_list(config_check): Test on different dictionaries """ - try: - check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':2, 'LST-3':3, 'LST-4':4, 'MAGIC-I':5, 'MAGIC-II':6}}) - except Exception: - assert False - - try: - check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':3, 'LST-3':0, 'LST-4':0, 'MAGIC-I':2, 'MAGIC-II':6}}) - except Exception: - assert False - - try: - check_input_list({'mc_tel_ids':{'LST-2':1, 'LST-1':3, 'LST-4':0, 'LST-3':0, 'MAGIC-II':2, 'MAGIC-I':6}}) - except Exception: - assert False - + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':2, 'LST-3':3, 'LST-4':4, 'MAGIC-I':5, 'MAGIC-II':6}}) + + check_input_list({'mc_tel_ids':{'LST-1':1, 'LST-2':3, 'LST-3':0, 'LST-4':0, 'MAGIC-I':2, 'MAGIC-II':6}}) + + check_input_list({'mc_tel_ids':{'LST-2':1, 'LST-1':3, 'LST-4':0, 'LST-3':0, 'MAGIC-II':2, 'MAGIC-I':6}}) + with pytest.raises( Exception, match="Number of telescopes found in the configuration file is 5. 
It must be 6, i.e.: LST-1, LST-2, LST-3, LST-4, MAGIC-I, and MAGIC-II.",