1
|
|
|
""" |
2
|
|
|
Main classes to define 3D Dataset Config, 3D Dataset Analysis Step and |
3
|
|
|
to generate 3D Datasets from given Instruments' DL3 data from the config. |
4
|
|
|
""" |
5
|
|
|
|
6
|
|
|
import logging |
7
|
|
|
from typing import List |
8
|
|
|
|
9
|
|
|
import numpy as np |
10
|
|
|
from astropy import units as u |
11
|
|
|
from astropy.io import fits |
12
|
|
|
from gammapy.catalog import CATALOG_REGISTRY |
13
|
|
|
from gammapy.data import GTI, EventList |
14
|
|
|
from gammapy.datasets import Datasets, MapDataset |
15
|
|
|
from gammapy.irf import EDispKernel, EDispKernelMap, PSFMap |
16
|
|
|
from gammapy.makers import MapDatasetMaker |
17
|
|
|
from gammapy.maps import Map, MapAxis |
18
|
|
|
from gammapy.modeling.models import Models |
19
|
|
|
|
20
|
|
|
from asgardpy.analysis.step_base import AnalysisStepBase |
21
|
|
|
from asgardpy.base.base import BaseConfig |
22
|
|
|
from asgardpy.base.geom import ( |
23
|
|
|
GeomConfig, |
24
|
|
|
SkyPositionConfig, |
25
|
|
|
create_counts_map, |
26
|
|
|
generate_geom, |
27
|
|
|
get_source_position, |
28
|
|
|
) |
29
|
|
|
from asgardpy.base.reduction import ( |
30
|
|
|
BackgroundConfig, |
31
|
|
|
MapSelectionEnum, |
32
|
|
|
ObservationsConfig, |
33
|
|
|
ReductionTypeEnum, |
34
|
|
|
SafeMaskConfig, |
35
|
|
|
generate_dl4_dataset, |
36
|
|
|
get_bkg_maker, |
37
|
|
|
get_dataset_maker, |
38
|
|
|
get_dataset_reference, |
39
|
|
|
get_exclusion_region_mask, |
40
|
|
|
get_filtered_observations, |
41
|
|
|
get_safe_mask_maker, |
42
|
|
|
) |
43
|
|
|
from asgardpy.data.target import ( |
44
|
|
|
apply_selection_mask_to_models, |
45
|
|
|
read_models_from_asgardpy_config, |
46
|
|
|
) |
47
|
|
|
from asgardpy.gammapy.read_models import ( |
48
|
|
|
create_gal_diffuse_skymodel, |
49
|
|
|
read_fermi_xml_models_list, |
50
|
|
|
update_aux_info_from_fermi_xml, |
51
|
|
|
) |
52
|
|
|
from asgardpy.io.input_dl3 import DL3Files, InputDL3Config |
53
|
|
|
from asgardpy.io.io_dl4 import DL4BaseConfig, DL4Files, get_reco_energy_bins |
54
|
|
|
|
55
|
|
|
# Public API of this module.
__all__ = [
    "Datasets3DAnalysisStep",
    "Dataset3DBaseConfig",
    "Dataset3DConfig",
    "Dataset3DGeneration",
    "Dataset3DInfoConfig",
]

# Module-level logger for this module.
log = logging.getLogger(__name__)
64
|
|
|
|
65
|
|
|
|
66
|
|
|
# Defining various components of 3D Dataset Config section |
67
|
|
|
class Dataset3DInfoConfig(BaseConfig):
    """Config section for 3D DL3 Dataset Reduction for each instrument."""

    # Name of this dataset-reduction section.
    name: str = "dataset-name"
    # List of "keys" (modes of observation, e.g. FRONT/BACK for Fermi-LAT);
    # an empty list means no distinct keys are used.
    key: List = []
    # Observation selection options used when filtering/reading DL3 data.
    observation: ObservationsConfig = ObservationsConfig()
    # Map-making selection steps; defaults to everything MapDatasetMaker offers.
    map_selection: List[MapSelectionEnum] = MapDatasetMaker.available_selection
    # Geometry (spatial and energy axes) for the reduced dataset.
    geom: GeomConfig = GeomConfig()
    # Background estimation and exclusion-region settings.
    background: BackgroundConfig = BackgroundConfig()
    # Safe-mask (energy/offset) settings.
    safe_mask: SafeMaskConfig = SafeMaskConfig()
    # Sky position of the ON (target) region.
    on_region: SkyPositionConfig = SkyPositionConfig()
    # Whether to apply PSF containment correction.
    containment_correction: bool = True
79
|
|
|
|
80
|
|
|
|
81
|
|
|
class Dataset3DBaseConfig(BaseConfig):
    """
    Config section for 3D DL3 Dataset base information for each instrument.
    """

    # Instrument label; also used as the base name of the produced dataset.
    name: str = "Instrument-name"
    # One or more DL3 input locations (events/IRFs/aux files) for this instrument.
    input_dl3: List[InputDL3Config] = [InputDL3Config()]
    # If True, a ready-made DL4 dataset is read instead of running DL3->DL4 reduction.
    input_dl4: bool = False
    # DL3 reduction options for this instrument.
    dataset_info: Dataset3DInfoConfig = Dataset3DInfoConfig()
    # DL4 dataset I/O options (also provides the spectral energy binning).
    dl4_dataset_info: DL4BaseConfig = DL4BaseConfig()
91
|
|
|
|
92
|
|
|
|
93
|
|
|
class Dataset3DConfig(BaseConfig):
    """Config section for a list of all 3D DL3 Datasets information."""

    # Reduction type for this section; defaults to "cube" (3D) analysis.
    type: ReductionTypeEnum = ReductionTypeEnum.cube
    # One entry per instrument contributing a 3D dataset.
    instruments: List[Dataset3DBaseConfig] = [Dataset3DBaseConfig()]
98
|
|
|
|
99
|
|
|
|
100
|
|
|
# The main Analysis Step |
101
|
|
|
class Datasets3DAnalysisStep(AnalysisStepBase):
    """
    From the given config information, prepare the full list of 3D datasets,
    iterating over all the Instruments' information by running the
    Dataset3DGeneration function.
    """

    tag = "datasets-3d"

    def _run(self):
        """
        Produce the DL4 datasets for every instrument listed in the config.

        Returns
        -------
        datasets_3d_final: `gammapy.datasets.Datasets`
            Flat list of all reduced 3D datasets (all instruments, all keys).
        models_final: `gammapy.modeling.models.Models` or None
            Combined models with ``datasets_names`` assigned, or None when no
            models were collected.
        instrument_spectral_info: dict
            Bookkeeping info (instrument names, spectral energy ranges,
            linked-parameter and reconstructed-energy-bin counts) used for the
            degrees-of-freedom estimate.
        """
        instruments_list = self.config.dataset3d.instruments
        self.log.info("%d number of 3D Datasets given", len(instruments_list))

        datasets_3d_final = Datasets()
        models_final = Models()
        instrument_spectral_info = {"name": [], "spectral_energy_ranges": []}

        # Calculate the total number of reconstructed energy bins used
        # and the number of linked model parameters to incorporate in the
        # total number of free model parameters, for the final estimation of
        # total number of degrees of freedom
        free_params = 0
        en_bins = 0

        # Iterate over all instrument information given. Direct iteration
        # replaces the original `for i in np.arange(len(...))` index loop:
        # clearer and avoids numpy integers as loop indices.
        for config_3d_dataset in instruments_list:
            instrument_spectral_info["name"].append(config_3d_dataset.name)

            key_names = config_3d_dataset.dataset_info.key
            if len(key_names) > 0:
                self.log.info("The different keys used: %s", " ".join(map(str, key_names)))
            else:
                # A single None key means "no distinct event-type keys".
                key_names = [None]
                self.log.info("No distinct keys used for the 3D dataset")

            # Extra Datasets object to differentiate between datasets obtained
            # from various "keys" of each instrument.
            dataset_instrument = Datasets()
            dl4_files = DL4Files(config_3d_dataset.dl4_dataset_info, self.log)

            # Only read unique SkyModels for the first instrument, unless there
            # are associated files like XML to read from for the particular
            # instrument.
            filled_skymodel = len(models_final) > 0

            # Retrieving a single dataset for each instrument key.
            for key in key_names:
                if not config_3d_dataset.input_dl4:
                    # Full DL3 -> DL4 reduction for this instrument/key.
                    generate_3d_dataset = Dataset3DGeneration(self.log, config_3d_dataset, self.config)
                    dataset, models = generate_3d_dataset.run(key, filled_skymodel)
                else:
                    # A ready-made DL4 dataset is read directly from disk.
                    dataset = dl4_files.get_dl4_dataset(config_3d_dataset.dataset_info.observation)
                    models = []

                # Use the individual Dataset type object for following tasks
                if isinstance(dataset, Datasets):
                    dataset = dataset[0]

                # Assigning datasets_names and including them in the final
                # model list.

                # When no associated list of models are provided, look for a
                # separate model for target and an entry of catalog to fill in.
                if len(models) > 0:
                    for model_ in models:
                        model_.datasets_names = [dataset.name]

                        if model_.name in models_final.names:
                            # Model already collected for a previous dataset:
                            # just attach this dataset's name to it.
                            models_final[model_.name].datasets_names.append(dataset.name)
                        else:
                            models_final.append(model_)

                dataset_instrument.append(dataset)

            if len(models_final) > 0:
                # Linking the spectral model of the diffuse model for each key
                diffuse_models_names = []
                for model_name in models_final.names:
                    if "diffuse-iso" in model_name:
                        diffuse_models_names.append(model_name)

                if len(diffuse_models_names) > 1:
                    for model_name in diffuse_models_names[1:]:
                        models_final[diffuse_models_names[0]].spectral_model.model2 = models_final[
                            model_name
                        ].spectral_model.model2
                        # For each linked model parameter, reduce the number of DoF
                        free_params -= 1
            else:
                # NOTE(review): models_final becomes None here; if a later
                # instrument iteration then evaluates len(models_final), it
                # would raise TypeError. Confirm an empty model list can only
                # occur for the last/only instrument.
                models_final = None

            energy_bin_edges = dl4_files.get_spectral_energies()
            instrument_spectral_info["spectral_energy_ranges"].append(energy_bin_edges)

            for data in dataset_instrument:
                en_bins = get_reco_energy_bins(data, en_bins)
                datasets_3d_final.append(data)

        instrument_spectral_info["free_params"] = free_params
        instrument_spectral_info["en_bins"] = en_bins

        return datasets_3d_final, models_final, instrument_spectral_info
206
|
|
|
|
207
|
|
|
|
208
|
|
|
class Dataset3DGeneration:
    """
    Class for 3D dataset creation based on the config or AsgardpyConfig
    information provided on the 3D dataset and the target source.

    Runs the following steps:

    1. Read the DL3 files of 3D datasets into gammapy readable objects.

    2. Create the base counts Map.

    3. Prepare standard data reduction makers using the parameters passed in the config.

    4. Generate the final dataset.
    """

    def __init__(self, log, config_3d_dataset, config_full):
        # Instrument-specific config (Dataset3DBaseConfig) and shared logger.
        self.config_3d_dataset = config_3d_dataset
        self.log = log
        # Exclusion mask built during run(); also reused as the safe mask in
        # generate_dataset().
        self.exclusion_mask = None
        # IRF objects, filled while reading DL3 files and preparing reduction.
        self.irfs = {
            "exposure": None,
            "psf": None,
            "edisp": None,
            "edisp_kernel": None,
            "edisp_interp_kernel": None,
            "exposure_interp": None,
        }
        # Event-related objects: EventList, raw FITS HDUs, GTI and counts map.
        self.events = {"events": None, "event_fits": None, "gti": None, "counts_map": None}
        # Diffuse background models (galactic + isotropic) and their key name.
        self.diffuse_models = {
            "gal_diffuse": None,
            "iso_diffuse": None,
            "key_name": None,
            "gal_diffuse_cutout": None,
        }
        # Source models collected for this dataset.
        self.list_source_models = []

        # For updating the main config file with target source position
        # information if necessary.
        self.config_full = config_full
        self.config_target = config_full.target

    def run(self, key_name, filled_skymodel):
        """
        Main function to run the creation of 3D dataset.

        Parameters
        ----------
        key_name:
            Key ("mode of observation", e.g. FRONT/BACK for Fermi-LAT) to
            process, or None when the instrument has no distinct keys.
        filled_skymodel: bool
            True when the unique SkyModels were already read for a previous
            instrument, so they are not re-read from the config here.

        Returns
        -------
        dataset:
            The reduced dataset (may be a Datasets object for GADF input; the
            caller unwraps it).
        list_source_models: list
            Source models associated with this dataset.
        """
        # First check for the given file list if they are readable or not.
        file_list = self.read_to_objects(key_name)

        exclusion_regions = []

        if self.config_3d_dataset.input_dl3[0].type == "gadf-dl3":
            # Standard GADF DL3 input: use the Gammapy maker chain.
            observations = get_filtered_observations(
                dl3_path=self.config_3d_dataset.input_dl3[0].input_dir,
                obs_config=self.config_3d_dataset.dataset_info.observation,
                log=self.log,
            )
            center_pos = get_source_position(target_region=self.config_3d_dataset.dataset_info.on_region)

            geom = generate_geom(
                tag="3d",
                geom_config=self.config_3d_dataset.dataset_info.geom,
                center_pos=center_pos,
            )

            dataset_reference = get_dataset_reference(
                tag="3d", geom=geom, geom_config=self.config_3d_dataset.dataset_info.geom
            )

            dataset_maker = get_dataset_maker(
                tag="3d",
                dataset_config=self.config_3d_dataset.dataset_info,
            )

            safe_maker = get_safe_mask_maker(safe_config=self.config_3d_dataset.dataset_info.safe_mask)

            # If there is no explicit list of models provided for the 3D data,
            # one can use one of the several catalogs available in Gammapy.
            # Reading them as Models will keep the procedure uniform.

            # Unless the unique skymodels for 3D dataset is already set.
            if len(self.list_source_models) == 0:
                if not filled_skymodel:
                    # Read the SkyModel info from AsgardpyConfig.target section
                    if len(self.config_target.components) > 0:
                        models_ = read_models_from_asgardpy_config(self.config_target)
                        self.list_source_models = models_

                # If a catalog information is provided, use it to build up the list of models
                # Check if a catalog data is given with selection radius
                if self.config_target.use_catalog.selection_radius != 0 * u.deg:
                    catalog = CATALOG_REGISTRY.get_cls(self.config_target.use_catalog.name)()

                    # One can also provide a separate file, but one has to add
                    # another config option for reading Catalog file paths.
                    sep = catalog.positions.separation(center_pos["center"].galactic)

                    for k, cat_ in enumerate(catalog):
                        if sep[k] < self.config_target.use_catalog.selection_radius:
                            self.list_source_models.append(cat_.sky_model())

            excluded_geom = generate_geom(
                tag="3d-ex",
                geom_config=self.config_3d_dataset.dataset_info.geom,
                center_pos=center_pos,
            )

            exclusion_mask = get_exclusion_region_mask(
                exclusion_params=self.config_3d_dataset.dataset_info.background.exclusion,
                exclusion_regions=exclusion_regions,
                excluded_geom=excluded_geom,
                config_target=self.config_target,
                geom_config=self.config_3d_dataset.dataset_info.geom,
                log=self.log,
            )

            bkg_maker = get_bkg_maker(
                bkg_config=self.config_3d_dataset.dataset_info.background,
                exclusion_mask=exclusion_mask,
            )

            dataset = generate_dl4_dataset(
                tag="3d",
                observations=observations,
                dataset_reference=dataset_reference,
                dataset_maker=dataset_maker,
                bkg_maker=bkg_maker,
                safe_maker=safe_maker,
                n_jobs=self.config_full.general.n_jobs,
                parallel_backend=self.config_full.general.parallel_backend,
            )

        elif "lat" in self.config_3d_dataset.input_dl3[0].type:
            # Fermi-LAT style input: build the MapDataset manually from the
            # events file and the separate IRF files.
            self.load_events(file_list["events_file"])

            # Start preparing objects to create the counts map
            self.set_energy_dispersion_matrix()

            center_pos = get_source_position(
                target_region=self.config_target.sky_position,
                fits_header=self.events["event_fits"][1].header,
            )

            # Create the Counts Map
            self.events["counts_map"] = create_counts_map(
                geom_config=self.config_3d_dataset.dataset_info.geom,
                center_pos=center_pos,
            )
            self.events["counts_map"].fill_by_coord(
                {
                    "skycoord": self.events["events"].radec,
                    "energy": self.events["events"].energy,
                    "time": self.events["events"].time,
                }
            )
            # Create any dataset reduction makers or mask
            self.generate_diffuse_background_cutout()

            self.set_edisp_interpolator()
            self.set_exposure_interpolator()

            self.exclusion_mask = get_exclusion_region_mask(
                exclusion_params=self.config_3d_dataset.dataset_info.background.exclusion,
                excluded_geom=self.events["counts_map"].geom.copy(),
                exclusion_regions=exclusion_regions,
                config_target=self.config_target,
                geom_config=self.config_3d_dataset.dataset_info.geom,
                log=self.log,
            )

            # Generate the final dataset
            dataset = self.generate_dataset(key_name)

        # NOTE(review): if input_dl3[0].type is neither "gadf-dl3" nor
        # contains "lat", `dataset` is unbound at this point — confirm that
        # config validation restricts the allowed type values.
        if len(self.list_source_models) != 0:
            # Apply the same exclusion mask to the list of source models as applied
            # to the Counts Map
            self.list_source_models = apply_selection_mask_to_models(
                self.list_source_models,
                target_source=self.config_target.source_name,
                selection_mask=self.exclusion_mask,
            )

        return dataset, self.list_source_models

    def read_to_objects(self, key_name):
        """
        For each key type of files, read the files to get the required
        Gammapy objects for further analyses.
        """
        file_list = {}

        # Read the first IO list for events, IRFs and XML files

        # Get the Diffuse models files list
        for io_dict in self.config_3d_dataset.input_dl3:
            if io_dict.type in ["gadf-dl3"]:
                file_list, _ = self.get_base_objects(io_dict, key_name, file_list)

            if io_dict.type in ["lat"]:
                # Unpack exposure, PSF and energy-dispersion IRFs in one go.
                (
                    file_list,
                    [
                        self.irfs["exposure"],
                        self.irfs["psf"],
                        self.irfs["edisp"],
                    ],
                ) = self.get_base_objects(io_dict, key_name, file_list)

            if io_dict.type in ["lat-aux"]:
                # When the aux glob patterns are not given explicitly, recover
                # them from the Fermi XML model file.
                if io_dict.glob_pattern["iso_diffuse"] == "":
                    io_dict.glob_pattern = update_aux_info_from_fermi_xml(
                        io_dict.glob_pattern, file_list["xml_file"], fetch_iso_diff=True
                    )
                if io_dict.glob_pattern["gal_diffuse"] == "":
                    io_dict.glob_pattern = update_aux_info_from_fermi_xml(
                        io_dict.glob_pattern, file_list["xml_file"], fetch_gal_diff=True
                    )

                (
                    file_list,
                    [
                        self.diffuse_models["gal_diffuse"],
                        self.diffuse_models["iso_diffuse"],
                        self.diffuse_models["key_name"],
                    ],
                ) = self.get_base_objects(io_dict, key_name, file_list)
                # Build the source model list (and update the diffuse-model
                # dict) from the Fermi XML model file.
                self.list_source_models, self.diffuse_models = read_fermi_xml_models_list(
                    self.list_source_models,
                    io_dict.input_dir,
                    file_list["xml_file"],
                    self.diffuse_models,
                    asgardpy_target_config=self.config_target,
                )

        # After reading the list of source objects, check if the source position needs to be
        # updated from the list provided.
        self.update_source_pos_from_3d_dataset()

        return file_list

    def get_base_objects(self, dl3_dir_dict, key, file_list):
        """
        For a DL3 files type and tag of the 'mode of observations' or key
        (FRONT/00 and BACK/01 for Fermi-LAT in enrico/fermipy files),
        read the files to appropriate Object type for further analysis.

        If there are no distinct key types of files, the value is None.
        """
        dl3_info = DL3Files(dl3_dir_dict, log=self.log)
        object_list = []

        if dl3_dir_dict.type.lower() in ["gadf-dl3"]:
            # GADF input: only the events-file list is needed here; the IRFs
            # are read later by the Gammapy maker chain.
            dl3_info.list_dl3_files()
            file_list = dl3_info.events_files

            return file_list, object_list
        else:
            file_list = dl3_info.prepare_lat_files(key, file_list)

            if dl3_dir_dict.type.lower() in ["lat"]:
                # Read the Fermi-LAT IRF files into Gammapy objects; the DRM
                # FITS HDUs are kept raw and converted in set_energy_axes().
                exposure = Map.read(file_list["expmap_file"])
                psf = PSFMap.read(file_list["psf_file"], format="gtpsf")
                drmap = fits.open(file_list["edrm_file"])
                object_list = [exposure, psf, drmap]

            if dl3_dir_dict.type.lower() in ["lat-aux"]:
                # Aux input: only file paths and the key name are returned.
                object_list = [file_list["gal_diff_file"], file_list["iso_diff_file"], key]

            return file_list, object_list

    def update_source_pos_from_3d_dataset(self):
        """
        Introduce the source coordinates from the 3D dataset to be the standard
        value in the main config file, for further use.
        """
        if self.config_target.use_uniform_position:
            source_position_from_3d = None

            for source in self.list_source_models:
                if source.name == self.config_target.source_name:
                    source_position_from_3d = source.spatial_model.position.icrs

            # NOTE(review): if no entry of list_source_models matches
            # config_target.source_name, source_position_from_3d stays None and
            # the attribute access below raises AttributeError — confirm the
            # target is guaranteed to be present in the model list.
            self.config_full.target.sky_position.lon = str(u.Quantity(source_position_from_3d.ra))
            self.config_full.target.sky_position.lat = str(u.Quantity(source_position_from_3d.dec))

            self.config_full.update(self.config_full)
            self.config_target = self.config_full.target

    def set_energy_axes(self):
        """
        Get the energy axes from the given Detector Response Matrix file.

        Needs to be generalized for other possible file structures for other
        instruments.
        """
        # ENERG_LO/ENERG_HI columns of the DRM HDU give the bin edges in MeV.
        energy_lo = self.irfs["edisp"]["DRM"].data["ENERG_LO"] * u.MeV
        energy_hi = self.irfs["edisp"]["DRM"].data["ENERG_HI"] * u.MeV

        # Edges = first lower edge followed by all upper edges.
        energy_axis = MapAxis.from_energy_edges(np.append(energy_lo[0], energy_hi))
        energy_axis_true = energy_axis.copy(name="energy_true")

        return energy_axis, energy_axis_true

    def set_energy_dispersion_matrix(self):
        """
        Generate the Energy Dispersion Kernel from the given Detector Response
        Matrix file.

        Needs to be generalized for other possible file structures for other
        instruments.
        """
        energy_axis, energy_axis_true = self.set_energy_axes()
        # The MATRIX column holds variable-length rows; list() materializes
        # them before converting to a 2D array.
        drm = self.irfs["edisp"]["DRM"].data["MATRIX"]
        drm_matrix = np.array(list(drm))

        self.irfs["edisp_kernel"] = EDispKernel(axes=[energy_axis_true, energy_axis], data=drm_matrix)

    def load_events(self, events_file):
        """
        Loading the events files for the specific "Key" into an EventList
        object and the GTI information into a GTI object.
        """
        # Raw HDUs are kept as well: run() reads the header of HDU 1 for the
        # source-position lookup.
        self.events["event_fits"] = fits.open(events_file)
        self.events["events"] = EventList.read(events_file)
        self.events["gti"] = GTI.read(events_file)

    def generate_diffuse_background_cutout(self):
        """
        Perform a cutout of the Diffuse background model with respect to the
        counts map geom (may improve fitting speed?) and update the main list
        of models.
        """
        # NOTE(review): the "gal_diffuse_map" key is not initialized in
        # __init__ — presumably it is added by read_fermi_xml_models_list;
        # verify before calling this method outside the "lat" path.
        diffuse_cutout = self.diffuse_models["gal_diffuse_map"].cutout(
            self.events["counts_map"].geom.center_skydir, self.events["counts_map"].geom.width[0]
        )
        self.diffuse_models["gal_diffuse_cutout"], _ = create_gal_diffuse_skymodel(diffuse_cutout)

        # Update the model in self.list_source_models
        for k, model_ in enumerate(self.list_source_models):
            if model_.name in ["diffuse-iem"]:
                self.list_source_models[k] = self.diffuse_models["gal_diffuse_cutout"]

    def set_edisp_interpolator(self):
        """
        Get the Energy Dispersion Kernel interpolated along true and
        reconstructed energy of the real counts.
        """
        axis_reco = MapAxis.from_edges(
            self.events["counts_map"].geom.axes["energy"].edges,
            name="energy",
            unit="MeV",  # Need to be generalized
            interp="log",
        )
        axis_true = axis_reco.copy(name="energy_true")
        # NOTE(review): np.meshgrid is fed (axis_true.center, axis_reco.center)
        # but the outputs are named (energy_reco, energy_true) — the naming
        # looks swapped relative to the inputs; verify the intended grid
        # orientation for EDispKernel.evaluate.
        energy_reco, energy_true = np.meshgrid(axis_true.center, axis_reco.center)

        drm_interp = self.irfs["edisp_kernel"].evaluate(
            "linear", **{"energy": energy_reco, "energy_true": energy_true}
        )
        self.irfs["edisp_interp_kernel"] = EDispKernel(axes=[axis_true, axis_reco], data=np.asarray(drm_interp))

    def set_exposure_interpolator(self):
        """
        Set Exposure interpolated along energy axis of real counts.
        """
        self.irfs["exposure_interp"] = self.irfs["exposure"].interp_to_geom(
            self.events["counts_map"].geom.as_energy_true
        )

    def generate_dataset(self, key_name):
        """
        Generate MapDataset for the given Instrument files using the Counts Map,
        and IRFs objects.
        """
        if self.exclusion_mask is not None:
            mask_safe = self.exclusion_mask
            self.log.info("Using the exclusion mask to create a safe mask")
        else:
            # Fall back to an all-True mask over the counts-map geometry.
            self.log.info("Using counts_map to create safe mask")
            mask_bool = np.zeros(self.events["counts_map"].geom.data_shape).astype("bool")
            mask_safe = Map.from_geom(self.events["counts_map"].geom, mask_bool)
            mask_safe.data = np.asarray(mask_safe.data == 0, dtype=bool)

        edisp = EDispKernelMap.from_edisp_kernel(self.irfs["edisp_interp_kernel"])
        # Append the key name to the dataset name when distinct keys are used.
        if key_name:
            name = f"{self.config_3d_dataset.name}_{key_name}"
        else:
            name = f"{self.config_3d_dataset.name}"

        dataset = MapDataset(
            counts=self.events["counts_map"],
            gti=self.events["gti"],
            exposure=self.irfs["exposure_interp"],
            psf=self.irfs["psf"],
            edisp=edisp,
            mask_safe=mask_safe,
            name=name,
        )

        return dataset
608
|
|
|
|