"""
Main classes to define the 3D Dataset Config, the 3D Dataset Analysis Step, and
to generate 3D Datasets from the given instruments' DL3 data as per the config.
"""

import logging

import numpy as np
from astropy import units as u
from astropy.io import fits
from gammapy.catalog import CATALOG_REGISTRY
from gammapy.data import GTI, EventList
from gammapy.datasets import Datasets, MapDataset
from gammapy.irf import EDispKernel, EDispKernelMap, PSFMap
from gammapy.makers import MapDatasetMaker
from gammapy.maps import Map, MapAxis
from gammapy.modeling.models import Models

from asgardpy.analysis.step_base import AnalysisStepBase
from asgardpy.base.base import BaseConfig
from asgardpy.base.geom import (
    GeomConfig,
    SkyPositionConfig,
    create_counts_map,
    generate_geom,
    get_source_position,
)
from asgardpy.base.reduction import (
    BackgroundConfig,
    MapSelectionEnum,
    ObservationsConfig,
    ReductionTypeEnum,
    SafeMaskConfig,
    generate_dl4_dataset,
    get_bkg_maker,
    get_dataset_maker,
    get_dataset_reference,
    get_exclusion_region_mask,
    get_filtered_observations,
    get_safe_mask_maker,
)
from asgardpy.data.target import (
    apply_selection_mask_to_models,
    read_models_from_asgardpy_config,
)
from asgardpy.gammapy.read_models import (
    create_gal_diffuse_skymodel,
    read_fermi_xml_models_list,
    update_aux_info_from_fermi_xml,
)
from asgardpy.io.input_dl3 import DL3Files, InputDL3Config
from asgardpy.io.io_dl4 import DL4BaseConfig, DL4Files, get_reco_energy_bins

__all__ = [
    "Datasets3DAnalysisStep",
    "Dataset3DBaseConfig",
    "Dataset3DConfig",
    "Dataset3DGeneration",
    "Dataset3DInfoConfig",
]

log = logging.getLogger(__name__)


# Defining various components of the 3D Dataset Config section
class Dataset3DInfoConfig(BaseConfig):
    """Config section for 3D DL3 Dataset Reduction for each instrument."""

    name: str = "dataset-name"
    key: list = []
    observation: ObservationsConfig = ObservationsConfig()
    map_selection: list[MapSelectionEnum] = MapDatasetMaker.available_selection
    geom: GeomConfig = GeomConfig()
    background: BackgroundConfig = BackgroundConfig()
    safe_mask: SafeMaskConfig = SafeMaskConfig()
    on_region: SkyPositionConfig = SkyPositionConfig()
    containment_correction: bool = True


class Dataset3DBaseConfig(BaseConfig):
    """
    Config section for the base information of a 3D DL3 Dataset for each instrument.
    """

    name: str = "Instrument-name"
    input_dl3: list[InputDL3Config] = [InputDL3Config()]
    input_dl4: bool = False
    dataset_info: Dataset3DInfoConfig = Dataset3DInfoConfig()
    dl4_dataset_info: DL4BaseConfig = DL4BaseConfig()


class Dataset3DConfig(BaseConfig):
    """Config section for a list of all 3D DL3 Datasets information."""

    type: ReductionTypeEnum = ReductionTypeEnum.cube
    instruments: list[Dataset3DBaseConfig] = [Dataset3DBaseConfig()]

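# A minimal sketch of how these config sections compose in Python (the values
# shown here are hypothetical; the defaults are defined by the classes above):
#
#     config_3d = Dataset3DConfig(
#         instruments=[
#             Dataset3DBaseConfig(
#                 name="Fermi-LAT",
#                 input_dl3=[InputDL3Config()],
#                 dataset_info=Dataset3DInfoConfig(key=["00", "01"]),
#             )
#         ],
#     )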

# The main Analysis Step
class Datasets3DAnalysisStep(AnalysisStepBase):
    """
    From the given config information, prepare the full list of 3D datasets,
    iterating over all the instruments' information by running the
    Dataset3DGeneration class.

    Also calculate the total number of reconstructed energy bins used and the
    number of linked model parameters, which enter the total number of free
    model parameters, for the final estimate of the total number of degrees of
    freedom.
    """

    tag = "datasets-3d"

    def _run(self):
        instruments_list = self.config.dataset3d.instruments
        self.log.info("Number of 3D Datasets given: %d", len(instruments_list))

        datasets_3d_final = Datasets()
        models_final = Models()
        instrument_spectral_info = {"name": [], "spectral_energy_ranges": []}

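        # Running totals used for the degrees-of-freedom estimate: linked model
        # parameters reduce the number of free parameters, and the reconstructed
        # energy bins of all datasets are summed up.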
        free_params = 0
        en_bins = 0

        # Iterate over all the instrument information given:
        for i in np.arange(len(instruments_list)):
            config_3d_dataset = instruments_list[i]
            instrument_spectral_info["name"].append(config_3d_dataset.name)

            key_names = config_3d_dataset.dataset_info.key
            if len(key_names) > 0:
                keys_str = " ".join(map(str, key_names))
                self.log.info("The different keys used: %s", keys_str)
            else:
                key_names = [None]
                self.log.info("No distinct keys used for the 3D dataset")

            # Extra Datasets object to differentiate between datasets obtained
            # from the various "keys" of each instrument.
            dataset_instrument = Datasets()
            dl4_files = DL4Files(config_3d_dataset.dl4_dataset_info, self.log)

            # Only read unique SkyModels for the first instrument, unless there
            # are associated files, like XML, to read from for the particular
            # instrument.
            filled_skymodel = False
            if len(models_final) > 0:
                filled_skymodel = True

            # Retrieve a single dataset for each instrument.
            for key in key_names:
                if not config_3d_dataset.input_dl4:
                    generate_3d_dataset = Dataset3DGeneration(self.log, config_3d_dataset, self.config)
                    dataset, models = generate_3d_dataset.run(key, filled_skymodel)
                else:
                    dataset = dl4_files.get_dl4_dataset(config_3d_dataset.dataset_info.observation)
                    models = []

                # Use the individual Dataset type object for the following tasks
                if isinstance(dataset, Datasets):
                    dataset = dataset[0]

                self.update_model_dataset_names(models, dataset, models_final)
                dataset_instrument.append(dataset)

            models_final, free_params = self.link_diffuse_models(models_final, free_params)
            energy_bin_edges = dl4_files.get_spectral_energies()
            instrument_spectral_info["spectral_energy_ranges"].append(energy_bin_edges)

            for data in dataset_instrument:
                en_bins = get_reco_energy_bins(data, en_bins)
                datasets_3d_final.append(data)

        instrument_spectral_info["free_params"] = free_params
        instrument_spectral_info["en_bins"] = en_bins

        return datasets_3d_final, models_final, instrument_spectral_info

    def link_diffuse_models(self, models_final, free_params):
        """
        Link the diffuse models, if present, and reduce the number of degrees
        of freedom accordingly.
        """
        if len(models_final) > 0:
            diffuse_models_names = []
            for model_name in models_final.names:
                if "diffuse-iso" in model_name:
                    diffuse_models_names.append(model_name)

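            # Share the second spectral-model component (model2) between the
            # iso-diffuse models so its parameters are counted only once; each
            # link removes one free parameter from the fit.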
            if len(diffuse_models_names) > 1:
                for model_name in diffuse_models_names[1:]:
                    models_final[diffuse_models_names[0]].spectral_model.model2 = models_final[
                        model_name
                    ].spectral_model.model2
                    free_params -= 1
        else:
            models_final = None

        return models_final, free_params

    def update_model_dataset_names(self, models, dataset, models_final):
        """
        Assign datasets_names to the given models and include them in the
        final list of models.

        When no associated list of models is provided, look for a separate
        model for the target and a catalog entry to fill in.
        """
        if len(models) > 0:
            for model_ in models:
                model_.datasets_names = [dataset.name]

                if model_.name in models_final.names:
                    models_final[model_.name].datasets_names.append(dataset.name)
                else:
                    models_final.append(model_)

        return models_final


class Dataset3DGeneration:
    """
    Class for 3D dataset creation based on the config or AsgardpyConfig
    information provided on the 3D dataset and the target source.

    Runs the following steps:

    1. Read the DL3 files of 3D datasets into gammapy readable objects.

    2. Create the base counts Map.

    3. Prepare standard data reduction makers using the parameters passed in the config.

    4. Generate the final dataset.
    """

    def __init__(self, log, config_3d_dataset, config_full):
        self.config_3d_dataset = config_3d_dataset
        self.log = log
        self.exclusion_mask = None
        self.irfs = {
            "exposure": None,
            "psf": None,
            "edisp": None,
            "edisp_kernel": None,
            "edisp_interp_kernel": None,
            "exposure_interp": None,
        }
        self.events = {"events": None, "event_fits": None, "gti": None, "counts_map": None}
        self.diffuse_models = {
            "gal_diffuse": None,
            "iso_diffuse": None,
            "key_name": None,
            "gal_diffuse_cutout": None,
        }
        self.list_source_models = []

        # For updating the main config file with the target source position
        # information, if necessary.
        self.config_full = config_full
        self.config_target = config_full.target

    def run(self, key_name, filled_skymodel):
        """
        Main function to run the creation of the 3D dataset.
        """
        # First check whether the files in the given file list are readable.
        file_list = self.read_to_objects(key_name)

        exclusion_regions = []

        if self.config_3d_dataset.input_dl3[0].type == "gadf-dl3":
            dataset = self.generate_gadf_dataset(file_list, exclusion_regions, filled_skymodel)

        elif "lat" in self.config_3d_dataset.input_dl3[0].type:
            dataset = self.generate_fermi_lat_dataset(file_list, exclusion_regions, key_name)

        if len(self.list_source_models) != 0:
            # Apply the same exclusion mask to the list of source models as
            # applied to the Counts Map
            self.list_source_models = apply_selection_mask_to_models(
                self.list_source_models,
                target_source=self.config_target.source_name,
                selection_mask=self.exclusion_mask,
            )

        return dataset, self.list_source_models

    def read_to_objects(self, key_name):
        """
        For each key type of the files, read the files to get the required
        Gammapy objects for further analyses.

        Read the first IO list for the events, IRFs and XML files, and then
        get the list of Diffuse model files.
        """
        file_list = {}

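        # Each entry of input_dl3 is dispatched on its type: "gadf-dl3" for
        # GADF event lists, "lat" for Fermi-LAT science-tools products
        # (exposure map, PSF, DRM) and "lat-aux" for the auxiliary diffuse
        # model and XML files.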
        for io_dict in self.config_3d_dataset.input_dl3:
            match io_dict.type:
                case "gadf-dl3":
                    file_list, _ = self.get_base_objects(io_dict, key_name, file_list)

                case "lat":
                    (
                        file_list,
                        [
                            self.irfs["exposure"],
                            self.irfs["psf"],
                            self.irfs["edisp"],
                        ],
                    ) = self.get_base_objects(io_dict, key_name, file_list)

                case "lat-aux":
                    if io_dict.glob_pattern["iso_diffuse"] == "":
                        io_dict.glob_pattern = update_aux_info_from_fermi_xml(
                            io_dict.glob_pattern, file_list["xml_file"], fetch_iso_diff=True
                        )
                    if io_dict.glob_pattern["gal_diffuse"] == "":
                        io_dict.glob_pattern = update_aux_info_from_fermi_xml(
                            io_dict.glob_pattern, file_list["xml_file"], fetch_gal_diff=True
                        )

                    (
                        file_list,
                        [
                            self.diffuse_models["gal_diffuse"],
                            self.diffuse_models["iso_diffuse"],
                            self.diffuse_models["key_name"],
                        ],
                    ) = self.get_base_objects(io_dict, key_name, file_list)

                    self.list_source_models, self.diffuse_models = read_fermi_xml_models_list(
                        self.list_source_models,
                        io_dict.input_dir,
                        file_list["xml_file"],
                        self.diffuse_models,
                        asgardpy_target_config=self.config_target,
                    )

        # Check if the source position needs to be updated from the list provided.
        self.update_source_pos_from_3d_dataset()

        return file_list

    def get_base_objects(self, dl3_dir_dict, key, file_list):
        """
        For a given DL3 file type and a tag for the 'mode of observations' or
        key (FRONT/00 and BACK/01 for Fermi-LAT in enrico/fermipy files),
        read the files into the appropriate object types for further analysis.

        If there are no distinct key types of files, the value is None.
        """
        dl3_info = DL3Files(dl3_dir_dict, log=self.log)
        object_list = []

        if dl3_dir_dict.type.lower() in ["gadf-dl3"]:
            dl3_info.list_dl3_files()
            file_list = dl3_info.events_files

            return file_list, object_list
        else:
            file_list = dl3_info.prepare_lat_files(key, file_list)

            if dl3_dir_dict.type.lower() in ["lat"]:
                exposure = Map.read(file_list["expmap_file"])
                psf = PSFMap.read(file_list["psf_file"], format="gtpsf")
                drmap = fits.open(file_list["edrm_file"])
                object_list = [exposure, psf, drmap]

            if dl3_dir_dict.type.lower() in ["lat-aux"]:
                object_list = [file_list["gal_diff_file"], file_list["iso_diff_file"], key]

            return file_list, object_list

    def update_source_pos_from_3d_dataset(self):
        """
        Use the source coordinates from the 3D dataset as the standard value
        in the main config file, for further use.
        """
        if self.config_target.use_uniform_position:
            source_position_from_3d = None

            for source in self.list_source_models:
                if source.name == self.config_target.source_name:
                    source_position_from_3d = source.spatial_model.position.icrs

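            # Write the matched position back into the main config as angle
            # strings (e.g. "123.45 deg") and propagate the update.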
            self.config_full.target.sky_position.lon = str(u.Quantity(source_position_from_3d.ra))
            self.config_full.target.sky_position.lat = str(u.Quantity(source_position_from_3d.dec))

            self.config_full.update(self.config_full)
            self.config_target = self.config_full.target

    def set_energy_axes(self):
        """
        Get the energy axes from the given Detector Response Matrix file.

        Needs to be generalized for other possible file structures of other
        instruments.
        """
        energy_lo = self.irfs["edisp"]["DRM"].data["ENERG_LO"] * u.MeV
        energy_hi = self.irfs["edisp"]["DRM"].data["ENERG_HI"] * u.MeV

        energy_axis = MapAxis.from_energy_edges(np.append(energy_lo[0], energy_hi))
        energy_axis_true = energy_axis.copy(name="energy_true")

        return energy_axis, energy_axis_true

    def set_energy_dispersion_matrix(self):
        """
        Generate the Energy Dispersion Kernel from the given Detector Response
        Matrix file.

        Needs to be generalized for other possible file structures of other
        instruments.
        """
        energy_axis, energy_axis_true = self.set_energy_axes()
        drm = self.irfs["edisp"]["DRM"].data["MATRIX"]
        drm_matrix = np.array(list(drm))

        self.irfs["edisp_kernel"] = EDispKernel(axes=[energy_axis_true, energy_axis], data=drm_matrix)

    def load_events(self, events_file):
        """
        Load the events file for the specific "Key" into an EventList object
        and the GTI information into a GTI object.
        """
        self.events["event_fits"] = fits.open(events_file)
        self.events["events"] = EventList.read(events_file)
        self.events["gti"] = GTI.read(events_file)

    def generate_diffuse_background_cutout(self):
        """
        Perform a cutout of the Diffuse background model with respect to the
        counts map geom (may improve fitting speed?) and update the main list
        of models.
        """
        diffuse_cutout = self.diffuse_models["gal_diffuse_map"].cutout(
            self.events["counts_map"].geom.center_skydir, self.events["counts_map"].geom.width[0]
        )
        self.diffuse_models["gal_diffuse_cutout"], _ = create_gal_diffuse_skymodel(diffuse_cutout)

        # Update the model in self.list_source_models
        for k, model_ in enumerate(self.list_source_models):
            if model_.name in ["diffuse-iem"]:
                self.list_source_models[k] = self.diffuse_models["gal_diffuse_cutout"]

    def set_edisp_interpolator(self):
        """
        Get the Energy Dispersion Kernel interpolated along the true and
        reconstructed energy axes of the real counts.
        """
        axis_reco = MapAxis.from_edges(
            self.events["counts_map"].geom.axes["energy"].edges,
            name="energy",
            unit="MeV",  # Needs to be generalized
            interp="log",
        )
        axis_true = axis_reco.copy(name="energy_true")
        energy_reco, energy_true = np.meshgrid(axis_true.center, axis_reco.center)

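        # Evaluate the DRM kernel on the reco/true energy grid of the counts
        # map so that the kernel matches the analysis binning.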
        drm_interp = self.irfs["edisp_kernel"].evaluate(
            "linear", **{"energy": energy_reco, "energy_true": energy_true}
        )
        self.irfs["edisp_interp_kernel"] = EDispKernel(axes=[axis_true, axis_reco], data=np.asarray(drm_interp))

    def set_exposure_interpolator(self):
        """
        Set the Exposure interpolated along the energy axis of the real counts.
        """
        self.irfs["exposure_interp"] = self.irfs["exposure"].interp_to_geom(
            self.events["counts_map"].geom.as_energy_true
        )

    def generate_dataset(self, key_name):
        """
        Generate a MapDataset for the given instrument files using the Counts
        Map and IRF objects.
        """
        if self.exclusion_mask is not None:
            mask_safe = self.exclusion_mask
            self.log.info("Using the exclusion mask to create a safe mask")
        else:
            self.log.info("Using counts_map to create safe mask")
            mask_bool = np.zeros(self.events["counts_map"].geom.data_shape).astype("bool")
            mask_safe = Map.from_geom(self.events["counts_map"].geom, mask_bool)
            mask_safe.data = np.asarray(mask_safe.data == 0, dtype=bool)

        edisp = EDispKernelMap.from_edisp_kernel(self.irfs["edisp_interp_kernel"])
        if key_name:
            name = f"{self.config_3d_dataset.name}_{key_name}"
        else:
            name = f"{self.config_3d_dataset.name}"

        dataset = MapDataset(
            counts=self.events["counts_map"],
            gti=self.events["gti"],
            exposure=self.irfs["exposure_interp"],
            psf=self.irfs["psf"],
            edisp=edisp,
            mask_safe=mask_safe,
            name=name,
        )

        return dataset

    # Main functions for compiling the different DL4 dataset generating procedures
    def generate_gadf_dataset(self, file_list, exclusion_regions, filled_skymodel):
        """
        Separate function containing the procedure for creating a GADF DL4
        dataset.
        """
        observations = get_filtered_observations(
            dl3_path=self.config_3d_dataset.input_dl3[0].input_dir,
            obs_config=self.config_3d_dataset.dataset_info.observation,
            log=self.log,
        )
        center_pos = get_source_position(target_region=self.config_3d_dataset.dataset_info.on_region)

        geom = generate_geom(
            tag="3d",
            geom_config=self.config_3d_dataset.dataset_info.geom,
            center_pos=center_pos,
        )

        dataset_reference = get_dataset_reference(
            tag="3d", geom=geom, geom_config=self.config_3d_dataset.dataset_info.geom
        )

        dataset_maker = get_dataset_maker(
            tag="3d",
            dataset_config=self.config_3d_dataset.dataset_info,
        )

        safe_maker = get_safe_mask_maker(safe_config=self.config_3d_dataset.dataset_info.safe_mask)

        # If there is no explicit list of models provided for the 3D data,
        # one can use one of the several catalogs available in Gammapy.
        # Reading them as Models will keep the procedure uniform.

        # Unless the unique sky models for the 3D dataset are already set.
        # Move it to the Target module?
        if len(self.list_source_models) == 0:
            if not filled_skymodel:
                # Read the SkyModel info from the AsgardpyConfig.target section
                if len(self.config_target.components) > 0:
                    models_ = read_models_from_asgardpy_config(self.config_target)
                    self.list_source_models = models_

            # If catalog information is provided, use it to build up the list of models.
            # Check if catalog data is given with a selection radius
            if self.config_target.use_catalog.selection_radius != 0 * u.deg:
                catalog = CATALOG_REGISTRY.get_cls(self.config_target.use_catalog.name)()

                # One can also provide a separate file, but one has to add
                # another config option for reading Catalog file paths.
                sep = catalog.positions.separation(center_pos["center"].galactic)

                for k, cat_ in enumerate(catalog):
                    if sep[k] < self.config_target.use_catalog.selection_radius:
                        self.list_source_models.append(cat_.sky_model())

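        # Build the exclusion mask on a dedicated geometry and pass it to the
        # background maker used during the data reduction.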
        excluded_geom = generate_geom(
            tag="3d-ex",
            geom_config=self.config_3d_dataset.dataset_info.geom,
            center_pos=center_pos,
        )

        exclusion_mask = get_exclusion_region_mask(
            exclusion_params=self.config_3d_dataset.dataset_info.background.exclusion,
            exclusion_regions=exclusion_regions,
            excluded_geom=excluded_geom,
            config_target=self.config_target,
            geom_config=self.config_3d_dataset.dataset_info.geom,
            log=self.log,
        )

        bkg_maker = get_bkg_maker(
            bkg_config=self.config_3d_dataset.dataset_info.background,
            exclusion_mask=exclusion_mask,
        )

        dataset = generate_dl4_dataset(
            tag="3d",
            observations=observations,
            dataset_reference=dataset_reference,
            dataset_maker=dataset_maker,
            bkg_maker=bkg_maker,
            safe_maker=safe_maker,
            n_jobs=self.config_full.general.n_jobs,
            parallel_backend=self.config_full.general.parallel_backend,
        )
        return dataset

    def generate_fermi_lat_dataset(self, file_list, exclusion_regions, key_name):
        """
        Separate function containing the procedure for creating a Fermi-LAT
        DL4 dataset.
        """
        self.load_events(file_list["events_file"])

        # Start preparing objects to create the counts map
        self.set_energy_dispersion_matrix()

        center_pos = get_source_position(
            target_region=self.config_target.sky_position,
            fits_header=self.events["event_fits"][1].header,
        )

        # Create the Counts Map
        self.events["counts_map"] = create_counts_map(
            geom_config=self.config_3d_dataset.dataset_info.geom,
            center_pos=center_pos,
        )
        self.events["counts_map"].fill_by_coord(
            {
                "skycoord": self.events["events"].radec,
                "energy": self.events["events"].energy,
                "time": self.events["events"].time,
            }
        )
        # Create any dataset reduction makers or mask
        self.generate_diffuse_background_cutout()

        self.set_edisp_interpolator()
        self.set_exposure_interpolator()

        self.exclusion_mask = get_exclusion_region_mask(
            exclusion_params=self.config_3d_dataset.dataset_info.background.exclusion,
            excluded_geom=self.events["counts_map"].geom.copy(),
            exclusion_regions=exclusion_regions,
            config_target=self.config_target,
            geom_config=self.config_3d_dataset.dataset_info.geom,
            log=self.log,
        )

        # Generate the final dataset
        dataset = self.generate_dataset(key_name)

        return dataset