Passed
Pull Request — main (#157)
by Chaitanya
01:53
created

asgardpy.data.dataset_3d   C

Complexity

Total Complexity 53

Size/Duplication

Total Lines 607
Duplicated Lines 0 %

Importance

Changes 0
Metric Value
eloc 351
dl 0
loc 607
rs 6.96
c 0
b 0
f 0
wmc 53

How to fix   Complexity   

Complexity

Complex classes like asgardpy.data.dataset_3d often do a lot of different things. To break such a class down, we need to identify a cohesive component within that class. A common approach to finding such a component is to look for fields/methods that share the same prefixes or suffixes.

Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.

1
"""
2
Main classes to define 3D Dataset Config, 3D Dataset Analysis Step and
3
to generate 3D Datasets from given Instruments' DL3 data from the config.
4
"""
5
6
import logging
7
8
import numpy as np
9
from astropy import units as u
10
from astropy.io import fits
11
from gammapy.catalog import CATALOG_REGISTRY
12
from gammapy.data import GTI, EventList
13
from gammapy.datasets import Datasets, MapDataset
14
from gammapy.irf import EDispKernel, EDispKernelMap, PSFMap
15
from gammapy.makers import MapDatasetMaker
16
from gammapy.maps import Map, MapAxis
17
from gammapy.modeling.models import Models
18
19
from asgardpy.analysis.step_base import AnalysisStepBase
20
from asgardpy.base.base import BaseConfig
21
from asgardpy.base.geom import (
22
    GeomConfig,
23
    SkyPositionConfig,
24
    create_counts_map,
25
    generate_geom,
26
    get_source_position,
27
)
28
from asgardpy.base.reduction import (
29
    BackgroundConfig,
30
    MapSelectionEnum,
31
    ObservationsConfig,
32
    ReductionTypeEnum,
33
    SafeMaskConfig,
34
    generate_dl4_dataset,
35
    get_bkg_maker,
36
    get_dataset_maker,
37
    get_dataset_reference,
38
    get_exclusion_region_mask,
39
    get_filtered_observations,
40
    get_safe_mask_maker,
41
)
42
from asgardpy.data.target import (
43
    apply_selection_mask_to_models,
44
    read_models_from_asgardpy_config,
45
)
46
from asgardpy.gammapy.read_models import (
47
    create_gal_diffuse_skymodel,
48
    read_fermi_xml_models_list,
49
    update_aux_info_from_fermi_xml,
50
)
51
from asgardpy.io.input_dl3 import DL3Files, InputDL3Config
52
from asgardpy.io.io_dl4 import DL4BaseConfig, DL4Files, get_reco_energy_bins
53
54
__all__ = [
55
    "Datasets3DAnalysisStep",
56
    "Dataset3DBaseConfig",
57
    "Dataset3DConfig",
58
    "Dataset3DGeneration",
59
    "Dataset3DInfoConfig",
60
]
61
62
log = logging.getLogger(__name__)
63
64
65
# Defining various components of 3D Dataset Config section
66
class Dataset3DInfoConfig(BaseConfig):
    """Config section for 3D DL3 Dataset Reduction for each instrument."""

    # Identifier for this dataset entry within the instrument's config.
    name: str = "dataset-name"
    # Distinct "keys" (modes of observation, e.g. FRONT/BACK for Fermi-LAT)
    # to generate one dataset per key; empty means a single unnamed key.
    key: list = []
    # Observation selection/filter settings used to build the dataset.
    observation: ObservationsConfig = ObservationsConfig()
    # Which map reduction products to build; defaults to everything
    # gammapy's MapDatasetMaker supports.
    map_selection: list[MapSelectionEnum] = MapDatasetMaker.available_selection
    # Geometry (spatial + energy axes) of the reduced dataset.
    geom: GeomConfig = GeomConfig()
    # Background estimation settings, including exclusion regions.
    background: BackgroundConfig = BackgroundConfig()
    # Safe-energy/offset mask settings.
    safe_mask: SafeMaskConfig = SafeMaskConfig()
    # Sky position of the ON (target) region.
    on_region: SkyPositionConfig = SkyPositionConfig()
    # Whether to apply PSF containment correction during reduction.
    containment_correction: bool = True
78
79
80
class Dataset3DBaseConfig(BaseConfig):
    """
    Config section for 3D DL3 Dataset base information for each instrument.
    """

    # Name of the instrument this dataset belongs to.
    name: str = "Instrument-name"
    # One or more DL3 input locations; each entry carries a type tag
    # ("gadf-dl3", "lat", "lat-aux") that drives how files are read.
    input_dl3: list[InputDL3Config] = [InputDL3Config()]
    # When True, read a ready-made DL4 dataset instead of reducing DL3 data.
    input_dl4: bool = False
    # DL3 reduction settings for this instrument.
    dataset_info: Dataset3DInfoConfig = Dataset3DInfoConfig()
    # DL4 dataset I/O settings (file paths, spectral energy ranges).
    dl4_dataset_info: DL4BaseConfig = DL4BaseConfig()
90
91
92
class Dataset3DConfig(BaseConfig):
    """Config section for a list of all 3D DL3 Datasets information."""

    # Reduction type for 3D analyses; always cube-style reduction.
    type: ReductionTypeEnum = ReductionTypeEnum.cube
    # One entry per instrument contributing a 3D dataset.
    instruments: list[Dataset3DBaseConfig] = [Dataset3DBaseConfig()]
97
98
99
# The main Analysis Step
100
class Datasets3DAnalysisStep(AnalysisStepBase):
    """
    From the given config information, prepare the full list of 3D datasets,
    iterating over all the Instruments' information by running the
    Dataset3DGeneration function.
    """

    tag = "datasets-3d"

    def _run(self):
        """
        Build the 3D datasets for every configured instrument.

        Returns
        -------
        datasets_3d_final: gammapy Datasets with one entry per instrument key.
        models_final: merged Models object, or None when no models were read.
        instrument_spectral_info: dict with instrument names, spectral energy
            ranges, linked-parameter correction ("free_params") and total reco
            energy bins ("en_bins"), used later for the degrees-of-freedom
            estimation.
        """
        instruments_list = self.config.dataset3d.instruments
        self.log.info("%d number of 3D Datasets given", len(instruments_list))

        datasets_3d_final = Datasets()
        models_final = Models()
        instrument_spectral_info = {"name": [], "spectral_energy_ranges": []}

        # Calculate the total number of reconstructed energy bins used
        # and the number of linked model parameters to incorporate in the
        # total number of free model parameters, for the final estimation of
        # total number of degrees of freedom
        free_params = 0
        en_bins = 0

        # Iterate over all instrument information given (direct iteration
        # replaces the previous np.arange index loop).
        for config_3d_dataset in instruments_list:
            instrument_spectral_info["name"].append(config_3d_dataset.name)

            key_names = config_3d_dataset.dataset_info.key
            if len(key_names) > 0:
                self.log.info("The different keys used: %s", " ".join(map(str, key_names)))
            else:
                # A single pass with no distinct key.
                key_names = [None]
                self.log.info("No distinct keys used for the 3D dataset")

            # Extra Datasets object to differentiate between datasets obtained
            # from various "keys" of each instrument.
            dataset_instrument = Datasets()
            dl4_files = DL4Files(config_3d_dataset.dl4_dataset_info, self.log)

            # Only read unique SkyModels for the first instrument, unless there
            # are associated files like XML to read from for the particular
            # instrument.
            filled_skymodel = len(models_final) > 0

            # Retrieving a single dataset for each instrument key.
            for key in key_names:
                if not config_3d_dataset.input_dl4:
                    generate_3d_dataset = Dataset3DGeneration(self.log, config_3d_dataset, self.config)
                    dataset, models = generate_3d_dataset.run(key, filled_skymodel)
                else:
                    # DL4 data is already reduced; just read it from file.
                    dataset = dl4_files.get_dl4_dataset(config_3d_dataset.dataset_info.observation)
                    models = []

                # Use the individual Dataset type object for following tasks
                if isinstance(dataset, Datasets):
                    dataset = dataset[0]

                self.update_model_dataset_names(models, dataset, models_final)
                dataset_instrument.append(dataset)

            models_final, free_params = self.update_models_list(models_final, free_params)
            energy_bin_edges = dl4_files.get_spectral_energies()
            instrument_spectral_info["spectral_energy_ranges"].append(energy_bin_edges)

            for data in dataset_instrument:
                en_bins = get_reco_energy_bins(data, en_bins)
                datasets_3d_final.append(data)

        instrument_spectral_info["free_params"] = free_params
        instrument_spectral_info["en_bins"] = en_bins

        return datasets_3d_final, models_final, instrument_spectral_info

    def update_models_list(self, models_final, free_params):
        """
        Link the isotropic-diffuse spectral components across instrument keys.

        For every "diffuse-iso" model after the first, the first model's
        second spectral component is tied to that model's, and each such link
        reduces the number of free parameters (degrees of freedom) by one.

        Parameters
        ----------
        models_final: Models object accumulated so far (may be empty).
        free_params: running linked-parameter correction to the DoF count.

        Returns
        -------
        (models_final, free_params): models_final becomes None when empty.
        """
        if len(models_final) > 0:
            # Linking the spectral model of the diffuse model for each key
            diffuse_models_names = [
                model_name for model_name in models_final.names if "diffuse-iso" in model_name
            ]

            if len(diffuse_models_names) > 1:
                for model_name in diffuse_models_names[1:]:
                    # NOTE(review): each iteration reassigns the FIRST model's
                    # component from the current one, so only the last link
                    # survives while free_params is decremented for every
                    # model — confirm the intended link direction.
                    models_final[diffuse_models_names[0]].spectral_model.model2 = models_final[
                        model_name
                    ].spectral_model.model2
                    # For each linked model parameter, reduce the number of DoF
                    free_params -= 1
        else:
            # No models were read at all; signal that explicitly with None.
            models_final = None

        return models_final, free_params

    def update_model_dataset_names(self, models, dataset, models_final):
        """
        Attach the dataset's name to each model and merge into the final list.

        Models already present in models_final (matched by name) only get the
        new dataset name appended to their datasets_names; new models are
        appended wholesale with datasets_names set to this dataset only.

        Returns the updated models_final (also mutated in place).
        """
        # Assigning datasets_names and including them in the final
        # model list
        if len(models) > 0:
            for model_ in models:
                model_.datasets_names = [dataset.name]

                if model_.name in models_final.names:
                    models_final[model_.name].datasets_names.append(dataset.name)
                else:
                    models_final.append(model_)

        return models_final
215
216
217
class Dataset3DGeneration:
    """
    Class for 3D dataset creation based on the config or AsgardpyConfig
    information provided on the 3D dataset and the target source.

    Runs the following steps:

    1. Read the DL3 files of 3D datasets into gammapy readable objects.

    2. Create the base counts Map.

    3. Prepare standard data reduction makers using the parameters passed in the config.

    4. Generate the final dataset.
    """

    def __init__(self, log, config_3d_dataset, config_full):
        # Instrument-level config (Dataset3DBaseConfig) for this dataset.
        self.config_3d_dataset = config_3d_dataset
        self.log = log
        # Exclusion region mask; filled in the Fermi-LAT path and reused as
        # the safe mask when generating the final dataset.
        self.exclusion_mask = None
        # Holders for the IRF objects read from files or derived below.
        self.irfs = {
            "exposure": None,
            "psf": None,
            "edisp": None,
            "edisp_kernel": None,
            "edisp_interp_kernel": None,
            "exposure_interp": None,
        }
        # Event list, its raw FITS HDUs, GTI and the counts map built from it.
        self.events = {"events": None, "event_fits": None, "gti": None, "counts_map": None}
        # Diffuse model components as read from the Fermi auxiliary files.
        self.diffuse_models = {
            "gal_diffuse": None,
            "iso_diffuse": None,
            "key_name": None,
            "gal_diffuse_cutout": None,
        }
        self.list_source_models = []

        # For updating the main config file with target source position
        # information if necessary.
        self.config_full = config_full
        self.config_target = config_full.target

    def run(self, key_name, filled_skymodel):
        """
        Main function to run the creation of 3D dataset.

        Parameters
        ----------
        key_name: the instrument "key" being processed (may be None).
        filled_skymodel: True when unique SkyModels were already read for a
            previous instrument, so they should not be read again.

        Returns the generated dataset and the (mask-filtered) source models.
        """
        # First check for the given file list if they are readable or not.
        file_list = self.read_to_objects(key_name)

        exclusion_regions = []

        # NOTE(review): `dataset` is only bound in these two branches; an
        # unrecognized input_dl3 type would raise NameError at the return —
        # confirm the config validation guarantees one of these types.
        if self.config_3d_dataset.input_dl3[0].type == "gadf-dl3":
            dataset = self.generate_gadf_dataset(file_list, exclusion_regions, filled_skymodel)

        elif "lat" in self.config_3d_dataset.input_dl3[0].type:
            dataset = self.generate_fermi_lat_dataset(file_list, exclusion_regions, key_name)

        if len(self.list_source_models) != 0:
            # Apply the same exclusion mask to the list of source models as applied
            # to the Counts Map
            self.list_source_models = apply_selection_mask_to_models(
                self.list_source_models,
                target_source=self.config_target.source_name,
                selection_mask=self.exclusion_mask,
            )

        return dataset, self.list_source_models

    def read_to_objects(self, key_name):
        """
        For each key type of files, read the files to get the required
        Gammapy objects for further analyses.

        Iterates the instrument's input_dl3 entries and dispatches on their
        type tag, filling self.irfs, self.diffuse_models and
        self.list_source_models as a side effect. Returns the file list dict.
        """
        file_list = {}

        # Read the first IO list for events, IRFs and XML files

        # Get the Diffuse models files list
        for io_dict in self.config_3d_dataset.input_dl3:
            match io_dict.type:
                case "gadf-dl3":
                    # Plain event files; no extra objects returned.
                    file_list, _ = self.get_base_objects(io_dict, key_name, file_list)

                case "lat":
                    # Fermi-LAT exposure map, PSF and detector response matrix.
                    (
                        file_list,
                        [
                            self.irfs["exposure"],
                            self.irfs["psf"],
                            self.irfs["edisp"],
                        ],
                    ) = self.get_base_objects(io_dict, key_name, file_list)

                case "lat-aux":
                    # Fill missing diffuse-model glob patterns from the XML
                    # model file before listing the auxiliary files.
                    if io_dict.glob_pattern["iso_diffuse"] == "":
                        io_dict.glob_pattern = update_aux_info_from_fermi_xml(
                            io_dict.glob_pattern, file_list["xml_file"], fetch_iso_diff=True
                        )
                    if io_dict.glob_pattern["gal_diffuse"] == "":
                        io_dict.glob_pattern = update_aux_info_from_fermi_xml(
                            io_dict.glob_pattern, file_list["xml_file"], fetch_gal_diff=True
                        )

                    (
                        file_list,
                        [
                            self.diffuse_models["gal_diffuse"],
                            self.diffuse_models["iso_diffuse"],
                            self.diffuse_models["key_name"],
                        ],
                    ) = self.get_base_objects(io_dict, key_name, file_list)

                    # Build the source model list from the Fermi XML file.
                    self.list_source_models, self.diffuse_models = read_fermi_xml_models_list(
                        self.list_source_models,
                        io_dict.input_dir,
                        file_list["xml_file"],
                        self.diffuse_models,
                        asgardpy_target_config=self.config_target,
                    )

        # After reading the list of source objects, check if the source position needs to be
        # updated from the list provided.
        self.update_source_pos_from_3d_dataset()

        return file_list

    def get_base_objects(self, dl3_dir_dict, key, file_list):
        """
        For a DL3 files type and tag of the 'mode of observations' or key
        (FRONT/00 and BACK/01 for Fermi-LAT in enrico/fermipy files),
        read the files to appropriate Object type for further analysis.

        If there are no distinct key types of files, the value is None.

        Returns the (possibly replaced) file_list and a type-dependent list
        of objects: [] for "gadf-dl3", [exposure, psf, drmap] for "lat",
        [gal_diff_file, iso_diff_file, key] for "lat-aux".
        """
        dl3_info = DL3Files(dl3_dir_dict, log=self.log)
        object_list = []

        if dl3_dir_dict.type.lower() in ["gadf-dl3"]:
            dl3_info.list_dl3_files()
            # NOTE: replaces the incoming dict with the events file list.
            file_list = dl3_info.events_files

            return file_list, object_list
        else:
            file_list = dl3_info.prepare_lat_files(key, file_list)

            if dl3_dir_dict.type.lower() in ["lat"]:
                exposure = Map.read(file_list["expmap_file"])
                psf = PSFMap.read(file_list["psf_file"], format="gtpsf")
                # Detector response matrix kept as raw FITS HDUs; parsed later
                # in set_energy_axes / set_energy_dispersion_matrix.
                drmap = fits.open(file_list["edrm_file"])
                object_list = [exposure, psf, drmap]

            if dl3_dir_dict.type.lower() in ["lat-aux"]:
                object_list = [file_list["gal_diff_file"], file_list["iso_diff_file"], key]

            return file_list, object_list

    def update_source_pos_from_3d_dataset(self):
        """
        Introduce the source coordinates from the 3D dataset to be the standard
        value in the main config file, for further use.

        Only acts when the config requests a uniform target position; the
        position of the matching source model is written back (as ICRS
        strings) into the main config.
        """
        if self.config_target.use_uniform_position:
            source_position_from_3d = None

            for source in self.list_source_models:
                if source.name == self.config_target.source_name:
                    source_position_from_3d = source.spatial_model.position.icrs

                    self.config_full.target.sky_position.lon = str(u.Quantity(source_position_from_3d.ra))
                    self.config_full.target.sky_position.lat = str(u.Quantity(source_position_from_3d.dec))

                    # Propagate the change and refresh the cached target section.
                    self.config_full.update(self.config_full)
                    self.config_target = self.config_full.target

    def set_energy_axes(self):
        """
        Get the energy axes from the given Detector Response Matrix file.

        Needs to be generalized for other possible file structures for other
        instruments.

        Returns (energy_axis, energy_axis_true) as MapAxis objects in MeV.
        """
        energy_lo = self.irfs["edisp"]["DRM"].data["ENERG_LO"] * u.MeV
        energy_hi = self.irfs["edisp"]["DRM"].data["ENERG_HI"] * u.MeV

        # Bin edges = first lower edge followed by all upper edges.
        energy_axis = MapAxis.from_energy_edges(np.append(energy_lo[0], energy_hi))
        energy_axis_true = energy_axis.copy(name="energy_true")

        return energy_axis, energy_axis_true

    def set_energy_dispersion_matrix(self):
        """
        Generate the Energy Dispersion Kernel from the given Detector Response
        Matrix file.

        Needs to be generalized for other possible file structures for other
        instruments.

        Stores the result in self.irfs["edisp_kernel"].
        """
        energy_axis, energy_axis_true = self.set_energy_axes()
        drm = self.irfs["edisp"]["DRM"].data["MATRIX"]
        drm_matrix = np.array(list(drm))

        self.irfs["edisp_kernel"] = EDispKernel(axes=[energy_axis_true, energy_axis], data=drm_matrix)

    def load_events(self, events_file):
        """
        Loading the events files for the specific "Key" into an EventList
        object and the GTI information into a GTI object.
        """
        # Raw HDUs are kept as well; the header is used later for the
        # source position lookup.
        self.events["event_fits"] = fits.open(events_file)
        self.events["events"] = EventList.read(events_file)
        self.events["gti"] = GTI.read(events_file)

    def generate_diffuse_background_cutout(self):
        """
        Perform a cutout of the Diffuse background model with respect to the
        counts map geom (may improve fitting speed?) and update the main list
        of models.
        """
        # NOTE(review): "gal_diffuse_map" is not initialized in __init__;
        # presumably added by read_fermi_xml_models_list — confirm.
        diffuse_cutout = self.diffuse_models["gal_diffuse_map"].cutout(
            self.events["counts_map"].geom.center_skydir, self.events["counts_map"].geom.width[0]
        )
        self.diffuse_models["gal_diffuse_cutout"], _ = create_gal_diffuse_skymodel(diffuse_cutout)

        # Update the model in self.list_source_models
        for k, model_ in enumerate(self.list_source_models):
            if model_.name in ["diffuse-iem"]:
                self.list_source_models[k] = self.diffuse_models["gal_diffuse_cutout"]

    def set_edisp_interpolator(self):
        """
        Get the Energy Dispersion Kernel interpolated along true and
        reconstructed energy of the real counts.

        Stores the result in self.irfs["edisp_interp_kernel"].
        """
        axis_reco = MapAxis.from_edges(
            self.events["counts_map"].geom.axes["energy"].edges,
            name="energy",
            unit="MeV",  # Need to be generalized
            interp="log",
        )
        axis_true = axis_reco.copy(name="energy_true")
        # NOTE(review): meshgrid is fed axis_true first but the outputs are
        # named energy_reco, energy_true — the naming (or argument order)
        # looks swapped; confirm against the evaluate() call below.
        energy_reco, energy_true = np.meshgrid(axis_true.center, axis_reco.center)

        drm_interp = self.irfs["edisp_kernel"].evaluate(
            "linear", **{"energy": energy_reco, "energy_true": energy_true}
        )
        self.irfs["edisp_interp_kernel"] = EDispKernel(axes=[axis_true, axis_reco], data=np.asarray(drm_interp))

    def set_exposure_interpolator(self):
        """
        Set Exposure interpolated along energy axis of real counts.
        """
        self.irfs["exposure_interp"] = self.irfs["exposure"].interp_to_geom(
            self.events["counts_map"].geom.as_energy_true
        )

    def generate_dataset(self, key_name):
        """
        Generate MapDataset for the given Instrument files using the Counts Map,
        and IRFs objects.

        Uses the exclusion mask as safe mask when available; otherwise builds
        an all-True safe mask on the counts-map geometry.
        """
        if self.exclusion_mask is not None:
            mask_safe = self.exclusion_mask
            self.log.info("Using the exclusion mask to create a safe mask")
        else:
            self.log.info("Using counts_map to create safe mask")
            # Build an all-True mask: zeros -> (data == 0) is True everywhere.
            mask_bool = np.zeros(self.events["counts_map"].geom.data_shape).astype("bool")
            mask_safe = Map.from_geom(self.events["counts_map"].geom, mask_bool)
            mask_safe.data = np.asarray(mask_safe.data == 0, dtype=bool)

        edisp = EDispKernelMap.from_edisp_kernel(self.irfs["edisp_interp_kernel"])
        # Dataset name encodes the instrument and (if any) the key.
        if key_name:
            name = f"{self.config_3d_dataset.name}_{key_name}"
        else:
            name = f"{self.config_3d_dataset.name}"

        dataset = MapDataset(
            counts=self.events["counts_map"],
            gti=self.events["gti"],
            exposure=self.irfs["exposure_interp"],
            psf=self.irfs["psf"],
            edisp=edisp,
            mask_safe=mask_safe,
            name=name,
        )

        return dataset

    # Main functions for compiling different DL4 dataset generating procedures
    def generate_gadf_dataset(self, file_list, exclusion_regions, filled_skymodel):
        """
        Run the standard GADF DL3 -> DL4 reduction chain: filter observations,
        build geometry, makers and background/exclusion masks, optionally fill
        the source-model list (from config or catalog), and produce the
        final dataset via generate_dl4_dataset.
        """
        observations = get_filtered_observations(
            dl3_path=self.config_3d_dataset.input_dl3[0].input_dir,
            obs_config=self.config_3d_dataset.dataset_info.observation,
            log=self.log,
        )
        center_pos = get_source_position(target_region=self.config_3d_dataset.dataset_info.on_region)

        geom = generate_geom(
            tag="3d",
            geom_config=self.config_3d_dataset.dataset_info.geom,
            center_pos=center_pos,
        )

        dataset_reference = get_dataset_reference(
            tag="3d", geom=geom, geom_config=self.config_3d_dataset.dataset_info.geom
        )

        dataset_maker = get_dataset_maker(
            tag="3d",
            dataset_config=self.config_3d_dataset.dataset_info,
        )

        safe_maker = get_safe_mask_maker(safe_config=self.config_3d_dataset.dataset_info.safe_mask)

        # If there is no explicit list of models provided for the 3D data,
        # one can use one of the several catalogs available in Gammapy.
        # Reading them as Models will keep the procedure uniform.

        # Unless the unique skymodels for 3D dataset is already set.
        # Move it to Target module?
        if len(self.list_source_models) == 0:
            if not filled_skymodel:
                # Read the SkyModel info from AsgardpyConfig.target section
                if len(self.config_target.components) > 0:
                    models_ = read_models_from_asgardpy_config(self.config_target)
                    self.list_source_models = models_

                # If a catalog information is provided, use it to build up the list of models
                # Check if a catalog data is given with selection radius
                if self.config_target.use_catalog.selection_radius != 0 * u.deg:
                    catalog = CATALOG_REGISTRY.get_cls(self.config_target.use_catalog.name)()

                    # One can also provide a separate file, but one has to add
                    # another config option for reading Catalog file paths.
                    sep = catalog.positions.separation(center_pos["center"].galactic)

                    # Keep only catalog sources within the selection radius.
                    for k, cat_ in enumerate(catalog):
                        if sep[k] < self.config_target.use_catalog.selection_radius:
                            self.list_source_models.append(cat_.sky_model())

        excluded_geom = generate_geom(
            tag="3d-ex",
            geom_config=self.config_3d_dataset.dataset_info.geom,
            center_pos=center_pos,
        )

        exclusion_mask = get_exclusion_region_mask(
            exclusion_params=self.config_3d_dataset.dataset_info.background.exclusion,
            exclusion_regions=exclusion_regions,
            excluded_geom=excluded_geom,
            config_target=self.config_target,
            geom_config=self.config_3d_dataset.dataset_info.geom,
            log=self.log,
        )

        bkg_maker = get_bkg_maker(
            bkg_config=self.config_3d_dataset.dataset_info.background,
            exclusion_mask=exclusion_mask,
        )

        dataset = generate_dl4_dataset(
            tag="3d",
            observations=observations,
            dataset_reference=dataset_reference,
            dataset_maker=dataset_maker,
            bkg_maker=bkg_maker,
            safe_maker=safe_maker,
            n_jobs=self.config_full.general.n_jobs,
            parallel_backend=self.config_full.general.parallel_backend,
        )
        return dataset

    def generate_fermi_lat_dataset(self, file_list, exclusion_regions, key_name):
        """
        Run the Fermi-LAT specific chain: load events, build the energy
        dispersion matrix and counts map, prepare the diffuse background
        cutout and interpolated IRFs, build the exclusion mask, then
        assemble the final MapDataset via generate_dataset.
        """
        self.load_events(file_list["events_file"])

        # Start preparing objects to create the counts map
        self.set_energy_dispersion_matrix()

        center_pos = get_source_position(
            target_region=self.config_target.sky_position,
            fits_header=self.events["event_fits"][1].header,
        )

        # Create the Counts Map
        self.events["counts_map"] = create_counts_map(
            geom_config=self.config_3d_dataset.dataset_info.geom,
            center_pos=center_pos,
        )
        self.events["counts_map"].fill_by_coord(
            {
                "skycoord": self.events["events"].radec,
                "energy": self.events["events"].energy,
                "time": self.events["events"].time,
            }
        )
        # Create any dataset reduction makers or mask
        self.generate_diffuse_background_cutout()

        self.set_edisp_interpolator()
        self.set_exposure_interpolator()

        self.exclusion_mask = get_exclusion_region_mask(
            exclusion_params=self.config_3d_dataset.dataset_info.background.exclusion,
            excluded_geom=self.events["counts_map"].geom.copy(),
            exclusion_regions=exclusion_regions,
            config_target=self.config_target,
            geom_config=self.config_3d_dataset.dataset_info.geom,
            log=self.log,
        )

        # Generate the final dataset
        dataset = self.generate_dataset(key_name)

        return dataset
632