Passed: Push — master (741b90...16cb93) by Giacomo, created 10:18

hawc_hal.HAL.HAL.psf_integration_method()   A

Complexity:  Conditions 2
Size:        Total Lines 22, Code Lines 4
Duplication: Lines 0, Ratio 0 %
Importance:  Changes 0

Metric  Value
cc      2
eloc    4
nop     2
dl      0
loc     22
rs      10
c       0
b       0
f       0

from __future__ import print_function

Coding Style / Naming introduced by
Module name "HAL" doesn't conform to snake_case naming style ('([a-z_][a-z0-9_]*)$' pattern). This check looks for invalid names for a range of different identifiers. You can set regular expressions to which the identifiers must conform if the defaults do not match your requirements. If your project includes a Pylint configuration file, the settings contained in that file take precedence. To find out more about Pylint, please refer to their site.

Coding Style introduced by
This module should have a docstring. The coding style of this project requires that you add a docstring to this code element. Below, you find an example for methods:

class SomeClass:
    def some_method(self):
        """Do x and return foo."""

If you would like to know more about docstrings, we recommend reading PEP-257: Docstring Conventions.

import copy
import collections

import numpy as np
import healpy as hp
import matplotlib.pyplot as plt
from scipy.stats import poisson

from astropy.convolution import Gaussian2DKernel
from astropy.convolution import convolve_fft as convolve

from threeML.plugin_prototype import PluginPrototype
from threeML.plugins.gammaln import logfactorial
from threeML.parallel import parallel_client
from threeML.io.progress_bar import progress_bar

from astromodels import Parameter

from hawc_hal.maptree import map_tree_factory
from hawc_hal.response import hawc_response_factory
from hawc_hal.convolved_source import ConvolvedPointSource, \
    ConvolvedExtendedSource3D, ConvolvedExtendedSource2D, ConvolvedSourcesContainer
from hawc_hal.healpix_handling import FlatSkyToHealpixTransform
from hawc_hal.healpix_handling import SparseHealpix
from hawc_hal.healpix_handling import get_gnomonic_projection
from hawc_hal.psf_fast import PSFConvolutor
from hawc_hal.log_likelihood import log_likelihood
from hawc_hal.util import ra_to_longitude

class HAL(PluginPrototype):

best-practice introduced by
Too many instance attributes (17/7)

    """
    The HAWC Accelerated Likelihood plugin for 3ML.

    :param name: name for the plugin
    :param maptree: Map Tree (either ROOT or hdf5 format)
    :param response_file: response of HAWC (either ROOT or hdf5 format)
    :param roi: a ROI instance describing the Region Of Interest
    :param flat_sky_pixels_size: size of the pixel for the flat sky projection (Hammer-Aitoff)
    """

    def __init__(self, name, maptree, response_file, roi, flat_sky_pixels_size=0.17):

best-practice introduced by
Too many arguments (6/5)

        # Store ROI
        self._roi = roi

        # Set up the flat-sky projection
        self._flat_sky_projection = roi.get_flat_sky_projection(flat_sky_pixels_size)

        # Read map tree (data)
        self._maptree = map_tree_factory(maptree, roi=roi)

        # Read detector response_file
        self._response = hawc_response_factory(response_file)

        # Use a renormalization of the background as a nuisance parameter
        # NOTE: it is fixed to 1.0 unless the user explicitly sets it free (experimental)
        self._nuisance_parameters = collections.OrderedDict()
        self._nuisance_parameters['%s_bkg_renorm' % name] = Parameter('%s_bkg_renorm' % name, 1.0,
                                                                      min_value=0.5, max_value=1.5,
                                                                      delta=0.01,
                                                                      desc="Renormalization for background map",
                                                                      free=False,
                                                                      is_normalization=False)

        # Instantiate the parent class
        super(HAL, self).__init__(name, self._nuisance_parameters)

        self._likelihood_model = None

        # These lists will contain the maps for the point sources
        self._convolved_point_sources = ConvolvedSourcesContainer()
        # and this one for extended sources
        self._convolved_ext_sources = ConvolvedSourcesContainer()

        # All energy/nHit bins are loaded in memory
        self._all_planes = list(self._maptree.analysis_bins_labels)

        # The active planes list always contains the list of *indexes* of the active planes
        self._active_planes = None

        # Set up the transformations from the flat-sky projection to Healpix, as well as the list of active pixels
        # (one for each energy/nHit bin). We make a separate transformation because different energy bins might have
        # different nsides
        self._active_pixels = collections.OrderedDict()
        self._flat_sky_to_healpix_transform = collections.OrderedDict()

        for bin_id in self._maptree:

            this_maptree = self._maptree[bin_id]
            this_nside = this_maptree.nside
            this_active_pixels = roi.active_pixels(this_nside)

            this_flat_sky_to_hpx_transform = FlatSkyToHealpixTransform(self._flat_sky_projection.wcs,
                                                                       'icrs',
                                                                       this_nside,
                                                                       this_active_pixels,
                                                                       (self._flat_sky_projection.npix_width,
                                                                        self._flat_sky_projection.npix_height),
                                                                       order='bilinear')

            self._active_pixels[bin_id] = this_active_pixels
            self._flat_sky_to_healpix_transform[bin_id] = this_flat_sky_to_hpx_transform

        # This will contain a list of PSF convolutors for extended sources, if there are any in the model
        self._psf_convolutors = None

        # Pre-compute the log-factorial factor in the likelihood, so we do not keep computing it over and over
        # again.
        self._log_factorials = collections.OrderedDict()

        # We also apply a bias so that the numerical value of the log-likelihood stays small. This helps when
        # fitting with algorithms like MINUIT because the convergence criterion involves the difference between
        # two likelihood values, which would be affected by numerical precision errors if the two values are
        # too large
        self._saturated_model_like_per_maptree = collections.OrderedDict()

        # The actual computation is in a method so we can recall it on clone (see the get_simulated_dataset method)
        self._compute_likelihood_biases()

        # This will save a clone of self for simulations
        self._clone = None

        # Integration method for the PSF (see psf_integration_method)
        self._psf_integration_method = "exact"

    @property
    def psf_integration_method(self):
        """
        Get or set the method for the integration of the PSF.

        * "exact" is more accurate but slow; if the position is free to vary, it adds a lot of time to the fit. This is
        the default, to be used when the positions of point sources are fixed. The computation in that case happens only
        once, so the impact on the run time is negligible.
        * "fast" is less accurate (up to an error of a few percent in flux) but a lot faster. This should be used when
        the position of the point source is free, because in that case the integration of the PSF happens every time
        the position changes, i.e., several times during the fit.

        If you have a fit with a free position, use "fast". When the position is found, you can fix it, switch to
        "exact" and redo the fit to obtain the most accurate measurement of the flux. For normal sources the difference
        will be small, but for very bright sources it might be up to a few percent (most of the time < 1%). If you are
        interested in the localization contour, there is no need to rerun with "exact".

        :param mode: either "exact" or "fast"
        :return: None
        """

        return self._psf_integration_method

    @psf_integration_method.setter
    def psf_integration_method(self, mode):

        assert mode.lower() in ["exact", "fast"], "PSF integration method must be either 'exact' or 'fast'"

        self._psf_integration_method = mode.lower()

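A minimal usage sketch of the workflow described in the docstring, assuming an already-configured HAL instance hal and a 3ML JointLikelihood object jl (both names are illustrative, not part of this module):

    hal.psf_integration_method = "fast"   # position free: the PSF is re-integrated at every step
    jl.fit()                              # localize the source
    # ... fix the source position at the best-fit values ...
    hal.psf_integration_method = "exact"  # position fixed: the PSF is integrated only once
    jl.fit()                              # refit to get the most accurate flux
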
    def _setup_psf_convolutors(self):

        central_response_bins = self._response.get_response_dec_bin(self._roi.ra_dec_center[1])

        self._psf_convolutors = collections.OrderedDict()
        for bin_id in central_response_bins:
            self._psf_convolutors[bin_id] = PSFConvolutor(central_response_bins[bin_id].psf, self._flat_sky_projection)

    def _compute_likelihood_biases(self):

        for bin_label in self._maptree:

            data_analysis_bin = self._maptree[bin_label]

            this_log_factorial = np.sum(logfactorial(data_analysis_bin.observation_map.as_partial()))
            self._log_factorials[bin_label] = this_log_factorial

            # As bias we use the likelihood value for the saturated model
            obs = data_analysis_bin.observation_map.as_partial()
            bkg = data_analysis_bin.background_map.as_partial()

            sat_model = np.clip(obs - bkg, 1e-50, None).astype(np.float64)

            self._saturated_model_like_per_maptree[bin_label] = log_likelihood(obs, bkg, sat_model) - this_log_factorial

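Note on the clip: for a Poisson likelihood, the per-pixel maximum is reached when model + background equals the observation, i.e., for model = obs - bkg. Where the background fluctuates above the data that difference is negative, which a physical (non-negative) model cannot reach, so it is clipped to a tiny positive value before being used as the saturated model.
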
    def get_saturated_model_likelihood(self):
        """
        Returns the likelihood for the saturated model (i.e., a model exactly equal to observation - background).

        :return: the log-likelihood of the saturated model, summed over all analysis bins
        """
        return sum(self._saturated_model_like_per_maptree.values())

    def set_active_measurements(self, bin_id_min=None, bin_id_max=None, bin_list=None):
        """
        Set the active analysis bins to use during the analysis. It can be used in two ways:

        - Specifying a range: if the response and the maptree allow it, you can specify a minimum id and a maximum id
        number. This only works if the analysis bins are numerical, like in the normal fHit analysis. For example:

            > set_active_measurements(bin_id_min=1, bin_id_max=9)

        - Specifying a list of bins as strings. This is more powerful, as it allows you to select any bins, even
        non-contiguous ones. For example:

            > set_active_measurements(bin_list=[list])

        :param bin_id_min: minimum bin (only works for the fHit analysis; for the others, use bin_list)
        :param bin_id_max: maximum bin (only works for the fHit analysis; for the others, use bin_list)
        :param bin_list: a list of analysis bins to use
        :return: None
        """

        # Check for legal input
        if bin_id_min is not None:

            assert bin_id_max is not None, "If you provide a minimum bin, you also need to provide a maximum bin"

            # Make sure they are integers
            bin_id_min = int(bin_id_min)
            bin_id_max = int(bin_id_max)

            self._active_planes = []
            for this_bin in range(bin_id_min, bin_id_max + 1):
                this_bin = str(this_bin)
                if this_bin not in self._all_planes:

                    raise ValueError("Bin %s is not contained in this response" % this_bin)

                self._active_planes.append(this_bin)

        else:

            assert bin_id_max is None, "If you provide a maximum bin, you also need to provide a minimum bin"

            assert bin_list is not None

            self._active_planes = []

            for this_bin in bin_list:

                if this_bin not in self._all_planes:

                    raise ValueError("Bin %s is not contained in this response" % this_bin)

                self._active_planes.append(this_bin)

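For example (assuming a HAL instance hal; the bin labels are illustrative and must exist in the map tree):

    # Standard fHit analysis: use bins 1 through 9
    hal.set_active_measurements(bin_id_min=1, bin_id_max=9)

    # Arbitrary, possibly non-contiguous, selection by label
    hal.set_active_measurements(bin_list=['1', '3', '9'])
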
    def display(self, verbose=False):
        """
        Prints a summary of the current object content.
        """

        print("Region of Interest: ")
        print("--------------------\n")
        self._roi.display()

        print("")
        print("Flat sky projection: ")
        print("----------------------\n")

        print("Width x height: %s x %s px" % (self._flat_sky_projection.npix_width,
                                              self._flat_sky_projection.npix_height))
        print("Pixel size: %s deg" % self._flat_sky_projection.pixel_size)

        print("")
        print("Response: ")
        print("----------\n")

        self._response.display(verbose)

        print("")
        print("Map Tree: ")
        print("----------\n")

        self._maptree.display()

        print("")
        print("Active energy/nHit planes: ")
        print("---------------------------\n")
        print(self._active_planes)

    def set_model(self, likelihood_model_instance):
        """
        Set the model to be used in the joint minimization. Must be a LikelihoodModel instance.
        """

        self._likelihood_model = likelihood_model_instance

        # Reset
        self._convolved_point_sources.reset()
        self._convolved_ext_sources.reset()

        # For each point source in the model, build the convolution class

        for source in self._likelihood_model.point_sources.values():

            this_convolved_point_source = ConvolvedPointSource(source, self._response, self._flat_sky_projection)

            self._convolved_point_sources.append(this_convolved_point_source)

        # Likewise for extended sources

        ext_sources = self._likelihood_model.extended_sources.values()

        # NOTE: ext_sources evaluates to False if empty
        if ext_sources:

            # We will need to convolve

            self._setup_psf_convolutors()

            for source in ext_sources:

                if source.spatial_shape.n_dim == 2:

                    this_convolved_ext_source = ConvolvedExtendedSource2D(source,
                                                                          self._response,
                                                                          self._flat_sky_projection)

                else:

                    this_convolved_ext_source = ConvolvedExtendedSource3D(source,
                                                                          self._response,
                                                                          self._flat_sky_projection)

                self._convolved_ext_sources.append(this_convolved_ext_source)

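A sketch of how a model typically reaches this method, assuming an existing HAL instance hal and a single point source built with astromodels (source name, coordinates and spectral shape are illustrative):

    from astromodels import Model, PointSource, Powerlaw

    spectrum = Powerlaw()
    source = PointSource("crab", ra=83.63, dec=22.01, spectral_shape=spectrum)

    hal.set_model(Model(source))  # builds one ConvolvedPointSource; PSF convolutors are set up only if there are extended sources
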
    def display_spectrum(self):

Comprehensibility introduced by
This function exceeds the maximum number of variables (24/15).

        """
        Make a plot of the current spectrum and its residuals (integrated over space)

        :return: a matplotlib.Figure
        """

        n_point_sources = self._likelihood_model.get_number_of_point_sources()
        n_ext_sources = self._likelihood_model.get_number_of_extended_sources()

        total_counts = np.zeros(len(self._active_planes), dtype=float)
        total_model = np.zeros_like(total_counts)
        model_only = np.zeros_like(total_counts)
        net_counts = np.zeros_like(total_counts)
        yerr_low = np.zeros_like(total_counts)
        yerr_high = np.zeros_like(total_counts)

        for i, energy_id in enumerate(self._active_planes):

            data_analysis_bin = self._maptree[energy_id]

            this_model_map_hpx = self._get_expectation(data_analysis_bin, energy_id, n_point_sources, n_ext_sources)

            this_model_tot = np.sum(this_model_map_hpx)

            this_data_tot = np.sum(data_analysis_bin.observation_map.as_partial())
            this_bkg_tot = np.sum(data_analysis_bin.background_map.as_partial())

            total_counts[i] = this_data_tot
            net_counts[i] = this_data_tot - this_bkg_tot
            model_only[i] = this_model_tot

            this_wh_model = this_model_tot + this_bkg_tot
            total_model[i] = this_wh_model

            if this_data_tot >= 50.0:

                # Gaussian limit
                # Under the null hypothesis the data are distributed as a Gaussian with mu = model
                # and sigma = sqrt(model)
                # NOTE: since we neglect the background uncertainty, the background is part of the
                # model
                yerr_low[i] = np.sqrt(this_data_tot)
                yerr_high[i] = np.sqrt(this_data_tot)

            else:

                # Low counts
                # Under the null hypothesis the data are distributed as a Poisson distribution with
                # mean = model; plot the 68% confidence interval (quantile = [0.16, 1 - 0.16]).
                # NOTE: since we neglect the background uncertainty, the background is part of the
                # model
                quantile = 0.16
                mean = this_wh_model
                y_low = poisson.isf(1 - quantile, mu=mean)
                y_high = poisson.isf(quantile, mu=mean)
                yerr_low[i] = mean - y_low
                yerr_high[i] = y_high - mean

        residuals = (total_counts - total_model) / np.sqrt(total_model)
        residuals_err = [yerr_high / np.sqrt(total_model),
                         yerr_low / np.sqrt(total_model)]

        yerr = [yerr_high, yerr_low]

        return self._plot_spectrum(net_counts, yerr, model_only, residuals, residuals_err)

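As a concrete example of the low-count branch: for a model expectation of 10 counts, the central 68% interval evaluates to approximately [7, 13]:

    from scipy.stats import poisson

    mean = 10.0
    y_low = poisson.isf(1 - 0.16, mu=mean)   # ~7.0, the 16% quantile
    y_high = poisson.isf(0.16, mu=mean)      # ~13.0, the 84% quantile
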
    def _plot_spectrum(self, net_counts, yerr, model_only, residuals, residuals_err):

best-practice introduced by
Too many arguments (6/5)

        fig, subs = plt.subplots(2, 1, gridspec_kw={'height_ratios': [2, 1], 'hspace': 0})

        subs[0].errorbar(self._active_planes, net_counts, yerr=yerr,
                         capsize=0,
                         color='black', label='Net counts', fmt='.')

        subs[0].plot(self._active_planes, model_only, label='Convolved model')

        subs[0].legend(bbox_to_anchor=(1.0, 1.0), loc="upper right",
                       numpoints=1)

        # Residuals
        subs[1].axhline(0, linestyle='--')

        subs[1].errorbar(
            self._active_planes, residuals,
            yerr=residuals_err,
            capsize=0, fmt='.'
        )

        y_limits = [min(net_counts[net_counts > 0]) / 2., max(net_counts) * 2.]

        subs[0].set_yscale("log", nonposy='clip')
        subs[0].set_ylabel("Counts per bin")
        subs[0].set_xticks([])

        subs[1].set_xlabel("Analysis bin")
        subs[1].set_ylabel(r"$\frac{{cts - mod - bkg}}{\sqrt{mod + bkg}}$")
        subs[1].set_xticks(self._active_planes)
        subs[1].set_xticklabels(self._active_planes)

        subs[0].set_ylim(y_limits)

        return fig

    def get_log_like(self):
        """
        Return the value of the log-likelihood with the current values for the
        parameters
        """

        n_point_sources = self._likelihood_model.get_number_of_point_sources()
        n_ext_sources = self._likelihood_model.get_number_of_extended_sources()

        # Make sure that no source has been added since we filled the cache
        assert n_point_sources == self._convolved_point_sources.n_sources_in_cache and \
               n_ext_sources == self._convolved_ext_sources.n_sources_in_cache, \
            "The number of sources has changed. Please re-assign the model to the plugin."

        # This will hold the total log-likelihood
        total_log_like = 0

        for bin_id in self._active_planes:

            data_analysis_bin = self._maptree[bin_id]

            this_model_map_hpx = self._get_expectation(data_analysis_bin, bin_id, n_point_sources, n_ext_sources)

            # Now compare with the observation
            bkg_renorm = list(self._nuisance_parameters.values())[0].value

            obs = data_analysis_bin.observation_map.as_partial()  # type: np.array
            bkg = data_analysis_bin.background_map.as_partial() * bkg_renorm  # type: np.array

            this_pseudo_log_like = log_likelihood(obs,
                                                  bkg,
                                                  this_model_map_hpx)

            total_log_like += this_pseudo_log_like - self._log_factorials[bin_id] \
                              - self._saturated_model_like_per_maptree[bin_id]

        return total_log_like

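Note how the two pre-computed terms combine: since _saturated_model_like_per_maptree already has the log-factorial subtracted, the factorial terms cancel in the sum above, and per bin

    (log L - log n!) - (log L_sat - log n!) = log L - log L_sat

so the returned statistic is the log-likelihood relative to the saturated model. It is always <= 0 and stays numerically small, which is exactly the bias anticipated in the constructor to protect MINUIT's convergence criterion from precision loss.
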
    def write(self, response_file_name, map_tree_file_name):
        """
        Write this dataset to disk in HDF format.

        :param response_file_name: filename for the response
        :param map_tree_file_name: filename for the map tree
        :return: None
        """

        self._maptree.write(map_tree_file_name)
        self._response.write(response_file_name)

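This is how a dataset read from ROOT files can be converted to HDF5 and reloaded later (file names are illustrative):

    hal.write("hawc_response.hd5", "hawc_maptree.hd5")

    # The HDF5 files can be fed back to the constructor in place of the ROOT ones
    hal2 = HAL("HAWC", "hawc_maptree.hd5", "hawc_response.hd5", roi)
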
    def get_simulated_dataset(self, name):
        """
        Return a simulation of this dataset using the current model with current parameters.

        :param name: new name for the new plugin instance
        :return: a HAL instance
        """

        # First get the expectations under the current model and store them, if we didn't do it yet

        if self._clone is None:

            n_point_sources = self._likelihood_model.get_number_of_point_sources()
            n_ext_sources = self._likelihood_model.get_number_of_extended_sources()

            expectations = collections.OrderedDict()

            for bin_id in self._maptree:

                data_analysis_bin = self._maptree[bin_id]

                if bin_id not in self._active_planes:

                    expectations[bin_id] = None

                else:

                    expectations[bin_id] = (self._get_expectation(data_analysis_bin, bin_id,
                                                                  n_point_sources, n_ext_sources) +
                                            data_analysis_bin.background_map.as_partial())

            if parallel_client.is_parallel_computation_active():

                # Do not clone, as the parallel environment already makes clones

                clone = self

            else:

                clone = copy.deepcopy(self)

            self._clone = (clone, expectations)

        # Substitute the observation for each active data analysis bin
        for bin_id in self._clone[0]._maptree:

Coding Style Best Practice introduced by
It seems like _maptree was declared protected and should not be accessed from this context. Prefixing a member variable with _ is usually regarded as the equivalent of declaring it with the protected visibility that exists in other languages. Consequently, such a member should only be accessed from the same class or a child class:

class MyParent:
    def __init__(self):
        self._x = 1
        self.y = 2

class MyChild(MyParent):
    def some_method(self):
        return self._x    # Ok, since accessed from a child class

class AnotherClass:
    def some_method(self, instance_of_my_child):
        return instance_of_my_child._x   # Would be flagged, as AnotherClass is not
                                         # a child class of MyParent

            data_analysis_bin = self._clone[0]._maptree[bin_id]

            if bin_id not in self._active_planes:

                continue

            # Active plane. Generate new data
            expectation = self._clone[1][bin_id]
            new_data = np.random.poisson(expectation, size=(1, expectation.shape[0])).flatten()

            # Substitute data
            data_analysis_bin.observation_map.set_new_values(new_data)

        # Now change the name and return
        self._clone[0]._name = name

Coding Style Best Practice introduced by
It seems like _name was declared protected and should not be accessed from this context (see the explanation above).

        # Adjust the name of the nuisance parameter
        old_name = list(self._clone[0]._nuisance_parameters.keys())[0]

Coding Style Best Practice introduced by
It seems like _nuisance_parameters was declared protected and should not be accessed from this context (see the explanation above).

        new_name = old_name.replace(self.name, name)
        self._clone[0]._nuisance_parameters[new_name] = self._clone[0]._nuisance_parameters.pop(old_name)

        # Recompute the biases
        self._clone[0]._compute_likelihood_biases()

        return self._clone[0]

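A sketch of the intended use, e.g., to build a test-statistic distribution under the current model (loop size and names are illustrative):

    simulations = []

    for i in range(100):

        sim = hal.get_simulated_dataset("HAWC_sim_%i" % i)  # Poisson realization of model + background
        simulations.append(sim)
        # ... fit each simulated dataset and record the statistic of interest ...
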
    def _get_expectation(self, data_analysis_bin, energy_bin_id, n_point_sources, n_ext_sources):

        # Compute the expectation from the model

        this_model_map = None

        for pts_id in range(n_point_sources):

            this_conv_src = self._convolved_point_sources[pts_id]

            expectation_per_transit = this_conv_src.get_source_map(energy_bin_id,
                                                                   tag=None,
                                                                   psf_integration_method=self._psf_integration_method)

            expectation_from_this_source = expectation_per_transit * data_analysis_bin.n_transits

            if this_model_map is None:

                # First addition

                this_model_map = expectation_from_this_source

            else:

                this_model_map += expectation_from_this_source

        # Now process extended sources
        if n_ext_sources > 0:

            this_ext_model_map = None

            for ext_id in range(n_ext_sources):

                this_conv_src = self._convolved_ext_sources[ext_id]

                expectation_per_transit = this_conv_src.get_source_map(energy_bin_id)

                if this_ext_model_map is None:

                    # First addition

                    this_ext_model_map = expectation_per_transit

                else:

                    this_ext_model_map += expectation_per_transit

            # Now convolve with the PSF
            if this_model_map is None:

                # Only extended sources
                this_model_map = (self._psf_convolutors[energy_bin_id].extended_source_image(this_ext_model_map) *
                                  data_analysis_bin.n_transits)

            else:

                this_model_map += (self._psf_convolutors[energy_bin_id].extended_source_image(this_ext_model_map) *
                                   data_analysis_bin.n_transits)

        # Now transform from the flat sky projection to HEALPiX

        if this_model_map is not None:

            # First divide by the pixel area, because we need to interpolate brightness
            this_model_map = this_model_map / self._flat_sky_projection.project_plane_pixel_area

            this_model_map_hpx = self._flat_sky_to_healpix_transform[energy_bin_id](this_model_map, fill_value=0.0)

            # Now multiply by the pixel area of the new map to go back to flux
            this_model_map_hpx *= hp.nside2pixarea(data_analysis_bin.nside, degrees=True)

        else:

            # No sources

            this_model_map_hpx = 0.0

        return this_model_map_hpx

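The divide / interpolate / multiply sequence at the end is what makes the reprojection approximately flux-conserving: flat-sky and HEALPiX pixels subtend different solid angles, so the map is first converted to surface brightness (counts per unit pixel area), interpolated onto the HEALPiX grid, and then converted back to counts using the area of the destination pixels.
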
    @staticmethod
    def _represent_healpix_map(fig, hpx_map, longitude, latitude, xsize, resolution, smoothing_kernel_sigma):

best-practice introduced by
Too many arguments (7/5)

        proj = get_gnomonic_projection(fig, hpx_map,
                                       rot=(longitude, latitude, 0.0),
                                       xsize=xsize,
                                       reso=resolution)

        if smoothing_kernel_sigma is not None:

            # Get the sigma in pixels
            sigma = smoothing_kernel_sigma * 60 / resolution

            proj = convolve(list(proj),
                            Gaussian2DKernel(sigma),
                            nan_treatment='fill',
                            preserve_nan=True)

        return proj

    def display_fit(self, smoothing_kernel_sigma=0.1, display_colorbar=False):

Comprehensibility introduced by
This function exceeds the maximum number of variables (28/15).

        """
        Make a figure containing 3 maps for each active analysis bin with, respectively, model, data and residuals.

        :param smoothing_kernel_sigma: sigma for the Gaussian smoothing kernel
        :param display_colorbar: whether or not to display the colorbar in the residuals
        :return: a matplotlib.Figure
        """

        n_point_sources = self._likelihood_model.get_number_of_point_sources()
        n_ext_sources = self._likelihood_model.get_number_of_extended_sources()

        # This is the resolution (i.e., the size of one pixel) of the image
        resolution = 3.0  # arcmin

        # The image is going to cover the diameter plus 20% padding
        xsize = self._get_optimal_xsize(resolution)

        n_active_planes = len(self._active_planes)

        fig, subs = plt.subplots(n_active_planes, 3, figsize=(8, n_active_planes * 2))

        with progress_bar(len(self._active_planes), title='Smoothing maps') as prog_bar:

            images = [None] * 3

            for i, plane_id in enumerate(self._active_planes):

                data_analysis_bin = self._maptree[plane_id]

                # Get the center of the projection for this plane
                this_ra, this_dec = self._roi.ra_dec_center

                this_model_map_hpx = self._get_expectation(data_analysis_bin, plane_id, n_point_sources, n_ext_sources)

                # Make a full healpix map for a moment
                whole_map = SparseHealpix(this_model_map_hpx,
                                          self._active_pixels[plane_id],
                                          data_analysis_bin.observation_map.nside).as_dense()

                # Healpix uses longitude between -180 and 180, while R.A. is between 0 and 360. We need to fix that:
                longitude = ra_to_longitude(this_ra)

                # Declination is already between -90 and 90
                latitude = this_dec

                # Plot the model

                proj_m = self._represent_healpix_map(fig, whole_map,
                                                     longitude, latitude,
                                                     xsize, resolution, smoothing_kernel_sigma)

                images[0] = subs[i][0].imshow(proj_m, origin='lower')

                # Remove numbers from the axis
                subs[i][0].axis('off')

                # Plot the data map
                # Here we remove the background, otherwise nothing would be visible
                # Get the background (which is in a way "part of the model" since the uncertainties are neglected)
                background_map = data_analysis_bin.background_map.as_dense()
                bkg_subtracted = data_analysis_bin.observation_map.as_dense() - background_map

                proj_d = self._represent_healpix_map(fig, bkg_subtracted,
                                                     longitude, latitude,
                                                     xsize, resolution, smoothing_kernel_sigma)

                images[1] = subs[i][1].imshow(proj_d, origin='lower')

                # Remove numbers from the axis
                subs[i][1].axis('off')

                # Now the residuals
                res = proj_d - proj_m
                # proj_res = self._represent_healpix_map(fig, res,
                #                                        longitude, latitude,
                #                                        xsize, resolution, smoothing_kernel_sigma)
                images[2] = subs[i][2].imshow(res, origin='lower')

                # Remove numbers from the axis
                subs[i][2].axis('off')

                subs[i][0].set_title('model, bin {}'.format(data_analysis_bin.name))
                subs[i][1].set_title('excess, bin {}'.format(data_analysis_bin.name))
                subs[i][2].set_title('residuals, bin {}'.format(data_analysis_bin.name))

                if display_colorbar:
                    for j, image in enumerate(images):
                        plt.colorbar(image, ax=subs[i][j])

                prog_bar.increase()

        fig.set_tight_layout(True)

        return fig

    def _get_optimal_xsize(self, resolution):

        return 2.2 * self._roi.data_radius.to("deg").value / (resolution / 60.0)

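For instance, with the default resolution of 3 arcmin (0.05 deg per pixel) and a ROI data radius of 5 deg, this gives 2.2 * 5 / 0.05 = 220 pixels across the image.
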
    def display_stacked_image(self, smoothing_kernel_sigma=0.5):

Comprehensibility introduced by
This function exceeds the maximum number of variables (20/15).

        """
        Display a map with all active analysis bins stacked together.

        :param smoothing_kernel_sigma: sigma for the Gaussian smoothing kernel to apply
        :return: a matplotlib.Figure instance
        """

        # This is the resolution (i.e., the size of one pixel) of the image in arcmin
        resolution = 3.0

        # The image is going to cover the diameter plus 20% padding
        xsize = self._get_optimal_xsize(resolution)

        active_planes_bins = [self._maptree[x] for x in self._active_planes]

        # Get the center of the projection for this plane
        this_ra, this_dec = self._roi.ra_dec_center

        # Healpix uses longitude between -180 and 180, while R.A. is between 0 and 360. We need to fix that:
        longitude = ra_to_longitude(this_ra)

        # Declination is already between -90 and 90
        latitude = this_dec

        total = None

        for i, data_analysis_bin in enumerate(active_planes_bins):

            # Plot data
            background_map = data_analysis_bin.background_map.as_dense()
            this_data = data_analysis_bin.observation_map.as_dense() - background_map
            idx = np.isnan(this_data)
            # this_data[idx] = hp.UNSEEN

            if i == 0:

                total = this_data

            else:

                # Sum only when there is no UNSEEN, so that the UNSEEN pixels will stay UNSEEN
                total[~idx] += this_data[~idx]

        delta_coord = (self._roi.data_radius.to("deg").value * 2.0) / 15.0

        fig, sub = plt.subplots(1, 1)

        proj = self._represent_healpix_map(fig, total, longitude, latitude, xsize, resolution, smoothing_kernel_sigma)

        sub.imshow(proj, origin='lower')
        sub.axis('off')

        hp.graticule(delta_coord, delta_coord)

        return fig

    def inner_fit(self):
        """
        This is used for the profile likelihood. Keeping fixed all parameters in the
        LikelihoodModel, this method minimizes the logLike over the remaining nuisance
        parameters, i.e., the parameters belonging only to the model for this
        particular detector. If there are no nuisance parameters, it simply returns the
        logLike value.
        """

        return self.get_log_like()

    def get_number_of_data_points(self):
        """
        Return the number of data points (active pixels), summed over all analysis bins.

        :return: number of data points
        """

        n_points = 0

        for bin_id in self._maptree:
            n_points += self._maptree[bin_id].observation_map.as_partial().shape[0]

        return n_points