polarpy.polar2hdf5 (rating: A)

Complexity

Total Complexity 16

Size/Duplication

Total Lines 194
Duplicated Lines 0 %

Importance

Changes 0
Metric    Value
eloc      84
dl        0
loc       194
rs        10
c         0
b         0
f         0
wmc       16

2 Functions

Rating    Name                            Duplication    Size    Complexity
B         polar_spectra_to_hdf5()         0              81      7
C         polar_polarization_to_hdf5()    0              98      9
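
Both functions convert POLAR ROOT files into HDF5. A minimal usage sketch follows; the file names are placeholders chosen for illustration, not files shipped with polarpy, and it assumes polarpy, ROOT, and threeML are importable.

from polarpy.polar2hdf5 import (polar_polarization_to_hdf5,
                                polar_spectra_to_hdf5)

# placeholder input/output file names
# build the polarization response HDF5 from the simulated ROOT response
polar_polarization_to_hdf5('polar_response.root', 'polar_response.h5')

# convert the spectral data and response ROOT files into a single HDF5 file
polar_spectra_to_hdf5('polar_data.root', 'polar_rsp.root', 'polar_spectra.h5')

The full module source is listed below.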
import h5py
import numpy as np

import ROOT
from threeML.io.cern_root_utils.io_utils import open_ROOT_file
from threeML.io.cern_root_utils.tobject_to_numpy import tree_to_ndarray, th2_to_arrays


if True:

    def polar_polarization_to_hdf5(polarization_root_file, hdf5_out_file):
        """
        Convert the ROOT POLAR polarization response into an HDF5 file so that
        users are not dependent on ROOT.

        :param polarization_root_file: The ROOT file from which to build the response
        :param hdf5_out_file: The output HDF5 file name
        """

        # create a few lists to hold the grid values

        energy = []
        degree = []
        angle = []

        energy_str = []
        degree_str = []
        angle_str = []

        # open the ROOT file

        with open_ROOT_file(polarization_root_file) as f:

            # This looks at all the info in the ROOT file.
            # It is gross because ROOT is gross.
            # The simulation keys are named 'sim_<energy>_<degree>_<angle>'.

            tmp = [key.GetName() for key in f.GetListOfKeys()]
            tmp = [name for name in tmp if 'sim' in name]

            for tmp2 in tmp:

                _, x, y, z = tmp2.split('_')

                energy.append(float(x))
                degree.append(float(y))
                angle.append(float(z))

                energy_str.append(x)
                degree_str.append(y)
                angle_str.append(z)

            # There are duplicates everywhere.
            # This makes sure we only grab what we need.

            energy = np.unique(energy)
            degree = np.unique(degree)
            angle = np.unique(angle)

            energy_str = np.unique(energy_str)
            degree_str = np.unique(degree_str)
            angle_str = np.unique(angle_str)

            # np.unique sorts the string labels lexicographically; re-sort them
            # numerically so that their order matches the float arrays above

            energy_str = np.array(sorted(energy_str, key=float))
            degree_str = np.array(sorted(degree_str, key=float))
            angle_str = np.array(sorted(angle_str, key=float))

            # grab one histogram just to get the scattering-angle bins
            # (must change this from ints later)

            file_string = 'sim_%s_%s_%s' % (energy_str[1], degree_str[1], angle_str[1])

            bins, _, hist = th2_to_arrays(f.Get(file_string))

            out_matrix = np.zeros((len(energy), len(angle), len(degree), len(hist)))

            # Now we will build the HDF5 file. Much easier because the format is
            # beautiful.

            with h5py.File(hdf5_out_file, 'w', libver='latest') as database:

                for i, x in enumerate(energy_str):

                    for j, y in enumerate(angle_str):

                        for k, z in enumerate(degree_str):

                            file_string = 'sim_%s_%s_%s' % (x, z, y)

                            _, _, hist = th2_to_arrays(f.Get(file_string))

                            # store the scattering-angle histogram for this
                            # (energy, angle, degree) grid point

                            out_matrix[i, j, k, :] = hist

                # write the response matrix

                database.create_dataset('matrix', data=out_matrix, compression='lzf')

                if np.min(bins) < 0:

                    # try to automatically correct badly specified bins by
                    # shifting the edges so that they start at zero

                    bins = np.array(bins)

                    bins -= np.min(bins)

                    assert np.min(bins) >= 0, 'The scattering bins have edges less than zero'
                    assert np.max(bins) <= 360, 'The scattering bins have edges greater than 360'

                # Save all this out. We MUST write some docs describing the format at some point.

                database.create_dataset('bins', data=bins, compression='lzf')
                database.create_dataset('pol_ang', data=angle, compression='lzf')
                database.create_dataset('pol_deg', data=degree, compression='lzf')
                database.create_dataset('energy', data=energy, compression='lzf')

    def polar_spectra_to_hdf5(polar_root_file, polar_rsp_root_file, hdf5_out_file):
        """
        Extract the POLAR spectral information needed for spectral fitting.
        The resulting files can be further reduced to PHA FITS files with 3ML.

        :param polar_root_file: The spectral ROOT file
        :param polar_rsp_root_file: The response ROOT file
        :param hdf5_out_file: The name of the output HDF5 file
        """

        # extract the info from the crappy ROOT file

        with h5py.File(hdf5_out_file, 'w') as outfile:

            # first we do the RSP

            rsp_grp = outfile.create_group('rsp')

            with open_ROOT_file(polar_rsp_root_file) as f:

                matrix = th2_to_arrays(f.Get('rsp'))[-1]
                rsp_grp.create_dataset('matrix', data=matrix, compression='lzf')

                ebounds = th2_to_arrays(f.Get('EM_bounds'))[-1]
                rsp_grp.create_dataset('ebounds', data=ebounds, compression='lzf')

                mc_low = th2_to_arrays(f.Get('ER_low'))[-1]
                rsp_grp.create_dataset('mc_low', data=mc_low, compression='lzf')

                mc_high = th2_to_arrays(f.Get('ER_high'))[-1]
                rsp_grp.create_dataset('mc_high', data=mc_high, compression='lzf')

            # now we get the spectral information

            keys_to_use = ['polar_out']

            f = ROOT.TFile(polar_root_file)

            extra_grp = outfile.create_group('extras')

            for key in f.GetListOfKeys():

                name = key.GetName()

                if name not in keys_to_use:

                    try:

                        # first we see if it is a TTree; if so,
                        # add a new group and attach its data

                        tree = tree_to_ndarray(f.Get(name))

                        new_grp = extra_grp.create_group(name)

                        for new_name in tree.dtype.names:

                            new_grp.create_dataset(new_name, data=tree[new_name], compression='lzf')

                    except Exception:

                        # not a TTree, so we just grab the histogram data

                        data = th2_to_arrays(f.Get(name))[-1]

                        extra_grp.create_dataset(name, data=data, compression='lzf')

            # now we deal with the data that is important

            tmp = tree_to_ndarray(f.Get('polar_out'))

            outfile.create_dataset('energy', data=tmp['Energy'], compression='lzf')
            outfile.create_dataset('scatter_angle', data=tmp['scatter_angle'], compression='lzf')
            outfile.create_dataset('dead_ratio', data=tmp['dead_ratio'], compression='lzf')
            outfile.create_dataset('time', data=tmp['tunix'], compression='lzf')

            f.Close()
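
The datasets written by the two functions can be inspected directly with h5py. A short read-back sketch, assuming the placeholder output names used in the usage example above:

import h5py

# polarization response file: modulation histograms plus their axes
with h5py.File('polar_response.h5', 'r') as f:
    matrix = f['matrix'][()]   # shape: (energy, pol_ang, pol_deg, scattering bin)
    bins = f['bins'][()]       # scattering-angle bin edges
    energy = f['energy'][()]
    pol_ang = f['pol_ang'][()]
    pol_deg = f['pol_deg'][()]

# spectral file: 'rsp' group, event-level arrays, and an 'extras' group
with h5py.File('polar_spectra.h5', 'r') as f:
    rsp = f['rsp/matrix'][()]
    ebounds = f['rsp/ebounds'][()]
    # 'rsp/mc_low', 'rsp/mc_high', and 'dead_ratio' are also available
    event_energy = f['energy'][()]
    scatter_angle = f['scatter_angle'][()]
    time = f['time'][()]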