# -*- coding: utf-8 -*-
# vim:fileencoding=utf-8
#
# Copyright (c) 2017-2018 Stefan Bender
#
# This module is part of sciapy.
# sciapy is free software: you can redistribute it or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation, version 2.
# See accompanying LICENSE file or http://www.gnu.org/licenses/gpl-2.0.html.
"""SCIAMACHY regression tool plotting helpers

Plot (helper) functions for the regression command line tool.
"""

import logging

import numpy as np
import matplotlib as mpl
# Select the non-interactive Agg backend before importing pyplot
# so that figures can be saved to files without a display.
mpl.use("Agg")
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec


def plot_single_sample_and_residuals(model, times, data, errs,
		sample, filename):
	"""Plot one sample and residuals in one figure

	Parameters
	----------
	model: `celerite.Model`, `george.Model`, or `celerite.ModelSet`
		The Gaussian Process model to plot samples from.
	times: array_like
		Time axis values.
	data: array_like
		Data values.
	errs: array_like
		Data uncertainties for the errorbars.
	sample: array_like
		The (MCMC) sample of the parameter vector.
	filename: string
		Output filename of the figure file.
	"""
	# Set up the GP for this sample.
	logging.debug("sample values: %s", sample)
	model.set_parameter_vector(sample)
	log_lh = model.log_likelihood(data)
	logging.debug("sample log likelihood: %s", log_lh)
	logging.debug("half data variance: %s", 0.5 * np.var(data))
	t = np.sort(np.append(times,
			np.linspace(times.min(), times.max(), 1000)))
	# Plot
	fig = plt.figure()
	gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
	ax = plt.subplot(gs[0])
	ax2 = plt.subplot(gs[1])
	# Plot the data.
	ax.errorbar(times, data, yerr=2 * errs, fmt=".k", zorder=8)
	# Compute the prediction conditioned on the observations and plot it.
	mum = model.predict(data, t=times, return_cov=False)
	mup = model.mean.get_value(t)
	mum2 = model.mean.get_value(times)
	mu, cov = model.predict(data, t=t)
	try:
		# Include the (optional) jitter term in the predicted variance.
		cov[np.diag_indices_from(cov)] += model.kernel.jitter
	except AttributeError:
		pass
	std = np.sqrt(np.diagonal(cov))
	ax.plot(t, mup, alpha=0.75, color="C1", zorder=12,
		label="mean_model")
	ax.plot(t, mu, alpha=0.75, color="C0", zorder=10,
		label="logl: {0:.3f}".format(log_lh))
	ax.fill_between(t, mu - 2. * std, mu + 2. * std,
		color="C0", alpha=0.3, zorder=10)
	# Residuals with respect to the full GP prediction (black)
	# and with respect to the mean model alone (C1).
	ax2.errorbar(times, data - mum, yerr=2 * errs, fmt=".k", zorder=12, alpha=0.5)
	ax2.errorbar(times, data - mum2, yerr=2 * errs, fmt=".C1", zorder=8, ms=4)
	ax2.axhline(y=0, color='k', alpha=0.5)
	ax.legend()
	fig.savefig(filename, transparent=True)
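
# Usage sketch (illustrative only, not part of the original module): assuming
# a celerite GP has been set up and computed on the observation times, and
# "chain" holds MCMC samples of its parameter vector, a single sample could
# be plotted together with its residuals along these lines:
#
#     import celerite
#     from celerite import terms
#     kernel = terms.Matern32Term(log_sigma=0.0, log_rho=1.0)
#     gp = celerite.GP(kernel, mean=np.mean(data), fit_mean=True)
#     gp.compute(times, errs)
#     plot_single_sample_and_residuals(gp, times, data, errs,
#             chain[-1], "sample_and_residuals.pdf")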


def plot_residual(model, times, data, errs, sample, scale, filename):
	"""Plot the residuals of one sample

	Parameters
	----------
	model: `celerite.Model`, `george.Model`, or `celerite.ModelSet`
		The Gaussian Process model to plot samples from.
	times: array_like
		Time axis values.
	data: array_like
		Data values.
	errs: array_like
		Data uncertainties for the errorbars.
	sample: array_like
		The (MCMC) sample of the parameter vector.
	scale: float
		The scale factor of the data to adjust the value axis.
	filename: string
		Output filename of the figure file.
	"""
	# Set up the GP for this sample.
	logging.debug("sample values: %s", sample)
	model.set_parameter_vector(sample)
	logging.debug("sample log likelihood: %s", model.log_likelihood(data))
	# Plot
	fig, ax = plt.subplots()
	# Compute the prediction conditioned on the observations.
	mu = model.predict(data, t=times, return_cov=False)
	# Plot the residuals with error bars.
	ax.errorbar(times, data - mu, yerr=2 * errs, fmt=".k", zorder=8)
	ax.axhline(y=0, color='k', alpha=0.5)
	ax.set_xlabel("time [years]")
	ax.set_ylabel("number density residual [10$^{{{0:.0f}}}$ cm$^{{-3}}$]"
		.format(-np.log10(scale)))
	fig.savefig(filename, transparent=True)
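
# Usage sketch (illustrative, same assumed "gp" and "chain" setup as in the
# example above; "scale" is the factor the data were multiplied with, e.g.
# 1e-6 for number densities of the order of 10^6 cm^-3):
#
#     plot_residual(gp, times, data, errs, chain[-1], 1e-6, "residuals.pdf")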


def plot_single_sample(model, times, data, errs, sample, filename):
	"""Plot a sample and data

	Parameters
	----------
	model: `celerite.Model`, `george.Model`, or `celerite.ModelSet`
		The Gaussian Process model to plot samples from.
	times: array_like
		Time axis values.
	data: array_like
		Data values.
	errs: array_like
		Data uncertainties for the errorbars.
	sample: array_like
		The (MCMC) sample of the parameter vector.
	filename: string
		Output filename of the figure file.
	"""
	# Set up the GP for this sample.
	model.set_parameter_vector(sample)
	t = np.linspace(times.min(), times.max(), 1000)
	# Plot
	fig, ax = plt.subplots()
	# Plot the data.
	ax.errorbar(times, data, yerr=2 * errs, fmt="o", ms=4, zorder=4)
	# Compute the prediction conditioned on the observations and plot it.
	mu, cov = model.predict(data, t=t)
	# Draw one realization from the conditional (predictive) distribution.
	_sample = np.random.multivariate_normal(mu, cov, 1)[0]
	ax.plot(t, _sample, alpha=0.75, zorder=2)
	fig.savefig(filename, transparent=True)
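
# Usage sketch (illustrative, same assumed setup as above): plot one random
# realization of the GP conditioned on the data for a single MCMC sample:
#
#     plot_single_sample(gp, times, data, errs, chain[-1], "single_sample.pdf")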


def plot_random_samples(model, times, data, errs,
		samples, scale, filename, size=8,
		extra_years=[0, 0]):
	"""Plot random samples and data

	Parameters
	----------
	model: `celerite.Model`, `george.Model`, or `celerite.ModelSet`
		The Gaussian Process model to plot samples from.
	times: array_like
		Time axis values.
	data: array_like
		Data values.
	errs: array_like
		Data uncertainties for the errorbars.
	samples: array_like
		The (MCMC) samples of the parameter vectors.
	scale: float
		The scale factor of the data to adjust the value axis.
	filename: string
		Output filename of the figure file.
	size: int, optional
		Number of samples to plot.
	extra_years: list or tuple, optional
		Extend the prediction period extra_years[0] into the past
		and extra_years[1] into the future.
	"""
	t = np.linspace(times.min() - extra_years[0],
			times.max() + extra_years[1], 2000)
	fig, ax = plt.subplots()
	# Plot the data.
	ax.errorbar(times, data, yerr=2 * errs, fmt=".k", zorder=8)
	ax.set_xlim(np.min(t), np.max(t))
	ax.set_xlabel("time [years]")
	ax.set_ylabel("number density [10$^{{{0:.0f}}}$ cm$^{{-3}}$]"
		.format(-np.log10(scale)))
	for i in np.random.randint(len(samples), size=size):
		logging.info("plotting random sample %s.", i)
		logging.debug("sample values: %s", samples[i])
		model.set_parameter_vector(samples[i])
		logging.debug("sample log likelihood: %s", model.log_likelihood(data))
		# Compute the prediction conditioned on the observations and plot it.
		mu, cov = model.predict(data, t=t)
		try:
			# Include the (optional) jitter term in the predicted variance.
			cov[np.diag_indices_from(cov)] += model.kernel.jitter
		except AttributeError:
			pass
		# Draw one realization from the conditional distribution and plot it.
		sample = np.random.multivariate_normal(mu, cov, 1)[0]
		ax.plot(t, sample, color="C0", alpha=0.25, zorder=12)
	# fig.subplots_adjust(left=0.2, bottom=0.2, top=0.9, right=0.9)
	fig.savefig(filename, transparent=True)
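
# Usage sketch (illustrative, same assumed setup as above): plot eight random
# posterior draws, extending the prediction one year into the past and two
# years into the future; "chain" is assumed to be a flattened
# (n_samples, n_parameters) MCMC chain, e.g. from emcee:
#
#     plot_random_samples(gp, times, data, errs, chain, 1e-6,
#             "random_samples.pdf", size=8, extra_years=[1, 2])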