Passed: push to master ( fb019c...56e00e ) by Simon, created 01:34

fit_gaussian (rating A)

Complexity
    Total Complexity: 2

Size/Duplication
    Total Lines: 53
    Duplicated Lines: 0 %

Importance
    Changes: 0

Metric   Value
eloc     30
dl       0
loc      53
rs       10
c        0
b        0
f        0
wmc      2

2 Functions

Rating   Name                  Duplication   Size   Complexity
A        fit_gaussian()        0             10     1
A        gaussian_function()   0             2      1
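For reference, the Total Complexity of 2 simply aggregates the per-function values in the table above: each of the two functions has a complexity of 1, giving 1 + 1 = 2, which also matches the wmc entry in the metric list.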
import numpy as np
import matplotlib.pyplot as plt

from gradient_free_optimizers import HillClimbingOptimizer


# define the gaussian function for the fit
def gaussian_function(x, A, B, C):
    return A * np.exp(-(x-B)**2/(2*C**2))

# create the gaussian distributed samples
gauss_np1 = np.random.normal(loc=2, scale=3, size=30000)

bins = 100
min_x = np.min(gauss_np1)
max_x = np.max(gauss_np1)
step_x = (max_x - min_x)/bins

# create the x axis samples
x_range = np.arange(min_x, max_x, step_x)
# the y axis samples to compare with the fitted gaussian
y_gauss_hist = plt.hist(gauss_np1, density=True, bins=bins)[0]


# the objective function for GFO
def fit_gaussian(para):
    A, B, C = para["A"], para["B"], para["C"]
    y_gauss_func = gaussian_function(x_range, A, B, C)

    # compare results of function and hist samples
    diff = np.subtract(y_gauss_func, y_gauss_hist)

    # we want to minimize the difference
    score = - np.abs(diff).sum()
    return score


search_space = {
    "A" : list(np.arange(-10, 10, 0.01)),
    "B" : list(np.arange(-10, 10, 0.01)),
    "C" : list(np.arange(-10, 10, 0.01)),
}

opt = HillClimbingOptimizer(search_space)
opt.search(fit_gaussian, n_iter=10000)

best_parameter = opt.best_para
y_gauss_func_final = gaussian_function(x_range, **best_parameter)

plt.hist(gauss_np1, density=True, bins=bins)
plt.plot(x_range, y_gauss_func_final)
plt.show()
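Since the samples are drawn from np.random.normal(loc=2, scale=3, size=30000), a successful fit should land near B close to 2, |C| close to 3 (C only enters the model squared) and A close to the density peak 1 / (3 * sqrt(2 * pi)), roughly 0.133. A minimal sanity check along those lines, reusing only names already defined in the script (best_parameter, fit_gaussian, np), could look like the sketch below; the expected dict and the printouts are illustrative additions, not part of the inspected file.

# sketch of a sanity check for the fitted parameters (not part of the inspected file)
# the samples come from a normal distribution with mean 2 and standard deviation 3,
# so the fitted curve should roughly reproduce those values
expected = {"A": 1 / (3 * np.sqrt(2 * np.pi)), "B": 2.0, "C": 3.0}

for name, target in expected.items():
    found = best_parameter[name]
    # C only appears squared in gaussian_function, so compare its magnitude
    if name == "C":
        found = abs(found)
    print(f"{name}: found {found:.3f}, expected about {target:.3f}")

# the objective is the negative summed absolute error, so values closer to 0 are better
print("objective at the optimum:", fit_gaussian(best_parameter))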