fit_gaussian   A
last analyzed

Complexity

Total Complexity 2

Size/Duplication

Total Lines 54
Duplicated Lines 0%

Importance

Changes 0
Metric Value
wmc 2
eloc 30
dl 0
loc 54
rs 10
c 0
b 0
f 0

2 Functions

Rating   Name   Duplication   Size   Complexity  
A gaussian_function() 0 2 1
A fit_gaussian() 0 10 1
1
import numpy as np
2
import matplotlib.pyplot as plt
3
4
from gradient_free_optimizers import HillClimbingOptimizer
5
6
7
# define the gaussian function for the fit
def gaussian_function(x, A, B, C):
    """Evaluate a gaussian with amplitude A, center B and width C at x."""
    exponent = -((x - B) ** 2) / (2 * C**2)
    return A * np.exp(exponent)
10
11
12
# create the gaussian distributed samples
gauss_np1 = np.random.normal(loc=2, scale=3, size=30000)

bins = 100
min_x = np.min(gauss_np1)
max_x = np.max(gauss_np1)
step_x = (max_x - min_x) / bins  # width of one histogram bin

# create the x axis samples
x_range = np.arange(min_x, max_x, step_x)
# the y axis samples to compare with the fitted gaussian.
# np.histogram yields the same normalized densities that plt.hist
# would return, without drawing a throwaway figure as a side effect.
y_gauss_hist = np.histogram(gauss_np1, density=True, bins=bins)[0]
24
25
26
# the objective function for GFO
def fit_gaussian(para):
    """Score a parameter set by how closely its gaussian matches the
    histogram samples (negated L1 distance, so larger is better)."""
    y_model = gaussian_function(x_range, para["A"], para["B"], para["C"])

    # compare the model against the histogram samples; we want to
    # minimize the difference, so the score is the negated total gap
    residual = np.abs(y_model - y_gauss_hist)
    return -residual.sum()
37
38
39
# search space for the three gaussian parameters.
# A (amplitude) and B (mean) may take either sign, but C (the width)
# enters the model only as C**2 in a denominator: negative values are
# redundant and values at/near zero blow up the exponent, so C is
# restricted to strictly positive values.
search_space = {
    "A": np.arange(-10, 10, 0.01),
    "B": np.arange(-10, 10, 0.01),
    "C": np.arange(0.01, 10, 0.01),
}

# run the gradient-free hill-climbing search over the objective
opt = HillClimbingOptimizer(search_space)
opt.search(fit_gaussian, n_iter=10000)

# evaluate the gaussian at the best parameters found
best_parameter = opt.best_para
y_gauss_func_final = gaussian_function(x_range, **best_parameter)

# overlay the fitted curve on the sample histogram
plt.hist(gauss_np1, density=True, bins=bins)
plt.plot(x_range, y_gauss_func_final)
plt.show()
54