NiaPy.benchmarks.perm   A
last analyzed

Complexity

Total Complexity 5

Size/Duplication

Total Lines 94
Duplicated Lines 0 %

Importance

Changes 0
Metric Value
eloc 21
dl 0
loc 94
rs 10
c 0
b 0
f 0
wmc 5

3 Methods

Rating   Name   Duplication   Size   Complexity  
A Perm.__init__() 0 12 1
A Perm.latex_code() 0 8 1
A Perm.function() 0 24 3
# encoding=utf8

"""Implementations of Perm function."""

from NiaPy.benchmarks.benchmark import Benchmark

__all__ = ['Perm']
class Perm(Benchmark):
	r"""Implementations of Perm functions.

	Date: 2018

	Author: Klemen Berkovič

	License: MIT

	Arguments:
	beta {real} -- value added to inner sum of function

	Function:
	**Perm Function**

		:math:`f(\textbf{x}) = \sum_{i = 1}^D \left( \sum_{j = 1}^D (j + \beta) \left( x_j^i - \frac{1}{j^i} \right) \right)^2`

		**Input domain:**
		The function can be defined on any input domain but it is usually
		evaluated on the hypercube :math:`x_i ∈ [-D, D]`, for all :math:`i = 1, 2,..., D`.

		**Global minimum:**
		:math:`f(\textbf{x}^*) = 0` at :math:`\textbf{x}^* = (1, \frac{1}{2}, \cdots , \frac{1}{i} , \cdots , \frac{1}{D})`

	LaTeX formats:
		Inline:
				$f(\textbf{x}) = \sum_{i = 1}^D \left( \sum_{j = 1}^D (j + \beta) \left( x_j^i - \frac{1}{j^i} \right) \right)^2$

		Equation:
				\begin{equation} f(\textbf{x}) = \sum_{i = 1}^D \left( \sum_{j = 1}^D (j + \beta) \left( x_j^i - \frac{1}{j^i} \right) \right)^2 \end{equation}

		Domain:
				$-D \leq x_i \leq D$

	Reference:
		https://www.sfu.ca/~ssurjano/perm0db.html
	"""
	Name = ['Perm']

	def __init__(self, D=10.0, beta=.5):
		r"""Initialize of Perm benchmark.

		Args:
			D (Optional[float]): Bound of the problem; search domain is :math:`[-D, D]`.
			beta (Optional[float]): Value added to the inner sum of the function.

		See Also:
			:func:`NiaPy.benchmarks.Benchmark.__init__`
		"""
		Benchmark.__init__(self, -D, D)
		# Store beta on the instance (was `Perm.beta = beta`, a class
		# attribute) so separate Perm instances can use different beta
		# values without overwriting each other.
		self.beta = beta

	@staticmethod
	def latex_code():
		r"""Return the latex code of the problem.

		Returns:
			str: Latex code
		"""
		return r'''$f(\textbf{x}) = \sum_{i = 1}^D \left( \sum_{j = 1}^D (j + \beta) \left( x_j^i - \frac{1}{j^i} \right) \right)^2$'''

	def function(self):
		r"""Return benchmark evaluation function.

		Returns:
			Callable[[int, Union[int, float, List[int, float], numpy.ndarray]], float]: Fitness function
		"""
		# Capture beta once so the closure does not look up self.beta per call.
		beta = self.beta
		def f(D, X):
			r"""Fitness function.

			Args:
				D (int): Dimensionality of the problem.
				X (Union[int, float, List[int, float], numpy.ndarray]): Solution to check.

			Returns:
				float: Fitness value for the solution.
			"""
			v = .0
			for i in range(1, D + 1):
				vv = .0
				# Inner sum: (j + beta) * (x_j^i - 1 / j^i), j = 1..D
				for j in range(1, D + 1): vv += (j + beta) * (X[j - 1] ** i - 1 / j ** i)
				v += vv ** 2
			return v
		return f
# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
96