Passed
Pull Request — master (#280)
by
unknown
03:01
created

HarrisHawksOptimization.algorithmInfo()   A

Complexity

Conditions 1

Size

Total Lines 11
Code Lines 3

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
cc 1
eloc 3
nop 0
dl 0
loc 11
rs 10
c 0
b 0
f 0
1
# encoding=utf8
2
import logging
3
4
from numpy import random as rand, sin, pi, argmin, abs, mean
5
from scipy.special import gamma
6
7
from NiaPy.algorithms.algorithm import Algorithm
8
9
# Public interface of this module.
__all__ = ['HarrisHawksOptimization']

# Set up the shared NiaPy basic-algorithms logger at INFO verbosity.
logging.basicConfig()
logger = logging.getLogger('NiaPy.algorithms.basic')
logger.setLevel('INFO')
14
15
def levy_function(dims, step=0.01, rnd=rand):
	r"""Sample one Levy-flight step vector via Mantegna's algorithm.

	Parameters:
		dims (int): Number of dimensions of the step vector.
		step (float): Scale factor applied to the Levy step.
		rnd (mtrand.RandomState): Random number generator.

	Returns:
		numpy.ndarray: Vector of ``dims`` Levy-distributed components.
	"""
	beta = 1.5
	# Mantegna's sigma for the numerator normal draw:
	#   ((Gamma(1+b) * sin(pi*b/2)) / (Gamma((1+b)/2) * b * 2^((b-1)/2)))^(1/b)
	# The original code mis-parenthesized the denominator, taking gamma() of
	# the whole product and using (1 + beta/2) instead of (1 + beta)/2.
	sigma = (gamma(1 + beta) * sin(pi * beta / 2) /
			(gamma((1 + beta) / 2) * beta * 2.0 ** ((beta - 1) / 2))) ** (1 / beta)
	normal_1 = rnd.normal(0, sigma, size=dims)
	normal_2 = rnd.normal(0, 1, size=dims)
	# Levy step: u / |v|^(1/beta), scaled by the caller-supplied step size.
	return step * normal_1 / (abs(normal_2) ** (1 / beta))
33
34
class HarrisHawksOptimization(Algorithm):
	r"""Implementation of Harris Hawk Optimization algorithm.

	Algorithm:
		Harris Hawk Optimization

	Date:
		2019

	Authors:
		Francisco Jose Solis-Munoz and Iztok Fister Jr.

	License:
		MIT

	Reference paper:
		Heidari et al. "Harris hawks optimization: Algorithm and applications". Future Generation Computer Systems. 2019. Vol. 97. 849-872.

	Attributes:
		Name (List[str]): List of strings representing algorithm name.
		levy (float): Levy factor.

	See Also:
		* :class:`NiaPy.algorithms.Algorithm`
	"""
	Name = ['HarrisHawksOptimization', 'HHO']

	@staticmethod
	def algorithmInfo():
		r"""Get algorithms information.

		Returns:
			str: Algorithm information.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.algorithmInfo`
		"""
		return r"""Heidari et al. "Harris hawks optimization: Algorithm and applications". Future Generation Computer Systems. 2019. Vol. 97. 849-872."""

	@staticmethod
	def typeParameters():
		r"""Return dict with where key of dict represents parameter name and values represent checking functions for selected parameter.

		Returns:
			Dict[str, Callable]:
				* levy (Callable[[Union[float, int]], bool]): Levy factor.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.typeParameters`
		"""
		d = Algorithm.typeParameters()
		d.update({
			'levy': lambda x: isinstance(x, (float, int)) and x > 0,
		})
		return d

	def setParameters(self, NP=40, levy=0.01, **ukwargs):
		r"""Set the parameters of the algorithm.

		Args:
			NP (Optional[int]): Population size.
			levy (Optional[float]): Levy factor.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.setParameters`
		"""
		Algorithm.setParameters(self, NP=NP, **ukwargs)
		self.levy = levy

	def getParameters(self):
		r"""Get parameters of the algorithm.

		Returns:
			Dict[str, Any]: Algorithm parameters.
		"""
		d = Algorithm.getParameters(self)
		d.update({
			'levy': self.levy
		})
		return d

	def initPopulation(self, task):
		r"""Initialize the starting population.

		Parameters:
			task (Task): Optimization task

		Returns:
			Tuple[numpy.ndarray, numpy.ndarray[float], Dict[str, Any]]:
				1. New population.
				2. New population fitness/function values.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.initPopulation`
		"""
		Sol, Fitness, d = Algorithm.initPopulation(self, task)
		return Sol, Fitness, d

	def runIteration(self, task, Sol, Fitness, xb, fxb, **dparams):
		r"""Core function of Harris Hawks Optimization.

		Parameters:
			task (Task): Optimization task.
			Sol (numpy.ndarray): Current population
			Fitness (numpy.ndarray[float]): Current population fitness/function values
			xb (numpy.ndarray): Current best individual
			fxb (float): Current best individual function/fitness value
			dparams (Dict[str, Any]): Additional algorithm arguments

		Returns:
			Tuple[numpy.ndarray, numpy.ndarray, numpy.ndarray, float, Dict[str, Any]]:
				1. New population
				2. New population fitness/function values
				3. New global best solution
				4. New global best fitness/objective value
		"""
		rnd = self.Rand
		# Prey energy decays linearly over the run (factor of E in the paper).
		decreasing_energy_factor = 2 * (1 - task.iters() / task.nGEN)
		# X_m in the paper is the per-dimension mean of the population, so
		# average over the population axis only (a scalar mean(Sol) would
		# collapse all dimensions together).
		mean_sol = mean(Sol, axis=0)
		# Update population
		for i in range(self.NP):
			jumping_energy = rnd.uniform(0, 2)
			decreasing_energy_random = rnd.uniform(-1, 1)
			escaping_energy = decreasing_energy_factor * decreasing_energy_random
			escaping_energy_abs = abs(escaping_energy)
			random_number = rnd.rand()
			if escaping_energy_abs >= 1 and random_number >= 0.5:
				# 0. Exploration: perch on a random tall tree (random agent).
				# Compare |E| (not raw E) so that hawks with E <= -1 also
				# explore instead of falling through to the besiege branches.
				rhi = rnd.randint(0, self.NP)
				random_agent = Sol[rhi]
				Sol[i] = random_agent - rnd.rand() * \
						abs(random_agent - 2 * rnd.rand() * \
						Sol[i])
			elif escaping_energy_abs >= 1 and random_number < 0.5:
				# 1. Exploration: Family members mean
				Sol[i] = \
						(xb - mean_sol) - \
						rnd.rand() * \
						rnd.uniform(task.Lower, task.Upper)
			elif escaping_energy_abs >= 0.5 and random_number >= 0.5:
				# 2. Exploitation: Soft besiege
				Sol[i] = \
						(xb - Sol[i]) - \
						escaping_energy * \
						abs(jumping_energy * xb - Sol[i])
			elif escaping_energy_abs < 0.5 and random_number >= 0.5:
				# 3. Exploitation: Hard besiege
				Sol[i] = \
						xb - \
						escaping_energy * \
						abs(xb - Sol[i])
			elif escaping_energy_abs >= 0.5 and random_number < 0.5:
				# 4. Exploitation: Soft besiege with progressive rapid dives
				# Use the algorithm's own generator (rnd) everywhere so a
				# seeded run stays reproducible.
				cand1 = task.repair(xb - escaping_energy * \
						abs(jumping_energy * xb - Sol[i]), rnd=rnd)
				random_vector = rnd.rand(task.D)
				cand2 = task.repair(cand1 + random_vector * \
						levy_function(task.D, self.levy, rnd=rnd), rnd=rnd)
				if task.eval(cand1) < Fitness[i]:
					Sol[i] = cand1
				elif task.eval(cand2) < Fitness[i]:
					Sol[i] = cand2
			elif escaping_energy_abs < 0.5 and random_number < 0.5:
				# 5. Exploitation: Hard besiege with progressive rapid dives
				cand1 = task.repair(xb - escaping_energy * \
						abs(jumping_energy * xb - mean_sol), rnd=rnd)
				random_vector = rnd.rand(task.D)
				cand2 = task.repair(cand1 + random_vector * \
						levy_function(task.D, self.levy, rnd=rnd), rnd=rnd)
				if task.eval(cand1) < Fitness[i]:
					Sol[i] = cand1
				elif task.eval(cand2) < Fitness[i]:
					Sol[i] = cand2
			# Repair agent (from population) values
			Sol[i] = task.repair(Sol[i], rnd=rnd)
			# Eval population
			Fitness[i] = task.eval(Sol[i])
		# Get best of population
		best_index = argmin(Fitness)
		xb_cand = Sol[best_index].copy()
		fxb_cand = Fitness[best_index].copy()
		if fxb_cand < fxb:
			fxb = fxb_cand
			xb = xb_cand.copy()
		return Sol, Fitness, xb, fxb, {}
219
220
# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
221