Passed — Pull Request, master (#280), created by unknown, 01:16

HarrisHawksOptimization.setParameters() — grade A

Complexity
	Conditions: 1

Size
	Total Lines: 11
	Code Lines: 3

Duplication
	Lines: 0
	Ratio: 0 %

Importance
	Changes: 0

Metric                          Value
cc (cyclomatic complexity)      1
eloc (effective lines of code)  3
nop (number of parameters)      4
dl (duplicated lines)           0
loc (lines of code)             11
rs                              10
c                               0
b                               0
f                               0
# encoding=utf8
import logging

from numpy import random as rand
import numpy as np

from scipy.special import gamma

from NiaPy.algorithms.algorithm import Algorithm

logging.basicConfig()
logger = logging.getLogger('NiaPy.algorithms.basic')
logger.setLevel('INFO')

__all__ = ['HarrisHawksOptimization']

def levy_function(dims, step=0.01, rnd=rand):
	r"""Compute a Levy flight step using Mantegna's algorithm.

	Parameters:
		dims (int): Number of dimensions.
		step (float): Step scale of the Levy flight.
		rnd (mtrand.RandomState): Random number generator.

	Returns:
		numpy.ndarray: Levy flight step vector of length ``dims``.
	"""
	beta = 1.5
	# Mantegna's algorithm: sigma = [Gamma(1 + beta) * sin(pi * beta / 2) /
	# (Gamma((1 + beta) / 2) * beta * 2^((beta - 1) / 2))]^(1 / beta)
	sigma = (gamma(1 + beta) * np.sin(np.pi * beta / 2) /
			(gamma((1 + beta) / 2) * beta * 2.0 ** ((beta - 1) / 2))) ** \
			(1 / beta)
	normal_1 = rnd.normal(0, sigma, size=dims)
	normal_2 = rnd.normal(0, 1, size=dims)
	result = step * normal_1 / (np.abs(normal_2) ** (1 / beta))
	return result
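
# A quick shape sanity check (illustrative only; the RandomState seed below is
# an assumption made up for this sketch, not part of the original file):
# >>> levy_function(5, step=0.01, rnd=np.random.RandomState(1)).shape
# (5,)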

class HarrisHawksOptimization(Algorithm):
	r"""Implementation of Harris Hawks Optimization algorithm.

	Algorithm:
		Harris Hawks Optimization

	Date:
		2019

	Authors:
		Francisco Jose Solis-Munoz and Iztok Fister Jr.

	License:
		MIT

	Reference paper:
		Heidari et al. "Harris hawks optimization: Algorithm and applications". Future Generation Computer Systems. 2019. Vol. 97. 849-872.

	Attributes:
		Name (List[str]): List of strings representing algorithm name.
		levy (float): Step factor of the Levy flight.

	See Also:
		* :class:`NiaPy.algorithms.Algorithm`
	"""
	Name = ['HarrisHawksOptimization', 'HHO']

	@staticmethod
	def algorithmInfo():
		r"""Get algorithm information.

		Returns:
			str: Algorithm information.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.algorithmInfo`
		"""
		return r"""Heidari et al. "Harris hawks optimization: Algorithm and applications". Future Generation Computer Systems. 2019. Vol. 97. 849-872."""

	@staticmethod
	def typeParameters():
		r"""Return a dict where keys are parameter names and values are functions that validate the corresponding parameter.

		Returns:
			Dict[str, Callable]:
				* levy (Callable[[Union[float, int]], bool]): Checks that the Levy factor is a positive number.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.typeParameters`
		"""
		d = Algorithm.typeParameters()
		d.update({
			'levy': lambda x: isinstance(x, (float, int)) and x > 0,
		})
		return d
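
	# Illustrative check (not part of the original file): the returned
	# validators can screen user-supplied parameter values.
	# >>> HarrisHawksOptimization.typeParameters()['levy'](0.01)
	# True
	# >>> HarrisHawksOptimization.typeParameters()['levy'](-1)
	# False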

	def setParameters(self, NP=40, levy=0.01, **ukwargs):
		r"""Set the parameters of the algorithm.

		Args:
			NP (Optional[int]): Population size.
			levy (Optional[float]): Step factor of the Levy flight.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.setParameters`
		"""
		Algorithm.setParameters(self, NP=NP, **ukwargs)
		self.levy = levy
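
	# Illustrative sketch (the values are assumptions chosen for this example):
	# reconfiguring the population size and Levy step factor after construction.
	# >>> algo = HarrisHawksOptimization()
	# >>> algo.setParameters(NP=25, levy=0.05)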

	def getParameters(self):
		r"""Get parameters of the algorithm.

		Returns:
			Dict[str, Any]: Algorithm parameters.
		"""
		d = Algorithm.getParameters(self)
		d.update({
			'levy': self.levy
		})
		return d
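
	# Illustrative sketch (assumes the base Algorithm constructor forwards
	# keyword arguments to setParameters): reading a parameter back.
	# >>> HarrisHawksOptimization(NP=40, levy=0.01).getParameters()['levy']
	# 0.01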

	def initPopulation(self, task):
		r"""Initialize the starting population.

		Parameters:
			task (Task): Optimization task.

		Returns:
			Tuple[numpy.ndarray, numpy.ndarray[float], Dict[str, Any]]:
				1. New population.
				2. New population fitness/function values.
				3. Additional arguments.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.initPopulation`
		"""
		Sol, Fitness, d = Algorithm.initPopulation(self, task)
		return Sol, Fitness, d

	def runIteration(self, task, Sol, Fitness, xb, fxb, **dparams):
		r"""Core function of Harris Hawks Optimization.

		Parameters:
			task (Task): Optimization task.
			Sol (numpy.ndarray): Current population.
			Fitness (numpy.ndarray[float]): Current population fitness/function values.
			xb (numpy.ndarray): Current best individual.
			fxb (float): Current best individual function/fitness value.
			dparams (Dict[str, Any]): Additional algorithm arguments.

		Returns:
			Tuple[numpy.ndarray, numpy.ndarray, numpy.ndarray, float, Dict[str, Any]]:
				1. New population.
				2. New population fitness/function values.
				3. New global best solution.
				4. New global best fitness/objective value.
				5. Additional arguments.
		"""
		rnd = self.Rand
		# Escaping energy decays linearly over iterations: E = 2 * E0 * (1 - t / T).
		decreasing_energy_factor = 2 * (1 - task.iters() / task.nGEN)
		# Mean position of the current population, taken per dimension.
		mean_sol = np.mean(Sol, axis=0)
		# Update population
		for i in range(self.NP):
			jumping_energy = rnd.uniform(0, 2)
			decreasing_energy_random = rnd.uniform(-1, 1)
			escaping_energy = decreasing_energy_factor * decreasing_energy_random
			escaping_energy_abs = np.abs(escaping_energy)
			random_number = rnd.rand()
			if escaping_energy_abs >= 1 and random_number >= 0.5:
				# 0. Exploration: Random tall tree
				rhi = rnd.randint(0, self.NP)
				random_agent = Sol[rhi]
				Sol[i] = random_agent - rnd.rand() * \
						np.abs(random_agent - 2 * rnd.rand() * Sol[i])
			elif escaping_energy_abs >= 1 and random_number < 0.5:
				# 1. Exploration: Family members mean
				Sol[i] = \
						(xb - mean_sol) - \
						rnd.rand() * \
						rnd.uniform(task.Lower, task.Upper)
			elif escaping_energy_abs >= 0.5 and random_number >= 0.5:
				# 2. Exploitation: Soft besiege
				Sol[i] = \
						(xb - Sol[i]) - \
						escaping_energy * \
						np.abs(jumping_energy * xb - Sol[i])
			elif escaping_energy_abs < 0.5 and random_number >= 0.5:
				# 3. Exploitation: Hard besiege
				Sol[i] = \
						xb - \
						escaping_energy * \
						np.abs(xb - Sol[i])
			elif escaping_energy_abs >= 0.5 and random_number < 0.5:
				# 4. Exploitation: Soft besiege with progressive rapid dives
				cand1 = task.repair(xb - escaping_energy * \
						np.abs(jumping_energy * xb - Sol[i]), rnd=self.Rand)
				random_vector = rnd.rand(task.D)
				cand2 = task.repair(cand1 + random_vector * \
						levy_function(task.D, self.levy), rnd=self.Rand)
				if task.eval(cand1) < Fitness[i]:
					Sol[i] = cand1
				elif task.eval(cand2) < Fitness[i]:
					Sol[i] = cand2
			elif escaping_energy_abs < 0.5 and random_number < 0.5:
				# 5. Exploitation: Hard besiege with progressive rapid dives
				cand1 = task.repair(xb - escaping_energy * \
						np.abs(jumping_energy * xb - mean_sol), rnd=self.Rand)
				random_vector = rnd.rand(task.D)
				cand2 = task.repair(cand1 + random_vector * \
						levy_function(task.D, self.levy), rnd=self.Rand)
				if task.eval(cand1) < Fitness[i]:
					Sol[i] = cand1
				elif task.eval(cand2) < Fitness[i]:
					Sol[i] = cand2
			# Repair agent (from population) values
			Sol[i] = task.repair(Sol[i], rnd=self.Rand)
			# Eval population
			Fitness[i] = task.eval(Sol[i])
		# Get best of population
		best_index = np.argmin(Fitness)
		xb_cand = Sol[best_index].copy()
		fxb_cand = Fitness[best_index].copy()
		if fxb_cand < fxb:
			fxb = fxb_cand
			xb = xb_cand.copy()
		return Sol, Fitness, xb, fxb, {}
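
# A minimal usage sketch (not part of the original file): how one might run
# this algorithm against a benchmark. The StoppingTask/Sphere import paths and
# keyword names below are assumptions based on the NiaPy 1.x-era API and may
# need adjusting for the installed version.
#
#     from NiaPy.task import StoppingTask
#     from NiaPy.benchmarks import Sphere
#
#     task = StoppingTask(D=10, nGEN=1000, benchmark=Sphere())
#     algo = HarrisHawksOptimization(NP=40, levy=0.01)
#     best_solution, best_fitness = algo.run(task)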

# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3