Passed
Branch master (13db34)
by Grega
02:16
created

NiaPy.algorithms.other.mts.MTS_LS3()   D

Complexity

Conditions 12

Size

Total Lines 44
Code Lines 20

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
cc 12
eloc 20
nop 11
dl 0
loc 44
rs 4.8
c 0
b 0
f 0

How to fix   Complexity    Many Parameters   

Complexity

Complex classes like NiaPy.algorithms.other.mts.MTS_LS3() often do a lot of different things. To break such a class down, we need to identify a cohesive component within that class. A common approach to finding such a component is to look for fields/methods that share the same prefixes or suffixes.

Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.

Many Parameters

Methods with many parameters are not only hard to understand, but their parameters also often become inconsistent when you need more or different data.

There are several approaches to avoid long parameter lists:

1
# encoding=utf8
2
# pylint: disable=mixed-indentation, trailing-whitespace, multiple-statements, attribute-defined-outside-init, logging-not-lazy, no-self-use, len-as-condition, singleton-comparison, arguments-differ, line-too-long, unused-argument, consider-using-enumerate, bad-continuation, superfluous-parens, redefined-builtin
3
import logging
4
import operator as oper
5
6
from numpy import random as rand, vectorize, where, copy, apply_along_axis, argmin, argmax, argsort, fmin, fmax, full, asarray, abs, inf
7
8
from NiaPy.algorithms.algorithm import Algorithm
9
10
logging.basicConfig()
11
logger = logging.getLogger('NiaPy.algorithms.other')
12
logger.setLevel('INFO')
13
14
__all__ = ['MultipleTrajectorySearch', 'MultipleTrajectorySearchV1', 'MTS_LS1', 'MTS_LS1v1', 'MTS_LS2', 'MTS_LS3', 'MTS_LS3v1']
15
16 View Code Duplication
def MTS_LS1(Xk, Xk_fit, Xb, Xb_fit, improve, SR, task, BONUS1=10, BONUS2=1, sr_fix=0.4, rnd=rand, **ukwargs):
	r"""Multiple trajectory local search one.

	Args:
		Xk (numpy.ndarray): Current solution.
		Xk_fit (float): Current solutions fitness/function value.
		Xb (numpy.ndarray): Global best solution.
		Xb_fit (float): Global best solutions fitness/function value.
		improve (bool): Has the solution been improved.
		SR (numpy.ndarray): Search range.
		task (Task): Optimization task.
		BONUS1 (int): Bonus reward for improving global best solution.
		BONUS2 (int): Bonus reward for improving solution.
		sr_fix (numpy.ndarray): Fix when search range is too small.
		rnd (mtrand.RandomState): Random number generator.
		**ukwargs (Dict[str, Any]): Additional arguments.

	Returns:
		Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, float, numpy.ndarray]:
			1. New solution.
			2. New solutions fitness/function value.
			3. Global best if found else old global best.
			4. Global bests function/fitness value.
			5. If solution has improved.
			6. Grade accumulated by this local search run.
			7. Search range.
	"""
	if not improve:
		# Halve the search range; restore dimensions whose range collapsed.
		SR /= 2
		ifix = where(SR < 1e-15)
		SR[ifix] = task.bRange[ifix] * sr_fix
	improve, grade = False, 0.0
	for i in range(len(Xk)):
		Xk_i_old = Xk[i]
		Xk[i] = Xk_i_old - SR[i]
		Xk = task.repair(Xk, rnd)
		Xk_fit_new = task.eval(Xk)
		# Copy when promoting to global best: Xk keeps being mutated in-place
		# below, so Xb must not alias it (matches the copy in the else branch).
		if Xk_fit_new < Xb_fit: grade, Xb, Xb_fit = grade + BONUS1, copy(Xk), Xk_fit_new
		if Xk_fit_new == Xk_fit: Xk[i] = Xk_i_old
		elif Xk_fit_new > Xk_fit:
			# Worse going downhill: try half a step in the opposite direction.
			Xk[i] = Xk_i_old + 0.5 * SR[i]
			Xk = task.repair(Xk, rnd)
			Xk_fit_new = task.eval(Xk)  # Xk is already repaired; no second repair needed
			if Xk_fit_new < Xb_fit: grade, Xb, Xb_fit = grade + BONUS1, copy(Xk), Xk_fit_new
			if Xk_fit_new >= Xk_fit: Xk[i] = Xk_i_old
			else: grade, improve, Xk_fit = grade + BONUS2, True, Xk_fit_new
		else: grade, improve, Xk_fit = grade + BONUS2, True, Xk_fit_new
	return Xk, Xk_fit, Xb, Xb_fit, improve, grade, SR
63
64 View Code Duplication
def MTS_LS1v1(Xk, Xk_fit, Xb, Xb_fit, improve, SR, task, BONUS1=10, BONUS2=1, sr_fix=0.4, rnd=rand, **ukwargs):
	r"""Multiple trajectory local search one version two.

	Args:
		Xk (numpy.ndarray): Current solution.
		Xk_fit (float): Current solutions fitness/function value.
		Xb (numpy.ndarray): Global best solution.
		Xb_fit (float): Global best solutions fitness/function value.
		improve (bool): Has the solution been improved.
		SR (numpy.ndarray): Search range.
		task (Task): Optimization task.
		BONUS1 (int): Bonus reward for improving global best solution.
		BONUS2 (int): Bonus reward for improving solution.
		sr_fix (numpy.ndarray): Fix when search range is too small.
		rnd (mtrand.RandomState): Random number generator.
		**ukwargs (Dict[str, Any]): Additional arguments.

	Returns:
		Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, float, numpy.ndarray]:
			1. New solution.
			2. New solutions fitness/function value.
			3. Global best if found else old global best.
			4. Global bests function/fitness value.
			5. If solution has improved.
			6. Grade accumulated by this local search run.
			7. Search range.
	"""
	if not improve:
		# Halve the search range; restore dimensions whose range collapsed.
		SR /= 2
		ifix = where(SR < 1e-15)
		SR[ifix] = task.bRange[ifix] * sr_fix
	improve, D, grade = False, rnd.uniform(-1, 1, task.D), 0.0
	for i in range(len(Xk)):
		Xk_i_old = Xk[i]
		Xk[i] = Xk_i_old - SR[i] * D[i]
		Xk = task.repair(Xk, rnd)
		Xk_fit_new = task.eval(Xk)
		# Copy when promoting to global best: Xk is mutated in-place on later
		# iterations, so Xb must not alias it.
		if Xk_fit_new < Xb_fit: grade, Xb, Xb_fit = grade + BONUS1, copy(Xk), Xk_fit_new
		elif Xk_fit_new == Xk_fit: Xk[i] = Xk_i_old
		elif Xk_fit_new > Xk_fit:
			# Worse going downhill: try half a step in the opposite direction.
			Xk[i] = Xk_i_old + 0.5 * SR[i]
			Xk = task.repair(Xk, rnd)
			Xk_fit_new = task.eval(Xk)
			if Xk_fit_new < Xb_fit: grade, Xb, Xb_fit = grade + BONUS1, copy(Xk), Xk_fit_new
			if Xk_fit_new >= Xk_fit: Xk[i] = Xk_i_old
			else: grade, improve, Xk_fit = grade + BONUS2, True, Xk_fit_new
		else: grade, improve, Xk_fit = grade + BONUS2, True, Xk_fit_new
	return Xk, Xk_fit, Xb, Xb_fit, improve, grade, SR
111
112
def genNewX(x, r, d, SR, op):
	r"""Shift one solution component with the supplied operator.

	The shift ``op(x, SR * d)`` is applied only when the random draw ``r``
	equals zero; otherwise the component is returned untouched.

	Args:
		x (numpy.ndarray): Solution to move.
		r (int): Random number.
		d (float): Scale factor.
		SR (numpy.ndarray): Search range.
		op (operator): Operator to use.

	Returns:
		numpy.ndarray: Moved solution based on operator.
	"""
	if r == 0:
		return op(x, SR * d)
	return x
126
127
def MTS_LS2(Xk, Xk_fit, Xb, Xb_fit, improve, SR, task, BONUS1=10, BONUS2=1, sr_fix=0.4, rnd=rand, **ukwargs):
	r"""Multiple trajectory local search two.

	Args:
		Xk (numpy.ndarray): Current solution.
		Xk_fit (float): Current solutions fitness/function value.
		Xb (numpy.ndarray): Global best solution.
		Xb_fit (float): Global best solutions fitness/function value.
		improve (bool): Has the solution been improved.
		SR (numpy.ndarray): Search range.
		task (Task): Optimization task.
		BONUS1 (int): Bonus reward for improving global best solution.
		BONUS2 (int): Bonus reward for improving solution.
		sr_fix (numpy.ndarray): Fix when search range is too small.
		rnd (mtrand.RandomState): Random number generator.
		**ukwargs (Dict[str, Any]): Additional arguments.

	Returns:
		Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, numpy.ndarray]:
			1. New solution.
			2. New solutions fitness/function value.
			3. Global best if found else old global best.
			4. Global bests function/fitness value.
			5. If solution has improved.
			6. Search range.

	See Also:
		* :func:`NiaPy.algorithms.other.genNewX`
	"""
	if not improve:
		# Halve the search range and reset any dimension that collapsed.
		SR /= 2
		too_small = where(SR < 1e-15)
		SR[too_small] = task.bRange[too_small] * sr_fix
	improve, grade = False, 0.0
	for _ in range(len(Xk)):
		scales = -1 + rnd.rand(len(Xk)) * 2
		picks = rnd.choice([0, 1, 2, 3], len(Xk))
		# Perturb roughly a quarter of the components (picks == 0) downward.
		Xnew = task.repair(vectorize(genNewX)(Xk, picks, scales, SR, oper.sub), rnd)
		Xnew_f = task.eval(Xnew)
		if Xnew_f < Xb_fit:
			grade += BONUS1
			Xb, Xb_fit = Xnew, Xnew_f
		elif Xnew_f < Xk_fit:
			grade += BONUS2
			Xk, Xk_fit, improve = Xnew, Xnew_f, True
		elif Xnew_f > Xk_fit:
			# Downward move was worse: retry the same components upward.
			Xnew = task.repair(vectorize(genNewX)(Xk, picks, scales, SR, oper.add), rnd)
			Xnew_f = task.eval(Xnew)
			if Xnew_f < Xb_fit:
				grade += BONUS1
				Xb, Xb_fit = Xnew, Xnew_f
			if Xnew_f < Xk_fit:
				grade += BONUS2
				Xk, Xk_fit, improve = Xnew, Xnew_f, True
	return Xk, Xk_fit, Xb, Xb_fit, improve, grade, SR
175
176
def MTS_LS3(Xk, Xk_fit, Xb, Xb_fit, improve, SR, task, BONUS1=10, BONUS2=1, rnd=rand, **ukwargs):
	r"""Multiple trajectory local search three.

	Args:
		Xk (numpy.ndarray): Current solution.
		Xk_fit (float): Current solutions fitness/function value.
		Xb (numpy.ndarray): Global best solution.
		Xb_fit (float): Global best solutions fitness/function value.
		improve (bool): Has the solution been improved.
		SR (numpy.ndarray): Search range.
		task (Task): Optimization task.
		BONUS1 (int): Bonus reward for improving global best solution.
		BONUS2 (int): Bonus reward for improving solution.
		rnd (mtrand.RandomState): Random number generator.
		**ukwargs (Dict[str, Any]): Additional arguments.

	Returns:
		Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, float, numpy.ndarray]:
			1. New solution.
			2. New solutions fitness/function value.
			3. Global best if found else old global best.
			4. Global bests function/fitness value.
			5. If solution has improved.
			6. Grade accumulated by this local search run.
			7. Search range.
	"""
	Xk_new, grade = copy(Xk), 0.0
	for i in range(len(Xk)):
		# Probe three fixed perturbations of the i-th component.
		Xk1, Xk2, Xk3 = copy(Xk_new), copy(Xk_new), copy(Xk_new)
		Xk1[i], Xk2[i], Xk3[i] = Xk1[i] + 0.1, Xk2[i] - 0.1, Xk3[i] + 0.2
		Xk1, Xk2, Xk3 = task.repair(Xk1, rnd), task.repair(Xk2, rnd), task.repair(Xk3, rnd)
		Xk1_fit, Xk2_fit, Xk3_fit = task.eval(Xk1), task.eval(Xk2), task.eval(Xk3)
		if Xk1_fit < Xb_fit: grade, Xb, Xb_fit, improve = grade + BONUS1, Xk1, Xk1_fit, True
		if Xk2_fit < Xb_fit: grade, Xb, Xb_fit, improve = grade + BONUS1, Xk2, Xk2_fit, True
		if Xk3_fit < Xb_fit: grade, Xb, Xb_fit, improve = grade + BONUS1, Xk3, Xk3_fit, True
		# Fitness differences; infinite evaluations contribute no pull.
		D1 = Xk_fit - Xk1_fit if abs(Xk1_fit) != inf else 0
		D2 = Xk_fit - Xk2_fit if abs(Xk2_fit) != inf else 0
		D3 = Xk_fit - Xk3_fit if abs(Xk3_fit) != inf else 0
		if D1 > 0: grade, improve = grade + BONUS2, True
		if D2 > 0: grade, improve = grade + BONUS2, True
		if D3 > 0: grade, improve = grade + BONUS2, True
		a, b, c = 0.4 + rnd.rand() * 0.1, 0.1 + rnd.rand() * 0.2, rnd.rand()
		Xk_new[i] += a * (D1 - D2) + b * (D3 - 2 * D1) + c
		Xk_new = task.repair(Xk_new, rnd)
		Xk_fit_new = task.eval(Xk_new)
		# Copy on acceptance: Xk_new keeps being mutated on later iterations,
		# so the accepted Xk must be a snapshot or its stored fitness would
		# stop matching the array it points to.
		if Xk_fit_new < Xk_fit: Xk, Xk_fit, improve = copy(Xk_new), Xk_fit_new, True
	return Xk, Xk_fit, Xb, Xb_fit, improve, grade, SR
220
221
def MTS_LS3v1(Xk, Xk_fit, Xb, Xb_fit, improve, SR, task, phi=3, BONUS1=10, BONUS2=1, rnd=rand, **ukwargs):
	r"""Multiple trajectory local search three version one.

	Args:
		Xk (numpy.ndarray): Current solution.
		Xk_fit (float): Current solutions fitness/function value.
		Xb (numpy.ndarray): Global best solution.
		Xb_fit (float): Global best solutions fitness/function value.
		improve (bool): Has the solution been improved.
		SR (numpy.ndarray): Search range.
		task (Task): Optimization task.
		phi (int): Number of new generated positions.
		BONUS1 (int): Bonus reward for improving global best solution.
		BONUS2 (int): Bonus reward for improving solution.
		rnd (mtrand.RandomState): Random number generator.
		**ukwargs (Dict[str, Any]): Additional arguments.

	Returns:
		Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, float, numpy.ndarray]:
			1. New solution.
			2. New solutions fitness/function value.
			3. Global best if found else old global best.
			4. Global bests function/fitness value.
			5. If solution has improved.
			6. Grade accumulated by this local search run.
			7. Search range.
	"""
	grade, Disp = 0.0, task.bRange / 10
	# Keep sampling while any component of the dispersion is above threshold.
	while (Disp > 1e-3).any():
		Xn = apply_along_axis(task.repair, 1, asarray([rnd.permutation(Xk) + Disp * rnd.uniform(-1, 1, len(Xk)) for _ in range(phi)]), rnd)
		Xn_f = apply_along_axis(task.eval, 1, Xn)
		# Index ARRAYS of candidates better than the current/best solution.
		# (where() returns a tuple; taking [0] gives the actual indices --
		# previously len() of the tuple was always 1, corrupting the grade.)
		iBetter, iBetterBest = where(Xn_f < Xk_fit)[0], where(Xn_f < Xb_fit)[0]
		grade += len(iBetterBest) * BONUS1 + (len(iBetter) - len(iBetterBest)) * BONUS2
		if len(iBetterBest) > 0:
			# Map the argmin over the filtered subset back to an index into Xn
			# (previously the subset-local index was used to index the full array).
			ib, improve = iBetterBest[argmin(Xn_f[iBetterBest])], True
			Xb, Xb_fit, Xk, Xk_fit = copy(Xn[ib]), Xn_f[ib], Xn[ib], Xn_f[ib]
		elif len(iBetter) > 0:
			ib, improve = iBetter[argmin(Xn_f[iBetter])], True
			Xk, Xk_fit = Xn[ib], Xn_f[ib]
		# Shrink the dispersion around the (possibly updated) solution.
		Su, Sl = fmin(task.Upper, Xk + 2 * Disp), fmax(task.Lower, Xk - 2 * Disp)
		Disp = (Su - Sl) / 10
	return Xk, Xk_fit, Xb, Xb_fit, improve, grade, SR
262
263
class MultipleTrajectorySearch(Algorithm):
	r"""Implementation of Multiple trajectory search.

	Algorithm:
		Multiple trajectory search

	Date:
		2018

	Authors:
		Klemen Berkovic

	License:
		MIT

	Reference URL:
		https://ieeexplore.ieee.org/document/4631210/

	Reference paper:
		Lin-Yu Tseng and Chun Chen, "Multiple trajectory search for Large Scale Global Optimization," 2008 IEEE Congress on Evolutionary Computation (IEEE World Congress on Computational Intelligence), Hong Kong, 2008, pp. 3052-3059. doi: 10.1109/CEC.2008.4631210

	Attributes:
		Name (List[Str]): List of strings representing algorithm name.
		LSs (Iterable[Callable[[numpy.ndarray, float, numpy.ndarray, float, bool, numpy.ndarray, Task, Dict[str, Any]], Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, int, numpy.ndarray]]]): Local searches to use.
		BONUS1 (int): Bonus for improving global best solution.
		BONUS2 (int): Bonus for improving solution.
		NoLsTests (int): Number of test runs on local search algorithms.
		NoLs (int): Number of local search algorithm runs.
		NoLsBest (int): Number of locals search algorithm runs on best solution.
		NoEnabled (int): Number of best solution for testing.

	See Also:
		* :class:`NiaPy.algorithms.Algorithm`
	"""
	Name = ['MultipleTrajectorySearch', 'MTS']

	def __init__(self, **kwargs):
		Algorithm.__init__(self, **kwargs)
		# Default local searches; may be replaced through setParameters.
		self.LSs = [MTS_LS1, MTS_LS2, MTS_LS3]

	@staticmethod
	def typeParameters():
		r"""Get dictionary with functions for checking values of parameters.

		Returns:
			Dict[str, Callable]:
				* NoLsTests (Callable[[int], bool]): Check for a non-negative integer.
				* NoLs (Callable[[int], bool]): Check for a non-negative integer.
				* NoLsBest (Callable[[int], bool]): Check for a non-negative integer.
				* NoEnabled (Callable[[int], bool]): Check for a positive integer.
		"""
		return {
			'NoLsTests': lambda x: isinstance(x, int) and x >= 0,
			'NoLs': lambda x: isinstance(x, int) and x >= 0,
			'NoLsBest': lambda x: isinstance(x, int) and x >= 0,
			'NoEnabled': lambda x: isinstance(x, int) and x > 0
		}

	def setParameters(self, M=40, NoLsTests=5, NoLs=5, NoLsBest=5, NoEnabled=17, BONUS1=10, BONUS2=1, LSs=(MTS_LS1, MTS_LS2, MTS_LS3), **ukwargs):
		r"""Set the arguments of the algorithm.

		Arguments:
			M (int): Number of individuals in population.
			NoLsTests (int): Number of test runs on local search algorithms.
			NoLs (int): Number of local search algorithm runs.
			NoLsBest (int): Number of locals search algorithm runs on best solution.
			NoEnabled (int): Number of best solution for testing.
			BONUS1 (int): Bonus for improving global best solution.
			BONUS2 (int): Bonus for improving self.
			LSs (Iterable[Callable[[numpy.ndarray, float, numpy.ndarray, float, bool, numpy.ndarray, Task, Dict[str, Any]], Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, int, numpy.ndarray]]]): Local searches to use.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.setParameters`
		"""
		Algorithm.setParameters(self, NP=M)
		self.NoLsTests, self.NoLs, self.NoLsBest, self.NoEnabled, self.BONUS1, self.BONUS2 = NoLsTests, NoLs, NoLsBest, NoEnabled, BONUS1, BONUS2
		# FIX: the LSs argument was previously accepted but never stored, so
		# subclasses (e.g. MultipleTrajectorySearchV1) could not actually swap
		# in different local searches.  The default keeps the behavior of
		# __init__ (LS1/LS2/LS3, per the reference paper).
		self.LSs = LSs
		if ukwargs: logger.info('Unused arguments: %s' % (ukwargs))

	def GradingRun(self, x, x_f, xb, xb_f, improve, SR, task):
		r"""Run local search for getting scores of local searches.

		Args:
			x (numpy.ndarray): Solution for grading.
			x_f (float): Solutions fitness/function value.
			xb (numpy.ndarray): Global best solution.
			xb_f (float): Global best solutions function/fitness value.
			improve (bool): Info if solution has improved.
			SR (numpy.ndarray): Search range.
			task (Task): Optimization task.

		Returns:
			Tuple[numpy.ndarray, float, numpy.ndarray, float, int]:
				1. New solution.
				2. New solutions function/fitness value.
				3. Global best solution.
				4. Global best solutions fitness/function value.
				5. Index of the best-graded local search.
		"""
		# FIX: [[x, x_f]] * n builds n references to ONE shared inner list
		# (and one shared array x that every local search mutates in place).
		# Each local search must be graded from its own copy of the start point.
		ls_grades, Xn, Xnb = full(len(self.LSs), 0.0), [[copy(x), x_f] for _ in range(len(self.LSs))], [xb, xb_f]
		for _ in range(self.NoLsTests):
			for k in range(len(self.LSs)):
				Xn[k][0], Xn[k][1], xnb, xnb_f, improve, g, SR = self.LSs[k](Xn[k][0], Xn[k][1], Xnb[0], Xnb[1], improve, SR, task, BONUS1=self.BONUS1, BONUS2=self.BONUS2, rnd=self.Rand)
				if Xnb[1] > xnb_f: Xnb = [xnb, xnb_f]
				ls_grades[k] += g
		xn, k = min(Xn, key=lambda e: e[1]), argmax(ls_grades)
		return xn[0], xn[1], Xnb[0], Xnb[1], k

	def LsRun(self, k, x, x_f, xb, xb_f, improve, SR, g, task):
		r"""Run a selected local search.

		Args:
			k (int): Index of local search.
			x (numpy.ndarray): Current solution.
			x_f (float): Current solutions function/fitness value.
			xb (numpy.ndarray): Global best solution.
			xb_f (float): Global best solutions fitness/function value.
			improve (bool): If the solution has improved.
			SR (numpy.ndarray): Search range.
			g (float): Grade accumulator for this individual.
			task (Task): Optimization task.

		Returns:
			Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, numpy.ndarray, float]:
				1. New best solution found.
				2. New best solutions found function/fitness value.
				3. Global best solution.
				4. Global best solutions function/fitness value.
				5. If the solution has improved.
				6. Search range.
				7. Grade of local search run.
		"""
		XBn = list()
		for _j in range(self.NoLs):
			x, x_f, xnb, xnb_f, improve, grade, SR = self.LSs[k](x, x_f, xb, xb_f, improve, SR, task, BONUS1=self.BONUS1, BONUS2=self.BONUS2, rnd=self.Rand)
			g += grade
			XBn.append((xnb, xnb_f))
		# Keep the best global candidate seen over all NoLs runs.
		xb, xb_f = min(XBn, key=lambda e: e[1])
		return x, x_f, xb, xb_f, improve, SR, g

	def initPopulation(self, task):
		r"""Initialize starting population.

		Args:
			task (Task): Optimization task.

		Returns:
			Tuple[numpy.ndarray, numpy.ndarray[float], Dict[str, Any]]:
				1. Initialized population.
				2. Initialized populations function/fitness value.
				3. Additional arguments:
					* enable (numpy.ndarray[bool]): If solution/individual is enabled.
					* improve (numpy.ndarray[bool]): If solution/individual is improved.
					* SR (numpy.ndarray): Search range.
					* grades (numpy.ndarray[int]): Grade of solution/individual.
		"""
		X, X_f, d = Algorithm.initPopulation(self, task)
		# Search ranges start at half the task's bound range, per the paper.
		enable, improve, SR, grades = full(self.NP, True), full(self.NP, True), full([self.NP, task.D], task.bRange / 2), full(self.NP, 0.0)
		d.update({
			'enable': enable, 'improve': improve, 'SR': SR, 'grades': grades
		})
		return X, X_f, d

	def runIteration(self, task, X, X_f, xb, xb_f, enable, improve, SR, grades, **dparams):
		r"""Core function of MultipleTrajectorySearch algorithm.

		Args:
			task (Task): Optimization task.
			X (numpy.ndarray): Current population of individuals.
			X_f (numpy.ndarray[float]): Current individuals function/fitness values.
			xb (numpy.ndarray): Global best individual.
			xb_f (float): Global best individual function/fitness value.
			enable (numpy.ndarray[bool]): Enabled status of individuals.
			improve (numpy.ndarray[bool]): Improved status of individuals.
			SR (numpy.ndarray): Search ranges of individuals.
			grades (numpy.ndarray[int]): Grades of individuals.
			**dparams (Dict[str, Any]): Additional arguments.

		Returns:
			Tuple[numpy.ndarray, numpy.ndarray[float], Dict[str, Any]]:
				1. New population.
				2. New populations function/fitness values.
				3. Additional arguments:
					* enable (numpy.ndarray[bool]): If solution/individual is enabled.
					* improve (numpy.ndarray[bool]): If solution/individual is improved.
					* SR (numpy.ndarray): Search range.
					* grades (numpy.ndarray[int]): Grade of solution/individual.
		"""
		for i in range(len(X)):
			if not enable[i]: continue
			enable[i], grades[i] = False, 0
			# First grade the local searches, then run the best-graded one.
			X[i], X_f[i], xb, xb_f, k = self.GradingRun(X[i], X_f[i], xb, xb_f, improve[i], SR[i], task)
			X[i], X_f[i], xb, xb_f, improve[i], SR[i], grades[i] = self.LsRun(k, X[i], X_f[i], xb, xb_f, improve[i], SR[i], grades[i], task)
		# FIX: pass a copy of task.bRange -- MTS_LS1 halves SR in place when
		# improve is False, which previously corrupted the task's bound range.
		for _ in range(self.NoLsBest): _, _, xb, xb_f, _, _, _ = MTS_LS1(xb, xb_f, xb, xb_f, False, copy(task.bRange), task, rnd=self.Rand)
		# NOTE(review): argsort enables the LOWEST-graded individuals; the
		# reference paper selects the highest grades -- confirm intended order.
		enable[argsort(grades)[:self.NoEnabled]] = True
		return X, X_f, {'enable': enable, 'improve': improve, 'SR': SR, 'grades': grades}
456
457
class MultipleTrajectorySearchV1(MultipleTrajectorySearch):
	r"""Implementation of Multiple trajectory search.

	Algorithm:
		Multiple trajectory search

	Date:
		2018

	Authors:
		Klemen Berkovic

	License:
		MIT

	Reference URL:
		https://ieeexplore.ieee.org/document/4983179/

	Reference paper:
		Tseng, Lin-Yu, and Chun Chen. "Multiple trajectory search for unconstrained/constrained multi-objective optimization." Evolutionary Computation, 2009. CEC'09. IEEE Congress on. IEEE, 2009.

	Attributes:
		Name (List[str]): List of strings representing algorithm name.

	See Also:
		* :class:`NiaPy.algorithms.other.MultipleTrajectorySearch``
	"""
	Name = ['MultipleTrajectorySearchV1', 'MTSv1']

	def setParameters(self, **kwargs):
		r"""Set core parameters of MultipleTrajectorySearchV1 algorithm.

		Args:
			**kwargs (Dict[str, Any]): Additional arguments.

		See Also:
			* :func:`NiaPy.algorithms.other.MultipleTrajectorySearch.setParameters`
		"""
		# This variant never refines the global best separately, so any
		# caller-supplied NoLsBest is discarded and pinned to zero.
		if 'NoLsBest' in kwargs: del kwargs['NoLsBest']
		MultipleTrajectorySearch.setParameters(self, NoLsBest=0, LSs=(MTS_LS1v1, MTS_LS2, MTS_LS3v1), **kwargs)
497
498
# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
499