Passed
Pull Request — master (#202)
by Grega
01:02
created

MultipleTrajectorySearchV1.algorithmInfo()   A

Complexity

Conditions 1

Size

Total Lines 11
Code Lines 3

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
cc 1
eloc 3
nop 0
dl 0
loc 11
rs 10
c 0
b 0
f 0
# encoding=utf8
# pylint: disable=mixed-indentation, trailing-whitespace, multiple-statements, attribute-defined-outside-init, logging-not-lazy, no-self-use, len-as-condition, singleton-comparison, arguments-differ, line-too-long, unused-argument, consider-using-enumerate, bad-continuation, superfluous-parens, redefined-builtin
import logging
import operator as oper

from numpy import random as rand, vectorize, where, copy, apply_along_axis, argmin, argmax, argsort, fmin, fmax, full, asarray, abs, inf

from NiaPy.algorithms.algorithm import Algorithm

logging.basicConfig()
logger = logging.getLogger('NiaPy.algorithms.other')
logger.setLevel('INFO')

__all__ = ['MultipleTrajectorySearch', 'MultipleTrajectorySearchV1', 'MTS_LS1', 'MTS_LS1v1', 'MTS_LS2', 'MTS_LS3', 'MTS_LS3v1']

def MTS_LS1(Xk, Xk_fit, Xb, Xb_fit, improve, SR, task, BONUS1=10, BONUS2=1, sr_fix=0.4, rnd=rand, **ukwargs):
	r"""Multiple trajectory local search one.

	Probes each dimension with a negative step of ``SR[i]``; if that worsens
	the solution, retries with half a step in the positive direction.

	Args:
		Xk (numpy.ndarray): Current solution. Mutated in place during the search.
		Xk_fit (float): Current solutions fitness/function value.
		Xb (numpy.ndarray): Global best solution.
		Xb_fit (float): Global best solutions fitness/function value.
		improve (bool): Has the solution been improved.
		SR (numpy.ndarray): Search range. Halved in place when no improvement was made.
		task (Task): Optimization task.
		BONUS1 (int): Bonus reward for improving global best solution.
		BONUS2 (int): Bonus reward for improving solution.
		sr_fix (float): Fix factor applied when search range gets too small.
		rnd (mtrand.RandomState): Random number generator.
		**ukwargs (Dict[str, Any]): Additional arguments.

	Returns:
		Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, float, numpy.ndarray]:
			1. New solution.
			2. New solutions fitness/function value.
			3. Global best if found else old global best.
			4. Global bests function/fitness value.
			5. If solution has improved.
			6. Grade obtained by this local search run.
			7. Search range.
	"""
	if not improve:
		SR /= 2
		ifix = where(SR < 1e-15)
		# Reset degenerate (collapsed) search ranges to a fraction of the task range.
		SR[ifix] = task.bRange[ifix] * sr_fix
	improve, grade = False, 0.0
	for i in range(len(Xk)):
		Xk_i_old = Xk[i]
		# Probe in the negative direction along dimension i.
		Xk[i] = Xk_i_old - SR[i]
		Xk = task.repair(Xk, rnd)
		Xk_fit_new = task.eval(Xk)
		# copy(Xk): otherwise Xb aliases Xk and later in-place edits of Xk
		# would silently corrupt the stored global best.
		if Xk_fit_new < Xb_fit: grade, Xb, Xb_fit = grade + BONUS1, copy(Xk), Xk_fit_new
		if Xk_fit_new == Xk_fit: Xk[i] = Xk_i_old
		elif Xk_fit_new > Xk_fit:
			# Negative step was worse -> probe half a step in the positive direction.
			Xk[i] = Xk_i_old + 0.5 * SR[i]
			Xk = task.repair(Xk, rnd)
			# Xk was repaired on the previous line; a second repair was redundant.
			Xk_fit_new = task.eval(Xk)
			if Xk_fit_new < Xb_fit: grade, Xb, Xb_fit = grade + BONUS1, copy(Xk), Xk_fit_new
			if Xk_fit_new >= Xk_fit: Xk[i] = Xk_i_old
			else: grade, improve, Xk_fit = grade + BONUS2, True, Xk_fit_new
		else: grade, improve, Xk_fit = grade + BONUS2, True, Xk_fit_new
	return Xk, Xk_fit, Xb, Xb_fit, improve, grade, SR

def MTS_LS1v1(Xk, Xk_fit, Xb, Xb_fit, improve, SR, task, BONUS1=10, BONUS2=1, sr_fix=0.4, rnd=rand, **ukwargs):
	r"""Multiple trajectory local search one version one.

	Like :func:`MTS_LS1`, but the per-dimension step is additionally scaled
	by a random factor drawn once per call.

	Args:
		Xk (numpy.ndarray): Current solution. Mutated in place during the search.
		Xk_fit (float): Current solutions fitness/function value.
		Xb (numpy.ndarray): Global best solution.
		Xb_fit (float): Global best solutions fitness/function value.
		improve (bool): Has the solution been improved.
		SR (numpy.ndarray): Search range. Halved in place when no improvement was made.
		task (Task): Optimization task.
		BONUS1 (int): Bonus reward for improving global best solution.
		BONUS2 (int): Bonus reward for improving solution.
		sr_fix (float): Fix factor applied when search range gets too small.
		rnd (mtrand.RandomState): Random number generator.
		**ukwargs (Dict[str, Any]): Additional arguments.

	Returns:
		Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, float, numpy.ndarray]:
			1. New solution.
			2. New solutions fitness/function value.
			3. Global best if found else old global best.
			4. Global bests function/fitness value.
			5. If solution has improved.
			6. Grade obtained by this local search run.
			7. Search range.
	"""
	if not improve:
		SR /= 2
		ifix = where(SR < 1e-15)
		# Reset degenerate (collapsed) search ranges to a fraction of the task range.
		SR[ifix] = task.bRange[ifix] * sr_fix
	# One random scale per dimension, drawn once for the whole sweep.
	improve, D, grade = False, rnd.uniform(-1, 1, task.D), 0.0
	for i in range(len(Xk)):
		Xk_i_old = Xk[i]
		# Randomly scaled probe in the negative direction along dimension i.
		Xk[i] = Xk_i_old - SR[i] * D[i]
		Xk = task.repair(Xk, rnd)
		Xk_fit_new = task.eval(Xk)
		# copy(Xk): otherwise Xb aliases Xk and later in-place edits of Xk
		# would silently corrupt the stored global best.
		if Xk_fit_new < Xb_fit: grade, Xb, Xb_fit = grade + BONUS1, copy(Xk), Xk_fit_new
		elif Xk_fit_new == Xk_fit: Xk[i] = Xk_i_old
		elif Xk_fit_new > Xk_fit:
			# Negative step was worse -> probe half a step in the positive direction.
			Xk[i] = Xk_i_old + 0.5 * SR[i]
			Xk = task.repair(Xk, rnd)
			Xk_fit_new = task.eval(Xk)
			if Xk_fit_new < Xb_fit: grade, Xb, Xb_fit = grade + BONUS1, copy(Xk), Xk_fit_new
			if Xk_fit_new >= Xk_fit: Xk[i] = Xk_i_old
			else: grade, improve, Xk_fit = grade + BONUS2, True, Xk_fit_new
		else: grade, improve, Xk_fit = grade + BONUS2, True, Xk_fit_new
	return Xk, Xk_fit, Xb, Xb_fit, improve, grade, SR

def genNewX(x, r, d, SR, op):
	r"""Move solution to other position based on operator.

	Args:
		x (numpy.ndarray): Solution to move.
		r (int): Random number; the move is applied only when it is zero.
		d (float): Scale factor.
		SR (numpy.ndarray): Search range.
		op (operator): Operator to use (e.g. ``operator.add`` or ``operator.sub``).

	Returns:
		numpy.ndarray: Moved solution when ``r == 0``, otherwise ``x`` unchanged.
	"""
	if r == 0:
		return op(x, SR * d)
	return x

def MTS_LS2(Xk, Xk_fit, Xb, Xb_fit, improve, SR, task, BONUS1=10, BONUS2=1, sr_fix=0.4, rnd=rand, **ukwargs):
	r"""Multiple trajectory local search two.

	Perturbs a random subset of dimensions at once (those whose roll is zero),
	first in the negative direction, then — if that made things worse — in the
	positive direction.

	Args:
		Xk (numpy.ndarray): Current solution.
		Xk_fit (float): Current solutions fitness/function value.
		Xb (numpy.ndarray): Global best solution.
		Xb_fit (float): Global best solutions fitness/function value.
		improve (bool): Has the solution been improved.
		SR (numpy.ndarray): Search range. Halved in place when no improvement was made.
		task (Task): Optimization task.
		BONUS1 (int): Bonus reward for improving global best solution.
		BONUS2 (int): Bonus reward for improving solution.
		sr_fix (float): Fix factor applied when search range gets too small.
		rnd (mtrand.RandomState): Random number generator.
		**ukwargs (Dict[str, Any]): Additional arguments.

	Returns:
		Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, float, numpy.ndarray]:
			1. New solution.
			2. New solutions fitness/function value.
			3. Global best if found else old global best.
			4. Global bests function/fitness value.
			5. If solution has improved.
			6. Grade obtained by this local search run.
			7. Search range.

	See Also:
		* :func:`NiaPy.algorithms.other.genNewX`
	"""
	if not improve:
		SR /= 2
		too_small = where(SR < 1e-15)
		SR[too_small] = task.bRange[too_small] * sr_fix
	improve, grade = False, 0.0
	move = vectorize(genNewX)
	for _ in range(len(Xk)):
		# Per-dimension random scales in [-1, 1) and rolls in {0, 1, 2, 3};
		# only dimensions rolling 0 are moved.
		scales = -1 + rnd.rand(len(Xk)) * 2
		rolls = rnd.choice([0, 1, 2, 3], len(Xk))
		cand = task.repair(move(Xk, rolls, scales, SR, oper.sub), rnd)
		cand_f = task.eval(cand)
		if cand_f < Xb_fit:
			grade += BONUS1
			Xb, Xb_fit = cand, cand_f
		elif cand_f > Xk_fit:
			# Negative move was worse -> retry the same dimensions in the positive direction.
			cand = task.repair(move(Xk, rolls, scales, SR, oper.add), rnd)
			cand_f = task.eval(cand)
			if cand_f < Xb_fit:
				grade += BONUS1
				Xb, Xb_fit = cand, cand_f
			if cand_f < Xk_fit:
				grade += BONUS2
				Xk, Xk_fit, improve = cand, cand_f, True
		elif cand_f < Xk_fit:
			grade += BONUS2
			Xk, Xk_fit, improve = cand, cand_f, True
	return Xk, Xk_fit, Xb, Xb_fit, improve, grade, SR

def MTS_LS3(Xk, Xk_fit, Xb, Xb_fit, improve, SR, task, BONUS1=10, BONUS2=1, rnd=rand, **ukwargs):
	r"""Multiple trajectory local search three.

	For each dimension, evaluates three fixed offsets (+0.1, -0.1, +0.2) and
	combines the resulting fitness deltas into a randomized move.

	Args:
		Xk (numpy.ndarray): Current solution.
		Xk_fit (float): Current solutions fitness/function value.
		Xb (numpy.ndarray): Global best solution.
		Xb_fit (float): Global best solutions fitness/function value.
		improve (bool): Has the solution been improved.
		SR (numpy.ndarray): Search range (unused by this search, returned unchanged).
		task (Task): Optimization task.
		BONUS1 (int): Bonus reward for improving global best solution.
		BONUS2 (int): Bonus reward for improving solution.
		rnd (mtrand.RandomState): Random number generator.
		**ukwargs (Dict[str, Any]): Additional arguments.

	Returns:
		Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, float, numpy.ndarray]:
			1. New solution.
			2. New solutions fitness/function value.
			3. Global best if found else old global best.
			4. Global bests function/fitness value.
			5. If solution has improved.
			6. Grade obtained by this local search run.
			7. Search range.
	"""
	Xk_new, grade = copy(Xk), 0.0
	for i in range(len(Xk)):
		# Probe three fixed offsets along dimension i.
		Xk1, Xk2, Xk3 = copy(Xk_new), copy(Xk_new), copy(Xk_new)
		Xk1[i], Xk2[i], Xk3[i] = Xk1[i] + 0.1, Xk2[i] - 0.1, Xk3[i] + 0.2
		Xk1, Xk2, Xk3 = task.repair(Xk1, rnd), task.repair(Xk2, rnd), task.repair(Xk3, rnd)
		Xk1_fit, Xk2_fit, Xk3_fit = task.eval(Xk1), task.eval(Xk2), task.eval(Xk3)
		if Xk1_fit < Xb_fit: grade, Xb, Xb_fit, improve = grade + BONUS1, Xk1, Xk1_fit, True
		if Xk2_fit < Xb_fit: grade, Xb, Xb_fit, improve = grade + BONUS1, Xk2, Xk2_fit, True
		if Xk3_fit < Xb_fit: grade, Xb, Xb_fit, improve = grade + BONUS1, Xk3, Xk3_fit, True
		# Fitness deltas; an infinite fitness contributes no pull.
		D1 = Xk_fit - Xk1_fit if abs(Xk1_fit) != inf else 0
		D2 = Xk_fit - Xk2_fit if abs(Xk2_fit) != inf else 0
		D3 = Xk_fit - Xk3_fit if abs(Xk3_fit) != inf else 0
		if D1 > 0: grade, improve = grade + BONUS2, True
		if D2 > 0: grade, improve = grade + BONUS2, True
		if D3 > 0: grade, improve = grade + BONUS2, True
		a, b, c = 0.4 + rnd.rand() * 0.1, 0.1 + rnd.rand() * 0.2, rnd.rand()
		Xk_new[i] += a * (D1 - D2) + b * (D3 - 2 * D1) + c
		Xk_new = task.repair(Xk_new, rnd)
		Xk_fit_new = task.eval(Xk_new)
		# copy(Xk_new): otherwise Xk aliases Xk_new and later in-place edits of
		# Xk_new desynchronize the returned Xk from its reported fitness Xk_fit.
		if Xk_fit_new < Xk_fit: Xk, Xk_fit, improve = copy(Xk_new), Xk_fit_new, True
	return Xk, Xk_fit, Xb, Xb_fit, improve, grade, SR

def MTS_LS3v1(Xk, Xk_fit, Xb, Xb_fit, improve, SR, task, phi=3, BONUS1=10, BONUS2=1, rnd=rand, **ukwargs):
	r"""Multiple trajectory local search three version one.

	Repeatedly samples ``phi`` shuffled, perturbed copies of the current
	solution while shrinking the dispersion of the perturbation.

	Args:
		Xk (numpy.ndarray): Current solution.
		Xk_fit (float): Current solutions fitness/function value.
		Xb (numpy.ndarray): Global best solution.
		Xb_fit (float): Global best solutions fitness/function value.
		improve (bool): Has the solution been improved.
		SR (numpy.ndarray): Search range (unused by this search, returned unchanged).
		task (Task): Optimization task.
		phi (int): Number of new generated positions per round.
		BONUS1 (int): Bonus reward for improving global best solution.
		BONUS2 (int): Bonus reward for improving solution.
		rnd (mtrand.RandomState): Random number generator.
		**ukwargs (Dict[str, Any]): Additional arguments.

	Returns:
		Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, float, numpy.ndarray]:
			1. New solution.
			2. New solutions fitness/function value.
			3. Global best if found else old global best.
			4. Global bests function/fitness value.
			5. If solution has improved.
			6. Grade obtained by this local search run.
			7. Search range.
	"""
	grade, Disp = 0.0, task.bRange / 10
	# Keep sampling until every component of the dispersion is small enough.
	while (Disp > 1e-3).any():
		# phi shuffled, perturbed, repaired copies of the current solution.
		Xn = apply_along_axis(task.repair, 1, asarray([rnd.permutation(Xk) + Disp * rnd.uniform(-1, 1, len(Xk)) for _ in range(phi)]), rnd)
		Xn_f = apply_along_axis(task.eval, 1, Xn)
		# Take the index ARRAYS out of numpy.where's tuple: len() of the raw
		# tuple is always 1, which miscounted the grade, and argmin over the
		# filtered fitness values must be mapped back to an index into Xn.
		iBetter, iBetterBest = where(Xn_f < Xk_fit)[0], where(Xn_f < Xb_fit)[0]
		grade += len(iBetterBest) * BONUS1 + (len(iBetter) - len(iBetterBest)) * BONUS2
		if len(iBetterBest) > 0:
			ib, improve = iBetterBest[argmin(Xn_f[iBetterBest])], True
			# Copies so Xb and Xk do not alias the same candidate row.
			Xb, Xb_fit, Xk, Xk_fit = copy(Xn[ib]), Xn_f[ib], copy(Xn[ib]), Xn_f[ib]
		elif len(iBetter) > 0:
			ib, improve = iBetter[argmin(Xn_f[iBetter])], True
			Xk, Xk_fit = copy(Xn[ib]), Xn_f[ib]
		# Shrink the sampling box around the (possibly updated) solution.
		Su, Sl = fmin(task.Upper, Xk + 2 * Disp), fmax(task.Lower, Xk - 2 * Disp)
		Disp = (Su - Sl) / 10
	return Xk, Xk_fit, Xb, Xb_fit, improve, grade, SR

class MultipleTrajectorySearch(Algorithm):
	r"""Implementation of Multiple trajectory search.

	Algorithm:
		Multiple trajectory search

	Date:
		2018

	Authors:
		Klemen Berkovic

	License:
		MIT

	Reference URL:
		https://ieeexplore.ieee.org/document/4631210/

	Reference paper:
		Lin-Yu Tseng and Chun Chen, "Multiple trajectory search for Large Scale Global Optimization," 2008 IEEE Congress on Evolutionary Computation (IEEE World Congress on Computational Intelligence), Hong Kong, 2008, pp. 3052-3059. doi: 10.1109/CEC.2008.4631210

	Attributes:
		Name (List[Str]): List of strings representing algorithm name.
		LSs (Iterable[Callable[[numpy.ndarray, float, numpy.ndarray, float, bool, numpy.ndarray, Task, Dict[str, Any]], Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, int, numpy.ndarray]]]): Local searches to use.
		BONUS1 (int): Bonus for improving global best solution.
		BONUS2 (int): Bonus for improving solution.
		NoLsTests (int): Number of test runs on local search algorithms.
		NoLs (int): Number of local search algorithm runs.
		NoLsBest (int): Number of locals search algorithm runs on best solution.
		NoEnabled (int): Number of best solution for testing.

	See Also:
		* :class:`NiaPy.algorithms.Algorithm`
	"""
	Name = ['MultipleTrajectorySearch', 'MTS']

	@staticmethod
	def algorithmInfo():
		r"""Get basic information of algorithm.

		Returns:
			str: Basic information of algorithm.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.algorithmInfo`
		"""
		return r"""Lin-Yu Tseng and Chun Chen, "Multiple trajectory search for Large Scale Global Optimization," 2008 IEEE Congress on Evolutionary Computation (IEEE World Congress on Computational Intelligence), Hong Kong, 2008, pp. 3052-3059. doi: 10.1109/CEC.2008.4631210"""

	@staticmethod
	def typeParameters():
		r"""Get dictionary with functions for checking values of parameters.

		Returns:
			Dict[str, Callable]:
				* NoLsTests (Callable[[int], bool])
				* NoLs (Callable[[int], bool])
				* NoLsBest (Callable[[int], bool])
				* NoEnabled (Callable[[int], bool])
		"""
		return {
			'NoLsTests': lambda x: isinstance(x, int) and x >= 0,
			'NoLs': lambda x: isinstance(x, int) and x >= 0,
			'NoLsBest': lambda x: isinstance(x, int) and x >= 0,
			'NoEnabled': lambda x: isinstance(x, int) and x > 0
		}

	def setParameters(self, M=40, NoLsTests=5, NoLs=5, NoLsBest=5, NoEnabled=17, BONUS1=10, BONUS2=1, LSs=(MTS_LS1v1, MTS_LS2, MTS_LS3v1), **ukwargs):
		r"""Set the arguments of the algorithm.

		Arguments:
			M (int): Number of individuals in population.
			NoLsTests (int): Number of test runs on local search algorithms.
			NoLs (int): Number of local search algorithm runs.
			NoLsBest (int): Number of locals search algorithm runs on best solution.
			NoEnabled (int): Number of best solution for testing.
			BONUS1 (int): Bonus for improving global best solution.
			BONUS2 (int): Bonus for improving self.
			LSs (Iterable[Callable[[numpy.ndarray, float, numpy.ndarray, float, bool, numpy.ndarray, Task, Dict[str, Any]], Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, int, numpy.ndarray]]]): Local searches to use.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.setParameters`
		"""
		Algorithm.setParameters(self, NP=M)
		self.NoLsTests, self.NoLs, self.NoLsBest, self.NoEnabled, self.BONUS1, self.BONUS2 = NoLsTests, NoLs, NoLsBest, NoEnabled, BONUS1, BONUS2
		self.LSs = LSs
		if ukwargs: logger.info('Unused arguments: %s' % (ukwargs))

	def GradingRun(self, x, x_f, xb, xb_f, improve, SR, task):
		r"""Run local search for getting scores of local searches.

		Args:
			x (numpy.ndarray): Solution for grading.
			x_f (float): Solutions fitness/function value.
			xb (numpy.ndarray): Global best solution.
			xb_f (float): Global best solutions function/fitness value.
			improve (bool): Info if solution has improved.
			SR (numpy.ndarray): Search range.
			task (Task): Optimization task.

		Returns:
			Tuple[numpy.ndarray, float, numpy.ndarray, float, int]:
				1. New solution.
				2. New solutions function/fitness value.
				3. Global best solution.
				4. Global best solutions fitness/function value.
				5. Index of the best-graded local search.
		"""
		# One independent (solution, fitness) pair per local search. A list
		# comprehension with copy(x) is required here: `[[x, x_f]] * n` would
		# make every entry the SAME list sharing the SAME array, so the local
		# searches (which mutate their Xk in place) would overwrite each
		# other's work. Grades are sized by len(self.LSs), not a literal 3.
		ls_grades, Xn, Xnb = full(len(self.LSs), 0.0), [[copy(x), x_f] for _ in range(len(self.LSs))], [xb, xb_f]
		for _ in range(self.NoLsTests):
			for k in range(len(self.LSs)):
				Xn[k][0], Xn[k][1], xnb, xnb_f, improve, g, SR = self.LSs[k](Xn[k][0], Xn[k][1], Xnb[0], Xnb[1], improve, SR, task, BONUS1=self.BONUS1, BONUS2=self.BONUS2, rnd=self.Rand)
				if Xnb[1] > xnb_f: Xnb = [xnb, xnb_f]
				ls_grades[k] += g
		# Best solution over all searches; k is the search with the best grade.
		xn, k = min(Xn, key=lambda x: x[1]), argmax(ls_grades)
		return xn[0], xn[1], Xnb[0], Xnb[1], k

	def LsRun(self, k, x, x_f, xb, xb_f, improve, SR, g, task):
		r"""Run a selected local search.

		Args:
			k (int): Index of local search.
			x (numpy.ndarray): Current solution.
			x_f (float): Current solutions function/fitness value.
			xb (numpy.ndarray): Global best solution.
			xb_f (float): Global best solutions fitness/function value.
			improve (bool): If the solution has improved.
			SR (numpy.ndarray): Search range.
			g (float): Grade accumulated so far by this individual.
			task (Task): Optimization task.

		Returns:
			Tuple[numpy.ndarray, float, numpy.ndarray, float, bool, numpy.ndarray, float]:
				1. New best solution found.
				2. New best solutions found function/fitness value.
				3. Global best solution.
				4. Global best solutions function/fitness value.
				5. If the solution has improved.
				6. Search range.
				7. Updated grade.
		"""
		XBn = list()
		for _j in range(self.NoLs):
			x, x_f, xnb, xnb_f, improve, grade, SR = self.LSs[k](x, x_f, xb, xb_f, improve, SR, task, BONUS1=self.BONUS1, BONUS2=self.BONUS2, rnd=self.Rand)
			g += grade
			XBn.append((xnb, xnb_f))
		# Best of the global-best candidates produced by the runs.
		xb, xb_f = min(XBn, key=lambda x: x[1])
		return x, x_f, xb, xb_f, improve, SR, g

	def initPopulation(self, task):
		r"""Initialize starting population.

		Args:
			task (Task): Optimization task.

		Returns:
			Tuple[numpy.ndarray, numpy.ndarray[float], Dict[str, Any]]:
				1. Initialized population.
				2. Initialized populations function/fitness value.
				3. Additional arguments:
					* enable (numpy.ndarray[bool]): If solution/individual is enabled.
					* improve (numpy.ndarray[bool]): If solution/individual is improved.
					* SR (numpy.ndarray): Search range.
					* grades (numpy.ndarray[int]): Grade of solution/individual.
		"""
		X, X_f, d = Algorithm.initPopulation(self, task)
		enable, improve, SR, grades = full(self.NP, True), full(self.NP, True), full([self.NP, task.D], task.bRange / 2), full(self.NP, 0.0)
		d.update({
			'enable': enable, 'improve': improve, 'SR': SR, 'grades': grades
		})
		return X, X_f, d

	def runIteration(self, task, X, X_f, xb, xb_f, enable, improve, SR, grades, **dparams):
		r"""Core function of MultipleTrajectorySearch algorithm.

		Args:
			task (Task): Optimization task.
			X (numpy.ndarray): Current population of individuals.
			X_f (numpy.ndarray[float]): Current individuals function/fitness values.
			xb (numpy.ndarray): Global best individual.
			xb_f (float): Global best individual function/fitness value.
			enable (numpy.ndarray[bool]): Enabled status of individuals.
			improve (numpy.ndarray[bool]): Improved status of individuals.
			SR (numpy.ndarray): Search ranges of individuals.
			grades (numpy.ndarray[int]): Grades of individuals.
			**dparams (Dict[str, Any]): Additional arguments.

		Returns:
			Tuple[numpy.ndarray, numpy.ndarray[float], Dict[str, Any]]:
				1. Initialized population.
				2. Initialized populations function/fitness value.
				3. Additional arguments:
					* enable (numpy.ndarray[bool]): If solution/individual is enabled.
					* improve (numpy.ndarray[bool]): If solution/individual is improved.
					* SR (numpy.ndarray): Search range.
					* grades (numpy.ndarray[int]): Grade of solution/individual.
		"""
		for i in range(len(X)):
			if not enable[i]: continue
			enable[i], grades[i] = False, 0
			X[i], X_f[i], xb, xb_f, k = self.GradingRun(X[i], X_f[i], xb, xb_f, improve[i], SR[i], task)
			X[i], X_f[i], xb, xb_f, improve[i], SR[i], grades[i] = self.LsRun(k, X[i], X_f[i], xb, xb_f, improve[i], SR[i], grades[i], task)
		# Refine the global best. Pass copies: MTS_LS1 halves its SR argument
		# in place when improve is False (which would permanently shrink
		# task.bRange) and mutates its Xk argument in place (which would
		# overwrite xb before the refined result is unpacked).
		for _ in range(self.NoLsBest): _, _, xb, xb_f, _, _, _ = MTS_LS1(copy(xb), xb_f, xb, xb_f, False, copy(task.bRange), task, rnd=self.Rand)
		# Re-enable the NoEnabled individuals with the best (lowest) grades.
		enable[argsort(grades)[:self.NoEnabled]] = True
		return X, X_f, {'enable': enable, 'improve': improve, 'SR': SR, 'grades': grades}

class MultipleTrajectorySearchV1(MultipleTrajectorySearch):
	r"""Implementation of Multiple trajectory search.

	Algorithm:
		Multiple trajectory search

	Date:
		2018

	Authors:
		Klemen Berkovic

	License:
		MIT

	Reference URL:
		https://ieeexplore.ieee.org/document/4983179/

	Reference paper:
		Tseng, Lin-Yu, and Chun Chen. "Multiple trajectory search for unconstrained/constrained multi-objective optimization." Evolutionary Computation, 2009. CEC'09. IEEE Congress on. IEEE, 2009.

	Attributes:
		Name (List[str]): List of strings representing algorithm name.

	See Also:
		* :class:`NiaPy.algorithms.other.MultipleTrajectorySearch`
	"""
	Name = ['MultipleTrajectorySearchV1', 'MTSv1']

	@staticmethod
	def algorithmInfo():
		r"""Get basic information of algorithm.

		Returns:
			str: Basic information of algorithm.

		See Also:
			* :func:`NiaPy.algorithms.Algorithm.algorithmInfo`
		"""
		return r"""Tseng, Lin-Yu, and Chun Chen. "Multiple trajectory search for unconstrained/constrained multi-objective optimization." Evolutionary Computation, 2009. CEC'09. IEEE Congress on. IEEE, 2009."""

	def setParameters(self, **kwargs):
		r"""Set core parameters of MultipleTrajectorySearchV1 algorithm.

		Args:
			**kwargs (Dict[str, Any]): Additional arguments.

		See Also:
			* :func:`NiaPy.algorithms.other.MultipleTrajectorySearch.setParameters`
		"""
		# This variant never refines the global best separately, so any
		# caller-supplied NoLsBest is dropped before it is forced to zero.
		if 'NoLsBest' in kwargs: del kwargs['NoLsBest']
		MultipleTrajectorySearch.setParameters(self, NoLsBest=0, LSs=(MTS_LS1v1, MTS_LS2, MTS_LS3v1), **kwargs)

# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
520