Passed: Pull Request on master (#280), created by unknown, 01:24

NiaPy.algorithms.basic.hho   A

Complexity

Total Complexity 27

Size/Duplication

Total Lines 211
Duplicated Lines 0 %

Importance

Changes 0
Metric Value
wmc 27
eloc 91
dl 0
loc 211
rs 10
c 0
b 0
f 0

8 Methods

Rating   Name   Duplication   Size   Complexity  
F HarrisHawksOptimization.runIteration() 0 78 19
A HarrisHawksOptimization.typeParameters() 0 16 2
A HarrisHawksOptimization.setParameters() 0 11 1
A HarrisHawksOptimization.initPopulation() 0 16 1
A HarrisHawksOptimization.__init__() 0 2 1
A HarrisHawksOptimization.getParameters() 0 11 1
A HarrisHawksOptimization.levy_function() 0 16 1
A HarrisHawksOptimization.algorithmInfo() 0 11 1
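
For orientation before the listing, here is a minimal usage sketch of HarrisHawksOptimization. It assumes the NiaPy task API of this release (StoppingTask, the Sphere benchmark, and Algorithm.run); the problem size and evaluation budget are illustrative and not taken from the PR.

    # Minimal usage sketch; StoppingTask/Sphere/run are assumed from the NiaPy release this PR targets.
    from NiaPy.algorithms.basic import HarrisHawksOptimization
    from NiaPy.benchmarks import Sphere
    from NiaPy.task import StoppingTask

    # 10-dimensional Sphere problem with a budget of 10000 function evaluations (illustrative values).
    task = StoppingTask(D=10, nFES=10000, benchmark=Sphere())
    algorithm = HarrisHawksOptimization(NP=40, levy=0.01)
    best_solution, best_fitness = algorithm.run(task)
    print(best_solution, best_fitness)
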
# encoding=utf8
import logging

from numpy import random as rand, sin, pi, argmin, abs, mean
from scipy.special import gamma

from NiaPy.algorithms.algorithm import Algorithm

logging.basicConfig()
logger = logging.getLogger('NiaPy.algorithms.basic')
logger.setLevel('INFO')

__all__ = ['HarrisHawksOptimization']


class HarrisHawksOptimization(Algorithm):
    r"""Implementation of Harris Hawks Optimization algorithm.

    Algorithm:
            Harris Hawks Optimization

    Date:
            2020

    Authors:
            Francisco Jose Solis-Munoz

    License:
            MIT

    Reference paper:
            Heidari et al. "Harris hawks optimization: Algorithm and applications". Future Generation Computer Systems. 2019. Vol. 97. 849-872.

    Attributes:
            Name (List[str]): List of strings representing algorithm name.
            levy (float): Levy factor.

    See Also:
            * :class:`NiaPy.algorithms.Algorithm`
    """
    Name = ['HarrisHawksOptimization', 'HHO']

    def __init__(self, **kwargs):
        super(HarrisHawksOptimization, self).__init__(**kwargs)

    @staticmethod
    def algorithmInfo():
        r"""Get algorithm information.

        Returns:
                str: Algorithm information.

        See Also:
                * :func:`NiaPy.algorithms.Algorithm.algorithmInfo`
        """
        return r"""Heidari et al. "Harris hawks optimization: Algorithm and applications". Future Generation Computer Systems. 2019. Vol. 97. 849-872."""

    @staticmethod
    def typeParameters():
        r"""Return a dict where each key is a parameter name and each value is a function that validates that parameter.

        Returns:
                Dict[str, Callable]:
                        * levy (Callable[[Union[float, int]], bool]): Checks that the Levy factor is a positive number.

        See Also:
                * :func:`NiaPy.algorithms.Algorithm.typeParameters`
        """
        d = Algorithm.typeParameters()
        d.update({
            'levy': lambda x: isinstance(x, (float, int)) and x > 0,
        })
        return d

    def setParameters(self, NP=40, levy=0.01, **ukwargs):
        r"""Set the parameters of the algorithm.

        Args:
                NP (Optional[int]): Population size.
                levy (Optional[float]): Levy factor.

        See Also:
                * :func:`NiaPy.algorithms.Algorithm.setParameters`
        """
        Algorithm.setParameters(self, NP=NP, **ukwargs)
        self.levy = levy

    def getParameters(self):
        r"""Get parameters of the algorithm.

        Returns:
                Dict[str, Any]: Algorithm parameters.
        """
        d = Algorithm.getParameters(self)
        d.update({
            'levy': self.levy
        })
        return d

    def initPopulation(self, task, rnd=rand):
        r"""Initialize the starting population.

        Parameters:
                task (Task): Optimization task

        Returns:
                Tuple[numpy.ndarray, numpy.ndarray[float], Dict[str, Any]]:
                        1. New population.
                        2. New population fitness/function values.
                        3. Additional arguments.

        See Also:
                * :func:`NiaPy.algorithms.Algorithm.initPopulation`
        """
        Sol, Fitness, d = Algorithm.initPopulation(self, task)
        return Sol, Fitness, d

    def levy_function(self, dims, step=0.01, rnd=rand):
        r"""Calculate a Levy flight step.

        Parameters:
                dims (int): Number of dimensions
                step (float): Step size of the Levy flight

        Returns:
                numpy.ndarray: Levy step for each dimension
        """
        beta = 1.5
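        # The lines below follow Mantegna's algorithm for drawing a Levy-stable step:
        # sigma = [Gamma(1 + beta) * sin(pi * beta / 2) / (Gamma((1 + beta) / 2) * beta * 2 ** ((beta - 1) / 2))] ** (1 / beta),
        # then step * u / |v| ** (1 / beta), with u ~ N(0, sigma ** 2) and v ~ N(0, 1) drawn per dimension.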
        sigma = (gamma(1 + beta) * sin(pi * beta / 2) / (gamma((1 + beta) / 2) * beta * 2.0 ** ((beta - 1) / 2))) ** (1 / beta)
        normal_1 = rnd.normal(0, sigma, size=dims)
        normal_2 = rnd.normal(0, 1, size=dims)
        result = step * normal_1 / (abs(normal_2) ** (1 / beta))
        return result

    def runIteration(self, task, Sol, Fitness, xb, fxb, **dparams):
        r"""Core function of Harris Hawks Optimization.

        Parameters:
                task (Task): Optimization task.
                Sol (numpy.ndarray): Current population
                Fitness (numpy.ndarray[float]): Current population fitness/function values
                xb (numpy.ndarray): Current best individual
                fxb (float): Current best individual function/fitness value
                dparams (Dict[str, Any]): Additional algorithm arguments

        Returns:
                Tuple[numpy.ndarray, numpy.ndarray, numpy.ndarray, float, Dict[str, Any]]:
                        1. New population
                        2. New population fitness/function values
                        3. New global best solution
                        4. New global best fitness/objective value
                        5. Additional arguments
        """
        # Decreasing energy factor
        decreasing_energy_factor = 2 * (1 - task.iters() / task.nGEN)
        # Mean position of the population (one value per dimension)
        mean_sol = mean(Sol, axis=0)
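        # Phase selection per hawk, following the reference paper: |E| >= 1 triggers the two
        # exploration moves (random tall tree for r >= 0.5, family members mean otherwise),
        # while |E| < 1 triggers exploitation, where r and the |E| >= 0.5 threshold pick between
        # soft besiege, hard besiege, and their progressive rapid dive variants.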
        # Update population
        for i in range(self.NP):
            jumping_energy = self.Rand.uniform(0, 2)
            decreasing_energy_random = self.Rand.uniform(-1, 1)
            escaping_energy = decreasing_energy_factor * decreasing_energy_random
            escaping_energy_abs = abs(escaping_energy)
            random_number = self.Rand.rand()
            if escaping_energy_abs >= 1 and random_number >= 0.5:
                # 0. Exploration: Random tall tree
                rhi = self.Rand.randint(0, self.NP)
                random_agent = Sol[rhi]
                Sol[i] = random_agent - self.Rand.rand() * abs(random_agent - 2 * self.Rand.rand() * Sol[i])
            elif escaping_energy_abs >= 1 and random_number < 0.5:
                # 1. Exploration: Family members mean
                Sol[i] = (xb - mean_sol) - self.Rand.rand() * self.Rand.uniform(task.Lower, task.Upper)
            elif escaping_energy_abs >= 0.5 and random_number >= 0.5:
                # 2. Exploitation: Soft besiege
                Sol[i] = (xb - Sol[i]) - escaping_energy * abs(jumping_energy * xb - Sol[i])
            elif escaping_energy_abs < 0.5 and random_number >= 0.5:
                # 3. Exploitation: Hard besiege
                Sol[i] = xb - escaping_energy * abs(xb - Sol[i])
            elif escaping_energy_abs >= 0.5 and random_number < 0.5:
                # 4. Exploitation: Soft besiege with progressive rapid dives
                cand1 = task.repair(xb - escaping_energy * abs(jumping_energy * xb - Sol[i]), rnd=self.Rand)
                random_vector = self.Rand.rand(task.D)
                cand2 = task.repair(cand1 + random_vector * self.levy_function(task.D, self.levy, rnd=self.Rand), rnd=self.Rand)
                if task.eval(cand1) < Fitness[i]:
                    Sol[i] = cand1
                elif task.eval(cand2) < Fitness[i]:
                    Sol[i] = cand2
            elif escaping_energy_abs < 0.5 and random_number < 0.5:
                # 5. Exploitation: Hard besiege with progressive rapid dives
                cand1 = task.repair(xb - escaping_energy * abs(jumping_energy * xb - mean_sol), rnd=self.Rand)
                random_vector = self.Rand.rand(task.D)
                cand2 = task.repair(cand1 + random_vector * self.levy_function(task.D, self.levy, rnd=self.Rand), rnd=self.Rand)
                if task.eval(cand1) < Fitness[i]:
                    Sol[i] = cand1
                elif task.eval(cand2) < Fitness[i]:
                    Sol[i] = cand2
            # Repair agent (from population) values
            Sol[i] = task.repair(Sol[i], rnd=self.Rand)
            # Eval population
            Fitness[i] = task.eval(Sol[i])
        # Get best of population
        best_index = argmin(Fitness)
        xb_cand = Sol[best_index].copy()
        fxb_cand = Fitness[best_index].copy()
        if fxb_cand < fxb:
            fxb = fxb_cand
            xb = xb_cand.copy()
        return Sol, Fitness, xb, fxb, {}

# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3