Passed
Pull Request — master (#414)
by Osma
02:12
created

annif.backend.hyperopt   A

Complexity

Total Complexity 6

Size/Duplication

Total Lines 56
Duplicated Lines 0 %

Importance

Changes 0
Metric Value
wmc 6
eloc 32
dl 0
loc 56
rs 10
c 0
b 0
f 0

6 Methods

Rating   Name   Duplication   Size   Complexity  
A HyperparameterOptimizer.optimize() 0 15 1
A HyperparameterOptimizer._prepare() 0 3 1
A HyperparameterOptimizer._test() 0 4 1
A HyperparameterOptimizer.get_hp_space() 0 4 1
A AnnifHyperoptBackend.get_hp_optimizer() 0 6 1
A HyperparameterOptimizer.__init__() 0 3 1
1
"""Hyperparameter optimization functionality for backends"""
2
3
import abc
4
import hyperopt
5
from .backend import AnnifBackend
6
7
8
class HyperparameterOptimizer:
    """Abstract base for objects that search for a good hyperparameter
    combination for a backend, evaluated against a document corpus."""

    def __init__(self, backend, corpus):
        # backend: the AnnifBackend instance whose hyperparameters we tune
        # corpus: the document corpus used to score each candidate
        self._corpus = corpus
        self._backend = backend

    @abc.abstractmethod
    def get_hp_space(self):
        """Get the hyperparameter space definition of this backend"""
        pass  # pragma: no cover

    def _prepare(self):
        """Prepare the optimizer for hyperparameter evaluation"""
        pass  # pragma: no cover

    @abc.abstractmethod
    def _test(self, hps):
        """Evaluate a set of hyperparameters"""
        pass  # pragma: no cover

    def optimize(self, n_trials):
        """Search the hyperparameter space, trying at most n_trials
        combinations, and return a tuple of (best hyperparameters,
        best score)."""

        self._prepare()
        trial_log = hyperopt.Trials()
        # TPE-guided minimization of the loss reported by _test()
        best_hps = hyperopt.fmin(
            fn=self._test,
            space=self.get_hp_space(),
            algo=hyperopt.tpe.suggest,
            max_evals=n_trials,
            trials=trial_log,
            show_progressbar=False)
        # _test() reports loss = 1 - score, so invert it back to a score
        best_score = 1 - trial_log.best_trial['result']['loss']
        return (best_hps, best_score)
class AnnifHyperoptBackend(AnnifBackend):
    """Abstract base for Annif backends whose hyperparameters can be
    tuned with a HyperparameterOptimizer."""

    @abc.abstractmethod
    def get_hp_optimizer(self, corpus):
        """Return a HyperparameterOptimizer that can search for the
        optimal hyperparameter combination using the given corpus."""

        pass  # pragma: no cover