Code Duplication    Length = 12-16 lines in 2 locations

deepy/trainers/annealers.py (2 locations)

@@ 31-46 (lines=16) @@
28        if type(self._learning_rate) == float:
29            raise Exception("use shared_scalar to wrap the value in the config.")
30
31    def invoke(self):
32        """
33        Run it, return whether to end training.
34        """
35        self._iter += 1
36        if self._iter - max(self._trainer.best_iter, self._annealed_iter) >= self._patience:
37            if self._annealed_times >= self._anneal_times:
38                logging.info("ending")
39                return True
40            else:
41                self._trainer.set_params(*self._trainer.best_params)
42                self._learning_rate.set_value(self._learning_rate.get_value() * 0.5)
43                self._annealed_times += 1
44                self._annealed_iter = self._iter
45                logging.info("annealed learning rate to %f" % self._learning_rate.get_value())
46        return False
47
48    @staticmethod
49    def learning_rate(value=0.01):
@@ 70-81 (lines=12) @@
67        self._iter = 0
68        self._rollback = rollback
69
70    def invoke(self):
71        self._iter += 1
72        if self._iter >= self.iter_start_halving:
73            if self._rollback:
74                self._trainer.set_params(*self._trainer.best_params)
75            self._learning_rate.set_value(self._learning_rate.get_value() * 0.5)
76            logging.info("halving learning rate to %f" % self._learning_rate.get_value())
77            self._trainer.network.train_logger.record("set learning rate to %f" % self._learning_rate.get_value())
78        if self._iter >= self.end_at:
79            logging.info("ending")
80            return True
81        return False
82
83
84class ExponentialLearningRateAnnealer(TrainingController):
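
Both flagged invoke() bodies duplicate the same core step: optionally roll back to the trainer's best parameters, halve the shared learning-rate scalar, and log the new value. A minimal sketch of one way to remove the clone, assuming only the attributes both annealers already initialize; the mixin name, the _halve_learning_rate helper, and the log_prefix parameter are illustrative, not part of deepy's API:

import logging


class AnnealingStepMixin(object):
    """Illustrative mixin holding the step both annealers duplicate.

    Assumes the host class provides self._trainer (exposing best_params and
    set_params) and self._learning_rate (a shared scalar with get_value and
    set_value), as both annealers in annealers.py already do.
    """

    def _halve_learning_rate(self, rollback=True, log_prefix="annealed"):
        # Optionally restore the best parameters seen so far before annealing.
        if rollback:
            self._trainer.set_params(*self._trainer.best_params)
        # Halve the shared learning-rate scalar in place and log the new value.
        self._learning_rate.set_value(self._learning_rate.get_value() * 0.5)
        logging.info("%s learning rate to %f" % (log_prefix, self._learning_rate.get_value()))

With the shared step extracted, each invoke() keeps only its own scheduling logic (patience-based annealing in the first location, fixed-iteration halving in the second, which can still record to train_logger at the call site), and the 12-16 line match reported above disappears.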