| Metric | Value |
| ----------- | ----- |
| Conditions | 9 |
| Total Lines | 66 |
| Lines | 0 |
| Ratio | 0 % |
| Changes | 0 |
Small methods make your code easier to understand, especially when combined with a good name. Conveniently, the smaller a method is, the easier it usually is to name it well.
For example, if you find yourself adding comments to a method's body, that is usually a sign that the commented part should be extracted into a new method; the comment then makes a good starting point for the new method's name.
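A minimal sketch of that move (the order and stock names here are invented for illustration, not taken from any particular codebase):

```python
class OutOfStockError(Exception):
    pass

STOCK = {"apple": 10, "pear": 0}

# Before: a comment announces what the next block of code does.
def process_order(items):
    # check that every item is in stock
    for sku, quantity in items:
        if quantity > STOCK.get(sku, 0):
            raise OutOfStockError(sku)
    print("charging for", items)

# After: the commented block is extracted, and the comment's wording
# becomes the new method's name. (The caller is renamed here only so
# both versions can coexist in one example.)
def ensure_items_in_stock(items):
    for sku, quantity in items:
        if quantity > STOCK.get(sku, 0):
            raise OutOfStockError(sku)

def process_order_extracted(items):
    ensure_items_in_stock(items)
    print("charging for", items)

process_order_extracted([("apple", 3)])  # prints: charging for [('apple', 3)]
```

The caller now reads as a sequence of named steps, and the comment is no longer needed.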
Commonly applied refactorings include:

- Extract Method
- Replace Method with Method Object, if many parameters or temporary variables are present

As a concrete refactoring candidate, consider the following excerpt: the 66-line `__init__` method of pytest-benchmark's `BenchmarkSession`. One possible way to break it up with Extract Method is sketched after the listing.
```python
from __future__ import division

# ... (imports and other definitions elided) ...

def __init__(self, config):
    self.verbose = config.getoption("benchmark_verbose")
    self.logger = Logger(self.verbose, config)
    self.config = config
    self.performance_regressions = []
    self.benchmarks = []
    self.machine_id = get_machine_id()
    self.storage = load_storage(
        config.getoption("benchmark_storage"),
        logger=self.logger,
        default_machine_id=self.machine_id,
        netrc=config.getoption("benchmark_netrc")
    )
    self.options = dict(
        min_time=SecondsDecimal(config.getoption("benchmark_min_time")),
        min_rounds=config.getoption("benchmark_min_rounds"),
        max_time=SecondsDecimal(config.getoption("benchmark_max_time")),
        timer=load_timer(config.getoption("benchmark_timer")),
        calibration_precision=config.getoption("benchmark_calibration_precision"),
        disable_gc=config.getoption("benchmark_disable_gc"),
        warmup=config.getoption("benchmark_warmup"),
        warmup_iterations=config.getoption("benchmark_warmup_iterations"),
        use_cprofile=bool(config.getoption("benchmark_cprofile")),
    )
    self.skip = config.getoption("benchmark_skip")
    self.disabled = config.getoption("benchmark_disable") and not config.getoption("benchmark_enable")
    self.cprofile_sort_by = config.getoption("benchmark_cprofile")

    if config.getoption("dist", "no") != "no" and not self.skip:
        self.logger.warn(
            "BENCHMARK-U2",
            "Benchmarks are automatically disabled because xdist plugin is active. "
            "Benchmarks cannot be performed reliably in a parallelized environment.",
            fslocation="::"
        )
        self.disabled = True
    if hasattr(config, "slaveinput"):
        self.disabled = True
    if not statistics:
        self.logger.warn(
            "BENCHMARK-U3",
            "Benchmarks are automatically disabled because we could not import `statistics`\n\n%s" %
            statistics_error,
            fslocation="::"
        )
        self.disabled = True

    self.only = config.getoption("benchmark_only")
    self.sort = config.getoption("benchmark_sort")
    self.columns = config.getoption("benchmark_columns")
    if self.skip and self.only:
        raise pytest.UsageError("Can't have both --benchmark-only and --benchmark-skip options.")
    if self.disabled and self.only:
        raise pytest.UsageError(
            "Can't have both --benchmark-only and --benchmark-disable options. Note that --benchmark-disable is "
            "automatically activated if xdist is on or you're missing the statistics dependency.")
    self.group_by = config.getoption("benchmark_group_by")
    self.save = config.getoption("benchmark_save")
    self.autosave = config.getoption("benchmark_autosave")
    self.save_data = config.getoption("benchmark_save_data")
    self.json = config.getoption("benchmark_json")
    self.compare = config.getoption("benchmark_compare")
    self.compare_fail = config.getoption("benchmark_compare_fail")
    self.name_format = NAME_FORMATTERS[config.getoption("benchmark_name")]

    self.histogram = first_or_value(config.getoption("benchmark_histogram"), False)
```
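Applied to this excerpt, Extract Method might look like the sketch below. This is only one possible decomposition: the helper names (`_load_storage`, `_load_options`, and so on) are invented for illustration and are not part of pytest-benchmark's actual API, and the sketch assumes the same module context (`Logger`, `get_machine_id`, `SecondsDecimal`, `load_timer`) as the excerpt above.

```python
class BenchmarkSession(object):
    # Sketch only: the helper names below are hypothetical, not part of
    # pytest-benchmark's actual API.
    def __init__(self, config):
        self.verbose = config.getoption("benchmark_verbose")
        self.logger = Logger(self.verbose, config)
        self.config = config
        self.performance_regressions = []
        self.benchmarks = []
        self.machine_id = get_machine_id()
        self.storage = self._load_storage(config)   # the load_storage(...) call
        self.options = self._load_options(config)   # the 11-line dict(...) block
        self._apply_auto_disable_rules(config)      # the xdist/statistics checks
        self._load_reporting_options(config)        # save/compare/histogram flags
        self._validate_option_combinations()        # the pytest.UsageError checks

    def _load_options(self, config):
        # Each extracted method has one concern and a name that states it.
        return dict(
            min_time=SecondsDecimal(config.getoption("benchmark_min_time")),
            min_rounds=config.getoption("benchmark_min_rounds"),
            max_time=SecondsDecimal(config.getoption("benchmark_max_time")),
            timer=load_timer(config.getoption("benchmark_timer")),
            calibration_precision=config.getoption("benchmark_calibration_precision"),
            disable_gc=config.getoption("benchmark_disable_gc"),
            warmup=config.getoption("benchmark_warmup"),
            warmup_iterations=config.getoption("benchmark_warmup_iterations"),
            use_cprofile=bool(config.getoption("benchmark_cprofile")),
        )
```

The remaining helpers would absorb the other assignments and checks in the same way. `__init__` then reads as an overview, each helper fits on one screen, and no single method carries all nine conditions.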