| Total Complexity | 2 |
| Total Lines | 33 |
| Duplicated Lines | 0 % |
| Changes | 0 |
| 1 | import numpy as np |
||
| 2 | from gradient_free_optimizers import RandomSearchOptimizer |
||
| 3 | |||
| 4 | |||
def convex_function(pos_new):
    """Objective to maximize: negated sum of squares, peaking at the origin."""
    x1 = pos_new["x1"]
    x2 = pos_new["x2"]
    return -(x1 * x1 + x2 * x2)
||
| 8 | |||
| 9 | |||
# Two-dimensional discretized search space: each axis runs from -100.0
# upward in 0.1 steps (np.arange excludes the stop value 101).
search_space = {dim: np.arange(-100, 101, 0.1) for dim in ("x1", "x2")}
||
| 14 | |||
| 15 | |||
def constraint_1(para):
    """Predicate: a position is valid only when its 'x1' value exceeds -5."""
    is_valid = para["x1"] > -5
    return is_valid
||
| 19 | |||
| 20 | |||
# Constraints are handed to the optimizer as a list of predicates; only
# positions for which every predicate returns True are sampled.
constraints_list = [constraint_1]

# Run a constrained random search over the objective.
opt = RandomSearchOptimizer(search_space, constraints=constraints_list)
opt.search(convex_function, n_iter=50)

# Collected samples — none should have x1 at or below -5.
search_data = opt.search_data
print("\n search_data \n", search_data, "\n")