from keras.models import Sequential
from keras.layers import Dense, Conv2D, MaxPooling2D, Flatten, Dropout, Activation
from keras.datasets import cifar10
from keras.utils import to_categorical

from hyperactive import Hyperactive

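# Load CIFAR-10 and one-hot encode the 10 class labels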
(X_train, y_train), (X_test, y_test) = cifar10.load_data()

y_train = to_categorical(y_train, 10)
y_test = to_categorical(y_test, 10)

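# Objective function: Hyperactive calls it with the sampled hyperparameters in
# "para" and tries to maximize the returned score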
def cnn(para, X_train, y_train):
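    # First convolution block; the filter count comes from the search space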
    nn = Sequential()
    nn.add(
        Conv2D(para["filter.0"], (3, 3), padding="same", input_shape=X_train.shape[1:])
    )
    nn.add(Activation("relu"))
    nn.add(Conv2D(para["filter.0"], (3, 3)))
    nn.add(Activation("relu"))
    nn.add(MaxPooling2D(pool_size=(2, 2)))
    nn.add(Dropout(0.25))

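    # Second convolution block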
    nn.add(Conv2D(para["filter.0"], (3, 3), padding="same"))
    nn.add(Activation("relu"))
    nn.add(Conv2D(para["filter.0"], (3, 3)))
    nn.add(Activation("relu"))
    nn.add(MaxPooling2D(pool_size=(2, 2)))
    nn.add(Dropout(0.25))

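    # Classifier head; the dense layer width is the second hyperparameter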
    nn.add(Flatten())
    nn.add(Dense(para["layer.0"]))
    nn.add(Activation("relu"))
    nn.add(Dropout(0.5))
    nn.add(Dense(10))
    nn.add(Activation("softmax"))

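    # Train for 25 epochs, then evaluate on the held-out test set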
    nn.compile(optimizer="adam", loss="categorical_crossentropy", metrics=["accuracy"])
    nn.fit(X_train, y_train, epochs=25, batch_size=128)

    _, score = nn.evaluate(x=X_test, y=y_test)

    return score

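# Search space: candidate filter counts and dense layer widths for the cnn objective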
search_config = {cnn: {"filter.0": [16, 32, 64, 128], "layer.0": range(100, 1000, 100)}}

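# Pass the training data to Hyperactive and run the search for 5 iterations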
opt = Hyperactive(X_train, y_train)
opt.search(search_config, n_iter=5)