Passed: push to master (ee1e78...515b92) by Konstantinos, created 01:14

artificial_artwork.model_loader.load_vgg_model()   B

Complexity
    Conditions: 1

Size
    Total Lines: 127
    Code Lines: 45

Duplication
    Lines: 0
    Ratio: 0%

Importance
    Changes: 0

Metric    Value
cc        1
eloc      45
nop       2
dl        0
loc       127
rs        8.8
c         0
b         0
f         0

How to fix: Long Method

Small methods make your code easier to understand, particularly when combined with a good name. Moreover, if a method is small, finding a good name for it is usually much easier.

For example, if you find yourself adding comments to a method's body, that is usually a good sign that the commented part should be extracted into a new method; the comment then serves as a starting point for naming the new method.
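A minimal sketch of that comment-to-method step (the checkout function and bulk-discount rule below are invented for illustration and are unrelated to this repository):

# Before: a comment explains what the block does inside a longer method.
def checkout_before(prices, quantity):
    total = sum(prices)
    # apply a 10% bulk discount for large orders
    if quantity > 100:
        total = total * 0.9
    return total


# After: the commented block becomes a named helper; the comment turns into the name.
def apply_bulk_discount(total, quantity):
    if quantity > 100:
        total = total * 0.9
    return total


def checkout_after(prices, quantity):
    total = sum(prices)
    return apply_bulk_discount(total, quantity)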

Commonly applied refactorings include Extract Method, Replace Temp with Query, and Decompose Conditional.

### Part of this code is due to the MatConvNet team and is used to load the parameters of the pretrained VGG19 model in the notebook ###

import numpy as np
import scipy.io
import tensorflow as tf


def load_vgg_model(path, config):
    """
    Returns a model for the purpose of 'painting' the picture.
    Takes only the convolution layer weights and wrap using the TensorFlow
    Conv2d, Relu and AveragePooling layer. VGG actually uses maxpool but
    the paper indicates that using AveragePooling yields better results.
    The last few fully connected layers are not used.
    Here is the detailed configuration of the VGG model:
        0 is conv1_1 (3, 3, 3, 64)
        1 is relu
        2 is conv1_2 (3, 3, 64, 64)
        3 is relu
        4 is maxpool
        5 is conv2_1 (3, 3, 64, 128)
        6 is relu
        7 is conv2_2 (3, 3, 128, 128)
        8 is relu
        9 is maxpool
        10 is conv3_1 (3, 3, 128, 256)
        11 is relu
        12 is conv3_2 (3, 3, 256, 256)
        13 is relu
        14 is conv3_3 (3, 3, 256, 256)
        15 is relu
        16 is conv3_4 (3, 3, 256, 256)
        17 is relu
        18 is maxpool
        19 is conv4_1 (3, 3, 256, 512)
        20 is relu
        21 is conv4_2 (3, 3, 512, 512)
        22 is relu
        23 is conv4_3 (3, 3, 512, 512)
        24 is relu
        25 is conv4_4 (3, 3, 512, 512)
        26 is relu
        27 is maxpool
        28 is conv5_1 (3, 3, 512, 512)
        29 is relu
        30 is conv5_2 (3, 3, 512, 512)
        31 is relu
        32 is conv5_3 (3, 3, 512, 512)
        33 is relu
        34 is conv5_4 (3, 3, 512, 512)
        35 is relu
        36 is maxpool
        37 is fullyconnected (7, 7, 512, 4096)
        38 is relu
        39 is fullyconnected (1, 1, 4096, 4096)
        40 is relu
        41 is fullyconnected (1, 1, 4096, 1000)
        42 is softmax
    """

    vgg = scipy.io.loadmat(path)

    vgg_layers = vgg['layers']

    def _weights(layer, expected_layer_name):
        """
        Return the weights and bias from the VGG model for a given layer.
        """
        wb = vgg_layers[0][layer][0][0][2]
        W = wb[0][0]
        b = wb[0][1]
        layer_name = vgg_layers[0][layer][0][0][0][0]
        assert layer_name == expected_layer_name
        return W, b

    def _relu(conv2d_layer):
        """
        Return the RELU function wrapped over a TensorFlow layer. Expects a
        Conv2d layer input.
        """
        return tf.nn.relu(conv2d_layer)

    def _conv2d(prev_layer, layer, layer_name):
        """
        Return the Conv2D layer using the weights, biases from the VGG
        model at 'layer'.
        """
        W, b = _weights(layer, layer_name)
        W = tf.constant(W)
        b = tf.constant(np.reshape(b, (b.size)))
        # return tf.nn.conv2d(prev_layer, filter=W, strides=[1, 1, 1, 1], padding='SAME') + b
        return tf.compat.v1.nn.conv2d(prev_layer, filter=W, strides=[1, 1, 1, 1], padding='SAME') + b

    def _conv2d_relu(prev_layer, layer, layer_name):
        """
        Return the Conv2D + RELU layer using the weights, biases from the VGG
        model at 'layer'.
        """
        return _relu(_conv2d(prev_layer, layer, layer_name))

    def _avgpool(prev_layer):
        """
        Return the AveragePooling layer.
        """
        return tf.nn.avg_pool(prev_layer, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')

    # Constructs the graph model.
    graph = {}
    # graph['input']   = tf.Variable(np.zeros((1, CONFIG.IMAGE_HEIGHT, CONFIG.IMAGE_WIDTH, CONFIG.COLOR_CHANNELS)), dtype = 'float32')
    graph['input']   = tf.Variable(np.zeros((1, config.image_height, config.image_width, config.color_channels)), dtype = 'float32')
    graph['conv1_1']  = _conv2d_relu(graph['input'], 0, 'conv1_1')
    graph['conv1_2']  = _conv2d_relu(graph['conv1_1'], 2, 'conv1_2')
    graph['avgpool1'] = _avgpool(graph['conv1_2'])
    graph['conv2_1']  = _conv2d_relu(graph['avgpool1'], 5, 'conv2_1')
    graph['conv2_2']  = _conv2d_relu(graph['conv2_1'], 7, 'conv2_2')
    graph['avgpool2'] = _avgpool(graph['conv2_2'])
    graph['conv3_1']  = _conv2d_relu(graph['avgpool2'], 10, 'conv3_1')
    graph['conv3_2']  = _conv2d_relu(graph['conv3_1'], 12, 'conv3_2')
    graph['conv3_3']  = _conv2d_relu(graph['conv3_2'], 14, 'conv3_3')
    graph['conv3_4']  = _conv2d_relu(graph['conv3_3'], 16, 'conv3_4')
    graph['avgpool3'] = _avgpool(graph['conv3_4'])
    graph['conv4_1']  = _conv2d_relu(graph['avgpool3'], 19, 'conv4_1')
    graph['conv4_2']  = _conv2d_relu(graph['conv4_1'], 21, 'conv4_2')
    graph['conv4_3']  = _conv2d_relu(graph['conv4_2'], 23, 'conv4_3')
    graph['conv4_4']  = _conv2d_relu(graph['conv4_3'], 25, 'conv4_4')
    graph['avgpool4'] = _avgpool(graph['conv4_4'])
    graph['conv5_1']  = _conv2d_relu(graph['avgpool4'], 28, 'conv5_1')
    graph['conv5_2']  = _conv2d_relu(graph['conv5_1'], 30, 'conv5_2')
    graph['conv5_3']  = _conv2d_relu(graph['conv5_2'], 32, 'conv5_3')
    graph['conv5_4']  = _conv2d_relu(graph['conv5_3'], 34, 'conv5_4')
    graph['avgpool5'] = _avgpool(graph['conv5_4'])

    return graph
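As a hedged sketch of how Extract Method could shorten this particular function (the names _LAYER_SPEC and _build_graph below are illustrative and do not exist in the repository): the twenty-one hand-written graph assignments can be driven by a small layer specification, leaving load_vgg_model with just the weight loading, the small helpers, and a single call.

# Hypothetical sketch: drive graph construction from a layer spec instead of
# repeating one assignment per layer. _conv2d_relu and _avgpool are passed in
# as callables with the signatures they already have in load_vgg_model.
_LAYER_SPEC = [
    ('conv1_1', 0), ('conv1_2', 2), ('avgpool1', None),
    ('conv2_1', 5), ('conv2_2', 7), ('avgpool2', None),
    ('conv3_1', 10), ('conv3_2', 12), ('conv3_3', 14), ('conv3_4', 16), ('avgpool3', None),
    ('conv4_1', 19), ('conv4_2', 21), ('conv4_3', 23), ('conv4_4', 25), ('avgpool4', None),
    ('conv5_1', 28), ('conv5_2', 30), ('conv5_3', 32), ('conv5_4', 34), ('avgpool5', None),
]


def _build_graph(input_tensor, conv2d_relu, avgpool):
    """Chain conv+relu and average-pooling layers according to _LAYER_SPEC."""
    graph = {'input': input_tensor}
    prev = input_tensor
    for name, vgg_index in _LAYER_SPEC:
        if vgg_index is None:
            # Pooling layers carry no VGG weights, so no layer index is needed.
            prev = avgpool(prev)
        else:
            prev = conv2d_relu(prev, vgg_index, name)
        graph[name] = prev
    return graph

With helpers like these, load_vgg_model would end with something like return _build_graph(input_var, _conv2d_relu, _avgpool), and the VGG layer indices would live in one place instead of being repeated through the method body.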