wide_residual_kernel.py
from keras.models import Model
from keras.layers import (Input, Add, Activation, Dropout, Flatten, Dense,
                          Conv2D, AveragePooling2D, BatchNormalization)
from keras import backend as K
def initial_conv(input):
    # Initial 5x5 convolution; no bias, since the BN layers that follow
    # provide the shift term.
    x = Conv2D(16, (5, 5), padding='same', kernel_initializer='he_normal',
               use_bias=False)(input)
    return x
def expand_conv(init, base, k, strides=(1, 1), dropout=0.0):
    channel_axis = 1 if K.image_data_format() == "channels_first" else -1

    # Pre-activation: BN + ReLU before the convolutions.
    input = BatchNormalization(axis=channel_axis, momentum=0.9, epsilon=1e-5,
                               gamma_initializer='uniform')(init)
    input = Activation('relu')(input)

    x = Conv2D(base * k, (3, 3), padding='same', strides=strides,
               kernel_initializer='he_normal', use_bias=False)(input)

    x = BatchNormalization(axis=channel_axis, momentum=0.9, epsilon=1e-5,
                           gamma_initializer='uniform')(x)
    x = Activation('relu')(x)

    if dropout > 0.0:
        x = Dropout(dropout)(x)

    x = Conv2D(base * k, (3, 3), padding='same',
               kernel_initializer='he_normal', use_bias=False)(x)

    # 1x1 projection shortcut so the skip branch matches the new width and stride.
    skip = Conv2D(base * k, (1, 1), padding='same', strides=strides,
                  kernel_initializer='he_normal', use_bias=False)(input)

    m = Add()([x, skip])
    return m
def conv_block(input, k=1, dropout=0.0, size=16):
    init = input
    channel_axis = 1 if K.image_data_format() == "channels_first" else -1

    x = BatchNormalization(axis=channel_axis, momentum=0.9, epsilon=1e-5,
                           gamma_initializer='uniform')(input)
    x = Activation('relu')(x)
    x = Conv2D(size * k, (3, 3), padding='same',
               kernel_initializer='he_normal', use_bias=False)(x)

    x = BatchNormalization(axis=channel_axis, momentum=0.9, epsilon=1e-5,
                           gamma_initializer='uniform')(x)
    x = Activation('relu')(x)

    if dropout > 0.0:
        x = Dropout(dropout)(x)

    x = Conv2D(size * k, (3, 3), padding='same',
               kernel_initializer='he_normal', use_bias=False)(x)

    # Identity shortcut: input and output have the same shape here.
    m = Add()([init, x])
    return m
def create_wide_residual_network(input_dim, nb_classes=100, N=2, k=1,
                                 dropout=0.0, verbose=1):
    """
    Creates a Wide Residual Network with the specified parameters.

    :param input_dim: Shape tuple of the input, e.g. (32, 32, 3)
    :param nb_classes: Number of output classes
    :param N: Number of residual blocks per group. For a network of
              total depth n, compute N = (n - 4) / 6.
              Example : For a depth of 16, N = (16 - 4) / 6 = 2
              Example2: For a depth of 28, N = (28 - 4) / 6 = 4
              Example3: For a depth of 40, N = (40 - 4) / 6 = 6
    :param k: Widening factor of the network
    :param dropout: Adds dropout between convolutions if greater than 0.0
    :param verbose: If truthy, print a description of the created WRN
    :return: A Keras Model instance
    """
    channel_axis = 1 if K.image_data_format() == "channels_first" else -1

    ip = Input(shape=input_dim)

    x = initial_conv(ip)
    nb_conv = 4

    # Group 1: 16k filters.
    x = expand_conv(x, 16, k, dropout=dropout)
    nb_conv += 2

    for i in range(N - 1):
        x = conv_block(x, k, dropout, size=16)
        nb_conv += 2

    # Group 2: 32k filters, spatial downsampling by 2.
    x = expand_conv(x, 32, k, strides=(2, 2), dropout=dropout)
    nb_conv += 2

    for i in range(N - 1):
        x = conv_block(x, k, dropout, size=32)
        nb_conv += 2

    # Group 3: 64k filters, spatial downsampling by 2.
    x = expand_conv(x, 64, k, strides=(2, 2), dropout=dropout)
    nb_conv += 2

    for i in range(N - 1):
        x = conv_block(x, k, dropout, size=64)
        nb_conv += 2

    x = BatchNormalization(axis=channel_axis, momentum=0.9, epsilon=1e-5,
                           gamma_initializer='uniform')(x)
    x = Activation('relu')(x)

    x = AveragePooling2D((8, 8))(x)
    x = Flatten()(x)

    x = Dense(nb_classes, activation='softmax')(x)

    model = Model(ip, x)

    if verbose:
        print("Wide Residual Network-%d-%d created." % (nb_conv, k))
    return model
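

# A small helper, not in the original file, that inverts the docstring's
# depth formula N = (n - 4) / 6; useful as a sanity check against the
# nb_conv counter accumulated above.
def wrn_depth(N):
    """Total convolutional depth n of a WRN built with the given N (n = 6 * N + 4)."""
    return 6 * N + 4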
if __name__ == "__main__":
    from keras.utils import plot_model

    init = (32, 32, 3)

    # N=2, k=2 builds a WRN-16-2 (depth = 6 * 2 + 4 = 16).
    wrn_16_2 = create_wide_residual_network(init, nb_classes=10, N=2, k=2,
                                            dropout=0.0)

    wrn_16_2.summary()

    plot_model(wrn_16_2, "WRN-16-2.png", show_shapes=True,
               show_layer_names=True)
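
    # Illustrative compile step (an assumption, not part of the original
    # script): a categorical-crossentropy setup as typically used for
    # CIFAR-10-style classification with one-hot labels.
    wrn_16_2.compile(optimizer='adam',
                     loss='categorical_crossentropy',
                     metrics=['accuracy'])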