forked from sebasvega95/neural-style-transfer
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathlayer_utils.py
More file actions
60 lines (48 loc) · 1.88 KB
/
layer_utils.py
File metadata and controls
60 lines (48 loc) · 1.88 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
import tensorflow as tf
def ConvBnAct(filters, kernel_size=(3, 3), stride=1, activation='relu', name=''):
    """Build a conv -> batch-norm -> activation stage as a callable.

    Args:
        filters: Number of output channels for the convolution.
        kernel_size: Spatial size of the convolution kernel.
        stride: Stride applied by the convolution.
        activation: Name of the Keras activation applied last.
        name: Prefix used for every layer name in the stage.

    Returns:
        A function that applies the three layers to an input tensor
        and returns the resulting tensor.
    """
    def forward(inputs):
        conv = tf.keras.layers.Conv2D(
            filters, kernel_size, strides=stride, padding='same',
            name=name + '_conv')
        bn = tf.keras.layers.BatchNormalization(name=name + '_batchnorm')
        act = tf.keras.layers.Activation(activation, name=name + '_' + activation)
        return act(bn(conv(inputs)))
    return forward
def DeconvBnAct(filters, kernel_size=(3, 3), stride=1, activation='relu', name=''):
    """Build a transposed-conv -> batch-norm -> activation stage as a callable.

    Args:
        filters: Number of output channels for the transposed convolution.
        kernel_size: Spatial size of the convolution kernel.
        stride: Stride (upsampling factor) of the transposed convolution.
        activation: Name of the Keras activation applied last.
        name: Prefix used for every layer name in the stage.

    Returns:
        A function that applies the three layers to an input tensor
        and returns the resulting tensor.
    """
    def forward(inputs):
        deconv = tf.keras.layers.Conv2DTranspose(
            filters, kernel_size, strides=stride, padding='same',
            name=name + '_conv')
        bn = tf.keras.layers.BatchNormalization(name=name + '_batchnorm')
        act = tf.keras.layers.Activation(activation, name=name + '_' + activation)
        return act(bn(deconv(inputs)))
    return forward
def ResidualBlock(filters, kernel_size=(3, 3), stride=1, first=False, name=''):
    """Build a pre-activation residual block as a callable.

    Layout: (BN -> ReLU ->) conv1 -> BN -> ReLU -> conv2, added to a
    shortcut of the input. Only ``conv1`` applies ``stride``; ``conv2``
    keeps the spatial size, matching the standard ResNet layout.

    Args:
        filters: Number of output channels of both convolutions.
        kernel_size: Spatial size of both convolution kernels.
        stride: Stride of the first convolution (downsampling factor).
        first: When True, skip the leading BN -> ReLU pair (used when the
            preceding stage already ends in BN/activation).
        name: Prefix used for every layer name in the block.

    Returns:
        A function that applies the block to an input tensor and returns
        the resulting tensor.
    """
    def forward(x):
        x_shortcut = x
        # don't repeat bn->relu since we just did it
        if not first:
            x = tf.keras.layers.BatchNormalization(name=name + '_batchnorm1')(x)
            x = tf.keras.layers.Activation('relu', name=name + '_relu1')(x)
        x = tf.keras.layers.Conv2D(
            filters,
            kernel_size,
            strides=stride,
            padding='same',
            name=name + '_conv1')(x)
        x = tf.keras.layers.BatchNormalization(name=name + '_batchnorm2')(x)
        x = tf.keras.layers.Activation('relu', name=name + '_relu2')(x)
        x = tf.keras.layers.Conv2D(
            filters,
            kernel_size,
            padding='same',
            name=name + '_conv2')(x)
        # Bug fix: if the block downsamples (stride != 1) or changes the
        # channel count, the raw shortcut no longer matches the main path
        # and Add raises a shape-mismatch error at graph-build time.
        # Project the shortcut with a 1x1 strided conv (ResNet projection
        # shortcut). For stride == 1 with matching channels the graph is
        # unchanged from the original.
        # NOTE(review): assumes the input's channel dim is statically known
        # (x_shortcut.shape[-1] is not None) — true for typical functional
        # models with a fixed channel count; confirm against callers.
        if stride != 1 or x_shortcut.shape[-1] != filters:
            x_shortcut = tf.keras.layers.Conv2D(
                filters,
                (1, 1),
                strides=stride,
                padding='same',
                name=name + '_conv_shortcut')(x_shortcut)
        x = tf.keras.layers.Add(name=name + '_shortcut')([x, x_shortcut])
        return x
    return forward