-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathutils.py
More file actions
55 lines (48 loc) · 1.91 KB
/
utils.py
File metadata and controls
55 lines (48 loc) · 1.91 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
from tensorflow.keras import models, layers, regularizers
from tensorflow.keras import backend as K
import tensorflow as tf
def conv_block(x, filter_size, size, dropout,num, batch_norm=False):
    """Two stacked Conv2D layers with optional batch norm, ReLU, and dropout.

    Args:
        x: input 4-D feature-map tensor (channels-last assumed, since
           BatchNormalization uses axis=3).
        filter_size: spatial kernel size (kernel is square:
           filter_size x filter_size).
        size: number of output filters for both convolutions.
        dropout: dropout rate; applied after the second activation only
           when > 0.
        num: integer suffix used to name the second convolution
           "conv<num>" so it can be retrieved by layer name later.
        batch_norm: when truthy, insert BatchNormalization after each
           convolution (before the activation).

    Returns:
        The output tensor of the block.
    """
    conv = layers.Conv2D(size, (filter_size, filter_size), padding="same")(x)
    # PEP 8: test truthiness directly rather than `is True`, which would
    # silently skip normalization for truthy non-bool values such as 1.
    if batch_norm:
        conv = layers.BatchNormalization(axis=3)(conv)
    conv = layers.Activation("relu")(conv)
    conv = layers.Conv2D(size, (filter_size, filter_size), padding="same",
                         name="conv" + str(num))(conv)
    if batch_norm:
        conv = layers.BatchNormalization(axis=3)(conv)
    conv = layers.Activation("relu")(conv)
    if dropout > 0:
        conv = layers.Dropout(dropout)(conv)
    return conv
def se_block(x,r):
    """Squeeze-and-Excitation block: channel-wise feature recalibration.

    Squeezes spatial dimensions with global average pooling, pushes the
    channel descriptor through a bottleneck MLP (reduction ratio r), and
    rescales the input feature map with the resulting sigmoid gates.

    Args:
        x: input 4-D feature-map tensor (batch, H, W, C assumed —
           consistent with the channels-last usage elsewhere in this file).
        r: reduction ratio for the bottleneck Dense layer.

    Returns:
        The input tensor rescaled channel-wise by the learned gates.
    """
    copy = x
    # GlobalAveragePooling2D already outputs (batch, channels); the
    # original code also built Flatten()(gap) but only used it for its
    # last-dim size, which is identical to gap's — dead code removed.
    gap = layers.GlobalAveragePooling2D()(x)
    channels = gap.shape[-1]
    dense = layers.Dense(channels // r, activation='relu')(gap)
    dense = layers.Dense(channels, activation='sigmoid')(dense)
    # NOTE(review): multiplying (batch, C) gates against a (batch, H, W, C)
    # map relies on implicit broadcasting; a Reshape((1, 1, C)) before the
    # multiply would make the intent explicit — confirm against TF version.
    m = layers.multiply([dense, copy])
    return m
def resb(x, filter_size, size, dropout,num, batch_norm=False):
    """Residual block: two Conv2D layers added to a 1x1-projected shortcut.

    Args:
        x: input 4-D feature-map tensor (channels-last assumed, since
           BatchNormalization uses axis=3).
        filter_size: spatial kernel size of the two main-path convolutions.
        size: number of filters in the two main-path convolutions.
        dropout: dropout rate; applied after each activation when > 0.
        num: accepted for signature parity with conv_block but unused here.
        batch_norm: when truthy, insert BatchNormalization after each
           convolution (before the activation).

    Returns:
        relu(main_path + shortcut).
    """
    # 1x1 projection of the shortcut.
    # NOTE(review): this emits a SINGLE channel while the main path emits
    # `size` channels, so Add() depends on channel broadcasting. A standard
    # residual block would use Conv2D(size, (1, 1)) — confirm this is
    # intentional before training.
    x_skip = layers.Conv2D(1, (1, 1), padding="same")(x)

    # Layer 1
    x = layers.Conv2D(size, (filter_size, filter_size), padding="same")(x)
    if batch_norm:
        x = layers.BatchNormalization(axis=3)(x)
    x = layers.Activation("relu")(x)
    if dropout > 0:
        x = layers.Dropout(dropout)(x)

    # Layer 2
    x = layers.Conv2D(size, (filter_size, filter_size), padding="same")(x)
    if batch_norm:
        x = layers.BatchNormalization(axis=3)(x)
    x = layers.Activation("relu")(x)
    if dropout > 0:
        x = layers.Dropout(dropout)(x)

    # Add residue and apply final activation.
    # (Leftover debug print(...) calls from the original were removed.)
    x = layers.Add()([x, x_skip])
    x = layers.Activation('relu')(x)
    return x