Upload cnn.py
cnn.py
ADDED
@@ -0,0 +1,83 @@
from tensorflow.keras import Model as KerasModel
from tensorflow.keras import layers
from tensorflow.keras.regularizers import l2

from modellib.losses import weighted_binary_crossentropy


class Baseline(KerasModel):
    def __init__(self, input_shape):
        super(Baseline, self).__init__()
        self._input_shape = input_shape

        # First bottleneck unit
        self.bn1 = layers.BatchNormalization()
        self.activation1 = layers.Activation('selu')
        self.conv1 = layers.Conv2D(32, kernel_size=(5, 5), padding='same', kernel_regularizer=l2(0.02))

        self.bn2 = layers.BatchNormalization()
        self.activation2 = layers.Activation('selu')
        self.conv2 = layers.Conv2D(128, kernel_size=(3, 3), padding='same', kernel_regularizer=l2(0.02))

        # Residual merge of the bottleneck output with the input
        # (requires the input channel count to match conv2's 128 filters)
        self.merge = layers.Add()

        # Corner detection
        self.bn3 = layers.BatchNormalization()
        self.padding = layers.ZeroPadding2D(padding=(0, 3))
        self.conv3 = layers.Conv2D(32, kernel_size=(21, 7), padding='valid', activation='tanh')
        self.conv4 = layers.Conv2D(128, kernel_size=(1, 3), padding='same', activation='tanh')

        # Fully-connected predictor
        self.flat = layers.Flatten()
        self.classify = layers.Dense(512, activation='sigmoid')
        self.dropout = layers.Dropout(0.1)
        # Output dimension matches the second axis of the input shape
        self.result = layers.Dense(input_shape[1], activation='sigmoid')

    def build(self, input_shape):
        super(Baseline, self).build(input_shape)
        self.built = True

    def call(self, inputs, training=False):
        # First bottleneck unit
        x = self.bn1(inputs, training=training)
        x = self.activation1(x)
        x = self.conv1(x)

        x = self.bn2(x, training=training)
        x = self.activation2(x)
        x = self.conv2(x)

        # Residual connection around the bottleneck
        merged = self.merge([inputs, x])

        # Corner detection
        x = self.bn3(merged, training=training)
        x = self.padding(x)
        x = self.conv3(x)
        x = self.conv4(x)

        # Fully-connected predictor
        x = self.flat(x)
        x = self.classify(x)
        x = self.dropout(x, training=training)
        x = self.result(x)

        return x

    def build_graph(self):
        # Wrap the subclassed model in a functional model so that
        # summary() and plot_model() can inspect the layer graph.
        x = layers.Input(shape=self._input_shape)
        return KerasModel(inputs=[x], outputs=self.call(x))


# Create the model and compile it with the custom weighted loss function.
def create_compile_model_custom_loss(input_shape, optimizer, class_weights, metrics=None):
    model = Baseline(input_shape)
    model.build(input_shape=(None,) + input_shape)

    # Compile with the custom loss function, closing over the class weights.
    model.compile(
        loss=lambda y_true, y_pred: weighted_binary_crossentropy(y_true, y_pred, class_weights),
        optimizer=optimizer,
        metrics=metrics
    )

    return model
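The loss itself is imported from modellib.losses, which is not part of this upload. As a rough sketch only, a weighted binary cross-entropy of the kind this compile call expects could look like the following, assuming class_weights is a (weight_for_class_0, weight_for_class_1) pair; the actual modellib.losses implementation may differ:

import tensorflow as tf

def weighted_binary_crossentropy(y_true, y_pred, class_weights):
    # Hypothetical sketch only; the real modellib.losses version may differ.
    # class_weights is assumed to be (weight_for_class_0, weight_for_class_1).
    y_true = tf.cast(y_true, y_pred.dtype)
    eps = tf.keras.backend.epsilon()
    y_pred = tf.clip_by_value(y_pred, eps, 1.0 - eps)
    # Element-wise cross-entropy, with each term scaled by its class weight.
    per_element = -(class_weights[1] * y_true * tf.math.log(y_pred)
                    + class_weights[0] * (1.0 - y_true) * tf.math.log(1.0 - y_pred))
    # Average over the output dimension to get one loss value per sample.
    return tf.reduce_mean(per_element, axis=-1)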
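A usage sketch follows. The input shape, optimizer, and class weights are illustrative assumptions, not values from this upload; the shape is chosen so that the residual add sees 128 input channels (matching conv2) and the (21, 7) valid convolution sees a height of at least 21.

import numpy as np
import tensorflow as tf

from cnn import create_compile_model_custom_loss

# Illustrative values only: (height, width, channels) chosen to satisfy the
# residual add (128 channels) and the (21, 7) valid convolution (height >= 21).
input_shape = (21, 64, 128)
optimizer = tf.keras.optimizers.Adam(learning_rate=1e-3)
class_weights = (1.0, 5.0)  # up-weight the positive class

model = create_compile_model_custom_loss(input_shape, optimizer, class_weights,
                                          metrics=['binary_accuracy'])

# Inspect the layer graph via the functional wrapper.
model.build_graph().summary()

# Smoke test on random data; the target has input_shape[1] binary labels per sample.
x = np.random.rand(8, *input_shape).astype('float32')
y = np.random.randint(0, 2, size=(8, input_shape[1])).astype('float32')
model.fit(x, y, epochs=1, batch_size=4)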