This repository has been archived by the owner on May 7, 2021. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 13
/
Copy path: df.py
93 lines (78 loc) · 2.92 KB
/
df.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras.models import Model
from keras.layers.convolutional import Conv1D, MaxPooling1D
from keras.layers import Dense, ELU, Activation, Dropout, Flatten
from keras.layers.normalization import BatchNormalization
from keras.optimizers import Adamax
from keras import Input
def get_model(config):
    """Build the Deep Fingerprinting (DF) CNN and return it to run_model.py.

    Architecture: four Conv1D blocks (32/64/128/256 filters, kernel size 8,
    'same' padding), each block being two conv+BatchNorm+activation stacks
    (ELU in block 1, ReLU in blocks 2-4) followed by MaxPooling1D(pool_size=8,
    strides=4) and 10% dropout; then Flatten and two 512-unit dense layers
    (dropout 0.7 and 0.5) feeding a softmax classification head.

    Args:
        config (dict): Deserialized JSON config file (see config.json).
            Keys read: 'num_mon_sites', 'num_unmon_sites_test',
            'num_unmon_sites_train', 'seq_length'.

    Returns:
        tuple: (compiled keras ``Model``, list of callbacks — empty here).
    """
    num_mon_sites = config['num_mon_sites']
    num_unmon_sites = (config['num_unmon_sites_test']
                       + config['num_unmon_sites_train'])
    seq_length = config['seq_length']

    dir_input = Input(shape=(seq_length, 1,), name='dir_input')

    # Convolutional feature extractor. Block 1 uses ELU, later blocks ReLU
    # (matches the original DF design); every block halves-ish the sequence
    # via MaxPooling1D(8, strides=4) and applies light dropout.
    x = dir_input
    for block_idx, filters in enumerate((32, 64, 128, 256)):
        for _ in range(2):
            x = Conv1D(filters, 8, strides=1, padding='same')(x)
            x = BatchNormalization()(x)
            if block_idx == 0:
                x = ELU(alpha=1.0)(x)
            else:
                x = Activation('relu')(x)
        x = MaxPooling1D(pool_size=8, strides=4)(x)
        x = Dropout(0.1)(x)

    x = Flatten()(x)

    # Fully-connected head with heavier dropout (0.7 then 0.5) to curb
    # overfitting in the dense layers.
    for drop_rate in (0.7, 0.5):
        x = Dense(512)(x)
        x = BatchNormalization()(x)
        x = Activation('relu')(x)
        x = Dropout(drop_rate)(x)

    # One extra output class represents the unmonitored ("open world")
    # category whenever any unmonitored sites are configured.
    output_classes = num_mon_sites if num_unmon_sites == 0 else num_mon_sites + 1
    model_output = Dense(units=output_classes, activation='softmax',
                         name='model_output')(x)

    model = Model(inputs=dir_input, outputs=model_output)
    model.compile(loss='categorical_crossentropy',
                  optimizer=Adamax(0.002),
                  metrics=['accuracy'])

    callbacks = []
    return model, callbacks