Commit fd4ff80: one hidden layer

babenek committed Jan 17, 2025
1 parent df3698d
Showing 1 changed file with 17 additions and 7 deletions.
experiment/src/lstm_model.py (17 additions & 7 deletions)
@@ -32,13 +32,17 @@ def build(self, hp=None) -> Model:
             dropout_line = hp.Float('dropout_line', min_value=min_val, max_value=max_val, step=step_val)
             dropout_variable = hp.Float('dropout_variable', min_value=min_val, max_value=max_val, step=step_val)
             dropout_value = hp.Float('dropout_value', min_value=min_val, max_value=max_val, step=step_val)
-            dropout_dense = hp.Float('dropout_dense', min_value=min_val, max_value=max_val, step=step_val)
+            dropout_a = hp.Float('dropout_a', min_value=min_val, max_value=max_val, step=step_val)
+            dropout_b = hp.Float('dropout_b', min_value=min_val, max_value=max_val, step=step_val)
+            dropout_final = hp.Float('dropout_final', min_value=min_val, max_value=max_val, step=step_val)
         else:
             # found best values
             dropout_line = 0.33
             dropout_variable = 0.33
             dropout_value = 0.33
-            dropout_dense = 0.33
+            dropout_a = 0.33
+            dropout_b = 0.33
+            dropout_final = 0.33
 
         line_input = Input(shape=(None, self.line_shape[2]), name="line_input", dtype=self.d_type)
         line_lstm = LSTM(units=self.line_shape[1], dtype=self.d_type)
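
The hp.Float calls above use the keras_tuner HyperParameters API, so after this change the three dropout rates are tuned independently instead of sharing a single dropout_dense value. Below is a minimal, self-contained sketch of how such a build(hp) method is typically driven; the tuner wiring itself is not part of this diff, and the toy builder, random data, and trial counts are illustrative assumptions:

    import numpy as np
    import keras_tuner as kt
    from keras.layers import Dense, Dropout, Input
    from keras.models import Model

    def model_builder(hp):
        # mirrors the pattern in build(): each dropout rate is a tunable hyperparameter
        rate = hp.Float('dropout_final', min_value=0.1, max_value=0.5, step=0.05)
        inputs = Input(shape=(8,))
        x = Dense(16, activation='relu')(inputs)
        x = Dropout(rate)(x)
        outputs = Dense(1, activation='sigmoid')(x)
        model = Model(inputs, outputs)
        model.compile(optimizer='adam', loss='binary_crossentropy')
        return model

    # random search over the dropout rate on toy data
    tuner = kt.RandomSearch(model_builder, objective='val_loss', max_trials=3)
    x = np.random.rand(64, 8).astype('float32')
    y = np.random.randint(0, 2, size=(64, 1))
    tuner.search(x, y, epochs=2, validation_data=(x, y))
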
@@ -63,11 +67,17 @@ def build(self, hp=None) -> Model:
         # 3 bidirectional + features
         dense_units = 2 * MlValidator.MAX_LEN + 2 * 2 * ML_HUNK + self.feature_shape[1]
         # check after model compilation. Should match the combined size.
-        dense_a = Dense(units=dense_units, activation='relu', name="dense", dtype=self.d_type)
-        joined_layers = dense_a(joined_features)
-        dropout_layer = Dropout(dropout_dense, name="dense_dropout")(joined_layers)
-        dense_b = Dense(units=1, activation='sigmoid', name="prediction", dtype=self.d_type)
-        output = dense_b(dropout_layer)
+
+        # first hidden layer
+        dense_a = Dense(units=dense_units, activation='relu', name="a_dense", dtype=self.d_type)(joined_features)
+        dropout_dense_a = Dropout(dropout_a, name="a_dropout")(dense_a)
+
+        # second hidden layer
+        dense_b = Dense(units=dense_units, activation='relu', name="b_dense", dtype=self.d_type)(dropout_dense_a)
+        dropout_dense_b = Dropout(dropout_b, name="b_dropout")(dense_b)
+
+        dense_final = Dense(units=1, activation='sigmoid', name="prediction", dtype=self.d_type)(dropout_dense_b)
+        output = Dropout(dropout_final, name="final_dropout")(dense_final)
 
         metrics = [BinaryAccuracy(name="binary_accuracy"), Precision(name="precision"), Recall(name="recall")]
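
For readers skimming the diff, here is a minimal standalone sketch of the classifier head after this commit, assuming plain keras; the width of 160, the fixed 0.33 rates, and the fresh Input are illustrative placeholders, since in the repository the head consumes the concatenated LSTM and feature tensors (joined_features):

    from keras.layers import Dense, Dropout, Input
    from keras.models import Model

    dense_units = 160  # placeholder for 2 * MAX_LEN + 2 * 2 * ML_HUNK + feature width
    joined_features = Input(shape=(dense_units,), name="joined_features")

    # first hidden layer with its own dropout rate
    x = Dense(dense_units, activation='relu', name="a_dense")(joined_features)
    x = Dropout(0.33, name="a_dropout")(x)
    # second hidden layer, newly introduced by this commit
    x = Dense(dense_units, activation='relu', name="b_dense")(x)
    x = Dropout(0.33, name="b_dropout")(x)
    # sigmoid prediction followed by the commit's final dropout
    x = Dense(1, activation='sigmoid', name="prediction")(x)
    output = Dropout(0.33, name="final_dropout")(x)

    model = Model(inputs=joined_features, outputs=output)
    model.summary()

Since keras Dropout is active only during training, the trailing final_dropout leaves inference-time predictions untouched; it randomly zeroes the sigmoid output while fitting.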
