diff --git a/ecg/featurize.py b/ecg/featurize.py
index a7e68ee3..0f128908 100644
--- a/ecg/featurize.py
+++ b/ecg/featurize.py
@@ -5,7 +5,6 @@
 import warnings
 import scipy.signal as scs
 
-
 class Normalizer(object):
     def __init__(self, strategy):
         self.scaler = None
diff --git a/ecg/network.py b/ecg/network.py
index 8062e3b1..c009ca12 100644
--- a/ecg/network.py
+++ b/ecg/network.py
@@ -1,6 +1,5 @@
 from keras import backend as K
 
-
 def _bn_relu(layer, dropout=0, **params):
     from keras.layers import BatchNormalization
     from keras.layers import Activation
@@ -13,7 +12,6 @@ def _bn_relu(layer, dropout=0, **params):
 
     return layer
 
-
 def add_conv_weight(
         layer,
         filter_length,
@@ -41,7 +39,6 @@ def add_conv_layers(layer, **params):
         layer = _bn_relu(layer, **params)
     return layer
 
-
 def resnet_block(
         layer,
         num_filters,
@@ -83,12 +80,10 @@ def zeropad_output_shape(input_shape):
     layer = merge([shortcut, layer], mode="sum")
     return layer
 
-
 def get_num_filters_at_index(index, num_start_filters, **params):
     return 2**int(index / params["conv_increase_channels_at"]) \
         * num_start_filters
 
-
 def add_resnet_layers(layer, **params):
     layer = add_conv_weight(
         layer,
@@ -109,14 +104,12 @@ def add_resnet_layers(layer, **params):
     layer = _bn_relu(layer, **params)
     return layer
 
-
 def add_output_layer(layer, **params):
     from keras.layers.core import Dense, Activation
     from keras.layers.wrappers import TimeDistributed
     layer = TimeDistributed(Dense(params["num_categories"]))(layer)
     return Activation('softmax')(layer)
 
-
 def add_compile(model, **params):
     if params["optimizer"] == "adam":
         from keras.optimizers import Adam
@@ -136,7 +129,6 @@ def add_compile(model, **params):
         optimizer=optimizer,
         metrics=['accuracy'])
 
-
 def build_network(**params):
     from keras.models import Model
     from keras.layers import Input
diff --git a/ecg/predict.py b/ecg/predict.py
index 9c6e574a..b0a34161 100644
--- a/ecg/predict.py
+++ b/ecg/predict.py
@@ -1,24 +1,18 @@
-from builtins import str
 import argparse
 import numpy as np
 import json
 import os
 import load
 import util
-from joblib import Memory
 import time
 import pickle
 
-memory = Memory(cachedir='./cache')
-
-@memory.cache
 def get_model_pred_probs(model_path, x):
     from keras.models import load_model
     model = load_model(model_path)
     probs = model.predict(x, verbose=1)
     return probs
 
-
 def get_ensemble_pred_probs(model_paths, x):
     print("Averaging " + str(len(model_paths)) + " model predictions...")
     all_model_probs = [get_model_pred_probs(model_path, x)
@@ -38,7 +32,6 @@ def get_folder_name(start_time):
         os.makedirs(folder_name)
     return folder_name
 
-
 def predict(args, train_params, test_params):
     x, gt, processor, _ = load.load_test(
         test_params,
@@ -56,14 +49,12 @@ def predict(args, train_params, test_params):
 
     save_predictions(folder_name, x, gt, probs, processor)
 
-
 def save_predictions(path, x, gt, probs, processor):
     np.save(path + '/x', x)
     np.save(path + '/gt', gt)
     np.save(path + '/probs', probs)
     pickle.dump(processor, open(path + '/processor', 'w+'))
 
-
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
     parser.add_argument("test_config_file", help="path to config file")
diff --git a/ecg/train.py b/ecg/train.py
index c114d79d..eede36bf 100644
--- a/ecg/train.py
+++ b/ecg/train.py
@@ -5,6 +5,7 @@
 from builtins import open
 from builtins import int
 from builtins import str
+
 import argparse
 import json
 import os
@@ -17,20 +18,17 @@
 
 MAX_EPOCHS = 100
 
-
 def get_folder_name(start_time, experiment_name):
     folder_name = FOLDER_TO_SAVE + experiment_name + '/' + start_time
     if not os.path.exists(folder_name):
         os.makedirs(folder_name)
     return folder_name
 
-
 def get_filename_for_saving(start_time, experiment_name):
     saved_filename = get_folder_name(start_time, experiment_name) + \
         "/{val_loss:.3f}-{val_acc:.3f}-{epoch:03d}-{loss:.3f}-{acc:.3f}.hdf5"
     return saved_filename
 
-
 def plot_model(model, start_time, experiment_name):
     from keras.utils.visualize_util import plot
     plot(
@@ -39,7 +37,6 @@ def plot_model(model, start_time, experiment_name):
         show_shapes=True,
         show_layer_names=False)
 
-
 def save_params(params, start_time, experiment_name):
     saving_filename = get_folder_name(start_time, experiment_name) + \
         "/params.json"
@@ -49,20 +46,17 @@ def save_params(params, start_time, experiment_name):
 
     with open(saving_filename, 'w') as outfile:
         outfile.write(save_str)
 
-
 def get_augment_fn(params):
     if params.get("amplitude_scale", False):
         return amplitude_scale
     else:
         return None
 
-
 def amplitude_scale(x):
     scales = np.random.uniform(low=0.25, high=2.0, size=(x.shape[0], 1, 1))
     return scales * x
 
-
 def data_generator(x, y, batch_size, augmenter=None):
     num_examples = x.shape[0]
     indices = np.arange(num_examples)
@@ -77,7 +71,6 @@ def data_generator(x, y, batch_size, augmenter=None):
                 x_mb = augmenter(x_mb)
             yield (x_mb, y_mb)
 
-
 def train(args, params):
     global FOLDER_TO_SAVE
 
@@ -88,7 +81,7 @@ def train(args, params):
             if "dropout" in key:
                 params[key] = 0
 
-    params["test_split_start"] = args.test_split_start 
+    params["test_split_start"] = args.test_split_start
 
     dl, processor = load.load_train(params)
 
@@ -162,7 +155,6 @@ def train(args, params):
         callbacks=[checkpointer, reduce_lr, stopping],
         verbose=args.verbose)
 
-
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
     parser.add_argument("config_file", help="path to config file")