diff --git a/checkpoints/b.hdf5 b/checkpoints/b.hdf5
index 82c3d9c..86e478f 100644
Binary files a/checkpoints/b.hdf5 and b/checkpoints/b.hdf5 differ
diff --git a/nn.py b/nn.py
index 7012ea7..4618307 100644
--- a/nn.py
+++ b/nn.py
@@ -22,7 +22,7 @@ np.random.seed(RANDOM_SEED)
 df = pd.read_pickle('data_final.p')
 
 class Model():
-    def __init__(self, name, batch_size=64, shuffle_buffer_size=500, learning_rate=0.001, epochs=1):
+    def __init__(self, name, batch_size=16, shuffle_buffer_size=500, learning_rate=0.001, epochs=1):
         self.name = name
         self.path = "checkpoints/{}.hdf5".format(name)
@@ -46,13 +46,17 @@ class Model():
         self.training = training
         self.test = test
 
-    def create_model(self, layers):
+    def create_model(self, layers, out_activation):
+        params = {
+            'kernel_initializer': 'lecun_uniform',
+            'bias_initializer': 'zeros',
+        }
         self.model = keras.Sequential([
-            keras.layers.Dense(layers[0], activation=tf.nn.relu, input_shape=[self.features])
+            keras.layers.Dense(layers[0], activation=tf.nn.elu, input_shape=[self.features], **params)
         ] + [
-            keras.layers.Dense(n, activation=tf.nn.relu) for n in layers[1:]
+            keras.layers.Dense(n, activation=tf.nn.elu, **params) for n in layers[1:]
         ] + [
-            keras.layers.Dense(self.output_size)
+            keras.layers.Dense(self.output_size, activation=out_activation, **params)
         ])
 
     def compile(self, loss='mse', metrics=['accuracy'], optimizer=tf.train.AdamOptimizer):
@@ -91,16 +95,16 @@ class Model():
         return np.argmax(self.model.predict(a), axis=1)
 
 A = Model('a', epochs=2)
-B = Model('b', learning_rate=0.001, batch_size=100, epochs=100)
+B = Model('b', learning_rate=0.005, epochs=100)
 
 def compile_b():
     B.prepare_dataset(df, dataframe_to_dataset_biomes)
-    B.create_model([64, 128])
+    B.create_model([64, 128], tf.nn.softmax)
     B.compile(loss='sparse_categorical_crossentropy')
 
 def compile_a():
     A.prepare_dataset(df, dataframe_to_dataset_temp_precip)
-    A.create_model([4])
+    A.create_model([4], tf.nn.elu)
     A.compile(metrics=['accuracy', 'mae'])
 
 if __name__ == "__main__":
diff --git a/utils.py b/utils.py
index 2b4edfe..b2804f2 100644
--- a/utils.py
+++ b/utils.py
@@ -7,7 +7,7 @@ inputs = ['elevation', 'distance_to_water']
 output = 'biome_num'
 
 def normalize(v):
-    return (v - np.min(v)) / (np.max(v) - np.min(v))
+    return (v - np.mean(v)) / np.std(v)
 
 def normalize_ndarray(ar):
     tr = np.transpose(ar)
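
Note on the utils.py change: `normalize` switches from min-max scaling to z-score standardization, so features are now centered at zero with unit variance instead of being squeezed into [0, 1]. A minimal numpy sketch of the two behaviours, using a made-up sample vector (illustrative only, not part of the patch):

    import numpy as np

    v = np.array([2.0, 4.0, 6.0, 8.0])

    # old behaviour: min-max scaling into the [0, 1] range
    old = (v - np.min(v)) / (np.max(v) - np.min(v))  # [0.    0.333 0.667 1.   ]

    # new behaviour: z-score standardization (zero mean, unit variance)
    new = (v - np.mean(v)) / np.std(v)               # [-1.342 -0.447  0.447  1.342]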