fix(nn): better normalization, weight initialization and activation

commit 865cc775ed
parent c28fc0850f
Author: Mahdi Dibaiee
Date:   2019-02-28 17:22:50 +03:30

3 changed files with 13 additions and 9 deletions

Binary file not shown.

nn.py

@@ -22,7 +22,7 @@ np.random.seed(RANDOM_SEED)
 
 df = pd.read_pickle('data_final.p')
 
 class Model():
-    def __init__(self, name, batch_size=64, shuffle_buffer_size=500, learning_rate=0.001, epochs=1):
+    def __init__(self, name, batch_size=16, shuffle_buffer_size=500, learning_rate=0.001, epochs=1):
         self.name = name
         self.path = "checkpoints/{}.hdf5".format(name)
@@ -46,13 +46,17 @@ class Model():
         self.training = training
         self.test = test
 
-    def create_model(self, layers):
+    def create_model(self, layers, out_activation):
+        params = {
+            'kernel_initializer': 'lecun_uniform',
+            'bias_initializer': 'zeros',
+        }
         self.model = keras.Sequential([
-            keras.layers.Dense(layers[0], activation=tf.nn.relu, input_shape=[self.features])
+            keras.layers.Dense(layers[0], activation=tf.nn.elu, input_shape=[self.features], **params)
         ] + [
-            keras.layers.Dense(n, activation=tf.nn.relu) for n in layers[1:]
+            keras.layers.Dense(n, activation=tf.nn.elu, **params) for n in layers[1:]
         ] + [
-            keras.layers.Dense(self.output_size)
+            keras.layers.Dense(self.output_size, activation=out_activation, **params)
         ])
 
     def compile(self, loss='mse', metrics=['accuracy'], optimizer=tf.train.AdamOptimizer):
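
The hunk above factors the shared Dense kwargs into a params dict, replacing Keras's default glorot_uniform kernel initializer with lecun_uniform, swapping relu for elu in the hidden layers, and making the output activation configurable per model. A minimal standalone sketch of the same pattern (tf.keras assumed; the build_model helper name, feature count, and layer sizes below are illustrative, not taken from the commit):

import tensorflow as tf
from tensorflow import keras

def build_model(features, layers, output_size, out_activation):
    # Shared kwargs for every Dense layer, mirroring the commit's `params`:
    # LeCun-uniform weight initialization and zero biases.
    params = {
        'kernel_initializer': 'lecun_uniform',
        'bias_initializer': 'zeros',
    }
    return keras.Sequential([
        # First hidden layer fixes the input shape; elu replaces relu.
        keras.layers.Dense(layers[0], activation=tf.nn.elu,
                           input_shape=[features], **params)
    ] + [
        keras.layers.Dense(n, activation=tf.nn.elu, **params) for n in layers[1:]
    ] + [
        # Output activation is now a parameter (e.g. softmax for classification).
        keras.layers.Dense(output_size, activation=out_activation, **params)
    ])

# Hypothetical sizes for illustration only.
model = build_model(features=7, layers=[64, 128], output_size=14,
                    out_activation=tf.nn.softmax)
model.summary()

Since keras.Sequential takes a plain list of layers, the three concatenated list literals build one network: elu hidden layers followed by a single output layer whose activation varies by task.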
@@ -91,16 +95,16 @@ class Model():
         return np.argmax(self.model.predict(a), axis=1)
 
 A = Model('a', epochs=2)
-B = Model('b', learning_rate=0.001, batch_size=100, epochs=100)
+B = Model('b', learning_rate=0.005, epochs=100)
 
 def compile_b():
     B.prepare_dataset(df, dataframe_to_dataset_biomes)
-    B.create_model([64, 128])
+    B.create_model([64, 128], tf.nn.softmax)
     B.compile(loss='sparse_categorical_crossentropy')
 
 def compile_a():
     A.prepare_dataset(df, dataframe_to_dataset_temp_precip)
-    A.create_model([(4, tf.nn.elu)])
+    A.create_model([(4, tf.nn.elu)])
     A.compile(metrics=['accuracy', 'mae'])
 
 if __name__ == "__main__":
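
One caveat in this hunk: compile_a passes a (size, activation) tuple inside layers, but the updated create_model signature takes plain layer sizes plus a separate out_activation argument, so this call would raise a TypeError (missing out_activation) as written. A call consistent with the new signature, assuming an elu output was intended, would be:

# Hypothetical correction, not part of the commit: pass the layer size and
# the output activation as the two arguments create_model now expects.
A.create_model([4], tf.nn.elu)
A.compile(metrics=['accuracy', 'mae'])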

@@ -7,7 +7,7 @@ inputs = ['elevation', 'distance_to_water']
 output = 'biome_num'
 
 def normalize(v):
-    return (v - np.min(v)) / (np.max(v) - np.min(v))
+    return (v - np.mean(v)) / np.std(v)
 
 def normalize_ndarray(ar):
     tr = np.transpose(ar)
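
For context, this change swaps min-max scaling, which maps each feature into [0, 1], for z-score standardization, which centers each feature at mean 0 with unit variance; zero-centered inputs tend to play better with elu-style activations. A small NumPy sketch of the two behaviours (the example array is illustrative):

import numpy as np

def normalize_minmax(v):
    # Old behaviour: rescale into [0, 1].
    return (v - np.min(v)) / (np.max(v) - np.min(v))

def normalize_zscore(v):
    # New behaviour: subtract the mean, divide by the standard deviation.
    return (v - np.mean(v)) / np.std(v)

v = np.array([1.0, 2.0, 4.0, 8.0])
print(normalize_minmax(v))  # [0.     0.1429 0.4286 1.    ]
print(normalize_zscore(v))  # centered near 0 with std 1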