Mahdi Dibaiee 2019-02-28 13:34:47 +03:30
parent f268e72244
commit c28fc0850f
5 changed files with 38 additions and 43 deletions

.gitignore (2 changes)

@@ -1,3 +1,5 @@
+maps
+logs
 checkpoints.*
 geodata
 *.p

Binary file not shown.


@ -1,10 +1,11 @@
from shapely.geometry import Point, MultiPoint from shapely.geometry import Point, MultiPoint
from shapely.ops import cascaded_union from shapely.ops import cascaded_union
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
import pandas as pd import pandas as pd
import cartopy.crs as ccrs import cartopy.crs as ccrs
def draw(df): def draw(df, path=None):
biomes = {} biomes = {}
biome_numbers = df['biome_num'].unique() biome_numbers = df['biome_num'].unique()
# biome_names = df['biome_name'].unique() # biome_names = df['biome_name'].unique()
@ -48,6 +49,9 @@ def draw(df):
# print(artist.get_label()) # print(artist.get_label())
# ax.legend(artists, biome_names) # ax.legend(artists, biome_names)
if path:
plt.savefig(path)
else:
plt.show() plt.show()
if __name__ == "__main__": if __name__ == "__main__":
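With the new path argument, draw can render to disk instead of blocking on a window. A minimal usage sketch (the module name draw.py is an assumption; only the function itself is visible in this diff):

    import pandas as pd
    from draw import draw  # assumed module name for the file above

    df = pd.read_pickle('data_final.p')
    draw(df)                       # no path: interactive plt.show(), as before
    draw(df, path='maps/all.png')  # saved to disk; maps/ is newly git-ignored

Saving rather than showing is what lets the prediction loop further down write one map per scenario without user interaction.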

nn.py (46 changes)

@@ -22,7 +22,7 @@ np.random.seed(RANDOM_SEED)
 df = pd.read_pickle('data_final.p')

 class Model():
-    def __init__(self, name, batch_size=100, shuffle_buffer_size=500, learning_rate=0.001, epochs=1):
+    def __init__(self, name, batch_size=64, shuffle_buffer_size=500, learning_rate=0.001, epochs=1):
         self.name = name
         self.path = "checkpoints/{}.hdf5".format(name)
@@ -55,13 +55,13 @@ class Model():
             keras.layers.Dense(self.output_size)
         ])

-    def compile(self):
-        self.model.load_weights(self.path)
-        optimizer = tf.train.AdamOptimizer(self.learning_rate)
-        self.model.compile(loss='mse',
+    def compile(self, loss='mse', metrics=['accuracy'], optimizer=tf.train.AdamOptimizer):
+        # self.model.load_weights(self.path)
+        optimizer = optimizer(self.learning_rate)
+        self.model.compile(loss=loss,
                            optimizer=optimizer,
-                           metrics=['mae', 'accuracy'])
+                           metrics=metrics)

     def evaluate(self):
         return self.model.evaluate(
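compile is now parameterized: the caller picks the loss and metrics, and the optimizer is passed as a class that gets instantiated with the model's stored learning rate. A hedged sketch of the call sites this enables (TF 1.x optimizer classes assumed, matching the tf.train.AdamOptimizer default above):

    B.compile(loss='sparse_categorical_crossentropy')       # integer biome labels
    A.compile(metrics=['accuracy', 'mae'])                  # keep MAE for the regression model
    B.compile(optimizer=tf.train.GradientDescentOptimizer)  # any class taking (learning_rate) works

Note that load_weights is commented out, so compile no longer requires an existing checkpoint; fresh runs start from random weights.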
@@ -74,39 +74,45 @@ class Model():
     def train(self):
         self.model.summary()

-        checkpoint = keras.callbacks.ModelCheckpoint(filepath, monitor='acc', verbose=1, mode='max')
+        checkpoint = keras.callbacks.ModelCheckpoint(self.path, monitor='acc', verbose=1, mode='max')
+        tensorboard = keras.callbacks.TensorBoard(log_dir='./logs')
+        # map_callback = keras.callbacks.LambdaCallback(on_epoch_end=self.map_callback)

         self.model.fit(
             self.training,
             batch_size=self.batch_size,
             epochs=self.epochs,
             steps_per_epoch=int(self.dataset_size / self.batch_size),
-            callbacks=[checkpoint],
+            callbacks=[checkpoint, tensorboard],
             verbose=1
         )

     def predict(self, a):
         return np.argmax(self.model.predict(a), axis=1)

-A = Model('a', batch_size=100, shuffle_buffer_size=500, learning_rate=0.001, epochs=2)
-B = Model('b', batch_size=100, shuffle_buffer_size=500, learning_rate=0.001, epochs=850)
+A = Model('a', epochs=2)
+B = Model('b', learning_rate=0.001, batch_size=100, epochs=100)

-if __name__ == "__main__":
-    B.prepare_dataset(df, dataframe_to_dataset_biomes)
-    B.create_model([64, 128])
-    B.compile()
+def compile_b():
+    B.prepare_dataset(df, dataframe_to_dataset_biomes)
+    B.create_model([64, 128])
+    B.compile(loss='sparse_categorical_crossentropy')
+
+def compile_a():
+    A.prepare_dataset(df, dataframe_to_dataset_temp_precip)
+    A.create_model([4])
+    A.compile(metrics=['accuracy', 'mae'])
+
+if __name__ == "__main__":
+    compile_b()
+    B.train()

     # for inp, out in B.test.take(1).make_one_shot_iterator():
     #     print(inp, out)
     # print(np.unique(nums))
     # print(np.unique(predictions))
-    print('loss: {}, evaluation: {}'.format(*B.evaluate()))
-
-    A.prepare_dataset(df, dataframe_to_dataset_temp_precip)
-    A.create_model([4])
-    A.compile()
-    # A.train()
+    # print('loss: {}, evaluation: {}'.format(*B.evaluate()))
+    compile_a()
+    A.train()
+    # B.train()
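The __main__ block now trains both models in one run, and compile_b()/compile_a() rebuild a compiled model without training it, so other scripts can reuse it. A hedged sketch of restoring a trained checkpoint elsewhere (only the names shown in nn.py above are assumed):

    from nn import B, compile_b

    compile_b()                   # rebuild and compile; weights are no longer loaded on compile
    B.model.load_weights(B.path)  # restore checkpoints/b.hdf5 explicitly
    print(B.evaluate())           # loss plus the metrics passed to compile

The new TensorBoard callback writes event files to ./logs (also newly git-ignored); they can be inspected with tensorboard --logdir ./logs.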


@@ -14,11 +14,9 @@ df = pd.read_pickle('data_final.p')
 latitude = np.array(df.index.get_level_values(1))
 df.loc[:, 'latitude'] = pd.Series(latitude, index=df.index)

-B.prepare_dataset(df, dataframe_to_dataset_biomes)
-B.create_model([64, 128])
-B.compile()
+compile_b()

-for change in range(-5, 6):
+for change in range(0, 1):
     print('TEMPERATURE MODIFICATION OF {}'.format(change))

     inputs = ['elevation', 'distance_to_water']
@ -34,15 +32,8 @@ for change in range(-5, 6):
frame = df[inputs] frame = df[inputs]
print(frame.head()) print(frame.head())
# for season in SEASONS: for season in SEASONS:
# frame.loc[:, 'temp_{}_{}'.format(season, year)] += change frame.loc[:, 'temp_{}_{}'.format(season, year)] += change
# print(np.average(frame.loc[:, 'temp_winter_2016']))
# index = []
# for longitude in range(-179, 179):
# for latitude in range(-89, 89):
# index.append((longitude, latitude))
columns = ['biome_num'] columns = ['biome_num']
new_data = pd.DataFrame(columns=columns) new_data = pd.DataFrame(columns=columns)
@ -54,12 +45,4 @@ for change in range(-5, 6):
new_data = new_data.reindex(new_index) new_data = new_data.reindex(new_index)
new_data.loc[chunk.index.values, 'biome_num'] = out new_data.loc[chunk.index.values, 'biome_num'] = out
# print(new_data['biome_num'].unique())
draw(new_data) draw(new_data)
# columns = ['biome_num']
# indices = ['longitude', 'latitude']
# new_df = pd.DataFrame(index=indices, columns=columns)
# new_df =
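The sweep is narrowed to range(0, 1), a single zero offset, presumably while debugging, and the per-season temperature shift is now live code. A hedged sketch of restoring the full sweep and saving one map per offset via the new draw path argument (predict_for_change is a hypothetical helper wrapping the loop body above; draw.py is an assumed module name):

    from draw import draw  # assumed module name

    for change in range(-5, 6):
        new_data = predict_for_change(change)  # hypothetical: builds the biome_num frame as above
        draw(new_data, path='maps/temp_{:+d}.png'.format(change))

With path set, each scenario renders to maps/ without the blocking plt.show() the old code relied on.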