fix(regression): prediction for temps model

Author: Mahdi Dibaiee
Date:   2019-05-02 10:51:29 +04:30
parent b192531a2a
commit cbe8e7dd20
4 changed files with 69 additions and 13 deletions


@@ -24,6 +24,7 @@ DEFAULT_BUFFER_SIZE=500
 DEFAULT_OUT_ACTIVATION = tf.nn.softmax
 DEFAULT_LOSS = 'sparse_categorical_crossentropy'
 DEFAULT_OPTIMIZER = tf.keras.optimizers.Adam(lr=0.001)
+DEFAULT_METRICS = ['accuracy']

 class Model():
     def __init__(self, name, epochs=1):
@@ -84,7 +85,7 @@ class Model():
             keras.layers.Dense(self.output_size, activation=out_activation, **params)
         ])

-    def compile(self, loss=DEFAULT_LOSS, metrics=['accuracy'], optimizer=DEFAULT_OPTIMIZER):
+    def compile(self, loss=DEFAULT_LOSS, metrics=DEFAULT_METRICS, optimizer=DEFAULT_OPTIMIZER):
         logger.debug('Model loss function: %s', loss)
         logger.debug('Model optimizer: %s', optimizer)
         logger.debug('Model metrics: %s', metrics)
@@ -136,13 +137,16 @@ class Model():
         return out

-    def predict(self, a):
+    def predict_class(self, a):
         return np.argmax(self.model.predict(a), axis=1)

-    def prepare_for_use(self, df=None, batch_size=DEFAULT_BUFFER_SIZE, layers=DEFAULT_LAYERS, out_activation=DEFAULT_OUT_ACTIVATION, loss=DEFAULT_LOSS, optimizer=DEFAULT_OPTIMIZER, dataset_fn=dataframe_to_dataset_biomes):
+    def predict(self, a):
+        return self.model.predict(a)
+
+    def prepare_for_use(self, df=None, batch_size=DEFAULT_BUFFER_SIZE, layers=DEFAULT_LAYERS, out_activation=DEFAULT_OUT_ACTIVATION, loss=DEFAULT_LOSS, optimizer=DEFAULT_OPTIMIZER, dataset_fn=dataframe_to_dataset_biomes, metrics=DEFAULT_METRICS):
         if df is None:
             df = pd.read_pickle('data.p')

         self.prepare_dataset(df, dataset_fn, batch_size=batch_size)
         self.create_model(layers=layers, out_activation=out_activation)
-        self.compile(loss=loss, optimizer=optimizer)
+        self.compile(loss=loss, optimizer=optimizer, metrics=metrics)
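Note: the change above splits class prediction from raw prediction, since the temps model (A) is a regression and taking an argmax over its outputs makes no sense. A minimal sketch of the difference, using made-up numbers rather than anything from this repo:

    import numpy as np

    # Hypothetical softmax output from self.model.predict() for two samples.
    probs = np.array([[0.1, 0.7, 0.2],
                      [0.6, 0.3, 0.1]])

    # predict_class(): collapse probabilities to a class index (biomes model B).
    print(np.argmax(probs, axis=1))   # -> [1 0]

    # predict(): return the raw outputs unchanged (temps/precip model A),
    # where each column is a predicted value, not a probability.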


@@ -6,6 +6,7 @@ from utils import *
 from constants import INPUTS
 from model import Model
 from draw import draw
+from train import A_params

 def predicted_map(B, change=0, path=None):
     year = MAX_YEAR - 1
@@ -36,7 +37,7 @@ def predicted_map(B, change=0, path=None):
         if chunk.shape[0] < B.batch_size:
             continue

         input_data = chunk.loc[:, inputs].values
-        out = B.predict(input_data)
+        out = B.predict_class(input_data)
         f = pd.DataFrame({
             'longitude': chunk_original.loc[:, 'longitude'],
@@ -53,6 +54,54 @@ def predicted_map_cmd(checkpoint='checkpoints/save.h5', change=0, path=None):
     B.restore(checkpoint)

     predicted_map(B, change=change, path=path)

+def predicted_temps(A, year=2000):
+    columns = INPUTS
+    df = pd.read_pickle('data.p')
+
+    print(columns)
+    # print(df[0:A.batch_size])
+    inputs = df[INPUTS]
+
+    all_temps = ['temp_{}_{}'.format(season, year) for season in SEASONS]
+    all_precips = ['precip_{}_{}'.format(season, year) for season in SEASONS]
+
+    inputs.loc[:, 'mean_temp'] = np.mean(df[all_temps].values)
+    inputs.loc[:, 'mean_precip'] = np.mean(df[all_precips].values)
+
+    inputs = inputs.to_numpy()
+    inputs = normalize_ndarray(inputs)
+
+    print(inputs[0:A.batch_size])
+
+    out_columns = all_temps + all_precips
+    print(out_columns)
+
+    out = A.predict(inputs)
+    # print(out.shape, out[0].shape)
+    # print(out)
+    # print(out[0])
+
+    print(normalize_ndarray(df[out_columns])[0:A.batch_size])
+    print(pd.DataFrame(data=out, columns=out_columns))
+    # print(df[out_columns][0:A.batch_size])
+    # print(pd.DataFrame(data=denormalize(out, df[out_columns].to_numpy()), columns=out_columns))
+
+def predicted_temps_cmd(checkpoint='checkpoints/a.h5', year=2000):
+    batch_size = A_params['batch_size']['grid_search'][0]
+    layers = A_params['layers']['grid_search'][0]
+    optimizer = A_params['optimizer']['grid_search'][0](A_params['lr']['grid_search'][0])
+
+    A = Model('a', epochs=1)
+    A.prepare_for_use(
+        batch_size=batch_size,
+        layers=layers,
+        dataset_fn=dataframe_to_dataset_temp_precip,
+        optimizer=optimizer,
+        out_activation=None,
+        loss='mse',
+        metrics=['mae']
+    )
+    A.restore(checkpoint)
+
+    predicted_temps(A, year=year)
+
 if __name__ == "__main__":
-    fire.Fire(predicted_map_cmd)
+    fire.Fire({ 'map': predicted_map_cmd, 'temp': predicted_temps_cmd })
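The fire.Fire dict above turns the two commands into CLI subcommands. A hedged usage sketch; the script's filename is not shown in this view, so predict.py is assumed:

    # Assumed invocations via python-fire (filename hypothetical):
    #   python predict.py map  --checkpoint checkpoints/save.h5 --change 0
    #   python predict.py temp --checkpoint checkpoints/a.h5 --year 2000
    # `temp` rebuilds model A from the first grid-search values in train.A_params,
    # restores the checkpoint, and prints normalized targets alongside predictions.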


@@ -46,10 +46,11 @@ class TuneB(tune.Trainable):
         return self.model.restore(path)

 A_params = {
-    'batch_size': tune.grid_search([128]),
+    'batch_size': tune.grid_search([256]),
     'layers': tune.grid_search([[64, 64]]),
-    'lr': tune.grid_search([1e-4]),
+    'lr': tune.grid_search([3e-4]),
     'optimizer': tune.grid_search([tf.keras.optimizers.Adam]),
+    #'optimizer': tune.grid_search([tf.keras.optimizers.RMSprop])
 }

 class TuneA(tune.Trainable):
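Since ray.tune's grid_search simply wraps its candidate list in a dict ({'grid_search': [...]}), the single-element lists above effectively pin each hyperparameter, and the new predicted_temps_cmd reads them back by indexing ['grid_search'][0]. A tiny illustration:

    from ray import tune

    A_params = {
        'batch_size': tune.grid_search([256]),
        'lr': tune.grid_search([3e-4]),
    }

    # tune.grid_search([...]) returns {'grid_search': [...]}, so the first (and
    # only) candidate can be read back directly, as predicted_temps_cmd does:
    batch_size = A_params['batch_size']['grid_search'][0]   # 256
    lr = A_params['lr']['grid_search'][0]                    # 0.0003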
@@ -66,18 +67,21 @@ class TuneA(tune.Trainable):
             batch_size=config['batch_size'],
             layers=config['layers'],
             optimizer=optimizer,
+            out_activation=None,
             dataset_fn=dataframe_to_dataset_temp_precip,
-            loss='mse'
+            loss='mse',
+            metrics=['mae']
         )

     def _train(self):
         logs = self.model.train(self.config)
+        print(logs.history)
         metrics = {
-            'mean_accuracy': logs.history['acc'][0],
             'loss': logs.history['loss'][0],
-            'val_accuracy': logs.history['val_acc'][0],
+            'mae': logs.history['mean_absolute_error'][0],
             'val_loss': logs.history['val_loss'][0],
+            'val_mae': logs.history['val_mean_absolute_error'][0],
         }

         return metrics
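One caveat on the metric keys: with metrics=['mae'], the History keys are 'mean_absolute_error' / 'val_mean_absolute_error' on the TF 1.x Keras this code appears to target, but 'mae' / 'val_mae' on later releases. A small defensive helper, not part of this commit, that would tolerate either:

    def history_metric(logs, *candidates):
        # Return the first matching metric series from a keras History object.
        for key in candidates:
            if key in logs.history:
                return logs.history[key]
        raise KeyError('none of {} in {}'.format(candidates, sorted(logs.history)))

    # e.g. mae = history_metric(logs, 'mean_absolute_error', 'mae')[0]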


@@ -94,7 +94,6 @@ def dataframe_to_dataset_temp_precip(df):
     all_precips = ['precip_{}_{}'.format(season, year) for season in SEASONS]

     local_df.loc[:, 'mean_temp'] = np.mean(df[all_temps].values)
     local_df.loc[:, 'mean_precip'] = np.mean(df[all_precips].values)

-    output = []
     output = all_temps + all_precips