feat(web): web server and basic dashboard

Author: Mahdi Dibaiee
Date:   2019-04-22 09:49:31 +04:30
parent 8d4010b5dc
commit e18fc7692b

14 changed files with 254 additions and 31 deletions


@@ -135,10 +135,10 @@ class Model():
    def predict(self, a):
        return np.argmax(self.model.predict(a), axis=1)

-   def prepare_for_use(self, df=None, batch_size=DEFAULT_BUFFER_SIZE, layers=DEFAULT_LAYERS, out_activation=DEFAULT_OUT_ACTIVATION, loss=DEFAULT_LOSS, optimizer=DEFAULT_OPTIMIZER):
+   def prepare_for_use(self, df=None, batch_size=DEFAULT_BUFFER_SIZE, layers=DEFAULT_LAYERS, out_activation=DEFAULT_OUT_ACTIVATION, loss=DEFAULT_LOSS, optimizer=DEFAULT_OPTIMIZER, dataset_fn=dataframe_to_dataset_biomes):
        if df is None:
            df = pd.read_pickle('data.p')

-       self.prepare_dataset(df, dataframe_to_dataset_biomes, batch_size=batch_size)
+       self.prepare_dataset(df, dataset_fn, batch_size=batch_size)
        self.create_model(layers=layers, out_activation=out_activation)
        self.compile(loss=loss, optimizer=optimizer)
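
The new dataset_fn parameter means Model is no longer hard-wired to the biome dataset builder. A minimal usage sketch, assuming the Model constructor and prepare_for_use are otherwise called as before (the 'temp' argument below is illustrative, not from this commit):

    from model import Model
    from utils import dataframe_to_dataset_temp_precip

    m = Model('temp')  # assumed constructor arguments; illustrative only
    # the default dataset_fn keeps the old biome behaviour; override it for temp/precip
    m.prepare_for_use(dataset_fn=dataframe_to_dataset_temp_precip)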

biomes/train_temp.py Normal file

@@ -0,0 +1,90 @@
import fire
import ray
import pandas as pd
import tensorflow as tf
import numpy as np
from tensorflow import keras

from utils import *
from model import Model
from constants import *

CHECKPOINT = 'checkpoints/temp.h5'
SEED = 1

np.random.seed(SEED)

df = pd.read_pickle('data.p')

# inputs/targets built by utils.dataframe_to_dataset_temp_precip
dataset_size, x_columns, y_columns, dataset = dataframe_to_dataset_temp_precip(df)

batch_size = 5
epochs = 500

def baseline_model():
    # small dense regression network: elu -> relu -> linear output, trained with MSE
    model = keras.models.Sequential()

    params = {
        'kernel_initializer': 'lecun_uniform',
        'bias_initializer': 'zeros',
    }

    model.add(keras.layers.Dense(x_columns, input_dim=x_columns, **params, activation='elu'))
    model.add(keras.layers.Dense(6, **params, activation='relu'))
    model.add(keras.layers.Dense(y_columns, **params))
    model.compile(loss='mse', optimizer='adam', metrics=['mae'])

    return model

model = baseline_model()
model.summary()

dataset = dataset.shuffle(500)

# 85/15 train/test split
TRAIN_SIZE = int(dataset_size * 0.85)
TEST_SIZE = dataset_size - TRAIN_SIZE
(training, test) = (dataset.take(TRAIN_SIZE),
                    dataset.skip(TRAIN_SIZE))

training_batched = training.batch(batch_size).repeat()
test_batched = test.batch(batch_size).repeat()

logger.debug('Model dataset info: size=%s, train=%s, test=%s', dataset_size, TRAIN_SIZE, TEST_SIZE)

# model.load_weights(CHECKPOINT)

def predict():
    columns = INPUTS
    YEAR = 2000

    print(columns)
    print(df[0:batch_size])

    inputs = df[columns].to_numpy()
    inputs = normalize_ndarray(inputs, df[columns].to_numpy())
    print(inputs[0:batch_size])

    out_columns = []
    for season in SEASONS:
        out_columns += ['temp_{}_{}'.format(season, YEAR), 'precip_{}_{}'.format(season, YEAR)]

    print(out_columns)

    # compare raw predictions, dataframe targets and denormalized predictions
    out = model.predict(inputs)
    print(out)
    print(df[out_columns][0:batch_size])
    print(denormalize(out, df[out_columns].to_numpy()))

def train():
    # TensorBoard logging plus checkpointing on validation loss
    tfb_callback = tf.keras.callbacks.TensorBoard(batch_size=batch_size, log_dir='temp_logs')
    checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(filepath=CHECKPOINT, monitor='val_loss')

    model.fit(training_batched,
              batch_size=batch_size,
              epochs=epochs,
              steps_per_epoch=int(TRAIN_SIZE / batch_size),
              validation_data=test_batched,
              validation_steps=int(TEST_SIZE / batch_size),
              callbacks=[tfb_callback, checkpoint_callback],
              verbose=1)

    model.save_weights(CHECKPOINT)

# train()

if __name__ == "__main__":
    fire.Fire({ 'predict': predict, 'train': train })
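
Because the entry point hands both functions to fire.Fire, the script is driven from the command line as two subcommands, for example:

    # train the temp/precip model (TensorBoard logs in temp_logs/, weights in checkpoints/temp.h5)
    python train_temp.py train

    # print sample inputs, predictions and denormalized targets
    python train_temp.py predict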


@@ -16,7 +16,13 @@ def normalize(v, o=None):
        o = v

    return (v - np.mean(o)) / np.std(o)

-def normalize_ndarray(ar, o=None):
+def denormalize(v, o=None):
+    if o is None:
+        o = v
+
+    return (v * np.std(o) + np.mean(o))
+
+def on_ndarray(ar, o=None, fn=None):
    if o is None:
        o = ar
@@ -24,11 +30,17 @@ def normalize_ndarray(ar, o=None):
    tr = np.transpose(ar)
    to = np.transpose(o)

    for i in range(tr.shape[0]):
-       tr[i] = normalize(tr[i], to[i])
+       tr[i] = fn(tr[i], to[i])

    # transpose back
    return np.transpose(tr)

+def normalize_ndarray(ar, o=None):
+    return on_ndarray(ar, o=o, fn=normalize)
+
+def denormalize_ndarray(ar, o=None):
+    return on_ndarray(ar, o=o, fn=denormalize)
+
def dataframe_to_dataset_biomes(df):
    rows = df.shape[0]
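
With the column-wise loop factored into on_ndarray, normalize_ndarray and denormalize_ndarray become thin wrappers over the same traversal. Note that on_ndarray writes through a transposed view, so it mutates its input array in place. A small round-trip sketch under these definitions (the toy array is illustrative):

    import numpy as np

    ar = np.array([[1.0, 10.0], [2.0, 20.0], [3.0, 30.0]])
    orig = ar.copy()  # keep untouched stats, since on_ndarray modifies its input

    norm = normalize_ndarray(ar)              # per-column z-scores
    back = denormalize_ndarray(norm, o=orig)  # undo using the original column stats
    assert np.allclose(back, orig)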
@@ -67,26 +79,27 @@ def dataframe_to_dataset_biomes(df):
def dataframe_to_dataset_temp_precip(df):
    rows = df.shape[0]

-   # elevation, distance_to_water, latitude
-   # season, year
+   # elevation, distance_to_water, latitude, mean_temp, mean_precip
    input_columns = 5

-   num_classes = 2
+   # (temp, precip) * 4 seasons
+   num_classes = 8

    tf_inputs = np.empty((0, input_columns))
    tf_output = np.empty((0, num_classes))

    for year in range(MIN_YEAR, MAX_YEAR + 1):
        local_inputs = list(INPUTS)
+       local_df = df[local_inputs]
+
+       all_temps = ['temp_{}_{}'.format(season, year) for season in SEASONS]
+       all_precips = ['precip_{}_{}'.format(season, year) for season in SEASONS]
+
+       local_df.loc[:, 'mean_temp'] = np.mean(df[all_temps].values)
+       local_df.loc[:, 'mean_precip'] = np.mean(df[all_precips].values)

-       output = []
-       for idx, season in enumerate(SEASONS):
-           season_index = idx / len(season)
-           local_df = df[local_inputs]
-           local_df.loc[:, 'season'] = pd.Series(np.repeat(season_index, rows), index=local_df.index)
-           local_df.loc[:, 'year'] = pd.Series(np.repeat(year, rows), index=local_df.index)
-
-           output = ['temp_{}_{}'.format(season, year), 'precip_{}_{}'.format(season, year)]
-
-           tf_inputs = np.concatenate((tf_inputs, local_df.values), axis=0)
-           tf_output = np.concatenate((tf_output, df[output].values), axis=0)
+       output = all_temps + all_precips
+
+       tf_inputs = np.concatenate((tf_inputs, local_df.values), axis=0)
+       tf_output = np.concatenate((tf_output, df[output].values), axis=0)

    tf_inputs = tf.cast(normalize_ndarray(tf_inputs), tf.float32)
    tf_output = tf.cast(tf_output, tf.float32)
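
The hunk is cut off at the casts, but train_temp.py above unpacks the return value as (dataset_size, x_columns, y_columns, dataset), so the function presumably finishes by wrapping the two tensors into a tf.data.Dataset. A sketch of that final step, as an assumption rather than the commit's exact code:

    # assumed continuation (not shown in this hunk)
    dataset = tf.data.Dataset.from_tensor_slices((tf_inputs, tf_output))
    return int(tf_inputs.shape[0]), input_columns, num_classes, dataset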