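"""Prediction CLI for the trained climate and biome models.

Subcommands (exposed through python-fire at the bottom of this file):
  map         draw a biome map predicted by the biome model, optionally with a temperature change applied
  temp        print predicted vs. actual seasonal temperatures for one batch
  precip      print predicted vs. actual seasonal precipitation for one batch
  end-to-end  chain the temperature, precipitation and biome models and draw the resulting map
"""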
import fire
import numpy as np
import pandas as pd

# the wildcard import below is expected to provide the helpers used here:
# SEASONS, MAX_YEAR, logger, chunker, normalize_ndarray, denormalize
# and dataframe_to_dataset_temp
from utils import *
#from nn import compile_b
from constants import INPUTS
from model import Model
from draw import draw
from train import A_params


def predicted_map(B, change=0, path=None):
    year = MAX_YEAR - 1

    df = pd.read_pickle('data.p')

    logger.info('temperature change of %s', change)

    inputs = list(INPUTS)

    for season in SEASONS:
        inputs += [
            'temp_{}_{}'.format(season, year),
            'precip_{}_{}'.format(season, year)
        ]

    frame = df[inputs + ['longitude']].copy()
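
    # apply the hypothetical temperature change to every seasonal temperature column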
    for season in SEASONS:
        frame.loc[:, 'temp_{}_{}'.format(season, year)] += change

    print(frame.head())
    frame_cp = frame.copy()

    columns = ['latitude', 'longitude', 'biome_num']
    new_data = pd.DataFrame(columns=columns)
    nframe = pd.DataFrame(columns=frame.columns, data=normalize_ndarray(frame.to_numpy()))
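
    # predict biome classes batch by batch; a trailing partial batch is skipped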
    for i, (chunk, chunk_original) in enumerate(zip(chunker(nframe, B.batch_size), chunker(frame_cp, B.batch_size))):
        if chunk.shape[0] < B.batch_size:
            continue
        input_data = chunk.loc[:, inputs].values
        out = B.predict_class(input_data)

        f = pd.DataFrame({
            'longitude': chunk_original.loc[:, 'longitude'],
            'latitude': chunk_original.loc[:, 'latitude'],
            'biome_num': out
        }, columns=columns)
        new_data = pd.concat([new_data, f])

    draw(new_data, path=path)


def predicted_map_cmd(checkpoint='checkpoints/b.h5', change=0, path=None):
    B = Model('b', epochs=1)
    B.prepare_for_use()
    B.restore(checkpoint)
    predicted_map(B, change=change, path=path)


def predicted_temps(A, year=2000):
    columns = INPUTS

    df = pd.read_pickle('data.p')

    inputs = df[INPUTS].copy()

    all_temps = ['temp_{}_{}'.format(season, year) for season in SEASONS]
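    # note: np.mean over the whole block yields a single scalar, broadcast to every row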
    inputs.loc[:, 'mean_temp'] = np.mean(df[all_temps].values)

    inputs = inputs.to_numpy()
    inputs = normalize_ndarray(inputs)
    print(inputs[0:A.batch_size])

    out_columns = all_temps # + all_precips
    print(out_columns)

    out = A.predict(inputs)
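    # denormalize the predictions back to the original units and compare the first batch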
    actual_output = df[out_columns][0:A.batch_size]
    model_output = pd.DataFrame(data=denormalize(out, df[out_columns].to_numpy()), columns=out_columns)[0:A.batch_size]
    print(actual_output)
    print(model_output)


def predicted_precips(A, year=2000):
    columns = INPUTS

    df = pd.read_pickle('data.p')

    inputs = df[INPUTS].copy()

    all_precips = ['precip_{}_{}'.format(season, year) for season in SEASONS]
    inputs.loc[:, 'mean_precip'] = np.mean(df[all_precips].values)

    inputs = inputs.to_numpy()
    inputs = normalize_ndarray(inputs)
    print(inputs[0:A.batch_size])

    out_columns = all_precips
    print(out_columns)

    out = A.predict(inputs)
    actual_output = df[out_columns][0:A.batch_size]
    model_output = pd.DataFrame(data=denormalize(out, df[out_columns].to_numpy()), columns=out_columns)[0:A.batch_size]
    print(actual_output)
    print(model_output)


def predicted_temps_cmd(checkpoint='checkpoints/temp.h5', year=2000):
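    # rebuild the model with the first hyper-parameter choice from the train-time grid search (A_params)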
    batch_size = A_params['batch_size']['grid_search'][0]
    layers = A_params['layers']['grid_search'][0]
    optimizer = A_params['optimizer']['grid_search'][0](A_params['lr']['grid_search'][0])

    Temp = Model('temp', epochs=1)
    Temp.prepare_for_use(
        batch_size=batch_size,
        layers=layers,
        dataset_fn=dataframe_to_dataset_temp,
        optimizer=optimizer,
        out_activation=None,
        loss='mse',
        metrics=['mae']
    )
    Temp.restore(checkpoint)
    predicted_temps(Temp, year=year)


def predicted_precips_cmd(checkpoint='checkpoints/precip.h5', year=2000):
    batch_size = A_params['batch_size']['grid_search'][0]
    layers = A_params['layers']['grid_search'][0]
    optimizer = A_params['optimizer']['grid_search'][0](A_params['lr']['grid_search'][0])

    Precip = Model('precip', epochs=1)
    Precip.prepare_for_use(
        batch_size=batch_size,
        layers=layers,
        dataset_fn=dataframe_to_dataset_temp,  # note: reuses the temperature dataset function
        optimizer=optimizer,
        out_activation=None,
        loss='mse',
        metrics=['mae']
    )
    Precip.restore(checkpoint)
    predicted_precips(Precip, year=year)


def predict_end_to_end(Temp, Precip, Biomes, year=2000):
    columns = INPUTS

    df = pd.read_pickle('data.p')

    inputs = df[INPUTS].copy()

    all_temps = ['temp_{}_{}'.format(season, year) for season in SEASONS]
    inputs.loc[:, 'mean_temp'] = np.mean(df[all_temps].values)
    print(inputs['mean_temp'])

    inputs = inputs.to_numpy()
    inputs = normalize_ndarray(inputs)
    out_columns = all_temps
    out = Temp.predict(inputs)
    temp_output = pd.DataFrame(data=denormalize(out, df[out_columns].to_numpy()), columns=out_columns)

    inputs = df[INPUTS].copy()

    all_precips = ['precip_{}_{}'.format(season, year) for season in SEASONS]
    inputs.loc[:, 'mean_precip'] = np.mean(df[all_precips].values)
    print(inputs['mean_precip'])

    inputs = inputs.to_numpy()
    inputs = normalize_ndarray(inputs)
    out_columns = all_precips
    out = Precip.predict(inputs)

    precip_output = pd.DataFrame(data=denormalize(out, df[out_columns].to_numpy()), columns=out_columns)
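
    # assemble the biome model's inputs, substituting in the predicted climate columns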
    inputs = list(INPUTS)

    frame = df[inputs + ['longitude']].copy()

    for season in SEASONS:
        tc = 'temp_{}_{}'.format(season, year)
        pc = 'precip_{}_{}'.format(season, year)
        frame.loc[:, tc] = temp_output[tc]
        frame.loc[:, pc] = precip_output[pc]

    frame.loc[:, 'latitude'] = df['latitude']

    frame_cp = frame.copy()

    columns = ['latitude', 'longitude', 'biome_num']
    new_data = pd.DataFrame(columns=columns)
    nframe = pd.DataFrame(columns=frame.columns, data=normalize_ndarray(frame.to_numpy()))
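
    # the biome model also expects the seasonal temp/precip columns as inputs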
    for season in SEASONS:
        inputs += [
            'temp_{}_{}'.format(season, year),
            'precip_{}_{}'.format(season, year)
        ]

    for i, (chunk, chunk_original) in enumerate(zip(chunker(nframe, Biomes.batch_size), chunker(frame_cp, Biomes.batch_size))):
        if chunk.shape[0] < Biomes.batch_size:
            continue
        input_data = chunk.loc[:, inputs].values
        out = Biomes.predict_class(input_data)

        f = pd.DataFrame({
            'longitude': chunk_original.loc[:, 'longitude'],
            'latitude': chunk_original.loc[:, 'latitude'],
            'biome_num': out
        }, columns=columns)
        new_data = pd.concat([new_data, f])

    print(new_data)
    draw(new_data)


def predict_end_to_end_cmd(checkpoint_temp='checkpoints/temp.h5', checkpoint_precip='checkpoints/precip.h5', checkpoint_biomes='checkpoints/b.h5', year=2000):
    batch_size = A_params['batch_size']['grid_search'][0]
    layers = A_params['layers']['grid_search'][0]
    optimizer = A_params['optimizer']['grid_search'][0](A_params['lr']['grid_search'][0])

    Temp = Model('temp', epochs=1)
    Temp.prepare_for_use(
        batch_size=batch_size,
        layers=layers,
        dataset_fn=dataframe_to_dataset_temp,
        optimizer=optimizer,
        out_activation=None,
        loss='mse',
        metrics=['mae']
    )
    Temp.restore(checkpoint_temp)

    Precip = Model('precip', epochs=1)
    Precip.prepare_for_use(
        batch_size=batch_size,
        layers=layers,
        dataset_fn=dataframe_to_dataset_temp,
        optimizer=optimizer,
        out_activation=None,
        loss='mse',
        metrics=['mae']
    )
    Precip.restore(checkpoint_precip)

    Biomes = Model('b', epochs=1)
    Biomes.prepare_for_use()
    Biomes.restore(checkpoint_biomes)

    predict_end_to_end(Temp=Temp, Precip=Precip, Biomes=Biomes, year=year)
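
# example invocations (assuming this file is saved as predict.py):
#   python predict.py map --change=2 --path=map.png
#   python predict.py temp --year=2000
#   python predict.py precip --year=2000
#   python predict.py end-to-end --year=2000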


if __name__ == "__main__":
    fire.Fire({ 'map': predicted_map_cmd, 'temp': predicted_temps_cmd, 'precip': predicted_precips_cmd, 'end-to-end': predict_end_to_end_cmd })