import fire
|
|
import numpy as np
|
|
|
|
from utils import *
|
|
#from nn import compile_b
|
|
from constants import INPUTS
|
|
from model import Model
|
|
from draw import draw
|
|
from train import A_params
|
|
|
|
def predicted_map(B, change=0, path=None):
    """Predict a biome map from climate data and render it.

    Loads the pickled climate dataframe, optionally shifts every seasonal
    temperature column by ``change`` degrees, runs model ``B`` over the
    normalized rows in full batches, and draws the predicted biomes.

    Parameters:
        B: trained Model exposing ``batch_size`` and ``predict_class()``.
        change: additive temperature offset applied to each seasonal
            temperature column (default 0, i.e. current climate).
        path: optional output path forwarded to ``draw()``.
    """
    year = MAX_YEAR - 1

    df = pd.read_pickle('data.p')

    logger.info('temperature change of %s', change)

    inputs = list(INPUTS)

    for season in SEASONS:
        inputs += [
            'temp_{}_{}'.format(season, year),
            'precip_{}_{}'.format(season, year)
        ]

    # Bug fix: 'latitude' is read from chunk_original below but was never
    # selected into the frame, which raised a KeyError at prediction time.
    # .copy() avoids SettingWithCopyWarning when mutating the temp columns.
    # NOTE(review): normalize_ndarray is assumed to normalize per-column, so
    # including latitude here does not affect the 'inputs' columns — confirm.
    frame = df[inputs + ['latitude', 'longitude']].copy()
    frame_cp = df[inputs + ['latitude', 'longitude']].copy()

    for season in SEASONS:
        frame.loc[:, 'temp_{}_{}'.format(season, year)] += change

    columns = ['latitude', 'longitude', 'biome_num']
    nframe = pd.DataFrame(columns=frame.columns,
                          data=normalize_ndarray(frame.to_numpy(), frame_cp.to_numpy()))

    chunks = []
    for chunk, chunk_original in zip(chunker(nframe, B.batch_size), chunker(frame, B.batch_size)):
        # The model only accepts full batches; skip the trailing remainder.
        if chunk.shape[0] < B.batch_size:
            continue
        input_data = chunk.loc[:, inputs].values
        out = B.predict_class(input_data)

        chunks.append(pd.DataFrame({
            'longitude': chunk_original.loc[:, 'longitude'],
            'latitude': chunk_original.loc[:, 'latitude'],
            'biome_num': out
        }, columns=columns))

    # DataFrame.append was removed in pandas 2.0, and appending in a loop is
    # quadratic — concatenate the collected per-batch frames once instead.
    new_data = pd.concat(chunks) if chunks else pd.DataFrame(columns=columns)

    draw(new_data, path=path)
|
|
|
|
def predicted_map_cmd(checkpoint='checkpoints/save.h5', change=0, path=None):
    """CLI entry point: restore model B from ``checkpoint`` and draw the
    predicted biome map (optionally with a temperature ``change`` applied)."""
    model = Model('b', epochs=1)
    model.prepare_for_use()
    model.restore(checkpoint)
    predicted_map(model, change=change, path=path)
|
|
|
|
def predicted_temps(A, year=2000):
    """Run model A to predict seasonal temperature/precipitation columns.

    Builds the model inputs from the pickled dataframe plus overall mean
    temperature and precipitation for ``year``, normalizes them, and prints
    the model's predictions next to the normalized ground truth for visual
    inspection.

    Parameters:
        A: trained Model exposing ``batch_size`` and ``predict()``.
        year: which year's seasonal columns to average and compare against.
    """
    columns = INPUTS

    df = pd.read_pickle('data.p')

    print(columns)

    # .copy() so adding the mean columns below mutates a real frame instead
    # of a view of df (avoids SettingWithCopyWarning / silent no-op).
    inputs = df[INPUTS].copy()

    all_temps = ['temp_{}_{}'.format(season, year) for season in SEASONS]
    all_precips = ['precip_{}_{}'.format(season, year) for season in SEASONS]
    # NOTE(review): np.mean over the full 2-D block yields ONE scalar that is
    # broadcast to every row — confirm a per-row mean (axis=1) wasn't intended.
    inputs.loc[:, 'mean_temp'] = np.mean(df[all_temps].values)
    inputs.loc[:, 'mean_precip'] = np.mean(df[all_precips].values)

    inputs = inputs.to_numpy()
    inputs = normalize_ndarray(inputs)
    print(inputs[0:A.batch_size])

    out_columns = all_temps + all_precips
    print(out_columns)

    out = A.predict(inputs)
    # Print normalized ground truth and raw predictions side by side.
    print(normalize_ndarray(df[out_columns])[0:A.batch_size])
    print(pd.DataFrame(data=out, columns=out_columns))
|
|
|
|
def predicted_temps_cmd(checkpoint='checkpoints/a.h5', year=2000):
    """CLI entry point: rebuild model A from its first grid-search
    hyperparameters, restore ``checkpoint``, and print predictions."""

    def first_candidate(key):
        # First grid-search value configured for this hyperparameter.
        return A_params[key]['grid_search'][0]

    batch_size = first_candidate('batch_size')
    layers = first_candidate('layers')
    optimizer = first_candidate('optimizer')(first_candidate('lr'))

    model = Model('a', epochs=1)
    model.prepare_for_use(
        batch_size=batch_size,
        layers=layers,
        dataset_fn=dataframe_to_dataset_temp_precip,
        optimizer=optimizer,
        out_activation=None,
        loss='mse',
        metrics=['mae']
    )
    model.restore(checkpoint)
    predicted_temps(model, year=year)
|
|
|
|
if __name__ == "__main__":
    # Expose the two CLI subcommands through python-fire.
    commands = {
        'map': predicted_map_cmd,
        'temp': predicted_temps_cmd,
    }
    fire.Fire(commands)
|
|
|