import fire
import numpy as np
import pandas as pd

from utils import *
#from nn import compile_b
from constants import INPUTS
from model import Model
from draw import draw
from train import A_params


def predicted_map(B, change=0, path=None):
    """Draw the biome map predicted by model B, optionally after shifting all seasonal temperatures by `change` degrees."""
    year = MAX_YEAR - 1
    df = pd.read_pickle('data.p')
    logger.info('temperature change of %s', change)

    # Model inputs: the static input columns plus the seasonal temperature and
    # precipitation columns for the chosen year.
    inputs = list(INPUTS)
    for season in SEASONS:
        inputs += [
            'temp_{}_{}'.format(season, year),
            'precip_{}_{}'.format(season, year)
        ]

    frame = df[inputs + ['longitude']].copy()

    # Apply the requested temperature change to every season.
    for season in SEASONS:
        frame.loc[:, 'temp_{}_{}'.format(season, year)] += change

    print(frame.head())
    frame_cp = frame.copy()

    columns = ['latitude', 'longitude', 'biome_num']
    new_data = pd.DataFrame(columns=columns)
    nframe = pd.DataFrame(columns=frame.columns, data=normalize_ndarray(frame.to_numpy()))

    # Predict biome classes batch by batch; a trailing chunk smaller than the batch size is skipped.
    for chunk, chunk_original in zip(chunker(nframe, B.batch_size), chunker(frame_cp, B.batch_size)):
        if chunk.shape[0] < B.batch_size:
            continue
        input_data = chunk.loc[:, inputs].values
        out = B.predict_class(input_data)

        f = pd.DataFrame({
            'longitude': chunk_original.loc[:, 'longitude'],
            'latitude': chunk_original.loc[:, 'latitude'],
            'biome_num': out
        }, columns=columns)
        new_data = pd.concat([new_data, f])

    draw(new_data, path=path)


def predicted_map_cmd(checkpoint='checkpoints/b.h5', change=0, path=None):
    B = Model('b', epochs=1)
    B.prepare_for_use()
    B.restore(checkpoint)

    predicted_map(B, change=change, path=path)


def predicted_temps(A, year=2000):
    """Print the temperatures predicted by model A next to the recorded ones for one batch."""
    df = pd.read_pickle('data.p')
    inputs = df[INPUTS].copy()

    all_temps = ['temp_{}_{}'.format(season, year) for season in SEASONS]
    # Dataset-wide mean temperature, used as an extra input feature.
    inputs.loc[:, 'mean_temp'] = np.mean(df[all_temps].values)
    inputs = normalize_ndarray(inputs.to_numpy())
    print(inputs[0:A.batch_size])

    out_columns = all_temps  # + all_precips
    print(out_columns)
    out = A.predict(inputs)
    actual_output = df[out_columns][0:A.batch_size]
    model_output = pd.DataFrame(data=denormalize(out, df[out_columns].to_numpy()),
                                columns=out_columns)[0:A.batch_size]
    print(actual_output)
    print(model_output)


def predicted_precips(A, year=2000):
    """Print the precipitations predicted by model A next to the recorded ones for one batch."""
    df = pd.read_pickle('data.p')
    inputs = df[INPUTS].copy()

    all_precips = ['precip_{}_{}'.format(season, year) for season in SEASONS]
    # Dataset-wide mean precipitation, used as an extra input feature.
    inputs.loc[:, 'mean_precip'] = np.mean(df[all_precips].values)
    inputs = normalize_ndarray(inputs.to_numpy())
    print(inputs[0:A.batch_size])

    out_columns = all_precips
    print(out_columns)
    out = A.predict(inputs)
    actual_output = df[out_columns][0:A.batch_size]
    model_output = pd.DataFrame(data=denormalize(out, df[out_columns].to_numpy()),
                                columns=out_columns)[0:A.batch_size]
    print(actual_output)
    print(model_output)


def predicted_temps_cmd(checkpoint='checkpoints/temp.h5', year=2000):
    batch_size = A_params['batch_size']['grid_search'][0]
    layers = A_params['layers']['grid_search'][0]
    optimizer = A_params['optimizer']['grid_search'][0](A_params['lr']['grid_search'][0])

    Temp = Model('temp', epochs=1)
    Temp.prepare_for_use(
        batch_size=batch_size,
        layers=layers,
        dataset_fn=dataframe_to_dataset_temp,
        optimizer=optimizer,
        out_activation=None,
        loss='mse',
        metrics=['mae']
    )
    Temp.restore(checkpoint)

    predicted_temps(Temp, year=year)


def predicted_precips_cmd(checkpoint='checkpoints/precip.h5', year=2000):
    batch_size = A_params['batch_size']['grid_search'][0]
    layers = A_params['layers']['grid_search'][0]
    optimizer = A_params['optimizer']['grid_search'][0](A_params['lr']['grid_search'][0])

    Precip = Model('precip', epochs=1)
    Precip.prepare_for_use(
        batch_size=batch_size,
        layers=layers,
        dataset_fn=dataframe_to_dataset_temp,
        optimizer=optimizer,
        out_activation=None,
        loss='mse',
        metrics=['mae']
    )
    Precip.restore(checkpoint)

    predicted_precips(Precip, year=year)


def predict_end_to_end(Temp, Precip, Biomes, df=None, year=2000, path=None):
    """Predict temperature and precipitation with Temp and Precip, feed the
    results into Biomes, and draw the resulting biome map."""
    # Load the dataset lazily so data.p is only read when the function runs,
    # not when the module is imported.
    if df is None:
        df = pd.read_pickle('data.p')

    inputs = df[INPUTS].copy()

    earth_df = pd.read_pickle('data.p')
    earth_df_inputs = earth_df[INPUTS].copy()

    # Temperature predictions.
    all_temps = ['temp_{}_{}'.format(season, year) for season in SEASONS]
    inputs.loc[:, 'mean_temp'] = np.mean(df[all_temps].values)
    earth_df_inputs.loc[:, 'mean_temp'] = np.mean(earth_df[all_temps].values)
    print(inputs['mean_temp'])
    inputs = normalize_ndarray(inputs.to_numpy())
    out_columns = all_temps
    out = Temp.predict(inputs)
    temp_output = pd.DataFrame(data=denormalize(out, df[out_columns].to_numpy()), columns=out_columns)

    # Precipitation predictions.
    inputs = df[INPUTS].copy()
    all_precips = ['precip_{}_{}'.format(season, year) for season in SEASONS]
    inputs.loc[:, 'mean_precip'] = np.mean(df[all_precips].values)
    print(inputs['mean_precip'])
    inputs = normalize_ndarray(inputs.to_numpy())
    out_columns = all_precips
    out = Precip.predict(inputs)
    precip_output = pd.DataFrame(data=denormalize(out, df[out_columns].to_numpy()), columns=out_columns)

    # Build the biome-model input frame from the predicted temperatures and precipitations.
    inputs = list(INPUTS)
    frame = df[inputs + ['longitude']].copy()

    for season in SEASONS:
        tc = 'temp_{}_{}'.format(season, year)
        pc = 'precip_{}_{}'.format(season, year)
        frame.loc[:, tc] = temp_output[tc]
        frame.loc[:, pc] = precip_output[pc]

    frame.loc[:, 'latitude'] = df['latitude']

    frame_cp = frame.copy()

    columns = ['latitude', 'longitude', 'biome_num']
    new_data = pd.DataFrame(columns=columns)
    nframe = pd.DataFrame(columns=frame.columns, data=normalize_ndarray(frame.to_numpy()))

    for season in SEASONS:
        inputs += [
            'temp_{}_{}'.format(season, year),
            'precip_{}_{}'.format(season, year)
        ]

    # Predict biome classes batch by batch; a trailing chunk smaller than the batch size is skipped.
    for chunk, chunk_original in zip(chunker(nframe, Biomes.batch_size), chunker(frame_cp, Biomes.batch_size)):
        if chunk.shape[0] < Biomes.batch_size:
            continue
        input_data = chunk.loc[:, inputs].values
        out = Biomes.predict_class(input_data)

        f = pd.DataFrame({
            'longitude': chunk_original.loc[:, 'longitude'],
            'latitude': chunk_original.loc[:, 'latitude'],
            'biome_num': out
        }, columns=columns)
        new_data = pd.concat([new_data, f])

    print(new_data)
    draw(new_data, path=path)


def predict_end_to_end_cmd(checkpoint_temp='checkpoints/temp.h5',
                           checkpoint_precip='checkpoints/precip.h5',
                           checkpoint_biomes='checkpoints/b.h5',
                           year=2000, path=None, **kwargs):
    batch_size = A_params['batch_size']['grid_search'][0]
    layers = A_params['layers']['grid_search'][0]
    optimizer = A_params['optimizer']['grid_search'][0](A_params['lr']['grid_search'][0])

    Temp = Model('temp', epochs=1)
    Temp.prepare_for_use(
        batch_size=batch_size,
        layers=layers,
        dataset_fn=dataframe_to_dataset_temp,
        optimizer=optimizer,
        out_activation=None,
        loss='mse',
        metrics=['mae']
    )
    Temp.restore(checkpoint_temp)

    Precip = Model('precip', epochs=1)
    Precip.prepare_for_use(
        batch_size=batch_size,
        layers=layers,
        dataset_fn=dataframe_to_dataset_temp,
        optimizer=optimizer,
        out_activation=None,
        loss='mse',
        metrics=['mae']
    )
    Precip.restore(checkpoint_precip)

    Biomes = Model('b', epochs=1)
    Biomes.prepare_for_use()
    Biomes.restore(checkpoint_biomes)

    predict_end_to_end(Temp=Temp, Precip=Precip, Biomes=Biomes, year=year, path=path, **kwargs)


if __name__ == "__main__":
    fire.Fire({
        'map': predicted_map_cmd,
        'temp': predicted_temps_cmd,
        'precip': predicted_precips_cmd,
        'end-to-end': predict_end_to_end_cmd
    })
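
# Example command-line usage via the fire.Fire dispatch above. This is only a
# sketch: the module's file name is not given here, so "predict.py" is a
# placeholder, and the output path passed to --path is an assumption (the
# checkpoint defaults shown are the ones defined in the *_cmd functions).
#
#   python predict.py map --checkpoint=checkpoints/b.h5 --change=2 --path=map_plus_2.png
#   python predict.py temp --checkpoint=checkpoints/temp.h5 --year=2000
#   python predict.py precip --checkpoint=checkpoints/precip.h5 --year=2000
#   python predict.py end-to-end --year=2000 --path=end_to_end.png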