fix(map_generator): fix recent changes for the web version

This commit is contained in:
Mahdi Dibaiee 2019-05-29 17:46:37 +04:30
parent 88ef4da496
commit b371500a3c
6 changed files with 26 additions and 15 deletions

11
biomes/details.py Normal file
View File

@ -0,0 +1,11 @@
# Quick inspection script: load the pickled dataset and print a normalized
# view of selected geographic / climate columns.
import pandas as pd

from utils import normalize_ndarray

# NOTE(review): assumes 'data.p' (the project's pickled DataFrame) exists in
# the current working directory — confirm against the data pipeline.
df = pd.read_pickle('data.p')

# Show what the dataset contains and a sample of the first eight columns.
print('Columns', df.columns)
print(df[df.columns[0:8]])

# Columns of interest: position, biome label, terrain features, and the
# 1900 seasonal climate measurements.
columns = ['longitude', 'latitude', 'biome_num', 'elevation', 'distance_to_water', 'temp_winter_1900', 'precip_winter_1900', 'temp_spring_1900', 'precip_spring_1900']

# Normalize the selected columns and display them with their original names
# so the scaled values can be eyeballed against the raw ones above.
normalized = normalize_ndarray(df[columns].to_numpy())
print(pd.DataFrame(columns=columns, data=normalized))

View File

@ -39,10 +39,10 @@ parameters = {
'step': 0.01
},
'max_elevation': {
'default': 1e4,
'default': 10000,
'type': 'int',
'min': 0,
'max': 1e4,
'max': 10000,
},
'min_elevation': {
'default': -400,
@ -51,10 +51,10 @@ parameters = {
'max': 0
},
'ground_noise': {
'default': 6e3,
'default': 6000,
'type': 'int',
'min': 0,
'max': 1e5,
'max': 10000,
},
'water_proportion': {
'default': 0.3,
@ -315,7 +315,7 @@ def generate_map(biomes=False, **kwargs):
greys = cm.get_cmap('Greys')
greys.set_under(color=SEA_COLOR)
# ground = ndimage.gaussian_filter(ground, sigma=4)
ground = ndimage.gaussian_filter(ground, sigma=1)
ground = ndimage.generic_filter(ground, constant_filter, size=1)
print(np.min(ground), np.max(ground), p['max_elevation'])
@ -325,13 +325,13 @@ def generate_map(biomes=False, **kwargs):
plt.imshow(ground.T, cmap=greys, norm=norm)
plt.gca().invert_yaxis()
if biomes:
generate_biomes(ground)
figfile = BytesIO()
plt.savefig(figfile, format='png')
figfile.seek(0)
if biomes:
generate_biomes(ground)
return figfile

View File

@ -138,7 +138,7 @@ def predicted_precips_cmd(checkpoint='checkpoints/precip.h5', year=2000):
Precip.restore(checkpoint)
predicted_precips(Precip, year=year)
def predict_end_to_end(Temp, Precip, Biomes, df=pd.read_pickle('data.p'), year=2000):
def predict_end_to_end(Temp, Precip, Biomes, df=pd.read_pickle('data.p'), year=2000, path=None):
columns = INPUTS
inputs = df[INPUTS]
@ -207,9 +207,9 @@ def predict_end_to_end(Temp, Precip, Biomes, df=pd.read_pickle('data.p'), year=2
new_data = new_data.append(f)
print(new_data)
draw(new_data)
draw(new_data, path=path)
def predict_end_to_end_cmd(checkpoint_temp='checkpoints/temp.h5', checkpoint_precip='checkpoints/precip.h5', checkpoint_biomes='checkpoints/b.h5', year=2000, **kwargs):
def predict_end_to_end_cmd(checkpoint_temp='checkpoints/temp.h5', checkpoint_precip='checkpoints/precip.h5', checkpoint_biomes='checkpoints/b.h5', year=2000, path=None, **kwargs):
batch_size = A_params['batch_size']['grid_search'][0]
layers = A_params['layers']['grid_search'][0]
optimizer = A_params['optimizer']['grid_search'][0](A_params['lr']['grid_search'][0])
@ -242,7 +242,7 @@ def predict_end_to_end_cmd(checkpoint_temp='checkpoints/temp.h5', checkpoint_pre
Biomes.prepare_for_use()
Biomes.restore(checkpoint_biomes)
predict_end_to_end(Temp=Temp, Precip=Precip, Biomes=Biomes, year=year, **kwargs)
predict_end_to_end(Temp=Temp, Precip=Precip, Biomes=Biomes, year=year, path=path, **kwargs)
if __name__ == "__main__":

View File

@ -12,7 +12,7 @@ function generate() {
const queryString = new URLSearchParams(formData).toString()
map.src = '/map?' + queryString;
map.classList.add('d-none');
map.width = formData.get('width');
//map.width = formData.get('width');
}
mapSettings.addEventListener('submit', (e) => {

View File

@ -11,7 +11,7 @@
<div class='row'>
<main class='col d-flex justify-content-center align-items-center'>
<!-- <canvas id='board'></canvas> -->
<img src='' id='map'>
<img src='' id='map' width='80%'>
<div class='spinner-border text-primary' role='status' id='spinner'>
<span class='sr-only'>Loading...</span>

View File

@ -10,7 +10,7 @@ from model import Model
B_params = {
'batch_size': tune.grid_search([256]),
'layers': tune.grid_search([[512, 512]]),
'lr': tune.grid_search([3e-4]),
'lr': tune.grid_search([1e-4]),
'optimizer': tune.grid_search([tf.keras.optimizers.Adam]),
}