feat(map-generator): use biome models for generating the biome layer

Mahdi Dibaiee 2019-05-18 16:17:28 +04:30
parent f79c63abf8
commit d965474974
13 changed files with 204 additions and 34 deletions

View File

@@ -2,26 +2,33 @@ import fire
 import matplotlib.pyplot as plt
 from matplotlib.collections import PatchCollection
 from matplotlib.patches import Circle, Patch
-from utils import logger
+from utils import logger, to_range
 from constants import BIOMES
 import pandas as pd
 import cartopy.crs as ccrs
-def draw(df, path=None):
+def draw(df, earth=True, width=23.22, height=13, only_draw=False, path=None):
     logger.debug('draw(df, %s)', path)
     biomes = {}
     biome_numbers = df['biome_num'].unique()
     for i, row in df.iterrows():
-        p = (row.longitude, row.latitude)
+        if earth:
+            p = (row.longitude, row.latitude)
+        else:
+            p = (to_range(-180, 180, 0, width)(row.longitude), to_range(-90, 90, 0, height)(row.latitude))
         if row.biome_num in biomes:
             biomes[row.biome_num].append(p)
         else:
             biomes[row.biome_num] = [p]
-    ax = plt.axes(projection=ccrs.PlateCarree())
-    ax.stock_img()
+    if earth:
+        ax = plt.axes(projection=ccrs.PlateCarree())
+        ax.stock_img()
+    else:
+        ax = plt.gca()
     legend_handles = []
     for n in biome_numbers:
@@ -37,8 +44,11 @@ def draw(df, path=None):
     ax.autoscale_view()
     figure = plt.gcf()
-    figure.set_size_inches(23.22, 13)
+    figure.set_size_inches(width, height)
     figure.subplots_adjust(left=0.02, right=0.79)
+    if only_draw: return
     if path:
         plt.savefig(path)
     else:
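A rough sketch of the coordinate mapping the new earth=False branch performs, using the draw() defaults and illustrative inputs (to_range, defined in utils, is reproduced inline as plain formulas):

    width, height = 23.22, 13                            # draw()'s default figure size
    lon_to_x = lambda lon: (lon + 180) * width / 360     # same mapping as to_range(-180, 180, 0, width)
    lat_to_y = lambda lat: (lat + 90) * height / 180     # same mapping as to_range(-90, 90, 0, height)
    print(lon_to_x(0.0), lat_to_y(45.0))                 # about 11.61 and 9.75: centre column, upper half of the grid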

View File

@@ -9,10 +9,15 @@ from io import BytesIO
 import pandas as pd
 from shapely.geometry import Point, MultiPoint
 from descartes import PolygonPatch
+from constants import INPUTS, SEASONS
+from draw import draw
+from train import A_params
+from model import Model
+from utils import *
 parameters = {
     'width': {
-        'default': 900,
+        'default': 700,
         'type': 'int',
     },
     'height': {
@@ -34,16 +39,22 @@ parameters = {
         'step': 0.01
     },
     'max_elevation': {
-        'default': 30,
+        'default': 1e4,
         'type': 'int',
         'min': 0,
-        'max': 50,
+        'max': 1e4,
+    },
+    'min_elevation': {
+        'default': -400,
+        'type': 'int',
+        'min': -1000,
+        'max': 0
     },
     'ground_noise': {
-        'default': 15,
+        'default': 1.1e4,
         'type': 'int',
         'min': 0,
-        'max': 50,
+        'max': 1e5,
     },
     'water_proportion': {
         'default': 0.6,
@@ -109,6 +120,16 @@ parameters = {
         'default': False,
         'type': 'bool'
     },
+    'mean_temperature': {
+        'default': -4.2,
+        'type': 'float',
+        'step': 1,
+    },
+    'mean_precipitation': {
+        'default': 45.24,
+        'type': 'float',
+        'step': 1,
+    },
     'seed': {
         'default': '',
         'type': 'int',
@@ -156,6 +177,9 @@ def bound_check(ground, point):
     elif y >= h:
         y = y - h
+    if x < 0 or x >= w or y < 0 or y >= h:
+        return bound_check(ground, (x, y))
     return (x, y)
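The recursive call keeps re-applying the edge shifts until the point lands back inside the grid; the full bound_check body isn't shown in this hunk, but the net effect appears equivalent to a toroidal wrap-around. A standalone, hedged sketch of that idea:

    import numpy as np

    def wrap(ground, point):
        # illustrative stand-in: wrap off-grid indices around to the opposite edge
        w, h = ground.shape
        x, y = point
        return (x % w, y % h)

    print(wrap(np.zeros((700, 450)), (705, -3)))   # (5, 447)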
@@ -190,7 +214,7 @@ def continent_agent(ground, position, size):
         if not is_ground(ground[x, y]) and in_range((x, y), position, size**2 * np.pi):
             trials = 0
             size -= 1
-            ground[x, y] = np.random.randint(1, p['ground_noise'])
+            ground[x, y] = np.random.randint(p['water_level'] + 1, p['ground_noise'])
         else:
             trials += 1
@@ -218,6 +242,7 @@ def random_elevate_agent(ground, position, height, size=p['mountain_area_elevati
 def mountain_agent(ground, position):
+    print('mountain_agent')
     if not away_from_sea(ground, position):
         return
@@ -283,11 +308,11 @@ def generate_map(biomes=False, **kwargs):
     ground = ndimage.gaussian_filter(ground, sigma=(1 - p['sharpness']) * 20)
-    for i in range(int(ground_size * p['mountain_ratio'] / p['max_elevation']**2)):
+    for i in range(int(ground_size * p['mountain_ratio'] / (p['max_elevation'] / 2))):
         position = (np.random.randint(0, width), np.random.randint(0, height))
         mountain_agent(ground, position)
-    norm = colors.Normalize(vmin=1)
+    norm = colors.Normalize(vmin=p['water_level'] + 1)
     greys = cm.get_cmap('Greys')
     greys.set_under(color=SEA_COLOR)
@@ -311,12 +336,6 @@ def generate_map(biomes=False, **kwargs):
     return figfile
-def to_range(omin, omax, nmin, nmax):
-    orange = omax - omin
-    nrange = nmax - nmin
-    return lambda x: ((x - omin) * nrange / orange) + nmin
 def generate_biomes(ground):
     width, height = p['width'], p['height']
@@ -325,7 +344,6 @@ def generate_biomes(ground):
     width_to_longitude = to_range(0, width, -180, 180)
     print('generate_biomes')
-    INPUTS = ['elevation', 'distance_to_water', 'latitude']
     data = {}
     for col in ['longitude', 'latitude', 'elevation', 'distance_to_water']:
@@ -341,17 +359,24 @@ def generate_biomes(ground):
             data['latitude'].append(height_to_latitude(y))
             data['elevation'].append(v)
-    print(len(points))
+    print('buffering points')
     points = MultiPoint(points)
-    boundary = points.buffer(1e-0).boundary
+    boundary = points.buffer(1).boundary
     for x, y in np.ndindex(ground.shape):
         if ground[x,y] > p['water_level']:
-            # print(x,y, Point(x,y).distance(boundary))
             data['distance_to_water'].append(Point(x, y).distance(boundary))
     df = pd.DataFrame(data)
-    print(df)
+    print(df['elevation'].min(), df['elevation'].max())
+    print(df['distance_to_water'].min(), df['distance_to_water'].max())
+    print(df['latitude'].min(), df['latitude'].max())
+    print('running prediction models')
+    print(p['mean_precipitation'], p['mean_temperature'])
+    result = predict_end_to_end(df, boundary)
     # fig = plt.figure()
     # ax = fig.add_subplot(111)
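For reference, the distance_to_water feature rests on plain shapely primitives: the collected grid points are buffered, and each remaining cell's distance to the buffer's boundary is taken. A toy-sized sketch of those calls:

    from shapely.geometry import Point, MultiPoint

    points = MultiPoint([(0, 0), (0, 1), (1, 0)])   # toy stand-in for the collected grid points
    boundary = points.buffer(1).boundary            # outline of the buffered point cloud
    print(Point(5, 5).distance(boundary))           # Euclidean distance from a cell to that outline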
@@ -365,7 +390,118 @@ def generate_biomes(ground):
     # plt.show()
-    df = pd.read_pickle('data.p')
-    print(df['elevation'].min(), df['elevation'].max())
-    print(df['distance_to_water'].min(), df['distance_to_water'].max())
-    print(df['latitude'].min(), df['latitude'].max())
+def predict_end_to_end(input_df, boundary, checkpoint_temp='checkpoints/temp.h5', checkpoint_precip='checkpoints/precip.h5', checkpoint_biomes='checkpoints/b.h5', year=2000):
+    batch_size = A_params['batch_size']['grid_search'][0]
+    layers = A_params['layers']['grid_search'][0]
+    optimizer = A_params['optimizer']['grid_search'][0](A_params['lr']['grid_search'][0])
+    Temp = Model('temp', epochs=1)
+    Temp.prepare_for_use(
+        batch_size=batch_size,
+        layers=layers,
+        dataset_fn=dataframe_to_dataset_temp,
+        optimizer=optimizer,
+        out_activation=None,
+        loss='mse',
+        metrics=['mae']
+    )
+    Temp.restore(checkpoint_temp)
+    Precip = Model('precip', epochs=1)
+    Precip.prepare_for_use(
+        batch_size=batch_size,
+        layers=layers,
+        dataset_fn=dataframe_to_dataset_temp,
+        optimizer=optimizer,
+        out_activation=None,
+        loss='mse',
+        metrics=['mae']
+    )
+    Precip.restore(checkpoint_precip)
+    Biomes = Model('b', epochs=1)
+    Biomes.prepare_for_use()
+    Biomes.restore(checkpoint_biomes)
+    inputs = input_df[INPUTS]
+    inputs.loc[:, 'mean_temp'] = p['mean_temperature']
+    inputs_copy = inputs.copy()
+    inputs_copy.loc[:, 'mean_temp'] = mean_temperature_over_years(df, size=inputs.shape[0])
+    inputs = inputs.to_numpy()
+    inputs = normalize_ndarray(inputs, inputs_copy)
+    print(inputs)
+    out_columns = ['temp_{}_{}'.format(season, year) for season in SEASONS]
+    out = Temp.predict(inputs)
+    temp_output = pd.DataFrame(data=denormalize(out, df[out_columns].to_numpy()), columns=out_columns)
+    inputs = input_df[INPUTS]
+    inputs.loc[:, 'mean_precip'] = p['mean_precipitation']
+    inputs_copy = inputs.copy()
+    inputs_copy.loc[:, 'mean_precip'] = mean_precipitation_over_years(df, size=inputs.shape[0])
+    inputs = inputs.to_numpy()
+    inputs = normalize_ndarray(inputs, inputs_copy)
+    print(inputs)
+    out_columns = ['precip_{}_{}'.format(season, year) for season in SEASONS]
+    out = Precip.predict(inputs)
+    precip_output = pd.DataFrame(data=denormalize(out, df[out_columns].to_numpy()), columns=out_columns)
+    inputs = list(INPUTS)
+    frame = input_df[inputs + ['longitude']]
+    for season in SEASONS:
+        tc = 'temp_{}_{}'.format(season, year)
+        pc = 'precip_{}_{}'.format(season, year)
+        frame.loc[:, tc] = temp_output[tc]
+        frame.loc[:, pc] = precip_output[pc]
+    frame.loc[:, 'latitude'] = input_df['latitude']
+    frame_cp = frame.copy()
+    columns = ['latitude', 'longitude', 'biome_num']
+    new_data = pd.DataFrame(columns=columns)
+    nframe = pd.DataFrame(columns=frame.columns, data=normalize_ndarray(frame.to_numpy()))
+    for season in SEASONS:
+        inputs += [
+            'temp_{}_{}'.format(season, year),
+            'precip_{}_{}'.format(season, year)
+        ]
+    for i, (chunk, chunk_original) in enumerate(zip(chunker(nframe, Biomes.batch_size), chunker(frame_cp, Biomes.batch_size))):
+        if chunk.shape[0] < Biomes.batch_size:
+            continue
+        input_data = chunk.loc[:, inputs].values
+        out = Biomes.predict_class(input_data)
+        f = pd.DataFrame({
+            'longitude': chunk_original.loc[:, 'longitude'],
+            'latitude': chunk_original.loc[:, 'latitude'],
+            'biome_num': out
+        }, columns=columns)
+        new_data = new_data.append(f)
+    #print(new_data)
+    draw(new_data, earth=False, only_draw=True, width=p['width'], height=p['height'])
+    # TODO: reduce opacity of biome layer
 if __name__ == "__main__":
-    generate_map()
+    # p['width'] = 50
+    # p['height'] = 50
+    p['water_proportion'] = 0.9
+    p['continents'] = 3
+    p['seed'] = 1
+    generate_map(True)
+    # print(normalize_ndarray(np.array([[ 5.59359803,0.99879546,-90., 45.24], [ 5.54976747, 0.99879546,-86.4, 45.24 ]])))
     plt.show()
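The biome pass at the end feeds the normalized frame through the classifier in batches via chunker, dropping the final chunk when it is smaller than the model's batch size. A self-contained sketch of that loop shape, with a stub standing in for Biomes.predict_class and made-up data:

    import pandas as pd

    def chunker(seq, size):                      # same helper as in utils
        return (seq[pos:pos + size] for pos in range(0, len(seq), size))

    def predict_class_stub(rows):                # stand-in for Biomes.predict_class
        return [0] * len(rows)

    batch_size = 4
    frame = pd.DataFrame({'longitude': range(10), 'latitude': range(10)})
    pieces = []
    for chunk in chunker(frame, batch_size):
        if chunk.shape[0] < batch_size:          # the model expects full batches, so the tail is skipped
            continue
        pieces.append(pd.DataFrame({
            'longitude': chunk['longitude'],
            'latitude': chunk['latitude'],
            'biome_num': predict_class_stub(chunk),
        }))
    print(pd.concat(pieces).shape)               # (8, 3): the last, incomplete chunk was dropped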

View File

@@ -147,6 +147,7 @@ def predict_end_to_end(Temp, Precip, Biomes, year=2000):
     all_temps = ['temp_{}_{}'.format(season, year) for season in SEASONS]
     inputs.loc[:, 'mean_temp'] = np.mean(df[all_temps].values)
+    print(inputs['mean_temp'])
     inputs = inputs.to_numpy()
     inputs = normalize_ndarray(inputs)
@@ -158,6 +159,7 @@ def predict_end_to_end(Temp, Precip, Biomes, year=2000):
     all_precips = ['precip_{}_{}'.format(season, year) for season in SEASONS]
     inputs.loc[:, 'mean_precip'] = np.mean(df[all_precips].values)
+    print(inputs['mean_precip'])
     inputs = inputs.to_numpy()
     inputs = normalize_ndarray(inputs)
@@ -168,12 +170,6 @@ def predict_end_to_end(Temp, Precip, Biomes, year=2000):
     inputs = list(INPUTS)
-    for season in SEASONS:
-        inputs += [
-            'temp_{}_{}'.format(season, year),
-            'precip_{}_{}'.format(season, year)
-        ]
     frame = df[inputs + ['longitude']]
     for season in SEASONS:
@@ -190,6 +186,12 @@ def predict_end_to_end(Temp, Precip, Biomes, year=2000):
     new_data = pd.DataFrame(columns=columns)
     nframe = pd.DataFrame(columns=frame.columns, data=normalize_ndarray(frame.to_numpy()))
+    for season in SEASONS:
+        inputs += [
+            'temp_{}_{}'.format(season, year),
+            'precip_{}_{}'.format(season, year)
+        ]
     for i, (chunk, chunk_original) in enumerate(zip(chunker(nframe, Biomes.batch_size), chunker(frame_cp, Biomes.batch_size))):
         if chunk.shape[0] < Biomes.batch_size:
             continue

View File

@@ -6,15 +6,17 @@ from sklearn.utils import class_weight
 from constants import *
 import logging
 import os
+from math import ceil
 logger = logging.getLogger('main')
 logger.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))
+EPSILON = 1e-5
 def normalize(v, o=None):
     if o is None:
         o = v
-    return (v - np.mean(o)) / np.std(o)
+    return (v - np.mean(o)) / max(EPSILON, np.std(o))
 def denormalize(v, o=None):
     if o is None:
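The EPSILON guard protects normalize against columns with (near-)zero variance, which can occur when a column is filled with a single constant value. A quick check of the behaviour, reusing the definition above:

    import numpy as np

    EPSILON = 1e-5

    def normalize(v, o=None):
        if o is None:
            o = v
        return (v - np.mean(o)) / max(EPSILON, np.std(o))

    constant = np.array([45.24, 45.24, 45.24])   # e.g. a column filled with one value
    print(normalize(constant))                   # [0. 0. 0.] rather than a divide-by-zero warning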
@@ -132,8 +134,28 @@ def dataframe_to_dataset_precip(df):
     logger.debug('dataset size: rows=%d, input_columns=%d, num_classes=%d', int(tf_inputs.shape[0]), input_columns, num_classes)
     return int(tf_inputs.shape[0]), input_columns, num_classes, None, tf.data.Dataset.from_tensor_slices((tf_inputs, tf_output))
+def mean_temperature_over_years(df, size=MAX_YEAR - MIN_YEAR):
+    means = []
+    for year in range(MIN_YEAR, MAX_YEAR + 1):
+        all_temps = ['temp_{}_{}'.format(season, year) for season in SEASONS]
+        means.append(np.mean(df[all_temps].values))
+    return (means * ceil(size / len(means)))[0:size]
+def mean_precipitation_over_years(df, size=MAX_YEAR - MIN_YEAR):
+    means = []
+    for year in range(MIN_YEAR, MAX_YEAR + 1):
+        all_precips = ['precip_{}_{}'.format(season, year) for season in SEASONS]
+        means.append(np.mean(df[all_precips].values))
+    return (means * ceil(size / len(means)))[0:size]
 flatten = lambda l: [item for sublist in l for item in sublist]
 def chunker(seq, size):
     return (seq[pos:pos + size] for pos in range(0, len(seq), size))
+def to_range(omin, omax, nmin, nmax):
+    orange = omax - omin
+    nrange = nmax - nmin
+    return lambda x: ((x - omin) * nrange / orange) + nmin
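Two quick illustrations of the new helpers, with made-up numbers: the mean_*_over_years functions tile their per-year means out to `size` entries, and to_range (moved here from the map generator) builds a linear rescaling between two intervals.

    from math import ceil

    means = [10.0, 11.0, 12.0]                         # e.g. one mean per year
    size = 7
    print((means * ceil(size / len(means)))[0:size])   # [10.0, 11.0, 12.0, 10.0, 11.0, 12.0, 10.0]

    def to_range(omin, omax, nmin, nmax):
        # condensed version of the utils helper: linear map from [omin, omax] to [nmin, nmax]
        return lambda x: ((x - omin) * (nmax - nmin) / (omax - omin)) + nmin

    height_to_latitude = to_range(0, 450, -90, 90)     # grid row -> latitude, roughly as generate_biomes does (450 is illustrative)
    print(height_to_latitude(225))                     # 0.0: mid-grid maps to the equator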

View File

@@ -1,5 +1,5 @@
 from flask import Flask, render_template, make_response, send_file, request
-from index import generate_map, parameters
+from map_generator import generate_map, parameters
 app = Flask(__name__)