import numpy as np
import tensorflow as tf
import pandas as pd
from sklearn.utils import class_weight

# MIN_YEAR, MAX_YEAR, SEASONS, INPUTS and OUTPUT come from here
from constants import *

import logging
import os

logger = logging.getLogger('main')
logger.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))
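
# The log level is driven by the environment, e.g. (hypothetical invocation,
# the entry-point name is a guess): LOG_LEVEL=DEBUG python main.py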


def normalize(v, o=None):
    """Standard-score v: subtract the mean of o and divide by its standard
    deviation (o defaults to v itself)."""
    if o is None:
        o = v
    return (v - np.mean(o)) / np.std(o)
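
# A quick illustration (not part of the original module): normalize maps a
# vector to zero mean and unit variance, e.g.
#
#   normalize(np.array([1.0, 2.0, 3.0]))
#   # -> array([-1.22474487,  0.        ,  1.22474487])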


def normalize_ndarray(ar, o=None):
    """Normalize each column of ar using the corresponding column
    statistics of o (o defaults to ar itself).

    Note: np.transpose returns a view, so the row assignments below write
    straight into ar; the input array is normalized in place and also
    returned for convenience.
    """
    if o is None:
        o = ar

    # transpose: operate over columns
    tr = np.transpose(ar)
    to = np.transpose(o)
    for i in range(tr.shape[0]):
        tr[i] = normalize(tr[i], to[i])

    # transpose back
    return np.transpose(tr)
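
# Column-wise behaviour on a toy matrix (illustrative only): each column is
# scaled independently, and the input array is modified in place.
#
#   a = np.array([[1.0, 10.0],
#                 [2.0, 20.0],
#                 [3.0, 30.0]])
#   normalize_ndarray(a)
#   # -> both columns become [-1.22474487, 0.0, 1.22474487]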


def dataframe_to_dataset_biomes(df):
    """Build the classification dataset: one block of rows per year, with
    the biome class as the label. Returns (rows, input_columns, num_classes,
    class_weights, tf.data.Dataset)."""
    # 8 input columns for the year's seasonal temp and precipitation,
    # 3 for latitude, elevation and distance_to_water
    input_columns = 11

    tf_inputs = np.empty((0, input_columns))
    tf_output = np.empty((0,))

    for year in range(MIN_YEAR, MAX_YEAR + 1):
        local_inputs = list(INPUTS)
        for season in SEASONS:
            local_inputs += [
                'temp_{}_{}'.format(season, year),
                'precip_{}_{}'.format(season, year)
            ]

        local_df = df[local_inputs]

        tf_inputs = np.concatenate((tf_inputs, local_df.values), axis=0)
        tf_output = np.concatenate((tf_output, df[OUTPUT].values), axis=0)

    # balance class weights for the loss function, since the data is highly imbalanced
    num_classes = len(np.unique(tf_output))
    class_weights = class_weight.compute_class_weight('balanced', classes=np.unique(tf_output), y=tf_output)
    logger.debug('class_weights %s', class_weights)

    tf_inputs = tf.cast(normalize_ndarray(tf_inputs), tf.float32)
    tf_output = tf.cast(tf_output, tf.int64)

    logger.debug('dataset size: rows=%d, input_columns=%d, num_classes=%d', int(tf_inputs.shape[0]), input_columns, num_classes)
    return int(tf_inputs.shape[0]), input_columns, num_classes, class_weights, tf.data.Dataset.from_tensor_slices((tf_inputs, tf_output))
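
# A hedged usage sketch; `df` stands for the dataframe loaded elsewhere in
# the project, and the batch size is an arbitrary example value:
#
#   rows, cols, classes, weights, dataset = dataframe_to_dataset_biomes(df)
#   dataset = dataset.shuffle(rows).batch(32)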


def dataframe_to_dataset_temp_precip(df):
    """Build the regression dataset: one block of rows per (year, season)
    pair, with temperature and precipitation as the two targets. Returns
    (rows, input_columns, num_classes, tf.data.Dataset)."""
    rows = df.shape[0]

    # 3 input columns for elevation, distance_to_water and latitude,
    # plus 2 for season and year
    input_columns = 5
    # two regression targets per row: temperature and precipitation
    num_classes = 2

    tf_inputs = np.empty((0, input_columns))
    tf_output = np.empty((0, num_classes))

    for year in range(MIN_YEAR, MAX_YEAR + 1):
        local_inputs = list(INPUTS)

        for idx, season in enumerate(SEASONS):
            # encode the season as a fraction of the year
            # (was idx / len(season), which divided by the length of the
            # season's name rather than the number of seasons)
            season_index = idx / len(SEASONS)

            # copy so the added columns never write back into df
            local_df = df[local_inputs].copy()
            local_df.loc[:, 'season'] = pd.Series(np.repeat(season_index, rows), index=local_df.index)
            local_df.loc[:, 'year'] = pd.Series(np.repeat(year, rows), index=local_df.index)

            output = ['temp_{}_{}'.format(season, year), 'precip_{}_{}'.format(season, year)]
            tf_inputs = np.concatenate((tf_inputs, local_df.values), axis=0)
            tf_output = np.concatenate((tf_output, df[output].values), axis=0)

    tf_inputs = tf.cast(normalize_ndarray(tf_inputs), tf.float32)
    tf_output = tf.cast(tf_output, tf.float32)

    logger.debug('dataset size: rows=%d, input_columns=%d, num_classes=%d', int(tf_inputs.shape[0]), input_columns, num_classes)
    return int(tf_inputs.shape[0]), input_columns, num_classes, tf.data.Dataset.from_tensor_slices((tf_inputs, tf_output))
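
# The regression variant follows the same pattern (again, `df` is assumed):
#
#   rows, cols, targets, dataset = dataframe_to_dataset_temp_precip(df)
#   dataset = dataset.shuffle(rows).batch(32)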


# flatten one level of nesting, e.g. [[1, 2], [3]] -> [1, 2, 3]
flatten = lambda l: [item for sublist in l for item in sublist]


def chunker(seq, size):
    """Return a generator of successive slices of seq, each at most
    size items long."""
    return (seq[pos:pos + size] for pos in range(0, len(seq), size))
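
# For example (illustrative only):
#
#   list(chunker(list(range(5)), 2))   # -> [[0, 1], [2, 3], [4]]
#   flatten([[0, 1], [2, 3], [4]])     # -> [0, 1, 2, 3, 4]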