Commit c115bdcc authored by Clemens Berteld

Some cleanups

parent ebf15e44
import configparser
import numpy as np
from psycopg2 import sql
@@ -150,10 +149,7 @@ def get_interpolation_data_for_point(lat, lon, columns, cursor):
return avg_data
# get_average_data_for_point(52.5, 13.4)
def calcAverageYear(stationList, fromYear, toYear):
dateRange = np.arange(fromYear, toYear+1)
dateRangeRegex = "|".join(np.char.mod('%d', dateRange))
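For illustration, a standalone sketch of what the year-range regex evaluates to (the 1990/2000 bounds are hypothetical):
import numpy as np
dateRange = np.arange(1990, 2000 + 1)                    # array([1990, 1991, ..., 2000])
dateRangeRegex = "|".join(np.char.mod('%d', dateRange))  # '1990|1991|...|2000'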
@@ -56,13 +56,6 @@ def index():
for n in [int(station_id)]:
values = (*values, n) # adding n to existing tuple
# Just for development
# if 'country' in request.args:
# country = request.args['country']
# wheres = wheres + (sql.SQL("AND LOWER({column}) LIKE {values} ").format(column=sql.Identifier('stations', 'country'), values=sql.Placeholder()))
# for n in [country]:
# values = (*values, n) # adding n to existing tuple
# Composing query
query = sql.SQL("SELECT array_to_json(array_agg(row_to_json(t))) from ("
"SELECT station_id, {} FROM stations "
@@ -79,20 +72,6 @@ def index():
results = cursor.fetchall()[0][0]
return s2pTool.sqlToGeoPandas(results).to_json()
#return jsonify(results)
def getStandardQuery():
columns = sql.SQL(' * ') # columns to be queried (e.g. years)
wheres = sql.SQL('') # where filters
query = sql.SQL("SELECT array_to_json(array_agg(row_to_json(t))) from ("
"SELECT station_id, {} FROM stations "
"WHERE file IS NOT NULL "
"{} "
") t;").format(columns, wheres)
return query
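A quick way to eyeball the composed statement during development (sketch; conn is assumed to be an open psycopg2 connection):
print(getStandardQuery().as_string(conn))
# SELECT array_to_json(array_agg(row_to_json(t))) from (SELECT station_id,  *  FROM stations WHERE file IS NOT NULL  ) t;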
@app.route('/minmax', methods=['GET'])
@@ -126,16 +105,19 @@ def get_raster():
@app.route('/annualMean', methods=['GET'])
@cross_origin()
def annualMean():
query = getStandardQuery()
with psycopg2.connect(database=param_postgres["dbName"], user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"]) as connection:
with connection.cursor() as cursor:
cursor.execute(query)
results = cursor.fetchall()[0][0]
return jsonify(s2pTool.determineAnnualMean(results))
return "{}"
@@ -14,10 +14,11 @@ assert "INTERPOLATION" in cfg, "missing INTERPOLATION in config.ini"
param_postgres = cfg["POSTGRES"]
raster_columns = int(cfg["INTERPOLATION"]['raster_columns'])
# ramp = [[255,255,255,1],[255,244,191,1],[255,233,128,1],[255,221,64,1],[255,210,0,1],[243,105,0,1],[230,0,0,1],[153,0,0,1],[77,0,0,1],[0,0,0,1]]
def clean_temp(path):
"""
Clear the temp directory of files left over from previous runs
"""
for file in os.listdir(path):
try:
os.remove(path+file)
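The except branch is cut off by this hunk; a plausible completion of the helper, for context only:
def clean_temp(path):
    """Clear the temp directory of files left over from previous runs"""
    for file in os.listdir(path):
        try:
            os.remove(path + file)
        except OSError:
            pass  # a file that is already gone or still locked can be ignored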
@@ -26,6 +27,9 @@ def clean_temp(path):
def get_class_steps(colour_ramp):
"""
Calculate steps between colour classes
"""
with psycopg2.connect(database=param_postgres["dbName"], user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"]) as connection:
with connection.cursor() as cursor:
min_max = get_min_max(cursor)
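The remainder of get_class_steps falls outside this hunk. Presumably the step size splits the observed min-max range evenly across the ramp's classes, along these lines (sketch; variable names are guesses):
min_val, max_val = min_max
classes = len(colour_ramp) - 1
steps = (max_val - min_val) / classes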
@@ -37,9 +41,11 @@
def colour_picker(min, steps, classes, colour_ramp, value):
"""
Convert a temperature value to an RGBA colour based on its class
"""
rgba = None
for i in range(0, classes + 1):
minor = math.floor(min + (i * steps))
major = math.ceil(min + ((i + 1) * steps))
if minor <= value <= major:
@@ -54,6 +60,9 @@ def colour_picker(min, steps, classes, colour_ramp, value):
def write_raster(data, ramp):
"""
Write the colour-mapped raster data to a GeoTIFF
"""
min, steps, classes = get_class_steps(ramp)
pixel_array_r = []
pixel_array_g = []
@@ -65,6 +74,7 @@ def write_raster(data, ramp):
row_array_b = []
row_array_a = []
for i, station_id in enumerate(data):
# Rearranging matrix points from left to right instead of top to bottom
if i % raster_columns == 0:
value = data[i + j][1]
value = 0 if not value else value
@@ -74,7 +84,7 @@ def write_raster(data, ramp):
r, g, b, a = rgba[0], rgba[1], rgba[2], rgba[3]
else:
r, g, b, a = 0, 0, 0, 0
transparent = data[i + j][2]  # Show only matrix points inside of Germany
a = 0 if transparent else a
a = 255 if a == 1 else a
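# (the colour ramp stores alpha as 0 or 1, so fully opaque values are scaled up to 255)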
row_array_r.append(r)
@@ -99,13 +109,14 @@ def write_raster(data, ramp):
b_band = np_pixel_array_b
a_band = np_pixel_array_a
xmin, ymin, xmax, ymax = [5.01, 47.15, 14.81, 55.33]  # Germany
nrows, ncols = np.shape(r_band)
xres = (xmax - xmin) / float(ncols)
yres = (ymax - ymin) / float(nrows)
geotransform = (xmin, xres, 0, ymax, 0, -yres)
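For readers unfamiliar with GDAL: a geotransform is (origin_x, xres, 0, origin_y, 0, -yres), with the origin at the top-left corner, which is why origin_y is ymax and the y step is negative. As a worked example (the 100-column width is illustrative; the real value comes from raster_columns in config.ini): xres = (14.81 - 5.01) / 100 = 0.098 degrees per pixel.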
path = os.getcwd() + '/temp/'
clean_temp(path)
filename = 'raster_{}.tif'.format(datetime.now().strftime("%Y%m%d%H%M%S"))
output_raster = gdal.GetDriverByName('GTiff').Create(path+filename, ncols, nrows, 4, gdal.GDT_Float32) # Open the file
output_raster.SetGeoTransform(geotransform)
@@ -118,7 +129,4 @@ def write_raster(data, ramp):
output_raster.GetRasterBand(4).WriteArray(a_band)
output_raster.FlushCache()
return path, filename
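Presumably the /raster route serves the file this returns; a hypothetical sketch (the route body is outside this diff, and send_from_directory would need to be imported from flask):
path, filename = write_raster(data, ramp)
return send_from_directory(path, filename)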
@@ -13,9 +13,11 @@ import psycopg2.extras
from psycopg2 import sql
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
import configparser
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from api.GetAverageData import get_interpolation_data_for_point
cfg = configparser.ConfigParser()
cfg.read('../config.ini')
@@ -146,15 +148,11 @@ def create_matrix_data(cursor, amount_points):
query = sql.SQL("""UPDATE stations SET "%(year)s" = %(value)s WHERE station_id = %(id)s; """)
print('Writing interpolation data to database')
psycopg2.extras.execute_batch(cursor, query, update_data) # Multiple times faster than using execute() in a for loop
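For context, execute_batch runs the statement once per element of update_data, so the expected shape is one dict per UPDATE (ids and values below are made up):
update_data = [
    {'year': 2018, 'value': 9.87, 'id': 44},
    {'year': 2018, 'value': 10.02, 'id': 45},
]
# Assuming year is passed as an int, psycopg2 renders it unquoted, so the
# "%(year)s" placeholder becomes the quoted column identifier "2018".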
# print((time.time() - start_time), 'seconds')
print('Done')
# Deletes all existing rows from the database, then bulk-inserts the station data.
def createInsertStatement(cursor, station_list):
# create INSERT INTO table (columns) VALUES('%s',...)
# station_list.columns.astype(str)
df_columns = list(station_list)
columns = ['"' + column + '"' for column in df_columns]
columns = str(columns).replace('[', '').replace(']', '').replace("'", "").replace('\n', '').replace(' ', '')
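Assuming none of the column names contain spaces, the replace() chain above is equivalent to a single join:
columns = ','.join('"{}"'.format(column) for column in df_columns)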
@@ -172,31 +170,11 @@ def createInsertStatement(cursor, station_list):
def insert_data(station_list, cursor):
print('Inserting data into database')
if len(station_list) > 0:
# print(stationList)
cursor.execute("DELETE FROM stations;")
#df_columns = list(station_list)
# create (col1,col2,...)
# As integers like 2018, 2017, etc. are not possible as column names, double quotes have to be added. This requires some tricks and cleanups
#columns = ['"' + column + '"' for column in df_columns]
# for column in df_columns:
# columns.append('"' + column + '"')
#columns = str(columns).replace('[', '').replace(']', '').replace("'", "").replace('\n', '').replace(' ', '')
station_list["station_id"] = station_list.index
station_list = station_list.round(decimals=3)
# create VALUES('%s', '%s",...) one '%s' per column
#values = "VALUES({})".format(",".join(["%s" for _ in df_columns]))
# create INSERT INTO table (columns) VALUES('%s',...)
#insert_stmt = """INSERT INTO {} ({}) {}""".format('stations', columns, values)
insert_stmt = createInsertStatement(cursor, station_list)
#psycopg2.extras.execute_batch(cursor, insert_stmt, station_list.values)
cursor.execute(insert_stmt)
print('Inserting data into database Done')
@@ -208,22 +186,15 @@ def export(station_list):
connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) # Needs to be in AUTOCOMMIT mode for creating database
with connection.cursor() as cursor:
new_db = not check_for_db_existence(cursor, db_name)
# station_exists = False
connection = psycopg2.connect(database=db_name, user=user, password=pw, host=param_postgres["host"], port=port)
connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) # Needs to be in AUTOCOMMIT mode for creating database
with connection.cursor() as cursor:
# station_exists = check_for_stations_existence(cursor, "stations")
if new_db:
create_table(station_list, cursor)
insert_data(station_list, cursor)
amount_points = insert_empty_matrix_into_db(cursor)
# connection = psycopg2.connect(database=db_name, user=user, password=pw, host=param_postgres["host"], port=port,
# keepalives=1, keepalives_idle=30, keepalives_interval=10, keepalives_count=5)
with connection.cursor() as cursor:
create_matrix_data(cursor, amount_points)
print('Installation successful. You can run the api.py now.')
@@ -234,5 +205,3 @@ def export(station_list):
if connection:
connection.commit()
connection.close()