From 6fccf1f21773c9986009f897cfed4d69ebbc68cc Mon Sep 17 00:00:00 2001
From: Clemens Berteld <clemens@berteld.com>
Date: Wed, 15 Sep 2021 19:41:07 +0200
Subject: [PATCH] Add explanatory comments

---
 dataacquisition/ExportToDatabase.py | 10 +++++++---
 dataacquisition/GetAverageData.py   |  4 ++++
 dataacquisition/api.py              | 11 ++++++-----
 3 files changed, 17 insertions(+), 8 deletions(-)

diff --git a/dataacquisition/ExportToDatabase.py b/dataacquisition/ExportToDatabase.py
index f827fd4..09f5604 100644
--- a/dataacquisition/ExportToDatabase.py
+++ b/dataacquisition/ExportToDatabase.py
@@ -24,7 +24,7 @@ param_interpol = cfg["INTERPOLATION"]
 stationGPD = None
 
 
-# Use existing connection to DB "postgres" to create DB "temperatures_berteld_morstein"
+# Creates the database "temperatures_berteld_morstein"
 def create_db(db_name):
     print("Create DB: ", db_name)
     connection = psycopg2.connect(dbname='postgres', user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"])
@@ -49,6 +49,7 @@ def drop_db(db_name):
             return
 
 
+# Checks whether the database "temperatures_berteld_morstein" exists
 def dbexists(db_name):
     try:
         with psycopg2.connect(database=db_name, user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"]) as connection:
@@ -62,7 +63,7 @@ def dbexists(db_name):
         return False
 
 
-# Connect to DB "postgres" to check for database "temperatures_berteld_morstein"
+# Creates the database "temperatures_berteld_morstein" if it does not exist yet
 def check_for_db_existence(station_list, db_name):
     print("Checking for database existence")
     if dbexists(db_name):
@@ -72,7 +73,7 @@ def check_for_db_existence(station_list, db_name):
         create_table(station_list, db_name)
 
 
-# Connect to DB "temperatures_berteld_morstein" to create table "temperatures"
+# Connects to the database "temperatures_berteld_morstein" to create the table "temperatures"; also installs the PostGIS extension
 def create_table(station_list, db_name):
     df_columns = list(station_list)
     columns = ['id INTEGER', 'lon NUMERIC', 'lat NUMERIC', 'country TEXT', 'file TEXT']
@@ -89,6 +90,7 @@ def create_table(station_list, db_name):
             cursor.execute('CREATE EXTENSION postgis;')
 
 
+# Loads the matrix coordinates from CSV and writes them into the table "stations" in the database "temperatures_berteld_morstein"
 def insert_empty_matrix_into_db():
     print('Inserting empty matrix into database')
     matrix_density = param_interpol['matrix_density']
@@ -112,6 +114,7 @@ def insert_empty_matrix_into_db():
                     cursor.execute(query, values)
 
 
+# Calculates the interpolation data for the matrix using the corresponding API function and writes it into the database in bulk
 def create_matrix_data():
     print('Calculating interpolation data for matrix')
     # start_time = time.time()
@@ -135,6 +138,7 @@ def create_matrix_data():
     # print((time.time() - start_time), 'seconds')
 
 
+# Clears all existing data from the database, then inserts the station data in bulk
 def insert_data(station_list, db_name):
     print('Inserting data into database')
     with psycopg2.connect(database=db_name, user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"]) as connection:
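
As a side note on the existence check commented above: dbexists() connects to the target database and treats a failure as "does not exist". A minimal sketch of an alternative that queries pg_catalog from the maintenance DB "postgres" instead; the credentials below are placeholders, not the project's config values:

    import psycopg2

    # Hedged alternative to dbexists(): look the database up in pg_database
    # rather than attempting a connection. Credentials are placeholders.
    def db_exists(db_name):
        with psycopg2.connect(dbname='postgres', user='postgres', password='secret',
                              host='localhost', port=5432) as connection:
            with connection.cursor() as cursor:
                cursor.execute('SELECT 1 FROM pg_database WHERE datname = %s', (db_name,))
                return cursor.fetchone() is not None
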
diff --git a/dataacquisition/GetAverageData.py b/dataacquisition/GetAverageData.py
index c28edcc..e6855ec 100644
--- a/dataacquisition/GetAverageData.py
+++ b/dataacquisition/GetAverageData.py
@@ -11,6 +11,7 @@ param_postgres = cfg["POSTGRES"]
 param_interpol = cfg["INTERPOLATION"]
 
 
+# Gets all available year columns from the database
 def get_year_columns(cursor):
     columns = []
     query = sql.SQL("SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'stations';")
@@ -24,6 +25,7 @@ def get_year_columns(cursor):
     return columns
 
 
+# Finds the n nearest neighbours (n is defined in the config) and returns them ordered by distance
 def get_neighbours(cursor, lat, lon, columns):
     values = ''  # Used in second parameter of cursor.execute() (Avoids SQL injection)
     for n in [lat, lon]:
@@ -56,6 +58,7 @@ def calc_averages(neighbours, years):
     return averages
 
 
+# Calculates interpolation data with the inverse distance weighting (IDW) method: the farther away a value, the less weight it gets
 def calc_idw(neighbours, years):
     weighted_values = {}
     for year in years:
@@ -76,6 +79,7 @@ def calc_idw(neighbours, years):
     return weighted_values
 
 
+# Collects the preparatory data and executes the interpolation
 def get_interpolation_data_for_point(lat, lon, columns):
     with psycopg2.connect(database=param_postgres["dbName"], user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"]) as connection:
         with connection.cursor() as cursor:
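
The inverse distance weighting behind calc_idw() is easiest to see on toy data. A minimal sketch, assuming neighbours arrive as (distance, value) pairs; the real function reads them from the database and loops over the year columns, and a zero distance would need special-casing:

    # Inverse distance weighting: w_i = 1 / d_i, result = sum(w_i * v_i) / sum(w_i)
    def idw(neighbours):
        weights = [1.0 / distance for distance, _ in neighbours]  # closer -> heavier
        weighted_sum = sum(w * value for w, (_, value) in zip(weights, neighbours))
        return weighted_sum / sum(weights)

    # Stations at distances 1, 2 and 4 with values 10, 20 and 40:
    print(idw([(1.0, 10.0), (2.0, 20.0), (4.0, 40.0)]))  # ~17.14, pulled towards the nearest value
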
diff --git a/dataacquisition/api.py b/dataacquisition/api.py
index 5e16b76..8852cae 100644
--- a/dataacquisition/api.py
+++ b/dataacquisition/api.py
@@ -8,9 +8,7 @@ from GetAverageData import get_interpolation_data_for_point
 
 cfg = configparser.ConfigParser()
 cfg.read('config.ini')
-
 assert "POSTGRES" in cfg, "missing POSTGRES in config.ini"
-
 param_postgres = cfg["POSTGRES"]
 
 app = Flask(__name__)
@@ -26,6 +24,7 @@ def index():
     wheres = sql.SQL('')        # where filters
     values = ''                 # Used in second parameter of cursor.execute() (Avoids SQL injection)
 
+    # Filter for selecting specific years
     if 'years' in request.args:
         years = request.args['years'].split(',')
         years_clean = []
@@ -36,6 +35,7 @@ def index():
 
         columns = sql.SQL(years_clean)
 
+    # Filter for specific coordinates
     if 'lat' in request.args or 'lng' in request.args:
         lat = request.args['lat']
         lon = request.args['lon']
@@ -55,18 +55,19 @@ def index():
         #     for n in [country]:
         #         values = (*values, n)  # adding n to existing tuple
 
+        # Compose the query
         query = sql.SQL("SELECT array_to_json(array_agg(row_to_json(t))) from ("
                         "SELECT id, {} FROM stations "
                         "WHERE lon IS NOT NULL "    # Unnecessary filter, just so the real filters can always be written with AND
                         "{} "
                         ") t;").format(columns, wheres)
 
+        # Execute the query
         with psycopg2.connect(database=param_postgres["dbName"], user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"]) as connection:
             with connection.cursor() as cursor:
-                print(query.as_string(cursor))
-                print(values)
+                # print(query.as_string(cursor))
+                # print(values)
                 cursor.execute(query, values)
-
                 results = cursor.fetchall()[0][0]
                 return jsonify(results)
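
The `values` variable that several comments mention exists to keep user input out of the SQL string: psycopg2 substitutes %s placeholders itself, which is what "(Avoids SQL injection)" refers to. A minimal sketch of that pattern; the connection settings and the 'DE' filter value are assumptions for illustration:

    import psycopg2

    with psycopg2.connect(dbname='temperatures_berteld_morstein', user='postgres',
                          password='secret', host='localhost', port=5432) as connection:
        with connection.cursor() as cursor:
            # The tuple is passed separately; psycopg2 quotes and escapes it safely
            cursor.execute('SELECT id, lat, lon FROM stations WHERE country = %s', ('DE',))
            print(cursor.fetchall())
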
 
-- 
GitLab