diff --git a/dataacquisition/ExportToDatabase.py b/dataacquisition/ExportToDatabase.py
index 127698ffa41a3dee6a50fff4b67edaddcccc456f..f827fd4cab7c07f4550757ec90269e62c4e2e17c 100644
--- a/dataacquisition/ExportToDatabase.py
+++ b/dataacquisition/ExportToDatabase.py
@@ -20,6 +20,7 @@ cfg = configparser.ConfigParser()
 cfg.read('config.ini')
 assert "POSTGRES" in cfg, "missing POSTGRES in config.ini"
 param_postgres = cfg["POSTGRES"]
+param_interpol = cfg["INTERPOLATION"]
 stationGPD = None
 
 
@@ -65,7 +66,7 @@ def dbexists(db_name):
 def check_for_db_existence(station_list, db_name):
     print("Checking for database existence")
     if dbexists(db_name):
-        print('DB existing exists')
+        print('DB exists')
     else: 
         create_db(db_name)
         create_table(station_list, db_name)
@@ -89,9 +90,11 @@ def create_table(station_list, db_name):
 
 
 def insert_empty_matrix_into_db():
+    print('Inserting empty matrix into database')
+    matrix_density = param_interpol['matrix_density']
     with psycopg2.connect(database=param_postgres['dbName'], user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"]) as connection:
         with connection.cursor() as cursor:
-            with open('clipped_matrix_25x25.csv', 'r') as matrix:
+            with open('clipped_matrix_{}x{}.csv'.format(matrix_density, matrix_density), 'r') as matrix:
                 matrix_data = matrix.readlines()
                 for line in matrix_data[1:]:
                     values = ''  # Used in second parameter of cursor.execute() (Avoids SQL injection)
@@ -107,10 +110,11 @@ def insert_empty_matrix_into_db():
                     # print(query.as_string(cursor))
                     # print(values)
                     cursor.execute(query, values)
-    print('Inserted empty matrix into database')
 
 
 def create_matrix_data():
+    print('Calculating interpolation data for matrix')
+    # start_time = time.time()
     with psycopg2.connect(database=param_postgres['dbName'], user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"],
                           keepalives=1, keepalives_idle=30, keepalives_interval=10, keepalives_count=5) as connection:
         with connection.cursor() as cursor:
@@ -127,10 +131,12 @@ def create_matrix_data():
                     update_data.append({'year': year, 'value': value, 'id': id})
 
             query = sql.SQL("""UPDATE stations SET "%(year)s" = %(value)s WHERE id = %(id)s; """)
-            psycopg2.extras.execute_batch(cursor, query, update_data)   # 2 times faster than using execute() in a for loop, ~20 mins instead of 40
+            psycopg2.extras.execute_batch(cursor, query, update_data)   # Multiple times faster than using execute() in a for loop
+    # print((time.time() - start_time), 'seconds')
 
 
 def insert_data(station_list, db_name):
+    print('Inserting data into database')
     with psycopg2.connect(database=db_name, user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"]) as connection:
         with connection.cursor() as cursor:
 
@@ -160,8 +166,5 @@ def export(station_list):
     check_for_db_existence(station_list, param_postgres['dbName'])
     insert_data(station_list, param_postgres['dbName'])
     insert_empty_matrix_into_db()
-
-
-start_time = time.time()
-create_matrix_data()
-print((time.time() - start_time), 'seconds')
+    create_matrix_data()
+    print('Installation successful. You can now run api.py.')
diff --git a/dataacquisition/GetAverageData.py b/dataacquisition/GetAverageData.py
index 9f3871729ce29cd1915a61167dfcb4420d5eed2e..c28edcc6f5818c04142b3e5980fd1122b17df134 100644
--- a/dataacquisition/GetAverageData.py
+++ b/dataacquisition/GetAverageData.py
@@ -85,7 +85,7 @@ def get_interpolation_data_for_point(lat, lon, columns):
                 year_columns = (str(columns).replace("""SQL('""", "").replace('"', '').replace("')", "")).split(',')
             neighbours = get_neighbours(cursor, lat, lon, columns)
             avg_data = calc_idw(neighbours, year_columns)
-            print(avg_data)
+            # print(avg_data)
             return avg_data
 
 
diff --git a/dataacquisition/config.ini b/dataacquisition/config.ini
index 7d49d0c54244ff0843ebde14571547f556171636..6f7d770cc774e3c78e94a9d8b5f3d1851801a158 100644
--- a/dataacquisition/config.ini
+++ b/dataacquisition/config.ini
@@ -6,4 +6,7 @@ password = postgres
 dbName = temperatures_berteld_morstein
 
 [INTERPOLATION]
-amount_neighbours = 5
\ No newline at end of file
+; neighbouring measurements used for interpolation
+amount_neighbours = 5
+; matrix density in km; possible values are 25 or 10
+matrix_density = 25
\ No newline at end of file