#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 14 13:43:45 2021

Export the station list to the database.

@author: geopeter
"""
import os
import sqlite3
import psycopg2
import psycopg2.extras
from psycopg2 import sql
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
import configparser
from api.GetAverageData import get_interpolation_data_for_point
from pathlib import Path
from sqlite3 import Error

cfg = configparser.ConfigParser()
cfg.read('../config.ini')
assert "POSTGRES" in cfg, "missing POSTGRES in config.ini"
param_postgres = cfg["POSTGRES"]
param_interpol = cfg["INTERPOLATION"]
stationGPD = None
cwd = Path(os.path.dirname(os.path.abspath(__file__)))
spatialite_path = 'D:/Uni/Master/01_SS2021/Automatisierte_Geodatenprozessierung/temperaturverteilung/dataacquisition/mod_spatialite-5.0.1-win-amd64'
os.environ['PATH'] = spatialite_path + ';' + os.environ['PATH']
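
# Expected config.ini layout (an assumption based on the keys read in this script;
# the real values live in ../config.ini and are not reproduced here):
#
#   [POSTGRES]
#   host = ...
#   port = ...
#   user = ...
#   password = ...
#   dbName = temperatures_berteld_morstein
#
#   [INTERPOLATION]
#   matrix_density = ...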


# Create DB "temperatures_berteld_morstein"
def create_db(cursor):
    print("Creating DB: ", param_postgres['dbName'])
    # Quote the database name via sql.Identifier instead of interpolating it into the query string
    create_db_query = sql.SQL("CREATE DATABASE {};").format(sql.Identifier(param_postgres['dbName']))
    cursor.execute(create_db_query)
    print('Done')


def drop_db(db_name):
    print("Drop DB: ", db_name)
    try:
        # DROP DATABASE cannot run inside a transaction block and not while connected to the
        # database being dropped, so connect to the maintenance DB "postgres" in autocommit mode.
        connection = psycopg2.connect(database="postgres", user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"])
        connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        with connection.cursor() as cursor:
            cursor.execute(sql.SQL("DROP DATABASE IF EXISTS {};").format(sql.Identifier(db_name)))
        connection.close()
    except psycopg2.DatabaseError as error:
        # do nothing, because test db is clean
        print(error)
        return


# If table "stations" does not exist yet, it has to be created
def check_for_table_existence(cursor):
    print("Checking for table existence")
    try:
        cursor.execute("SELECT 1 FROM stations LIMIT 1")
        cursor.fetchall()
        print('Table existing')
        print('Updating data')
        return True
    except Error:
        print('Table not found')
        return False


# Connect to DB "temperatures_berteld_morstein" to create table "stations".
def create_table(cursor, station_list):
    print('Creating table stations')
    df_columns = list(station_list)
    columns = ['"id" INTEGER', '"lon" NUMERIC', '"lat" NUMERIC', '"country" TEXT', '"file" TEXT']
    for column in df_columns:
        # Only year columns (19xx/20xx) of the station list become table columns
        if str(column).startswith('19') or str(column).startswith('20'):
            columns.append('"{}" NUMERIC'.format(column))
    columns_clean = ", ".join(columns)

    query = """CREATE TABLE stations ({});""".format(columns_clean)
    print(query)
    cursor.execute(query)
    print('Done')
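
# Example of the generated statement (assumption: the station list carries year columns
# "2017" and "2018"; the actual set of years depends on the input data):
#   CREATE TABLE stations ("id" INTEGER, "lon" NUMERIC, "lat" NUMERIC,
#                          "country" TEXT, "file" TEXT, "2017" NUMERIC, "2018" NUMERIC);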


# Loading matrix coordinates from csv and writing them into table "stations" in db "temperatures_berteld_morstein"
def insert_empty_matrix_into_db(cursor):
    print('Inserting empty matrix into database')
    matrix_density = param_interpol['matrix_density']

    with open('clipped_matrix_{}x{}.csv'.format(matrix_density, matrix_density), 'r') as matrix:
        matrix_data = matrix.readlines()
        matrix_points = 0
        for line in matrix_data[1:]:
            matrix_points += 1
            data = line.split(';')
            point_id = int("9999" + data[0].replace('"', ''))  # prefix matrix ids so they cannot collide with station ids
            lon = float(data[1])
            lat = float(data[2].replace('\n', ''))

            # Let the driver bind the values instead of formatting them into the SQL string
            query = "INSERT INTO stations (id, lon, lat, country) VALUES (?, ?, ?, 'Germany');"
            cursor.execute(query, (point_id, lon, lat))
        print('Done')
        return matrix_points
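
# Assumed layout of clipped_matrix_<density>x<density>.csv as parsed above
# (semicolon-separated, one header line, quoted id, then lon and lat), e.g.:
#   "id";"lon";"lat"
#   "0";13.405;52.52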


# Calculating interpolation data for matrix using the according function from the API, and writing it into the database in bulk
def create_matrix_data(cursor, amount_points):
    print('Calculating interpolation data for matrix')
    cursor.execute("SELECT id, lon, lat FROM stations WHERE file IS NULL;")
    matrix_points = cursor.fetchall()
    update_data = []
    step = max(1, round(amount_points / 10))  # progress output roughly every 10 %, guards against modulo by zero
    for i, point in enumerate(matrix_points):
        point_id, lon, lat = point
        interpolation_data = get_interpolation_data_for_point(lat, lon, sql.SQL('*'), cursor)

        # One entry per point and year, e.g. {'year': '2018', 'value': 9.8, 'id': 99990} (values illustrative)
        for year, value in interpolation_data.items():
            update_data.append({'year': year, 'value': value, 'id': point_id})
        if i % step == 0:
            finished = i / amount_points * 100
            print(round(finished), end="% ... ")
    print('', end=' 100%, Done. \n')

    print('Writing interpolation data to database')
    # Column names cannot be bound as query parameters, so compose one UPDATE per year
    # with sql.Identifier and batch the matching value/id pairs for that year.
    for year in sorted({row['year'] for row in update_data}):
        query = sql.SQL("""UPDATE stations SET {} = %(value)s WHERE id = %(id)s;""").format(sql.Identifier(str(year)))
        rows = [row for row in update_data if row['year'] == year]
        psycopg2.extras.execute_batch(cursor, query.as_string(cursor), rows)   # Multiple times faster than using execute() in a for loop
    print('Done')


# Inserting station data into database in bulk.
def insert_data(cursor, station_list):
    print('Inserting data into database')
    if len(station_list) > 0:
        df_columns = list(station_list)

        # Integers like 2018, 2017, etc. are not valid bare column names, so every column is double-quoted.
        # Column names cannot be bound as parameters, so they are formatted into the statement;
        # only the row values go through "?" placeholders.
        columns = ",".join('"{}"'.format(column) for column in df_columns)
        values = "VALUES ({})".format(",".join('?' for _ in df_columns))
        insert_stmt = "INSERT INTO stations ({}) {};".format(columns, values)

        # to_records() yields numpy scalars; convert them to plain Python values for the sqlite3 driver
        rows = [tuple(value.item() if hasattr(value, 'item') else value for value in record)
                for record in station_list[df_columns].to_records(index=False)]
        cursor.executemany(insert_stmt, rows)
        # cursor.execute("DELETE FROM stations;")
        #
        # df_columns = list(station_list)
        # # create (col1,col2,...)
        #
        # # As integers like 2018, 2017, etc. are not possible as column names, double quotes have to be added. This requires some tricks and cleanups
        # columns = ['"' + column + '"' for column in df_columns]
        # # for column in df_columns:
        # #     columns.append('"' + column + '"')
        # columns = str(columns).replace('[', '').replace(']', '').replace("'", "").replace('\n', '').replace(' ', '')
        # station_list = station_list.round(decimals=3)
        # print('colimns:')
        # print(columns)
        #
        # # create VALUES('%s', '%s",...) one '%s' per column
        # values = "VALUES({})".format(",".join([value for value in df_columns]))
        #
        # # create INSERT INTO table (columns) VALUES('%s',...)
        # insert_stmt = """INSERT INTO stations ({}) {};""".format(columns, values)
        # print(insert_stmt)
        # cursor.execute(insert_stmt)
    print('Done')


def create_connection(db_file):
    """Create a connection to a SQLite database and load the spatialite extension."""
    try:
        conn = sqlite3.connect(db_file)
        conn.enable_load_extension(True)
        conn.load_extension("mod_spatialite")
        print("SQLite version:", sqlite3.sqlite_version)
        return conn
    except Error as e:
        print(e)
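
# Quick way to verify that the spatialite extension actually loaded (assumption:
# mod_spatialite is found via the PATH entry set at the top of this file):
#   conn = create_connection('temperatures.db')
#   print(conn.execute("SELECT spatialite_version();").fetchone())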


def export(station_list):

    try:
        connection = create_connection('temperatures.db')
        c = connection.cursor()
        # c.execute("CREATE TABLE 123 (id INTEGER);")
        new_db = False if check_for_table_existence(c) else True

        if new_db: create_table(c, station_list)
        # c.close()
        # connection.commit()

        # c = connection.cursor()


        connection.commit()
        connection = create_connection('temperatures.db')
        c = connection.cursor()
        c.execute('select * from stations')
        print(c.fetchall())
        insert_data(c, station_list)
        c.execute('select * from stations')
        print(c.fetchall())
        amount_points = insert_empty_matrix_into_db(c)
        c.execute('select * from stations')
        print(c.fetchall())

        # create_matrix_data(c, amount_points)
        # c.close()
        # connection.commit()
        # connection.close()
    except Error as e:
        print(e)

    print('Installation successful. You can run the api.py now.')
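

# Minimal usage sketch (assumption: station_list is a pandas DataFrame whose columns match
# the "stations" table, i.e. id, lon, lat, country, file plus one NUMERIC column per year
# such as "2018"; the real station list comes from the data acquisition step).
if __name__ == '__main__':
    import pandas as pd

    demo_stations = pd.DataFrame(
        [[1, 13.405, 52.52, 'Germany', 'demo_station.txt', 9.8]],
        columns=['id', 'lon', 'lat', 'country', 'file', '2018'],
    )
    export(demo_stations)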