s39174 / Temperaturverteilung / Commits

Commit 6fccf1f2, authored 3 years ago by Clemens Berteld

Wrote some comments

Parent: cc8a6173
Changes: 3 changed files, with 17 additions and 8 deletions

  dataacquisition/ExportToDatabase.py   +7  −3
  dataacquisition/GetAverageData.py     +4  −0
  dataacquisition/api.py                +6  −5
dataacquisition/ExportToDatabase.py  +7 −3  (view file @ 6fccf1f2)

@@ -24,7 +24,7 @@ param_interpol = cfg["INTERPOLATION"]
 stationGPD = None

-# Use existing connection to DB "postgres" to create DB "temperatures_berteld_morstein"
+# Create DB "temperatures_berteld_morstein"
 def create_db(db_name):
     print("Create DB: ", db_name)
     connection = psycopg2.connect(dbname='postgres', user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"])

@@ -49,6 +49,7 @@ def drop_db(db_name):
     return

+# Checks, if database "temperatures_berteld_morstein" exists
 def dbexists(db_name):
     try:
         with psycopg2.connect(database=db_name, user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"]) as connection:

@@ -62,7 +63,7 @@ def dbexists(db_name):
         return False

-# Connect to DB "postgres" to check for database "temperatures_berteld_morstein"
+# If database "temperatures_berteld_morstein" not exists, create it
 def check_for_db_existence(station_list, db_name):
     print("Checking for database existence")
     if dbexists(db_name):

@@ -72,7 +73,7 @@ def check_for_db_existence(station_list, db_name):
        create_table(station_list, db_name)

-# Connect to DB "temperatures_berteld_morstein" to create table "temperatures"
+# Connect to DB "temperatures_berteld_morstein" to create table "temperatures". Also installs PostGIS
 def create_table(station_list, db_name):
     df_columns = list(station_list)
     columns = ['id INTEGER', 'lon NUMERIC', 'lat NUMERIC', 'country TEXT', 'file TEXT']

@@ -89,6 +90,7 @@ def create_table(station_list, db_name):
         cursor.execute('CREATE EXTENSION postgis;')

+# Loading matrix coordinates from csv and writing it into table "stations" in db "temperatures_berteld_morstein"
 def insert_empty_matrix_into_db():
     print('Inserting empty matrix into database')
     matrix_density = param_interpol['matrix_density']

@@ -112,6 +114,7 @@ def insert_empty_matrix_into_db():
         cursor.execute(query, values)

+# Calculating interpolation data for matrix using the according function from the API, and writing it into the database in bulk
 def create_matrix_data():
     print('Calculating interpolation data for matrix')
     # start_time = time.time()

@@ -135,6 +138,7 @@ def create_matrix_data():
     # print((time.time() - start_time), 'seconds')

+# Dumping all existing data from database. Inserting station data into database in bulk.
 def insert_data(station_list, db_name):
     print('Inserting data into database')
     with psycopg2.connect(database=db_name, user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"]) as connection:
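The commit only touches comments in this file, but the functions they describe share one psycopg2 pattern: open a connection to the maintenance database "postgres" (or to "temperatures_berteld_morstein" itself) with the credentials from param_postgres, then run DDL on it. A minimal sketch of the create_db side of that pattern; the connect() call mirrors the diff, while the autocommit flag and the CREATE DATABASE statement are assumptions, since the visible lines stop at the connection:

import psycopg2
from psycopg2 import sql

# Assumed shape of the config section read from config.ini
param_postgres = {"user": "postgres", "password": "secret", "host": "localhost", "port": 5432}

def create_db(db_name):
    print("Create DB: ", db_name)
    # Connect to the always-present maintenance DB "postgres"
    connection = psycopg2.connect(dbname='postgres', user=param_postgres["user"],
                                  password=param_postgres["password"],
                                  host=param_postgres["host"], port=param_postgres["port"])
    connection.autocommit = True  # CREATE DATABASE cannot run inside a transaction block
    with connection.cursor() as cursor:
        cursor.execute(sql.SQL("CREATE DATABASE {}").format(sql.Identifier(db_name)))
    connection.close()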
dataacquisition/GetAverageData.py  +4 −0  (view file @ 6fccf1f2)

@@ -11,6 +11,7 @@ param_postgres = cfg["POSTGRES"]
 param_interpol = cfg["INTERPOLATION"]

+# Getting all available year columns from database
 def get_year_columns(cursor):
     columns = []
     query = sql.SQL("SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'stations';")

@@ -24,6 +25,7 @@ def get_year_columns(cursor):
     return columns

+# Find n (defined in config) neighbours and return them ordered by distance
 def get_neighbours(cursor, lat, lon, columns):
     values = ''  # Used in second parameter of cursor.execute() (Avoids SQL injection)
     for n in [lat, lon]:

@@ -56,6 +58,7 @@ def calc_averages(neighbours, years):
     return averages

+# Calculating interpolation data by Inverse Distance Weighted method. Values are decreasingly important with increasing distance
 def calc_idw(neighbours, years):
     weighted_values = {}
     for year in years:

@@ -76,6 +79,7 @@ def calc_idw(neighbours, years):
     return weighted_values

+# Collecting preparation data and execute interpolation
 def get_interpolation_data_for_point(lat, lon, columns):
     with psycopg2.connect(database=param_postgres["dbName"], user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"]) as connection:
         with connection.cursor() as cursor:
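Only comments change in GetAverageData.py, but the new one on calc_idw names the method: Inverse Distance Weighting, where a neighbour's value counts less the farther away it is. A minimal sketch of that weighting, assuming neighbours arrive as dicts with a "distance" key and one value per year column (the actual structure produced by get_neighbours is not visible in this diff):

# Hypothetical illustration of the IDW idea behind calc_idw
def idw(neighbours, year, power=1):
    # A neighbour at distance 0 is the queried point itself; use its value directly
    for n in neighbours:
        if n["distance"] == 0:
            return n[year]
    weights = [1.0 / (n["distance"] ** power) for n in neighbours]
    values = [n[year] for n in neighbours]
    return sum(w * v for w, v in zip(weights, values)) / sum(weights)

# Example: stations at 1 km and 3 km; the closer one dominates the estimate
print(idw([{"distance": 1.0, "2020": 10.0}, {"distance": 3.0, "2020": 16.0}], "2020"))  # 11.5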
dataacquisition/api.py  +6 −5  (view file @ 6fccf1f2)

@@ -8,9 +8,7 @@ from GetAverageData import get_interpolation_data_for_point
 cfg = configparser.ConfigParser()
 cfg.read('config.ini')
 assert "POSTGRES" in cfg, "missing POSTGRES in config.ini"
 param_postgres = cfg["POSTGRES"]

 app = Flask(__name__)

@@ -26,6 +24,7 @@ def index():
     wheres = sql.SQL('')  # where filters
     values = ''  # Used in second parameter of cursor.execute() (Avoids SQL injection)

+    # Filter for getting specific years
     if 'years' in request.args:
         years = request.args['years'].split(',')
         years_clean = []

@@ -36,6 +35,7 @@ def index():
         columns = sql.SQL(years_clean)

+    # Filter for specific coordinates
     if 'lat' in request.args or 'lng' in request.args:
         lat = request.args['lat']
         lon = request.args['lon']

@@ -55,18 +55,19 @@ def index():
     # for n in [country]:
     #     values = (*values, n)  # adding n to existing tuple

     # Composing query
     query = sql.SQL("SELECT array_to_json(array_agg(row_to_json(t))) from ("
                     "SELECT id, {} FROM stations "
                     "WHERE lon IS NOT NULL "  # Unnecessary filter, just so the real filters can always be written with AND
                     "{}"
                     ") t;").format(columns, wheres)

     # Actual query execution
     with psycopg2.connect(database=param_postgres["dbName"], user=param_postgres["user"], password=param_postgres["password"], host=param_postgres["host"], port=param_postgres["port"]) as connection:
         with connection.cursor() as cursor:
-            print(query.as_string(cursor))
-            print(values)
+            # print(query.as_string(cursor))
+            # print(values)
             cursor.execute(query, values)
             results = cursor.fetchall()[0][0]
             return jsonify(results)
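The route in api.py builds its SQL in two layers, as the inline comments in the diff note: identifiers and query fragments are composed with psycopg2.sql, while user-supplied values are handed to cursor.execute() as a separate parameter, which is what keeps SQL injection out. A sketch of that composition pattern with a hypothetical build_query() helper; the real route may pass coordinates to get_interpolation_data_for_point rather than a WHERE clause, so the lat/lon filter below is illustrative only:

from psycopg2 import sql

def build_query(year_columns, lat=None, lon=None):
    # Year column names become safely quoted identifiers, never string-concatenated
    columns = sql.SQL(', ').join([sql.Identifier(c) for c in year_columns])
    wheres = sql.SQL('')
    values = ()
    if lat is not None and lon is not None:
        # %s placeholders are filled by cursor.execute(), not by string formatting
        wheres = sql.SQL("AND lat = %s AND lon = %s")
        values = (lat, lon)
    query = sql.SQL("SELECT array_to_json(array_agg(row_to_json(t))) from ("
                    "SELECT id, {} FROM stations "
                    "WHERE lon IS NOT NULL "  # always true, so the real filters can start with AND
                    "{}"
                    ") t;").format(columns, wheres)
    return query, values

# Usage inside the Flask route:
#   query, values = build_query(["2019", "2020"], lat=52.52, lon=13.41)
#   cursor.execute(query, values)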