Removes decommissioned stations from list
FL550 committed Sep 25, 2023
1 parent 7233052 commit 8dd49e6
Showing 4 changed files with 53 additions and 29 deletions.
76 changes: 50 additions & 26 deletions development/generate_stations.py
@@ -1,56 +1,80 @@
import datetime
from secret import API_KEY
import requests
from requests.exceptions import Timeout
from bs4 import BeautifulSoup
import json
import re

-request = requests.get(
-    "https://www.dwd.de/DE/leistungen/met_verfahren_mosmix/mosmix_stationskatalog.cfg?view=nasPublication"
-)
+success = False
+print("Retrieving MOSMIX stations catalogue...")
+while not success:
+    request = requests.get(
+        "https://www.dwd.de/DE/leistungen/met_verfahren_mosmix/mosmix_stationskatalog.cfg?view=nasPublication"
+    )
+    if request.status_code == 200:
+        success = True

request.encoding = request.apparent_encoding
-data = request.text.split("\n")
+mosmix_data = request.text.split("\n")
print("Done.")

-request = requests.get("https://opendata.dwd.de/weather/weather_reports/poi/")
+success = False
+print("Retrieving POI stations catalogue...")
+while not success:
+    request = requests.get("https://opendata.dwd.de/weather/weather_reports/poi/")
+    if request.status_code == 200:
+        success = True
soup = BeautifulSoup(request.text, "html.parser")
print("Parsing...")
links_html = soup.find_all("a")
-links = []
+poi_links = []
for link in links_html:
    link = link.text[0:-9]
    link = link.replace("_", "")
-    links.append(link)
+    poi_links.append(link)
print("Done.")

-request = requests.get(
-    "https://www.dwd.de/DE/leistungen/klimadatendeutschland/statliste/statlex_html.html?view=nasPublication"
-)
+success = False
+print("Retrieving station name catalogue...")
+while not success:
+    request = requests.get(
+        "https://www.dwd.de/DE/leistungen/klimadatendeutschland/statliste/statlex_html.html?view=nasPublication"
+    )
+    if request.status_code == 200:
+        success = True
print("Parsing...")
soup = BeautifulSoup(request.text, "html.parser")
print("Done.")
stations_list = {}
rows_html = soup.table.find_all("tr")
for row in rows_html:
    cols = row.find_all("td")
-    if len(cols) > 5:
-        stations_list[cols[3].text.strip()] = {
-            "name": cols[0].text.strip(),
-            "lat": cols[4].text.strip(),
-            "lon": cols[5].text.strip(),
-            "elev": cols[6].text.strip(),
-            "bundesland": cols[8].text.strip(),
-            "report_available": 1 if cols[3].text.strip() in links else 0,
-        }
-
-range_iter = iter(range(len(data) - 1))
+    if len(cols) > 5 and "." in cols[10].text:
+        end_year = int(cols[10].text.split(".")[2])
+        if end_year >= datetime.datetime.now().year:
+            stations_list[cols[3].text.strip()] = {
+                "name": cols[0].text.strip(),
+                "lat": cols[4].text.strip(),
+                "lon": cols[5].text.strip(),
+                "elev": cols[6].text.strip(),
+                "bundesland": cols[8].text.strip(),
+                "report_available": 1 if cols[3].text.strip() in poi_links else 0,
+            }
+print(f"Found {len(stations_list.keys())} active stations.")
+range_iter = iter(range(len(mosmix_data) - 1))
first_run = True
stations = {}
for i in range_iter:
    if first_run:
-        data[i] += f" POI REGION"
+        mosmix_data[i] += f" POI REGION"
        next(range_iter)
        first_run = False
        continue
-    print(data[i])
+    print(mosmix_data[i])
    groups = re.search(
        "^(?P<id>\S*)\s*(?P<icao>\S*)\s+(?P<name>.+?)\s+(?P<lat>-?\d{1,2}\.\d{2})\s*(?P<lon>-?\d{1,3}\.\d{2})\s+(?P<elev>-*\d+)$",
-        data[i],
+        mosmix_data[i],
    )
    is_in_stations_list = groups.group("id") in stations_list.keys()
    region = ""
@@ -119,8 +143,8 @@
        if is_in_stations_list
        else groups.group("elev"),
        "bundesland": region,
-        "report_available": 1 if groups.group("id") in links else 0,
+        "report_available": 1 if groups.group("id") in poi_links else 0,
    }

-with open("stations.json", "w", encoding="utf-8") as f:
+with open("../stations.json", "w", encoding="utf-8") as f:
    json.dump(stations, f, ensure_ascii=False)
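
The substantive change in this file is the end-of-service check on the DWD station catalogue rows: column 10 of the statlex table holds a date, and a station is only kept when the year part of that date is the current year or later. A minimal sketch of that filter in isolation, assuming the same dd.mm.yyyy date format the script relies on (the example rows below are made up for illustration):

```python
import datetime

def is_active(end_date: str) -> bool:
    # Mirrors the commit's check: keep a station only if the end-of-service
    # year (third field of a dd.mm.yyyy date) is not already in the past.
    if "." not in end_date:
        return False  # rows without a parseable date are skipped, as in the script
    end_year = int(end_date.split(".")[2])
    return end_year >= datetime.datetime.now().year

# Made-up example rows: (station id, end-of-service date)
rows = [("10381", "31.12.2099"), ("10382", "31.12.2019")]
active_ids = [station_id for station_id, end_date in rows if is_active(end_date)]
print(active_ids)  # only the still-commissioned station remains
```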
2 changes: 1 addition & 1 deletion setup.py
@@ -5,7 +5,7 @@

setuptools.setup(
    name="simple_dwd_weatherforecast",
-    version="2.0.13",
+    version="2.0.14",
    author="Max Fermor",
    description="A simple tool to retrieve a weather forecast from DWD OpenData",
    long_description=long_description,
2 changes: 1 addition & 1 deletion simple_dwd_weatherforecast/stations.json

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion stations.json

Large diffs are not rendered by default.
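
For orientation, each entry in the regenerated stations.json is keyed by station id and holds the fields written by generate_stations.py above: name, lat, lon, elev, bundesland and report_available. A small sketch of reading the file back, with a hypothetical station id:

```python
import json

# Load the regenerated catalogue (path relative to the repository root).
with open("stations.json", encoding="utf-8") as f:
    stations = json.load(f)

print(f"{len(stations)} stations in catalogue")

# "10381" is a hypothetical id; any key present in the file works the same way.
station = stations.get("10381")
if station is not None:
    print(station["name"], station["lat"], station["lon"], station["report_available"])
```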
