diff --git a/cgi-bin/request/asos.py b/cgi-bin/request/asos.py
index 1064588ad7..6251bc35b5 100644
--- a/cgi-bin/request/asos.py
+++ b/cgi-bin/request/asos.py
@@ -1,586 +1,3 @@
-""".. title:: ASOS/METAR Backend Service
+"""implemented in /pylib/iemweb/request/asos.py"""
-`IEM API Mainpage `_
-
-Documentation on /cgi-bin/request/asos.py
------------------------------------------
-
-This cgi-bin script provides METAR/ASOS data. It has an IP-based rate limit for
-requests to prevent abuse. A `503 Service Unavailable` response will be
-returned if the server is under heavy load.
-
-Changelog:
-
-- **2024-04-01** Fix recently introduced bug with time sort order.
-- **2024-03-29** This service had a short-lived bug whereby if the `tz` value
- was not provided, it would default to `America/Chicago` instead of `UTC`.
-- **2024-03-29** Migrated to pydantic based request validation. Will be
- monitoring for any issues.
-- **2024-03-14** Initial documentation release.
-
-Example Usage
--------------
-
-Get the past 24 hours of air temperature and dew point for Des Moines and
-Mason City, Iowa.
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/asos.py?data=tmpf&data=dwpf&station=DSM&station=MCW&hours=24
-
-"""
-
-import datetime
-import sys
-from io import StringIO
-from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
-
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_dbconn
-from pyiem.network import Table as NetworkTable
-from pyiem.util import utc
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-
-NULLS = {"M": "M", "null": "null", "empty": ""}
-TRACE_OPTS = {"T": "T", "null": "null", "empty": "", "0.0001": "0.0001"}
-AVAILABLE = [
- "tmpf",
- "dwpf",
- "relh",
- "drct",
- "sknt",
- "p01i",
- "alti",
- "mslp",
- "vsby",
- "gust",
- "skyc1",
- "skyc2",
- "skyc3",
- "skyc4",
- "skyl1",
- "skyl2",
- "skyl3",
- "skyl4",
- "wxcodes",
- "ice_accretion_1hr",
- "ice_accretion_3hr",
- "ice_accretion_6hr",
- "peak_wind_gust",
- "peak_wind_drct",
- "peak_wind_time",
- "feel",
- "metar",
- "snowdepth",
-]
-# inline is so much faster!
-CONV_COLS = {
- "tmpc": "f2c(tmpf) as tmpc",
- "dwpc": "f2c(dwpf) as dwpc",
- "p01m": "p01i * 25.4 as p01m",
- "sped": "sknt * 1.15 as sped",
- "gust_mph": "gust * 1.15 as gust_mph",
- "peak_wind_gust_mph": "peak_wind_gust * 1.15 as peak_wind_gust_mph",
-}
-
-
-class MyModel(CGIModel):
- """Request Model."""
-
- data: ListOrCSVType = Field(
- None,
- description=(
- "The data columns to return, defaults to all. The available "
- "options are: tmpf, dwpf, relh, drct, sknt, p01i, alti, mslp, "
- "vsby, gust, skyc1, skyc2, skyc3, skyc4, skyl1, skyl2, skyl3, "
- "skyl4, wxcodes, ice_accretion_1hr, ice_accretion_3hr, "
- "ice_accretion_6hr, peak_wind_gust, peak_wind_drct, "
- "peak_wind_time, feel, metar, snowdepth"
- ),
- )
- direct: bool = Field(
- False,
- description=(
- "If set to 'yes', the data will be directly downloaded as a file."
- ),
- )
- elev: bool = Field(
- False,
- description=(
- "If set to 'yes', the elevation (m) of the station will be "
- "included in the output."
- ),
- )
- ets: AwareDatetime = Field(
- None,
- description=("The end time of the data request."),
- )
- format: str = Field(
- "onlycomma",
- description=(
- "The format of the data, defaults to onlycomma. The available "
- "options are: onlycomma, tdf."
- ),
- )
- hours: int = Field(
- None,
- description=(
- "The number of hours of data to return prior to the current "
- "timestamp. Can not be more than 24 if no stations are specified."
- ),
- )
- latlon: bool = Field(
- False,
- description=(
- "If set to 'yes', the latitude and longitude of the station will "
- "be included in the output."
- ),
- )
- missing: str = Field(
- "M",
- description=(
- "How to represent missing values, defaults to M. Other options "
- "are 'null' and 'empty'."
- ),
- pattern="^(M|null|empty)$",
- )
- nometa: bool = Field(
- False,
- description=(
- "If set to 'yes', the column headers will not be included in the "
- "output."
- ),
- )
- network: ListOrCSVType = Field(
- None,
- description="The network to query, defaults to all networks.",
- )
- report_type: ListOrCSVType = Field(
- [],
- description=(
- "The report type to query, defaults to all. The available "
- "options are: 1 (HFMETAR), 3 (Routine), 4 (Specials)."
- ),
- )
- station: ListOrCSVType = Field(
- None,
- description=(
- "The station identifier to query, defaults to all stations and "
- "if you do not specify any stations, you can only request 24 "
- "hours of data."
- ),
- )
- sts: AwareDatetime = Field(
- None,
- description=("The start time of the data request."),
- )
- trace: str = Field(
- "0.0001",
- description=(
- "How to represent trace values, defaults to 0.0001. Other "
- "options are 'null' and 'empty'."
- ),
- pattern="^(0.0001|null|empty|T)$",
- )
- tz: str = Field(
- "UTC",
- description=(
- "The timezone to use for the request timestamps (when not "
- "providing already tz-aware ``sts`` and ``ets`` values) and the "
- "output valid timestamp. It is highly recommended to set this to "
- "UTC to ensure it is set. This string should be "
- "something that the Python ``zoneinfo`` library can understand."
- ),
- )
-    year1: int = Field(
-        None,
-        description=(
-            "The year of the start time, used when `sts` is not provided. "
-            "The value is interpreted in the timezone given by `tz`."
-        ),
-    )
-    month1: int = Field(
-        None,
-        description=(
-            "The month of the start time, used when `sts` is not provided. "
-            "The value is interpreted in the timezone given by `tz`."
-        ),
-    )
-    day1: int = Field(
-        None,
-        description=(
-            "The day of the start time, used when `sts` is not provided. "
-            "The value is interpreted in the timezone given by `tz`."
-        ),
-    )
-    hour1: int = Field(
-        0,
-        description=(
-            "The hour of the start time, used when `sts` is not provided. "
-            "The value is interpreted in the timezone given by `tz`."
-        ),
-    )
-    minute1: int = Field(
-        0,
-        description=(
-            "The minute of the start time, used when `sts` is not provided. "
-            "The value is interpreted in the timezone given by `tz`."
-        ),
-    )
-    year2: int = Field(
-        None,
-        description=(
-            "The year of the end time, used when `ets` is not provided. "
-            "The value is interpreted in the timezone given by `tz`."
-        ),
-    )
-    month2: int = Field(
-        None,
-        description=(
-            "The month of the end time, used when `ets` is not provided. "
-            "The value is interpreted in the timezone given by `tz`."
-        ),
-    )
-    day2: int = Field(
-        None,
-        description=(
-            "The day of the end time, used when `ets` is not provided. "
-            "The value is interpreted in the timezone given by `tz`."
-        ),
-    )
-    hour2: int = Field(
-        0,
-        description=(
-            "The hour of the end time, used when `ets` is not provided. "
-            "The value is interpreted in the timezone given by `tz`."
-        ),
-    )
-    minute2: int = Field(
-        0,
-        description=(
-            "The minute of the end time, used when `ets` is not provided. "
-            "The value is interpreted in the timezone given by `tz`."
-        ),
-    )
-
-
-def fmt_time(val, missing, _trace, tzinfo):
- """Format timestamp."""
- if val is None:
- return missing
- return (val.astimezone(tzinfo)).strftime("%Y-%m-%d %H:%M")
-
-
-def fmt_trace(val, missing, trace, _tzinfo):
- """Format precip."""
- if val is None:
- return missing
- # careful with this comparison
- if 0 < val < 0.009999:
- return trace
- return f"{val:.2f}"
-
-
-def fmt_simple(val, missing, _trace, _tzinfo):
- """Format simplely."""
- if val is None:
- return missing
- return dance(val).replace(",", " ").replace("\n", " ")
-
-
-def fmt_wxcodes(val, missing, _trace, _tzinfo):
- """Format weather codes."""
- if val is None:
- return missing
- return " ".join(val)
-
-
-def fmt_f2(val, missing, _trace, _tzinfo):
- """Simple 2 place formatter."""
- if val is None:
- return missing
- return f"{val:.2f}"
-
-
-def fmt_f0(val, missing, _trace, _tzinfo):
- """Simple 0 place formatter."""
- if val is None:
- return missing
- return f"{val:.0f}"
-
-
-def dance(val):
- """Force the val to ASCII."""
- return val.encode("ascii", "ignore").decode("ascii")
-
-
-def overloaded():
- """Prevent automation from overwhelming the server"""
-
- with get_dbconn("asos") as pgconn:
- cursor = pgconn.cursor()
- cursor.execute("select one::float from system_loadavg")
- val = cursor.fetchone()[0]
- if val > 30: # Cut back on logging
- sys.stderr.write(f"/cgi-bin/request/asos.py over cpu thres: {val}\n")
- return val > 20
-
-
-def get_stations(form):
- """Figure out the requested station"""
- if not form["station"]:
- if form["network"] is not None:
- nt = NetworkTable(form["network"], only_online=False)
- return list(nt.sts.keys())
- return []
- stations = form["station"]
- if not stations:
- return []
- # allow folks to specify the ICAO codes for K*** sites
- for i, station in enumerate(stations):
- if len(station) == 4 and station[0] == "K":
- stations[i] = station[1:]
- return stations
-
-
-def get_time_bounds(form, tzinfo):
- """Figure out the exact time bounds desired"""
- if form["hours"] is not None:
- ets = utc()
- sts = ets - datetime.timedelta(hours=int(form.get("hours")))
- return sts, ets
- # Here lie dragons, so tricky to get a proper timestamp
- try:
-
- def _get(num):
- return datetime.datetime(
- form[f"year{num}"],
- form[f"month{num}"],
- form[f"day{num}"],
- form[f"hour{num}"],
- form[f"minute{num}"],
- )
-
- if form["sts"] is None:
- form["sts"] = _get("1").replace(tzinfo=tzinfo)
- if form["ets"] is None:
- form["ets"] = _get("2").replace(tzinfo=tzinfo)
- except Exception:
- return None, None
-
- if form["sts"] == form["ets"]:
- form["ets"] += datetime.timedelta(days=1)
- if form["sts"] > form["ets"]:
- form["sts"], form["ets"] = form["ets"], form["sts"]
- return form["sts"], form["ets"]
-
-
-def build_querycols(form):
- """Which database columns correspond to our query."""
- req = form["data"]
- if not req or "all" in req:
- return AVAILABLE
- res = []
- for col in req:
- if col == "presentwx":
- res.append("wxcodes")
- elif col in AVAILABLE:
- res.append(col)
- elif col in CONV_COLS:
- res.append(CONV_COLS[col])
- if not res:
- res.append("tmpf")
- return res
-
-
-def toobusy(pgconn, name):
- """Check internal logging..."""
- cursor = pgconn.cursor()
- cursor.execute(
- "SELECT pid from pg_stat_activity where query ~* %s",
- (name,),
- )
- over = cursor.rowcount > 6
- if over and cursor.rowcount > 9: # cut back on logging
- sys.stderr.write(f"asos.py cursors {cursor.rowcount}: {name}\n")
- cursor.close()
- return over
-
-
-@iemapp(help=__doc__, parse_times=False, schema=MyModel)
-def application(environ, start_response):
- """Go main"""
- if environ["REQUEST_METHOD"] == "OPTIONS":
- start_response("400 Bad Request", [("Content-type", "text/plain")])
- yield b"Allow: GET,POST,OPTIONS"
- return
- if overloaded():
- start_response(
- "503 Service Unavailable", [("Content-type", "text/plain")]
- )
- yield b"ERROR: server over capacity, please try later"
- return
- try:
- tzname = environ["tz"].strip()
- if tzname in ["etc/utc", ""]:
- tzname = "UTC"
- tzinfo = ZoneInfo(tzname)
- except ZoneInfoNotFoundError as exp:
- start_response("400 Bad Request", [("Content-type", "text/plain")])
- sys.stderr.write(f"asos.py invalid tz: {exp}\n")
- yield b"Invalid Timezone (tz) provided"
- return
- pgconn = get_dbconn("asos")
- cursor_name = f"mystream_{environ.get('REMOTE_ADDR')}"
- if toobusy(pgconn, cursor_name):
- pgconn.close()
- start_response(
- "503 Service Unavailable", [("Content-type", "text/plain")]
- )
- yield b"ERROR: server over capacity, please try later"
- return
- acursor = pgconn.cursor(cursor_name, scrollable=False)
- acursor.itersize = 2000
-
- report_types = [int(i) for i in environ["report_type"]]
- sts, ets = get_time_bounds(environ, tzinfo)
- if sts is None:
- pgconn.close()
- start_response(
- "422 Unprocessable Entity", [("Content-type", "text/plain")]
- )
- yield b"Invalid times provided."
- return
- stations = get_stations(environ)
- if not stations:
- # We are asking for all-data. We limit the amount of data returned to
- # one day or less
- if (ets - sts) > datetime.timedelta(hours=24):
- pgconn.close()
- start_response("400 Bad Request", [("Content-type", "text/plain")])
- yield b"When requesting all-stations, must be less than 24 hours."
- return
- delim = environ["format"]
- headers = []
- if environ["direct"]:
- headers.append(("Content-type", "application/octet-stream"))
- suffix = "tsv" if delim in ["tdf", "onlytdf"] else "csv"
- if not stations or len(stations) > 1:
- fn = f"asos.{suffix}"
- else:
- fn = f"{stations[0]}.{suffix}"
- headers.append(("Content-Disposition", f"attachment; filename={fn}"))
- else:
- headers.append(("Content-type", "text/plain"))
- start_response("200 OK", headers)
-
- # How should null values be represented
- missing = NULLS[environ["missing"]]
- # How should trace values be represented
- trace = TRACE_OPTS[environ["trace"]]
-
- querycols = build_querycols(environ)
-
- if delim in ["tdf", "onlytdf"]:
- rD = "\t"
- else:
- rD = ","
-
- gisextra = environ["latlon"]
- elev_extra = environ["elev"]
- table = "alldata"
- metalimiter = ""
- colextra = "0 as lon, 0 as lat, 0 as elev, "
- if gisextra or elev_extra:
- colextra = "ST_X(geom) as lon, ST_Y(geom) as lat, elevation, "
- table = "alldata a JOIN stations t on (a.station = t.id)"
- metalimiter = "t.network ~* 'ASOS' and "
-
- rlimiter = ""
- # Munge legacy report_type=2 into 2,3,4 see akrherz/iem#104
- if 2 in report_types:
- report_types.extend([3, 4])
- if len(report_types) == 1:
- rlimiter = f" and report_type = {report_types[0]}"
- elif len(report_types) > 1:
- rlimiter = f" and report_type in {tuple(report_types)}"
- sqlcols = ",".join(querycols)
- sorder = "DESC" if environ["hours"] is not None else "ASC"
- if stations:
- acursor.execute(
- f"SELECT station, valid, {colextra} {sqlcols} from {table} "
- f"WHERE {metalimiter} valid >= %s and valid < %s and "
- f"station = ANY(%s) {rlimiter} ORDER by valid {sorder}",
- (sts, ets, stations),
- )
- else:
- acursor.execute(
- f"SELECT station, valid, {colextra} {sqlcols} from {table} "
- f"WHERE {metalimiter} valid >= %s and valid < %s {rlimiter} "
- f"ORDER by valid {sorder}",
- (sts, ets),
- )
- sio = StringIO()
- if delim not in ["onlytdf", "onlycomma"]:
- sio.write(f"#DEBUG: Format Typ -> {delim}\n")
- sio.write(f"#DEBUG: Time Period -> {sts} {ets}\n")
- sio.write(f"#DEBUG: Time Zone -> {tzinfo}\n")
- sio.write(
- "#DEBUG: Data Contact -> daryl herzmann "
- "akrherz@iastate.edu 515-294-5978\n"
- )
- sio.write(f"#DEBUG: Entries Found -> {acursor.rowcount}\n")
- nometa = environ["nometa"]
- if not nometa:
- sio.write(f"station{rD}valid{rD}")
- if gisextra:
- sio.write(f"lon{rD}lat{rD}")
- if elev_extra:
- sio.write(f"elevation{rD}")
-    # hack to reduce aliased columns like "f2c(tmpf) as tmpc" to just "tmpc"
- sio.write(
- f"{rD.join([c.rsplit(' as ', maxsplit=1)[-1] for c in querycols])}"
- )
- sio.write("\n")
-
- ff = {
- "wxcodes": fmt_wxcodes,
- "metar": fmt_simple,
- "skyc1": fmt_simple,
- "skyc2": fmt_simple,
- "skyc3": fmt_simple,
- "skyc4": fmt_simple,
- "p01i": fmt_trace,
- "p01i * 25.4 as p01m": fmt_trace,
- "ice_accretion_1hr": fmt_trace,
- "ice_accretion_3hr": fmt_trace,
- "ice_accretion_6hr": fmt_trace,
- "peak_wind_time": fmt_time,
- "snowdepth": fmt_f0,
- }
- # The default is the %.2f formatter
- formatters = [ff.get(col, fmt_f2) for col in querycols]
-
- for rownum, row in enumerate(acursor):
- if not nometa:
- sio.write(row[0] + rD)
- sio.write(
- (row[1].astimezone(tzinfo)).strftime("%Y-%m-%d %H:%M") + rD
- )
- if gisextra:
- sio.write(f"{row[2]:.4f}{rD}{row[3]:.4f}{rD}")
- if elev_extra:
- sio.write(f"{row[4]:.2f}{rD}")
- sio.write(
- rD.join(
- [
- func(val, missing, trace, tzinfo)
- for func, val in zip(formatters, row[5:])
- ]
- )
- + "\n"
- )
- if rownum > 0 and rownum % 1000 == 0:
- yield sio.getvalue().encode("ascii", "ignore")
- sio = StringIO()
- acursor.close()
- pgconn.close()
- yield sio.getvalue().encode("ascii", "ignore")
+from iemweb.request.asos import application # noqa: F401
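For quick reference, the docstring removed above remains the documentation for
this endpoint, and the shim keeps the URL stable. A minimal sketch of the
documented example query (past 24 hours of `tmpf`/`dwpf` for DSM and MCW),
assuming the third-party `requests` package is available:

```python
import requests

URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/asos.py"
params = {
    # list values encode as repeated data=/station= query parameters,
    # matching the ListOrCSVType fields in the request model above
    "data": ["tmpf", "dwpf"],
    "station": ["DSM", "MCW"],
    "hours": 24,
    "tz": "UTC",  # explicitly set, as the docstring recommends
}
resp = requests.get(URL, params=params, timeout=60)
resp.raise_for_status()  # a 503 here means the server is over capacity
print(resp.text[:500])
```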
diff --git a/cgi-bin/request/asos1min.py b/cgi-bin/request/asos1min.py
index f054625acd..363c02508e 100644
--- a/cgi-bin/request/asos1min.py
+++ b/cgi-bin/request/asos1min.py
@@ -1,202 +1,3 @@
-""".. title:: ASOS 1 Minute Data Request
+"""implemented in /pylib/iemweb/request/asos1min.py"""
-Documentation for /cgi-bin/request/asos1min.py
-----------------------------------------------
-
-This service provides the ASOS 1 minute data collected by NCEI and is not the
-"one minute data" via MADIS. There is an availability delay of about 24 hours
-due to the way NCEI collects the data from the ASOS sites.
-
-Examples
---------
-
-Request air temperature data for Ames IA KAMW for 2022, but only provide data
-at 1 hour intervals. Provide timestamps in UTC timezone.
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/asos1min.py?station=KAMW\
-&vars=tmpf&sts=2022-01-01T00:00Z&ets=2023-01-01T00:00Z&sample=1hour\
-&what=download&tz=UTC
-
-"""
-
-from io import StringIO
-
-from pydantic import AwareDatetime, Field
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-
-SAMPLING = {
- "1min": 1,
- "5min": 5,
- "10min": 10,
- "20min": 20,
- "1hour": 60,
-}
-DELIM = {"space": " ", "comma": ",", "tab": "\t", ",": ","}
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- delim: str = Field(
- "comma",
- description="Delimiter to use in output",
- pattern="^(comma|space|tab|,)$",
- )
- ets: AwareDatetime = Field(None, description="End timestamp for data")
- gis: bool = Field(
- False, description="Include Lat/Lon information in output"
- )
- sample: str = Field(
- "1min",
- description="Sampling period for data",
- pattern="^(1min|5min|10min|20min|1hour)$",
- )
- station: ListOrCSVType = Field(
- ..., description="Station(s) to request data for"
- )
- sts: AwareDatetime = Field(None, description="Start timestamp for data")
- tz: str = Field(
- "UTC",
- description="Timezone to use for the output and input timestamps",
- )
- vars: ListOrCSVType = Field(
- None, description="Variable(s) to request data for"
- )
- what: str = Field(
- "dl", description="Output format", pattern="^(download|view)$"
- )
- year1: int = Field(None, description="Start year for data")
- month1: int = Field(None, description="Start month for data")
- day1: int = Field(None, description="Start day for data")
- hour1: int = Field(0, description="Start hour for data")
- minute1: int = Field(0, description="Start minute for data")
- year2: int = Field(None, description="End year for data")
- month2: int = Field(None, description="End month for data")
- day2: int = Field(None, description="End day for data")
- hour2: int = Field(0, description="End hour for data")
- minute2: int = Field(0, description="End minute for data")
-
-
-def get_station_metadata(environ, stations) -> dict:
- """build a dictionary."""
- cursor = environ["iemdb.mesosite.cursor"]
- cursor.execute(
- """
- SELECT id, name, round(ST_x(geom)::numeric, 4) as lon,
- round(ST_y(geom)::numeric, 4) as lat from stations
- where id = ANY(%s) and network ~* 'ASOS'
- """,
- (stations,),
- )
- res = {}
- for row in cursor:
- res[row["id"]] = dict(name=row["name"], lon=row["lon"], lat=row["lat"])
- for station in stations:
- if station not in res:
- raise IncompleteWebRequest(f"Unknown station provided: {station}")
- return res
-
-
-def compute_prefixes(sio, environ, delim, stations, tz) -> dict:
- """"""
- station_meta = get_station_metadata(environ, stations)
- prefixes = {}
- if environ["gis"]:
- sio.write(
- delim.join(
- ["station", "station_name", "lat", "lon", f"valid({tz})", ""]
- )
- )
- for station in stations:
- prefixes[station] = (
- delim.join(
- [
- station,
- station_meta[station]["name"].replace(delim, "_"),
- str(station_meta[station]["lat"]),
- str(station_meta[station]["lon"]),
- ]
- )
- + delim
- )
- else:
- sio.write(delim.join(["station", "station_name", f"valid({tz})", ""]))
- for station in stations:
- prefixes[station] = (
- delim.join(
- [
- station,
- station_meta[station]["name"].replace(delim, "_"),
- ]
- )
- + delim
- )
- return prefixes
-
-
-@iemapp(
- iemdb=["asos1min", "mesosite"],
- iemdb_cursor="blah",
- help=__doc__,
- schema=Schema,
-)
-def application(environ, start_response):
- """Handle mod_wsgi request."""
- if environ["station"] is None:
- raise IncompleteWebRequest("No station= was specified in request.")
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("Insufficient start timestamp variables.")
- # Ensure we have uppercase stations
- stations = [s.upper() for s in environ["station"]]
- delim = DELIM[environ["delim"]]
- sample = SAMPLING[environ["sample"]]
- tz = environ["tz"]
- varnames = environ["vars"]
- if not varnames:
- raise IncompleteWebRequest("No vars= was specified in request.")
- cursor = environ["iemdb.asos1min.cursor"]
- # get a list of columns we have in the alldata_1minute table
- cursor.execute(
- "select column_name from information_schema.columns where "
- "table_name = 'alldata_1minute' ORDER by column_name"
- )
- columns = [row["column_name"] for row in cursor]
- # cross check varnames now
- for varname in varnames:
- if varname not in columns:
- raise IncompleteWebRequest(
- f"Unknown variable {varname} specified in request."
- )
- cursor.execute(
- """
- select *,
- to_char(valid at time zone %s, 'YYYY-MM-DD hh24:MI') as local_valid
- from alldata_1minute
- where station = ANY(%s) and valid >= %s and valid < %s and
- extract(minute from valid) %% %s = 0 ORDER by station, valid
- """,
- (tz, stations, environ["sts"], environ["ets"], sample),
- )
- headers = []
- if environ["what"] == "download":
- headers.append(("Content-type", "application/octet-stream"))
- headers.append(
- ("Content-Disposition", "attachment; filename=changeme.txt")
- )
- else:
- headers.append(("Content-type", "text/plain"))
-
- sio = StringIO()
- prefixes = compute_prefixes(sio, environ, delim, stations, tz)
-
- sio.write(delim.join(varnames) + "\n")
- rowfmt = delim.join([f"%({var})s" for var in varnames])
- for row in cursor:
- sio.write(prefixes[row["station"]])
- sio.write(f"{row['local_valid']}{delim}")
- sio.write((rowfmt % row).replace("None", "M"))
- sio.write("\n")
-
- start_response("200 OK", headers)
- return [sio.getvalue().encode("ascii")]
+from iemweb.request.asos1min import application # noqa: F401
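Likewise for the 1-minute service; a minimal sketch of the documented example
request (KAMW air temperature for 2022, sampled hourly), assuming `requests`:

```python
import requests

URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/asos1min.py"
params = {
    "station": "KAMW",
    "vars": "tmpf",
    "sts": "2022-01-01T00:00Z",
    "ets": "2023-01-01T00:00Z",
    "sample": "1hour",   # one of 1min|5min|10min|20min|1hour
    "what": "download",  # "view" returns text/plain instead
    "tz": "UTC",
}
resp = requests.get(URL, params=params, timeout=120)
resp.raise_for_status()
with open("kamw_tmpf_2022.txt", "wb") as fh:
    fh.write(resp.content)
```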
diff --git a/cgi-bin/request/coop.py b/cgi-bin/request/coop.py
index 006305a016..0b4a091d1d 100644
--- a/cgi-bin/request/coop.py
+++ b/cgi-bin/request/coop.py
@@ -1,262 +1,3 @@
-""".. title:: IEM Climodat Data Export
+"""implemented in /pylib/iemweb/request/coop.py"""
-Documentation for /cgi-bin/request/coop.py
-------------------------------------------
-
-This service is somewhat poorly named ``coop.py``, but it provides the IEM
-Climodat data, which is a combination of NWS COOP and NWS ASOS/AWOS data. There
-are a number of knobs here as this is one of the most popular datasets the IEM
-produces.
-
-Changelog
----------
-
-- 2024-06-22: Initial documentation and backend conversion to pydantic.
-
-"""
-
-import datetime
-
-from iemweb.request.coop import (
- do_apsim,
- do_century,
- do_daycent,
- do_dndc,
- do_salus,
- do_simple,
- do_swat,
-)
-from metpy.units import units
-from pydantic import Field
-from pyiem.database import get_dbconnc
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.network import Table as NetworkTable
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-
-DEGC = units.degC
-DEGF = units.degF
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- delim: str = Field(
- "comma",
- description=(
- "The delimiter to use in the output file. "
- "Options: comma, tab, space"
- ),
- )
- gis: bool = Field(
- False,
- description="Include latitude and longitude columns in the output.",
- )
- inclatlon: bool = Field(
- False,
- description="Include latitude and longitude columns in the output.",
- )
- model: str = Field(
- None,
- description=(
- "The model to use for output. Options: simple, apsim, "
- "century, daycent, salus, dndc, swat. Specifying this will "
- "override the 'vars' option."
- ),
- )
- network: str = Field(
- "IACLIMATE", description="The network to use for station lookups."
- )
- scenario: bool = Field(
- False,
- description=(
- "Should data from a previous year, specified by scenario_year "
- "be used to fill out the present year."
- ),
- )
- scenario_year: int = Field(
- 2020,
- description=(
- "The year to use as a scenario year, if scenario is true."
- ),
- )
- station: ListOrCSVType = Field(
- [], description="List of stations to include in the output."
- )
- stations: ListOrCSVType = Field(
- [],
- description=(
- "List of stations to include in the output. Legacy variable name."
- ),
- )
- vars: ListOrCSVType = Field(
- [], description="List of variables to include in the output."
- )
- what: str = Field("view", description="The type of output to generate.")
- with_header: bool = Field(
- True, description="Include a header row in the output."
- )
- year1: int = Field(
- datetime.date.today().year,
- description="The starting year for the data request.",
- )
- month1: int = Field(
- 1,
- description="The starting month for the data request.",
- )
- day1: int = Field(
- 1,
- description="The starting day for the data request.",
- )
- year2: int = Field(
- datetime.date.today().year,
- description="The ending year for the data request.",
- )
- month2: int = Field(
- datetime.date.today().month,
- description="The ending month for the data request.",
- )
- day2: int = Field(
- datetime.date.today().day,
- description="The ending day for the data request.",
- )
-
-
-def get_scenario_period(ctx):
- """Compute the inclusive start and end dates to fetch scenario data for
- Arguments:
- ctx dictionary context this app was called with
- """
- if ctx["ets"].month == 2 and ctx["ets"].day == 29:
- sts = datetime.date(ctx["scenario_year"], ctx["ets"].month, 28)
- else:
- sts = datetime.date(
- ctx["scenario_year"], ctx["ets"].month, ctx["ets"].day
- )
- ets = datetime.date(ctx["scenario_year"], 12, 31)
- return sts, ets
-
-
-def sane_date(year, month, day):
- """Attempt to account for usage of days outside of the bounds for
- a given month"""
- # Calculate the last date of the given month
- nextmonth = datetime.date(year, month, 1) + datetime.timedelta(days=35)
- lastday = nextmonth.replace(day=1) - datetime.timedelta(days=1)
- return datetime.date(year, month, min(day, lastday.day))
-
-
-def get_cgi_dates(environ):
- """Figure out which dates are requested via the form, we shall attempt
- to account for invalid dates provided!"""
-
- ets = min(
- sane_date(environ["year2"], environ["month2"], environ["day2"]),
- datetime.date.today() - datetime.timedelta(days=1),
- )
-
- return [
- sane_date(environ["year1"], environ["month1"], environ["day1"]),
- ets,
- ]
-
-
-def get_cgi_stations(environ):
- """Figure out which stations the user wants, return a list of them"""
- reqlist = environ["station"]
- if not reqlist:
- reqlist = environ["stations"]
- if not reqlist:
- return []
- if "_ALL" in reqlist:
- network = environ["network"]
- nt = NetworkTable(network, only_online=False)
- return list(nt.sts.keys())
-
- return reqlist
-
-
-@iemapp(help=__doc__, schema=Schema)
-def application(environ, start_response):
- """go main go"""
- ctx = {}
- ctx["stations"] = get_cgi_stations(environ)
- if not ctx["stations"]:
- raise IncompleteWebRequest("No stations were specified.")
- ctx["sts"], ctx["ets"] = get_cgi_dates(environ)
- ctx["myvars"] = environ["vars"]
- # Model specification trumps vars[]
- if environ["model"] is not None:
- ctx["myvars"] = [environ["model"]]
- ctx["what"] = environ["what"]
- ctx["delim"] = environ["delim"]
- ctx["inclatlon"] = environ["gis"]
- ctx["scenario"] = environ["scenario"]
- ctx["scenario_year"] = 2099
- if ctx["scenario"] == "yes":
- ctx["scenario_year"] = environ["scenario_year"]
- ctx["scenario_sts"], ctx["scenario_ets"] = get_scenario_period(ctx)
- ctx["with_header"] = environ["with_header"]
-
- # TODO: this code stinks and is likely buggy
- headers = []
- if (
- "apsim" in ctx["myvars"]
- or "daycent" in ctx["myvars"]
- or "century" in ctx["myvars"]
- or "salus" in ctx["myvars"]
- ):
- if ctx["what"] == "download":
- headers.append(("Content-type", "application/octet-stream"))
- headers.append(
- ("Content-Disposition", "attachment; filename=metdata.txt")
- )
- else:
- headers.append(("Content-type", "text/plain"))
- elif "dndc" not in ctx["myvars"] and ctx["what"] != "excel":
- if ctx["what"] == "download":
- headers.append(("Content-type", "application/octet-stream"))
- dlfn = "changeme.txt"
- if len(ctx["stations"]) < 10:
- dlfn = f"{'_'.join(ctx['stations'])}.txt"
- headers.append(
- ("Content-Disposition", f"attachment; filename={dlfn}")
- )
- else:
- headers.append(("Content-type", "text/plain"))
- elif "dndc" in ctx["myvars"]:
- headers.append(("Content-type", "application/octet-stream"))
- headers.append(
- ("Content-Disposition", "attachment; filename=dndc.zip")
- )
- elif "swat" in ctx["myvars"]:
- headers.append(("Content-type", "application/octet-stream"))
- headers.append(
- ("Content-Disposition", "attachment; filename=swatfiles.zip")
- )
- elif ctx["what"] == "excel":
- headers.append(("Content-type", EXL))
- headers.append(
- ("Content-Disposition", "attachment; filename=nwscoop.xlsx")
- )
-
- conn, cursor = get_dbconnc("coop")
- start_response("200 OK", headers)
- # OK, now we fret
- if "daycent" in ctx["myvars"]:
- res = do_daycent(cursor, ctx)
- elif "century" in ctx["myvars"]:
- res = do_century(cursor, ctx)
- elif "apsim" in ctx["myvars"]:
- res = do_apsim(cursor, ctx)
- elif "dndc" in ctx["myvars"]:
- res = do_dndc(cursor, ctx)
- elif "salus" in ctx["myvars"]:
- res = do_salus(cursor, ctx)
- elif "swat" in ctx["myvars"]:
- res = do_swat(None, ctx)
- else:
- res = do_simple(cursor, ctx)
- cursor.close()
- conn.close()
- return [res]
+from iemweb.request.coop import application # noqa: F401
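The Climodat service has many knobs; a minimal sketch under assumed values
(the station identifier and variable names below are illustrative, not
verified against the backend), assuming `requests`:

```python
import requests

URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/coop.py"
params = {
    "network": "IACLIMATE",
    "station": "IA0200",        # hypothetical Climodat identifier
    "vars": "high,low,precip",  # hypothetical variable names
    "what": "download",
    "year1": 2023, "month1": 1, "day1": 1,
    "year2": 2023, "month2": 12, "day2": 31,
}
resp = requests.get(URL, params=params, timeout=120)
resp.raise_for_status()
print(resp.text[:500])
```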
diff --git a/cgi-bin/request/daily.py b/cgi-bin/request/daily.py
index e865521795..06a276e7d6 100644
--- a/cgi-bin/request/daily.py
+++ b/cgi-bin/request/daily.py
@@ -1,237 +1,3 @@
-""".. title:: IEM Computed Daily Summaries
+"""implemented in /pylib/iemweb/request/daily.py"""
-Documentation for /cgi-bin/request/daily.py
--------------------------------------------
-
-This data source contains a combination of IEM computed calendar day summaries
-and some more official totals with some sites reporting explicit values. One
-should also note that typically the airport stations are for a 24 hour period
-over standard time, which means 1 AM to 1 AM daylight time.
-
-Example Usage
--------------
-
-Request all high temperature data for Ames, IA (AMW) for the month of January
-2019:
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/daily.py?sts=2019-01-01&ets=2019-01-31&network=IA_ASOS&stations=AMW&var=max_temp_f&format=csv
-
-
-Request daily precipitation and the climatology for all stations in Washington
-state on 23 June 2023 in Excel format:
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/daily.py?sts=2023-06-23&ets=2023-06-23&network=WA_ASOS&stations=_ALL&var=precip_in,climo_precip_in&format=excel
-
-"""
-
-import copy
-import sys
-from datetime import datetime
-from io import BytesIO, StringIO
-
-import pandas as pd
-from pydantic import Field
-from pyiem.database import get_dbconn, get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.network import Table as NetworkTable
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-from sqlalchemy import text
-
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-DEFAULT_COLS = (
- "max_temp_f,min_temp_f,max_dewpoint_f,min_dewpoint_f,precip_in,"
- "avg_wind_speed_kts,avg_wind_drct,min_rh,avg_rh,max_rh,"
- "climo_high_f,climo_low_f,climo_precip_in,snow_in,snowd_in,"
- "min_feel,avg_feel,max_feel,max_wind_speed_kts,max_wind_gust_kts,"
- "srad_mj"
-).split(",")
-
-
-class MyCGI(CGIModel):
- ets: datetime = Field(None, description="End date to query")
- format: str = Field("csv", description="The format of the output")
- na: str = Field("None", description="The NA value to use")
- network: str = Field(..., description="Network Identifier")
- station: ListOrCSVType = Field(
- [],
- description=(
- "Comma delimited or multi-param station identifiers, "
- "_ALL for all stations in network (deprecated)"
- ),
- )
- stations: ListOrCSVType = Field(
- [],
- description=(
- "Comma delimited or multi-param station identifiers, "
- "_ALL for all stations in network"
- ),
- )
- sts: datetime = Field(None, description="Start date to query")
- var: ListOrCSVType = Field(
- None,
- description=(
- "Comma delimited or multi-param variable names to include in "
- f"output, columns are: {DEFAULT_COLS}"
- ),
- )
- year1: int = Field(None, description="Start year when sts is not provided")
- month1: int = Field(
- None, description="Start month when sts is not provided"
- )
- day1: int = Field(None, description="Start day when sts is not provided")
- year2: int = Field(None, description="End year when ets is not provided")
- month2: int = Field(None, description="End month when ets is not provided")
- day2: int = Field(None, description="End day when ets is not provided")
-
-
-def overloaded():
- """Prevent automation from overwhelming the server"""
-
- with get_dbconn("iem") as pgconn:
- cursor = pgconn.cursor()
- cursor.execute("select one::float from system_loadavg")
- val = cursor.fetchone()[0]
- if val > 25: # Cut back on logging
- sys.stderr.write(f"/cgi-bin/request/daily.py over cpu thres: {val}\n")
- return val > 20
-
-
-def get_climate(network, stations):
- """Fetch the climatology for these stations"""
- nt = NetworkTable(network, only_online=False)
- if not nt.sts:
- return "ERROR: Invalid network specified"
- clisites = []
- for station in stations:
- if station == "_ALL":
- for sid in nt.sts:
- clid = nt.sts[sid]["ncei91"]
- if clid not in clisites:
- clisites.append(clid)
- break
- if station not in nt.sts:
- return f"ERROR: station: {station} not found in network: {network}"
- clid = nt.sts[station]["ncei91"]
- if clid not in clisites:
- clisites.append(clid)
- with get_sqlalchemy_conn("coop") as conn:
- df = pd.read_sql(
- text(
- """
- SELECT station, to_char(valid, 'mmdd') as sday,
- high as climo_high_f, low as climo_low_f,
- precip as climo_precip_in from ncei_climate91
- where station = ANY(:clisites)
- """
- ),
- conn,
- params={"clisites": clisites},
- )
- return df
-
-
-def get_data(network, sts, ets, stations, cols, na, fmt):
- """Go fetch data please"""
- if not cols:
- cols = copy.deepcopy(DEFAULT_COLS)
- cols.insert(0, "day")
- cols.insert(0, "station")
- climate = get_climate(network, stations)
- if isinstance(climate, str):
- return climate
-
- with get_sqlalchemy_conn("iem") as conn:
- df = pd.read_sql(
- text(
- """
- SELECT id as station, day, max_tmpf as max_temp_f,
- min_tmpf as min_temp_f, max_dwpf as max_dewpoint_f,
- min_dwpf as min_dewpoint_f,
- pday as precip_in,
- avg_sknt as avg_wind_speed_kts,
- vector_avg_drct as avg_wind_drct,
- min_rh, avg_rh, max_rh,
- snow as snow_in,
- snowd as snowd_in,
- min_feel, avg_feel, max_feel,
- max_sknt as max_wind_speed_kts,
- max_gust as max_wind_gust_kts,
- srad_mj, ncei91, to_char(day, 'mmdd') as sday
- from summary s JOIN stations t
- on (t.iemid = s.iemid) WHERE
- s.day >= :st and s.day <= :et and
- t.network = :n and t.id = ANY(:ds)
- ORDER by day ASC"""
- ),
- conn,
- params={"st": sts, "et": ets, "n": network, "ds": stations},
- )
- # Join to climate data frame
- df = df.merge(
- climate,
- how="left",
- left_on=["ncei91", "sday"],
- right_on=["station", "sday"],
- suffixes=("", "_r"),
- )
- df = df[df.columns.intersection(cols)]
- if na != "blank":
- df = df.fillna(na)
- if fmt == "json":
- return df.to_json(orient="records")
- if fmt == "excel":
- bio = BytesIO()
- with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
- df.to_excel(writer, sheet_name="Data", index=False)
- return bio.getvalue()
-
- sio = StringIO()
- df.to_csv(sio, index=False)
- return sio.getvalue()
-
-
-@iemapp(help=__doc__, schema=MyCGI, parse_times=True)
-def application(environ, start_response):
- """See how we are called"""
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("Missing start and end times")
- sts, ets = environ["sts"].date(), environ["ets"].date()
-
- if sts.year != ets.year and overloaded():
- start_response(
- "503 Service Unavailable", [("Content-type", "text/plain")]
- )
- return [b"ERROR: server over capacity, please try later"]
-
- fmt = environ.get("format", "csv")
- stations = environ["stations"]
- if not stations:
- stations = environ["station"]
- if not stations:
- start_response("200 OK", [("Content-type", "text/plain")])
- return [b"ERROR: No stations specified for request"]
- network = environ["network"][:20]
- if "_ALL" in stations:
- if (ets - sts).days > 366:
- raise IncompleteWebRequest(
- "Must request a year or less when requesting all stations"
- )
- stations = list(NetworkTable(network, only_online=False).sts.keys())
- cols = environ["var"]
- na = environ["na"]
- if na not in ["M", "None", "blank"]:
- start_response("200 OK", [("Content-type", "text/plain")])
- return [b"ERROR: Invalid `na` value provided. {M, None, blank}"]
- if fmt != "excel":
- start_response("200 OK", [("Content-type", "text/plain")])
- return [
- get_data(network, sts, ets, stations, cols, na, fmt).encode(
- "ascii"
- )
- ]
- headers = [
- ("Content-type", EXL),
- ("Content-disposition", "attachment; Filename=daily.xlsx"),
- ]
- start_response("200 OK", headers)
- return [get_data(network, sts, ets, stations, cols, na, fmt)]
+from iemweb.request.daily import application # noqa: F401
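A minimal sketch of the first documented example (January 2019 high
temperatures for AMW) with `requests`:

```python
import requests

URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/daily.py"
params = {
    "sts": "2019-01-01",
    "ets": "2019-01-31",
    "network": "IA_ASOS",
    "stations": "AMW",
    "var": "max_temp_f",
    "format": "csv",
}
resp = requests.get(URL, params=params, timeout=120)
resp.raise_for_status()
print(resp.text.splitlines()[0])  # expected header: station,day,max_temp_f
```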
diff --git a/cgi-bin/request/feel.py b/cgi-bin/request/feel.py
index d22558932a..b89a6e941f 100644
--- a/cgi-bin/request/feel.py
+++ b/cgi-bin/request/feel.py
@@ -1,56 +1,3 @@
-"""FEEL data download"""
+"""implemented in /pylib/iemweb/request/feel.py"""
-# pylint: disable=abstract-class-instantiated
-from io import BytesIO
-
-import pandas as pd
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import iemapp
-from sqlalchemy import text
-
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-
-def run(sts, ets, start_response):
- """Get data!"""
- params = {"sts": sts, "ets": ets}
- with get_sqlalchemy_conn("other") as dbconn:
- sql = (
- "SELECT * from feel_data_daily where "
- "valid >= :sts and valid < :ets ORDER by valid ASC"
- )
- df = pd.read_sql(text(sql), dbconn, params=params)
-
- sql = (
- "SELECT * from feel_data_hourly where "
- "valid >= :sts and valid < :ets ORDER by valid ASC"
- )
- df2 = pd.read_sql(text(sql), dbconn, params=params)
-
- def fmt(val):
- """Lovely hack."""
- return val.strftime("%Y-%m-%d %H:%M")
-
- df2["valid"] = df2["valid"].apply(fmt)
-
- bio = BytesIO()
- with pd.ExcelWriter(bio, engine="openpyxl") as writer:
- df.to_excel(writer, sheet_name="Daily Data", index=False)
- df2.to_excel(writer, sheet_name="Hourly Data", index=False)
-
- headers = [
- ("Content-type", EXL),
- ("Content-disposition", "attachment;Filename=feel.xlsx"),
- ]
- start_response("200 OK", headers)
- return bio.getvalue()
-
-
-@iemapp()
-def application(environ, start_response):
- """Get stuff"""
- if "sts" not in environ:
- raise IncompleteWebRequest("GET parameters for start time missing")
-
- return [run(environ["sts"], environ["ets"], start_response)]
+from iemweb.request.feel import application # noqa: F401
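The FEEL download takes only a time window and returns a two-sheet Excel
workbook. A minimal sketch, assuming `requests` and that the generic
`sts`/`ets` parsing accepts ISO-8601 strings:

```python
import requests

URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/feel.py"
# assumed timestamp format; the shared iemapp parsing also accepts
# year1/month1/... style parameters
params = {"sts": "2023-01-01T00:00Z", "ets": "2023-02-01T00:00Z"}
resp = requests.get(URL, params=params, timeout=120)
resp.raise_for_status()
with open("feel.xlsx", "wb") as fh:  # "Daily Data" + "Hourly Data" sheets
    fh.write(resp.content)
```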
diff --git a/cgi-bin/request/gis/awc_gairmets.py b/cgi-bin/request/gis/awc_gairmets.py
index 2e2d486780..b84c5b6b6a 100644
--- a/cgi-bin/request/gis/awc_gairmets.py
+++ b/cgi-bin/request/gis/awc_gairmets.py
@@ -1,138 +1,3 @@
-""".. title:: AWC Graphical AIRMETs
+"""implemented in /pylib/iemweb/request/gis/awc_gairmets.py"""
-Documentation for /cgi-bin/request/gis/awc_gairmets.py
-------------------------------------------------------
-
-This service emits the archive of IEM's best attempt at processing graphical
-AIRMETs.
-
-"""
-
-# Local
-import tempfile
-import zipfile
-from io import BytesIO
-
-# Third Party
-import fiona
-import geopandas as gpd
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import CGIModel, iemapp
-
-fiona.supported_drivers["KML"] = "rw"
-PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- format: str = Field("shp", description="Output Format")
- sts: AwareDatetime = Field(None, description="Start Time")
- ets: AwareDatetime = Field(None, description="End Time")
- year1: int = Field(
- None, description="Start Year in UTC, when sts not set."
- )
- month1: int = Field(
- None, description="Start Month in UTC, when sts not set."
- )
- day1: int = Field(None, description="Start Day in UTC, when sts not set.")
- hour1: int = Field(0, description="Start Hour in UTC, when sts not set.")
- minute1: int = Field(
- 0, description="Start Minute in UTC, when sts not set."
- )
- year2: int = Field(None, description="End Year in UTC, when ets not set.")
- month2: int = Field(
- None, description="End Month in UTC, when ets not set."
- )
- day2: int = Field(None, description="End Day in UTC, when ets not set.")
- hour2: int = Field(0, description="End Hour in UTC, when ets not set.")
- minute2: int = Field(0, description="End Minute in UTC, when ets not set.")
-
-
-def run(ctx, start_response):
- """Do something!"""
- common = "at time zone 'UTC', 'YYYY-MM-DD\"T\"HH24:MI:00\"Z\"'"
- schema = {
- "geometry": "Polygon",
- "properties": {
- "NAME": "str:64",
- "LABEL": "str:4",
- "GML_ID": "str:32",
- "VALID_AT": "str:20",
- "VALID_FM": "str:20",
- "VALID_TO": "str:20",
- "ISSUTIME": "str:20",
- "PROD_ID": "str:36",
- "STATUS": "str:32",
- "HZTYPE": "str:256",
- "WXCOND": "str:256",
- },
- }
- with get_sqlalchemy_conn("postgis") as conn:
- df = gpd.read_postgis(
- "select label, gml_id, "
- f"gml_id || ' ' || to_char(valid_at {common}) as name, "
- f"to_char(valid_at {common}) as valid_at, "
- f"to_char(valid_from {common}) as valid_fm, "
- f"to_char(valid_to {common}) as valid_to, "
- f"to_char(issuetime {common}) as issutime, "
- "product_id as prod_id, status, hazard_type as hztype, "
- "array_to_string(weather_conditions, ',') as wxcond, geom "
- "from airmets WHERE issuetime >= %s and "
- "issuetime < %s ORDER by valid_at ASC",
- conn,
- params=(
- ctx["sts"],
- ctx["ets"],
- ),
- geom_col="geom",
- )
- if df.empty:
- start_response("200 OK", [("Content-type", "text/plain")])
- return b"ERROR: no results found for your query"
- df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
- fn = f"airmets_{ctx['sts']:%Y%m%d%H%M}_{ctx['ets']:%Y%m%d%H%M}"
- if ctx["format"] == "kml":
- fp = BytesIO()
- with fiona.drivers():
- df.to_file(fp, driver="KML", NameField="NAME", engine="fiona")
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.kml"),
- ]
- start_response("200 OK", headers)
- return fp.getvalue()
-
- with tempfile.TemporaryDirectory() as tmpdir:
- df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
-
- zio = BytesIO()
- with zipfile.ZipFile(
- zio, mode="w", compression=zipfile.ZIP_DEFLATED
- ) as zf:
- with open(PRJFILE, encoding="utf-8") as fh:
- zf.writestr(f"{fn}.prj", fh.read())
- for suffix in ["shp", "shx", "dbf"]:
- zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.zip"),
- ]
- start_response("200 OK", headers)
-
- return zio.getvalue()
-
-
-@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
-def application(environ, start_response):
- """Do something fun!"""
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("Start and End Time are required!")
- ctx = {
- "sts": environ["sts"],
- "ets": environ["ets"],
- "format": environ["format"],
- }
- return [run(ctx, start_response)]
+from iemweb.request.gis.awc_gairmets import application # noqa: F401
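A minimal sketch of pulling a day of G-AIRMETs as a zipped shapefile,
assuming `requests`; the time window is illustrative:

```python
import requests

URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/awc_gairmets.py"
params = {
    "sts": "2024-01-01T00:00Z",
    "ets": "2024-01-02T00:00Z",
    "format": "shp",  # "kml" is the other documented option
}
resp = requests.get(URL, params=params, timeout=120)
resp.raise_for_status()
with open("airmets.zip", "wb") as fh:  # .shp/.shx/.dbf/.prj inside
    fh.write(resp.content)
```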
diff --git a/cgi-bin/request/gis/cwas.py b/cgi-bin/request/gis/cwas.py
index 0d3de89710..9a5e01ef76 100644
--- a/cgi-bin/request/gis/cwas.py
+++ b/cgi-bin/request/gis/cwas.py
@@ -1,101 +1,3 @@
-""".. title:: CWAS Data Service
+"""implemented in /pylib/iemweb/request/gis/cwas.py"""
-Documentation for /cgi-bin/request/gis/cwas.py
-----------------------------------------------
-
-To be written.
-
-"""
-
-# Local
-import tempfile
-import zipfile
-from io import BytesIO
-
-# Third Party
-import fiona
-import geopandas as gpd
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import iemapp
-
-fiona.supported_drivers["KML"] = "rw"
-PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
-
-
-def run(ctx, start_response):
- """Do something!"""
- common = "at time zone 'UTC', 'YYYY-MM-DD\"T\"HH24:MI:00\"Z\"'"
- schema = {
- "geometry": "Polygon",
- "properties": {
- "CENTER": "str:4",
- "ISSUE": "str:20",
- "EXPIRE": "str:20",
- "PROD_ID": "str:36",
- "NARRATIV": "str:256",
- "NUMBER": "int",
- },
- }
- with get_sqlalchemy_conn("postgis") as conn:
- df = gpd.read_postgis(
- "select center, "
- f"to_char(issue {common}) as issue, "
- f"to_char(expire {common}) as expire, "
- "product_id as prod_id, narrative as narrativ, num as number, "
- "geom from cwas WHERE issue >= %s and "
- "issue < %s ORDER by issue ASC",
- conn,
- params=(
- ctx["sts"],
- ctx["ets"],
- ),
- geom_col="geom",
- )
- if df.empty:
- start_response("200 OK", [("Content-type", "text/plain")])
- return b"ERROR: no results found for your query"
- df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
- fn = f"cwas_{ctx['sts']:%Y%m%d%H%M}_{ctx['ets']:%Y%m%d%H%M}"
- if ctx["format"] == "kml":
- fp = BytesIO()
- with fiona.drivers():
- df.to_file(fp, driver="KML")
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.kml"),
- ]
- start_response("200 OK", headers)
- return fp.getvalue()
-
- with tempfile.TemporaryDirectory() as tmpdir:
- df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
-
- zio = BytesIO()
- with zipfile.ZipFile(
- zio, mode="w", compression=zipfile.ZIP_DEFLATED
- ) as zf:
- with open(PRJFILE, encoding="utf-8") as fh:
- zf.writestr(f"{fn}.prj", fh.read())
- for suffix in ["shp", "shx", "dbf"]:
- zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.zip"),
- ]
- start_response("200 OK", headers)
-
- return zio.getvalue()
-
-
-@iemapp(default_tz="UTC", help=__doc__)
-def application(environ, start_response):
- """Do something fun!"""
- if "sts" not in environ:
- raise IncompleteWebRequest("GET start time parameters missing")
- ctx = {
- "sts": environ["sts"],
- "ets": environ["ets"],
- "format": environ.get("format", "shp"),
- }
- return [run(ctx, start_response)]
+from iemweb.request.gis.cwas import application # noqa: F401
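The CWAS docstring is still "To be written", but the handler above accepts
the same `sts`/`ets`/`format` knobs as the G-AIRMET service. A minimal sketch
inferred from that code, assuming `requests`:

```python
import requests

URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/cwas.py"
params = {
    "sts": "2024-01-01T00:00Z",
    "ets": "2024-01-02T00:00Z",
    "format": "shp",  # "kml" also handled by the code above
}
resp = requests.get(URL, params=params, timeout=120)
resp.raise_for_status()
with open("cwas.zip", "wb") as fh:
    fh.write(resp.content)
```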
diff --git a/cgi-bin/request/gis/lsr.py b/cgi-bin/request/gis/lsr.py
index 9152c4ccf9..10c5b91cc0 100644
--- a/cgi-bin/request/gis/lsr.py
+++ b/cgi-bin/request/gis/lsr.py
@@ -1,3 +1,3 @@
-"""Implemented at /pylib/iemweb/request/gis/lsr.py"""
+"""implemented in /pylib/iemweb/request/gis/lsr.py"""
from iemweb.request.gis.lsr import application # noqa: F401
diff --git a/cgi-bin/request/gis/nexrad_storm_attrs.py b/cgi-bin/request/gis/nexrad_storm_attrs.py
index 390dfb8298..a1d28bfd4b 100644
--- a/cgi-bin/request/gis/nexrad_storm_attrs.py
+++ b/cgi-bin/request/gis/nexrad_storm_attrs.py
@@ -1,186 +1,3 @@
-""".. title:: NEXRAD Storm Attributes Data Service
+"""implemented in /pylib/iemweb/request/gis/nexrad_storm_attrs.py"""
-Return to `request form `_.
-
-Documentation for /cgi-bin/request/gis/nexrad_storm_attrs.py
-------------------------------------------------------------
-
-This service provides IEM processed NWS NEXRAD Storm Attribute table data. This
-archive updates in real-time as level 3 NCR products are received. If you
-request more than two radar sites, the time span is limited to 7 days.
-
-Changelog
----------
-
-- 2024-06-11: Initial documentation release
-
-Example Usage
--------------
-
-Provide all attributes between 2024-06-11 00:00 and 2024-06-11 23:59 UTC
-
-https://mesonet.agron.iastate.edu/cgi-bin/request/gis/nexrad_storm_attrs.py?\
-fmt=shp&sts=2024-06-11T00:00:00Z&ets=2024-06-11T23:59:59Z
-
-"""
-
-import datetime
-import zipfile
-from io import BytesIO, StringIO
-
-import shapefile
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-from sqlalchemy import text
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- ets: AwareDatetime = Field(None, description="End of Time for request")
- fmt: str = Field(
- "shp", description="Format of output", pattern="^(shp|csv)$"
- )
- radar: ListOrCSVType = Field([], description="Radar Sites to include")
- sts: AwareDatetime = Field(None, description="Start of Time for request")
- year1: int = Field(
- None, description="Year for start of time if sts not set"
- )
- month1: int = Field(
- None, description="Month for start of time if sts not set"
- )
- day1: int = Field(None, description="Day for start of time if sts not set")
- hour1: int = Field(
- None, description="Hour for start of time if sts not set"
- )
- minute1: int = Field(
- None, description="Minute for start of time if sts not set"
- )
- year2: int = Field(None, description="Year for end of time if ets not set")
- month2: int = Field(
- None, description="Month for end of time if ets not set"
- )
- day2: int = Field(None, description="Day for end of time if ets not set")
- hour2: int = Field(None, description="Hour for end of time if ets not set")
- minute2: int = Field(
- None, description="Minute for end of time if ets not set"
- )
-
-
-def run(environ, start_response):
- """Do something!"""
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("Missing start or end time parameters.")
- sio = StringIO()
-
- # Need to limit what we are allowing them to request as the file would get
-    # massive. So let's set an arbitrary limit:
-    # 1) if more than two RADARs are requested, limit the span to 7 days
- radarlimit = ""
- if environ["radar"] and "ALL" not in environ["radar"]:
- radarlimit = " and nexrad = ANY(:radar) "
- if (
- len(environ["radar"]) > 2
- and (environ["ets"] - environ["sts"]).days > 6
- ):
- environ["ets"] = environ["sts"] + datetime.timedelta(days=7)
- fn = f"stormattr_{environ['sts']:%Y%m%d%H%M}_{environ['ets']:%Y%m%d%H%M}"
-
- with get_sqlalchemy_conn("radar") as conn:
- res = conn.execute(
- text(f"""
- SELECT to_char(valid at time zone 'UTC', 'YYYYMMDDHH24MI')
- as utctime,
- storm_id, nexrad, azimuth, range, tvs, meso, posh, poh, max_size,
- vil, max_dbz, max_dbz_height, top, drct, sknt,
- ST_y(geom) as lat, ST_x(geom) as lon
- from nexrad_attributes_log WHERE
- valid >= :sts and valid < :ets {radarlimit} ORDER by valid ASC
- """),
- {
- "sts": environ["sts"],
- "ets": environ["ets"],
- "radar": environ["radar"],
- },
- )
- if res.rowcount == 0:
- start_response("200 OK", [("Content-type", "text/plain")])
- return b"ERROR: no results found for your query"
-
- if environ["fmt"] == "csv":
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.csv"),
- ]
- start_response("200 OK", headers)
- sio.write(
- (
- "VALID,STORM_ID,NEXRAD,AZIMUTH,RANGE,TVS,MESO,POSH,"
- "POH,MAX_SIZE,VIL,MAX_DBZ,MAZ_DBZ_H,TOP,DRCT,SKNT,LAT,LON\n"
- )
- )
- for row in res:
- sio.write(",".join([str(s) for s in row]) + "\n")
- return sio.getvalue().encode("ascii", "ignore")
-
- shpio = BytesIO()
- shxio = BytesIO()
- dbfio = BytesIO()
-
- with shapefile.Writer(shp=shpio, shx=shxio, dbf=dbfio) as shp:
- # C is ASCII characters
- # N is a double precision integer limited to around 18 characters
- # length
- # D is for dates in the YYYYMMDD format,
- # with no spaces or hyphens between the sections
- # F is for floating point numbers with the same length limits as N
- # L is for logical data which is stored in the shapefile's attr
- # table as a short integer as a 1 (true) or a 0 (false).
- # The values it can receive are 1, 0, y, n, Y, N, T, F
- # or the python builtins True and False
- shp.field("VALID", "C", 12)
- shp.field("STORM_ID", "C", 2)
- shp.field("NEXRAD", "C", 3)
- shp.field("AZIMUTH", "N", 3, 0)
- shp.field("RANGE", "N", 3, 0)
- shp.field("TVS", "C", 10)
- shp.field("MESO", "C", 10)
- shp.field("POSH", "N", 3, 0)
- shp.field("POH", "N", 3, 0)
- shp.field("MAX_SIZE", "F", 5, 2)
- shp.field("VIL", "N", 3, 0)
- shp.field("MAX_DBZ", "N", 3, 0)
- shp.field("MAX_DBZ_H", "F", 5, 2)
- shp.field("TOP", "F", 9, 2)
- shp.field("DRCT", "N", 3, 0)
- shp.field("SKNT", "N", 3, 0)
- shp.field("LAT", "F", 10, 4)
- shp.field("LON", "F", 10, 4)
- for row in res:
- shp.point(row[-1], row[-2])
- shp.record(*row)
-
- zio = BytesIO()
- with zipfile.ZipFile(
- zio, mode="w", compression=zipfile.ZIP_DEFLATED
- ) as zf:
- with open("/opt/iem/data/gis/meta/4326.prj", encoding="utf-8") as fh:
- zf.writestr(f"{fn}.prj", fh.read())
- zf.writestr(f"{fn}.shp", shpio.getvalue())
- zf.writestr(f"{fn}.shx", shxio.getvalue())
- zf.writestr(f"{fn}.dbf", dbfio.getvalue())
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.zip"),
- ]
- start_response("200 OK", headers)
-
- return zio.getvalue()
-
-
-@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
-def application(environ, start_response):
- """Do something fun!"""
- return [run(environ, start_response)]
+from iemweb.request.gis.nexrad_storm_attrs import application # noqa: F401
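A minimal sketch of the documented example request, switched to CSV output
and with a hypothetical single-site filter, assuming `requests`:

```python
import requests

URL = (
    "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/"
    "nexrad_storm_attrs.py"
)
params = {
    "fmt": "csv",    # the default "shp" returns a zipped shapefile
    "radar": "DMX",  # hypothetical site filter; omit for all sites
    "sts": "2024-06-11T00:00:00Z",
    "ets": "2024-06-11T23:59:59Z",
}
resp = requests.get(URL, params=params, timeout=120)
resp.raise_for_status()
print(resp.text.splitlines()[0])  # VALID,STORM_ID,NEXRAD,...
```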
diff --git a/cgi-bin/request/gis/pireps.py b/cgi-bin/request/gis/pireps.py
index f89376051c..61d7d7ae6f 100644
--- a/cgi-bin/request/gis/pireps.py
+++ b/cgi-bin/request/gis/pireps.py
@@ -1,212 +1,3 @@
-""".. title:: Pilot Weather Report (PIREP) Data Service
+"""implemented in /pylib/iemweb/request/gis/pireps.py"""
-Documentation for /cgi-bin/request/gis/pireps.py
-------------------------------------------------
-
-This service emits processed and raw PIREP data. At this time, you must
-request 120 days or less of data at one time if you do not filter the request.
-
-Changelog
----------
-
-- 2024-06-28: Initial documentation release
-
-Example Requests
-----------------
-
-Provide all PIREPs for the month of June 2024 over Chicago ARTCC in CSV:
-
-https://mesonet.agron.iastate.edu/cgi-bin/request/gis/pireps.py?\
-sts=2024-06-01T00:00:00Z&ets=2024-07-01T00:00:00Z&artcc=ZAU&fmt=csv
-
-"""
-
-import datetime
-import zipfile
-from io import BytesIO, StringIO
-
-import shapefile
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-from sqlalchemy import text
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- artcc: ListOrCSVType = Field(
- default=[],
- description="The ARTCC to limit the query to, use _ALL for all",
- )
- ets: AwareDatetime = Field(
- default=None, description="The end time of the query"
- )
- fmt: str = Field(
- default="shp", description="The format of the output file"
- )
- sts: AwareDatetime = Field(
- default=None, description="The start time of the query"
- )
- year1: int = Field(
- default=2000,
- description="The start year of the query, when sts is not provided",
- )
- month1: int = Field(
- default=1,
- description="The start month of the query, when sts is not provided",
- )
- day1: int = Field(
- default=1,
- description="The start day of the query, when sts is not provided",
- )
- degrees: float = Field(
- default=1.0,
- description="The distance in degrees for a spatial filter",
- gt=0,
- lt=90,
- )
- filter: bool = Field(
- default=False,
- description="Should we filter by distance from a point?",
- )
- lat: float = Field(
- default=41.99,
- description="The latitude of the point to filter by",
- )
- lon: float = Field(
- default=-91.99,
- description="The longitude of the point to filter by",
- )
- hour1: int = Field(
- default=0,
- description="The start hour of the query, when sts is not provided",
- )
- minute1: int = Field(
- default=0,
- description="The start minute of the query, when sts is not provided",
- )
- year2: int = Field(
- default=2000,
- description="The end year of the query, when ets is not provided",
- )
- month2: int = Field(
- default=1,
- description="The end month of the query, when ets is not provided",
- )
- day2: int = Field(
- default=1,
- description="The end day of the query, when ets is not provided",
- )
- hour2: int = Field(
- default=0,
- description="The end hour of the query, when ets is not provided",
- )
- minute2: int = Field(
- default=0,
- description="The end minute of the query, when ets is not provided",
- )
-
-
-def run(environ, start_response):
- """Go run!"""
- artcc_sql = ""
- if "_ALL" not in environ["artcc"] and environ["artcc"]:
- artcc_sql = " artcc = ANY(:artcc) and "
- params = {
- "artcc": environ["artcc"],
- "distance": environ["degrees"],
- "lat": environ["lat"],
- "lon": environ["lon"],
- "sts": environ["sts"],
- "ets": environ["ets"],
- }
-
- spatialsql = ""
- if environ["filter"]:
- spatialsql = (
- "ST_Distance(geom::geometry, ST_SetSRID(ST_Point(:lon, :lat), "
- "4326)) <= :distance and "
- )
- else:
- if (environ["ets"] - environ["sts"]).days > 120:
- environ["ets"] = environ["sts"] + datetime.timedelta(days=120)
- sql = f"""
- SELECT to_char(valid at time zone 'UTC', 'YYYYMMDDHH24MI') as utctime,
- case when is_urgent then 'T' else 'F' end,
- substr(replace(aircraft_type, ',', ' '), 0, 40),
- substr(replace(report, ',', ' '), 0, 255),
- substr(trim(substring(replace(report, ',', ' '),
- '/IC([^/]*)/?')), 0, 255) as icing,
- substr(trim(substring(replace(report, ',', ' '),
- '/TB([^/]*)/?')), 0, 255) as turb,
- artcc, ST_y(geom::geometry) as lat, ST_x(geom::geometry) as lon
- from pireps WHERE {spatialsql} {artcc_sql}
- valid >= :sts and valid < :ets ORDER by valid ASC
- """
- fn = f"pireps_{environ['sts']:%Y%m%d%H%M}_{environ['ets']:%Y%m%d%H%M}"
-
- with get_sqlalchemy_conn("postgis") as conn:
- res = conn.execute(text(sql), params)
- if res.rowcount == 0:
- start_response("200 OK", [("Content-type", "text/plain")])
- return b"ERROR: no results found for your query"
-
- if environ["fmt"] == "csv":
- sio = StringIO()
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.csv"),
- ]
- start_response("200 OK", headers)
- sio.write(
-            "VALID,URGENT,AIRCRAFT,REPORT,ICING,TURBULENCE,ARTCC,LAT,LON\n"
- )
- for row in res:
- sio.write(",".join([str(s) for s in row]) + "\n")
- return sio.getvalue().encode("ascii", "ignore")
-
- shpio = BytesIO()
- shxio = BytesIO()
- dbfio = BytesIO()
-
- with shapefile.Writer(shx=shxio, dbf=dbfio, shp=shpio) as shp:
- shp.field("VALID", "C", 12)
- shp.field("URGENT", "C", 1)
- shp.field("AIRCRAFT", "C", 40)
- shp.field("REPORT", "C", 255) # Max field size is 255
- shp.field("ICING", "C", 255) # Max field size is 255
- shp.field("TURB", "C", 255) # Max field size is 255
- shp.field("ARTCC", "C", 3)
- shp.field("LAT", "F", 7, 4)
- shp.field("LON", "F", 9, 4)
- for row in res:
- if row[-1] is None:
- continue
- shp.point(row[-1], row[-2])
- shp.record(*row)
-
- zio = BytesIO()
- with zipfile.ZipFile(
- zio, mode="w", compression=zipfile.ZIP_DEFLATED
- ) as zf:
- with open("/opt/iem/data/gis/meta/4326.prj", encoding="ascii") as fh:
- zf.writestr(f"{fn}.prj", fh.read())
- zf.writestr(f"{fn}.shp", shpio.getvalue())
- zf.writestr(f"{fn}.shx", shxio.getvalue())
- zf.writestr(f"{fn}.dbf", dbfio.getvalue())
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.zip"),
- ]
- start_response("200 OK", headers)
- return zio.getvalue()
-
-
-@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
-def application(environ, start_response):
- """Do something fun!"""
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("GET start time parameters missing.")
- return [run(environ, start_response)]
+from iemweb.request.gis.pireps import application # noqa: F401
diff --git a/cgi-bin/request/gis/sigmets.py b/cgi-bin/request/gis/sigmets.py
index d9afbd5a1f..7eb0e6e8f1 100644
--- a/cgi-bin/request/gis/sigmets.py
+++ b/cgi-bin/request/gis/sigmets.py
@@ -1,165 +1,3 @@
-""".. title:: SIGMET Data Service
+"""implemented in /pylib/iemweb/request/gis/sigmets.py"""
-Return to `User Frontend `_
-
-Documentation for /cgi-bin/request/gis/sigmets.py
--------------------------------------------------
-
-This service emits SIGMET data for a given time period.
-
-Changelog
----------
-
-- 2024-07-11: Migrated to pydantic validation and added CSV and Excel format
-    options.
-
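-Example Requests
-----------------
-
-An illustrative request (parameter names per the schema below) for all
-SIGMETs issued during June 2024 as CSV:
-
-https://mesonet.agron.iastate.edu/cgi-bin/request/gis/sigmets.py?\
-sts=2024-06-01T00:00:00Z&ets=2024-07-01T00:00:00Z&format=csv
-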
-"""
-
-# Local
-import tempfile
-import zipfile
-from io import BytesIO, StringIO
-
-# Third Party
-import fiona
-import geopandas as gpd
-import pandas as pd
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.reference import ISO8601
-from pyiem.webutil import CGIModel, iemapp
-from sqlalchemy import text
-
-fiona.supported_drivers["KML"] = "rw"
-PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- format: str = Field(
- default="shp",
- description="Output format, either shp, kml, csv, or excel",
- pattern="^(shp|kml|csv|excel)$",
- )
- sts: AwareDatetime = Field(default=None, description="Start Time")
- ets: AwareDatetime = Field(default=None, description="End Time")
- year1: int = Field(default=None, description="Start Year, if sts not set")
- month1: int = Field(
- default=None, description="Start Month, if sts not set"
- )
- day1: int = Field(default=None, description="Start Day, if sts not set")
- hour1: int = Field(default=None, description="Start Hour, if sts not set")
- minute1: int = Field(
- default=None, description="Start Minute, if sts not set"
- )
- year2: int = Field(default=None, description="End Year, if ets not set")
- month2: int = Field(default=None, description="End Month, if ets not set")
- day2: int = Field(default=None, description="End Day, if ets not set")
- hour2: int = Field(default=None, description="End Hour, if ets not set")
- minute2: int = Field(
- default=None, description="End Minute, if ets not set"
- )
-
-
-def run(ctx, start_response):
- """Do something!"""
- with get_sqlalchemy_conn("postgis") as conn:
- df = gpd.read_postgis(
- text("""
- select label || ' ' || sigmet_type as name, label,
- sigmet_type as type,
- issue at time zone 'UTC' as issue,
- expire at time zone 'UTC' as expire, geom,
- product_id as PROD_ID
- from sigmets_archive WHERE issue >= :sts and
- issue < :ets ORDER by issue ASC
- """),
- conn,
- params={
- "sts": ctx["sts"],
- "ets": ctx["ets"],
- },
- geom_col="geom",
- )
- if df.empty:
- start_response("200 OK", [("Content-type", "text/plain")])
- return b"ERROR: no results found for your query"
- for col in ["issue", "expire"]:
- df[col] = df[col].dt.strftime(ISO8601)
- df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
- fn = f"sigmets_{ctx['sts']:%Y%m%d%H%M}_{ctx['ets']:%Y%m%d%H%M}"
- if ctx["format"] == "kml":
- fp = BytesIO()
- with fiona.drivers():
- df.to_file(fp, driver="KML", NameField="NAME", engine="fiona")
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.kml"),
- ]
- start_response("200 OK", headers)
- return fp.getvalue()
- if ctx["format"] == "csv":
- fp = StringIO()
- df.drop(columns="geom").to_csv(fp, index=False)
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.csv"),
- ]
- start_response("200 OK", headers)
- return fp.getvalue().encode("ascii")
- if ctx["format"] == "excel":
- fp = BytesIO()
- with pd.ExcelWriter(fp) as writer:
- df.drop(columns="geom").to_excel(writer, index=False)
- headers = [
- ("Content-type", EXL),
- ("Content-Disposition", f"attachment; filename={fn}.xlsx"),
- ]
- start_response("200 OK", headers)
- return fp.getvalue()
-
- schema = {
- "geometry": "Polygon",
- "properties": {
- "NAME": "str:64",
- "LABEL": "str:16",
- "TYPE": "str:1",
- "ISSUE": "str:20",
- "EXPIRE": "str:20",
- "PROD_ID": "str:36",
- },
- }
- with tempfile.TemporaryDirectory() as tmpdir:
- df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
-
- zio = BytesIO()
- with zipfile.ZipFile(
- zio, mode="w", compression=zipfile.ZIP_DEFLATED
- ) as zf:
- with open(PRJFILE, encoding="utf-8") as fh:
- zf.writestr(f"{fn}.prj", fh.read())
- for suffix in ["shp", "shx", "dbf"]:
- zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.zip"),
- ]
- start_response("200 OK", headers)
-
- return zio.getvalue()
-
-
-@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
-def application(environ, start_response):
- """Do something fun!"""
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("GET start or end time parameters missing")
- ctx = {
- "sts": environ["sts"],
- "ets": environ["ets"],
- "format": environ["format"],
- }
- return [run(ctx, start_response)]
+from iemweb.request.gis.sigmets import application # noqa: F401
diff --git a/cgi-bin/request/gis/spc_mcd.py b/cgi-bin/request/gis/spc_mcd.py
index 19371ad129..5b0cbf59e8 100644
--- a/cgi-bin/request/gis/spc_mcd.py
+++ b/cgi-bin/request/gis/spc_mcd.py
@@ -1,144 +1,3 @@
-""".. title:: Storm Prediction Center Mesoscale Convective Discussion
+"""implemented in /pylib/iemweb/request/gis/spc_mcd.py"""
-Documentation for /cgi-bin/request/gis/spc_mcd.py
--------------------------------------------------
-
-The IEM archives Storm Prediction Center Mesoscale Convective Discussions (MCD)
-in real-time and makes them available for download via this service. The
-raw product text is not emitted here, but the ``prod_id`` is included, which
-is a reference to the raw product text.
-
-Changelog
----------
-
-- 2024-05-29: Initial documentation
-
-Example Usage
--------------
-
-Return all MCDs for 2023
-
-https://mesonet.agron.iastate.edu/cgi-bin/request/gis/spc_mcd.py?\
-sts=2023-01-01T00:00Z&ets=2024-01-01T00:00Z
-
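-A minimal Python sketch (assuming the third-party ``requests`` library) that
-saves the returned shapefile zip to disk::
-
-    import requests
-
-    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/spc_mcd.py"
-    params = {"sts": "2023-01-01T00:00Z", "ets": "2024-01-01T00:00Z"}
-    resp = requests.get(URL, params=params, timeout=300)
-    resp.raise_for_status()
-    with open("mcd_2023.zip", "wb") as fh:
-        fh.write(resp.content)
-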
-"""
-
-# Local
-import tempfile
-import zipfile
-from io import BytesIO
-
-# Third Party
-import geopandas as gpd
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import CGIModel, iemapp
-
-PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- sts: AwareDatetime = Field(None, description="Start Time")
- ets: AwareDatetime = Field(None, description="End Time")
- year1: int = Field(
- None, description="Start UTC Year when sts is not provided"
- )
- year2: int = Field(
- None, description="End UTC Year when ets is not provided"
- )
- month1: int = Field(
- None, description="Start UTC Month when sts is not provided"
- )
- month2: int = Field(
- None, description="End UTC Month when ets is not provided"
- )
- day1: int = Field(
- None, description="Start UTC Day when sts is not provided"
- )
- day2: int = Field(None, description="End UTC Day when ets is not provided")
- hour1: int = Field(
- None, description="Start UTC Hour when sts is not provided"
- )
- hour2: int = Field(
- None, description="End UTC Hour when ets is not provided"
- )
- minute1: int = Field(
- None, description="Start UTC Minute when sts is not provided"
- )
- minute2: int = Field(
- None, description="End UTC Minute when ets is not provided"
- )
-
-
-def run(ctx, start_response):
- """Do something!"""
- common = "at time zone 'UTC', 'YYYYMMDDHH24MI'"
- schema = {
- "geometry": "Polygon",
- "properties": {
- "ISSUE": "str:12",
- "EXPIRE": "str:12",
- "PROD_ID": "str:35",
- "YEAR": "int",
- "NUM": "int",
- "CONFIDEN": "int",
- "CONCERN": "str:64",
- },
- }
- with get_sqlalchemy_conn("postgis") as conn:
- df = gpd.read_postgis(
- "select "
- f"to_char(issue {common}) as issue, "
- f"to_char(expire {common}) as expire, "
- "product_id as prod_id, year, num, watch_confidence as confiden, "
- "concerning as concern, geom "
- "from mcd WHERE issue >= %s and "
- "issue < %s ORDER by issue ASC",
- conn,
- params=(
- ctx["sts"],
- ctx["ets"],
- ),
- geom_col="geom",
- )
- if df.empty:
- start_response("200 OK", [("Content-type", "text/plain")])
- return b"ERROR: no results found for your query"
- df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
- fn = f"mcd_{ctx['sts']:%Y%m%d%H%M}_{ctx['ets']:%Y%m%d%H%M}"
-
- with tempfile.TemporaryDirectory() as tmpdir:
- df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
-
- zio = BytesIO()
- with zipfile.ZipFile(
- zio, mode="w", compression=zipfile.ZIP_DEFLATED
- ) as zf:
- with open(PRJFILE, encoding="utf-8") as fh:
- zf.writestr(f"{fn}.prj", fh.read())
- for suffix in ["shp", "shx", "dbf"]:
- zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.zip"),
- ]
- start_response("200 OK", headers)
-
- return zio.getvalue()
-
-
-@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
-def application(environ, start_response):
- """Do something fun!"""
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("GET sts/ets parameter not provided")
- if environ["sts"] > environ["ets"]:
- environ["sts"], environ["ets"] = environ["ets"], environ["sts"]
- ctx = {
- "sts": environ["sts"],
- "ets": environ["ets"],
- }
- return [run(ctx, start_response)]
+from iemweb.request.gis.spc_mcd import application # noqa: F401
diff --git a/cgi-bin/request/gis/spc_outlooks.py b/cgi-bin/request/gis/spc_outlooks.py
index 0eb353a38f..bb431c0d6f 100644
--- a/cgi-bin/request/gis/spc_outlooks.py
+++ b/cgi-bin/request/gis/spc_outlooks.py
@@ -1,158 +1,3 @@
-""".. title:: Download SPC Convective and Fire Weather or WPC ERO Outlooks
+"""implemented in /pylib/iemweb/request/gis/spc_outlooks.py"""
-Documentation for /cgi-bin/request/gis/spc_outlooks.py
-------------------------------------------------------
-
-This application allows for the download of SPC Convective and Fire Weather
-or WPC Excessive Rainfall Outlooks in shapefile format.
-
-Changelog
----------
-
-- 2024-06-14: Initial documentation of this backend
-
-Example Requests
-----------------
-
-Provide all of the day 2 convective outlooks for the year 2024:
-
-https://mesonet.agron.iastate.edu/cgi-bin/request/gis/spc_outlooks.py?d=2&\
-type=C&sts=2024-01-01T00:00Z&ets=2025-01-01T00:00Z
-
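-A minimal Python sketch (assuming ``requests`` and ``geopandas``) that saves
-the returned zip and reads it into a GeoDataFrame::
-
-    import geopandas as gpd
-    import requests
-
-    URL = (
-        "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/"
-        "spc_outlooks.py"
-    )
-    params = {
-        "d": "2",
-        "type": "C",
-        "sts": "2024-01-01T00:00Z",
-        "ets": "2025-01-01T00:00Z",
-    }
-    resp = requests.get(URL, params=params, timeout=600)
-    resp.raise_for_status()
-    with open("outlooks.zip", "wb") as fh:
-        fh.write(resp.content)
-    df = gpd.read_file("zip://outlooks.zip")
-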
-"""
-
-# Local
-import tempfile
-import zipfile
-from io import BytesIO
-
-# Third Party
-import geopandas as gpd
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-from sqlalchemy import text
-
-PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- d: ListOrCSVType = Field(
- ["1", "2", "3", "4", "5", "6", "7", "8"], description="Days to include"
- )
- ets: AwareDatetime = Field(
- None, description="End of the period to include"
- )
- geom: str = Field(
- "geom_layers",
- description=(
- "Express geometries either as layers or non-overlapping "
- "geometries."
- ),
- pattern="geom_layers|geom",
- )
- sts: AwareDatetime = Field(
- None, description="Start of the period to include"
- )
- type: ListOrCSVType = Field(
- ["C", "F"], description="Outlook types to include"
- )
- year1: int = Field(None, description="Start year when sts is not set.")
- month1: int = Field(None, description="Start month when sts is not set.")
- day1: int = Field(None, description="Start day when sts is not set.")
- hour1: int = Field(None, description="Start hour when sts is not set.")
- minute1: int = Field(None, description="Start minute when sts is not set.")
- year2: int = Field(None, description="End year when ets is not set.")
- month2: int = Field(None, description="End month when ets is not set.")
- day2: int = Field(None, description="End day when ets is not set.")
- hour2: int = Field(None, description="End hour when ets is not set.")
- minute2: int = Field(None, description="End minute when ets is not set.")
-
-
-def get_context(environ):
- """Figure out the CGI variables passed to this script"""
- types = [x[0].upper() for x in environ["type"]]
- days = [int(x) for x in environ["d"]]
- return {
- "sts": environ["sts"],
- "ets": environ["ets"],
- "types": types,
- "days": days,
- "geom_col": environ["geom"],
- }
-
-
-def run(ctx, start_response):
- """Do something!"""
- common = "at time zone 'UTC', 'YYYYMMDDHH24MI'"
- schema = {
- "geometry": "MultiPolygon",
- "properties": {
- "ISSUE": "str:12",
- "EXPIRE": "str:12",
- "PRODISS": "str:12",
- "TYPE": "str:1",
- "DAY": "int",
- "THRESHOLD": "str:4",
- "CATEGORY": "str:48", # 43 as checked max, to save space
- "CYCLE": "int",
- },
- }
- with get_sqlalchemy_conn("postgis") as conn:
- df = gpd.read_postgis(
- text(f"""select
- to_char(issue {common}) as issue,
- to_char(expire {common}) as expire,
- to_char(product_issue {common}) as prodiss,
- outlook_type as type, day, threshold, category, cycle,
- {ctx["geom_col"]} as geom
- from spc_outlooks WHERE product_issue >= :sts and
- product_issue < :ets and outlook_type = ANY(:types)
- and day = ANY(:days)
- ORDER by product_issue ASC
- """),
- conn,
- params={
- "sts": ctx["sts"],
- "ets": ctx["ets"],
- "types": ctx["types"],
- "days": ctx["days"],
- },
- geom_col="geom",
- )
- if df.empty:
- start_response("200 OK", [("Content-type", "text/plain")])
- return b"ERROR: no results found for your query"
- df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
- fn = f"outlooks_{ctx['sts']:%Y%m%d%H%M}_{ctx['ets']:%Y%m%d%H%M}"
-
- with tempfile.TemporaryDirectory() as tmpdir:
- df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
-
- zio = BytesIO()
- with zipfile.ZipFile(
- zio, mode="w", compression=zipfile.ZIP_DEFLATED
- ) as zf:
- with open(PRJFILE, encoding="utf-8") as fh:
- zf.writestr(f"{fn}.prj", fh.read())
- for suffix in ["shp", "shx", "dbf"]:
- zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.zip"),
- ]
- start_response("200 OK", headers)
-
- return zio.getvalue()
-
-
-@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
-def application(environ, start_response):
- """Do something fun!"""
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("GET start/end time parameters missing")
- ctx = get_context(environ)
- return [run(ctx, start_response)]
+from iemweb.request.gis.spc_outlooks import application # noqa: F401
diff --git a/cgi-bin/request/gis/spc_watch.py b/cgi-bin/request/gis/spc_watch.py
index e418e5ebe6..4e1553bffd 100644
--- a/cgi-bin/request/gis/spc_watch.py
+++ b/cgi-bin/request/gis/spc_watch.py
@@ -1,175 +1,3 @@
-""".. title:: Download SPC Watch Polygons and Metadata
+"""implemented in /pylib/iemweb/request/gis/spc_watch.py"""
-Documentation for /cgi-bin/request/gis/spc_watch.py
----------------------------------------------------
-
-The IEM archives the Storm Prediction Center (SPC) watch polygons and
-associated metadata. Please note that these polygons are no longer the
-official watch geography; watch-by-county is the official product. The
-polygons remain generally useful and are a reasonable approximation of the
-actual watch extent.
-
-Changelog
----------
-
-- 2024-06-27: Default `hour1`, `hour2`, `minute1`, `minute2` to 0, so they do
-  not need to be provided.
-- 2024-06-09: Initial Documentation
-
-Example Usage
--------------
-
-Return all watch polygons for UTC 2024 in GeoJSON.
-
-https://mesonet.agron.iastate.edu/cgi-bin/request/gis/spc_watch.py?\
-sts=2024-01-01T00:00:00Z&ets=2025-01-01T00:00:00Z&format=geojson
-
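-A minimal Python sketch (assuming ``requests``) that loads the GeoJSON and
-counts the returned watch polygons::
-
-    import requests
-
-    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/spc_watch.py"
-    params = {
-        "sts": "2024-01-01T00:00:00Z",
-        "ets": "2025-01-01T00:00:00Z",
-        "format": "geojson",
-    }
-    geojson = requests.get(URL, params=params, timeout=600).json()
-    print(len(geojson["features"]), "watch polygons")
-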
-"""
-
-# Local
-import tempfile
-import zipfile
-from io import BytesIO
-
-# Third Party
-import fiona
-import geopandas as gpd
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import CGIModel, iemapp
-from sqlalchemy import text
-
-fiona.supported_drivers["KML"] = "rw"
-PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- ets: AwareDatetime = Field(None, description="End Time")
- format: str = Field("shp", description="Output format")
- sts: AwareDatetime = Field(None, description="Start Time")
- year1: int = Field(None, description="Start year when sts is not provided")
- year2: int = Field(None, description="End year when ets is not provided")
- month1: int = Field(
- None, description="Start month when sts is not provided"
- )
- month2: int = Field(None, description="End month when ets is not provided")
- day1: int = Field(None, description="Start day when sts is not provided")
- day2: int = Field(None, description="End day when ets is not provided")
- hour1: int = Field(0, description="Start hour when sts is not provided")
- hour2: int = Field(0, description="End hour when ets is not provided")
- minute1: int = Field(
- 0, description="Start minute when sts is not provided"
- )
- minute2: int = Field(0, description="End minute when ets is not provided")
-
-
-def start_headers(start_response, ctx, fn):
- """Figure out the proper headers for the output"""
- suffix = "zip" if ctx["format"] == "shp" else ctx["format"]
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.{suffix}"),
- ]
- start_response("200 OK", headers)
-
-
-def run(environ, start_response):
- """Do something!"""
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("Missing start or end time")
- common = "at time zone 'UTC', 'YYYYMMDDHH24MI'"
- schema = {
- "geometry": "MultiPolygon",
- "properties": {
- "ISSUE": "str:12",
- "EXPIRE": "str:12",
- "SEL": "str:5",
- "TYPE": "str:3",
- "NUM": "int",
- "P_TORTWO": "int",
- "P_TOREF2": "int",
- "P_WIND10": "int",
- "P_WIND65": "int",
- "P_HAIL10": "int",
- "P_HAIL2I": "int",
- "P_HAILWND": "int",
- "MAX_HAIL": "float",
- "MAX_GUST": "int",
- "MAX_TOPS": "int",
- "MV_DRCT": "int",
- "MV_SKNT": "int",
- "IS_PDS": "bool",
- },
- }
- with get_sqlalchemy_conn("postgis") as conn:
- df = gpd.read_postgis(
- text(f"""select
- to_char(issued {common}) as issue,
- to_char(expired {common}) as expire,
- sel, type, num, geom,
- tornadoes_2m as p_tortwo, tornadoes_1m_strong as p_toref2,
- wind_10m as p_wind10, wind_1m_65kt as p_wind65,
- hail_10m as p_hail10, hail_1m_2inch as p_hail2i,
- hail_wind_6m as p_hailwnd, max_hail_size as max_hail,
- max_wind_gust_knots as max_gust, max_tops_feet as max_tops,
- storm_motion_drct as mv_drct, storm_motion_sknt as mv_sknt,
- is_pds
- from watches WHERE issued >= :sts and
- issued < :ets ORDER by issued ASC
- """),
- conn,
- params={
- "sts": environ["sts"],
- "ets": environ["ets"],
- },
- geom_col="geom",
- )
- if df.empty:
- start_response("200 OK", [("Content-type", "text/plain")])
- return b"ERROR: no results found for your query"
- df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
- fn = f"watches_{environ['sts']:%Y%m%d%H%M}_{environ['ets']:%Y%m%d%H%M}"
- start_headers(start_response, environ, fn)
- if environ["format"] == "csv":
- return df.to_csv(index=False).encode("utf-8")
- if environ["format"] == "geojson":
- with tempfile.NamedTemporaryFile("w", delete=True) as tmp:
- df.to_file(tmp.name, driver="GeoJSON")
- with open(tmp.name, encoding="utf8") as fh:
- res = fh.read()
- return res.encode("utf-8")
- if environ["format"] == "kml":
- df["NAME"] = (
- df["ISSUE"].str.slice(0, 4)
- + ": "
- + df["TYPE"]
- + " #"
- + df["NUM"].apply(str)
- )
- fp = BytesIO()
- with fiona.drivers():
- df.to_file(fp, driver="KML", NameField="NAME", engine="fiona")
- return fp.getvalue()
-
- with tempfile.TemporaryDirectory() as tmpdir:
- df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
-
- zio = BytesIO()
- with zipfile.ZipFile(
- zio, mode="w", compression=zipfile.ZIP_DEFLATED
- ) as zf:
- with open(PRJFILE, encoding="utf-8") as fh:
- zf.writestr(f"{fn}.prj", fh.read())
- for suffix in ["shp", "shx", "dbf"]:
- zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
-
- return zio.getvalue()
-
-
-@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
-def application(environ, start_response):
- """Do something fun!"""
- return [run(environ, start_response)]
+from iemweb.request.gis.spc_watch import application # noqa: F401
diff --git a/cgi-bin/request/gis/sps.py b/cgi-bin/request/gis/sps.py
index 7c9f6b4a65..6f1594086f 100644
--- a/cgi-bin/request/gis/sps.py
+++ b/cgi-bin/request/gis/sps.py
@@ -1,91 +1,3 @@
-""".. title:: Special Weather Statement (SPS) Data Service
+"""implemented in /pylib/iemweb/request/gis/sps.py"""
-Documentation for /cgi-bin/request/gis/sps.py
----------------------------------------------
-
-To be written.
-
-"""
-
-# Local
-import tempfile
-import zipfile
-from io import BytesIO
-
-# Third Party
-import geopandas as gpd
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.util import get_sqlalchemy_conn
-from pyiem.webutil import iemapp
-
-PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
-
-
-def run(ctx, start_response):
- """Do something!"""
- common = "at time zone 'UTC', 'YYYYMMDDHH24MI'"
- schema = {
- "geometry": "Polygon",
- "properties": {
- "ISSUE": "str:12",
- "EXPIRE": "str:12",
- "PROD_ID": "str:32",
- "WFO": "str:3",
- "LNDSPOUT": "str:64",
- "WTRSPOUT": "str:64",
- "MAX_HAIL": "str:16",
- "MAX_WIND": "str:16",
- "TML_VALD": "str:12",
- "TML_DRCT": "int",
- "TML_SKNT": "int",
- },
- }
- with get_sqlalchemy_conn("postgis") as pgconn:
- df = gpd.read_postgis(
- "select "
- f"to_char(issue {common}) as issue, "
- f"to_char(expire {common}) as expire, "
- f"product_id as prod_id, "
- "wfo, landspout as lndspout, waterspout as wtrspout, "
- "max_hail_size as max_hail, max_wind_gust as max_wind, "
- f"to_char(tml_valid {common}) as tml_vald, "
- "tml_direction as tml_drct, "
- "tml_sknt, geom from sps WHERE issue >= %s and "
- "issue < %s and not ST_isempty(geom) ORDER by issue ASC",
- pgconn,
- params=(ctx["sts"], ctx["ets"]),
- geom_col="geom",
- )
- if df.empty:
- start_response("200 OK", [("Content-type", "text/plain")])
- return b"ERROR: no results found for your query"
- df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
- fn = f"sps_{ctx['sts']:%Y%m%d%H%M}_{ctx['ets']:%Y%m%d%H%M}"
-
- with tempfile.TemporaryDirectory() as tmpdir:
- df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
-
- zio = BytesIO()
- with zipfile.ZipFile(
- zio, mode="w", compression=zipfile.ZIP_DEFLATED
- ) as zf:
- with open(PRJFILE, encoding="ascii") as fp:
- zf.writestr(f"{fn}.prj", fp.read())
- for suffix in ("shp", "shx", "dbf"):
- zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.zip"),
- ]
- start_response("200 OK", headers)
-
- return zio.getvalue()
-
-
-@iemapp(default_tz="UTC", help=__doc__)
-def application(environ, start_response):
- """Do something fun!"""
- if "sts" not in environ:
- raise IncompleteWebRequest("GET start timestamp params missing")
- ctx = {"sts": environ["sts"], "ets": environ["ets"]}
- return [run(ctx, start_response)]
+from iemweb.request.gis.sps import application # noqa: F401
diff --git a/cgi-bin/request/gis/watch_by_county.py b/cgi-bin/request/gis/watch_by_county.py
index f7ef370710..a2ea04d9b8 100644
--- a/cgi-bin/request/gis/watch_by_county.py
+++ b/cgi-bin/request/gis/watch_by_county.py
@@ -1,140 +1,3 @@
-"""Watch by county, a one-off"""
+"""implemented in /pylib/iemweb/request/gis/watch_by_county.py"""
-import tempfile
-import zipfile
-from io import BytesIO
-
-from osgeo import ogr
-from pydantic import Field
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.util import utc
-from pyiem.webutil import CGIModel, iemapp
-
-ogr.UseExceptions()
-PROJFILE = "/opt/iem/data/gis/meta/4326.prj"
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- etn: int = Field(None, description="Event ID")
- year: int = Field(None, description="Year of valid timestamp")
- month: int = Field(None, description="Month of valid timestamp")
- day: int = Field(None, description="Day of valid timestamp")
- hour: int = Field(None, description="Hour of valid timestamp")
- minute: int = Field(None, description="Minute of valid timestamp")
-
-
-def get_ts_fn(environ):
- """Figure out what is requested."""
- # Get CGI vars
- if environ["year"] is not None:
- ts = utc(
- environ["year"],
- environ["month"],
- environ["day"],
- environ["hour"],
- environ["minute"],
- )
- fn = f"watch_by_county_{ts:%Y%m%d%H%M}"
- else:
- ts = utc()
- fn = "watch_by_county"
- return ts, fn
-
-
-@iemapp(help=__doc__, schema=Schema)
-def application(environ, start_response):
- """Go Main Go"""
- try:
- ts, fn = get_ts_fn(environ)
- except Exception as exp:
- raise IncompleteWebRequest("bad input provided") from exp
- if environ["etn"] is not None:
- etnLimiter = f"and eventid = {int(environ.get('etn'))}"
-        fn = f"watch_by_county_{ts:%Y%m%d%H%M}_{int(environ.get('etn'))}"
- else:
- etnLimiter = ""
-
- with tempfile.TemporaryDirectory() as tmpdir:
- table = f"warnings_{ts.year}"
- source = ogr.Open(
- "PG:host=iemdb-postgis.local dbname=postgis "
- f"user=nobody tables={table}(tgeom)"
- )
-
- out_driver = ogr.GetDriverByName("ESRI Shapefile")
- out_ds = out_driver.CreateDataSource(f"{tmpdir}/{fn}.shp")
- out_layer = out_ds.CreateLayer("polygon", None, ogr.wkbPolygon)
-
- fd = ogr.FieldDefn("ISSUED", ogr.OFTString)
- fd.SetWidth(12)
- out_layer.CreateField(fd)
-
- fd = ogr.FieldDefn("EXPIRED", ogr.OFTString)
- fd.SetWidth(12)
- out_layer.CreateField(fd)
-
- fd = ogr.FieldDefn("PHENOM", ogr.OFTString)
- fd.SetWidth(2)
- out_layer.CreateField(fd)
-
- fd = ogr.FieldDefn("SIG", ogr.OFTString)
- fd.SetWidth(1)
- out_layer.CreateField(fd)
-
- fd = ogr.FieldDefn("ETN", ogr.OFTInteger)
- out_layer.CreateField(fd)
-
- tt = ts.strftime("%Y-%m-%d %H:%M+00")
- sql = f"""
- select phenomena, eventid, ST_multi(ST_union(u.geom)) as tgeom,
- max(to_char(expire at time zone 'UTC', 'YYYYMMDDHH24MI'))
- as utcexpire,
- min(to_char(issue at time zone 'UTC', 'YYYYMMDDHH24MI'))
- as utcissue
- from warnings_{ts.year} w JOIN ugcs u on (u.gid = w.gid)
- WHERE significance = 'A' and phenomena IN ('TO','SV')
-        and issue > '{tt}'::timestamp - '3 days'::interval
- and issue <= '{tt}' and
- expire > '{tt}' {etnLimiter}
- GROUP by phenomena, eventid ORDER by phenomena ASC
- """
-
- data = source.ExecuteSQL(sql)
-
- while True:
- feat = data.GetNextFeature()
- if not feat:
- break
- geom = feat.GetGeometryRef()
-
- featDef = ogr.Feature(out_layer.GetLayerDefn())
- featDef.SetGeometry(geom)
- featDef.SetField("PHENOM", feat.GetField("phenomena"))
- featDef.SetField("SIG", "A")
- featDef.SetField("ETN", feat.GetField("eventid"))
- featDef.SetField("ISSUED", feat.GetField("utcissue"))
- featDef.SetField("EXPIRED", feat.GetField("utcexpire"))
-
- out_layer.CreateFeature(featDef)
- feat.Destroy()
-
- source.Destroy()
- out_ds.Destroy()
-
- zio = BytesIO()
- with zipfile.ZipFile(
- zio, mode="w", compression=zipfile.ZIP_DEFLATED
- ) as zf:
- with open(PROJFILE, encoding="ascii") as fp:
- zf.writestr(f"{fn}.prj", fp.read())
- for suffix in ("shp", "shx", "dbf"):
- zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
-
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.zip"),
- ]
- start_response("200 OK", headers)
- return [zio.getvalue()]
+from iemweb.request.gis.watch_by_county import application # noqa: F401
diff --git a/cgi-bin/request/gis/watchwarn.py b/cgi-bin/request/gis/watchwarn.py
index 8583c9ce84..aa22231b59 100644
--- a/cgi-bin/request/gis/watchwarn.py
+++ b/cgi-bin/request/gis/watchwarn.py
@@ -1,593 +1,3 @@
-""".. title:: NWS Watch/Warning/Advisory (WWA) Data Service
+"""implemented in /pylib/iemweb/request/gis/watchwarn.py"""
-Return to `Download User Interface `_.
-
-Documentation for /cgi-bin/request/gis/watchwarn.py
----------------------------------------------------
-
-This service emits shapefiles (with an additional CSV file included) or Excel
-files. It is a rather blunt instrument, so please first review the many
-ad hoc JSON/API services found at
-`IEM Legacy JSON Services `_ or at
-`IEM API Services `_ .
-
-Changelog
----------
-
-- 2024-07-03: Added an `accept=csv` option to allow for CSV output.
-- 2024-06-26: Added a `limitpds` parameter to limit the request to only
-  include products that have a PDS (Particularly Dangerous Situation) tag or
-  phrasing.
-- 2024-05-14: To mitigate against large requests that overwhelm the server, a
-  limit of one year's worth of data is now in place for requests that do not
-  limit the request by state, phenomena, or wfo.
-- 2024-05-09: Migrated to pydantic based CGI input validation.
-
-Example Usage
--------------
-
-Return all Areal Flood, Flash Flood, Severe Thunderstorm, and Tornado Watch
-and Warnings for the state of Mississippi during 2024. Note how the phenomena
-and significance parameters are repeated so that each combination is present.
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/gis/watchwarn.py?\
-accept=shapefile&sts=2024-01-01T00:00Z&ets=2025-01-01T00:00Z&\
-location_group=states&states=MS&limitps=yes&phenomena=FF,FA,SV,TO,FF,FA,SV,TO&\
-significance=W,W,W,W,A,A,A,A
-
-Return all Tornado Warnings for the Des Moines WFO in shapefile format during
-2023.
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/gis/watchwarn.py?accept=shapefile&sts=2023-01-01T00:00Z&ets=2024-01-01T00:00Z&wfo[]=DMX&limitps=yes&phenomena=TO&significance=W
-
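-A minimal Python sketch (assuming ``requests`` and ``pandas``) that uses the
-``accept=csv`` option to pull the second example above into a DataFrame::
-
-    from io import StringIO
-
-    import pandas as pd
-    import requests
-
-    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/watchwarn.py"
-    params = {
-        "accept": "csv",
-        "sts": "2023-01-01T00:00Z",
-        "ets": "2024-01-01T00:00Z",
-        "wfo": "DMX",
-        "limitps": "yes",
-        "phenomena": "TO",
-        "significance": "W",
-    }
-    resp = requests.get(URL, params=params, timeout=600)
-    resp.raise_for_status()
-    df = pd.read_csv(StringIO(resp.text))
-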
-"""
-
-import datetime
-import tempfile
-import zipfile
-from io import BytesIO
-
-import fiona
-import pandas as pd
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_dbconnc, get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.util import utc
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-from shapely.geometry import mapping
-from shapely.wkb import loads
-
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- accept: str = Field(
- "shapefile",
- pattern="^(shapefile|excel|csv)$",
-        description="The format to return: shapefile, excel, or csv.",
- )
- addsvs: str = Field(
- "no",
- pattern="^(yes|no)$",
-        description="Include polygons that were issued within any follow-up "
-        "statements after issuance.",
- )
- ets: AwareDatetime = Field(
- None,
- description="The end timestamp in UTC. The format is ISO8601, e.g. "
- "2010-06-01T00:00Z.",
- )
- limit0: str = Field(
- "no",
- pattern="^(yes|no)$",
- description="If yes, only include Tornado, Severe Thunderstorm, "
- "Flash Flood, and Marine Warnings.",
- )
- limit1: str = Field(
- "no",
- pattern="^(yes|no)$",
- description="If yes, only include Storm Based Warnings.",
- )
- limit2: str = Field(
- "no",
- pattern="^(yes|no)$",
- description="If yes, only include Emergency Warnings.",
- )
- limitpds: bool = Field(
- False,
- description=(
- "If yes, only include products that have a PDS "
- "(Particularly Dangerous Situation) tag or phrasing."
- ),
- )
- limitps: str = Field(
- "no",
- pattern="^(yes|no)$",
- description="If yes, only include the specified phenomena and "
- "significance.",
- )
- location_group: str = Field(
- "wfo",
- pattern="^(wfo|states)$",
- description="The location group to use, either wfo or states.",
- )
- phenomena: ListOrCSVType = Field(
- ["TO"],
- description="The two character VTEC phenomena(s) to include. If you "
- "provide more than one value, the length must correspond and align "
- "with the ``significance`` parameter.",
- )
- simple: str = Field(
- "no",
- pattern="^(yes|no)$",
- description="If yes, use a simplified geometry for the UGC "
- "counties/zones.",
- )
- significance: ListOrCSVType = Field(
- ["W"],
- description="The one character VTEC significance to include, if you "
- "provide more than one value, the length must correspond "
- "and align with the ``phenomena`` parameter.",
- )
- states: ListOrCSVType = Field(
- None, description="List of states to include data for."
- )
- sts: AwareDatetime = Field(
- None,
- description="The start timestamp in UTC. The format is ISO8601, e.g. "
- "2010-06-01T00:00Z.",
- )
- timeopt: int = Field(
- 1,
-        description="The time option to use, either 1 or 2; default is 1. "
-        "Option 1 uses the start and end timestamps to determine "
-        "which events to include. Option 2 uses the at timestamp "
-        "to determine which events to include.",
- )
- wfo: ListOrCSVType = Field(
- None, description="List of WFOs to include data for."
- )
- wfos: ListOrCSVType = Field(
- None, description="Legacy parameter, update to use ``wfo``."
- )
- year1: int = Field(
- None,
- description="The start timestamp components in UTC, if you specify a "
- "sts parameter, these are ignored.",
- )
- year2: int = Field(
- None,
- description="The end timestamp components in UTC, if you specify a "
- "ets parameter, these are ignored.",
- )
- year3: int = Field(
- None,
- description="The at timestamp components in UTC. When timeopt is 2, "
- "this is used to find all events that were valid at this "
- "time.",
- )
- month1: int = Field(
- None,
- description="The start timestamp components in UTC, if you specify a "
- "sts parameter, these are ignored.",
- )
- month2: int = Field(
- None,
- description="The end timestamp components in UTC, if you specify a "
- "ets parameter, these are ignored.",
- )
- month3: int = Field(
- None,
- description="The at timestamp components in UTC. When timeopt is 2, "
- "this is used to find all events that were valid at this "
- "time.",
- )
- day1: int = Field(
- None,
- description="The start timestamp components in UTC, if you specify a "
- "sts parameter, these are ignored.",
- )
- day2: int = Field(
- None,
- description="The end timestamp components in UTC, if you specify a "
- "ets parameter, these are ignored.",
- )
- day3: int = Field(
- None,
- description="The at timestamp components in UTC. When timeopt is 2, "
- "this is used to find all events that were valid at this "
- "time.",
- )
- hour1: int = Field(
- None,
- description="The start timestamp components in UTC, if you specify a "
- "sts parameter, these are ignored.",
- )
- hour2: int = Field(
- None,
- description="The end timestamp components in UTC, if you specify a "
- "ets parameter, these are ignored.",
- )
- hour3: int = Field(
- None,
- description="The at timestamp components in UTC. When timeopt is 2, "
- "this is used to find all events that were valid at this "
- "time.",
- )
- minute1: int = Field(
- None,
- description="The start timestamp components in UTC, if you specify a "
- "sts parameter, these are ignored.",
- )
- minute2: int = Field(
- None,
- description="The end timestamp components in UTC, if you specify a "
- "ets parameter, these are ignored.",
- )
- minute3: int = Field(
- None,
- description="The at timestamp components in UTC. When timeopt is 2, "
- "this is used to find all events that were valid at this "
- "time.",
- )
-
-
-def dfmt(text):
- """Produce a prettier format for CSV."""
- if text is None or len(text) != 12:
- return ""
- return f"{text[:4]}-{text[4:6]}-{text[6:8]} {text[8:10]}:{text[10:12]}"
-
-
-def char3(wfos):
- """Make sure we don't have any 4 char IDs."""
- res = []
- for wfo in wfos:
- res.append(wfo[1:] if len(wfo) == 4 else wfo) # noqa
- return res
-
-
-def parse_wfo_location_group(environ):
- """Parse wfoLimiter"""
- limiter = ""
- wfos = environ["wfo"]
- if environ["wfos"]:
- wfos = environ["wfos"]
- if wfos is not None and "ALL" not in wfos:
- if len(wfos) == 1:
- wfo = wfos[0]
- wfo = wfo[1:] if len(wfo) == 4 else wfo
- limiter = f" and w.wfo = '{wfo}' "
- else:
- limiter = f" and w.wfo in {tuple(char3(wfos))} "
-
- return limiter
-
-
-def build_sql(environ):
- """Build the SQL statement."""
- sts = environ["sts"]
- ets = environ["ets"]
- table_extra = ""
- if environ["location_group"] == "states":
- if environ["states"]:
- states = [x[:2].upper() for x in environ["states"]]
- states.append("XX") # Hack for 1 length
- wfo_limiter = (
- " and ST_Intersects(s.the_geom, w.geom) "
- f"and s.state_abbr in {tuple(states)} "
- )
- wfo_limiter2 = f" and substr(w.ugc, 1, 2) in {tuple(states)} "
- table_extra = " , states s "
- else:
- raise ValueError("No state specified")
- else: # wfo
- wfo_limiter = parse_wfo_location_group(environ)
- wfo_limiter2 = wfo_limiter
-
- if environ["timeopt"] != 2:
- if sts is None or ets is None:
- raise IncompleteWebRequest("Missing start or end time parameters")
- # Keep size low
- if wfo_limiter == "" and (ets - sts) > datetime.timedelta(days=366):
- raise IncompleteWebRequest("Please shorten request to <1 year.")
- # Change to postgis db once we have the wfo list
- fn = f"wwa_{sts:%Y%m%d%H%M}_{ets:%Y%m%d%H%M}"
- else:
- year3 = int(environ.get("year3"))
- month3 = int(environ.get("month3"))
- day3 = int(environ.get("day3"))
- hour3 = int(environ.get("hour3"))
- minute3 = int(environ.get("minute3"))
- sts = utc(year3, month3, day3, hour3, minute3)
- ets = sts
- fn = f"wwa_{sts:%Y%m%d%H%M}"
-
- limiter = ""
- if environ["limit0"] == "yes":
- limiter = (
- " and phenomena IN ('TO','SV','FF','MA') and significance = 'W' "
- )
- if environ["limitps"] == "yes":
- phenom = environ["phenomena"]
- sig = environ["significance"]
- parts = []
- for p, s in zip(phenom, sig):
- parts.append(
- f"(phenomena = '{p[:2]}' and significance = '{s[:1]}') "
- )
- limiter = f" and ({' or '.join(parts)}) "
-
- sbwlimiter = " WHERE gtype = 'P' " if environ["limit1"] == "yes" else ""
-
- elimiter = " and is_emergency " if environ["limit2"] == "yes" else ""
- pdslimiter = " and is_pds " if environ["limitpds"] else ""
-
- warnings_table = "warnings"
- sbw_table = "sbw"
- if sts.year == ets.year:
- warnings_table = f"warnings_{sts.year}"
- sbw_table = f"sbw_{sts.year}"
-
- geomcol = "geom"
- if environ["simple"] == "yes":
- geomcol = "simple_geom"
-
- cols = (
- "wfo, utc_issue, utc_expire, utc_prodissue, utc_init_expire, "
- "phenomena, gtype, significance, eventid, status, ugc, area2d, "
- "utc_updated, hvtec_nwsli, hvtec_severity, hvtec_cause, hvtec_record, "
- "is_emergency, utc_polygon_begin, utc_polygon_end, windtag, hailtag, "
- "tornadotag, damagetag, product_id "
- )
- if environ["accept"] not in ["excel", "csv"]:
- cols = f"geo, {cols}"
-
- timelimit = f"issue >= '{sts}' and issue < '{ets}'"
- if environ["timeopt"] == 2:
- timelimit = (
- f"issue <= '{sts}' and "
- f"issue > '{sts + datetime.timedelta(days=-30)}' and "
- f"expire > '{sts}'"
- )
- else:
- if wfo_limiter == "" and limiter == "" and (ets - sts).days > 366:
- raise IncompleteWebRequest(
- "You must limit your request to a year or less."
- )
- sbwtimelimit = timelimit
- statuslimit = " status = 'NEW' "
- if environ["addsvs"] == "yes":
- statuslimit = " status != 'CAN' "
- sbwtimelimit = timelimit.replace(
- "issue",
- "coalesce(issue, polygon_begin)",
- )
- # NB: need distinct since state join could return multiple
- return (
- f"""
- WITH stormbased as (
- SELECT distinct w.geom as geo, 'P'::text as gtype, significance, wfo,
- status, eventid, ''::text as ugc,
- phenomena,
- ST_area( ST_transform(w.geom,9311) ) / 1000000.0 as area2d,
- to_char(expire at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_expire,
- to_char(issue at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_issue,
- to_char(issue at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_prodissue,
- to_char(polygon_begin at time zone 'UTC', 'YYYYMMDDHH24MI')
- as utc_polygon_begin,
- to_char(polygon_end at time zone 'UTC', 'YYYYMMDDHH24MI')
- as utc_polygon_end,
- to_char(init_expire at time zone 'UTC',
- 'YYYYMMDDHH24MI') as utc_init_expire,
- to_char(updated at time zone 'UTC',
- 'YYYYMMDDHH24MI') as utc_updated,
- hvtec_nwsli, hvtec_severity, hvtec_cause, hvtec_record, is_emergency,
- windtag, hailtag, tornadotag,
- coalesce(damagetag, floodtag_damage) as damagetag,
- product_id
- from {sbw_table} w {table_extra}
- WHERE {statuslimit} and {sbwtimelimit}
- {wfo_limiter} {limiter} {elimiter} {pdslimiter}
- ),
- countybased as (
- SELECT u.{geomcol} as geo, 'C'::text as gtype,
- significance,
- w.wfo, status, eventid, u.ugc, phenomena,
- u.area2163 as area2d,
- to_char(expire at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_expire,
- to_char(issue at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_issue,
- to_char(product_issue at time zone 'UTC',
- 'YYYYMMDDHH24MI') as utc_prodissue,
- null as utc_polygon_begin,
- null as utc_polygon_end,
- to_char(init_expire at time zone 'UTC',
- 'YYYYMMDDHH24MI') as utc_init_expire,
- to_char(updated at time zone 'UTC',
- 'YYYYMMDDHH24MI') as utc_updated,
- hvtec_nwsli, hvtec_severity, hvtec_cause, hvtec_record, is_emergency,
- null::real as windtag, null::real as hailtag, null::varchar as tornadotag,
- null::varchar as damagetag,
- product_ids[1] as product_id
- from {warnings_table} w JOIN ugcs u on (u.gid = w.gid) WHERE
- {timelimit} {wfo_limiter2} {limiter} {elimiter} {pdslimiter}
- )
- SELECT {cols} from stormbased UNION ALL
- SELECT {cols} from countybased {sbwlimiter}
- """,
- fn,
- )
-
-
-def do_excel(sql, fmt):
- """Generate an Excel format response."""
- with get_sqlalchemy_conn("postgis") as conn:
- df = pd.read_sql(sql, conn, index_col=None)
- if fmt == "excel" and len(df.index) >= 1048576:
- raise IncompleteWebRequest("Result too large for Excel download")
- # Back-convert datetimes :/
- for col in (
- "utc_issue utc_expire utc_prodissue utc_updated utc_polygon_begin "
- "utc_polygon_end"
- ).split():
- df[col] = pd.to_datetime(
- df[col],
- errors="coerce",
- format="%Y%m%d%H%M",
- ).dt.strftime("%Y-%m-%d %H:%M")
- if fmt == "csv":
- return df.to_csv(index=False).encode("ascii")
- bio = BytesIO()
- # pylint: disable=abstract-class-instantiated
- with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
- df.to_excel(writer, sheet_name="VTEC WaWA", index=False)
- return bio.getvalue()
-
-
-@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
-def application(environ, start_response):
- """Go Main Go"""
- if environ["sts"] is None:
- raise IncompleteWebRequest("Missing start time parameters")
- try:
- sql, fn = build_sql(environ)
- except ValueError as exp:
- start_response("400 Bad Request", [("Content-type", "text/plain")])
- return [str(exp).encode("ascii")]
-
- if environ["accept"] == "excel":
- headers = [
- ("Content-type", EXL),
-            ("Content-Disposition", f"attachment; filename={fn}.xlsx"),
- ]
- start_response("200 OK", headers)
- return [do_excel(sql, environ["accept"])]
- if environ["accept"] == "csv":
- headers = [
- ("Content-type", "text/csv"),
-            ("Content-Disposition", f"attachment; filename={fn}.csv"),
- ]
- start_response("200 OK", headers)
- return [do_excel(sql, environ["accept"])]
- pgconn, cursor = get_dbconnc("postgis", cursor_name="streaming")
-
- cursor.execute(sql)
-
- # Filenames are racy, so we need to have a temp folder
- with tempfile.TemporaryDirectory() as tmpdir:
- with open(f"{tmpdir}/{fn}.csv", "w", encoding="ascii") as csv:
- csv.write(
- "WFO,ISSUED,EXPIRED,INIT_ISS,INIT_EXP,PHENOM,GTYPE,SIG,ETN,"
- "STATUS,NWS_UGC,AREA_KM2,UPDATED,HVTEC_NWSLI,HVTEC_SEVERITY,"
- "HVTEC_CAUSE,HVTEC_RECORD,IS_EMERGENCY,POLYBEGIN,POLYEND,"
- "WINDTAG,HAILTAG,TORNADOTAG,DAMAGETAG,PRODUCT_ID\n"
- )
- with fiona.open(
- f"{tmpdir}/{fn}.shp",
- "w",
- crs="EPSG:4326",
- driver="ESRI Shapefile",
- schema={
- "geometry": "MultiPolygon",
- "properties": {
- "WFO": "str:3",
- "ISSUED": "str:12",
- "EXPIRED": "str:12",
- "INIT_ISS": "str:12",
- "INIT_EXP": "str:12",
- "PHENOM": "str:2",
- "GTYPE": "str:1",
- "SIG": "str:1",
- "ETN": "str:4",
- "STATUS": "str:3",
- "NWS_UGC": "str:6",
- "AREA_KM2": "float",
- "UPDATED": "str:12",
- "HV_NWSLI": "str:5",
- "HV_SEV": "str:1",
- "HV_CAUSE": "str:2",
- "HV_REC": "str:2",
- "EMERGENC": "bool",
- "POLY_BEG": "str:12",
- "POLY_END": "str:12",
- "WINDTAG": "float",
- "HAILTAG": "float",
- "TORNTAG": "str:16",
- "DAMAGTAG": "str:16",
- "PROD_ID": "str:36",
- },
- },
- ) as output:
- for row in cursor:
- if row["geo"] is None:
- continue
- mp = loads(row["geo"], hex=True)
- csv.write(
- f"{row['wfo']},{dfmt(row['utc_issue'])},"
- f"{dfmt(row['utc_expire'])},"
- f"{dfmt(row['utc_prodissue'])},"
- f"{dfmt(row['utc_init_expire'])},"
- f"{row['phenomena']},{row['gtype']},"
- f"{row['significance']},{row['eventid']},"
- f"{row['status']},"
- f"{row['ugc']},{row['area2d']:.2f},"
- f"{dfmt(row['utc_updated'])},"
- f"{row['hvtec_nwsli']},{row['hvtec_severity']},"
- f"{row['hvtec_cause']},{row['hvtec_record']},"
- f"{row['is_emergency']},"
- f"{dfmt(row['utc_polygon_begin'])},"
- f"{dfmt(row['utc_polygon_end'])},{row['windtag']},"
- f"{row['hailtag']},{row['tornadotag']},"
- f"{row['damagetag']},{row['product_id']}\n"
- )
- output.write(
- {
- "properties": {
- "WFO": row["wfo"],
- "ISSUED": row["utc_issue"],
- "EXPIRED": row["utc_expire"],
- "INIT_ISS": row["utc_prodissue"],
- "INIT_EXP": row["utc_init_expire"],
- "PHENOM": row["phenomena"],
- "GTYPE": row["gtype"],
- "SIG": row["significance"],
- "ETN": row["eventid"],
- "STATUS": row["status"],
- "NWS_UGC": row["ugc"],
- "AREA_KM2": row["area2d"],
- "UPDATED": row["utc_updated"],
- "HV_NWSLI": row["hvtec_nwsli"],
- "HV_SEV": row["hvtec_severity"],
- "HV_CAUSE": row["hvtec_cause"],
- "HV_REC": row["hvtec_record"],
- "EMERGENC": row["is_emergency"],
- "POLY_BEG": row["utc_polygon_begin"],
- "POLY_END": row["utc_polygon_end"],
- "WINDTAG": row["windtag"],
- "HAILTAG": row["hailtag"],
- "TORNTAG": row["tornadotag"],
- "DAMAGTAG": row["damagetag"],
- "PROD_ID": row["product_id"],
- },
- "geometry": mapping(mp),
- }
- )
-
- with zipfile.ZipFile(
- f"{tmpdir}/{fn}.zip", "w", zipfile.ZIP_DEFLATED
- ) as zf:
- for suffix in ["shp", "shx", "dbf", "cpg", "prj", "csv"]:
- zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
- with open(f"{tmpdir}/{fn}.zip", "rb") as fh:
- payload = fh.read()
- cursor.close()
- pgconn.close()
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.zip"),
- ]
- start_response("200 OK", headers)
-
- return [payload]
+from iemweb.request.gis.watchwarn import application # noqa: F401
diff --git a/cgi-bin/request/gis/wpc_mpd.py b/cgi-bin/request/gis/wpc_mpd.py
index 3cc5800d9c..b1c76c4d20 100644
--- a/cgi-bin/request/gis/wpc_mpd.py
+++ b/cgi-bin/request/gis/wpc_mpd.py
@@ -1,82 +1,3 @@
-""".. title:: WPC MPD Shapefile Download
+"""implemented in /pylib/iemweb/request/gis/wpc_mpd.py"""
-Documentation for /cgi-bin/request/gis/wpc_mpd.py
--------------------------------------------------
-
-To be written.
-
-"""
-
-# Local
-import tempfile
-import zipfile
-from io import BytesIO
-
-# Third Party
-import geopandas as gpd
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.util import get_sqlalchemy_conn
-from pyiem.webutil import iemapp
-
-PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
-
-
-@iemapp(default_tz="UTC", help=__doc__)
-def application(environ, start_response):
- """Do something!"""
- if "sts" not in environ:
- raise IncompleteWebRequest("Missing start time GET params")
- if environ["sts"] > environ["ets"]:
- environ["sts"], environ["ets"] = environ["ets"], environ["sts"]
- common = "at time zone 'UTC', 'YYYYMMDDHH24MI'"
- schema = {
- "geometry": "Polygon",
- "properties": {
- "ISSUE": "str:12",
- "EXPIRE": "str:12",
- "PROD_ID": "str:35",
- "YEAR": "int",
- "NUM": "int",
- "CONCERN": "str:64",
- },
- }
- with get_sqlalchemy_conn("postgis") as conn:
- df = gpd.read_postgis(
- "select "
- f"to_char(issue {common}) as issue, "
- f"to_char(expire {common}) as expire, "
- "product_id as prod_id, year, num, "
- "concerning as concern, geom "
- "from mpd WHERE issue >= %s and "
- "issue < %s ORDER by issue ASC",
- conn,
- params=(
- environ["sts"],
- environ["ets"],
- ),
- geom_col="geom",
- )
- if df.empty:
- start_response("200 OK", [("Content-type", "text/plain")])
- return [b"ERROR: no results found for your query"]
- df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
- fn = f"mpd_{environ['sts']:%Y%m%d%H%M}_{environ['ets']:%Y%m%d%H%M}"
-
- with tempfile.TemporaryDirectory() as tmpdir:
- df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
-
- zio = BytesIO()
- with zipfile.ZipFile(
- zio, mode="w", compression=zipfile.ZIP_DEFLATED
- ) as zf:
- with open(PRJFILE, encoding="utf-8") as fh:
- zf.writestr(f"{fn}.prj", fh.read())
- for suffix in ["shp", "shx", "dbf"]:
- zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}.zip"),
- ]
- start_response("200 OK", headers)
-
- return [zio.getvalue()]
+from iemweb.request.gis.wpc_mpd import application # noqa: F401
diff --git a/cgi-bin/request/grx_rings.py b/cgi-bin/request/grx_rings.py
index 38f21352a1..0a35cdb8e1 100644
--- a/cgi-bin/request/grx_rings.py
+++ b/cgi-bin/request/grx_rings.py
@@ -1,109 +1,3 @@
-"""Author: Zach Hiris"""
+"""implemented in /pylib/iemweb/request/grx_rings.py"""
-import math
-from html import escape
-from io import StringIO
-
-from pyiem.util import html_escape
-from pyiem.webutil import iemapp
-
-
-def createCircleAroundWithRadius(lat, lon, radiusMiles):
- """Create circle."""
- latArray = []
- lonArray = []
-
- for brng in range(360):
- lat2, lon2 = getLocation(lat, lon, brng, radiusMiles)
- latArray.append(lat2)
- lonArray.append(lon2)
-
- return lonArray, latArray
-
-
-def getLocation(lat1, lon1, brng, distanceMiles):
- """getLocation."""
- lat1 = lat1 * math.pi / 180.0
- lon1 = lon1 * math.pi / 180.0
-
- # earth radius - If ever needed to be in km vs. miles, change R
- R = 3959
- distanceMiles = distanceMiles / R
-
- brng = (brng / 90) * math.pi / 2
-
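-    # Spherical "destination point" formula: starting from (lat1, lon1) with
-    # the given initial bearing, travel the given angular distance along a
-    # great circle on a sphere of radius R.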
- lat2 = math.asin(
- math.sin(lat1) * math.cos(distanceMiles)
- + math.cos(lat1) * math.sin(distanceMiles) * math.cos(brng)
- )
- lon2 = lon1 + math.atan2(
- math.sin(brng) * math.sin(distanceMiles) * math.cos(lat1),
- math.cos(distanceMiles) - math.sin(lat1) * math.sin(lat2),
- )
- lon2 = 180.0 * lon2 / math.pi
- lat2 = 180.0 * lat2 / math.pi
-
- return lat2, lon2
-
-
-@iemapp()
-def application(environ, start_response):
- """Go Main Go."""
- fn = escape(environ.get("fn", "placefile_rings.txt"))
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", f"attachment; filename={fn}"),
- ]
- start_response("200 OK", headers)
-
- # Things for the user to theoretically input:
- loc = html_escape(environ.get("loc", "Jack Trice Stadium"))
- try:
- lat = environ.get("lat", 42.014004)
- lon = environ.get("lon", -93.635773)
- if isinstance(lat, list):
- lat = lat[0]
- if isinstance(lon, list):
- lon = lon[0]
- pointLat = float(lat)
- pointLon = float(lon)
- except ValueError:
-        return [b"ERROR: Invalid lat or lon value provided."]
- sio = StringIO()
- sio.write(
- f"; This is a placefile to draw a range ring x miles from: {loc}\n"
- "; Created by Zach Hiris - 8/9/2019\n"
- "; Code adapted from Jonathan Scholtes (2016)\n\n\n"
- "Threshold: 999 \n"
- f"Title: Rings @ {loc}\n"
- )
-
- for i in range(3):
- try:
- distanceInMiles = float(environ.get(f"m{i}", 100))
- except ValueError:
- return [f"ERROR: Invalid m{i} provided.".encode("ascii")]
- if distanceInMiles <= 0.00001:
- continue
- try:
- r = int(float(environ.get(f"r{i}", 255)))
- g = int(float(environ.get(f"g{i}", 255)))
- b = int(float(environ.get(f"b{i}", 0)))
- a = int(float(environ.get(f"a{i}", 255)))
- except ValueError:
- return [b"ERROR: Invalid color provided."]
- t = environ.get(f"t{i}", "").replace("\n", "\\n")
-
- # Create the lon/lat pairs
- X, Y = createCircleAroundWithRadius(
- pointLat, pointLon, distanceInMiles
- )
- ll = "\\n" if t != "" else ""
- sio.write(
- f"Color: {r} {g} {b} {a}\n"
- f'Line: 2, 0, "{t}{ll}{distanceInMiles:.1f} miles from {loc}" \n'
- )
- for x, y in zip(X, Y):
- sio.write(f" {y}, {x}\n")
- sio.write("End:\n\n")
- return [sio.getvalue().encode("utf-8")]
+from iemweb.request.grx_rings import application # noqa: F401
diff --git a/cgi-bin/request/hads.py b/cgi-bin/request/hads.py
index a16f6d24a3..cd686b5fc4 100644
--- a/cgi-bin/request/hads.py
+++ b/cgi-bin/request/hads.py
@@ -1,240 +1,3 @@
-""".. title:: HADS Data Request
+"""implemented in /pylib/iemweb/request/hads.py"""
-`IEM API Mainpage `_
-
-Documentation on /cgi-bin/request/hads.py
------------------------------------------
-
-The backend database for this application has many billions of rows of data,
-so requests can be slow.
-
-Changelog
----------
-
-- 2024-04-18: Allowed cross-year requests, but limited to 365 days when
- requesting more than one station.
-- 2024-04-09: Migrated to pydantic based CGI field validation.
-- 2024-03-15: Initial documentation added
-
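-Example Usage
--------------
-
-Return a day of data for one station as a text file (AMWI4 is a
-hypothetical station identifier here):
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/hads.py?stations=AMWI4&sts=2024-01-01T00:00Z&ets=2024-01-02T00:00Z&what=txt
-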
-"""
-
-# pylint: disable=abstract-class-instantiated
-from datetime import timedelta
-from io import BytesIO, StringIO
-from typing import Optional
-
-import pandas as pd
-from pydantic import AwareDatetime, Field, field_validator
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.network import Table as NetworkTable
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-from sqlalchemy import text
-
-DELIMITERS = {"comma": ",", "space": " ", "tab": "\t"}
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- delim: str = Field(
- "comma",
- description="Delimiter for output",
- pattern="^(comma|space|tab)$",
- )
- ets: AwareDatetime = Field(None, description="End Time for request")
- network: str = Field(None, description="Network Identifier")
- stations: ListOrCSVType = Field(..., description="Station Identifier(s)")
- sts: AwareDatetime = Field(None, description="Start Time for request")
- threshold: Optional[float] = Field(
- None, description="Threshold Value for Searching"
- )
- thresholdvar: str = Field(
- None,
- description="Threshold Variable for Searching",
- pattern="^(RG|PCP)$",
- )
- what: str = Field(
- "dl", description="Output format", pattern="^(dl|txt|html|excel)$"
- )
- year: int = Field(
- None,
- description=(
- "Legacy year value when this service only supported 1 year at a "
- "time."
- ),
- )
- year1: Optional[int] = Field(
- None,
- description="Start year for request, when sts not set.",
- )
- year2: Optional[int] = Field(
- None,
- description="End year for request, when ets not set.",
- )
- month1: int = Field(
- None,
- description="Start month for request, when sts not set.",
- )
- month2: int = Field(
- None,
- description="End month for request, when ets not set.",
- )
- day1: int = Field(
- None,
- description="Start day for request, when sts not set.",
- )
- day2: int = Field(
- None,
- description="End day for request, when ets not set.",
- )
- hour1: int = Field(
- 0,
- description="Start hour for request, when sts not set.",
- )
- hour2: int = Field(
- 0,
- description="End hour for request, when ets not set.",
- )
- minute1: int = Field(
- 0,
- description="Start minute for request, when sts not set.",
- )
- minute2: int = Field(
- 0,
- description="End minute for request, when ets not set.",
- )
-
- @field_validator("threshold", mode="before")
- def check_threshold(cls, value):
- """Allow empty string."""
- return None if value == "" else value
-
-
-def threshold_search(table, threshold, thresholdvar):
- """Do the threshold searching magic"""
- cols = list(table.columns.values)
- searchfor = f"HGI{thresholdvar.upper()}"
- cols5 = [s[:5] for s in cols]
- mycol = cols[cols5.index(searchfor)]
- above = False
- maxrunning = -99
- maxvalid = None
- res = []
- for (station, valid), row in table.iterrows():
- val = row[mycol]
- if val > threshold and not above:
- res.append(
- dict(
- station=station,
- utc_valid=valid,
- event="START",
- value=val,
- varname=mycol,
- )
- )
- above = True
- if val > threshold and above:
- if val > maxrunning:
- maxrunning = val
- maxvalid = valid
- if val < threshold and above:
- res.append(
- dict(
- station=station,
- utc_valid=maxvalid,
- event="MAX",
- value=maxrunning,
- varname=mycol,
- )
- )
- res.append(
- dict(
- station=station,
- utc_valid=valid,
- event="END",
- value=val,
- varname=mycol,
- )
- )
- above = False
- maxrunning = -99
- maxvalid = None
-
- return pd.DataFrame(res)
-
-
-@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
-def application(environ, start_response):
- """Go do something"""
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("Error, missing start or end time")
- delimiter = DELIMITERS[environ["delim"]]
- stations = environ["stations"]
- if "_ALL" in stations and environ["network"] is not None:
- stations = list(NetworkTable(environ["network"][:10]).sts.keys())
- if (environ["ets"] - environ["sts"]) > timedelta(hours=24):
- environ["ets"] = environ["sts"] + timedelta(hours=24)
- if len(stations) > 1 and (environ["ets"] - environ["sts"]) > timedelta(
- days=365
- ):
- raise IncompleteWebRequest(
- "Error, more than one station and more than 365 days requested"
- )
- if not stations:
- raise IncompleteWebRequest("Error, no stations specified!")
- sql = text(
- """
- SELECT station, valid at time zone 'UTC' as utc_valid, key, value
- from raw WHERE station = ANY(:ids) and
- valid BETWEEN :sts and :ets and value > -999
- ORDER by valid ASC
- """
- )
- params = {"ids": stations, "sts": environ["sts"], "ets": environ["ets"]}
-
- with get_sqlalchemy_conn("hads") as conn:
- df = pd.read_sql(sql, conn, params=params)
- if df.empty:
- start_response("200 OK", [("Content-type", "text/plain")])
- return [b"Error, no results found for query!"]
- table = df.pivot_table(
- values="value", columns=["key"], index=["station", "utc_valid"]
- )
- if environ["threshold"] is not None:
- if len(stations) > 1:
- start_response("200 OK", [("Content-type", "text/plain")])
- return [b"Can not do threshold search for more than one station"]
- table = threshold_search(
- table, environ["threshold"], environ["thresholdvar"]
- )
-
- sio = StringIO()
- if environ["what"] == "txt":
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", "attachment; filename=hads.txt"),
- ]
- start_response("200 OK", headers)
- table.to_csv(sio, sep=delimiter)
- return [sio.getvalue().encode("ascii")]
- if environ["what"] == "html":
- headers = [("Content-type", "text/html")]
- start_response("200 OK", headers)
- table.to_html(sio)
- return [sio.getvalue().encode("ascii")]
- if environ["what"] == "excel":
- bio = BytesIO()
- with pd.ExcelWriter(bio, engine="openpyxl") as writer:
- table.to_excel(writer, sheet_name="Data", index=True)
-
- headers = [
- ("Content-type", EXL),
- ("Content-Disposition", "attachment; filename=hads.xlsx"),
- ]
- start_response("200 OK", headers)
- return [bio.getvalue()]
- start_response("200 OK", [("Content-type", "text/plain")])
- table.to_csv(sio, sep=delimiter)
- return [sio.getvalue().encode("ascii")]
+from iemweb.request.hads import application # noqa: F401
diff --git a/cgi-bin/request/hourlyprecip.py b/cgi-bin/request/hourlyprecip.py
index ca9c101e3c..bed8b39218 100644
--- a/cgi-bin/request/hourlyprecip.py
+++ b/cgi-bin/request/hourlyprecip.py
@@ -1,96 +1,3 @@
-""".. title:: Hourly Precipitation Data Service
+"""implemented in /pylib/iemweb/request/hourlyprecip.py"""
-Documentation for /cgi-bin/request/hourlyprecip.py
---------------------------------------------------
-
-This service emits hourly precipitation data based on processed METAR
-observations by the IEM.
-
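-Example Usage
--------------
-
-Return a day of hourly precipitation for Des Moines (the network and
-station values here are illustrative):
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/hourlyprecip.py?network=IA_ASOS&station=DSM&sts=2024-01-01T00:00Z&ets=2024-01-02T00:00Z
-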
-"""
-
-from zoneinfo import ZoneInfo
-
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_dbconn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- ets: AwareDatetime = Field(
- None, description="The end of the requested interval."
- )
- lalo: bool = Field(False, description="Include the lat/lon in the output.")
- network: str = Field(
- "IA_ASOS",
- description="The network to request data for.",
- max_length=12,
- )
- st: bool = Field(False, description="Include the state in the output.")
- station: ListOrCSVType = Field(
- [], description="The station(s) to request data for."
- )
- sts: AwareDatetime = Field(
- None, description="The start of the requested interval."
- )
- tz: str = Field(
- "America/Chicago",
- description=(
- "The timezone to present the data in and for requested interval."
- ),
- )
- year1: int = Field(None, description="The start year, when sts is unset.")
- month1: int = Field(
- None, description="The start month, when sts is unset."
- )
- day1: int = Field(None, description="The start day, when sts is unset.")
- year2: int = Field(None, description="The end year, when ets is unset.")
- month2: int = Field(None, description="The end month, when ets is unset.")
- day2: int = Field(None, description="The end day, when ets is unset.")
-
-
-def get_data(network, environ, tzinfo):
- """Go fetch data please"""
- pgconn = get_dbconn("iem")
- cursor = pgconn.cursor()
- res = "station,network,valid,precip_in"
- sql = ""
- if environ["lalo"]:
- res += ",lat,lon"
- sql += " , st_y(geom) as lat, st_x(geom) as lon "
- if environ["st"]:
- res += ",st"
- sql += ", state "
- res += "\n"
- cursor.execute(
- f"""
- SELECT id, t.network, valid, phour {sql}
- from hourly h JOIN stations t on
- (h.iemid = t.iemid) WHERE
- valid >= %s and valid < %s and t.network = %s and t.id = ANY(%s)
- ORDER by valid ASC
- """,
- (environ["sts"], environ["ets"], network, environ["station"]),
- )
- for row in cursor:
- res += (
- f"{row[0]},{row[1]},{row[2].astimezone(tzinfo):%Y-%m-%d %H:%M},"
- f"{','.join([str(x) for x in row[3:]])}\n"
- )
-
- return res.encode("ascii", "ignore")
-
-
-@iemapp(help=__doc__, default_tz="America/Chicago", schema=Schema)
-def application(environ, start_response):
- """run rabbit run"""
- tzinfo = ZoneInfo(environ["tz"])
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("Missing start or end time.")
- if not environ["station"]:
- raise IncompleteWebRequest("No station= was specified.")
- start_response("200 OK", [("Content-type", "text/plain")])
- network = environ["network"]
- return [get_data(network, environ, tzinfo)]
+from iemweb.request.hourlyprecip import application # noqa: F401
diff --git a/cgi-bin/request/isusm.py b/cgi-bin/request/isusm.py
index 92b21425fb..b4b8d758c3 100644
--- a/cgi-bin/request/isusm.py
+++ b/cgi-bin/request/isusm.py
@@ -1,480 +1,3 @@
-"""Download interface for ISU-SM data."""
+"""implemented in /pylib/iemweb/request/isusm.py"""
-import datetime
-from io import BytesIO, StringIO
-from zoneinfo import ZoneInfo
-
-import numpy as np
-import pandas as pd
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.util import convert_value
-from pyiem.webutil import ensure_list, iemapp
-from sqlalchemy import text
-
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-MISSING = {"", "M", "-99"}
-SV_DEPTHS = [2, 4, 8, 12, 14, 16, 20, 24, 28, 30, 32, 36, 40, 42, 52]
-
-
-def get_stations(environ):
- """Figure out which stations were requested"""
- # Dragons: sts could now be a datetime, but it could also be a list of
- # stations, as the legacy frontend used sts as a station variable
- sts = ensure_list(environ, "station")
- if not sts and not isinstance(environ.get("sts", ""), datetime.datetime):
- sts = ensure_list(environ, "sts")
- return sts
-
-
-def get_delimiter(environ):
- """Figure out what is the requested delimiter"""
- d = environ.get("delim", "comma")
- if d == "comma":
- return ","
- return "\t"
-
-
-def fetch_daily(environ, cols):
- """Return a fetching of daily data"""
- stations = get_stations(environ)
-
- if not cols:
- cols = [
- "station",
- "valid",
- "high",
- "low",
- "rh_min",
- "rh",
- "rh_max",
- "gdd50",
- "solar",
- "precip",
- "speed",
- "gust",
- "et",
- "soil04t",
- "soil12t",
- "soil24t",
- "soil50t",
- "soil12vwc",
- "soil24vwc",
- "soil50vwc",
- ]
- else:
- cols.insert(0, "valid")
- cols.insert(0, "station")
- if "sv" in cols:
- # SoilVue 10 data
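- # e.g. requesting "sv" expands to sv_t2, sv_vwc2, sv_t4, ... for
- # every depth in SV_DEPTHS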
- for depth in SV_DEPTHS:
- for c2 in ["t", "vwc"]:
- cols.append(f"sv_{c2}{depth}")
- else:
- for col in list(cols):
- if col.startswith("sv") and len(col) > 2:
- depth = int(col[2:])
- for c2 in ["t", "vwc"]:
- cols.append(f"sv_{c2}{depth}")
- with get_sqlalchemy_conn("isuag") as conn:
- df = pd.read_sql(
- text(
- """
- --- Get the Daily Max/Min soil values
- WITH soils as (
- SELECT station, date(valid) as date,
- min(rh_avg_qc) as rh_min,
- avg(rh_avg_qc) as rh,
- max(rh_avg_qc) as rh_max,
- min(t12_c_avg_qc) as soil12tn, max(t12_c_avg_qc) as soil12tx,
- min(t24_c_avg_qc) as soil24tn, max(t24_c_avg_qc) as soil24tx,
- min(t50_c_avg_qc) as soil50tn, max(t50_c_avg_qc) as soil50tx
- from sm_hourly where
- valid >= :sts and valid < :ets and station = ANY(:stations)
- GROUP by station, date
- ), daily as (
- SELECT *,
- t4_c_min_qc as soil04tn, t4_c_max_qc as soil04tx,
- round(gddxx(50, 86, c2f( tair_c_max_qc ),
- c2f( tair_c_min_qc ))::numeric,1) as gdd50 from sm_daily WHERE
- valid >= :sts and valid < :ets and station = ANY(:stations)
- )
- SELECT d.*, s.rh_min, s.rh, s.rh_max,
- s.soil12tn, s.soil12tx, s.soil24tn, s.soil24tx, s.soil50tn, s.soil50tx
- FROM soils s JOIN daily d on (d.station = s.station and s.date = d.valid)
- ORDER by d.valid ASC
- """
- ),
- conn,
- params={
- "sts": environ["sts"],
- "ets": environ["ets"],
- "stations": stations,
- },
- index_col=None,
- )
-
- if df.empty:
- return df, []
-
- df = df.fillna(np.nan).infer_objects()
-
- # Direct copy / rename
- xref = {
- "rh_avg_qc": "relh",
- "rain_in_tot_qc": "precip",
- "winddir_d1_wvt_qc": "drct",
- "vwc12_qc": "soil12vwc",
- "vwc24_qc": "soil24vwc",
- "vwc50_qc": "soil50vwc",
- "dailyet_qc": "et",
- }
- df = df.rename(columns=xref, errors="ignore")
- # Mul by 100 for %
- for depth in [12, 24, 50]:
- df[f"soil{depth}vwc"] = df[f"soil{depth}vwc"] * 100.0
- # Now we need to do some mass data conversion, sigh
- tc = {
- "high": "tair_c_max_qc",
- "low": "tair_c_min_qc",
- "soil04t": "t4_c_avg_qc",
- "soil04tn": "soil04tn",
- "soil04tx": "soil04tx",
- "soil12t": "t12_c_avg_qc",
- "soil12tn": "soil12tn",
- "soil12tx": "soil12tx",
- "soil24t": "t24_c_avg_qc",
- "soil24tn": "soil24tn",
- "soil24tx": "soil24tx",
- "soil50t": "t50_c_avg_qc",
- "soil50tn": "soil50tn",
- "soil50tx": "soil50tx",
- }
- for key, col in tc.items():
- if key not in cols:
- continue
- # Do the work
- df[key] = convert_value(df[col].values, "degC", "degF")
-
- if "speed" in cols:
- df = df.rename(columns={"ws_mph_qc": "speed"})
- if "gust" in cols:
- df = df.rename(columns={"ws_mph_max_qc": "gust"})
- if "sv" in cols:
- # SoilVue 10 data
- for depth in SV_DEPTHS:
- df[f"sv_t{depth}"] = convert_value(
- df[f"sv_t{depth}_qc"].values, "degC", "degF"
- )
- # Copy
- df[f"sv_vwc{depth}"] = df[f"sv_vwc{depth}_qc"]
- # Remove the original
- cols.remove("sv")
- else:
- for col in list(cols):
- if col.startswith("sv_r"):
- df[col] = convert_value(df[f"{col}_qc"].values, "degC", "degF")
- cols.remove(col)
- elif col.startswith("sv_vwc"):
- df[col] = df[f"{col}_qc"]
- cols.remove(col)
-
- # Convert solar radiation to J/m2
- if "solar" in cols:
- df["solar"] = df["slrkj_tot_qc"] * 1000.0
- if "solar_mj" in cols:
- df["solar_mj"] = df["slrkj_tot_qc"] / 1000.0
- if "et" in cols:
- df["et"] = convert_value(df["et"], "mm", "inch")
-
- overwrite = (
- "bp_mb lwmv_1 lwmv_2 lwmdry_1_tot lwmcon_1_tot lwmwet_1_tot "
- "lwmdry_2_tot lwmcon_2_tot lwmwet_2_tot bpres_avg"
- ).split()
- for col in overwrite:
- if col in cols:
- # Overwrite
- df[col] = df[f"{col}_qc"]
-
- return df, cols
-
-
-def fetch_hourly(environ, cols):
- """Process the request for hourly/minute data."""
- stations = get_stations(environ)
-
- if not cols:
- cols = [
- "station",
- "valid",
- "tmpf",
- "relh",
- "solar",
- "precip",
- "speed",
- "drct",
- "et",
- "soil04t",
- "soil12t",
- "soil24t",
- "soil50t",
- "soil12vwc",
- "soil24vwc",
- "soil50vwc",
- ]
- else:
- cols.insert(0, "valid")
- cols.insert(0, "station")
-
- table = "sm_hourly"
- sqlextra = ", null as bp_mb_qc "
- if environ.get("timeres") == "minute":
- table = "sm_minute"
- sqlextra = ", null as etalfalfa_qc"
- if "sv" in cols:
- # SoilVue 10 data
- for depth in SV_DEPTHS:
- for c2 in ["t", "vwc"]:
- cols.append(f"sv_{c2}{depth}")
- else:
- for col in list(cols):
- if col.startswith("sv") and len(col) > 2:
- depth = int(col[2:])
- for c2 in ["t", "vwc"]:
- cols.append(f"sv_{c2}{depth}")
- with get_sqlalchemy_conn("isuag") as conn:
- df = pd.read_sql(
- text(
- f"""
- SELECT *, valid at time zone 'UTC' as utc_valid {sqlextra}
- from {table} WHERE valid >= :sts and valid < :ets and
- station = ANY(:stations) ORDER by valid ASC
- """
- ),
- conn,
- params={
- "sts": environ["sts"],
- "ets": environ["ets"],
- "stations": stations,
- },
- index_col=None,
- )
- if df.empty:
- return df, cols
-
- # Muck with the timestamp column
- if environ.get("tz") == "utc":
- df["valid"] = df["utc_valid"].dt.strftime("%Y-%m-%d %H:%M+00")
- else:
- df["valid"] = (
- df["utc_valid"]
- .dt.tz_localize("UTC")
- .dt.tz_convert("US/Central")
- .dt.strftime("%Y-%m-%d %H:%M")
- )
-
- df = df.fillna(np.nan).infer_objects()
- # Direct copy / rename
- xref = {
- "rh_avg_qc": "relh",
- "rain_in_tot_qc": "precip",
- "winddir_d1_wvt_qc": "drct",
- "vwc12_qc": "soil12vwc",
- "vwc24_qc": "soil24vwc",
- "vwc50_qc": "soil50vwc",
- }
- df = df.rename(columns=xref, errors="ignore")
- # Mul by 100 for %
- for depth in [12, 24, 50]:
- df[f"soil{depth}vwc"] = df[f"soil{depth}vwc"] * 100.0
- # Now we need to do some mass data conversion, sigh
- tc = {
- "tmpf": "tair_c_avg_qc",
- "soil04t": "t4_c_avg_qc",
- "soil12t": "t12_c_avg_qc",
- "soil24t": "t24_c_avg_qc",
- "soil50t": "t50_c_avg_qc",
- }
- for key, col in tc.items():
- if key not in cols:
- continue
- # Do the work
- df[key] = convert_value(df[col].values, "degC", "degF")
-
- if "sv" in cols:
- # SoilVue 10 data
- for depth in SV_DEPTHS:
- df[f"sv_t{depth}"] = convert_value(
- df[f"sv_t{depth}_qc"].values, "degC", "degF"
- )
- # Copy
- df[f"sv_vwc{depth}"] = df[f"sv_vwc{depth}_qc"]
- # Remove the original
- cols.remove("sv")
- else:
- for col in list(cols):
- if col.startswith("sv_t"):
- df[col] = convert_value(df[f"{col}_qc"].values, "degC", "degF")
- cols.remove(col)
- elif col.startswith("sv_vwc"):
- # Copy
- df[col] = df[f"{col}_qc"]
- cols.remove(col)
-
- # Convert solar radiation to J/m2
- if "solar" in cols:
- df["solar"] = df["slrkj_tot_qc"] * 1000.0
-
- if "speed" in cols:
- df["speed"] = df["ws_mph_qc"]
-
- if "et" in cols:
- df["et"] = convert_value(df["etalfalfa_qc"].values, "mm", "inch")
-
- overwrite = (
- "bp_mb lwmv_1 lwmv_2 lwmdry_1_tot lwmcon_1_tot lwmwet_1_tot "
- "lwmdry_2_tot lwmcon_2_tot lwmwet_2_tot bpres_avg"
- ).split()
- for col in overwrite:
- if col in cols:
- # Overwrite
- df[col] = df[f"{col}_qc"]
-
- return df, cols
-
-
-def muck_timestamps(environ):
- """Atone for previous sins with sts variable..."""
- # No action necessary
- if isinstance(environ["sts"], datetime.datetime):
- return
- environ["station"] = ensure_list(environ, "sts")
- environ["sts"] = datetime.datetime(
- int(environ["year1"]),
- int(environ["month1"]),
- int(environ["day1"]),
- tzinfo=ZoneInfo("America/Chicago"),
- )
- if environ["sts"] == environ["ets"]:
- environ["ets"] = environ["sts"] + datetime.timedelta(days=1)
-
-
-def fetch_inversion(environ, cols):
- """Process the request for inversion data."""
- stations = get_stations(environ)
-
- cols = [
- "station",
- "valid",
- "tair_15",
- "tair_5",
- "tair_10",
- "speed",
- "gust",
- ]
-
- with get_sqlalchemy_conn("isuag") as conn:
- df = pd.read_sql(
- text(
- """
- SELECT station, valid at time zone 'UTC' as utc_valid,
- tair_15_c_avg_qc, tair_5_c_avg_qc, tair_10_c_avg_qc,
- ws_ms_avg_qc, ws_ms_max_qc
- from sm_inversion WHERE valid >= :sts and valid < :ets and
- station = ANY(:stations) ORDER by valid ASC
- """
- ),
- conn,
- params={
- "sts": environ["sts"],
- "ets": environ["ets"],
- "stations": stations,
- },
- index_col=None,
- )
- if df.empty:
- return df, cols
-
- # Muck with the timestamp column
- if environ.get("tz") == "utc":
- df["valid"] = df["utc_valid"].dt.strftime("%Y-%m-%d %H:%M+00")
- else:
- df["valid"] = (
- df["utc_valid"]
- .dt.tz_localize("UTC")
- .dt.tz_convert("US/Central")
- .dt.strftime("%Y-%m-%d %H:%M")
- )
-
- df = df.fillna(np.nan).infer_objects()
- # Direct copy / rename
- # Now we need to do some mass data conversion, sigh
- tc = {
- "tair_15": "tair_15_c_avg_qc",
- "tair_5": "tair_5_c_avg_qc",
- "tair_10": "tair_10_c_avg_qc",
- }
- for key, col in tc.items():
- # Do the work
- df[key] = convert_value(df[col].values, "degC", "degF")
-
- df["speed"] = convert_value(df["ws_ms_avg_qc"].values, "mps", "mph")
- df["gust"] = convert_value(df["ws_ms_max_qc"].values, "mps", "mph")
-
- return df, cols
-
-
-@iemapp()
-def application(environ, start_response):
- """Do things"""
- if "sts" not in environ:
- raise IncompleteWebRequest("Missing start time parameters")
- try:
- muck_timestamps(environ)
- except Exception as exp:
- raise IncompleteWebRequest("Invalid date/station provided") from exp
- mode = environ.get("mode", "hourly")
- cols = ensure_list(environ, "vars")
- fmt = environ.get("format", "csv").lower()
- todisk = environ.get("todisk", "no")
- if mode == "hourly":
- df, cols = fetch_hourly(environ, cols)
- elif mode == "inversion":
- df, cols = fetch_inversion(environ, cols)
- else:
- df, cols = fetch_daily(environ, cols)
- miss = environ.get("missing", "-99")
- assert miss in MISSING
- df = df.replace({np.nan: miss})
- # compute columns present in both cols and df.columns
- # pandas intersection is not order preserving, so we do this
- cols = [c for c in cols if c in df.columns]
- if fmt == "excel":
- bio = BytesIO()
- # pylint: disable=abstract-class-instantiated
- if cols:
- with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
- df.to_excel(
- writer, sheet_name="Data", columns=cols, index=False
- )
- headers = [
- ("Content-type", EXL),
- ("Content-disposition", "attachment; Filename=isusm.xlsx"),
- ]
- start_response("200 OK", headers)
- return [bio.getvalue()]
-
- delim = "," if fmt == "comma" else "\t"
- sio = StringIO()
- # careful of precision here
- df.to_csv(sio, index=False, columns=cols, sep=delim, float_format="%.4f")
-
- if todisk == "yes":
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", "attachment; filename=isusm.txt"),
- ]
- else:
- headers = [("Content-type", "text/plain")]
- start_response("200 OK", headers)
- return [sio.getvalue().encode("ascii")]
+from iemweb.request.isusm import application # noqa: F401
diff --git a/cgi-bin/request/metars.py b/cgi-bin/request/metars.py
index 2236b69391..443c1234fc 100644
--- a/cgi-bin/request/metars.py
+++ b/cgi-bin/request/metars.py
@@ -1,84 +1,3 @@
-""".. title:: Request Hour's worth of METARs
+"""implemented in /pylib/iemweb/request/metars.py"""
-Documentation for /cgi-bin/request/metars.py
---------------------------------------------
-
-This is a very simple service that intends to emit a text file of METARs
-that is amenable to being ingested by other software. Each METAR is on a
-single line and the file is sorted by the observation time.
-
-Example Usage:
---------------
-
-Retrieve all METARs for the hour starting at 00 UTC on 1 January 2016:
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/metars.py?valid=2016010100
-
-"""
-
-import datetime
-import sys
-from io import StringIO
-from zoneinfo import ZoneInfo
-
-from pydantic import AwareDatetime, Field, field_validator
-from pyiem.webutil import CGIModel, iemapp
-
-
-class Schema(CGIModel):
- """Our schema for this request"""
-
- valid: AwareDatetime = Field(
- ...,
- description=(
- "Hour truncated UTC timestamp to request data for. The "
- "format is `YYYYMMDDHH`."
- ),
- )
-
- @field_validator("valid", mode="before")
- def parse_valid(cls, value):
- """Ensure valid is a valid datetime"""
- return datetime.datetime.strptime(value, "%Y%m%d%H").replace(
- tzinfo=ZoneInfo("UTC")
- )
-
-
-def check_load(cursor):
- """A crude check that aborts this script if there is too much
- demand at the moment"""
- cursor.execute(
- "select pid from pg_stat_activity where query ~* 'FETCH' "
- "and datname = 'asos'"
- )
- if len(cursor.fetchall()) > 9:
- sys.stderr.write(
- f"/cgi-bin/request/metars.py over capacity: {cursor.rowcount}\n"
- )
- return False
- return True
-
-
-@iemapp(iemdb="asos", iemdb_cursorname="streamer", schema=Schema, help=__doc__)
-def application(environ, start_response):
- """Do Something"""
- cursor = environ["iemdb.asos.cursor"]
- if not check_load(cursor):
- start_response(
- "503 Service Unavailable", [("Content-type", "text/plain")]
- )
- return [b"ERROR: server over capacity, please try later"]
- start_response("200 OK", [("Content-type", "text/plain")])
- valid = environ["valid"]
- cursor.execute(
- """
- SELECT metar from alldata
- WHERE valid >= %s and valid < %s and metar is not null
- ORDER by valid ASC
- """,
- (valid, valid + datetime.timedelta(hours=1)),
- )
- sio = StringIO()
- for row in cursor:
- sio.write("%s\n" % (row["metar"].replace("\n", " "),))
- return [sio.getvalue().encode("ascii", "ignore")]
+from iemweb.request.metars import application # noqa: F401
diff --git a/cgi-bin/request/mos.py b/cgi-bin/request/mos.py
index 0919298233..51729ab671 100644
--- a/cgi-bin/request/mos.py
+++ b/cgi-bin/request/mos.py
@@ -1,157 +1,3 @@
-""".. title:: Model Output Statistics (MOS) Data
+"""implemented in /pylib/iemweb/request/mos.py"""
-Documentation for /cgi-bin/request/mos.py
------------------------------------------
-
-This application provides access to the Model Output Statistics (MOS) data
-that the IEM processes and archives.
-
-Example Usage
-~~~~~~~~~~~~~
-
-Return all the NBS MOS data for KDSM for MOS runs made on 14 Dec 2023
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/mos.py?\
-station=KDSM&model=NBS&sts=2023-12-14T00:00Z&ets=2023-12-15T00:00Z&format=csv
-
-"""
-
-from io import BytesIO, StringIO
-
-import pandas as pd
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import CGIModel, iemapp
-from sqlalchemy import text
-
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-
-class MyModel(CGIModel):
- """Our model"""
-
- format: str = Field(
- "csv",
- description="The format of the data response. csv, json, or excel",
- pattern=r"^(csv|json|excel)$",
- )
- model: str = Field(
- ...,
- description="The model to query",
- pattern=r"^(AVN|ETA|GFS|LAV|MEX|NAM|NBE|NBS)$",
- )
- ets: AwareDatetime = Field(
- None,
- description="The end time for the data request",
- )
- station: str = Field(..., description="The 4 character station identifier")
- sts: AwareDatetime = Field(
- None,
- description="The start time for the data request",
- )
- year1: int = Field(
- None,
- description="The start year for the data request, when sts is not set",
- )
- month1: int = Field(
- None,
- description=(
- "The start month for the data request, when sts is not set"
- ),
- )
- day1: int = Field(
- None,
- description="The start day for the data request, when sts is not set",
- )
- hour1: int = Field(
- None,
- description="The start hour for the data request, when sts is not set",
- )
- year2: int = Field(
- None,
- description="The end year for the data request, when ets is not set",
- )
- month2: int = Field(
- None,
- description="The end month for the data request, when ets is not set",
- )
- day2: int = Field(
- None,
- description="The end day for the data request, when ets is not set",
- )
- hour2: int = Field(
- None,
- description="The end hour for the data request, when ets is not set",
- )
-
-
-def get_data(sts, ets, station, model, fmt):
- """Go fetch data please"""
- model2 = model
- if model == "NAM":
- model2 = "ETA"
- if model == "GFS":
- model2 = "AVN"
- with get_sqlalchemy_conn("mos") as conn:
- df = pd.read_sql(
- text(
- """
- select
- runtime at time zone 'UTC' as utc_runtime,
- ftime at time zone 'UTC' as utc_ftime,
- *, t06_1 ||'/'||t06_2 as t06,
- t12_1 ||'/'|| t12_2 as t12 from alldata WHERE station = :station
- and runtime >= :sts and runtime <= :ets and
- (model = :model1 or model = :model2)
- ORDER by runtime,ftime ASC"""
- ),
- conn,
- params={
- "sts": sts,
- "ets": ets,
- "model1": model,
- "model2": model2,
- "station": station,
- },
- )
- df = df.drop(columns=["runtime", "ftime"]).rename(
- columns={"utc_runtime": "runtime", "utc_ftime": "ftime"}
- )
- if not df.empty:
- df = df.dropna(axis=1, how="all")
- if fmt == "json":
- return df.to_json(orient="records")
- if fmt == "excel":
- bio = BytesIO()
- # pylint: disable=abstract-class-instantiated
- with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
- df.to_excel(writer, sheet_name="Data", index=False)
- return bio.getvalue()
-
- sio = StringIO()
- df.to_csv(sio, index=False)
- return sio.getvalue()
-
-
-@iemapp(help=__doc__, schema=MyModel, default_tz="UTC")
-def application(environ, start_response):
- """See how we are called"""
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("Missing sts and/or ets")
- fmt = environ["format"]
- station = environ["station"].upper()
- model = environ["model"]
- if fmt != "excel":
- start_response("200 OK", [("Content-type", "text/plain")])
- return [
- get_data(
- environ["sts"], environ["ets"], station, model, fmt
- ).encode("ascii")
- ]
- headers = [
- ("Content-type", EXL),
- ("Content-disposition", "attachment; Filename=mos.xlsx"),
- ]
- start_response("200 OK", headers)
- return [get_data(environ["sts"], environ["ets"], station, model, fmt)]
+from iemweb.request.mos import application # noqa: F401
diff --git a/cgi-bin/request/nass_iowa.py b/cgi-bin/request/nass_iowa.py
index ea810e058d..ad196767b9 100644
--- a/cgi-bin/request/nass_iowa.py
+++ b/cgi-bin/request/nass_iowa.py
@@ -1,43 +1,3 @@
-""".. title:: Download NASS Iowa Data
+"""implemented in /pylib/iemweb/request/nass_iowa.py"""
-Documentation for /cgi-bin/request/nass_iowa.py
------------------------------------------------
-
-This service provides a download of the NASS Iowa data that is ingested into
-the IEM database. The data is available in Excel format. There are no options
-to this service at this time.
-
-Example Usage
-~~~~~~~~~~~~~
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/nass_iowa.py
-
-"""
-
-from io import BytesIO
-
-import pandas as pd
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.webutil import iemapp
-
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-
-@iemapp(help=__doc__)
-def application(_environ, start_response):
- """Go Main Go"""
- headers = [
- ("Content-type", EXL),
- ("Content-disposition", "attachment; Filename=nass_iowa.xlsx"),
- ]
- start_response("200 OK", headers)
- with get_sqlalchemy_conn("coop") as conn:
- df = pd.read_sql(
- "SELECT * from nass_iowa ORDER by valid ASC",
- conn,
- parse_dates="load_time",
- )
- df["load_time"] = df["load_time"].dt.strftime("%Y-%m-%d")
- bio = BytesIO()
- df.to_excel(bio, index=False)
- return [bio.getvalue()]
+from iemweb.request.nass_iowa import application # noqa: F401
diff --git a/cgi-bin/request/nlaeflux.py b/cgi-bin/request/nlaeflux.py
index c61f78a392..2996c3a1ab 100644
--- a/cgi-bin/request/nlaeflux.py
+++ b/cgi-bin/request/nlaeflux.py
@@ -1,53 +1,3 @@
-"""Download backend for NLAE Flux Data."""
+"""implemented in /pylib/iemweb/request/nlaeflux.py"""
-import pandas as pd
-from pydantic import Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.util import utc
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-from sqlalchemy import text
-
-
-class Schema(CGIModel):
- """Request arguments."""
-
- syear: int = Field(..., description="Start Year")
- smonth: int = Field(..., description="Start Month")
- sday: int = Field(..., description="Start Day")
- eyear: int = Field(..., description="End Year")
- emonth: int = Field(..., description="End Month")
- eday: int = Field(..., description="End Day")
- station: ListOrCSVType = Field(..., description="Station Identifier")
-
-
-@iemapp(help=__doc__, schema=Schema)
-def application(environ, start_response):
- """Handle mod_wsgi request."""
- sts = utc(
- int(environ["syear"]), int(environ["smonth"]), int(environ["sday"])
- )
- ets = utc(
- int(environ["eyear"]), int(environ["emonth"]), int(environ["eday"])
- )
- stations = environ["station"]
- with get_sqlalchemy_conn("other") as conn:
- df = pd.read_sql(
- text(
- """
- select *, valid at time zone 'UTC' as utc_valid
- from flux_data where valid >= :sts and valid < :ets
- and station = ANY(:stations)
- """
- ),
- conn,
- params={"stations": stations, "sts": sts, "ets": ets},
- parse_dates=["utc_valid"],
- )
- df["valid"] = df["utc_valid"].dt.strftime("%Y-%m-%d %H:%M:%S")
- df = df.drop(columns=["utc_valid"])
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", "attachment; filename=fluxdata.txt"),
- ]
- start_response("200 OK", headers)
- return [df.to_csv(index=False).encode("ascii")]
+from iemweb.request.nlaeflux import application # noqa: F401
diff --git a/cgi-bin/request/other.py b/cgi-bin/request/other.py
index deecde4cbe..c86257bb4d 100644
--- a/cgi-bin/request/other.py
+++ b/cgi-bin/request/other.py
@@ -1,67 +1,3 @@
-"""
-Download interface for data from 'other' network
-"""
+"""implemented in /pylib/iemweb/request/other.py"""
-from io import StringIO
-
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.util import get_dbconnc
-from pyiem.webutil import iemapp
-
-
-def fetcher(station, sts, ets):
- """
- Fetch the data
- """
- cols = [
- "station",
- "valid",
- "tmpf",
- "dwpf",
- "drct",
- "sknt",
- "gust",
- "relh",
- "alti",
- "pcpncnt",
- "pday",
- "pmonth",
- "srad",
- ]
-
- pgconn, cursor = get_dbconnc("other")
- cursor.execute(
- """
- SELECT * from alldata where station = %s and valid between %s and %s
- ORDER by valid ASC
- """,
- (station, sts.strftime("%Y-%m-%d"), ets.strftime("%Y-%m-%d")),
- )
-
- sio = StringIO()
- sio.write(
- (
- "station,valid_CST_CDT,air_tmp_F,dew_point_F,"
- "wind_dir_deg,wind_sped_kts,wind_gust_kts,relh_%,"
- "alti_in,pcpncnt_in,precip_day_in,precip_month_in,"
- "solar_rad_wms\n"
- )
- )
-
- for row in cursor:
- sio.write(",".join(f"{row[col]}" for col in cols))
- sio.write("\n")
- pgconn.close()
- return sio.getvalue().encode("ascii")
-
-
-@iemapp()
-def application(environ, start_response):
- """
- Do something!
- """
- if "sts" not in environ:
- raise IncompleteWebRequest("GET start time parameters missing")
- station = environ.get("station", "")[:10]
- start_response("200 OK", [("Content-type", "text/plain")])
- return [fetcher(station, environ["sts"], environ["ets"])]
+from iemweb.request.other import application # noqa: F401
diff --git a/cgi-bin/request/purpleair.py b/cgi-bin/request/purpleair.py
index cae96b940f..fe946d1504 100644
--- a/cgi-bin/request/purpleair.py
+++ b/cgi-bin/request/purpleair.py
@@ -1,55 +1,3 @@
-"""
-Purple Air Quality Sensor
-"""
+"""implemented in /pylib/iemweb/request/purpleair.py"""
-from io import BytesIO
-
-import pandas as pd
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import iemapp
-from sqlalchemy import text
-
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-
-def run(environ, start_response):
- """run()"""
- sql = text(
- """
- select * from purpleair where valid >= :sts and valid < :ets
- ORDER by valid asc
- """
- )
- with get_sqlalchemy_conn("other") as conn:
- df = pd.read_sql(
- sql, conn, params={"sts": environ["sts"], "ets": environ["ets"]}
- )
- if environ.get("excel", "no") == "yes":
- start_response(
- "200 OK",
- [
- ("Content-type", EXL),
- ("Content-Disposition", "attachment; filename=purpleair.xlsx"),
- ],
- )
- bio = BytesIO()
- df.to_excel(bio, index=False, engine="openpyxl")
- return bio.getvalue()
- start_response(
- "200 OK",
- [
- ("Content-type", "application/octet-stream"),
- ("Content-Disposition", "attachment; filename=purpleair.csv"),
- ],
- )
- return df.to_csv(None, index=False).encode("ascii")
-
-
-@iemapp(default_tz="America/Chicago")
-def application(environ, start_response):
- """Go Main Go"""
- if "sts" not in environ:
- raise IncompleteWebRequest("GET start time parameters missing")
-
- return [run(environ, start_response)]
+from iemweb.request.purpleair import application # noqa: F401
diff --git a/cgi-bin/request/raob.py b/cgi-bin/request/raob.py
index 55b0e75e6d..75115b702a 100644
--- a/cgi-bin/request/raob.py
+++ b/cgi-bin/request/raob.py
@@ -1,116 +1,3 @@
-""".. title:: RAOB Data Service
+"""implemented in /pylib/iemweb/request/raob.py"""
-Documentation for /cgi-bin/request/raob.py
-------------------------------------------
-
-To be written.
-"""
-
-import datetime
-from io import StringIO
-from zoneinfo import ZoneInfo
-
-from pyiem.database import get_dbconn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.network import Table as NetworkTable
-from pyiem.webutil import iemapp
-
-
-def m(val):
- """Helper"""
- if val is None:
- return "M"
- return val
-
-
-def fetcher(station, sts, ets):
- """Do fetching"""
- sio = StringIO()
- dbconn = get_dbconn("raob")
- cursor = dbconn.cursor("raobstreamer")
- stations = [station]
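- # A station id with a leading underscore denotes a virtual site whose
- # network table "name" field lists the real stations after a "--"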
- if station.startswith("_"):
- nt = NetworkTable("RAOB", only_online=False)
- stations = nt.sts[station]["name"].split("--")[1].strip().split(",")
-
- cursor.execute(
- """
- SELECT f.valid at time zone 'UTC', p.levelcode, p.pressure, p.height,
- p.tmpc, p.dwpc, p.drct, round((p.smps * 1.94384)::numeric,0),
- p.bearing, p.range_miles, f.station from
- raob_profile p JOIN raob_flights f on
- (f.fid = p.fid) WHERE f.station = ANY(%s) and valid >= %s and valid < %s
- """,
- (stations, sts, ets),
- )
- sio.write(
- (
- "station,validUTC,levelcode,pressure_mb,height_m,tmpc,"
- "dwpc,drct,speed_kts,bearing,range_sm\n"
- )
- )
- for row in cursor:
- sio.write(
- ("%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s\n")
- % (
- row[10],
- m(row[0]),
- m(row[1]),
- m(row[2]),
- m(row[3]),
- m(row[4]),
- m(row[5]),
- m(row[6]),
- m(row[7]),
- m(row[8]),
- m(row[9]),
- )
- )
- return sio.getvalue().encode("ascii", "ignore")
-
-
-def friendly_date(form, key):
- """More forgiving date conversion"""
- val = form.get(key)
- try:
- val = val.strip()
- if len(val.split()) == 1:
- dt = datetime.datetime.strptime(val, "%m/%d/%Y")
- else:
- dt = datetime.datetime.strptime(val, "%m/%d/%Y %H:%M")
- dt = dt.replace(tzinfo=ZoneInfo("UTC"))
- except Exception:
- return (
- f"Invalid {key} date provided, should be '%m/%d/%Y %H:%M'"
- " in UTC timezone"
- )
- return dt
-
-
-@iemapp(help=__doc__)
-def application(environ, start_response):
- """Go Main Go"""
- if "sts" not in environ:
- raise IncompleteWebRequest("GET parameter sts= missing")
- sts = friendly_date(environ, "sts")
- ets = friendly_date(environ, "ets")
- for val in [sts, ets]:
- if not isinstance(val, datetime.datetime):
- headers = [("Content-type", "text/plain")]
- start_response("500 Internal Server Error", headers)
- return [val.encode("ascii")]
-
- station = environ.get("station", "KOAX")[:4]
- if environ.get("dl", None) is not None:
- headers = [
- ("Content-type", "application/octet-stream"),
- (
- "Content-Disposition",
- "attachment; "
- f"filename={station}_{sts:%Y%m%d%H}_{ets:%Y%m%d%H}.txt",
- ),
- ]
- else:
- headers = [("Content-type", "text/plain")]
- start_response("200 OK", headers)
- return [fetcher(station, sts, ets)]
+from iemweb.request.raob import application # noqa: F401
diff --git a/cgi-bin/request/raster2netcdf.py b/cgi-bin/request/raster2netcdf.py
index 4e4696afad..a7a50b6d58 100644
--- a/cgi-bin/request/raster2netcdf.py
+++ b/cgi-bin/request/raster2netcdf.py
@@ -1,145 +1,3 @@
-""".. title:: Raster to NetCDF Data Service
+"""implemented in /pylib/iemweb/request/raster2netcdf.py"""
-Documentation for /cgi-bin/request/raster2netcdf.py
----------------------------------------------------
-
-To be written.
-"""
-
-import datetime
-import os
-import tempfile
-from io import BytesIO
-from zoneinfo import ZoneInfo
-
-import netCDF4
-import numpy as np
-from PIL import Image
-from pydantic import Field
-from pyiem.database import get_dbconn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import CGIModel, iemapp
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- dstr: str = Field(
- "201710251200",
- description="UTC Datetime to request data for",
- max_length=12,
- )
- prod: str = Field("", description="Product to request", max_length=100)
-
-
-def get_gridinfo(filename, xpoints, ypoints):
- """Figure out the grid navigation, sigh"""
- with open(f"{filename[:-4]}.wld", encoding="ascii") as fh:
- lines = fh.readlines()
- dx = float(lines[0])
- dy = float(lines[3])
- west = float(lines[4])
- north = float(lines[5])
- south = north + dy * ypoints
- lats = np.arange(0, ypoints) * (0 - dy) + south
- lons = np.arange(0, xpoints) * dx + west
- return lons, lats
-
-
-def get_table(prod):
- """Return our lookup table"""
- pgconn = get_dbconn("mesosite")
- cursor = pgconn.cursor()
- xref = [1.0e20] * 256
- cursor.execute(
- "SELECT id, filename_template, units, cf_long_name "
- "from iemrasters where name = %s",
- (prod,),
- )
- if cursor.rowcount == 0:
- raise IncompleteWebRequest("Unknown product")
- (rid, template, units, long_name) = cursor.fetchone()
- cursor.execute(
- """
- SELECT coloridx, value from iemrasters_lookup
- WHERE iemraster_id = %s and value is not null
- ORDER by coloridx ASC
- """,
- (rid,),
- )
- for row in cursor:
- xref[row[0]] = row[1]
- return np.array(xref), template, units, long_name
-
-
-def make_netcdf(xpoints, ypoints, lons, lats):
- """generate the netcdf file"""
- tmpobj = tempfile.NamedTemporaryFile(suffix=".nc", delete=False)
- with netCDF4.Dataset(tmpobj.name, "w") as nc:
- nc.Conventions = "CF-1.6"
- nc.createDimension("lat", ypoints)
- nc.createDimension("lon", xpoints)
- nclon = nc.createVariable("lon", np.float32, ("lon",))
- nclon.units = "degree_east"
- nclon.long_name = "longitude"
- nclon[:] = lons
- nclat = nc.createVariable("lat", np.float32, ("lat",))
- nclat.units = "degree_north"
- nclat.long_name = "latitude"
- nclat[:] = lats
- return tmpobj.name
-
-
-def do_work(valid, prod, start_response):
- """Our workflow"""
- # Get lookup table
- xref, template, units, long_name = get_table(prod)
- # Get RASTER
- fn = valid.strftime(template)
- if not os.path.isfile(fn):
- start_response("200 OK", [("Content-type", "text/plain")])
- return b"ERROR: The IEM Archives do not have this file available"
- raster = np.flipud(np.array(Image.open(fn)))
- (ypoints, xpoints) = raster.shape
- # build lat, lon arrays
- lons, lats = get_gridinfo(fn, xpoints, ypoints)
- # create netcdf file
- tmpname = make_netcdf(xpoints, ypoints, lons, lats)
- with netCDF4.Dataset(tmpname, "a") as nc:
- # write data
- ncvar = nc.createVariable(
- prod, float, ("lat", "lon"), zlib=True, fill_value=1.0e20
- )
- ncvar.units = units
- ncvar.long_name = long_name
- ncvar.coordinates = "lon lat"
- # convert RASTER via lookup table
- ncvar[:] = xref[raster]
- # send data to user
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-disposition", "attachment; filename=res.nc"),
- ]
- start_response("200 OK", headers)
- bio = BytesIO()
- with open(tmpname, "rb") as fh:
- bio.write(fh.read())
- # remove tmp netcdf file
- os.unlink(tmpname)
- return bio.getvalue()
-
-
-@iemapp(help=__doc__, schema=Schema)
-def application(environ, start_response):
- """Do great things"""
- dstr = environ["dstr"]
- prod = environ["prod"]
- if prod == "":
- raise IncompleteWebRequest("prod is required")
- try:
- valid = datetime.datetime.strptime(dstr, "%Y%m%d%H%M").replace(
- tzinfo=ZoneInfo("UTC")
- )
- except Exception as exp:
- raise IncompleteWebRequest("dstr not in form %Y%m%d%H%M") from exp
- return [do_work(valid, prod, start_response)]
+from iemweb.request.raster2netcdf import application # noqa: F401
diff --git a/cgi-bin/request/rwis.py b/cgi-bin/request/rwis.py
index daa4e4c802..8dd2d5f43f 100644
--- a/cgi-bin/request/rwis.py
+++ b/cgi-bin/request/rwis.py
@@ -1,99 +1,3 @@
-"""Download Interface for RWIS data"""
+"""implemented in /pylib/iemweb/request/rwis.py"""
-# pylint: disable=abstract-class-instantiated
-from io import BytesIO, StringIO
-
-import pandas as pd
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.network import Table as NetworkTable
-from pyiem.util import get_sqlalchemy_conn
-from pyiem.webutil import ensure_list, iemapp
-from sqlalchemy import text
-
-DELIMITERS = {"comma": ",", "space": " ", "tab": "\t"}
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-
-@iemapp(default_tz="America/Chicago")
-def application(environ, start_response):
- """Go do something"""
- include_latlon = environ.get("gis", "no").lower() == "yes"
- myvars = ensure_list(environ, "vars")
- myvars.insert(0, "station")
- myvars.insert(1, "obtime")
- delimiter = DELIMITERS.get(environ.get("delim", "comma"))
- what = environ.get("what", "dl")
- tzname = environ.get("tz", "UTC")
- src = environ.get("src", "atmos")
- stations = ensure_list(environ, "stations")
- if not stations:
- raise IncompleteWebRequest("Missing GET parameter stations=")
-
- tbl = "alldata"
- if src in ["soil", "traffic"]:
- tbl = f"alldata_{src}"
- network = environ.get("network", "IA_RWIS")
- nt = NetworkTable(network, only_online=False)
- if "_ALL" in stations:
- stations = list(nt.sts.keys())
- params = {
- "tzname": tzname,
- "ids": stations,
- "sts": environ["sts"],
- "ets": environ["ets"],
- }
- sql = text(
- f"SELECT *, valid at time zone :tzname as obtime from {tbl} "
- "WHERE station = ANY(:ids) and valid BETWEEN :sts and :ets "
- "ORDER by valid ASC"
- )
- with get_sqlalchemy_conn("rwis") as conn:
- df = pd.read_sql(sql, conn, params=params)
- if df.empty:
- start_response("200 OK", [("Content-type", "text/plain")])
- return [b"Sorry, no results found for query!"]
- if include_latlon:
- myvars.insert(2, "longitude")
- myvars.insert(3, "latitude")
-
- def get_lat(station):
- """hack"""
- return nt.sts[station]["lat"]
-
- def get_lon(station):
- """hack"""
- return nt.sts[station]["lon"]
-
- df["latitude"] = [get_lat(x) for x in df["station"]]
- df["longitude"] = [get_lon(x) for x in df["station"]]
-
- sio = StringIO()
- if what in ["txt", "download"]:
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-disposition", "attachment; filename=rwis.txt"),
- ]
- start_response("200 OK", headers)
- df.to_csv(sio, index=False, sep=delimiter, columns=myvars)
- return [sio.getvalue().encode("ascii")]
- if what == "html":
- start_response("200 OK", [("Content-type", "text/html")])
- df.to_html(sio, columns=myvars)
- return [sio.getvalue().encode("ascii")]
- if what == "excel":
- if len(df.index) >= 1048576:
- start_response("200 OK", [("Content-type", "text/plain")])
- return [b"Dataset too large for excel format."]
- bio = BytesIO()
- with pd.ExcelWriter(bio) as writer:
- df.to_excel(writer, sheet_name="Data", index=False, columns=myvars)
-
- headers = [
- ("Content-type", EXL),
- ("Content-disposition", "attachment; Filename=rwis.xlsx"),
- ]
- start_response("200 OK", headers)
- return [bio.getvalue()]
- start_response("200 OK", [("Content-type", "text/plain")])
- df.to_csv(sio, index=False, sep=delimiter, columns=df.columns.intersection(myvars))
- return [sio.getvalue().encode("ascii")]
+from iemweb.request.rwis import application # noqa: F401
diff --git a/cgi-bin/request/scan.py b/cgi-bin/request/scan.py
index 46cd4fb628..1f80ea29d1 100644
--- a/cgi-bin/request/scan.py
+++ b/cgi-bin/request/scan.py
@@ -1,55 +1,3 @@
-"""SCAN download backend."""
+"""implemented in /pylib/iemweb/request/scan.py"""
-from io import StringIO
-
-import pandas as pd
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.util import get_sqlalchemy_conn
-from pyiem.webutil import ensure_list, iemapp
-from sqlalchemy import text
-
-DELIMITERS = {"comma": ",", "space": " ", "tab": "\t"}
-
-
-def get_df(stations, sts, ets):
- """Get what the database has!"""
- with get_sqlalchemy_conn("scan") as conn:
- df = pd.read_sql(
- text(
- "select * from alldata where station = ANY(:ids) and "
- "valid >= :sts and valid < :ets "
- "order by station asc, valid asc"
- ),
- conn,
- params={"ids": stations, "sts": sts, "ets": ets},
- )
- if not df.empty:
- df["valid"] = df["valid"].dt.strftime("%Y-%m-%d %H:%M")
- return df
-
-
-@iemapp(default_tz="UTC")
-def application(environ, start_response):
- """
- Do something!
- """
- if "sts" not in environ:
- raise IncompleteWebRequest("GET start time parameters missing")
- stations = ensure_list(environ, "stations")
- varnames = ensure_list(environ, "vars")
- varnames.insert(0, "valid")
- varnames.insert(0, "station")
- what = environ.get("what", "dl")
- delimiter = DELIMITERS.get(environ.get("delim", "comma"))
- df = get_df(stations, environ["sts"], environ["ets"])
- if what in ["txt", "download"]:
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-disposition", "attachment; filename=scan.txt"),
- ]
- else:
- headers = [("Content-type", "text/plain")]
- start_response("200 OK", headers)
- sio = StringIO()
- df.to_csv(sio, index=False, sep=delimiter, columns=varnames)
- return [sio.getvalue().encode("ascii")]
+from iemweb.request.scan import application # noqa: F401
diff --git a/cgi-bin/request/scp.py b/cgi-bin/request/scp.py
index 575bafc412..5edbcf2299 100644
--- a/cgi-bin/request/scp.py
+++ b/cgi-bin/request/scp.py
@@ -1,130 +1,3 @@
-""".. title:: Satellite Cloud Product (SCP) Request
+"""implemented in /pylib/iemweb/request/scp.py"""
-Documentation for /cgi-bin/request/scp.py
---------------------------------------------
-
-This script is used to request Satellite Cloud Product (SCP) data from the
-IEM's ASOS database.
-
-Examples:
----------
-
-Download all 2023 data for KBUR
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/scp.py?station=KBUR&sts=2023-01-01T00:00Z&ets=2024-01-01T00:00Z
-
-"""
-
-from io import StringIO
-
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-from sqlalchemy import text
-
-
-class Schema(CGIModel):
- """Our schema for this request"""
-
- ets: AwareDatetime = Field(
- None,
- description=(
- "End timestamp with timezone included to request data for."
- ),
- )
- station: ListOrCSVType = Field(
- None,
- description=(
- "Four or Five character station identifier(s) to request data for."
- ),
- )
- sts: AwareDatetime = Field(
- None,
- description=(
- "Start timestamp with timezone included to request data for."
- ),
- )
- year1: int = Field(
- None,
- description=(
- "Start year to request data for; an alternative to sts."
- ),
- )
- year2: int = Field(
- None,
- description=(
- "End year to request data for; an alternative to ets."
- ),
- )
- month1: int = Field(
- None,
- description=(
- "Start month to request data for; an alternative to sts."
- ),
- )
- month2: int = Field(
- None,
- description=(
- "End month to request data for; an alternative to ets."
- ),
- )
- day1: int = Field(
- None,
- description=(
- "Start day to request data for; an alternative to sts."
- ),
- )
- day2: int = Field(
- None,
- description=(
- "End day to request data for; an alternative to ets."
- ),
- )
- hour1: int = Field(0, description="Start hour to request data for.")
- hour2: int = Field(0, description="End hour to request data for.")
- minute1: int = Field(0, description="Start minute to request data for.")
- minute2: int = Field(0, description="End minute to request data for.")
-
-
-@iemapp(schema=Schema, help=__doc__)
-def application(environ, start_response):
- """Do Something"""
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("Both start and end time must be provided!")
- start_response("200 OK", [("Content-type", "text/plain")])
- slimiter = ""
- params = {
- "sts": environ["sts"],
- "ets": environ["ets"],
- "station": environ["station"],
- }
- if environ["station"]:
- slimiter = "station = ANY(:station)"
- sio = StringIO()
- sio.write("station,utc_valid,mid,high,cldtop1,cldtop2,eca,source\n")
- with get_sqlalchemy_conn("asos") as conn:
- res = conn.execute(
- text(f"""
- SELECT station, valid at time zone 'UTC' as utc_valid, mid, high,
- cldtop1, cldtop2, eca, source from scp_alldata
- WHERE valid >= :sts and valid < :ets and {slimiter}
- ORDER by valid ASC
- """),
- params,
- )
- for row in res:
- sio.write(
- ("%s,%s,%s,%s,%s,%s,%s,%s\n")
- % (
- row[0],
- row[1].strftime("%Y-%m-%d %H:%M:%S"),
- row[2],
- row[3],
- row[4],
- row[5],
- row[6],
- row[7],
- )
- )
- return [sio.getvalue().encode("ascii", "ignore")]
+from iemweb.request.scp import application # noqa: F401
diff --git a/cgi-bin/request/ss.py b/cgi-bin/request/ss.py
index e9e71a56e6..eab5780870 100644
--- a/cgi-bin/request/ss.py
+++ b/cgi-bin/request/ss.py
@@ -1,165 +1,3 @@
-"""
-Return a simple CSV of Stuart Smith data
+"""implemented in /pylib/iemweb/request/ss.py"""
-with the channels mapped as follows (sample values shown):
-
- ch1_data_p  Levelogger Reading (ft)   e.g. 7.20473
- ch2_data_p  Barologger Reading        e.g. 2.68857
- ch1_data_t  Temp (C)                  e.g. 21.1
- ch2_data_t  Barologger Air Temp (C)   e.g. 18.19
- ch1_data_c  Conductivity (micro-S)    e.g. 48
-"""
-
-from io import BytesIO
-
-import pandas as pd
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-from sqlalchemy import text
-
-LOOKUP = {
- 9100104: "SSP #6",
- 9100135: "SSP #8",
- 9100131: "SSP #1",
- 9100156: "SSP #7",
-}
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- excel: bool = Field(description="Return Excel File", default=False)
- opt: str = Field(description="bubbler or gage", default="gage")
- station: ListOrCSVType = Field(
- default=[], description="Station ID to query"
- )
- sts: AwareDatetime = Field(description="Start Time", default=None)
- ets: AwareDatetime = Field(description="End Time", default=None)
- year1: int = Field(
- description="Start year, when sts is not set.", default=None
- )
- month1: int = Field(
- description="Start month, when sts is not set.", default=None
- )
- day1: int = Field(
- description="Start day, when sts is not set.", default=None
- )
- year2: int = Field(
- description="End year, when ets is not set.", default=None
- )
- month2: int = Field(
- description="End month, when ets is not set.", default=None
- )
- day2: int = Field(
- description="End day, when ets is not set.", default=None
- )
-
-
-def gage_run(sts, ets, stations, excel, start_response):
- """run()"""
- if not stations:
- stations = LOOKUP.keys()
-
- sql = text(
- """select date(valid) as date, to_char(valid, 'HH24:MI:SS') as time,
- site_serial, ch1_data_p, ch2_data_p,
- ch1_data_t, ch2_data_t, ch1_data_c
- from ss_logger_data WHERE valid between :sts and :ets and
- site_serial = ANY(:stations) ORDER by valid ASC"""
- )
- with get_sqlalchemy_conn("other") as conn:
- df = pd.read_sql(
- sql, conn, params={"sts": sts, "ets": ets, "stations": stations}
- )
- eheaders = [
- "date",
- "time",
- "site_serial",
- "Levelogger Reading (ft)",
- "Barologger Reading",
- "Water Temp (C)",
- "Barologger Air Temp (C)",
- "Conductivity (micro-S)",
- ]
-
- if excel:
- headers = [
- ("Content-type", EXL),
- ("Content-disposition", "attachment; Filename=stuartsmith.xlsx"),
- ]
- start_response("200 OK", headers)
- bio = BytesIO()
- df.to_excel(bio, header=eheaders, index=False, engine="openpyxl")
- return bio.getvalue()
- start_response("200 OK", [("Content-type", "text/plain")])
- return df.to_csv(None, index=False).encode("ascii")
-
-
-def bubbler_run(sts, ets, excel, start_response):
- """run()"""
- sql = text(
- """
- WITH one as (SELECT valid, value from ss_bubbler WHERE
- valid between :sts and :ets and field = 'Batt Voltage'),
- two as (SELECT valid, value from ss_bubbler WHERE
- valid between :sts and :ets and field = 'STAGE'),
- three as (SELECT valid, value from ss_bubbler WHERE
- valid between :sts and :ets and field = 'Water Temp')
-
- SELECT date(coalesce(one.valid, two.valid, three.valid)) as date,
- to_char(coalesce(one.valid, two.valid, three.valid), 'HH24:MI:SS') as time,
- one.value as "batt voltage",
- two.value as "stage",
- three.value as "water temp"
- from one FULL OUTER JOIN two on (one.valid = two.valid)
- FULL OUTER JOIN three on (coalesce(two.valid,one.valid) = three.valid)
- ORDER by date ASC, time ASC
- """
- )
- with get_sqlalchemy_conn("other") as conn:
- df = pd.read_sql(sql, conn, params={"sts": sts, "ets": ets})
- if excel:
- headers = [
- ("Content-type", "application/vnd.ms-excel"),
- ("Content-disposition", "attachment; Filename=stuartsmith.xls"),
- ]
- start_response("200 OK", headers)
- bio = BytesIO()
- df.to_excel(bio, index=False)
- return bio.getvalue()
- start_response("200 OK", [("Content-type", "text/plain")])
- return df.to_csv(None, index=False).encode("ascii")
-
-
-@iemapp(default_tz="America/Chicago", help=__doc__, schema=Schema)
-def application(environ, start_response):
- """Go Main Go"""
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("GET start time parameters missing")
- opt = environ["opt"]
-
- stations = environ["station"]
- if opt == "bubbler":
- return [
- bubbler_run(
- environ["sts"],
- environ["ets"],
- environ["excel"],
- start_response,
- )
- ]
- return [
- gage_run(
- environ["sts"],
- environ["ets"],
- stations,
- environ["excel"],
- start_response,
- )
- ]
+from iemweb.request.ss import application # noqa: F401
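The shim above simply re-exports the WSGI ``application`` callable, so any WSGI container pointed at the old cgi-bin path keeps working unchanged. A minimal smoke test of that pattern, assuming a checkout with ``pylib`` importable (the local server and port are illustrative only):

    from wsgiref.simple_server import make_server

    from iemweb.request.ss import application

    if __name__ == "__main__":
        # serve the re-exported WSGI app locally and exercise the old URL path
        with make_server("127.0.0.1", 8080, application) as httpd:
            print("Serving ss.py shim on http://127.0.0.1:8080 ...")
            httpd.serve_forever()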
diff --git a/cgi-bin/request/taf.py b/cgi-bin/request/taf.py
index 98bc39ac4e..a60ee244a7 100644
--- a/cgi-bin/request/taf.py
+++ b/cgi-bin/request/taf.py
@@ -1,142 +1,3 @@
-""".. title:: Terminal Aerodrome Forecast (TAF) Data
+"""implemented in /pylib/iemweb/request/taf.py"""
-Documentation for /cgi-bin/request/taf.py
------------------------------------------
-
-This service provides access to Terminal Aerodrome Forecast (TAF) data for
-specified stations and time ranges.
-
-Example Usage
-~~~~~~~~~~~~~
-
-Request all of Des Moines TAF for the month of January 2024 in CSV format:
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/taf.py?station=DSM&sts=2024-01-01T00:00Z&ets=2024-02-01T00:00Z&fmt=csv
-
-Request the past 240 hours of TAF data for Chicago O'Hare in Excel format:
-
- https://mesonet.agron.iastate.edu/cgi-bin/request/taf.py?station=ORD&hours=240&fmt=excel
-"""
-
-from datetime import timedelta
-from io import BytesIO
-from zoneinfo import ZoneInfo
-
-import pandas as pd
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.util import utc
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-from sqlalchemy import text
-
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-
-class MyModel(CGIModel):
- """Our model"""
-
- hours: int = Field(
- None,
- description=(
- "Request data for the time period from now until this many hours "
- "in the past. Overrides any sts or ets values."
- ),
- le=2400,
- gt=0,
- )
- fmt: str = Field(
- "csv",
- description="The format of the output file, either 'csv' or 'excel'",
- )
- tz: str = Field("UTC", description="The timezone to use for timestamps")
- sts: AwareDatetime = Field(
- None, description="The start timestamp for the data"
- )
- ets: AwareDatetime = Field(
- None, description="The end timestamp for the data"
- )
- station: ListOrCSVType = Field(
- ...,
- description=(
- "The station(s) to request data for, "
- "either multi params or comma separated"
- ),
- )
- year1: int = Field(None, description="The start year, if not using sts")
- month1: int = Field(None, description="The start month, if not using sts")
- day1: int = Field(None, description="The start day, if not using sts")
- hour1: int = Field(0, description="The start hour, if not using sts")
- minute1: int = Field(0, description="The start minute, if not using sts")
- year2: int = Field(None, description="The end year, if not using ets")
- month2: int = Field(None, description="The end month, if not using ets")
- day2: int = Field(None, description="The end day, if not using ets")
- hour2: int = Field(0, description="The end hour, if not using ets")
- minute2: int = Field(0, description="The end minute, if not using ets")
-
-
-def run(start_response, environ):
- """Get data!"""
- with get_sqlalchemy_conn("asos") as dbconn:
- df = pd.read_sql(
- text(
- """
- select t.station, t.valid at time zone 'UTC' as valid,
- f.valid at time zone 'UTC' as fx_valid, raw, is_tempo,
- end_valid at time zone 'UTC' as fx_valid_end,
- sknt, drct, gust, visibility,
- presentwx, skyc, skyl, ws_level, ws_drct, ws_sknt, product_id
- from taf t JOIN taf_forecast f on (t.id = f.taf_id)
- WHERE t.station = ANY(:stations) and f.valid >= :sts
- and f.valid < :ets order by t.valid
- """
- ),
- dbconn,
- params={
- "stations": environ["station"],
- "sts": environ["sts"],
- "ets": environ["ets"],
- },
- parse_dates=["valid", "fx_valid", "fx_valid_end"],
- )
- # muck the timezones
- if not df.empty:
- tzinfo = ZoneInfo(environ["tz"])
- for col in ["valid", "fx_valid", "fx_valid_end"]:
- df[col] = (
- df[col].dt.tz_localize(tzinfo).dt.strftime("%Y-%m-%d %H:%M")
- )
-
- bio = BytesIO()
- if environ["fmt"] == "excel":
- with pd.ExcelWriter(bio, engine="openpyxl") as writer:
- df.to_excel(writer, sheet_name="TAF Data", index=False)
- headers = [
- ("Content-type", EXL),
- ("Content-disposition", "attachment;Filename=taf.xlsx"),
- ]
- else:
- df.to_csv(bio, index=False)
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-disposition", "attachment;Filename=taf.csv"),
- ]
- start_response("200 OK", headers)
- return bio.getvalue()
-
-
-def rect(station):
- """Cleanup."""
- station = station.upper()
- if len(station) == 3:
- return f"K{station}"
- return station
-
-
-@iemapp(help=__doc__, schema=MyModel)
-def application(environ, start_response):
- """Get stuff"""
- if environ["hours"] is not None:
- environ["ets"] = utc()
- environ["sts"] = environ["ets"] - timedelta(hours=environ["hours"])
- environ["station"] = [rect(x) for x in environ["station"]]
- return [run(start_response, environ)]
+from iemweb.request.taf import application # noqa: F401
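The docstring examples above translate directly to a scripted fetch; this is a sketch using the third-party ``requests`` and ``pandas`` packages (assumed installed, not part of this changeset):

    from io import StringIO

    import pandas as pd
    import requests

    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/taf.py"
    params = {
        "station": "DSM",
        "sts": "2024-01-01T00:00Z",
        "ets": "2024-02-01T00:00Z",
        "fmt": "csv",
    }
    resp = requests.get(URL, params=params, timeout=60)
    resp.raise_for_status()
    # one row per TAF forecast group, timestamps in the requested timezone
    df = pd.read_csv(StringIO(resp.text))
    print(df.head())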
diff --git a/cgi-bin/request/talltowers.py b/cgi-bin/request/talltowers.py
index 8c47092541..c7dc3d3cab 100644
--- a/cgi-bin/request/talltowers.py
+++ b/cgi-bin/request/talltowers.py
@@ -1,128 +1,3 @@
-"""Process talltowers data request."""
+"""implemented in /pylib/iemweb/request/talltowers.py"""
-import datetime
-from io import BytesIO, StringIO
-from zoneinfo import ZoneInfo
-
-import pandas as pd
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.util import get_dbconn, get_sqlalchemy_conn
-from pyiem.webutil import ensure_list, iemapp
-
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-TOWERIDS = {0: "ETTI4", 1: "MCAI4"}
-
-
-def get_stations(environ):
- """Figure out the requested station"""
- stations = ensure_list(environ, "station")
- towers = []
- for tid, nwsli in TOWERIDS.items():
- if nwsli in stations:
- towers.append(tid)
-
- return towers
-
-
-def get_time_bounds(form, tzinfo):
- """Figure out the exact time bounds desired"""
- y1 = int(form.get("year1"))
- y2 = int(form.get("year2"))
- m1 = int(form.get("month1"))
- m2 = int(form.get("month2"))
- d1 = int(form.get("day1"))
- d2 = int(form.get("day2"))
- h1 = int(form.get("hour1"))
- h2 = int(form.get("hour2"))
- sts = datetime.datetime(y1, m1, d1, h1, tzinfo=tzinfo)
- ets = datetime.datetime(y2, m2, d2, h2, tzinfo=tzinfo)
- if ets < sts:
- sts, ets = ets, sts
- ets = min([sts + datetime.timedelta(days=32), ets])
-
- return sts, ets
-
-
-def get_columns(cursor):
- """What have we here."""
- cursor.execute(
- "SELECT column_name FROM information_schema.columns "
- "WHERE table_schema = 'public' AND table_name = 'data_analog'"
- )
- res = [row[0] for row in cursor]
- return res
-
-
-@iemapp()
-def application(environ, start_response):
- """Go main Go"""
- pgconn = get_dbconn("talltowers", user="tt_web")
- columns = get_columns(pgconn.cursor())
- tzname = environ.get("tz", "Etc/UTC")
- tzinfo = ZoneInfo(tzname)
-
- stations = get_stations(environ)
- if not stations:
- raise IncompleteWebRequest("No stations")
- sts, ets = get_time_bounds(environ, tzinfo)
- fmt = environ.get("format")
- # Build out our variable list
- tokens = []
- zz = ensure_list(environ, "z")
- varnames = ensure_list(environ, "var")
- aggs = ensure_list(environ, "agg")
- for z in zz:
- for v in varnames:
- v1 = v
- v2 = ""
- if v.find("_") > -1:
- v1, v2 = v.split("_")
- v2 = f"_{v2}"
- colname = f"{v1}_{z}m{v2}"
- if colname not in columns:
- continue
- for agg in aggs:
- tokens.append(f"{agg}({colname}) as {colname}_{agg}") # noqa
-
- tw = int(environ.get("window", 1))
-
- sql = f"""
- SELECT tower,
- (date_trunc('hour', valid) +
- (((date_part('minute', valid)::integer / {tw}::integer) * {tw}::integer)
- || ' minutes')::interval) at time zone %s as ts,
- {','.join(tokens)} from
- data_analog where tower = ANY(%s) and valid >= %s and valid < %s
- GROUP by tower, ts ORDER by tower, ts
- """
- with get_sqlalchemy_conn("talltowers", user="tt_web") as conn:
- df = pd.read_sql(
- sql,
- conn,
- params=(tzname, stations, sts, ets),
- )
- df = df.rename(columns={"ts": "valid"})
- df["tower"] = df["tower"].replace(TOWERIDS)
- pgconn.close()
- if fmt in ["tdf", "comma"]:
- headers = [
- ("Content-type", "application/octet-stream"),
- ("Content-disposition", "attachment; filename=talltowers.txt"),
- ]
- start_response("200 OK", headers)
- sio = StringIO()
- df.to_csv(sio, sep="," if fmt == "comma" else "\t", index=False)
- return [sio.getvalue().encode("utf8")]
-
- # Excel
- bio = BytesIO()
- # pylint: disable=abstract-class-instantiated
- with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
- df.to_excel(writer, sheet_name="Data", index=False)
- headers = [
- ("Content-type", EXL),
- ("Content-disposition", "attachment; Filename=talltowers.xlsx"),
- ]
- start_response("200 OK", headers)
- return [bio.getvalue()]
+from iemweb.request.talltowers import application # noqa: F401
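The ``window`` parameter in the code above feeds the ``date_trunc`` arithmetic that buckets observations into N-minute windows anchored at the top of the hour. The same truncation in plain Python, as an illustrative sketch (valid for window sizes that divide evenly into 60):

    from datetime import datetime

    def bucket(valid: datetime, window: int) -> datetime:
        """Truncate a timestamp to its window-minute bucket, mirroring the SQL."""
        return valid.replace(
            minute=(valid.minute // window) * window, second=0, microsecond=0
        )

    # a 12:34:56 observation with a 20 minute window lands in the 12:20 bucket
    print(bucket(datetime(2024, 1, 1, 12, 34, 56), 20))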
diff --git a/cgi-bin/request/tempwind_aloft.py b/cgi-bin/request/tempwind_aloft.py
index 041c803a41..e63309d766 100644
--- a/cgi-bin/request/tempwind_aloft.py
+++ b/cgi-bin/request/tempwind_aloft.py
@@ -1,149 +1,3 @@
-""".. title:: Temperature and Wind Aloft Data Service
+"""implemented in /pylib/iemweb/request/tempwind_aloft.py"""
-Documentation for /cgi-bin/request/tempwind_aloft.py
-----------------------------------------------------
-
-This service emits processed data from a temperature and winds aloft product.
-
-Example Usage
-~~~~~~~~~~~~~
-
-Request all data for `KDSM` for 2023.
-
-https://mesonet.agron.iastate.edu/cgi-bin/request/tempwind_aloft.py?station=KDSM&sts=2023-01-01T00:00Z&ets=2024-01-01T00:00Z
-
-"""
-
-from io import BytesIO, StringIO
-
-import pandas as pd
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-from sqlalchemy import text
-
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-
-class Schema(CGIModel):
- """See how we are called."""
-
- ets: AwareDatetime = Field(
- None,
- description="The end time of the data request",
- )
- format: str = Field(
- "csv",
- description="The format of the output (csv json or excel)",
- pattern="^(csv|json|excel)$",
- )
- na: str = Field(
- "M",
- description="The value to use for missing data",
- pattern="^(M|None|blank)$",
- )
- sts: AwareDatetime = Field(
- None,
- description="The start time of the data request",
- )
- station: ListOrCSVType = Field(
- ...,
- description="The station identifier(s) to request data for",
- )
- tz: str = Field(
- "UTC",
- description=(
- "The timezone to use for timestamps in request and response, it "
- "should be something recognized by the pytz library."
- ),
- )
- year1: int = Field(
- None,
- description="The year for the start time, if sts is not provided",
- )
- year2: int = Field(
- None,
- description="The year for the end time, if ets is not provided",
- )
- month1: int = Field(
- None,
- description="The month for the start time, if sts is not provided",
- )
- month2: int = Field(
- None,
- description="The month for the end time, if ets is not provided",
- )
- day1: int = Field(
- None,
- description="The day for the start time, if sts is not provided",
- )
- day2: int = Field(
- None,
- description="The day for the end time, if ets is not provided",
- )
-
-
-def get_data(stations, sts, ets, tz, na, fmt):
- """Go fetch data please"""
- with get_sqlalchemy_conn("asos") as conn:
- df = pd.read_sql(
- text(
- """
- SELECT *,
- to_char(obtime at time zone :tz, 'YYYY/MM/DD HH24:MI')
- as obtime2,
- to_char(ftime at time zone :tz, 'YYYY/MM/DD HH24:MI')
- as ftime2
- from alldata_tempwind_aloft WHERE ftime >= :sts and
- ftime <= :ets and station = ANY(:stations) ORDER by obtime, ftime
- """
- ),
- conn,
- params={"sts": sts, "ets": ets, "stations": stations, "tz": tz},
- )
- df = df.drop(columns=["obtime", "ftime"]).rename(
- columns={"obtime2": "obtime", "ftime2": "ftime"}
- )
- cols = df.columns.values.tolist()
- cols.remove("ftime")
- cols.remove("obtime")
- cols.insert(1, "obtime")
- cols.insert(2, "ftime")
- df = df[cols].dropna(axis=1, how="all")
- if na != "blank":
- df = df.fillna(na)
- if fmt == "json":
- return df.to_json(orient="records")
- if fmt == "excel":
- bio = BytesIO()
- with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
- df.to_excel(writer, sheet_name="Data", index=False)
- return bio.getvalue()
-
- sio = StringIO()
- df.to_csv(sio, index=False)
- return sio.getvalue()
-
-
-@iemapp(help=__doc__, schema=Schema)
-def application(environ, start_response):
- """See how we are called"""
-
- fmt = environ["format"]
- tz = environ["tz"]
- stations = environ["station"]
- na = environ["na"]
- if fmt != "excel":
- start_response("200 OK", [("Content-type", "text/plain")])
- return [
- get_data(
- stations, environ["sts"], environ["ets"], tz, na, fmt
- ).encode("ascii")
- ]
- lll = "stations" if len(stations) > 1 else stations[0]
- headers = [
- ("Content-type", EXL),
- ("Content-disposition", f"attachment; Filename={lll}.xlsx"),
- ]
- start_response("200 OK", headers)
- return [get_data(stations, environ["sts"], environ["ets"], tz, na, fmt)]
+from iemweb.request.tempwind_aloft import application # noqa: F401
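The service also offers a ``format=json`` option (see the Schema above). A sketch of consuming it with ``requests`` (assumed installed), where the record keys follow the query columns shown in the code:

    import requests

    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/tempwind_aloft.py"
    params = {
        "station": "KDSM",
        "sts": "2023-01-01T00:00Z",
        "ets": "2024-01-01T00:00Z",
        "format": "json",
    }
    resp = requests.get(URL, params=params, timeout=60)
    resp.raise_for_status()
    # each record pairs an observation time with a forecast time
    for rec in resp.json()[:5]:
        print(rec["station"], rec["obtime"], rec["ftime"])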
diff --git a/cgi-bin/request/wmo_bufr_srf.py b/cgi-bin/request/wmo_bufr_srf.py
index ff4c5f890d..fe19f8ce88 100644
--- a/cgi-bin/request/wmo_bufr_srf.py
+++ b/cgi-bin/request/wmo_bufr_srf.py
@@ -1,125 +1,3 @@
-""".. title:: WMO BUFR Surface Data
+"""implemented in /pylib/iemweb/request/wmo_bufr_srf.py"""
-Documentation for /cgi-bin/request/wmo_bufr_srf.py
---------------------------------------------------
-
-This application provides access to the IEM processed archives of stations
-reporting via the WMO BUFR Surface.
-
-"""
-
-from io import BytesIO, StringIO
-
-import pandas as pd
-from pydantic import AwareDatetime, Field
-from pyiem.database import get_sqlalchemy_conn
-from pyiem.exceptions import IncompleteWebRequest
-from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
-from sqlalchemy import text
-
-EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-
-
-class MyModel(CGIModel):
- """Our model"""
-
- format: str = Field(
- "csv",
- description="The format of the data response. csv, json, or excel",
- pattern=r"^(csv|json|excel)$",
- )
- ets: AwareDatetime = Field(
- None,
- description="The end time for the data request",
- )
- stations: ListOrCSVType = Field(..., description="The station identifiers")
- sts: AwareDatetime = Field(
- None,
- description="The start time for the data request",
- )
- year1: int = Field(
- None,
- description="The start year for the data request, when sts is not set",
- )
- month1: int = Field(
- None,
- description=(
- "The start month for the data request, when sts is not set"
- ),
- )
- day1: int = Field(
- None,
- description="The start day for the data request, when sts is not set",
- )
- hour1: int = Field(
- None,
- description="The start hour for the data request, when sts is not set",
- )
- year2: int = Field(
- None,
- description="The end year for the data request, when ets is not set",
- )
- month2: int = Field(
- None,
- description="The end month for the data request, when ets is not set",
- )
- day2: int = Field(
- None,
- description="The end day for the data request, when ets is not set",
- )
- hour2: int = Field(
- None,
- description="The end hour for the data request, when ets is not set",
- )
-
-
-def get_data(sts, ets, stations, fmt):
- """Go fetch data please"""
- with get_sqlalchemy_conn("other") as conn:
- df = pd.read_sql(
- text(
- """
- select
- valid at time zone 'UTC' as utc_valid, * from alldata
- WHERE station = ANY(:stations)
- and valid >= :sts and valid <= :ets
- ORDER by valid, station ASC"""
- ),
- conn,
- params={
- "sts": sts,
- "ets": ets,
- "stations": stations,
- },
- )
- df = df.drop(columns=["valid"])
- if fmt == "json":
- return df.to_json(orient="records")
- if fmt == "excel":
- bio = BytesIO()
- # pylint: disable=abstract-class-instantiated
- with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
- df.to_excel(writer, sheet_name="Data", index=False)
- return bio.getvalue()
-
- sio = StringIO()
- df.to_csv(sio, index=False)
- return sio.getvalue()
-
-
-@iemapp(help=__doc__, schema=MyModel, default_tz="UTC")
-def application(environ, start_response):
- """See how we are called"""
- if environ["sts"] is None or environ["ets"] is None:
- raise IncompleteWebRequest("Missing sts and/or ets")
- stations = environ["stations"]
- fmt = environ["format"]
- if fmt != "excel":
- start_response("200 OK", [("Content-type", "text/plain")])
- return get_data(environ["sts"], environ["ets"], stations, fmt)
- headers = [
- ("Content-type", EXL),
- ("Content-disposition", "attachment; Filename=mos.xlsx"),
- ]
- start_response("200 OK", headers)
- return [get_data(environ["sts"], environ["ets"], stations, fmt)]
+from iemweb.request.wmo_bufr_srf import application # noqa: F401
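One detail worth keeping in mind when consuming this service: the query returns ``utc_valid`` as a naive timestamp already expressed in UTC (the tz-aware ``valid`` column is dropped). A downstream consumer re-localizes it, for example with ``pandas`` (assumed installed; values illustrative):

    import pandas as pd

    # as parsed from the csv output of this service
    df = pd.DataFrame({"utc_valid": ["2024-01-01 12:00:00"]})
    # declare the naive timestamps as UTC, then convert for display
    df["utc_valid"] = pd.to_datetime(df["utc_valid"]).dt.tz_localize("UTC")
    print(df["utc_valid"].dt.tz_convert("America/Chicago"))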
diff --git a/pylib/iemweb/autoplot/__init__.py b/pylib/iemweb/autoplot/__init__.py
index 63765e02bd..d36b5ec6bb 100644
--- a/pylib/iemweb/autoplot/__init__.py
+++ b/pylib/iemweb/autoplot/__init__.py
@@ -109,7 +109,7 @@ def import_script(p: int):
{
"id": 216,
"label": (
- "Consecutive Days by Year with High/Low Temp "
+ "Consecutive Days by Year with Daily Summary Variable "
"Above/Below Threshold"
),
},
diff --git a/pylib/iemweb/autoplot/scripts200/p216.py b/pylib/iemweb/autoplot/scripts200/p216.py
index e06e113cdb..88e6c16de0 100644
--- a/pylib/iemweb/autoplot/scripts200/p216.py
+++ b/pylib/iemweb/autoplot/scripts200/p216.py
@@ -1,10 +1,16 @@
"""
This chart presents the longest daily streaks of having some
-temperature threshold meet.
+threshold met. This tool presents a number of variables that may not be
+observed by your station or network. If you pick the "below" direction, the
+year is split on 1 July and the year plotted is the year of the second half of
+that period. The Daily Data Request Form
+provides the raw values for the automated stations, and a separate download
+portal covers the long term climate sites.
"""
import datetime
+import matplotlib.ticker as mticker
import numpy as np
import pandas as pd
from pyiem.database import get_sqlalchemy_conn
@@ -16,23 +22,43 @@
from iemweb.autoplot import get_monofont
PDICT = {
- "above": "Temperature At or Above (AOA) Threshold",
- "below": "Temperature Below Threshold",
+ "above": "At or Above (AOA) Threshold",
+ "below": "Below Threshold",
+}
+PDICT2 = {
+ "high": "Daily High Temperature",
+ "low": "Daily Low Temperature",
+ "max_dwpf": "Daily Max Dew Point",
+ "min_dwpf": "Daily Min Dew Point",
+ "max_feel": "Daily Max Feels Like Temp",
+ "min_feel": "Daily Min Feels Like Temp",
+ "max_sknt": "Daily Max Sustained Wind Speed",
+ "max_gust": "Daily Max Wind Gust",
+}
+UNITS = {
+ "high": "F",
+ "low": "F",
+ "max_dwpf": "F",
+ "min_dwpf": "F",
+ "max_feel": "F",
+ "min_feel": "F",
+ "max_sknt": "kts",
+ "max_gust": "kts",
}
-PDICT2 = {"high": "High Temperature", "low": "Low Temperature"}
def get_description():
"""Return a dict describing how to call this plotter"""
desc = {"description": __doc__, "data": True}
desc["arguments"] = [
- dict(
- type="station",
- name="station",
- default="IATDSM",
- label="Select Station:",
- network="IACLIMATE",
- ),
+ {
+ "type": "sid",
+ "name": "station",
+ "default": "IATDSM",
+ "label": "Select Station:",
+ "network": "IACLIMATE",
+ "include_climodat": True,
+ },
dict(
type="select",
name="var",
@@ -51,16 +77,53 @@ def get_description():
type="int",
name="threshold",
default="32",
- label="Temperature Threshold (F):",
+ label="hreshold (F or knots):",
),
]
return desc
+def get_data(ctx):
+ """Get the data for this plot."""
+ varname = ctx["var"]
+ station = ctx["station"]
+ if ctx["network"].find("CLIMATE") > 0:
+ with get_sqlalchemy_conn("coop") as conn:
+ obsdf = pd.read_sql(
+ text(f"""
+ select day, {varname},
+ case when month > 6 then year + 1 else year end as season
+ from alldata where station = :station and {varname} is not null
+ ORDER by day ASC
+ """),
+ conn,
+ params={"station": station},
+ parse_dates="day",
+ index_col="day",
+ )
+ else:
+ varname = {"high": "max_tmpf", "low": "min_tmpf"}.get(varname, varname)
+ with get_sqlalchemy_conn("iem") as conn:
+ obsdf = pd.read_sql(
+ text(f"""
+ select day, {varname},
+ case when extract(month from day) > 6 then
+ extract(year from day) + 1 else extract(year from day)
+ end as season
+ from summary where iemid = :iemid and {varname} is not null
+ ORDER by day ASC
+ """),
+ conn,
+ params={"iemid": ctx["_nt"].sts[station]["iemid"]},
+ parse_dates="day",
+ index_col="day",
+ )
+ return obsdf
+
+
def plotter(fdict):
"""Go"""
ctx = get_autoplot_context(fdict, get_description())
- station = ctx["station"]
threshold = ctx["threshold"]
varname = ctx["var"]
mydir = ctx["dir"]
@@ -70,56 +133,49 @@ def plotter(fdict):
startdate = None
running = 0
row = None
- with get_sqlalchemy_conn("coop") as conn:
- res = conn.execute(
- text(f"""
- select day, {varname},
- case when month > 6 then year + 1 else year end
- from alldata where station = :station and {varname} is not null
- ORDER by day ASC
- """),
- {"station": station},
- )
- if res.rowcount == 0:
- raise NoDataFound("Did not find any observations for station.")
- for row in res:
- if op(row[1], threshold):
- if running == 0:
- startdate = row[0]
- running += 1
- continue
+ obsdf = get_data(ctx)
+ if obsdf.empty:
+ raise NoDataFound("Did not find any observations for station.")
+ obsdf = obsdf.reindex(pd.date_range(obsdf.index.min(), obsdf.index.max()))
+ for day, row in obsdf.iterrows():
+ if op(row[varname], threshold):
if running == 0:
- continue
- rows.append(
- {
- "days": running,
- "season": row[0].year if mydir == "above" else row[2],
- "startdate": startdate,
- "enddate": row[0] - datetime.timedelta(days=1),
- }
- )
- running = 0
- if running > 0:
- rows.append(
- {
- "days": running,
- "season": row[0].year if mydir == "above" else row[2],
- "startdate": startdate,
- "enddate": row[0],
- }
- )
+ startdate = day
+ running += 1
+ continue
+ if running == 0:
+ continue
+ rows.append(
+ {
+ "days": running,
+ "season": day.year if mydir == "above" else row["season"],
+ "startdate": startdate,
+ "enddate": day - datetime.timedelta(days=1),
+ }
+ )
+ running = 0
+ if running > 0:
+ rows.append(
+ {
+ "days": running,
+ "season": day.year if mydir == "above" else row["season"],
+ "startdate": startdate,
+ "enddate": day,
+ }
+ )
if not rows:
raise NoDataFound("Failed to find any streaks for given threshold.")
df = pd.DataFrame(rows)
- label = "AOA" if mydir == "above" else "below"
- label2 = "Year" if mydir == "above" else "Season"
+ label = "at or above" if mydir == "above" else "below"
+ label2 = "Yearly" if mydir == "above" else "Seasonal"
title = (
- f"{ctx['_sname']}:: {label2} Max Consec Days with "
- f"{varname.capitalize()} {label} {threshold}"
- r"$^\circ$F"
+ f"{ctx['_sname']} "
+ f"[{obsdf.index[0]:%-d %b %Y}-{obsdf.index[-1]:%-d %b %Y}]:: "
+ f"{label2} Maximum Consecutive Days with"
)
- fig = figure(title=title, apctx=ctx)
+ subtitle = f"{PDICT2[varname]} {label} {threshold} {UNITS[varname]}"
+ fig = figure(title=title, subtitle=subtitle, apctx=ctx)
ax = fig.add_axes([0.1, 0.1, 0.5, 0.8])
ax.set_ylabel(f"Max Streak by {label2} [days]")
ax.grid(True)
@@ -135,6 +191,7 @@ def plotter(fdict):
bbox=dict(color="white"),
)
ax.set_xlabel(label2)
+ ax.yaxis.set_major_locator(mticker.MaxNLocator(integer=True))
# List out longest
monofont = get_monofont()
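The refactor above reduces the streak detection to a run-length scan over a gap-filled daily series: the ``reindex`` to a full ``date_range`` turns any missing day into NaN, which fails the threshold comparison and therefore terminates a streak. A standalone sketch of that core loop, with illustrative values:

    from operator import ge

    import numpy as np
    import pandas as pd

    days = pd.date_range("2024-01-01", "2024-01-10")
    vals = pd.Series([35, 36, np.nan, 40, 41, 42, 20, 33, 34, 35], index=days)

    streaks = []
    running = 0
    start = None
    for day, val in vals.items():
        # NaN (a gap-filled missing day) fails the comparison and ends a run
        if ge(val, 32):  # the "at or above" direction
            if running == 0:
                start = day
            running += 1
            continue
        if running:
            streaks.append((running, start, day - pd.Timedelta(days=1)))
        running = 0
    if running:
        streaks.append((running, start, day))
    print(max(streaks))  # the longest run at or above the threshold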
diff --git a/pylib/iemweb/request/asos.py b/pylib/iemweb/request/asos.py
new file mode 100644
index 0000000000..1064588ad7
--- /dev/null
+++ b/pylib/iemweb/request/asos.py
@@ -0,0 +1,586 @@
+""".. title:: ASOS/METAR Backend Service
+
+`IEM API Mainpage `_
+
+Documentation on /cgi-bin/request/asos.py
+-----------------------------------------
+
+This cgi-bin script provides METAR/ASOS data. It has an IP-based rate limit
+for requests to prevent abuse. A `503 Service Unavailable` response will be
+returned if the server is under heavy load.
+
+Changelog:
+
+- **2024-04-01** Fix recently introduced bug with time sort order.
+- **2024-03-29** This service had an intermittent bug whereby if the `tz` value
+ was not provided, it would default to `America/Chicago` instead of `UTC`.
+- **2024-03-29** Migrated to pydantic based request validation. Will be
+ monitoring for any issues.
+- **2024-03-14** Initial documentation release.
+
+Example Usage
+-------------
+
+Get the past 24 hours of air temperature and dew point for Des Moines and
+Mason City, Iowa.
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/asos.py?data=tmpf&data=dwpf&station=DSM&station=MCW&hours=24
+
+"""
+
+import datetime
+import sys
+from io import StringIO
+from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
+
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_dbconn
+from pyiem.network import Table as NetworkTable
+from pyiem.util import utc
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+
+NULLS = {"M": "M", "null": "null", "empty": ""}
+TRACE_OPTS = {"T": "T", "null": "null", "empty": "", "0.0001": "0.0001"}
+AVAILABLE = [
+ "tmpf",
+ "dwpf",
+ "relh",
+ "drct",
+ "sknt",
+ "p01i",
+ "alti",
+ "mslp",
+ "vsby",
+ "gust",
+ "skyc1",
+ "skyc2",
+ "skyc3",
+ "skyc4",
+ "skyl1",
+ "skyl2",
+ "skyl3",
+ "skyl4",
+ "wxcodes",
+ "ice_accretion_1hr",
+ "ice_accretion_3hr",
+ "ice_accretion_6hr",
+ "peak_wind_gust",
+ "peak_wind_drct",
+ "peak_wind_time",
+ "feel",
+ "metar",
+ "snowdepth",
+]
+# inline is so much faster!
+CONV_COLS = {
+ "tmpc": "f2c(tmpf) as tmpc",
+ "dwpc": "f2c(dwpf) as dwpc",
+ "p01m": "p01i * 25.4 as p01m",
+ "sped": "sknt * 1.15 as sped",
+ "gust_mph": "gust * 1.15 as gust_mph",
+ "peak_wind_gust_mph": "peak_wind_gust * 1.15 as peak_wind_gust_mph",
+}
+
+
+class MyModel(CGIModel):
+ """Request Model."""
+
+ data: ListOrCSVType = Field(
+ None,
+ description=(
+ "The data columns to return, defaults to all. The available "
+ "options are: tmpf, dwpf, relh, drct, sknt, p01i, alti, mslp, "
+ "vsby, gust, skyc1, skyc2, skyc3, skyc4, skyl1, skyl2, skyl3, "
+ "skyl4, wxcodes, ice_accretion_1hr, ice_accretion_3hr, "
+ "ice_accretion_6hr, peak_wind_gust, peak_wind_drct, "
+ "peak_wind_time, feel, metar, snowdepth"
+ ),
+ )
+ direct: bool = Field(
+ False,
+ description=(
+ "If set to 'yes', the data will be directly downloaded as a file."
+ ),
+ )
+ elev: bool = Field(
+ False,
+ description=(
+ "If set to 'yes', the elevation (m) of the station will be "
+ "included in the output."
+ ),
+ )
+ ets: AwareDatetime = Field(
+ None,
+ description=("The end time of the data request."),
+ )
+ format: str = Field(
+ "onlycomma",
+ description=(
+ "The format of the data, defaults to onlycomma. The available "
+ "options are: onlycomma, tdf."
+ ),
+ )
+ hours: int = Field(
+ None,
+ description=(
+ "The number of hours of data to return prior to the current "
+ "timestamp. Can not be more than 24 if no stations are specified."
+ ),
+ )
+ latlon: bool = Field(
+ False,
+ description=(
+ "If set to 'yes', the latitude and longitude of the station will "
+ "be included in the output."
+ ),
+ )
+ missing: str = Field(
+ "M",
+ description=(
+ "How to represent missing values, defaults to M. Other options "
+ "are 'null' and 'empty'."
+ ),
+ pattern="^(M|null|empty)$",
+ )
+ nometa: bool = Field(
+ False,
+ description=(
+ "If set to 'yes', the column headers will not be included in the "
+ "output."
+ ),
+ )
+ network: ListOrCSVType = Field(
+ None,
+ description="The network to query, defaults to all networks.",
+ )
+ report_type: ListOrCSVType = Field(
+ [],
+ description=(
+ "The report type to query, defaults to all. The available "
+ "options are: 1 (HFMETAR), 3 (Routine), 4 (Specials)."
+ ),
+ )
+ station: ListOrCSVType = Field(
+ None,
+ description=(
+ "The station identifier to query, defaults to all stations and "
+ "if you do not specify any stations, you can only request 24 "
+ "hours of data."
+ ),
+ )
+ sts: AwareDatetime = Field(
+ None,
+ description=("The start time of the data request."),
+ )
+ trace: str = Field(
+ "0.0001",
+ description=(
+ "How to represent trace values, defaults to 0.0001. Other "
+ "options are 'null' and 'empty'."
+ ),
+ pattern="^(0.0001|null|empty|T)$",
+ )
+ tz: str = Field(
+ "UTC",
+ description=(
+ "The timezone to use for the request timestamps (when not "
+ "providing already tz-aware ``sts`` and ``ets`` values) and the "
+ "output valid timestamp. It is highly recommended to set this to "
+ "UTC to ensure it is set. This string should be "
+ "something that the Python ``zoneinfo`` library can understand."
+ ),
+ )
+ year1: int = Field(
+ None,
+ description=(
+ "The year of the start time, interpreted in the time zone "
+ "provided by `tz`. Used when `sts` is not provided."
+ ),
+ )
+ month1: int = Field(
+ None,
+ description=(
+ "The month of the start time, interpreted in the time zone "
+ "provided by `tz`. Used when `sts` is not provided."
+ ),
+ )
+ day1: int = Field(
+ None,
+ description=(
+ "The day of the start time, interpreted in the time zone "
+ "provided by `tz`. Used when `sts` is not provided."
+ ),
+ )
+ hour1: int = Field(
+ 0,
+ description=(
+ "The hour of the start time, interpreted in the time zone "
+ "provided by `tz`. Used when `sts` is not provided."
+ ),
+ )
+ minute1: int = Field(
+ 0,
+ description=(
+ "The minute of the start time, interpreted in the time zone "
+ "provided by `tz`. Used when `sts` is not provided."
+ ),
+ )
+ year2: int = Field(
+ None,
+ description=(
+ "The year of the end time, interpreted in the time zone "
+ "provided by `tz`. Used when `ets` is not provided."
+ ),
+ )
+ month2: int = Field(
+ None,
+ description=(
+ "The month of the end time, interpreted in the time zone "
+ "provided by `tz`. Used when `ets` is not provided."
+ ),
+ )
+ day2: int = Field(
+ None,
+ description=(
+ "The day of the end time, interpreted in the time zone "
+ "provided by `tz`. Used when `ets` is not provided."
+ ),
+ )
+ hour2: int = Field(
+ 0,
+ description=(
+ "The hour of the end time, interpreted in the time zone "
+ "provided by `tz`. Used when `ets` is not provided."
+ ),
+ )
+ minute2: int = Field(
+ 0,
+ description=(
+ "The minute of the end time, interpreted in the time zone "
+ "provided by `tz`. Used when `ets` is not provided."
+ ),
+ )
+
+
+def fmt_time(val, missing, _trace, tzinfo):
+ """Format timestamp."""
+ if val is None:
+ return missing
+ return (val.astimezone(tzinfo)).strftime("%Y-%m-%d %H:%M")
+
+
+def fmt_trace(val, missing, trace, _tzinfo):
+ """Format precip."""
+ if val is None:
+ return missing
+ # careful with this comparison
+ if 0 < val < 0.009999:
+ return trace
+ return f"{val:.2f}"
+
+
+def fmt_simple(val, missing, _trace, _tzinfo):
+ """Format simplely."""
+ if val is None:
+ return missing
+ return dance(val).replace(",", " ").replace("\n", " ")
+
+
+def fmt_wxcodes(val, missing, _trace, _tzinfo):
+ """Format weather codes."""
+ if val is None:
+ return missing
+ return " ".join(val)
+
+
+def fmt_f2(val, missing, _trace, _tzinfo):
+ """Simple 2 place formatter."""
+ if val is None:
+ return missing
+ return f"{val:.2f}"
+
+
+def fmt_f0(val, missing, _trace, _tzinfo):
+ """Simple 0 place formatter."""
+ if val is None:
+ return missing
+ return f"{val:.0f}"
+
+
+def dance(val):
+ """Force the val to ASCII."""
+ return val.encode("ascii", "ignore").decode("ascii")
+
+
+def overloaded():
+ """Prevent automation from overwhelming the server"""
+
+ with get_dbconn("asos") as pgconn:
+ cursor = pgconn.cursor()
+ cursor.execute("select one::float from system_loadavg")
+ val = cursor.fetchone()[0]
+ if val > 30: # Cut back on logging
+ sys.stderr.write(f"/cgi-bin/request/asos.py over cpu thres: {val}\n")
+ return val > 20
+
+
+def get_stations(form):
+ """Figure out the requested station"""
+ if not form["station"]:
+ if form["network"] is not None:
+ nt = NetworkTable(form["network"], only_online=False)
+ return list(nt.sts.keys())
+ return []
+ stations = form["station"]
+ if not stations:
+ return []
+ # allow folks to specify the ICAO codes for K*** sites
+ for i, station in enumerate(stations):
+ if len(station) == 4 and station[0] == "K":
+ stations[i] = station[1:]
+ return stations
+
+
+def get_time_bounds(form, tzinfo):
+ """Figure out the exact time bounds desired"""
+ if form["hours"] is not None:
+ ets = utc()
+ sts = ets - datetime.timedelta(hours=int(form.get("hours")))
+ return sts, ets
+ # Here lie dragons, so tricky to get a proper timestamp
+ try:
+
+ def _get(num):
+ return datetime.datetime(
+ form[f"year{num}"],
+ form[f"month{num}"],
+ form[f"day{num}"],
+ form[f"hour{num}"],
+ form[f"minute{num}"],
+ )
+
+ if form["sts"] is None:
+ form["sts"] = _get("1").replace(tzinfo=tzinfo)
+ if form["ets"] is None:
+ form["ets"] = _get("2").replace(tzinfo=tzinfo)
+ except Exception:
+ return None, None
+
+ if form["sts"] == form["ets"]:
+ form["ets"] += datetime.timedelta(days=1)
+ if form["sts"] > form["ets"]:
+ form["sts"], form["ets"] = form["ets"], form["sts"]
+ return form["sts"], form["ets"]
+
+
+def build_querycols(form):
+ """Which database columns correspond to our query."""
+ req = form["data"]
+ if not req or "all" in req:
+ return AVAILABLE
+ res = []
+ for col in req:
+ if col == "presentwx":
+ res.append("wxcodes")
+ elif col in AVAILABLE:
+ res.append(col)
+ elif col in CONV_COLS:
+ res.append(CONV_COLS[col])
+ if not res:
+ res.append("tmpf")
+ return res
+
+
+def toobusy(pgconn, name):
+ """Check internal logging..."""
+ cursor = pgconn.cursor()
+ cursor.execute(
+ "SELECT pid from pg_stat_activity where query ~* %s",
+ (name,),
+ )
+ over = cursor.rowcount > 6
+ if over and cursor.rowcount > 9: # cut back on logging
+ sys.stderr.write(f"asos.py cursors {cursor.rowcount}: {name}\n")
+ cursor.close()
+ return over
+
+
+@iemapp(help=__doc__, parse_times=False, schema=MyModel)
+def application(environ, start_response):
+ """Go main"""
+ if environ["REQUEST_METHOD"] == "OPTIONS":
+ start_response("400 Bad Request", [("Content-type", "text/plain")])
+ yield b"Allow: GET,POST,OPTIONS"
+ return
+ if overloaded():
+ start_response(
+ "503 Service Unavailable", [("Content-type", "text/plain")]
+ )
+ yield b"ERROR: server over capacity, please try later"
+ return
+ try:
+ tzname = environ["tz"].strip()
+ if tzname in ["etc/utc", ""]:
+ tzname = "UTC"
+ tzinfo = ZoneInfo(tzname)
+ except ZoneInfoNotFoundError as exp:
+ start_response("400 Bad Request", [("Content-type", "text/plain")])
+ sys.stderr.write(f"asos.py invalid tz: {exp}\n")
+ yield b"Invalid Timezone (tz) provided"
+ return
+ pgconn = get_dbconn("asos")
+ cursor_name = f"mystream_{environ.get('REMOTE_ADDR')}"
+ if toobusy(pgconn, cursor_name):
+ pgconn.close()
+ start_response(
+ "503 Service Unavailable", [("Content-type", "text/plain")]
+ )
+ yield b"ERROR: server over capacity, please try later"
+ return
+ acursor = pgconn.cursor(cursor_name, scrollable=False)
+ acursor.itersize = 2000
+
+ report_types = [int(i) for i in environ["report_type"]]
+ sts, ets = get_time_bounds(environ, tzinfo)
+ if sts is None:
+ pgconn.close()
+ start_response(
+ "422 Unprocessable Entity", [("Content-type", "text/plain")]
+ )
+ yield b"Invalid times provided."
+ return
+ stations = get_stations(environ)
+ if not stations:
+ # We are asking for all-data. We limit the amount of data returned to
+ # one day or less
+ if (ets - sts) > datetime.timedelta(hours=24):
+ pgconn.close()
+ start_response("400 Bad Request", [("Content-type", "text/plain")])
+ yield b"When requesting all-stations, must be less than 24 hours."
+ return
+ delim = environ["format"]
+ headers = []
+ if environ["direct"]:
+ headers.append(("Content-type", "application/octet-stream"))
+ suffix = "tsv" if delim in ["tdf", "onlytdf"] else "csv"
+ if not stations or len(stations) > 1:
+ fn = f"asos.{suffix}"
+ else:
+ fn = f"{stations[0]}.{suffix}"
+ headers.append(("Content-Disposition", f"attachment; filename={fn}"))
+ else:
+ headers.append(("Content-type", "text/plain"))
+ start_response("200 OK", headers)
+
+ # How should null values be represented
+ missing = NULLS[environ["missing"]]
+ # How should trace values be represented
+ trace = TRACE_OPTS[environ["trace"]]
+
+ querycols = build_querycols(environ)
+
+ if delim in ["tdf", "onlytdf"]:
+ rD = "\t"
+ else:
+ rD = ","
+
+ gisextra = environ["latlon"]
+ elev_extra = environ["elev"]
+ table = "alldata"
+ metalimiter = ""
+ colextra = "0 as lon, 0 as lat, 0 as elev, "
+ if gisextra or elev_extra:
+ colextra = "ST_X(geom) as lon, ST_Y(geom) as lat, elevation, "
+ table = "alldata a JOIN stations t on (a.station = t.id)"
+ metalimiter = "t.network ~* 'ASOS' and "
+
+ rlimiter = ""
+ # Munge legacy report_type=2 into 2,3,4 see akrherz/iem#104
+ if 2 in report_types:
+ report_types.extend([3, 4])
+ if len(report_types) == 1:
+ rlimiter = f" and report_type = {report_types[0]}"
+ elif len(report_types) > 1:
+ rlimiter = f" and report_type in {tuple(report_types)}"
+ sqlcols = ",".join(querycols)
+ sorder = "DESC" if environ["hours"] is not None else "ASC"
+ if stations:
+ acursor.execute(
+ f"SELECT station, valid, {colextra} {sqlcols} from {table} "
+ f"WHERE {metalimiter} valid >= %s and valid < %s and "
+ f"station = ANY(%s) {rlimiter} ORDER by valid {sorder}",
+ (sts, ets, stations),
+ )
+ else:
+ acursor.execute(
+ f"SELECT station, valid, {colextra} {sqlcols} from {table} "
+ f"WHERE {metalimiter} valid >= %s and valid < %s {rlimiter} "
+ f"ORDER by valid {sorder}",
+ (sts, ets),
+ )
+ sio = StringIO()
+ if delim not in ["onlytdf", "onlycomma"]:
+ sio.write(f"#DEBUG: Format Typ -> {delim}\n")
+ sio.write(f"#DEBUG: Time Period -> {sts} {ets}\n")
+ sio.write(f"#DEBUG: Time Zone -> {tzinfo}\n")
+ sio.write(
+ "#DEBUG: Data Contact -> daryl herzmann "
+ "akrherz@iastate.edu 515-294-5978\n"
+ )
+ sio.write(f"#DEBUG: Entries Found -> {acursor.rowcount}\n")
+ nometa = environ["nometa"]
+ if not nometa:
+ sio.write(f"station{rD}valid{rD}")
+ if gisextra:
+ sio.write(f"lon{rD}lat{rD}")
+ if elev_extra:
+ sio.write(f"elevation{rD}")
+ # hack to convert tmpf as tmpc to tmpc
+ sio.write(
+ f"{rD.join([c.rsplit(' as ', maxsplit=1)[-1] for c in querycols])}"
+ )
+ sio.write("\n")
+
+ ff = {
+ "wxcodes": fmt_wxcodes,
+ "metar": fmt_simple,
+ "skyc1": fmt_simple,
+ "skyc2": fmt_simple,
+ "skyc3": fmt_simple,
+ "skyc4": fmt_simple,
+ "p01i": fmt_trace,
+ "p01i * 25.4 as p01m": fmt_trace,
+ "ice_accretion_1hr": fmt_trace,
+ "ice_accretion_3hr": fmt_trace,
+ "ice_accretion_6hr": fmt_trace,
+ "peak_wind_time": fmt_time,
+ "snowdepth": fmt_f0,
+ }
+ # The default is the %.2f formatter
+ formatters = [ff.get(col, fmt_f2) for col in querycols]
+
+ for rownum, row in enumerate(acursor):
+ if not nometa:
+ sio.write(row[0] + rD)
+ sio.write(
+ (row[1].astimezone(tzinfo)).strftime("%Y-%m-%d %H:%M") + rD
+ )
+ if gisextra:
+ sio.write(f"{row[2]:.4f}{rD}{row[3]:.4f}{rD}")
+ if elev_extra:
+ sio.write(f"{row[4]:.2f}{rD}")
+ sio.write(
+ rD.join(
+ [
+ func(val, missing, trace, tzinfo)
+ for func, val in zip(formatters, row[5:])
+ ]
+ )
+ + "\n"
+ )
+ if rownum > 0 and rownum % 1000 == 0:
+ yield sio.getvalue().encode("ascii", "ignore")
+ sio = StringIO()
+ acursor.close()
+ pgconn.close()
+ yield sio.getvalue().encode("ascii", "ignore")
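Since the application above streams its output (the named server-side cursor yields roughly every 1000 rows), a client can stream too rather than buffer a large response in memory. A sketch with ``requests`` (assumed installed; output filename illustrative):

    import requests

    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/asos.py"
    params = {
        "station": "DSM",
        "data": "tmpf,dwpf",
        "hours": 24,
        "tz": "UTC",
    }
    with requests.get(URL, params=params, stream=True, timeout=60) as resp:
        resp.raise_for_status()
        with open("dsm_asos.csv", "wb") as fh:
            # consume the response incrementally, mirroring the server side
            for chunk in resp.iter_content(chunk_size=65536):
                fh.write(chunk)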
diff --git a/pylib/iemweb/request/asos1min.py b/pylib/iemweb/request/asos1min.py
new file mode 100644
index 0000000000..f054625acd
--- /dev/null
+++ b/pylib/iemweb/request/asos1min.py
@@ -0,0 +1,202 @@
+""".. title:: ASOS 1 Minute Data Request
+
+Documentation for /cgi-bin/request/asos1min.py
+----------------------------------------------
+
+This service provides the ASOS 1 minute data collected by NCEI; it is not
+the "one minute data" available via MADIS. There is an availability delay of
+about 24 hours due to the way NCEI collects the data from the ASOS sites.
+
+Examples
+--------
+
+Request air temperature data for Ames IA KAMW for 2022, but only provide data
+at 1 hour intervals. Provide timestamps in UTC timezone.
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/asos1min.py?station=KAMW\
+&vars=tmpf&sts=2022-01-01T00:00Z&ets=2023-01-01T00:00Z&sample=1hour\
+&what=download&tz=UTC
+
+"""
+
+from io import StringIO
+
+from pydantic import AwareDatetime, Field
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+
+SAMPLING = {
+ "1min": 1,
+ "5min": 5,
+ "10min": 10,
+ "20min": 20,
+ "1hour": 60,
+}
+DELIM = {"space": " ", "comma": ",", "tab": "\t", ",": ","}
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ delim: str = Field(
+ "comma",
+ description="Delimiter to use in output",
+ pattern="^(comma|space|tab|,)$",
+ )
+ ets: AwareDatetime = Field(None, description="End timestamp for data")
+ gis: bool = Field(
+ False, description="Include Lat/Lon information in output"
+ )
+ sample: str = Field(
+ "1min",
+ description="Sampling period for data",
+ pattern="^(1min|5min|10min|20min|1hour)$",
+ )
+ station: ListOrCSVType = Field(
+ ..., description="Station(s) to request data for"
+ )
+ sts: AwareDatetime = Field(None, description="Start timestamp for data")
+ tz: str = Field(
+ "UTC",
+ description="Timezone to use for the output and input timestamps",
+ )
+ vars: ListOrCSVType = Field(
+ None, description="Variable(s) to request data for"
+ )
+ what: str = Field(
+ "dl", description="Output format", pattern="^(download|view)$"
+ )
+ year1: int = Field(None, description="Start year for data")
+ month1: int = Field(None, description="Start month for data")
+ day1: int = Field(None, description="Start day for data")
+ hour1: int = Field(0, description="Start hour for data")
+ minute1: int = Field(0, description="Start minute for data")
+ year2: int = Field(None, description="End year for data")
+ month2: int = Field(None, description="End month for data")
+ day2: int = Field(None, description="End day for data")
+ hour2: int = Field(0, description="End hour for data")
+ minute2: int = Field(0, description="End minute for data")
+
+
+def get_station_metadata(environ, stations) -> dict:
+ """build a dictionary."""
+ cursor = environ["iemdb.mesosite.cursor"]
+ cursor.execute(
+ """
+ SELECT id, name, round(ST_x(geom)::numeric, 4) as lon,
+ round(ST_y(geom)::numeric, 4) as lat from stations
+ where id = ANY(%s) and network ~* 'ASOS'
+ """,
+ (stations,),
+ )
+ res = {}
+ for row in cursor:
+ res[row["id"]] = dict(name=row["name"], lon=row["lon"], lat=row["lat"])
+ for station in stations:
+ if station not in res:
+ raise IncompleteWebRequest(f"Unknown station provided: {station}")
+ return res
+
+
+def compute_prefixes(sio, environ, delim, stations, tz) -> dict:
+ """"""
+ station_meta = get_station_metadata(environ, stations)
+ prefixes = {}
+ if environ["gis"]:
+ sio.write(
+ delim.join(
+ ["station", "station_name", "lat", "lon", f"valid({tz})", ""]
+ )
+ )
+ for station in stations:
+ prefixes[station] = (
+ delim.join(
+ [
+ station,
+ station_meta[station]["name"].replace(delim, "_"),
+ str(station_meta[station]["lat"]),
+ str(station_meta[station]["lon"]),
+ ]
+ )
+ + delim
+ )
+ else:
+ sio.write(delim.join(["station", "station_name", f"valid({tz})", ""]))
+ for station in stations:
+ prefixes[station] = (
+ delim.join(
+ [
+ station,
+ station_meta[station]["name"].replace(delim, "_"),
+ ]
+ )
+ + delim
+ )
+ return prefixes
+
+
+@iemapp(
+ iemdb=["asos1min", "mesosite"],
+ iemdb_cursor="blah",
+ help=__doc__,
+ schema=Schema,
+)
+def application(environ, start_response):
+ """Handle mod_wsgi request."""
+ if environ["station"] is None:
+ raise IncompleteWebRequest("No station= was specified in request.")
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("Insufficient start timestamp variables.")
+ # Ensure we have uppercase stations
+ stations = [s.upper() for s in environ["station"]]
+ delim = DELIM[environ["delim"]]
+ sample = SAMPLING[environ["sample"]]
+ tz = environ["tz"]
+ varnames = environ["vars"]
+ if not varnames:
+ raise IncompleteWebRequest("No vars= was specified in request.")
+ cursor = environ["iemdb.asos1min.cursor"]
+ # get a list of columns we have in the alldata_1minute table
+ cursor.execute(
+ "select column_name from information_schema.columns where "
+ "table_name = 'alldata_1minute' ORDER by column_name"
+ )
+ columns = [row["column_name"] for row in cursor]
+ # cross check varnames now
+ for varname in varnames:
+ if varname not in columns:
+ raise IncompleteWebRequest(
+ f"Unknown variable {varname} specified in request."
+ )
+ cursor.execute(
+ """
+ select *,
+ to_char(valid at time zone %s, 'YYYY-MM-DD hh24:MI') as local_valid
+ from alldata_1minute
+ where station = ANY(%s) and valid >= %s and valid < %s and
+ extract(minute from valid) %% %s = 0 ORDER by station, valid
+ """,
+ (tz, stations, environ["sts"], environ["ets"], sample),
+ )
+ headers = []
+ if environ["what"] == "download":
+ headers.append(("Content-type", "application/octet-stream"))
+ headers.append(
+ ("Content-Disposition", "attachment; filename=changeme.txt")
+ )
+ else:
+ headers.append(("Content-type", "text/plain"))
+
+ sio = StringIO()
+ prefixes = compute_prefixes(sio, environ, delim, stations, tz)
+
+ sio.write(delim.join(varnames) + "\n")
+ rowfmt = delim.join([f"%({var})s" for var in varnames])
+ for row in cursor:
+ sio.write(prefixes[row["station"]])
+ sio.write(f"{row['local_valid']}{delim}")
+ sio.write((rowfmt % row).replace("None", "M"))
+ sio.write("\n")
+
+ start_response("200 OK", headers)
+ return [sio.getvalue().encode("ascii")]
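The ``sample`` option above is implemented as a modulus on the observation minute (``extract(minute from valid) %% N = 0``), so ``1hour`` simply keeps the top-of-the-hour rows. The equivalent selection in ``pandas``, purely for illustration:

    import pandas as pd

    idx = pd.date_range("2022-01-01", periods=120, freq="min")
    obs = pd.Series(range(120), index=idx)
    # keep rows whose minute is evenly divisible by the sampling interval
    hourly = obs[obs.index.minute % 60 == 0]
    print(hourly.index.tolist())  # the 00:00 and 01:00 observations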
diff --git a/pylib/iemweb/request/coop.py b/pylib/iemweb/request/coop.py
index 7bbcc5be38..0b0d98f236 100644
--- a/pylib/iemweb/request/coop.py
+++ b/pylib/iemweb/request/coop.py
@@ -1,4 +1,19 @@
-"""IEM Request Handler for climodat Data."""
+""".. title:: IEM Climodat Data Export
+
+Documentation for /cgi-bin/request/coop.py
+------------------------------------------
+
+This service is somewhat poorly named ``coop.py``, but it provides the IEM
+Climodat data, which is a combination of NWS COOP and NWS ASOS/AWOS data. There
+are a number of knobs here as this is one of the most popular datasets the IEM
+produces.
+
+Changelog
+---------
+
+- 2024-06-22: Initial documentation and backend conversion to pydantic.
+
+"""
from datetime import date, datetime, timedelta
from io import BytesIO, StringIO
@@ -6,15 +21,157 @@
import pandas as pd
from metpy.units import units
-from pyiem.database import get_sqlalchemy_conn
+from pydantic import Field
+from pyiem.database import get_dbconnc, get_sqlalchemy_conn
from pyiem.exceptions import IncompleteWebRequest
from pyiem.network import Table as NetworkTable
from pyiem.util import utc
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
from sqlalchemy import text
DEGF = units.degF
DEGC = units.degC
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ delim: str = Field(
+ "comma",
+ description=(
+ "The delimiter to use in the output file. "
+ "Options: comma, tab, space"
+ ),
+ )
+ gis: bool = Field(
+ False,
+ description="Include latitude and longitude columns in the output.",
+ )
+ inclatlon: bool = Field(
+ False,
+ description="Legacy alias for the gis option.",
+ )
+ model: str = Field(
+ None,
+ description=(
+ "The model to use for output. Options: simple, apsim, "
+ "century, daycent, salus, dndc, swat. Specifying this will "
+ "override the 'vars' option."
+ ),
+ )
+ network: str = Field(
+ "IACLIMATE", description="The network to use for station lookups."
+ )
+ scenario: bool = Field(
+ False,
+ description=(
+ "Should data from a previous year, specified by scenario_year "
+ "be used to fill out the present year."
+ ),
+ )
+ scenario_year: int = Field(
+ 2020,
+ description=(
+ "The year to use as a scenario year, if scenario is true."
+ ),
+ )
+ station: ListOrCSVType = Field(
+ [], description="List of stations to include in the output."
+ )
+ stations: ListOrCSVType = Field(
+ [],
+ description=(
+ "List of stations to include in the output. Legacy variable name."
+ ),
+ )
+ vars: ListOrCSVType = Field(
+ [], description="List of variables to include in the output."
+ )
+ what: str = Field("view", description="The type of output to generate.")
+ with_header: bool = Field(
+ True, description="Include a header row in the output."
+ )
+ year1: int = Field(
+ date.today().year,
+ description="The starting year for the data request.",
+ )
+ month1: int = Field(
+ 1,
+ description="The starting month for the data request.",
+ )
+ day1: int = Field(
+ 1,
+ description="The starting day for the data request.",
+ )
+ year2: int = Field(
+ date.today().year,
+ description="The ending year for the data request.",
+ )
+ month2: int = Field(
+ date.today().month,
+ description="The ending month for the data request.",
+ )
+ day2: int = Field(
+ date.today().day,
+ description="The ending day for the data request.",
+ )
+
+
+def get_scenario_period(ctx):
+ """Compute the inclusive start and end dates to fetch scenario data for
+ Arguments:
+ ctx dictionary context this app was called with
+ """
+ if ctx["ets"].month == 2 and ctx["ets"].day == 29:
+ sts = date(ctx["scenario_year"], ctx["ets"].month, 28)
+ else:
+ sts = date(ctx["scenario_year"], ctx["ets"].month, ctx["ets"].day)
+ ets = date(ctx["scenario_year"], 12, 31)
+ return sts, ets
+
+
+def sane_date(year, month, day):
+ """Attempt to account for usage of days outside of the bounds for
+ a given month"""
+ # Calculate the last date of the given month
+ nextmonth = date(year, month, 1) + timedelta(days=35)
+ lastday = nextmonth.replace(day=1) - timedelta(days=1)
+ return date(year, month, min(day, lastday.day))
+
+
+def get_cgi_dates(environ):
+ """Figure out which dates are requested via the form, we shall attempt
+ to account for invalid dates provided!"""
+
+ ets = min(
+ sane_date(environ["year2"], environ["month2"], environ["day2"]),
+ date.today() - timedelta(days=1),
+ )
+
+ return [
+ sane_date(environ["year1"], environ["month1"], environ["day1"]),
+ ets,
+ ]
+
+
+def get_cgi_stations(environ):
+ """Figure out which stations the user wants, return a list of them"""
+ reqlist = environ["station"]
+ if not reqlist:
+ reqlist = environ["stations"]
+ if not reqlist:
+ return []
+ if "_ALL" in reqlist:
+ network = environ["network"]
+ nt = NetworkTable(network, only_online=False)
+ return list(nt.sts.keys())
+
+ return reqlist
+
def f2c(val):
"""Convert F to C."""
@@ -698,3 +855,89 @@ def do_swat(_cursor, ctx):
for fn, fp in zipfiles.items():
zf.writestr(fn, fp)
return sio.getvalue()
+
+
+@iemapp(help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """go main go"""
+ ctx = {}
+ ctx["stations"] = get_cgi_stations(environ)
+ if not ctx["stations"]:
+ raise IncompleteWebRequest("No stations were specified.")
+ ctx["sts"], ctx["ets"] = get_cgi_dates(environ)
+ ctx["myvars"] = environ["vars"]
+ # Model specification trumps vars[]
+ if environ["model"] is not None:
+ ctx["myvars"] = [environ["model"]]
+ ctx["what"] = environ["what"]
+ ctx["delim"] = environ["delim"]
+ ctx["inclatlon"] = environ["gis"]
+ ctx["scenario"] = environ["scenario"]
+ ctx["scenario_year"] = 2099
+ if ctx["scenario"] == "yes":
+ ctx["scenario_year"] = environ["scenario_year"]
+ ctx["scenario_sts"], ctx["scenario_ets"] = get_scenario_period(ctx)
+ ctx["with_header"] = environ["with_header"]
+
+ # TODO: this code stinks and is likely buggy
+ headers = []
+ if (
+ "apsim" in ctx["myvars"]
+ or "daycent" in ctx["myvars"]
+ or "century" in ctx["myvars"]
+ or "salus" in ctx["myvars"]
+ ):
+ if ctx["what"] == "download":
+ headers.append(("Content-type", "application/octet-stream"))
+ headers.append(
+ ("Content-Disposition", "attachment; filename=metdata.txt")
+ )
+ else:
+ headers.append(("Content-type", "text/plain"))
+ elif "dndc" not in ctx["myvars"] and ctx["what"] != "excel":
+ if ctx["what"] == "download":
+ headers.append(("Content-type", "application/octet-stream"))
+ dlfn = "changeme.txt"
+ if len(ctx["stations"]) < 10:
+ dlfn = f"{'_'.join(ctx['stations'])}.txt"
+ headers.append(
+ ("Content-Disposition", f"attachment; filename={dlfn}")
+ )
+ else:
+ headers.append(("Content-type", "text/plain"))
+ elif "dndc" in ctx["myvars"]:
+ headers.append(("Content-type", "application/octet-stream"))
+ headers.append(
+ ("Content-Disposition", "attachment; filename=dndc.zip")
+ )
+ elif "swat" in ctx["myvars"]:
+ headers.append(("Content-type", "application/octet-stream"))
+ headers.append(
+ ("Content-Disposition", "attachment; filename=swatfiles.zip")
+ )
+ elif ctx["what"] == "excel":
+ headers.append(("Content-type", EXL))
+ headers.append(
+ ("Content-Disposition", "attachment; filename=nwscoop.xlsx")
+ )
+
+ conn, cursor = get_dbconnc("coop")
+ start_response("200 OK", headers)
+ # OK, now we fret
+ if "daycent" in ctx["myvars"]:
+ res = do_daycent(cursor, ctx)
+ elif "century" in ctx["myvars"]:
+ res = do_century(cursor, ctx)
+ elif "apsim" in ctx["myvars"]:
+ res = do_apsim(cursor, ctx)
+ elif "dndc" in ctx["myvars"]:
+ res = do_dndc(cursor, ctx)
+ elif "salus" in ctx["myvars"]:
+ res = do_salus(cursor, ctx)
+ elif "swat" in ctx["myvars"]:
+ res = do_swat(None, ctx)
+ else:
+ res = do_simple(cursor, ctx)
+ cursor.close()
+ conn.close()
+ return [res]
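The ``sane_date`` helper added above clamps out-of-range day numbers instead of raising, which keeps sloppy form submissions working. A quick illustration of the clamping behavior (values are illustrative):

    from datetime import date, timedelta

    def sane_date(year: int, month: int, day: int) -> date:
        """Clamp day to the last valid day of the given month."""
        nextmonth = date(year, month, 1) + timedelta(days=35)
        lastday = nextmonth.replace(day=1) - timedelta(days=1)
        return date(year, month, min(day, lastday.day))

    print(sane_date(2023, 2, 31))  # 2023-02-28
    print(sane_date(2024, 2, 31))  # 2024-02-29 (leap year)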
diff --git a/pylib/iemweb/request/daily.py b/pylib/iemweb/request/daily.py
new file mode 100644
index 0000000000..e865521795
--- /dev/null
+++ b/pylib/iemweb/request/daily.py
@@ -0,0 +1,237 @@
+""".. title:: IEM Computed Daily Summaries
+
+Documentation for /cgi-bin/request/daily.py
+-------------------------------------------
+
+This data source contains a combination of IEM computed calendar day summaries
+and some more official totals, with some sites reporting explicit values. One
+should also note that the airport stations typically report over a 24 hour
+period in local standard time, which equates to 1 AM to 1 AM daylight time.
+
+Example Usage
+-------------
+
+Request all high temperature data for Ames, IA (AMW) for the month of January
+2019:
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/daily.py?sts=2019-01-01&ets=2019-01-31&network=IA_ASOS&stations=AMW&var=max_temp_f&format=csv
+
+
+Request daily precipitation and the climatology for all stations in Washington
+state on 23 June 2023 in Excel format:
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/daily.py?sts=2023-06-23&ets=2023-06-23&network=WA_ASOS&stations=_ALL&var=precip_in,climo_precip_in&format=excel
+
+"""
+
+import copy
+import sys
+from datetime import datetime
+from io import BytesIO, StringIO
+
+import pandas as pd
+from pydantic import Field
+from pyiem.database import get_dbconn, get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.network import Table as NetworkTable
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+from sqlalchemy import text
+
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+DEFAULT_COLS = (
+ "max_temp_f,min_temp_f,max_dewpoint_f,min_dewpoint_f,precip_in,"
+ "avg_wind_speed_kts,avg_wind_drct,min_rh,avg_rh,max_rh,"
+ "climo_high_f,climo_low_f,climo_precip_in,snow_in,snowd_in,"
+ "min_feel,avg_feel,max_feel,max_wind_speed_kts,max_wind_gust_kts,"
+ "srad_mj"
+).split(",")
+
+
+class MyCGI(CGIModel):
+ """See how we are called."""
+
+ ets: datetime = Field(None, description="End date to query")
+ format: str = Field("csv", description="The format of the output")
+ na: str = Field("None", description="The NA value to use")
+ network: str = Field(..., description="Network Identifier")
+ station: ListOrCSVType = Field(
+ [],
+ description=(
+ "Comma delimited or multi-param station identifiers, "
+ "_ALL for all stations in network (deprecated)"
+ ),
+ )
+ stations: ListOrCSVType = Field(
+ [],
+ description=(
+ "Comma delimited or multi-param station identifiers, "
+ "_ALL for all stations in network"
+ ),
+ )
+ sts: datetime = Field(None, description="Start date to query")
+ var: ListOrCSVType = Field(
+ None,
+ description=(
+ "Comma delimited or multi-param variable names to include in "
+ f"output, columns are: {DEFAULT_COLS}"
+ ),
+ )
+ year1: int = Field(None, description="Start year when sts is not provided")
+ month1: int = Field(
+ None, description="Start month when sts is not provided"
+ )
+ day1: int = Field(None, description="Start day when sts is not provided")
+ year2: int = Field(None, description="End year when ets is not provided")
+ month2: int = Field(None, description="End month when ets is not provided")
+ day2: int = Field(None, description="End day when ets is not provided")
+
+
+def overloaded():
+ """Prevent automation from overwhelming the server"""
+
+ with get_dbconn("iem") as pgconn:
+ cursor = pgconn.cursor()
+ cursor.execute("select one::float from system_loadavg")
+ val = cursor.fetchone()[0]
+ if val > 25: # Cut back on logging
+ sys.stderr.write(f"/cgi-bin/request/daily.py over cpu thres: {val}\n")
+ return val > 20
+
+
+def get_climate(network, stations):
+ """Fetch the climatology for these stations"""
+ nt = NetworkTable(network, only_online=False)
+ if not nt.sts:
+ return "ERROR: Invalid network specified"
+ clisites = []
+ for station in stations:
+ if station == "_ALL":
+ for sid in nt.sts:
+ clid = nt.sts[sid]["ncei91"]
+ if clid not in clisites:
+ clisites.append(clid)
+ break
+ if station not in nt.sts:
+ return f"ERROR: station: {station} not found in network: {network}"
+ clid = nt.sts[station]["ncei91"]
+ if clid not in clisites:
+ clisites.append(clid)
+ with get_sqlalchemy_conn("coop") as conn:
+ df = pd.read_sql(
+ text(
+ """
+ SELECT station, to_char(valid, 'mmdd') as sday,
+ high as climo_high_f, low as climo_low_f,
+ precip as climo_precip_in from ncei_climate91
+ where station = ANY(:clisites)
+ """
+ ),
+ conn,
+ params={"clisites": clisites},
+ )
+ return df
+
+
+def get_data(network, sts, ets, stations, cols, na, fmt):
+ """Go fetch data please"""
+ if not cols:
+ cols = copy.deepcopy(DEFAULT_COLS)
+ cols.insert(0, "day")
+ cols.insert(0, "station")
+ climate = get_climate(network, stations)
+ if isinstance(climate, str):
+ return climate
+
+ with get_sqlalchemy_conn("iem") as conn:
+ df = pd.read_sql(
+ text(
+ """
+ SELECT id as station, day, max_tmpf as max_temp_f,
+ min_tmpf as min_temp_f, max_dwpf as max_dewpoint_f,
+ min_dwpf as min_dewpoint_f,
+ pday as precip_in,
+ avg_sknt as avg_wind_speed_kts,
+ vector_avg_drct as avg_wind_drct,
+ min_rh, avg_rh, max_rh,
+ snow as snow_in,
+ snowd as snowd_in,
+ min_feel, avg_feel, max_feel,
+ max_sknt as max_wind_speed_kts,
+ max_gust as max_wind_gust_kts,
+ srad_mj, ncei91, to_char(day, 'mmdd') as sday
+ from summary s JOIN stations t
+ on (t.iemid = s.iemid) WHERE
+ s.day >= :st and s.day <= :et and
+ t.network = :n and t.id = ANY(:ds)
+ ORDER by day ASC"""
+ ),
+ conn,
+ params={"st": sts, "et": ets, "n": network, "ds": stations},
+ )
+ # Join to climate data frame
+ df = df.merge(
+ climate,
+ how="left",
+ left_on=["ncei91", "sday"],
+ right_on=["station", "sday"],
+ suffixes=("", "_r"),
+ )
+ df = df[df.columns.intersection(cols)]
+ if na != "blank":
+ df = df.fillna(na)
+ if fmt == "json":
+ return df.to_json(orient="records")
+ if fmt == "excel":
+ bio = BytesIO()
+ with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
+ df.to_excel(writer, sheet_name="Data", index=False)
+ return bio.getvalue()
+
+ sio = StringIO()
+ df.to_csv(sio, index=False)
+ return sio.getvalue()
+
+
+@iemapp(help=__doc__, schema=MyCGI, parse_times=True)
+def application(environ, start_response):
+ """See how we are called"""
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("Missing start and end times")
+ sts, ets = environ["sts"].date(), environ["ets"].date()
+
+ if sts.year != ets.year and overloaded():
+ start_response(
+ "503 Service Unavailable", [("Content-type", "text/plain")]
+ )
+ return [b"ERROR: server over capacity, please try later"]
+
+ fmt = environ.get("format", "csv")
+ stations = environ["stations"]
+ if not stations:
+ stations = environ["station"]
+ if not stations:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return [b"ERROR: No stations specified for request"]
+ network = environ["network"][:20]
+ if "_ALL" in stations:
+ if (ets - sts).days > 366:
+ raise IncompleteWebRequest(
+ "Must request a year or less when requesting all stations"
+ )
+ stations = list(NetworkTable(network, only_online=False).sts.keys())
+ cols = environ["var"]
+ na = environ["na"]
+ if na not in ["M", "None", "blank"]:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return [b"ERROR: Invalid `na` value provided. {M, None, blank}"]
+ if fmt != "excel":
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return [
+ get_data(network, sts, ets, stations, cols, na, fmt).encode(
+ "ascii"
+ )
+ ]
+ headers = [
+ ("Content-type", EXL),
+ ("Content-disposition", "attachment; Filename=daily.xlsx"),
+ ]
+ start_response("200 OK", headers)
+ return [get_data(network, sts, ets, stations, cols, na, fmt)]
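
A minimal client sketch for this service, mirroring the first docstring example; it assumes `requests` and `pandas` are installed and uses the documented CSV path:

    # Fetch January 2019 daily data for Ames (AMW) as CSV into pandas.
    # na=blank leaves missing values empty so read_csv parses them as NaN.
    from io import StringIO

    import pandas as pd
    import requests

    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/daily.py"
    params = {
        "sts": "2019-01-01",
        "ets": "2019-01-31",
        "network": "IA_ASOS",
        "stations": "AMW",
        "var": "max_temp_f,precip_in",
        "format": "csv",
        "na": "blank",
    }
    resp = requests.get(URL, params=params, timeout=60)
    resp.raise_for_status()
    df = pd.read_csv(StringIO(resp.text))
    print(df.head())
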
diff --git a/pylib/iemweb/request/feel.py b/pylib/iemweb/request/feel.py
new file mode 100644
index 0000000000..d22558932a
--- /dev/null
+++ b/pylib/iemweb/request/feel.py
@@ -0,0 +1,56 @@
+"""FEEL data download"""
+
+# pylint: disable=abstract-class-instantiated
+from io import BytesIO
+
+import pandas as pd
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import iemapp
+from sqlalchemy import text
+
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+
+def run(sts, ets, start_response):
+ """Get data!"""
+ params = {"sts": sts, "ets": ets}
+ with get_sqlalchemy_conn("other") as dbconn:
+ sql = (
+ "SELECT * from feel_data_daily where "
+ "valid >= :sts and valid < :ets ORDER by valid ASC"
+ )
+ df = pd.read_sql(text(sql), dbconn, params=params)
+
+ sql = (
+ "SELECT * from feel_data_hourly where "
+ "valid >= :sts and valid < :ets ORDER by valid ASC"
+ )
+ df2 = pd.read_sql(text(sql), dbconn, params=params)
+
+ def fmt(val):
+ """Lovely hack."""
+ return val.strftime("%Y-%m-%d %H:%M")
+
+ df2["valid"] = df2["valid"].apply(fmt)
+
+ bio = BytesIO()
+ with pd.ExcelWriter(bio, engine="openpyxl") as writer:
+ df.to_excel(writer, sheet_name="Daily Data", index=False)
+ df2.to_excel(writer, sheet_name="Hourly Data", index=False)
+
+ headers = [
+ ("Content-type", EXL),
+ ("Content-disposition", "attachment;Filename=feel.xlsx"),
+ ]
+ start_response("200 OK", headers)
+ return bio.getvalue()
+
+
+@iemapp()
+def application(environ, start_response):
+ """Get stuff"""
+ if "sts" not in environ:
+ raise IncompleteWebRequest("GET parameters for start time missing")
+
+ return [run(environ["sts"], environ["ets"], start_response)]
diff --git a/pylib/iemweb/request/gis/awc_gairmets.py b/pylib/iemweb/request/gis/awc_gairmets.py
new file mode 100644
index 0000000000..2e2d486780
--- /dev/null
+++ b/pylib/iemweb/request/gis/awc_gairmets.py
@@ -0,0 +1,138 @@
+""".. title:: AWC Graphical AIRMETs
+
+Documentation for /cgi-bin/request/gis/awc_gairmets.py
+------------------------------------------------------
+
+This service emits the archive of IEM's best attempt at processing graphical
+AIRMETs.
+
+"""
+
+# Local
+import tempfile
+import zipfile
+from io import BytesIO
+
+# Third Party
+import fiona
+import geopandas as gpd
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import CGIModel, iemapp
+
+fiona.supported_drivers["KML"] = "rw"
+PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ format: str = Field("shp", description="Output Format")
+ sts: AwareDatetime = Field(None, description="Start Time")
+ ets: AwareDatetime = Field(None, description="End Time")
+ year1: int = Field(
+ None, description="Start Year in UTC, when sts not set."
+ )
+ month1: int = Field(
+ None, description="Start Month in UTC, when sts not set."
+ )
+ day1: int = Field(None, description="Start Day in UTC, when sts not set.")
+ hour1: int = Field(0, description="Start Hour in UTC, when sts not set.")
+ minute1: int = Field(
+ 0, description="Start Minute in UTC, when sts not set."
+ )
+ year2: int = Field(None, description="End Year in UTC, when ets not set.")
+ month2: int = Field(
+ None, description="End Month in UTC, when ets not set."
+ )
+ day2: int = Field(None, description="End Day in UTC, when ets not set.")
+ hour2: int = Field(0, description="End Hour in UTC, when ets not set.")
+ minute2: int = Field(0, description="End Minute in UTC, when ets not set.")
+
+
+def run(ctx, start_response):
+ """Do something!"""
+ common = "at time zone 'UTC', 'YYYY-MM-DD\"T\"HH24:MI:00\"Z\"'"
+ schema = {
+ "geometry": "Polygon",
+ "properties": {
+ "NAME": "str:64",
+ "LABEL": "str:4",
+ "GML_ID": "str:32",
+ "VALID_AT": "str:20",
+ "VALID_FM": "str:20",
+ "VALID_TO": "str:20",
+ "ISSUTIME": "str:20",
+ "PROD_ID": "str:36",
+ "STATUS": "str:32",
+ "HZTYPE": "str:256",
+ "WXCOND": "str:256",
+ },
+ }
+ with get_sqlalchemy_conn("postgis") as conn:
+ df = gpd.read_postgis(
+ "select label, gml_id, "
+ f"gml_id || ' ' || to_char(valid_at {common}) as name, "
+ f"to_char(valid_at {common}) as valid_at, "
+ f"to_char(valid_from {common}) as valid_fm, "
+ f"to_char(valid_to {common}) as valid_to, "
+ f"to_char(issuetime {common}) as issutime, "
+ "product_id as prod_id, status, hazard_type as hztype, "
+ "array_to_string(weather_conditions, ',') as wxcond, geom "
+ "from airmets WHERE issuetime >= %s and "
+ "issuetime < %s ORDER by valid_at ASC",
+ conn,
+ params=(
+ ctx["sts"],
+ ctx["ets"],
+ ),
+ geom_col="geom",
+ )
+ if df.empty:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return b"ERROR: no results found for your query"
+ df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
+ fn = f"airmets_{ctx['sts']:%Y%m%d%H%M}_{ctx['ets']:%Y%m%d%H%M}"
+ if ctx["format"] == "kml":
+ fp = BytesIO()
+ with fiona.drivers():
+ df.to_file(fp, driver="KML", NameField="NAME", engine="fiona")
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.kml"),
+ ]
+ start_response("200 OK", headers)
+ return fp.getvalue()
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
+
+ zio = BytesIO()
+ with zipfile.ZipFile(
+ zio, mode="w", compression=zipfile.ZIP_DEFLATED
+ ) as zf:
+ with open(PRJFILE, encoding="utf-8") as fh:
+ zf.writestr(f"{fn}.prj", fh.read())
+ for suffix in ["shp", "shx", "dbf"]:
+ zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.zip"),
+ ]
+ start_response("200 OK", headers)
+
+ return zio.getvalue()
+
+
+@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """Do something fun!"""
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("Start and End Time are required!")
+ ctx = {
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ "format": environ["format"],
+ }
+ return [run(ctx, start_response)]
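
With the default `shp` format the response is a zip archive containing the shapefile parts written above. A consumption sketch, assuming a recent geopandas whose pyogrio engine can read zipped shapefiles directly:

    import geopandas as gpd
    import requests

    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/awc_gairmets.py"
    params = {"sts": "2024-01-01T00:00Z", "ets": "2024-01-02T00:00Z"}
    resp = requests.get(URL, params=params, timeout=120)
    resp.raise_for_status()
    with open("gairmets.zip", "wb") as fh:
        fh.write(resp.content)
    df = gpd.read_file("gairmets.zip")  # zipped shapefile read in place
    print(df[["NAME", "VALID_FM", "VALID_TO"]].head())
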
diff --git a/pylib/iemweb/request/gis/cwas.py b/pylib/iemweb/request/gis/cwas.py
new file mode 100644
index 0000000000..0d3de89710
--- /dev/null
+++ b/pylib/iemweb/request/gis/cwas.py
@@ -0,0 +1,101 @@
+""".. title:: CWAS Data Service
+
+Documentation for /cgi-bin/request/gis/cwas.py
+----------------------------------------------
+
+This service emits the IEM archive of Center Weather Advisories (CWA).
+
+"""
+
+# Local
+import tempfile
+import zipfile
+from io import BytesIO
+
+# Third Party
+import fiona
+import geopandas as gpd
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import iemapp
+
+fiona.supported_drivers["KML"] = "rw"
+PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
+
+
+def run(ctx, start_response):
+ """Do something!"""
+ common = "at time zone 'UTC', 'YYYY-MM-DD\"T\"HH24:MI:00\"Z\"'"
+ schema = {
+ "geometry": "Polygon",
+ "properties": {
+ "CENTER": "str:4",
+ "ISSUE": "str:20",
+ "EXPIRE": "str:20",
+ "PROD_ID": "str:36",
+ "NARRATIV": "str:256",
+ "NUMBER": "int",
+ },
+ }
+ with get_sqlalchemy_conn("postgis") as conn:
+ df = gpd.read_postgis(
+ "select center, "
+ f"to_char(issue {common}) as issue, "
+ f"to_char(expire {common}) as expire, "
+ "product_id as prod_id, narrative as narrativ, num as number, "
+ "geom from cwas WHERE issue >= %s and "
+ "issue < %s ORDER by issue ASC",
+ conn,
+ params=(
+ ctx["sts"],
+ ctx["ets"],
+ ),
+ geom_col="geom",
+ )
+ if df.empty:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return b"ERROR: no results found for your query"
+ df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
+ fn = f"cwas_{ctx['sts']:%Y%m%d%H%M}_{ctx['ets']:%Y%m%d%H%M}"
+ if ctx["format"] == "kml":
+ fp = BytesIO()
+ with fiona.drivers():
+ df.to_file(fp, driver="KML")
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.kml"),
+ ]
+ start_response("200 OK", headers)
+ return fp.getvalue()
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
+
+ zio = BytesIO()
+ with zipfile.ZipFile(
+ zio, mode="w", compression=zipfile.ZIP_DEFLATED
+ ) as zf:
+ with open(PRJFILE, encoding="utf-8") as fh:
+ zf.writestr(f"{fn}.prj", fh.read())
+ for suffix in ["shp", "shx", "dbf"]:
+ zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.zip"),
+ ]
+ start_response("200 OK", headers)
+
+ return zio.getvalue()
+
+
+@iemapp(default_tz="UTC", help=__doc__)
+def application(environ, start_response):
+ """Do something fun!"""
+ if "sts" not in environ:
+ raise IncompleteWebRequest("GET start time parameters missing")
+ ctx = {
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ "format": environ.get("format", "shp"),
+ }
+ return [run(ctx, start_response)]
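
The KML branch above skips the shapefile schema entirely; a short sketch of requesting it (timestamps are illustrative):

    import requests

    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/cwas.py"
    params = {
        "sts": "2024-05-01T00:00Z",
        "ets": "2024-05-02T00:00Z",
        "format": "kml",  # any other value falls through to the zipped shapefile
    }
    resp = requests.get(URL, params=params, timeout=120)
    resp.raise_for_status()
    with open("cwas.kml", "wb") as fh:
        fh.write(resp.content)
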
diff --git a/pylib/iemweb/request/gis/nexrad_storm_attrs.py b/pylib/iemweb/request/gis/nexrad_storm_attrs.py
new file mode 100644
index 0000000000..390dfb8298
--- /dev/null
+++ b/pylib/iemweb/request/gis/nexrad_storm_attrs.py
@@ -0,0 +1,186 @@
+""".. title:: NEXRAD Storm Attributes Data Service
+
+Return to `request form `_.
+
+Documentation for /cgi-bin/request/gis/nexrad_storm_attrs.py
+------------------------------------------------------------
+
+This service provides IEM processed NWS NEXRAD Storm Attribute table data. This
+archive updates in real-time as level 3 NCR products are received. If you
+request more than two radar sites, the time span is limited to 7 days.
+
+Changelog
+---------
+
+- 2024-06-11: Initial documentation release
+
+Example Usage
+-------------
+
+Provide all attributes between 2024-06-11 00:00 and 2024-06-11 23:59 UTC
+
+https://mesonet.agron.iastate.edu/cgi-bin/request/gis/nexrad_storm_attrs.py?\
+fmt=shp&sts=2024-06-11T00:00:00Z&ets=2024-06-11T23:59:59Z
+
+"""
+
+import datetime
+import zipfile
+from io import BytesIO, StringIO
+
+import shapefile
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+from sqlalchemy import text
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ ets: AwareDatetime = Field(None, description="End of Time for request")
+ fmt: str = Field(
+ "shp", description="Format of output", pattern="^(shp|csv)$"
+ )
+ radar: ListOrCSVType = Field([], description="Radar Sites to include")
+ sts: AwareDatetime = Field(None, description="Start of Time for request")
+ year1: int = Field(
+ None, description="Year for start of time if sts not set"
+ )
+ month1: int = Field(
+ None, description="Month for start of time if sts not set"
+ )
+ day1: int = Field(None, description="Day for start of time if sts not set")
+ hour1: int = Field(
+ None, description="Hour for start of time if sts not set"
+ )
+ minute1: int = Field(
+ None, description="Minute for start of time if sts not set"
+ )
+ year2: int = Field(None, description="Year for end of time if ets not set")
+ month2: int = Field(
+ None, description="Month for end of time if ets not set"
+ )
+ day2: int = Field(None, description="Day for end of time if ets not set")
+ hour2: int = Field(None, description="Hour for end of time if ets not set")
+ minute2: int = Field(
+ None, description="Minute for end of time if ets not set"
+ )
+
+
+def run(environ, start_response):
+ """Do something!"""
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("Missing start or end time parameters.")
+ sio = StringIO()
+
+    # Need to limit the size of what can be requested, lest the file get
+    # massive.  So set an arbitrary cap:
+    # 1) If more than two RADARs are requested, limit the span to 7 days
+ radarlimit = ""
+ if environ["radar"] and "ALL" not in environ["radar"]:
+ radarlimit = " and nexrad = ANY(:radar) "
+ if (
+ len(environ["radar"]) > 2
+ and (environ["ets"] - environ["sts"]).days > 6
+ ):
+ environ["ets"] = environ["sts"] + datetime.timedelta(days=7)
+ fn = f"stormattr_{environ['sts']:%Y%m%d%H%M}_{environ['ets']:%Y%m%d%H%M}"
+
+ with get_sqlalchemy_conn("radar") as conn:
+ res = conn.execute(
+ text(f"""
+ SELECT to_char(valid at time zone 'UTC', 'YYYYMMDDHH24MI')
+ as utctime,
+ storm_id, nexrad, azimuth, range, tvs, meso, posh, poh, max_size,
+ vil, max_dbz, max_dbz_height, top, drct, sknt,
+ ST_y(geom) as lat, ST_x(geom) as lon
+ from nexrad_attributes_log WHERE
+ valid >= :sts and valid < :ets {radarlimit} ORDER by valid ASC
+ """),
+ {
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ "radar": environ["radar"],
+ },
+ )
+ if res.rowcount == 0:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return b"ERROR: no results found for your query"
+
+ if environ["fmt"] == "csv":
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.csv"),
+ ]
+ start_response("200 OK", headers)
+ sio.write(
+ (
+ "VALID,STORM_ID,NEXRAD,AZIMUTH,RANGE,TVS,MESO,POSH,"
+ "POH,MAX_SIZE,VIL,MAX_DBZ,MAZ_DBZ_H,TOP,DRCT,SKNT,LAT,LON\n"
+ )
+ )
+ for row in res:
+ sio.write(",".join([str(s) for s in row]) + "\n")
+ return sio.getvalue().encode("ascii", "ignore")
+
+ shpio = BytesIO()
+ shxio = BytesIO()
+ dbfio = BytesIO()
+
+ with shapefile.Writer(shp=shpio, shx=shxio, dbf=dbfio) as shp:
+ # C is ASCII characters
+ # N is a double precision integer limited to around 18 characters
+ # length
+ # D is for dates in the YYYYMMDD format,
+ # with no spaces or hyphens between the sections
+ # F is for floating point numbers with the same length limits as N
+ # L is for logical data which is stored in the shapefile's attr
+ # table as a short integer as a 1 (true) or a 0 (false).
+ # The values it can receive are 1, 0, y, n, Y, N, T, F
+ # or the python builtins True and False
+ shp.field("VALID", "C", 12)
+ shp.field("STORM_ID", "C", 2)
+ shp.field("NEXRAD", "C", 3)
+ shp.field("AZIMUTH", "N", 3, 0)
+ shp.field("RANGE", "N", 3, 0)
+ shp.field("TVS", "C", 10)
+ shp.field("MESO", "C", 10)
+ shp.field("POSH", "N", 3, 0)
+ shp.field("POH", "N", 3, 0)
+ shp.field("MAX_SIZE", "F", 5, 2)
+ shp.field("VIL", "N", 3, 0)
+ shp.field("MAX_DBZ", "N", 3, 0)
+ shp.field("MAX_DBZ_H", "F", 5, 2)
+ shp.field("TOP", "F", 9, 2)
+ shp.field("DRCT", "N", 3, 0)
+ shp.field("SKNT", "N", 3, 0)
+ shp.field("LAT", "F", 10, 4)
+ shp.field("LON", "F", 10, 4)
+ for row in res:
+ shp.point(row[-1], row[-2])
+ shp.record(*row)
+
+ zio = BytesIO()
+ with zipfile.ZipFile(
+ zio, mode="w", compression=zipfile.ZIP_DEFLATED
+ ) as zf:
+ with open("/opt/iem/data/gis/meta/4326.prj", encoding="utf-8") as fh:
+ zf.writestr(f"{fn}.prj", fh.read())
+ zf.writestr(f"{fn}.shp", shpio.getvalue())
+ zf.writestr(f"{fn}.shx", shxio.getvalue())
+ zf.writestr(f"{fn}.dbf", dbfio.getvalue())
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.zip"),
+ ]
+ start_response("200 OK", headers)
+
+ return zio.getvalue()
+
+
+@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """Do something fun!"""
+ return [run(environ, start_response)]
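
A sketch exercising the `fmt=csv` branch over the documented example period, restricted to one site via the `radar` parameter:

    from io import StringIO

    import pandas as pd
    import requests

    URL = (
        "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/"
        "nexrad_storm_attrs.py"
    )
    params = {
        "fmt": "csv",
        "sts": "2024-06-11T00:00Z",
        "ets": "2024-06-12T00:00Z",
        "radar": "KDMX",  # repeat the parameter to request multiple sites
    }
    resp = requests.get(URL, params=params, timeout=120)
    resp.raise_for_status()
    df = pd.read_csv(StringIO(resp.text))
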
diff --git a/pylib/iemweb/request/gis/pireps.py b/pylib/iemweb/request/gis/pireps.py
new file mode 100644
index 0000000000..f89376051c
--- /dev/null
+++ b/pylib/iemweb/request/gis/pireps.py
@@ -0,0 +1,212 @@
+""".. title:: Pilot Weather Report (PIREP) Data Service
+
+Documentation for /cgi-bin/request/gis/pireps.py
+------------------------------------------------
+
+This service emits processed and raw PIREP data. At this time, you must
+request 120 days or less of data at one time if you do not filter the request.
+
+Changelog
+---------
+
+- 2024-06-28: Initial documentation release
+
+Example Requests
+----------------
+
+Provide all PIREPs for the month of June 2024 over Chicago ARTCC in CSV:
+
+https://mesonet.agron.iastate.edu/cgi-bin/request/gis/pireps.py?\
+sts=2024-06-01T00:00:00Z&ets=2024-07-01T00:00:00Z&artcc=ZAU&fmt=csv
+
+"""
+
+import datetime
+import zipfile
+from io import BytesIO, StringIO
+
+import shapefile
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+from sqlalchemy import text
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ artcc: ListOrCSVType = Field(
+ default=[],
+ description="The ARTCC to limit the query to, use _ALL for all",
+ )
+ ets: AwareDatetime = Field(
+ default=None, description="The end time of the query"
+ )
+ fmt: str = Field(
+ default="shp", description="The format of the output file"
+ )
+ sts: AwareDatetime = Field(
+ default=None, description="The start time of the query"
+ )
+ year1: int = Field(
+ default=2000,
+ description="The start year of the query, when sts is not provided",
+ )
+ month1: int = Field(
+ default=1,
+ description="The start month of the query, when sts is not provided",
+ )
+ day1: int = Field(
+ default=1,
+ description="The start day of the query, when sts is not provided",
+ )
+ degrees: float = Field(
+ default=1.0,
+ description="The distance in degrees for a spatial filter",
+ gt=0,
+ lt=90,
+ )
+ filter: bool = Field(
+ default=False,
+ description="Should we filter by distance from a point?",
+ )
+ lat: float = Field(
+ default=41.99,
+ description="The latitude of the point to filter by",
+ )
+ lon: float = Field(
+ default=-91.99,
+ description="The longitude of the point to filter by",
+ )
+ hour1: int = Field(
+ default=0,
+ description="The start hour of the query, when sts is not provided",
+ )
+ minute1: int = Field(
+ default=0,
+ description="The start minute of the query, when sts is not provided",
+ )
+ year2: int = Field(
+ default=2000,
+ description="The end year of the query, when ets is not provided",
+ )
+ month2: int = Field(
+ default=1,
+ description="The end month of the query, when ets is not provided",
+ )
+ day2: int = Field(
+ default=1,
+ description="The end day of the query, when ets is not provided",
+ )
+ hour2: int = Field(
+ default=0,
+ description="The end hour of the query, when ets is not provided",
+ )
+ minute2: int = Field(
+ default=0,
+ description="The end minute of the query, when ets is not provided",
+ )
+
+
+def run(environ, start_response):
+ """Go run!"""
+ artcc_sql = ""
+ if "_ALL" not in environ["artcc"] and environ["artcc"]:
+ artcc_sql = " artcc = ANY(:artcc) and "
+ params = {
+ "artcc": environ["artcc"],
+ "distance": environ["degrees"],
+ "lat": environ["lat"],
+ "lon": environ["lon"],
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ }
+
+ spatialsql = ""
+ if environ["filter"]:
+ spatialsql = (
+ "ST_Distance(geom::geometry, ST_SetSRID(ST_Point(:lon, :lat), "
+ "4326)) <= :distance and "
+ )
+ else:
+ if (environ["ets"] - environ["sts"]).days > 120:
+ environ["ets"] = environ["sts"] + datetime.timedelta(days=120)
+ sql = f"""
+ SELECT to_char(valid at time zone 'UTC', 'YYYYMMDDHH24MI') as utctime,
+ case when is_urgent then 'T' else 'F' end,
+ substr(replace(aircraft_type, ',', ' '), 0, 40),
+ substr(replace(report, ',', ' '), 0, 255),
+ substr(trim(substring(replace(report, ',', ' '),
+ '/IC([^/]*)/?')), 0, 255) as icing,
+ substr(trim(substring(replace(report, ',', ' '),
+ '/TB([^/]*)/?')), 0, 255) as turb,
+ artcc, ST_y(geom::geometry) as lat, ST_x(geom::geometry) as lon
+ from pireps WHERE {spatialsql} {artcc_sql}
+ valid >= :sts and valid < :ets ORDER by valid ASC
+ """
+ fn = f"pireps_{environ['sts']:%Y%m%d%H%M}_{environ['ets']:%Y%m%d%H%M}"
+
+ with get_sqlalchemy_conn("postgis") as conn:
+ res = conn.execute(text(sql), params)
+ if res.rowcount == 0:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return b"ERROR: no results found for your query"
+
+ if environ["fmt"] == "csv":
+ sio = StringIO()
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.csv"),
+ ]
+ start_response("200 OK", headers)
+ sio.write(
+ "VALID,URGENT,AIRCRAFT,REPORT,ICING,TURBULENCE,ATRCC,LAT,LON\n"
+ )
+ for row in res:
+ sio.write(",".join([str(s) for s in row]) + "\n")
+ return sio.getvalue().encode("ascii", "ignore")
+
+ shpio = BytesIO()
+ shxio = BytesIO()
+ dbfio = BytesIO()
+
+ with shapefile.Writer(shx=shxio, dbf=dbfio, shp=shpio) as shp:
+ shp.field("VALID", "C", 12)
+ shp.field("URGENT", "C", 1)
+ shp.field("AIRCRAFT", "C", 40)
+ shp.field("REPORT", "C", 255) # Max field size is 255
+ shp.field("ICING", "C", 255) # Max field size is 255
+ shp.field("TURB", "C", 255) # Max field size is 255
+ shp.field("ARTCC", "C", 3)
+ shp.field("LAT", "F", 7, 4)
+ shp.field("LON", "F", 9, 4)
+ for row in res:
+ if row[-1] is None:
+ continue
+ shp.point(row[-1], row[-2])
+ shp.record(*row)
+
+ zio = BytesIO()
+ with zipfile.ZipFile(
+ zio, mode="w", compression=zipfile.ZIP_DEFLATED
+ ) as zf:
+ with open("/opt/iem/data/gis/meta/4326.prj", encoding="ascii") as fh:
+ zf.writestr(f"{fn}.prj", fh.read())
+ zf.writestr(f"{fn}.shp", shpio.getvalue())
+ zf.writestr(f"{fn}.shx", shxio.getvalue())
+ zf.writestr(f"{fn}.dbf", dbfio.getvalue())
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.zip"),
+ ]
+ start_response("200 OK", headers)
+ return zio.getvalue()
+
+
+@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """Do something fun!"""
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("GET start time parameters missing.")
+ return [run(environ, start_response)]
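
The `filter=yes` branch above swaps the 120 day cap for an `ST_Distance` point filter; a hedged sketch (boolean fields elsewhere in this diff document accept yes/no spellings, which is assumed to hold here as well):

    import requests

    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/pireps.py"
    params = {
        "fmt": "csv",
        "sts": "2024-06-01T00:00Z",
        "ets": "2024-06-08T00:00Z",
        "filter": "yes",  # enable the ST_Distance point filter
        "lat": 41.99,
        "lon": -93.62,
        "degrees": 2.0,   # search radius in degrees
    }
    resp = requests.get(URL, params=params, timeout=120)
    resp.raise_for_status()
    print(resp.text.splitlines()[0])  # header row
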
diff --git a/pylib/iemweb/request/gis/sigmets.py b/pylib/iemweb/request/gis/sigmets.py
new file mode 100644
index 0000000000..d9afbd5a1f
--- /dev/null
+++ b/pylib/iemweb/request/gis/sigmets.py
@@ -0,0 +1,165 @@
+""".. title:: SIGMET Data Service
+
+Return to `User Frontend `_
+
+Documentation for /cgi-bin/request/gis/sigmets.py
+-------------------------------------------------
+
+This service emits SIGMET data for a given time period.
+
+Changelog
+---------
+
+- 2024-07-11: Migration to pydantic validation and added CSV,Excel format
+ options.
+
+"""
+
+# Local
+import tempfile
+import zipfile
+from io import BytesIO, StringIO
+
+# Third Party
+import fiona
+import geopandas as gpd
+import pandas as pd
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.reference import ISO8601
+from pyiem.webutil import CGIModel, iemapp
+from sqlalchemy import text
+
+fiona.supported_drivers["KML"] = "rw"
+PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ format: str = Field(
+ default="shp",
+ description="Output format, either shp, kml, csv, or excel",
+ pattern="^(shp|kml|csv|excel)$",
+ )
+ sts: AwareDatetime = Field(default=None, description="Start Time")
+ ets: AwareDatetime = Field(default=None, description="End Time")
+ year1: int = Field(default=None, description="Start Year, if sts not set")
+ month1: int = Field(
+ default=None, description="Start Month, if sts not set"
+ )
+ day1: int = Field(default=None, description="Start Day, if sts not set")
+ hour1: int = Field(default=None, description="Start Hour, if sts not set")
+ minute1: int = Field(
+ default=None, description="Start Minute, if sts not set"
+ )
+ year2: int = Field(default=None, description="End Year, if ets not set")
+ month2: int = Field(default=None, description="End Month, if ets not set")
+ day2: int = Field(default=None, description="End Day, if ets not set")
+ hour2: int = Field(default=None, description="End Hour, if ets not set")
+ minute2: int = Field(
+ default=None, description="End Minute, if ets not set"
+ )
+
+
+def run(ctx, start_response):
+ """Do something!"""
+ with get_sqlalchemy_conn("postgis") as conn:
+ df = gpd.read_postgis(
+ text("""
+ select label || ' ' || sigmet_type as name, label,
+ sigmet_type as type,
+ issue at time zone 'UTC' as issue,
+ expire at time zone 'UTC' as expire, geom,
+ product_id as PROD_ID
+ from sigmets_archive WHERE issue >= :sts and
+ issue < :ets ORDER by issue ASC
+ """),
+ conn,
+ params={
+ "sts": ctx["sts"],
+ "ets": ctx["ets"],
+ },
+ geom_col="geom",
+ )
+ if df.empty:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return b"ERROR: no results found for your query"
+ for col in ["issue", "expire"]:
+ df[col] = df[col].dt.strftime(ISO8601)
+ df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
+ fn = f"sigmets_{ctx['sts']:%Y%m%d%H%M}_{ctx['ets']:%Y%m%d%H%M}"
+ if ctx["format"] == "kml":
+ fp = BytesIO()
+ with fiona.drivers():
+ df.to_file(fp, driver="KML", NameField="NAME", engine="fiona")
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.kml"),
+ ]
+ start_response("200 OK", headers)
+ return fp.getvalue()
+ if ctx["format"] == "csv":
+ fp = StringIO()
+ df.drop(columns="geom").to_csv(fp, index=False)
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.csv"),
+ ]
+ start_response("200 OK", headers)
+ return fp.getvalue().encode("ascii")
+ if ctx["format"] == "excel":
+ fp = BytesIO()
+ with pd.ExcelWriter(fp) as writer:
+ df.drop(columns="geom").to_excel(writer, index=False)
+ headers = [
+ ("Content-type", EXL),
+ ("Content-Disposition", f"attachment; filename={fn}.xlsx"),
+ ]
+ start_response("200 OK", headers)
+ return fp.getvalue()
+
+ schema = {
+ "geometry": "Polygon",
+ "properties": {
+ "NAME": "str:64",
+ "LABEL": "str:16",
+ "TYPE": "str:1",
+ "ISSUE": "str:20",
+ "EXPIRE": "str:20",
+ "PROD_ID": "str:36",
+ },
+ }
+ with tempfile.TemporaryDirectory() as tmpdir:
+ df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
+
+ zio = BytesIO()
+ with zipfile.ZipFile(
+ zio, mode="w", compression=zipfile.ZIP_DEFLATED
+ ) as zf:
+ with open(PRJFILE, encoding="utf-8") as fh:
+ zf.writestr(f"{fn}.prj", fh.read())
+ for suffix in ["shp", "shx", "dbf"]:
+ zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.zip"),
+ ]
+ start_response("200 OK", headers)
+
+ return zio.getvalue()
+
+
+@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """Do something fun!"""
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("GET start or end time parameters missing")
+ ctx = {
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ "format": environ["format"],
+ }
+ return [run(ctx, start_response)]
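
A sketch of the `excel` output added in the changelog above:

    from io import BytesIO

    import pandas as pd
    import requests

    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/sigmets.py"
    params = {
        "sts": "2024-07-01T00:00Z",
        "ets": "2024-07-02T00:00Z",
        "format": "excel",
    }
    resp = requests.get(URL, params=params, timeout=120)
    resp.raise_for_status()
    df = pd.read_excel(BytesIO(resp.content))
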
diff --git a/pylib/iemweb/request/gis/spc_mcd.py b/pylib/iemweb/request/gis/spc_mcd.py
new file mode 100644
index 0000000000..19371ad129
--- /dev/null
+++ b/pylib/iemweb/request/gis/spc_mcd.py
@@ -0,0 +1,144 @@
+""".. title:: Storm Prediction Center Mesoscale Convective Discussion
+
+Documentation for /cgi-bin/request/gis/spc_mcd.py
+-------------------------------------------------
+
+The IEM archives Storm Prediction Center Mesoscale Convective Discussions (MCD)
+in real-time and makes them available for download via this service. The
+raw product text is not emitted here, but the ``prod_id`` is included, which
+is a reference to the raw product text.
+
+Changelog
+---------
+
+- 2024-05-29: Initial documentation
+
+Example Usage
+-------------
+
+Return all MCDs for 2023
+
+https://mesonet.agron.iastate.edu/cgi-bin/request/gis/spc_mcd.py?\
+sts=2023-01-01T00:00Z&ets=2024-01-01T00:00Z
+
+"""
+
+# Local
+import tempfile
+import zipfile
+from io import BytesIO
+
+# Third Party
+import geopandas as gpd
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import CGIModel, iemapp
+
+PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ sts: AwareDatetime = Field(None, description="Start Time")
+ ets: AwareDatetime = Field(None, description="End Time")
+ year1: int = Field(
+ None, description="Start UTC Year when sts is not provided"
+ )
+ year2: int = Field(
+ None, description="End UTC Year when ets is not provided"
+ )
+ month1: int = Field(
+ None, description="Start UTC Month when sts is not provided"
+ )
+ month2: int = Field(
+ None, description="End UTC Month when ets is not provided"
+ )
+ day1: int = Field(
+ None, description="Start UTC Day when sts is not provided"
+ )
+ day2: int = Field(None, description="End UTC Day when ets is not provided")
+ hour1: int = Field(
+ None, description="Start UTC Hour when sts is not provided"
+ )
+ hour2: int = Field(
+ None, description="End UTC Hour when ets is not provided"
+ )
+ minute1: int = Field(
+ None, description="Start UTC Minute when sts is not provided"
+ )
+ minute2: int = Field(
+ None, description="End UTC Minute when ets is not provided"
+ )
+
+
+def run(ctx, start_response):
+ """Do something!"""
+ common = "at time zone 'UTC', 'YYYYMMDDHH24MI'"
+ schema = {
+ "geometry": "Polygon",
+ "properties": {
+ "ISSUE": "str:12",
+ "EXPIRE": "str:12",
+ "PROD_ID": "str:35",
+ "YEAR": "int",
+ "NUM": "int",
+ "CONFIDEN": "int",
+ "CONCERN": "str:64",
+ },
+ }
+ with get_sqlalchemy_conn("postgis") as conn:
+ df = gpd.read_postgis(
+ "select "
+ f"to_char(issue {common}) as issue, "
+ f"to_char(expire {common}) as expire, "
+ "product_id as prod_id, year, num, watch_confidence as confiden, "
+ "concerning as concern, geom "
+ "from mcd WHERE issue >= %s and "
+ "issue < %s ORDER by issue ASC",
+ conn,
+ params=(
+ ctx["sts"],
+ ctx["ets"],
+ ),
+ geom_col="geom",
+ )
+ if df.empty:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return b"ERROR: no results found for your query"
+ df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
+ fn = f"mcd_{ctx['sts']:%Y%m%d%H%M}_{ctx['ets']:%Y%m%d%H%M}"
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
+
+ zio = BytesIO()
+ with zipfile.ZipFile(
+ zio, mode="w", compression=zipfile.ZIP_DEFLATED
+ ) as zf:
+ with open(PRJFILE, encoding="utf-8") as fh:
+ zf.writestr(f"{fn}.prj", fh.read())
+ for suffix in ["shp", "shx", "dbf"]:
+ zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.zip"),
+ ]
+ start_response("200 OK", headers)
+
+ return zio.getvalue()
+
+
+@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """Do something fun!"""
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("GET sts/ets parameter not provided")
+ if environ["sts"] > environ["ets"]:
+ environ["sts"], environ["ets"] = environ["ets"], environ["sts"]
+ ctx = {
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ }
+ return [run(ctx, start_response)]
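
A consumption sketch for the zipped shapefile, e.g. tallying discussions by the CONCERN attribute defined in the schema above:

    import geopandas as gpd
    import requests

    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/spc_mcd.py"
    params = {"sts": "2023-01-01T00:00Z", "ets": "2024-01-01T00:00Z"}
    resp = requests.get(URL, params=params, timeout=300)
    resp.raise_for_status()
    with open("mcd.zip", "wb") as fh:
        fh.write(resp.content)
    df = gpd.read_file("mcd.zip")
    print(df.groupby("CONCERN").size())
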
diff --git a/pylib/iemweb/request/gis/spc_outlooks.py b/pylib/iemweb/request/gis/spc_outlooks.py
new file mode 100644
index 0000000000..0eb353a38f
--- /dev/null
+++ b/pylib/iemweb/request/gis/spc_outlooks.py
@@ -0,0 +1,158 @@
+""".. title:: Download SPC Convective and Fire Weather or WPC ERO Outlooks
+
+Documentation for /cgi-bin/request/gis/spc_outlooks.py
+------------------------------------------------------
+
+This application allows for the download of SPC Convective and Fire Weather
+or WPC Excessive Rainfall Outlooks in shapefile format.
+
+Changelog
+---------
+
+- 2024-06-14: Initial documentation of this backend
+
+Example Requests
+----------------
+
+Provide all of the day 2 convective outlooks for the year 2024:
+
+https://mesonet.agron.iastate.edu/cgi-bin/request/gis/spc_outlooks.py?d=2&\
+type=C&sts=2024-01-01T00:00Z&ets=2025-01-01T00:00Z
+
+"""
+
+# Local
+import tempfile
+import zipfile
+from io import BytesIO
+
+# Third Party
+import geopandas as gpd
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+from sqlalchemy import text
+
+PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ d: ListOrCSVType = Field(
+ ["1", "2", "3", "4", "5", "6", "7", "8"], description="Days to include"
+ )
+ ets: AwareDatetime = Field(
+ None, description="End of the period to include"
+ )
+ geom: str = Field(
+ "geom_layers",
+ description=(
+ "Express geometries either as layers or non-overlapping "
+ "geometries."
+ ),
+ pattern="geom_layers|geom",
+ )
+ sts: AwareDatetime = Field(
+ None, description="Start of the period to include"
+ )
+ type: ListOrCSVType = Field(
+ ["C", "F"], description="Outlook types to include"
+ )
+ year1: int = Field(None, description="Start year when sts is not set.")
+ month1: int = Field(None, description="Start month when sts is not set.")
+ day1: int = Field(None, description="Start day when sts is not set.")
+ hour1: int = Field(None, description="Start hour when sts is not set.")
+ minute1: int = Field(None, description="Start minute when sts is not set.")
+ year2: int = Field(None, description="End year when ets is not set.")
+ month2: int = Field(None, description="End month when ets is not set.")
+ day2: int = Field(None, description="End day when ets is not set.")
+ hour2: int = Field(None, description="End hour when ets is not set.")
+ minute2: int = Field(None, description="End minute when ets is not set.")
+
+
+def get_context(environ):
+ """Figure out the CGI variables passed to this script"""
+ types = [x[0].upper() for x in environ["type"]]
+ days = [int(x) for x in environ["d"]]
+ return {
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ "types": types,
+ "days": days,
+ "geom_col": environ["geom"],
+ }
+
+
+def run(ctx, start_response):
+ """Do something!"""
+ common = "at time zone 'UTC', 'YYYYMMDDHH24MI'"
+ schema = {
+ "geometry": "MultiPolygon",
+ "properties": {
+ "ISSUE": "str:12",
+ "EXPIRE": "str:12",
+ "PRODISS": "str:12",
+ "TYPE": "str:1",
+ "DAY": "int",
+ "THRESHOLD": "str:4",
+ "CATEGORY": "str:48", # 43 as checked max, to save space
+ "CYCLE": "int",
+ },
+ }
+ with get_sqlalchemy_conn("postgis") as conn:
+ df = gpd.read_postgis(
+ text(f"""select
+ to_char(issue {common}) as issue,
+ to_char(expire {common}) as expire,
+ to_char(product_issue {common}) as prodiss,
+ outlook_type as type, day, threshold, category, cycle,
+ {ctx["geom_col"]} as geom
+ from spc_outlooks WHERE product_issue >= :sts and
+ product_issue < :ets and outlook_type = ANY(:types)
+ and day = ANY(:days)
+ ORDER by product_issue ASC
+ """),
+ conn,
+ params={
+ "sts": ctx["sts"],
+ "ets": ctx["ets"],
+ "types": ctx["types"],
+ "days": ctx["days"],
+ },
+ geom_col="geom",
+ )
+ if df.empty:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return b"ERROR: no results found for your query"
+ df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
+ fn = f"outlooks_{ctx['sts']:%Y%m%d%H%M}_{ctx['ets']:%Y%m%d%H%M}"
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
+
+ zio = BytesIO()
+ with zipfile.ZipFile(
+ zio, mode="w", compression=zipfile.ZIP_DEFLATED
+ ) as zf:
+ with open(PRJFILE, encoding="utf-8") as fh:
+ zf.writestr(f"{fn}.prj", fh.read())
+ for suffix in ["shp", "shx", "dbf"]:
+ zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.zip"),
+ ]
+ start_response("200 OK", headers)
+
+ return zio.getvalue()
+
+
+@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """Do something fun!"""
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("GET start/end time parameters missing")
+ ctx = get_context(environ)
+ return [run(ctx, start_response)]
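
`ListOrCSVType` fields accept either repeated parameters or a CSV string; a sketch requesting day 1 and day 2 convective outlooks with non-overlapping geometries:

    import requests

    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/spc_outlooks.py"
    # requests encodes a list of tuples as repeated query parameters,
    # so d=1&d=2 below is equivalent to d=1,2
    params = [
        ("d", "1"),
        ("d", "2"),
        ("type", "C"),
        ("geom", "geom"),  # non-overlapping geometries instead of layers
        ("sts", "2024-01-01T00:00Z"),
        ("ets", "2024-02-01T00:00Z"),
    ]
    resp = requests.get(URL, params=params, timeout=300)
    resp.raise_for_status()
    with open("outlooks.zip", "wb") as fh:
        fh.write(resp.content)
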
diff --git a/pylib/iemweb/request/gis/spc_watch.py b/pylib/iemweb/request/gis/spc_watch.py
new file mode 100644
index 0000000000..e418e5ebe6
--- /dev/null
+++ b/pylib/iemweb/request/gis/spc_watch.py
@@ -0,0 +1,175 @@
+""".. title:: Download SPC Watch Polygons and Metadata
+
+Documentation for /cgi-bin/request/gis/spc_watch.py
+---------------------------------------------------
+
+The IEM archives the Storm Prediction Center (SPC) watch polygons and
+associated metadata. Please note that these polygons are no longer the
+official watch geography; watch-by-county is now the official product. The
+polygons remain generally useful and reasonably approximate the actual
+watch extent.
+
+Changelog
+---------
+
+- 2024-06-27: Default `hour1`, `hour2`, `minute1`, `minute2` to 0, so they do
+  not need to be provided.
+- 2024-06-09: Initial Documentation
+
+Example Usage
+-------------
+
+Return all watch polygons for UTC 2024 in GeoJSON.
+
+https://mesonet.agron.iastate.edu/cgi-bin/request/gis/spc_watch.py?\
+sts=2024-01-01T00:00:00Z&ets=2025-01-01T00:00:00Z&format=geojson
+
+"""
+
+# Local
+import tempfile
+import zipfile
+from io import BytesIO
+
+# Third Party
+import fiona
+import geopandas as gpd
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import CGIModel, iemapp
+from sqlalchemy import text
+
+fiona.supported_drivers["KML"] = "rw"
+PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ ets: AwareDatetime = Field(None, description="End Time")
+ format: str = Field("shp", description="Output format")
+ sts: AwareDatetime = Field(None, description="Start Time")
+ year1: int = Field(None, description="Start year when sts is not provided")
+ year2: int = Field(None, description="End year when ets is not provided")
+ month1: int = Field(
+ None, description="Start month when sts is not provided"
+ )
+ month2: int = Field(None, description="End month when ets is not provided")
+ day1: int = Field(None, description="Start day when sts is not provided")
+ day2: int = Field(None, description="End day when ets is not provided")
+ hour1: int = Field(0, description="Start hour when sts is not provided")
+ hour2: int = Field(0, description="End hour when ets is not provided")
+ minute1: int = Field(
+ 0, description="Start minute when sts is not provided"
+ )
+ minute2: int = Field(0, description="End minute when ets is not provided")
+
+
+def start_headers(start_response, ctx, fn):
+ """Figure out the proper headers for the output"""
+ suffix = "zip" if ctx["format"] == "shp" else ctx["format"]
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.{suffix}"),
+ ]
+ start_response("200 OK", headers)
+
+
+def run(environ, start_response):
+ """Do something!"""
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("Missing start or end time")
+ common = "at time zone 'UTC', 'YYYYMMDDHH24MI'"
+ schema = {
+ "geometry": "MultiPolygon",
+ "properties": {
+ "ISSUE": "str:12",
+ "EXPIRE": "str:12",
+ "SEL": "str:5",
+ "TYPE": "str:3",
+ "NUM": "int",
+ "P_TORTWO": "int",
+ "P_TOREF2": "int",
+ "P_WIND10": "int",
+ "P_WIND65": "int",
+ "P_HAIL10": "int",
+ "P_HAIL2I": "int",
+ "P_HAILWND": "int",
+ "MAX_HAIL": "float",
+ "MAX_GUST": "int",
+ "MAX_TOPS": "int",
+ "MV_DRCT": "int",
+ "MV_SKNT": "int",
+ "IS_PDS": "bool",
+ },
+ }
+ with get_sqlalchemy_conn("postgis") as conn:
+ df = gpd.read_postgis(
+ text(f"""select
+ to_char(issued {common}) as issue,
+ to_char(expired {common}) as expire,
+ sel, type, num, geom,
+ tornadoes_2m as p_tortwo, tornadoes_1m_strong as p_toref2,
+ wind_10m as p_wind10, wind_1m_65kt as p_wind65,
+ hail_10m as p_hail10, hail_1m_2inch as p_hail2i,
+ hail_wind_6m as p_hailwnd, max_hail_size as max_hail,
+ max_wind_gust_knots as max_gust, max_tops_feet as max_tops,
+ storm_motion_drct as mv_drct, storm_motion_sknt as mv_sknt,
+ is_pds
+ from watches WHERE issued >= :sts and
+ issued < :ets ORDER by issued ASC
+ """),
+ conn,
+ params={
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ },
+ geom_col="geom",
+ )
+ if df.empty:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return b"ERROR: no results found for your query"
+ df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
+ fn = f"watches_{environ['sts']:%Y%m%d%H%M}_{environ['ets']:%Y%m%d%H%M}"
+ start_headers(start_response, environ, fn)
+ if environ["format"] == "csv":
+ return df.to_csv(index=False).encode("utf-8")
+ if environ["format"] == "geojson":
+ with tempfile.NamedTemporaryFile("w", delete=True) as tmp:
+ df.to_file(tmp.name, driver="GeoJSON")
+ with open(tmp.name, encoding="utf8") as fh:
+ res = fh.read()
+ return res.encode("utf-8")
+ if environ["format"] == "kml":
+ df["NAME"] = (
+ df["ISSUE"].str.slice(0, 4)
+ + ": "
+ + df["TYPE"]
+ + " #"
+ + df["NUM"].apply(str)
+ )
+ fp = BytesIO()
+ with fiona.drivers():
+ df.to_file(fp, driver="KML", NameField="NAME", engine="fiona")
+ return fp.getvalue()
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
+
+ zio = BytesIO()
+ with zipfile.ZipFile(
+ zio, mode="w", compression=zipfile.ZIP_DEFLATED
+ ) as zf:
+ with open(PRJFILE, encoding="utf-8") as fh:
+ zf.writestr(f"{fn}.prj", fh.read())
+ for suffix in ["shp", "shx", "dbf"]:
+ zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
+
+ return zio.getvalue()
+
+
+@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """Do something fun!"""
+ return [run(environ, start_response)]
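
A sketch of the `geojson` branch from the docstring example; whether the boolean `IS_PDS` survives the GeoJSON round trip as a true boolean is an assumption here:

    from io import BytesIO

    import geopandas as gpd
    import requests

    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/spc_watch.py"
    params = {
        "sts": "2024-01-01T00:00Z",
        "ets": "2025-01-01T00:00Z",
        "format": "geojson",
    }
    resp = requests.get(URL, params=params, timeout=300)
    resp.raise_for_status()
    watches = gpd.read_file(BytesIO(resp.content))
    pds = watches[watches["IS_PDS"]]  # Particularly Dangerous Situation only
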
diff --git a/pylib/iemweb/request/gis/sps.py b/pylib/iemweb/request/gis/sps.py
new file mode 100644
index 0000000000..7c9f6b4a65
--- /dev/null
+++ b/pylib/iemweb/request/gis/sps.py
@@ -0,0 +1,91 @@
+""".. title:: Special Weather Statement (SPS) Data Service
+
+Documentation for /cgi-bin/request/gis/sps.py
+---------------------------------------------
+
+This service emits polygons associated with Special Weather Statements (SPS).
+
+"""
+
+# Local
+import tempfile
+import zipfile
+from io import BytesIO
+
+# Third Party
+import geopandas as gpd
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.util import get_sqlalchemy_conn
+from pyiem.webutil import iemapp
+
+PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
+
+
+def run(ctx, start_response):
+ """Do something!"""
+ common = "at time zone 'UTC', 'YYYYMMDDHH24MI'"
+ schema = {
+ "geometry": "Polygon",
+ "properties": {
+ "ISSUE": "str:12",
+ "EXPIRE": "str:12",
+ "PROD_ID": "str:32",
+ "WFO": "str:3",
+ "LNDSPOUT": "str:64",
+ "WTRSPOUT": "str:64",
+ "MAX_HAIL": "str:16",
+ "MAX_WIND": "str:16",
+ "TML_VALD": "str:12",
+ "TML_DRCT": "int",
+ "TML_SKNT": "int",
+ },
+ }
+ with get_sqlalchemy_conn("postgis") as pgconn:
+ df = gpd.read_postgis(
+ "select "
+ f"to_char(issue {common}) as issue, "
+ f"to_char(expire {common}) as expire, "
+ f"product_id as prod_id, "
+ "wfo, landspout as lndspout, waterspout as wtrspout, "
+ "max_hail_size as max_hail, max_wind_gust as max_wind, "
+ f"to_char(tml_valid {common}) as tml_vald, "
+ "tml_direction as tml_drct, "
+ "tml_sknt, geom from sps WHERE issue >= %s and "
+ "issue < %s and not ST_isempty(geom) ORDER by issue ASC",
+ pgconn,
+ params=(ctx["sts"], ctx["ets"]),
+ geom_col="geom",
+ )
+ if df.empty:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return b"ERROR: no results found for your query"
+ df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
+ fn = f"sps_{ctx['sts']:%Y%m%d%H%M}_{ctx['ets']:%Y%m%d%H%M}"
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
+
+ zio = BytesIO()
+ with zipfile.ZipFile(
+ zio, mode="w", compression=zipfile.ZIP_DEFLATED
+ ) as zf:
+ with open(PRJFILE, encoding="ascii") as fp:
+ zf.writestr(f"{fn}.prj", fp.read())
+ for suffix in ("shp", "shx", "dbf"):
+ zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.zip"),
+ ]
+ start_response("200 OK", headers)
+
+ return zio.getvalue()
+
+
+@iemapp(default_tz="UTC", help=__doc__)
+def application(environ, start_response):
+ """Do something fun!"""
+ if "sts" not in environ:
+ raise IncompleteWebRequest("GET start timestamp params missing")
+ ctx = {"sts": environ["sts"], "ets": environ["ets"]}
+ return [run(ctx, start_response)]
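
A quick sketch that inspects the returned archive members rather than parsing them; note that an empty result period instead yields a plain-text ERROR body, which `ZipFile` would reject:

    import zipfile
    from io import BytesIO

    import requests

    URL = "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/sps.py"
    params = {"sts": "2024-05-20T00:00Z", "ets": "2024-05-21T00:00Z"}
    resp = requests.get(URL, params=params, timeout=120)
    resp.raise_for_status()
    with zipfile.ZipFile(BytesIO(resp.content)) as zf:
        print(zf.namelist())  # sps_<sts>_<ets>.{prj,shp,shx,dbf}
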
diff --git a/pylib/iemweb/request/gis/watch_by_county.py b/pylib/iemweb/request/gis/watch_by_county.py
new file mode 100644
index 0000000000..f7ef370710
--- /dev/null
+++ b/pylib/iemweb/request/gis/watch_by_county.py
@@ -0,0 +1,140 @@
+"""Watch by county, a one-off"""
+
+import tempfile
+import zipfile
+from io import BytesIO
+
+from osgeo import ogr
+from pydantic import Field
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.util import utc
+from pyiem.webutil import CGIModel, iemapp
+
+ogr.UseExceptions()
+PROJFILE = "/opt/iem/data/gis/meta/4326.prj"
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ etn: int = Field(None, description="Event ID")
+ year: int = Field(None, description="Year of valid timestamp")
+ month: int = Field(None, description="Month of valid timestamp")
+ day: int = Field(None, description="Day of valid timestamp")
+ hour: int = Field(None, description="Hour of valid timestamp")
+ minute: int = Field(None, description="Minute of valid timestamp")
+
+
+def get_ts_fn(environ):
+ """Figure out what is requested."""
+ # Get CGI vars
+ if environ["year"] is not None:
+ ts = utc(
+ environ["year"],
+ environ["month"],
+ environ["day"],
+ environ["hour"],
+ environ["minute"],
+ )
+ fn = f"watch_by_county_{ts:%Y%m%d%H%M}"
+ else:
+ ts = utc()
+ fn = "watch_by_county"
+ return ts, fn
+
+
+@iemapp(help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """Go Main Go"""
+ try:
+ ts, fn = get_ts_fn(environ)
+ except Exception as exp:
+ raise IncompleteWebRequest("bad input provided") from exp
+ if environ["etn"] is not None:
+ etnLimiter = f"and eventid = {int(environ.get('etn'))}"
+ fn = f"watch_by_county_{ts:Y%m%d%H%M}_{int(environ.get('etn'))}"
+ else:
+ etnLimiter = ""
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ table = f"warnings_{ts.year}"
+ source = ogr.Open(
+ "PG:host=iemdb-postgis.local dbname=postgis "
+ f"user=nobody tables={table}(tgeom)"
+ )
+
+ out_driver = ogr.GetDriverByName("ESRI Shapefile")
+ out_ds = out_driver.CreateDataSource(f"{tmpdir}/{fn}.shp")
+ out_layer = out_ds.CreateLayer("polygon", None, ogr.wkbPolygon)
+
+ fd = ogr.FieldDefn("ISSUED", ogr.OFTString)
+ fd.SetWidth(12)
+ out_layer.CreateField(fd)
+
+ fd = ogr.FieldDefn("EXPIRED", ogr.OFTString)
+ fd.SetWidth(12)
+ out_layer.CreateField(fd)
+
+ fd = ogr.FieldDefn("PHENOM", ogr.OFTString)
+ fd.SetWidth(2)
+ out_layer.CreateField(fd)
+
+ fd = ogr.FieldDefn("SIG", ogr.OFTString)
+ fd.SetWidth(1)
+ out_layer.CreateField(fd)
+
+ fd = ogr.FieldDefn("ETN", ogr.OFTInteger)
+ out_layer.CreateField(fd)
+
+ tt = ts.strftime("%Y-%m-%d %H:%M+00")
+ sql = f"""
+ select phenomena, eventid, ST_multi(ST_union(u.geom)) as tgeom,
+ max(to_char(expire at time zone 'UTC', 'YYYYMMDDHH24MI'))
+ as utcexpire,
+ min(to_char(issue at time zone 'UTC', 'YYYYMMDDHH24MI'))
+ as utcissue
+ from warnings_{ts.year} w JOIN ugcs u on (u.gid = w.gid)
+ WHERE significance = 'A' and phenomena IN ('TO','SV')
+        and issue > '{tt}'::timestamp - '3 days'::interval
+ and issue <= '{tt}' and
+ expire > '{tt}' {etnLimiter}
+ GROUP by phenomena, eventid ORDER by phenomena ASC
+ """
+
+ data = source.ExecuteSQL(sql)
+
+ while True:
+ feat = data.GetNextFeature()
+ if not feat:
+ break
+ geom = feat.GetGeometryRef()
+
+ featDef = ogr.Feature(out_layer.GetLayerDefn())
+ featDef.SetGeometry(geom)
+ featDef.SetField("PHENOM", feat.GetField("phenomena"))
+ featDef.SetField("SIG", "A")
+ featDef.SetField("ETN", feat.GetField("eventid"))
+ featDef.SetField("ISSUED", feat.GetField("utcissue"))
+ featDef.SetField("EXPIRED", feat.GetField("utcexpire"))
+
+ out_layer.CreateFeature(featDef)
+ feat.Destroy()
+
+ source.Destroy()
+ out_ds.Destroy()
+
+ zio = BytesIO()
+ with zipfile.ZipFile(
+ zio, mode="w", compression=zipfile.ZIP_DEFLATED
+ ) as zf:
+ with open(PROJFILE, encoding="ascii") as fp:
+ zf.writestr(f"{fn}.prj", fp.read())
+ for suffix in ("shp", "shx", "dbf"):
+ zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
+
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.zip"),
+ ]
+ start_response("200 OK", headers)
+ return [zio.getvalue()]
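
A usage sketch for this one-off: pin a historical UTC timestamp via the discrete fields, or omit them entirely for the currently active watches:

    import requests

    URL = (
        "https://mesonet.agron.iastate.edu/cgi-bin/request/gis/"
        "watch_by_county.py"
    )
    # All five components are required together when pinning a timestamp.
    params = {"year": 2024, "month": 5, "day": 21, "hour": 20, "minute": 0}
    resp = requests.get(URL, params=params, timeout=120)
    resp.raise_for_status()
    with open("watch_by_county.zip", "wb") as fh:
        fh.write(resp.content)
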
diff --git a/pylib/iemweb/request/gis/watchwarn.py b/pylib/iemweb/request/gis/watchwarn.py
new file mode 100644
index 0000000000..8583c9ce84
--- /dev/null
+++ b/pylib/iemweb/request/gis/watchwarn.py
@@ -0,0 +1,593 @@
+""".. title:: NWS Watch/Warning/Advisory (WWA) Data Service
+
+Return to `Download User Interface `_.
+
+Documentation for /cgi-bin/request/gis/watchwarn.py
+---------------------------------------------------
+
+This service emits shapefiles (with an additional CSV included) or Excel
+files. This service is rather blunt force, so perhaps you should first review
+the mountain of ad hoc JSON/API services found at
+`IEM Legacy JSON Services `_ or at
+`IEM API Services `_ .
+
+Changelog
+---------
+
+- 2024-07-03: Added an `accept=csv` option to allow for CSV output.
+- 2024-06-26: Added `limitpds` parameter to limit the request to only include
+  products that have a PDS (Particularly Dangerous Situation) tag or phrasing.
+- 2024-05-14: To mitigate against large requests that overwhelm the server, a
+  limit of one year's worth of data is now in place for requests that do not
+  limit by state, phenomena, or wfo.
+- 2024-05-09: Migrated to pydantic based CGI input validation.
+
+Example Usage
+-------------
+
+Return all Areal Flood, Flash Flood, Severe Thunderstorm, and Tornado Watch
+and Warnings for the state of Mississippi during 2024. Note how the phenomena
+and significance parameters are repeated so that each combination is present.
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/gis/watchwarn.py?\
+accept=shapefile&sts=2024-01-01T00:00Z&ets=2025-01-01T00:00Z&\
+location_group=states&states=MS&limitps=yes&phenomena=FF,FA,SV,TO,FF,FA,SV,TO&\
+significance=W,W,W,W,A,A,A,A
+
+Return all Tornado Warnings for the Des Moines WFO in shapefile format during
+2023.
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/gis/watchwarn.py?accept=shapefile&sts=2023-01-01T00:00Z&ets=2024-01-01T00:00Z&wfo[]=DMX&limitps=yes&phenomena=TO&significance=W
+
+"""
+
+import datetime
+import tempfile
+import zipfile
+from io import BytesIO
+
+import fiona
+import pandas as pd
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_dbconnc, get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.util import utc
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+from shapely.geometry import mapping
+from shapely.wkb import loads
+
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ accept: str = Field(
+ "shapefile",
+ pattern="^(shapefile|excel|csv)$",
+ description="The format to return, either shapefile or excel.",
+ )
+ addsvs: str = Field(
+ "no",
+ pattern="^(yes|no)$",
+ description="Include polygons that were included within any followup "
+ "statements after issuance.",
+ )
+ ets: AwareDatetime = Field(
+ None,
+ description="The end timestamp in UTC. The format is ISO8601, e.g. "
+ "2010-06-01T00:00Z.",
+ )
+ limit0: str = Field(
+ "no",
+ pattern="^(yes|no)$",
+ description="If yes, only include Tornado, Severe Thunderstorm, "
+ "Flash Flood, and Marine Warnings.",
+ )
+ limit1: str = Field(
+ "no",
+ pattern="^(yes|no)$",
+ description="If yes, only include Storm Based Warnings.",
+ )
+ limit2: str = Field(
+ "no",
+ pattern="^(yes|no)$",
+ description="If yes, only include Emergency Warnings.",
+ )
+ limitpds: bool = Field(
+ False,
+ description=(
+ "If yes, only include products that have a PDS "
+ "(Particularly Dangerous Situation) tag or phrasing."
+ ),
+ )
+ limitps: str = Field(
+ "no",
+ pattern="^(yes|no)$",
+ description="If yes, only include the specified phenomena and "
+ "significance.",
+ )
+ location_group: str = Field(
+ "wfo",
+ pattern="^(wfo|states)$",
+ description="The location group to use, either wfo or states.",
+ )
+ phenomena: ListOrCSVType = Field(
+ ["TO"],
+ description="The two character VTEC phenomena(s) to include. If you "
+ "provide more than one value, the length must correspond and align "
+ "with the ``significance`` parameter.",
+ )
+ simple: str = Field(
+ "no",
+ pattern="^(yes|no)$",
+ description="If yes, use a simplified geometry for the UGC "
+ "counties/zones.",
+ )
+ significance: ListOrCSVType = Field(
+ ["W"],
+ description="The one character VTEC significance to include, if you "
+ "provide more than one value, the length must correspond "
+ "and align with the ``phenomena`` parameter.",
+ )
+ states: ListOrCSVType = Field(
+ None, description="List of states to include data for."
+ )
+ sts: AwareDatetime = Field(
+ None,
+ description="The start timestamp in UTC. The format is ISO8601, e.g. "
+ "2010-06-01T00:00Z.",
+ )
+ timeopt: int = Field(
+ 1,
+ description="The time option to use, either 1 or 2, default is 1, "
+ "which uses the start and end timestamps to determine "
+ "which events to include. Option 2 uses the at timestamp "
+ "to determine which events to include.",
+ )
+ wfo: ListOrCSVType = Field(
+ None, description="List of WFOs to include data for."
+ )
+ wfos: ListOrCSVType = Field(
+ None, description="Legacy parameter, update to use ``wfo``."
+ )
+ year1: int = Field(
+ None,
+ description="The start timestamp components in UTC, if you specify a "
+ "sts parameter, these are ignored.",
+ )
+ year2: int = Field(
+ None,
+ description="The end timestamp components in UTC, if you specify a "
+ "ets parameter, these are ignored.",
+ )
+ year3: int = Field(
+ None,
+ description="The at timestamp components in UTC. When timeopt is 2, "
+ "this is used to find all events that were valid at this "
+ "time.",
+ )
+ month1: int = Field(
+ None,
+ description="The start timestamp components in UTC, if you specify a "
+ "sts parameter, these are ignored.",
+ )
+ month2: int = Field(
+ None,
+ description="The end timestamp components in UTC, if you specify a "
+ "ets parameter, these are ignored.",
+ )
+ month3: int = Field(
+ None,
+ description="The at timestamp components in UTC. When timeopt is 2, "
+ "this is used to find all events that were valid at this "
+ "time.",
+ )
+ day1: int = Field(
+ None,
+ description="The start timestamp components in UTC, if you specify a "
+ "sts parameter, these are ignored.",
+ )
+ day2: int = Field(
+ None,
+ description="The end timestamp components in UTC, if you specify a "
+ "ets parameter, these are ignored.",
+ )
+ day3: int = Field(
+ None,
+ description="The at timestamp components in UTC. When timeopt is 2, "
+ "this is used to find all events that were valid at this "
+ "time.",
+ )
+ hour1: int = Field(
+ None,
+ description="The start timestamp components in UTC, if you specify a "
+ "sts parameter, these are ignored.",
+ )
+ hour2: int = Field(
+ None,
+ description="The end timestamp components in UTC, if you specify a "
+ "ets parameter, these are ignored.",
+ )
+ hour3: int = Field(
+ None,
+ description="The at timestamp components in UTC. When timeopt is 2, "
+ "this is used to find all events that were valid at this "
+ "time.",
+ )
+ minute1: int = Field(
+ None,
+ description="The start timestamp components in UTC, if you specify a "
+ "sts parameter, these are ignored.",
+ )
+ minute2: int = Field(
+ None,
+ description="The end timestamp components in UTC, if you specify a "
+ "ets parameter, these are ignored.",
+ )
+ minute3: int = Field(
+ None,
+ description="The at timestamp components in UTC. When timeopt is 2, "
+ "this is used to find all events that were valid at this "
+ "time.",
+ )
+
+
+def dfmt(text):
+ """Produce a prettier format for CSV."""
+ if text is None or len(text) != 12:
+ return ""
+ return f"{text[:4]}-{text[4:6]}-{text[6:8]} {text[8:10]}:{text[10:12]}"
+
+
+def char3(wfos):
+    """Trim any 4 character WFO identifiers down to 3 characters."""
+    return [wfo[1:] if len(wfo) == 4 else wfo for wfo in wfos]
+
+
+def parse_wfo_location_group(environ):
+ """Parse wfoLimiter"""
+ limiter = ""
+ wfos = environ["wfo"]
+ if environ["wfos"]:
+ wfos = environ["wfos"]
+ if wfos is not None and "ALL" not in wfos:
+ if len(wfos) == 1:
+ wfo = wfos[0]
+ wfo = wfo[1:] if len(wfo) == 4 else wfo
+ limiter = f" and w.wfo = '{wfo}' "
+ else:
+ limiter = f" and w.wfo in {tuple(char3(wfos))} "
+
+ return limiter
+
+
+def build_sql(environ):
+ """Build the SQL statement."""
+ sts = environ["sts"]
+ ets = environ["ets"]
+ table_extra = ""
+ if environ["location_group"] == "states":
+ if environ["states"]:
+ states = [x[:2].upper() for x in environ["states"]]
+ states.append("XX") # Hack for 1 length
+ wfo_limiter = (
+ " and ST_Intersects(s.the_geom, w.geom) "
+ f"and s.state_abbr in {tuple(states)} "
+ )
+ wfo_limiter2 = f" and substr(w.ugc, 1, 2) in {tuple(states)} "
+ table_extra = " , states s "
+ else:
+ raise ValueError("No state specified")
+ else: # wfo
+ wfo_limiter = parse_wfo_location_group(environ)
+ wfo_limiter2 = wfo_limiter
+
+ if environ["timeopt"] != 2:
+ if sts is None or ets is None:
+ raise IncompleteWebRequest("Missing start or end time parameters")
+ # Keep size low
+ if wfo_limiter == "" and (ets - sts) > datetime.timedelta(days=366):
+ raise IncompleteWebRequest("Please shorten request to <1 year.")
+ # Change to postgis db once we have the wfo list
+ fn = f"wwa_{sts:%Y%m%d%H%M}_{ets:%Y%m%d%H%M}"
+ else:
+ year3 = int(environ.get("year3"))
+ month3 = int(environ.get("month3"))
+ day3 = int(environ.get("day3"))
+ hour3 = int(environ.get("hour3"))
+ minute3 = int(environ.get("minute3"))
+ sts = utc(year3, month3, day3, hour3, minute3)
+ ets = sts
+ fn = f"wwa_{sts:%Y%m%d%H%M}"
+
+ limiter = ""
+ if environ["limit0"] == "yes":
+ limiter = (
+ " and phenomena IN ('TO','SV','FF','MA') and significance = 'W' "
+ )
+ if environ["limitps"] == "yes":
+ phenom = environ["phenomena"]
+ sig = environ["significance"]
+ parts = []
+ for p, s in zip(phenom, sig):
+ parts.append(
+ f"(phenomena = '{p[:2]}' and significance = '{s[:1]}') "
+ )
+ limiter = f" and ({' or '.join(parts)}) "
+
+ sbwlimiter = " WHERE gtype = 'P' " if environ["limit1"] == "yes" else ""
+
+ elimiter = " and is_emergency " if environ["limit2"] == "yes" else ""
+ pdslimiter = " and is_pds " if environ["limitpds"] else ""
+
+ warnings_table = "warnings"
+ sbw_table = "sbw"
+ if sts.year == ets.year:
+ warnings_table = f"warnings_{sts.year}"
+ sbw_table = f"sbw_{sts.year}"
+
+ geomcol = "geom"
+ if environ["simple"] == "yes":
+ geomcol = "simple_geom"
+
+ cols = (
+ "wfo, utc_issue, utc_expire, utc_prodissue, utc_init_expire, "
+ "phenomena, gtype, significance, eventid, status, ugc, area2d, "
+ "utc_updated, hvtec_nwsli, hvtec_severity, hvtec_cause, hvtec_record, "
+ "is_emergency, utc_polygon_begin, utc_polygon_end, windtag, hailtag, "
+ "tornadotag, damagetag, product_id "
+ )
+ if environ["accept"] not in ["excel", "csv"]:
+ cols = f"geo, {cols}"
+
+ timelimit = f"issue >= '{sts}' and issue < '{ets}'"
+ if environ["timeopt"] == 2:
+ timelimit = (
+ f"issue <= '{sts}' and "
+ f"issue > '{sts + datetime.timedelta(days=-30)}' and "
+ f"expire > '{sts}'"
+ )
+ else:
+ if wfo_limiter == "" and limiter == "" and (ets - sts).days > 366:
+ raise IncompleteWebRequest(
+ "You must limit your request to a year or less."
+ )
+ sbwtimelimit = timelimit
+ statuslimit = " status = 'NEW' "
+ if environ["addsvs"] == "yes":
+ statuslimit = " status != 'CAN' "
+ sbwtimelimit = timelimit.replace(
+ "issue",
+ "coalesce(issue, polygon_begin)",
+ )
+ # NB: need distinct since state join could return multiple
+ return (
+ f"""
+ WITH stormbased as (
+ SELECT distinct w.geom as geo, 'P'::text as gtype, significance, wfo,
+ status, eventid, ''::text as ugc,
+ phenomena,
+ ST_area( ST_transform(w.geom,9311) ) / 1000000.0 as area2d,
+ to_char(expire at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_expire,
+ to_char(issue at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_issue,
+ to_char(issue at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_prodissue,
+ to_char(polygon_begin at time zone 'UTC', 'YYYYMMDDHH24MI')
+ as utc_polygon_begin,
+ to_char(polygon_end at time zone 'UTC', 'YYYYMMDDHH24MI')
+ as utc_polygon_end,
+ to_char(init_expire at time zone 'UTC',
+ 'YYYYMMDDHH24MI') as utc_init_expire,
+ to_char(updated at time zone 'UTC',
+ 'YYYYMMDDHH24MI') as utc_updated,
+ hvtec_nwsli, hvtec_severity, hvtec_cause, hvtec_record, is_emergency,
+ windtag, hailtag, tornadotag,
+ coalesce(damagetag, floodtag_damage) as damagetag,
+ product_id
+ from {sbw_table} w {table_extra}
+ WHERE {statuslimit} and {sbwtimelimit}
+ {wfo_limiter} {limiter} {elimiter} {pdslimiter}
+ ),
+ countybased as (
+ SELECT u.{geomcol} as geo, 'C'::text as gtype,
+ significance,
+ w.wfo, status, eventid, u.ugc, phenomena,
+ u.area2163 as area2d,
+ to_char(expire at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_expire,
+ to_char(issue at time zone 'UTC', 'YYYYMMDDHH24MI') as utc_issue,
+ to_char(product_issue at time zone 'UTC',
+ 'YYYYMMDDHH24MI') as utc_prodissue,
+ null as utc_polygon_begin,
+ null as utc_polygon_end,
+ to_char(init_expire at time zone 'UTC',
+ 'YYYYMMDDHH24MI') as utc_init_expire,
+ to_char(updated at time zone 'UTC',
+ 'YYYYMMDDHH24MI') as utc_updated,
+ hvtec_nwsli, hvtec_severity, hvtec_cause, hvtec_record, is_emergency,
+ null::real as windtag, null::real as hailtag, null::varchar as tornadotag,
+ null::varchar as damagetag,
+ product_ids[1] as product_id
+ from {warnings_table} w JOIN ugcs u on (u.gid = w.gid) WHERE
+ {timelimit} {wfo_limiter2} {limiter} {elimiter} {pdslimiter}
+ )
+ SELECT {cols} from stormbased UNION ALL
+ SELECT {cols} from countybased {sbwlimiter}
+ """,
+ fn,
+ )
+
+
+def do_excel(sql, fmt):
+ """Generate an Excel format response."""
+ with get_sqlalchemy_conn("postgis") as conn:
+ df = pd.read_sql(sql, conn, index_col=None)
+ if fmt == "excel" and len(df.index) >= 1048576:
+ raise IncompleteWebRequest("Result too large for Excel download")
+ # Back-convert datetimes :/
+ for col in (
+ "utc_issue utc_expire utc_prodissue utc_updated utc_polygon_begin "
+ "utc_polygon_end"
+ ).split():
+ df[col] = pd.to_datetime(
+ df[col],
+ errors="coerce",
+ format="%Y%m%d%H%M",
+ ).dt.strftime("%Y-%m-%d %H:%M")
+ if fmt == "csv":
+ return df.to_csv(index=False).encode("ascii")
+ bio = BytesIO()
+ # pylint: disable=abstract-class-instantiated
+ with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
+ df.to_excel(writer, sheet_name="VTEC WaWA", index=False)
+ return bio.getvalue()
+
+
+@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """Go Main Go"""
+ if environ["sts"] is None:
+ raise IncompleteWebRequest("Missing start time parameters")
+ try:
+ sql, fn = build_sql(environ)
+ except ValueError as exp:
+ start_response("400 Bad Request", [("Content-type", "text/plain")])
+ return [str(exp).encode("ascii")]
+
+ if environ["accept"] == "excel":
+ headers = [
+ ("Content-type", EXL),
+ ("Content-disposition", f"attachment; Filename={fn}.xlsx"),
+ ]
+ start_response("200 OK", headers)
+ return [do_excel(sql, environ["accept"])]
+ if environ["accept"] == "csv":
+ headers = [
+ ("Content-type", "text/csv"),
+ ("Content-disposition", f"attachment; Filename={fn}.csv"),
+ ]
+ start_response("200 OK", headers)
+ return [do_excel(sql, environ["accept"])]
+ pgconn, cursor = get_dbconnc("postgis", cursor_name="streaming")
+
+ cursor.execute(sql)
+
+ # Filenames are racy, so we need to have a temp folder
+ with tempfile.TemporaryDirectory() as tmpdir:
+ with open(f"{tmpdir}/{fn}.csv", "w", encoding="ascii") as csv:
+ csv.write(
+ "WFO,ISSUED,EXPIRED,INIT_ISS,INIT_EXP,PHENOM,GTYPE,SIG,ETN,"
+ "STATUS,NWS_UGC,AREA_KM2,UPDATED,HVTEC_NWSLI,HVTEC_SEVERITY,"
+ "HVTEC_CAUSE,HVTEC_RECORD,IS_EMERGENCY,POLYBEGIN,POLYEND,"
+ "WINDTAG,HAILTAG,TORNADOTAG,DAMAGETAG,PRODUCT_ID\n"
+ )
+ with fiona.open(
+ f"{tmpdir}/{fn}.shp",
+ "w",
+ crs="EPSG:4326",
+ driver="ESRI Shapefile",
+ schema={
+ "geometry": "MultiPolygon",
+ "properties": {
+ "WFO": "str:3",
+ "ISSUED": "str:12",
+ "EXPIRED": "str:12",
+ "INIT_ISS": "str:12",
+ "INIT_EXP": "str:12",
+ "PHENOM": "str:2",
+ "GTYPE": "str:1",
+ "SIG": "str:1",
+ "ETN": "str:4",
+ "STATUS": "str:3",
+ "NWS_UGC": "str:6",
+ "AREA_KM2": "float",
+ "UPDATED": "str:12",
+ "HV_NWSLI": "str:5",
+ "HV_SEV": "str:1",
+ "HV_CAUSE": "str:2",
+ "HV_REC": "str:2",
+ "EMERGENC": "bool",
+ "POLY_BEG": "str:12",
+ "POLY_END": "str:12",
+ "WINDTAG": "float",
+ "HAILTAG": "float",
+ "TORNTAG": "str:16",
+ "DAMAGTAG": "str:16",
+ "PROD_ID": "str:36",
+ },
+ },
+ ) as output:
+ for row in cursor:
+ if row["geo"] is None:
+ continue
+ mp = loads(row["geo"], hex=True)
+ csv.write(
+ f"{row['wfo']},{dfmt(row['utc_issue'])},"
+ f"{dfmt(row['utc_expire'])},"
+ f"{dfmt(row['utc_prodissue'])},"
+ f"{dfmt(row['utc_init_expire'])},"
+ f"{row['phenomena']},{row['gtype']},"
+ f"{row['significance']},{row['eventid']},"
+ f"{row['status']},"
+ f"{row['ugc']},{row['area2d']:.2f},"
+ f"{dfmt(row['utc_updated'])},"
+ f"{row['hvtec_nwsli']},{row['hvtec_severity']},"
+ f"{row['hvtec_cause']},{row['hvtec_record']},"
+ f"{row['is_emergency']},"
+ f"{dfmt(row['utc_polygon_begin'])},"
+ f"{dfmt(row['utc_polygon_end'])},{row['windtag']},"
+ f"{row['hailtag']},{row['tornadotag']},"
+ f"{row['damagetag']},{row['product_id']}\n"
+ )
+ output.write(
+ {
+ "properties": {
+ "WFO": row["wfo"],
+ "ISSUED": row["utc_issue"],
+ "EXPIRED": row["utc_expire"],
+ "INIT_ISS": row["utc_prodissue"],
+ "INIT_EXP": row["utc_init_expire"],
+ "PHENOM": row["phenomena"],
+ "GTYPE": row["gtype"],
+ "SIG": row["significance"],
+ "ETN": row["eventid"],
+ "STATUS": row["status"],
+ "NWS_UGC": row["ugc"],
+ "AREA_KM2": row["area2d"],
+ "UPDATED": row["utc_updated"],
+ "HV_NWSLI": row["hvtec_nwsli"],
+ "HV_SEV": row["hvtec_severity"],
+ "HV_CAUSE": row["hvtec_cause"],
+ "HV_REC": row["hvtec_record"],
+ "EMERGENC": row["is_emergency"],
+ "POLY_BEG": row["utc_polygon_begin"],
+ "POLY_END": row["utc_polygon_end"],
+ "WINDTAG": row["windtag"],
+ "HAILTAG": row["hailtag"],
+ "TORNTAG": row["tornadotag"],
+ "DAMAGTAG": row["damagetag"],
+ "PROD_ID": row["product_id"],
+ },
+ "geometry": mapping(mp),
+ }
+ )
+
+ with zipfile.ZipFile(
+ f"{tmpdir}/{fn}.zip", "w", zipfile.ZIP_DEFLATED
+ ) as zf:
+ for suffix in ["shp", "shx", "dbf", "cpg", "prj", "csv"]:
+ zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
+ with open(f"{tmpdir}/{fn}.zip", "rb") as fh:
+ payload = fh.read()
+ cursor.close()
+ pgconn.close()
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.zip"),
+ ]
+ start_response("200 OK", headers)
+
+ return [payload]
diff --git a/pylib/iemweb/request/gis/wpc_mpd.py b/pylib/iemweb/request/gis/wpc_mpd.py
new file mode 100644
index 0000000000..3cc5800d9c
--- /dev/null
+++ b/pylib/iemweb/request/gis/wpc_mpd.py
@@ -0,0 +1,82 @@
+""".. title:: WPC MPD Shapefile Download
+
+Documentation for /cgi-bin/request/gis/wpc_mpd.py
+-------------------------------------------------
+
+To be written.
+
+"""
+
+# Local
+import tempfile
+import zipfile
+from io import BytesIO
+
+# Third Party
+import geopandas as gpd
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.util import get_sqlalchemy_conn
+from pyiem.webutil import iemapp
+
+PRJFILE = "/opt/iem/data/gis/meta/4326.prj"
+
+
+@iemapp(default_tz="UTC", help=__doc__)
+def application(environ, start_response):
+ """Do something!"""
+ if "sts" not in environ:
+ raise IncompleteWebRequest("Missing start time GET params")
+ if environ["sts"] > environ["ets"]:
+ environ["sts"], environ["ets"] = environ["ets"], environ["sts"]
+ common = "at time zone 'UTC', 'YYYYMMDDHH24MI'"
+ schema = {
+ "geometry": "Polygon",
+ "properties": {
+ "ISSUE": "str:12",
+ "EXPIRE": "str:12",
+ "PROD_ID": "str:35",
+ "YEAR": "int",
+ "NUM": "int",
+ "CONCERN": "str:64",
+ },
+ }
+ with get_sqlalchemy_conn("postgis") as conn:
+ df = gpd.read_postgis(
+ "select "
+ f"to_char(issue {common}) as issue, "
+ f"to_char(expire {common}) as expire, "
+ "product_id as prod_id, year, num, "
+ "concerning as concern, geom "
+ "from mpd WHERE issue >= %s and "
+ "issue < %s ORDER by issue ASC",
+ conn,
+ params=(
+ environ["sts"],
+ environ["ets"],
+ ),
+ geom_col="geom",
+ )
+ if df.empty:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return [b"ERROR: no results found for your query"]
+ df.columns = [s.upper() if s != "geom" else "geom" for s in df.columns]
+ fn = f"mpd_{environ['sts']:%Y%m%d%H%M}_{environ['ets']:%Y%m%d%H%M}"
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ df.to_file(f"{tmpdir}/{fn}.shp", schema=schema, engine="fiona")
+
+ zio = BytesIO()
+ with zipfile.ZipFile(
+ zio, mode="w", compression=zipfile.ZIP_DEFLATED
+ ) as zf:
+ with open(PRJFILE, encoding="utf-8") as fh:
+ zf.writestr(f"{fn}.prj", fh.read())
+ for suffix in ["shp", "shx", "dbf"]:
+ zf.write(f"{tmpdir}/{fn}.{suffix}", f"{fn}.{suffix}")
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}.zip"),
+ ]
+ start_response("200 OK", headers)
+
+ return [zio.getvalue()]
diff --git a/pylib/iemweb/request/grx_rings.py b/pylib/iemweb/request/grx_rings.py
new file mode 100644
index 0000000000..38f21352a1
--- /dev/null
+++ b/pylib/iemweb/request/grx_rings.py
@@ -0,0 +1,109 @@
+"""Author: Zach Hiris"""
+
+import math
+from html import escape
+from io import StringIO
+
+from pyiem.util import html_escape
+from pyiem.webutil import iemapp
+
+
+def createCircleAroundWithRadius(lat, lon, radiusMiles):
+ """Create circle."""
+ latArray = []
+ lonArray = []
+
+ for brng in range(360):
+ lat2, lon2 = getLocation(lat, lon, brng, radiusMiles)
+ latArray.append(lat2)
+ lonArray.append(lon2)
+
+ return lonArray, latArray
+
+
+def getLocation(lat1, lon1, brng, distanceMiles):
+ """getLocation."""
+    lat1 = math.radians(lat1)
+    lon1 = math.radians(lon1)
+
+    # Earth radius in miles; switch R to 6371 to work in kilometers
+    R = 3959
+    distanceMiles = distanceMiles / R
+
+    brng = math.radians(brng)
+
+ lat2 = math.asin(
+ math.sin(lat1) * math.cos(distanceMiles)
+ + math.cos(lat1) * math.sin(distanceMiles) * math.cos(brng)
+ )
+ lon2 = lon1 + math.atan2(
+ math.sin(brng) * math.sin(distanceMiles) * math.cos(lat1),
+ math.cos(distanceMiles) - math.sin(lat1) * math.sin(lat2),
+ )
+    lon2 = math.degrees(lon2)
+    lat2 = math.degrees(lat2)
+
+ return lat2, lon2
+
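+# A quick sanity check of the great-circle math above: one degree of
+# latitude spans roughly 69 miles, so travelling 69 miles due north from
+# (42.0, -93.6) should land very close to (43.0, -93.6):
+#
+#     lat2, lon2 = getLocation(42.0, -93.6, 0, 69.0)
+#     assert abs(lat2 - 43.0) < 0.01 and abs(lon2 + 93.6) < 0.001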
+
+@iemapp()
+def application(environ, start_response):
+ """Go Main Go."""
+ fn = escape(environ.get("fn", "placefile_rings.txt"))
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", f"attachment; filename={fn}"),
+ ]
+ start_response("200 OK", headers)
+
+ # Things for the user to theoretically input:
+ loc = html_escape(environ.get("loc", "Jack Trice Stadium"))
+ try:
+ lat = environ.get("lat", 42.014004)
+ lon = environ.get("lon", -93.635773)
+ if isinstance(lat, list):
+ lat = lat[0]
+ if isinstance(lon, list):
+ lon = lon[0]
+ pointLat = float(lat)
+ pointLon = float(lon)
+ except ValueError:
+ return [b"ERROR: Invalid lat or lon valid provided."]
+ sio = StringIO()
+ sio.write(
+ f"; This is a placefile to draw a range ring x miles from: {loc}\n"
+ "; Created by Zach Hiris - 8/9/2019\n"
+ "; Code adapted from Jonathan Scholtes (2016)\n\n\n"
+ "Threshold: 999 \n"
+ f"Title: Rings @ {loc}\n"
+ )
+
+ for i in range(3):
+ try:
+ distanceInMiles = float(environ.get(f"m{i}", 100))
+ except ValueError:
+ return [f"ERROR: Invalid m{i} provided.".encode("ascii")]
+ if distanceInMiles <= 0.00001:
+ continue
+ try:
+ r = int(float(environ.get(f"r{i}", 255)))
+ g = int(float(environ.get(f"g{i}", 255)))
+ b = int(float(environ.get(f"b{i}", 0)))
+ a = int(float(environ.get(f"a{i}", 255)))
+ except ValueError:
+ return [b"ERROR: Invalid color provided."]
+ t = environ.get(f"t{i}", "").replace("\n", "\\n")
+
+ # Create the lon/lat pairs
+ X, Y = createCircleAroundWithRadius(
+ pointLat, pointLon, distanceInMiles
+ )
+ ll = "\\n" if t != "" else ""
+ sio.write(
+ f"Color: {r} {g} {b} {a}\n"
+ f'Line: 2, 0, "{t}{ll}{distanceInMiles:.1f} miles from {loc}" \n'
+ )
+ for x, y in zip(X, Y):
+ sio.write(f" {y}, {x}\n")
+ sio.write("End:\n\n")
+ return [sio.getvalue().encode("utf-8")]
diff --git a/pylib/iemweb/request/hads.py b/pylib/iemweb/request/hads.py
new file mode 100644
index 0000000000..a16f6d24a3
--- /dev/null
+++ b/pylib/iemweb/request/hads.py
@@ -0,0 +1,240 @@
+""".. title:: HADS Data Request
+
+`IEM API Mainpage `_
+
+Documentation on /cgi-bin/request/hads.py
+-----------------------------------------
+
+The backend database for this application has many billions of rows of data,
+so requests can be slow.
+
+Changelog
+---------
+
+- 2024-04-18: Allowed cross-year requests, but limited to 365 days when
+ requesting more than one station.
+- 2024-04-09: Migrated to pydantic based CGI field validation.
+- 2024-03-15: Initial documentation added
+
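+Example Usage
+-------------
+
+Something like the following would fetch a day of data for a single station
+(AMWI4 here) as a tab delimited file::
+
+    https://mesonet.agron.iastate.edu/cgi-bin/request/hads.py?\
+stations=AMWI4&sts=2024-05-01T00:00Z&ets=2024-05-02T00:00Z&what=txt&delim=tab
+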
+"""
+
+# pylint: disable=abstract-class-instantiated
+from datetime import timedelta
+from io import BytesIO, StringIO
+from typing import Optional
+
+import pandas as pd
+from pydantic import AwareDatetime, Field, field_validator
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.network import Table as NetworkTable
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+from sqlalchemy import text
+
+DELIMITERS = {"comma": ",", "space": " ", "tab": "\t"}
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ delim: str = Field(
+ "comma",
+ description="Delimiter for output",
+ pattern="^(comma|space|tab)$",
+ )
+ ets: AwareDatetime = Field(None, description="End Time for request")
+ network: str = Field(None, description="Network Identifier")
+ stations: ListOrCSVType = Field(..., description="Station Identifier(s)")
+ sts: AwareDatetime = Field(None, description="Start Time for request")
+ threshold: Optional[float] = Field(
+ None, description="Threshold Value for Searching"
+ )
+ thresholdvar: str = Field(
+ None,
+ description="Threshold Variable for Searching",
+ pattern="^(RG|PCP)$",
+ )
+ what: str = Field(
+ "dl", description="Output format", pattern="^(dl|txt|html|excel)$"
+ )
+ year: int = Field(
+ None,
+ description=(
+ "Legacy year value when this service only supported 1 year at a "
+ "time."
+ ),
+ )
+ year1: Optional[int] = Field(
+ None,
+ description="Start year for request, when sts not set.",
+ )
+ year2: Optional[int] = Field(
+ None,
+ description="End year for request, when ets not set.",
+ )
+ month1: int = Field(
+ None,
+ description="Start month for request, when sts not set.",
+ )
+ month2: int = Field(
+ None,
+ description="End month for request, when ets not set.",
+ )
+ day1: int = Field(
+ None,
+ description="Start day for request, when sts not set.",
+ )
+ day2: int = Field(
+ None,
+ description="End day for request, when ets not set.",
+ )
+ hour1: int = Field(
+ 0,
+ description="Start hour for request, when sts not set.",
+ )
+ hour2: int = Field(
+ 0,
+ description="End hour for request, when ets not set.",
+ )
+ minute1: int = Field(
+ 0,
+ description="Start minute for request, when sts not set.",
+ )
+ minute2: int = Field(
+ 0,
+ description="End minute for request, when ets not set.",
+ )
+
+ @field_validator("threshold", mode="before")
+ def check_threshold(cls, value):
+ """Allow empty string."""
+ return None if value == "" else value
+
+
+def threshold_search(table, threshold, thresholdvar):
+ """Do the threshold searching magic"""
+ cols = list(table.columns.values)
+ searchfor = f"HGI{thresholdvar.upper()}"
+ cols5 = [s[:5] for s in cols]
+ mycol = cols[cols5.index(searchfor)]
+ above = False
+ maxrunning = -99
+ maxvalid = None
+ res = []
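+    # Walk the series as a small state machine: emit START when the value
+    # first exceeds the threshold, track the running maximum while above
+    # it, then emit MAX and END once the value drops back below.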
+ for (station, valid), row in table.iterrows():
+ val = row[mycol]
+ if val > threshold and not above:
+ res.append(
+ dict(
+ station=station,
+ utc_valid=valid,
+ event="START",
+ value=val,
+ varname=mycol,
+ )
+ )
+ above = True
+ if val > threshold and above:
+ if val > maxrunning:
+ maxrunning = val
+ maxvalid = valid
+ if val < threshold and above:
+ res.append(
+ dict(
+ station=station,
+ utc_valid=maxvalid,
+ event="MAX",
+ value=maxrunning,
+ varname=mycol,
+ )
+ )
+ res.append(
+ dict(
+ station=station,
+ utc_valid=valid,
+ event="END",
+ value=val,
+ varname=mycol,
+ )
+ )
+ above = False
+ maxrunning = -99
+ maxvalid = None
+
+ return pd.DataFrame(res)
+
+
+@iemapp(default_tz="UTC", help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """Go do something"""
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("Error, missing start or end time")
+ delimiter = DELIMITERS[environ["delim"]]
+ stations = environ["stations"]
+ if "_ALL" in stations and environ["network"] is not None:
+ stations = list(NetworkTable(environ["network"][:10]).sts.keys())
+ if (environ["ets"] - environ["sts"]) > timedelta(hours=24):
+ environ["ets"] = environ["sts"] + timedelta(hours=24)
+ if len(stations) > 1 and (environ["ets"] - environ["sts"]) > timedelta(
+ days=365
+ ):
+ raise IncompleteWebRequest(
+ "Error, more than one station and more than 365 days requested"
+ )
+ if not stations:
+ raise IncompleteWebRequest("Error, no stations specified!")
+ sql = text(
+ """
+ SELECT station, valid at time zone 'UTC' as utc_valid, key, value
+ from raw WHERE station = ANY(:ids) and
+ valid BETWEEN :sts and :ets and value > -999
+ ORDER by valid ASC
+ """
+ )
+ params = {"ids": stations, "sts": environ["sts"], "ets": environ["ets"]}
+
+ with get_sqlalchemy_conn("hads") as conn:
+ df = pd.read_sql(sql, conn, params=params)
+ if df.empty:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return [b"Error, no results found for query!"]
+ table = df.pivot_table(
+ values="value", columns=["key"], index=["station", "utc_valid"]
+ )
+ if environ["threshold"] is not None:
+ if len(stations) > 1:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return [b"Can not do threshold search for more than one station"]
+ table = threshold_search(
+ table, environ["threshold"], environ["thresholdvar"]
+ )
+
+ sio = StringIO()
+ if environ["what"] == "txt":
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", "attachment; filename=hads.txt"),
+ ]
+ start_response("200 OK", headers)
+ table.to_csv(sio, sep=delimiter)
+ return [sio.getvalue().encode("ascii")]
+ if environ["what"] == "html":
+ headers = [("Content-type", "text/html")]
+ start_response("200 OK", headers)
+ table.to_html(sio)
+ return [sio.getvalue().encode("ascii")]
+ if environ["what"] == "excel":
+ bio = BytesIO()
+ with pd.ExcelWriter(bio, engine="openpyxl") as writer:
+ table.to_excel(writer, sheet_name="Data", index=True)
+
+ headers = [
+ ("Content-type", EXL),
+ ("Content-Disposition", "attachment; filename=hads.xlsx"),
+ ]
+ start_response("200 OK", headers)
+ return [bio.getvalue()]
+ start_response("200 OK", [("Content-type", "text/plain")])
+ table.to_csv(sio, sep=delimiter)
+ return [sio.getvalue().encode("ascii")]
diff --git a/pylib/iemweb/request/hourlyprecip.py b/pylib/iemweb/request/hourlyprecip.py
new file mode 100644
index 0000000000..ca9c101e3c
--- /dev/null
+++ b/pylib/iemweb/request/hourlyprecip.py
@@ -0,0 +1,96 @@
+""".. title:: Hourly Precipitation Data Service
+
+Documentation for /cgi-bin/request/hourlyprecip.py
+--------------------------------------------------
+
+This service emits hourly precipitation data based on METAR observations
+processed by the IEM.
+
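+Example Usage
+-------------
+
+Something like the following returns a day of hourly precipitation for Des
+Moines (DSM) in the IA_ASOS network::
+
+    https://mesonet.agron.iastate.edu/cgi-bin/request/hourlyprecip.py?\
+station=DSM&network=IA_ASOS&sts=2024-05-01T00:00Z&ets=2024-05-02T00:00Z
+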
+"""
+
+from zoneinfo import ZoneInfo
+
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_dbconn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ ets: AwareDatetime = Field(
+ None, description="The end of the requested interval."
+ )
+ lalo: bool = Field(False, description="Include the lat/lon in the output.")
+ network: str = Field(
+ "IA_ASOS",
+ description="The network to request data for.",
+ max_length=12,
+ )
+ st: bool = Field(False, description="Include the state in the output.")
+ station: ListOrCSVType = Field(
+ [], description="The station(s) to request data for."
+ )
+ sts: AwareDatetime = Field(
+ None, description="The start of the requested interval."
+ )
+ tz: str = Field(
+ "America/Chicago",
+ description=(
+ "The timezone to present the data in and for requested interval."
+ ),
+ )
+ year1: int = Field(None, description="The start year, when sts is unset.")
+ month1: int = Field(
+ None, description="The start month, when sts is unset."
+ )
+ day1: int = Field(None, description="The start day, when sts is unset.")
+ year2: int = Field(None, description="The end year, when ets is unset.")
+ month2: int = Field(None, description="The end month, when ets is unset.")
+ day2: int = Field(None, description="The end day, when ets is unset.")
+
+
+def get_data(network, environ, tzinfo):
+ """Go fetch data please"""
+ pgconn = get_dbconn("iem")
+ cursor = pgconn.cursor()
+ res = "station,network,valid,precip_in"
+ sql = ""
+ if environ["lalo"]:
+ res += ",lat,lon"
+ sql += " , st_y(geom) as lat, st_x(geom) as lon "
+ if environ["st"]:
+ res += ",st"
+ sql += ", state "
+ res += "\n"
+ cursor.execute(
+ f"""
+ SELECT id, t.network, valid, phour {sql}
+ from hourly h JOIN stations t on
+ (h.iemid = t.iemid) WHERE
+ valid >= %s and valid < %s and t.network = %s and t.id = ANY(%s)
+ ORDER by valid ASC
+ """,
+ (environ["sts"], environ["ets"], network, environ["station"]),
+ )
+ for row in cursor:
+ res += (
+ f"{row[0]},{row[1]},{row[2].astimezone(tzinfo):%Y-%m-%d %H:%M},"
+ f"{','.join([str(x) for x in row[3:]])}\n"
+ )
+
+ return res.encode("ascii", "ignore")
+
+
+@iemapp(help=__doc__, default_tz="America/Chicago", schema=Schema)
+def application(environ, start_response):
+ """run rabbit run"""
+ tzinfo = ZoneInfo(environ["tz"])
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("Missing start or end time.")
+ if not environ["station"]:
+ raise IncompleteWebRequest("No station= was specified.")
+ start_response("200 OK", [("Content-type", "text/plain")])
+ network = environ["network"]
+ return [get_data(network, environ, tzinfo)]
diff --git a/pylib/iemweb/request/isusm.py b/pylib/iemweb/request/isusm.py
new file mode 100644
index 0000000000..92b21425fb
--- /dev/null
+++ b/pylib/iemweb/request/isusm.py
@@ -0,0 +1,480 @@
+"""Download interface for ISU-SM data."""
+
+import datetime
+from io import BytesIO, StringIO
+from zoneinfo import ZoneInfo
+
+import numpy as np
+import pandas as pd
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.util import convert_value
+from pyiem.webutil import ensure_list, iemapp
+from sqlalchemy import text
+
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+MISSING = {"", "M", "-99"}
+SV_DEPTHS = [2, 4, 8, 12, 14, 16, 20, 24, 28, 30, 32, 36, 40, 42, 52]
+
+
+def get_stations(environ):
+ """Figure out which stations were requested"""
+    # Dragons: sts could now be a datetime, but the legacy frontend used
+    # it as a variable holding a list of stations
+ sts = ensure_list(environ, "station")
+ if not sts and not isinstance(environ.get("sts", ""), datetime.datetime):
+ sts = ensure_list(environ, "sts")
+ return sts
+
+
+def get_delimiter(environ):
+ """Figure out what is the requested delimiter"""
+ d = environ.get("delim", "comma")
+ if d == "comma":
+ return ","
+ return "\t"
+
+
+def fetch_daily(environ, cols):
+ """Return a fetching of daily data"""
+ stations = get_stations(environ)
+
+ if not cols:
+ cols = [
+ "station",
+ "valid",
+ "high",
+ "low",
+ "rh_min",
+ "rh",
+ "rh_max",
+ "gdd50",
+ "solar",
+ "precip",
+ "speed",
+ "gust",
+ "et",
+ "soil04t",
+ "soil12t",
+ "soil24t",
+ "soil50t",
+ "soil12vwc",
+ "soil24vwc",
+ "soil50vwc",
+ ]
+ else:
+ cols.insert(0, "valid")
+ cols.insert(0, "station")
+ if "sv" in cols:
+ # SoilVue 10 data
+ for depth in SV_DEPTHS:
+ for c2 in ["t", "vwc"]:
+ cols.append(f"sv_{c2}{depth}")
+ else:
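+        # Expand shorthand like "sv4" into the sv_t4 / sv_vwc4 columns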
+ for col in list(cols):
+ if col.startswith("sv") and len(col) > 2:
+ depth = int(col[2:])
+ for c2 in ["t", "vwc"]:
+ cols.append(f"sv_{c2}{depth}")
+ with get_sqlalchemy_conn("isuag") as conn:
+ df = pd.read_sql(
+ text(
+ """
+ --- Get the Daily Max/Min soil values
+ WITH soils as (
+ SELECT station, date(valid) as date,
+ min(rh_avg_qc) as rh_min,
+ avg(rh_avg_qc) as rh,
+ max(rh_avg_qc) as rh_max,
+ min(t12_c_avg_qc) as soil12tn, max(t12_c_avg_qc) as soil12tx,
+ min(t24_c_avg_qc) as soil24tn, max(t24_c_avg_qc) as soil24tx,
+ min(t50_c_avg_qc) as soil50tn, max(t50_c_avg_qc) as soil50tx
+ from sm_hourly where
+ valid >= :sts and valid < :ets and station = ANY(:stations)
+ GROUP by station, date
+ ), daily as (
+ SELECT *,
+ t4_c_min_qc as soil04tn, t4_c_max_qc as soil04tx,
+ round(gddxx(50, 86, c2f( tair_c_max_qc ),
+ c2f( tair_c_min_qc ))::numeric,1) as gdd50 from sm_daily WHERE
+ valid >= :sts and valid < :ets and station = ANY(:stations)
+ )
+ SELECT d.*, s.rh_min, s.rh, s.rh_max,
+ s.soil12tn, s.soil12tx, s.soil24tn, s.soil24tx, s.soil50tn, s.soil50tx
+ FROM soils s JOIN daily d on (d.station = s.station and s.date = d.valid)
+ ORDER by d.valid ASC
+ """
+ ),
+ conn,
+ params={
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ "stations": stations,
+ },
+ index_col=None,
+ )
+
+ if df.empty:
+ return df, []
+
+ df = df.fillna(np.nan).infer_objects()
+
+ # Direct copy / rename
+ xref = {
+ "rh_avg_qc": "relh",
+ "rain_in_tot_qc": "precip",
+ "winddir_d1_wvt_qc": "drct",
+ "vwc12_qc": "soil12vwc",
+ "vwc24_qc": "soil24vwc",
+ "vwc50_qc": "soil50vwc",
+ "dailyet_qc": "et",
+ }
+ df = df.rename(columns=xref, errors="ignore")
+ # Mul by 100 for %
+ for depth in [12, 24, 50]:
+ df[f"soil{depth}vwc"] = df[f"soil{depth}vwc"] * 100.0
+ # Now we need to do some mass data conversion, sigh
+ tc = {
+ "high": "tair_c_max_qc",
+ "low": "tair_c_min_qc",
+ "soil04t": "t4_c_avg_qc",
+ "soil04tn": "soil04tn",
+ "soil04tx": "soil04tx",
+ "soil12t": "t12_c_avg_qc",
+ "soil12tn": "soil12tn",
+ "soil12tx": "soil12tx",
+ "soil24t": "t24_c_avg_qc",
+ "soil24tn": "soil24tn",
+ "soil24tx": "soil24tx",
+ "soil50t": "t50_c_avg_qc",
+ "soil50tn": "soil50tn",
+ "soil50tx": "soil50tx",
+ }
+ for key, col in tc.items():
+ if key not in cols:
+ continue
+ # Do the work
+ df[key] = convert_value(df[col].values, "degC", "degF")
+
+ if "speed" in cols:
+ df = df.rename(columns={"ws_mph_qc": "speed"})
+ if "gust" in cols:
+ df = df.rename(columns={"ws_mph_max_qc": "gust"})
+ if "sv" in cols:
+ # SoilVue 10 data
+ for depth in SV_DEPTHS:
+ df[f"sv_t{depth}"] = convert_value(
+ df[f"sv_t{depth}_qc"].values, "degC", "degF"
+ )
+ # Copy
+ df[f"sv_vwc{depth}"] = df[f"sv_vwc{depth}_qc"]
+ # Remove the original
+ cols.remove("sv")
+ else:
+ for col in list(cols):
+ if col.startswith("sv_r"):
+ df[col] = convert_value(df[f"{col}_qc"].values, "degC", "degF")
+ cols.remove(col)
+ elif col.startswith("sv_vwc"):
+ df[col] = df[f"{col}_qc"]
+ cols.remove(col)
+
+ # Convert solar radiation to J/m2
+ if "solar" in cols:
+ df["solar"] = df["slrkj_tot_qc"] * 1000.0
+ if "solar_mj" in cols:
+ df["solar_mj"] = df["slrkj_tot_qc"] / 1000.0
+ if "et" in cols:
+ df["et"] = convert_value(df["et"], "mm", "inch")
+
+ overwrite = (
+ "bp_mb lwmv_1 lwmv_2 lwmdry_1_tot lwmcon_1_tot lwmwet_1_tot "
+ "lwmdry_2_tot lwmcon_2_tot lwmwet_2_tot bpres_avg"
+ ).split()
+ for col in overwrite:
+ if col in cols:
+ # Overwrite
+ df[col] = df[f"{col}_qc"]
+
+ return df, cols
+
+
+def fetch_hourly(environ, cols):
+ """Process the request for hourly/minute data."""
+ stations = get_stations(environ)
+
+ if not cols:
+ cols = [
+ "station",
+ "valid",
+ "tmpf",
+ "relh",
+ "solar",
+ "precip",
+ "speed",
+ "drct",
+ "et",
+ "soil04t",
+ "soil12t",
+ "soil24t",
+ "soil50t",
+ "soil12vwc",
+ "soil24vwc",
+ "soil50vwc",
+ ]
+ else:
+ cols.insert(0, "valid")
+ cols.insert(0, "station")
+
+ table = "sm_hourly"
+ sqlextra = ", null as bp_mb_qc "
+ if environ.get("timeres") == "minute":
+ table = "sm_minute"
+ sqlextra = ", null as etalfalfa_qc"
+ if "sv" in cols:
+ # SoilVue 10 data
+ for depth in SV_DEPTHS:
+ for c2 in ["t", "vwc"]:
+ cols.append(f"sv_{c2}{depth}")
+ else:
+ for col in list(cols):
+ if col.startswith("sv") and len(col) > 2:
+ depth = int(col[2:])
+ for c2 in ["t", "vwc"]:
+ cols.append(f"sv_{c2}{depth}")
+ with get_sqlalchemy_conn("isuag") as conn:
+ df = pd.read_sql(
+ text(
+ f"""
+ SELECT *, valid at time zone 'UTC' as utc_valid {sqlextra}
+ from {table} WHERE valid >= :sts and valid < :ets and
+ station = ANY(:stations) ORDER by valid ASC
+ """
+ ),
+ conn,
+ params={
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ "stations": stations,
+ },
+ index_col=None,
+ )
+ if df.empty:
+ return df, cols
+
+ # Muck with the timestamp column
+ if environ.get("tz") == "utc":
+ df["valid"] = df["utc_valid"].dt.strftime("%Y-%m-%d %H:%M+00")
+ else:
+ df["valid"] = (
+ df["utc_valid"]
+ .dt.tz_localize("UTC")
+ .dt.tz_convert("US/Central")
+ .dt.strftime("%Y-%m-%d %H:%M")
+ )
+
+ df = df.fillna(np.nan).infer_objects()
+ # Direct copy / rename
+ xref = {
+ "rh_avg_qc": "relh",
+ "rain_in_tot_qc": "precip",
+ "winddir_d1_wvt_qc": "drct",
+ "vwc12_qc": "soil12vwc",
+ "vwc24_qc": "soil24vwc",
+ "vwc50_qc": "soil50vwc",
+ }
+ df = df.rename(columns=xref, errors="ignore")
+ # Mul by 100 for %
+ for depth in [12, 24, 50]:
+ df[f"soil{depth}vwc"] = df[f"soil{depth}vwc"] * 100.0
+ # Now we need to do some mass data conversion, sigh
+ tc = {
+ "tmpf": "tair_c_avg_qc",
+ "soil04t": "t4_c_avg_qc",
+ "soil12t": "t12_c_avg_qc",
+ "soil24t": "t24_c_avg_qc",
+ "soil50t": "t50_c_avg_qc",
+ }
+ for key, col in tc.items():
+ if key not in cols:
+ continue
+ # Do the work
+ df[key] = convert_value(df[col].values, "degC", "degF")
+
+ if "sv" in cols:
+ # SoilVue 10 data
+ for depth in SV_DEPTHS:
+ df[f"sv_t{depth}"] = convert_value(
+ df[f"sv_t{depth}_qc"].values, "degC", "degF"
+ )
+ # Copy
+ df[f"sv_vwc{depth}"] = df[f"sv_vwc{depth}_qc"]
+ # Remove the original
+ cols.remove("sv")
+ else:
+ for col in list(cols):
+ if col.startswith("sv_t"):
+ df[col] = convert_value(df[f"{col}_qc"].values, "degC", "degF")
+ cols.remove(col)
+ elif col.startswith("sv_vwc"):
+ # Copy
+ df[col] = df[f"{col}_qc"]
+ cols.remove(col)
+
+ # Convert solar radiation to J/m2
+ if "solar" in cols:
+ df["solar"] = df["slrkj_tot_qc"] * 1000.0
+
+ if "speed" in cols:
+ df["speed"] = df["ws_mph_qc"]
+
+ if "et" in cols:
+ df["et"] = convert_value(df["etalfalfa_qc"].values, "mm", "inch")
+
+ overwrite = (
+ "bp_mb lwmv_1 lwmv_2 lwmdry_1_tot lwmcon_1_tot lwmwet_1_tot "
+ "lwmdry_2_tot lwmcon_2_tot lwmwet_2_tot bpres_avg"
+ ).split()
+ for col in overwrite:
+ if col in cols:
+ # Overwrite
+ df[col] = df[f"{col}_qc"]
+
+ return df, cols
+
+
+def muck_timestamps(environ):
+ """Atone for previous sins with sts variable..."""
+ # No action necessary
+ if isinstance(environ["sts"], datetime.datetime):
+ return
+ environ["station"] = ensure_list(environ, "sts")
+ environ["sts"] = datetime.datetime(
+ int(environ["year1"]),
+ int(environ["month1"]),
+ int(environ["day1"]),
+ tzinfo=ZoneInfo("America/Chicago"),
+ )
+ if environ["sts"] == environ["ets"]:
+ environ["ets"] = environ["sts"] + datetime.timedelta(days=1)
+
+
+def fetch_inversion(environ, cols):
+ """Process the request for inversion data."""
+ stations = get_stations(environ)
+
+ cols = [
+ "station",
+ "valid",
+ "tair_15",
+ "tair_5",
+ "tair_10",
+ "speed",
+ "gust",
+ ]
+
+ with get_sqlalchemy_conn("isuag") as conn:
+ df = pd.read_sql(
+ text(
+ """
+ SELECT station, valid at time zone 'UTC' as utc_valid,
+ tair_15_c_avg_qc, tair_5_c_avg_qc, tair_10_c_avg_qc,
+ ws_ms_avg_qc, ws_ms_max_qc
+ from sm_inversion WHERE valid >= :sts and valid < :ets and
+ station = ANY(:stations) ORDER by valid ASC
+ """
+ ),
+ conn,
+ params={
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ "stations": stations,
+ },
+ index_col=None,
+ )
+ if df.empty:
+ return df, cols
+
+ # Muck with the timestamp column
+ if environ.get("tz") == "utc":
+ df["valid"] = df["utc_valid"].dt.strftime("%Y-%m-%d %H:%M+00")
+ else:
+ df["valid"] = (
+ df["utc_valid"]
+ .dt.tz_localize("UTC")
+ .dt.tz_convert("US/Central")
+ .dt.strftime("%Y-%m-%d %H:%M")
+ )
+
+ df = df.fillna(np.nan).infer_objects()
+ # Direct copy / rename
+ # Now we need to do some mass data conversion, sigh
+ tc = {
+ "tair_15": "tair_15_c_avg_qc",
+ "tair_5": "tair_5_c_avg_qc",
+ "tair_10": "tair_10_c_avg_qc",
+ }
+ for key, col in tc.items():
+ # Do the work
+ df[key] = convert_value(df[col].values, "degC", "degF")
+
+ df["speed"] = convert_value(df["ws_ms_avg_qc"].values, "mps", "mph")
+ df["gust"] = convert_value(df["ws_ms_max_qc"].values, "mps", "mph")
+
+ return df, cols
+
+
+@iemapp()
+def application(environ, start_response):
+ """Do things"""
+ if "sts" not in environ:
+ raise IncompleteWebRequest("Missing start time parameters")
+ try:
+ muck_timestamps(environ)
+ except Exception as exp:
+ raise IncompleteWebRequest("Invalid date/station provided") from exp
+ mode = environ.get("mode", "hourly")
+ cols = ensure_list(environ, "vars")
+ fmt = environ.get("format", "csv").lower()
+ todisk = environ.get("todisk", "no")
+ if mode == "hourly":
+ df, cols = fetch_hourly(environ, cols)
+ elif mode == "inversion":
+ df, cols = fetch_inversion(environ, cols)
+ else:
+ df, cols = fetch_daily(environ, cols)
+ miss = environ.get("missing", "-99")
+ assert miss in MISSING
+ df = df.replace({np.nan: miss})
+ # compute columns present in both cols and df.columns
+ # pandas intersection is not order preserving, so we do this
+ cols = [c for c in cols if c in df.columns]
+ if fmt == "excel":
+ bio = BytesIO()
+ # pylint: disable=abstract-class-instantiated
+ if cols:
+ with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
+ df.to_excel(
+ writer, sheet_name="Data", columns=cols, index=False
+ )
+ headers = [
+ ("Content-type", EXL),
+ ("Content-disposition", "attachment; Filename=isusm.xlsx"),
+ ]
+ start_response("200 OK", headers)
+ return [bio.getvalue()]
+
+ delim = "," if fmt == "comma" else "\t"
+ sio = StringIO()
+ # careful of precision here
+ df.to_csv(sio, index=False, columns=cols, sep=delim, float_format="%.4f")
+
+ if todisk == "yes":
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", "attachment; filename=isusm.txt"),
+ ]
+ else:
+ headers = [("Content-type", "text/plain")]
+ start_response("200 OK", headers)
+ return [sio.getvalue().encode("ascii")]
diff --git a/pylib/iemweb/request/metars.py b/pylib/iemweb/request/metars.py
new file mode 100644
index 0000000000..2236b69391
--- /dev/null
+++ b/pylib/iemweb/request/metars.py
@@ -0,0 +1,84 @@
+""".. title:: Request Hour's worth of METARs
+
+Documentation for /cgi-bin/request/metars.py
+--------------------------------------------
+
+This is a very simple service that emits a text file of METARs that is
+amenable to being ingested by other software. Each METAR is on a
+single line and the file is sorted by the observation time.
+
+Example Usage
+-------------
+
+Retrieve all METARs for the hour starting at 00 UTC on 1 January 2016:
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/metars.py?valid=2016010100
+
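+Or, from Python (assuming the ``requests`` package is available), fetch the
+most recently completed hour::
+
+    import datetime
+
+    import requests
+
+    now = datetime.datetime.now(datetime.timezone.utc)
+    valid = (now - datetime.timedelta(hours=1)).strftime("%Y%m%d%H")
+    url = "https://mesonet.agron.iastate.edu/cgi-bin/request/metars.py"
+    resp = requests.get(url, params={"valid": valid}, timeout=60)
+    print(resp.text[:500])
+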
+"""
+
+import datetime
+import sys
+from io import StringIO
+from zoneinfo import ZoneInfo
+
+from pydantic import AwareDatetime, Field, field_validator
+from pyiem.webutil import CGIModel, iemapp
+
+
+class Schema(CGIModel):
+ """Our schema for this request"""
+
+ valid: AwareDatetime = Field(
+ ...,
+ description=(
+ "Hour truncated UTC timestamp to request data for. The "
+ "format is `YYYYMMDDHH`."
+ ),
+ )
+
+ @field_validator("valid", mode="before")
+ def parse_valid(cls, value):
+ """Ensure valid is a valid datetime"""
+ return datetime.datetime.strptime(value, "%Y%m%d%H").replace(
+ tzinfo=ZoneInfo("UTC")
+ )
+
+
+def check_load(cursor):
+ """A crude check that aborts this script if there is too much
+ demand at the moment"""
+ cursor.execute(
+ "select pid from pg_stat_activity where query ~* 'FETCH' "
+ "and datname = 'asos'"
+ )
+ if len(cursor.fetchall()) > 9:
+ sys.stderr.write(
+ f"/cgi-bin/request/metars.py over capacity: {cursor.rowcount}\n"
+ )
+ return False
+ return True
+
+
+@iemapp(iemdb="asos", iemdb_cursorname="streamer", schema=Schema, help=__doc__)
+def application(environ, start_response):
+ """Do Something"""
+ cursor = environ["iemdb.asos.cursor"]
+ if not check_load(cursor):
+ start_response(
+ "503 Service Unavailable", [("Content-type", "text/plain")]
+ )
+ return [b"ERROR: server over capacity, please try later"]
+ start_response("200 OK", [("Content-type", "text/plain")])
+ valid = environ["valid"]
+ cursor.execute(
+ """
+ SELECT metar from alldata
+ WHERE valid >= %s and valid < %s and metar is not null
+ ORDER by valid ASC
+ """,
+ (valid, valid + datetime.timedelta(hours=1)),
+ )
+ sio = StringIO()
+ for row in cursor:
+ sio.write("%s\n" % (row["metar"].replace("\n", " "),))
+ return [sio.getvalue().encode("ascii", "ignore")]
diff --git a/pylib/iemweb/request/mos.py b/pylib/iemweb/request/mos.py
new file mode 100644
index 0000000000..0919298233
--- /dev/null
+++ b/pylib/iemweb/request/mos.py
@@ -0,0 +1,157 @@
+""".. title:: Model Output Statistics (MOS) Data
+
+Documentation for /cgi-bin/request/mos.py
+-----------------------------------------
+
+This application provides access to the Model Output Statistics (MOS) data
+that the IEM processes and archives.
+
+Example Usage
+~~~~~~~~~~~~~
+
+Return all the NBS MOS data for KDSM for MOS runs made on 14 Dec 2023
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/mos.py?\
+station=KDSM&model=NBS&sts=2023-12-14T00:00Z&ets=2023-12-15T00:00Z&format=csv
+
+"""
+
+from io import BytesIO, StringIO
+
+import pandas as pd
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import CGIModel, iemapp
+from sqlalchemy import text
+
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+
+class MyModel(CGIModel):
+ """Our model"""
+
+ format: str = Field(
+ "csv",
+ description="The format of the data response. csv, json, or excel",
+ pattern=r"^(csv|json|excel)$",
+ )
+ model: str = Field(
+ ...,
+ description="The model to query",
+ pattern=r"^(AVN|ETA|GFS|LAV|MEX|NAM|NBE|NBS)$",
+ )
+ ets: AwareDatetime = Field(
+ None,
+ description="The end time for the data request",
+ )
+ station: str = Field(..., description="The 4 character station identifier")
+ sts: AwareDatetime = Field(
+ None,
+ description="The start time for the data request",
+ )
+ year1: int = Field(
+ None,
+ description="The start year for the data request, when sts is not set",
+ )
+ month1: int = Field(
+ None,
+ description=(
+ "The start month for the data request, when sts is not set"
+ ),
+ )
+ day1: int = Field(
+ None,
+ description="The start day for the data request, when sts is not set",
+ )
+ hour1: int = Field(
+ None,
+ description="The start hour for the data request, when sts is not set",
+ )
+ year2: int = Field(
+ None,
+ description="The end year for the data request, when ets is not set",
+ )
+ month2: int = Field(
+ None,
+ description="The end month for the data request, when ets is not set",
+ )
+ day2: int = Field(
+ None,
+ description="The end day for the data request, when ets is not set",
+ )
+ hour2: int = Field(
+ None,
+ description="The end hour for the data request, when ets is not set",
+ )
+
+
+def get_data(sts, ets, station, model, fmt):
+ """Go fetch data please"""
+ model2 = model
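+    # The MOS archive retains the legacy model identifiers: NAM was archived
+    # as ETA and GFS as AVN, so query under both names.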
+ if model == "NAM":
+ model2 = "ETA"
+ if model == "GFS":
+ model2 = "AVN"
+ with get_sqlalchemy_conn("mos") as conn:
+ df = pd.read_sql(
+ text(
+ """
+ select
+ runtime at time zone 'UTC' as utc_runtime,
+ ftime at time zone 'UTC' as utc_ftime,
+ *, t06_1 ||'/'||t06_2 as t06,
+ t12_1 ||'/'|| t12_2 as t12 from alldata WHERE station = :station
+ and runtime >= :sts and runtime <= :ets and
+ (model = :model1 or model = :model2)
+ ORDER by runtime,ftime ASC"""
+ ),
+ conn,
+ params={
+ "sts": sts,
+ "ets": ets,
+ "model1": model,
+ "model2": model2,
+ "station": station,
+ },
+ )
+ df = df.drop(columns=["runtime", "ftime"]).rename(
+ columns={"utc_runtime": "runtime", "utc_ftime": "ftime"}
+ )
+ if not df.empty:
+ df = df.dropna(axis=1, how="all")
+ if fmt == "json":
+ return df.to_json(orient="records")
+ if fmt == "excel":
+ bio = BytesIO()
+ # pylint: disable=abstract-class-instantiated
+ with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
+ df.to_excel(writer, sheet_name="Data", index=False)
+ return bio.getvalue()
+
+ sio = StringIO()
+ df.to_csv(sio, index=False)
+ return sio.getvalue()
+
+
+@iemapp(help=__doc__, schema=MyModel, default_tz="UTC")
+def application(environ, start_response):
+ """See how we are called"""
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("Missing sts and/or ets")
+ fmt = environ["format"]
+ station = environ["station"].upper()
+ model = environ["model"]
+ if fmt != "excel":
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return [
+ get_data(
+ environ["sts"], environ["ets"], station, model, fmt
+ ).encode("ascii")
+ ]
+ headers = [
+ ("Content-type", EXL),
+ ("Content-disposition", "attachment; Filename=mos.xlsx"),
+ ]
+ start_response("200 OK", headers)
+ return [get_data(environ["sts"], environ["ets"], station, model, fmt)]
diff --git a/pylib/iemweb/request/nass_iowa.py b/pylib/iemweb/request/nass_iowa.py
new file mode 100644
index 0000000000..ea810e058d
--- /dev/null
+++ b/pylib/iemweb/request/nass_iowa.py
@@ -0,0 +1,43 @@
+""".. title:: Download NASS Iowa Data
+
+Documentation for /cgi-bin/request/nass_iowa.py
+-----------------------------------------------
+
+This service provides a download of the NASS Iowa data that is ingested into
+the IEM database. The data is available in Excel format. There are no options
+to this service at this time.
+
+Example Usage
+~~~~~~~~~~~~~
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/nass_iowa.py
+
+"""
+
+from io import BytesIO
+
+import pandas as pd
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.webutil import iemapp
+
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+
+@iemapp(help=__doc__)
+def application(_environ, start_response):
+ """Go Main Go"""
+ headers = [
+ ("Content-type", EXL),
+ ("Content-disposition", "attachment; Filename=nass_iowa.xlsx"),
+ ]
+ start_response("200 OK", headers)
+ with get_sqlalchemy_conn("coop") as conn:
+ df = pd.read_sql(
+ "SELECT * from nass_iowa ORDER by valid ASC",
+ conn,
+ parse_dates="load_time",
+ )
+ df["load_time"] = df["load_time"].dt.strftime("%Y-%m-%d")
+ bio = BytesIO()
+ df.to_excel(bio, index=False)
+ return [bio.getvalue()]
diff --git a/pylib/iemweb/request/nlaeflux.py b/pylib/iemweb/request/nlaeflux.py
new file mode 100644
index 0000000000..c61f78a392
--- /dev/null
+++ b/pylib/iemweb/request/nlaeflux.py
@@ -0,0 +1,53 @@
+"""Download backend for NLAE Flux Data."""
+
+import pandas as pd
+from pydantic import Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.util import utc
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+from sqlalchemy import text
+
+
+class Schema(CGIModel):
+ """Request arguments."""
+
+ syear: int = Field(..., description="Start Year")
+ smonth: int = Field(..., description="Start Month")
+ sday: int = Field(..., description="Start Day")
+ eyear: int = Field(..., description="End Year")
+ emonth: int = Field(..., description="End Month")
+ eday: int = Field(..., description="End Day")
+ station: ListOrCSVType = Field(..., description="Station Identifier")
+
+
+@iemapp(help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """Handle mod_wsgi request."""
+ sts = utc(
+ int(environ["syear"]), int(environ["smonth"]), int(environ["sday"])
+ )
+ ets = utc(
+ int(environ["eyear"]), int(environ["emonth"]), int(environ["eday"])
+ )
+ stations = environ["station"]
+ with get_sqlalchemy_conn("other") as conn:
+ df = pd.read_sql(
+ text(
+ """
+ select *, valid at time zone 'UTC' as utc_valid
+ from flux_data where valid >= :sts and valid < :ets
+ and station = ANY(:stations)
+ """
+ ),
+ conn,
+ params={"stations": stations, "sts": sts, "ets": ets},
+ parse_dates=["utc_valid"],
+ )
+ df["valid"] = df["utc_valid"].dt.strftime("%Y-%m-%d %H:%M:%S")
+ df = df.drop(columns=["utc_valid"])
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", "attachment; filename=fluxdata.txt"),
+ ]
+ start_response("200 OK", headers)
+ return [df.to_csv(index=False).encode("ascii")]
diff --git a/pylib/iemweb/request/other.py b/pylib/iemweb/request/other.py
new file mode 100644
index 0000000000..deecde4cbe
--- /dev/null
+++ b/pylib/iemweb/request/other.py
@@ -0,0 +1,67 @@
+"""
+Download interface for data from 'other' network
+"""
+
+from io import StringIO
+
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.util import get_dbconnc
+from pyiem.webutil import iemapp
+
+
+def fetcher(station, sts, ets):
+ """
+ Fetch the data
+ """
+ cols = [
+ "station",
+ "valid",
+ "tmpf",
+ "dwpf",
+ "drct",
+ "sknt",
+ "gust",
+ "relh",
+ "alti",
+ "pcpncnt",
+ "pday",
+ "pmonth",
+ "srad",
+ ]
+
+ pgconn, cursor = get_dbconnc("other")
+ cursor.execute(
+ """
+ SELECT * from alldata where station = %s and valid between %s and %s
+ ORDER by valid ASC
+ """,
+ (station, sts.strftime("%Y-%m-%d"), ets.strftime("%Y-%m-%d")),
+ )
+
+ sio = StringIO()
+ sio.write(
+ (
+ "station,valid_CST_CDT,air_tmp_F,dew_point_F,"
+ "wind_dir_deg,wind_sped_kts,wind_gust_kts,relh_%,"
+ "alti_in,pcpncnt_in,precip_day_in,precip_month_in,"
+ "solar_rad_wms\n"
+ )
+ )
+
+ for row in cursor:
+ sio.write(",".join(f"{row[col]}" for col in cols))
+ sio.write("\n")
+ pgconn.close()
+ return sio.getvalue().encode("ascii")
+
+
+@iemapp()
+def application(environ, start_response):
+ """
+ Do something!
+ """
+ if "sts" not in environ:
+ raise IncompleteWebRequest("GET start time parameters missing")
+ station = environ.get("station", "")[:10]
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return [fetcher(station, environ["sts"], environ["ets"])]
diff --git a/pylib/iemweb/request/purpleair.py b/pylib/iemweb/request/purpleair.py
new file mode 100644
index 0000000000..cae96b940f
--- /dev/null
+++ b/pylib/iemweb/request/purpleair.py
@@ -0,0 +1,55 @@
+"""
+Purple Air Quality Sensor
+"""
+
+from io import BytesIO
+
+import pandas as pd
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import iemapp
+from sqlalchemy import text
+
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+
+def run(environ, start_response):
+ """run()"""
+ sql = text(
+ """
+ select * from purpleair where valid >= :sts and valid < :ets
+ ORDER by valid asc
+ """
+ )
+ with get_sqlalchemy_conn("other") as conn:
+ df = pd.read_sql(
+ sql, conn, params={"sts": environ["sts"], "ets": environ["ets"]}
+ )
+ if environ.get("excel", "no") == "yes":
+ start_response(
+ "200 OK",
+ [
+ ("Content-type", EXL),
+ ("Content-Disposition", "attachment; filename=purpleair.xlsx"),
+ ],
+ )
+ bio = BytesIO()
+ df.to_excel(bio, index=False, engine="openpyxl")
+ return bio.getvalue()
+ start_response(
+ "200 OK",
+ [
+ ("Content-type", "application/octet-stream"),
+ ("Content-Disposition", "attachment; filename=purpleair.csv"),
+ ],
+ )
+ return df.to_csv(None, index=False).encode("ascii")
+
+
+@iemapp(default_tz="America/Chicago")
+def application(environ, start_response):
+ """Go Main Go"""
+ if "sts" not in environ:
+ raise IncompleteWebRequest("GET start time parameters missing")
+
+ return [run(environ, start_response)]
diff --git a/pylib/iemweb/request/raob.py b/pylib/iemweb/request/raob.py
new file mode 100644
index 0000000000..55b0e75e6d
--- /dev/null
+++ b/pylib/iemweb/request/raob.py
@@ -0,0 +1,116 @@
+""".. title:: RAOB Data Service
+
+Documentation for /cgi-bin/request/raob.py
+------------------------------------------
+
+This service emits the archived RAOB (sounding) profile data.
+
+Parameters:
+
+- `station`: four character station identifier, or a `_` prefixed
+  identifier that expands to a list of stations for a network.
+- `sts` / `ets`: start and end times, accepted as `%m/%d/%Y` or
+  `%m/%d/%Y %H:%M`, interpreted as UTC.
+- `dl`: when set, the response is sent as a file attachment.
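+
+Example Usage
+~~~~~~~~~~~~~
+
+A hypothetical request for a day of Omaha (KOAX) soundings:
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/raob.py?station=KOAX&sts=01/01/2024&ets=01/02/2024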
+"""
+
+import datetime
+from io import StringIO
+from zoneinfo import ZoneInfo
+
+from pyiem.database import get_dbconn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.network import Table as NetworkTable
+from pyiem.webutil import iemapp
+
+
+def m(val):
+ """Helper"""
+ if val is None:
+ return "M"
+ return val
+
+
+def fetcher(station, sts, ets):
+ """Do fetching"""
+ sio = StringIO()
+ dbconn = get_dbconn("raob")
+ cursor = dbconn.cursor("raobstreamer")
+ stations = [station]
+ if station.startswith("_"):
+ nt = NetworkTable("RAOB", only_online=False)
+ stations = nt.sts[station]["name"].split("--")[1].strip().split(",")
+
+ cursor.execute(
+ """
+ SELECT f.valid at time zone 'UTC', p.levelcode, p.pressure, p.height,
+ p.tmpc, p.dwpc, p.drct, round((p.smps * 1.94384)::numeric,0),
+ p.bearing, p.range_miles, f.station from
+ raob_profile p JOIN raob_flights f on
+ (f.fid = p.fid) WHERE f.station = ANY(%s) and valid >= %s and valid < %s
+ """,
+ (stations, sts, ets),
+ )
+ sio.write(
+ (
+ "station,validUTC,levelcode,pressure_mb,height_m,tmpc,"
+ "dwpc,drct,speed_kts,bearing,range_sm\n"
+ )
+ )
+ for row in cursor:
+ sio.write(
+ ("%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s\n")
+ % (
+ row[10],
+ m(row[0]),
+ m(row[1]),
+ m(row[2]),
+ m(row[3]),
+ m(row[4]),
+ m(row[5]),
+ m(row[6]),
+ m(row[7]),
+ m(row[8]),
+ m(row[9]),
+ )
+ )
+ return sio.getvalue().encode("ascii", "ignore")
+
+
+def friendly_date(form, key):
+ """More forgiving date conversion"""
+ val = form.get(key)
+ try:
+ val = val.strip()
+ if len(val.split()) == 1:
+ dt = datetime.datetime.strptime(val, "%m/%d/%Y")
+ else:
+ dt = datetime.datetime.strptime(val, "%m/%d/%Y %H:%M")
+ dt = dt.replace(tzinfo=ZoneInfo("UTC"))
+ except Exception:
+ return (
+ f"Invalid {key} date provided, should be '%m/%d/%Y %H:%M'"
+ " in UTC timezone"
+ )
+ return dt
+
+
+@iemapp(help=__doc__)
+def application(environ, start_response):
+ """Go Main Go"""
+ if "sts" not in environ:
+ raise IncompleteWebRequest("GET parameter sts= missing")
+ sts = friendly_date(environ, "sts")
+ ets = friendly_date(environ, "ets")
+ for val in [sts, ets]:
+ if not isinstance(val, datetime.datetime):
+ headers = [("Content-type", "text/plain")]
+ start_response("500 Internal Server Error", headers)
+ return [val.encode("ascii")]
+
+ station = environ.get("station", "KOAX")[:4]
+ if environ.get("dl", None) is not None:
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ (
+ "Content-Disposition",
+ "attachment; "
+ f"filename={station}_{sts:%Y%m%d%H}_{ets:%Y%m%d%H}.txt",
+ ),
+ ]
+ else:
+ headers = [("Content-type", "text/plain")]
+ start_response("200 OK", headers)
+ return [fetcher(station, sts, ets)]
diff --git a/pylib/iemweb/request/raster2netcdf.py b/pylib/iemweb/request/raster2netcdf.py
new file mode 100644
index 0000000000..4e4696afad
--- /dev/null
+++ b/pylib/iemweb/request/raster2netcdf.py
@@ -0,0 +1,145 @@
+""".. title:: Raster to NetCDF Data Service
+
+Documentation for /cgi-bin/request/raster2netcdf.py
+---------------------------------------------------
+
+This service converts an IEM raster image into a simple CF-compliant
+netCDF file, using a per-product lookup table to translate the 8-bit
+color indices into physical values.
+
+Parameters:
+
+- `dstr`: UTC timestamp of the raster, in `YYYYMMDDHHMM` form.
+- `prod`: raster product name, validated against the `iemrasters`
+  database table.
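+
+Example Usage
+~~~~~~~~~~~~~
+
+A hypothetical request; the product name here is illustrative, as valid
+names come from the `iemrasters` table:
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/raster2netcdf.py?dstr=201710251200&prod=exampleprod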
+"""
+
+import datetime
+import os
+import tempfile
+from io import BytesIO
+from zoneinfo import ZoneInfo
+
+import netCDF4
+import numpy as np
+from PIL import Image
+from pydantic import Field
+from pyiem.database import get_dbconn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import CGIModel, iemapp
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ dstr: str = Field(
+ "201710251200",
+ description="UTC Datetime to request data for",
+ max_length=12,
+ )
+ prod: str = Field("", description="Product to request", max_length=100)
+
+
+def get_gridinfo(filename, xpoints, ypoints):
+ """Figure out the grid navigation, sigh"""
+ with open(f"{filename[:-4]}.wld", encoding="ascii") as fh:
+ lines = fh.readlines()
+ dx = float(lines[0])
+ dy = float(lines[3])
+ west = float(lines[4])
+ north = float(lines[5])
+ south = north + dy * ypoints
+ lats = np.arange(0, ypoints) * (0 - dy) + south
+ lons = np.arange(0, xpoints) * dx + west
+ return lons, lats
+
+
+def get_table(prod):
+ """Return our lookup table"""
+ pgconn = get_dbconn("mesosite")
+ cursor = pgconn.cursor()
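+    # A 256 entry lookup table maps the 8-bit raster indices to physical
+    # values; entries left at 1e20 match the netCDF fill_value used later.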
+ xref = [1.0e20] * 256
+ cursor.execute(
+ "SELECT id, filename_template, units, cf_long_name "
+ "from iemrasters where name = %s",
+ (prod,),
+ )
+ if cursor.rowcount == 0:
+ raise IncompleteWebRequest("Unknown product")
+ (rid, template, units, long_name) = cursor.fetchone()
+ cursor.execute(
+ """
+ SELECT coloridx, value from iemrasters_lookup
+ WHERE iemraster_id = %s and value is not null
+ ORDER by coloridx ASC
+ """,
+ (rid,),
+ )
+ for row in cursor:
+ xref[row[0]] = row[1]
+ return np.array(xref), template, units, long_name
+
+
+def make_netcdf(xpoints, ypoints, lons, lats):
+ """generate the netcdf file"""
+ tmpobj = tempfile.NamedTemporaryFile(suffix=".nc", delete=False)
+ with netCDF4.Dataset(tmpobj.name, "w") as nc:
+ nc.Conventions = "CF-1.6"
+ nc.createDimension("lat", ypoints)
+ nc.createDimension("lon", xpoints)
+ nclon = nc.createVariable("lon", np.float32, ("lon",))
+ nclon.units = "degree_east"
+ nclon.long_name = "longitude"
+ nclon[:] = lons
+ nclat = nc.createVariable("lat", np.float32, ("lat",))
+ nclat.units = "degree_north"
+ nclat.long_name = "latitude"
+ nclat[:] = lats
+ return tmpobj.name
+
+
+def do_work(valid, prod, start_response):
+ """Our workflow"""
+ # Get lookup table
+ xref, template, units, long_name = get_table(prod)
+ # Get RASTER
+ fn = valid.strftime(template)
+ if not os.path.isfile(fn):
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return b"ERROR: The IEM Archives do not have this file available"
+ raster = np.flipud(np.array(Image.open(fn)))
+ (ypoints, xpoints) = raster.shape
+ # build lat, lon arrays
+ lons, lats = get_gridinfo(fn, xpoints, ypoints)
+ # create netcdf file
+ tmpname = make_netcdf(xpoints, ypoints, lons, lats)
+ with netCDF4.Dataset(tmpname, "a") as nc:
+ # write data
+ ncvar = nc.createVariable(
+ prod, float, ("lat", "lon"), zlib=True, fill_value=1.0e20
+ )
+ ncvar.units = units
+ ncvar.long_name = long_name
+ ncvar.coordinates = "lon lat"
+ # convert RASTER via lookup table
+ ncvar[:] = xref[raster]
+ # send data to user
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-disposition", "attachment; filename=res.nc"),
+ ]
+ start_response("200 OK", headers)
+ bio = BytesIO()
+ with open(tmpname, "rb") as fh:
+ bio.write(fh.read())
+ # remove tmp netcdf file
+ os.unlink(tmpname)
+ return bio.getvalue()
+
+
+@iemapp(help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """Do great things"""
+ dstr = environ["dstr"]
+ prod = environ["prod"]
+ if prod == "":
+ raise IncompleteWebRequest("prod is required")
+ try:
+ valid = datetime.datetime.strptime(dstr, "%Y%m%d%H%M").replace(
+ tzinfo=ZoneInfo("UTC")
+ )
+ except Exception as exp:
+ raise IncompleteWebRequest("dstr not in form %Y%m%d%H%M") from exp
+ return [do_work(valid, prod, start_response)]
diff --git a/pylib/iemweb/request/rwis.py b/pylib/iemweb/request/rwis.py
new file mode 100644
index 0000000000..daa4e4c802
--- /dev/null
+++ b/pylib/iemweb/request/rwis.py
@@ -0,0 +1,99 @@
+"""Download Interface for RWIS data"""
+
+# pylint: disable=abstract-class-instantiated
+from io import BytesIO, StringIO
+
+import pandas as pd
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.network import Table as NetworkTable
+from pyiem.util import get_sqlalchemy_conn
+from pyiem.webutil import ensure_list, iemapp
+from sqlalchemy import text
+
+DELIMITERS = {"comma": ",", "space": " ", "tab": "\t"}
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+
+@iemapp(default_tz="America/Chicago")
+def application(environ, start_response):
+ """Go do something"""
+ include_latlon = environ.get("gis", "no").lower() == "yes"
+ myvars = ensure_list(environ, "vars")
+ myvars.insert(0, "station")
+ myvars.insert(1, "obtime")
+    delimiter = DELIMITERS.get(environ.get("delim", "comma"), ",")
+ what = environ.get("what", "dl")
+ tzname = environ.get("tz", "UTC")
+ src = environ.get("src", "atmos")
+ stations = ensure_list(environ, "stations")
+ if not stations:
+ raise IncompleteWebRequest("Missing GET parameter stations=")
+
+ tbl = "alldata"
+ if src in ["soil", "traffic"]:
+ tbl = f"alldata_{src}"
+ network = environ.get("network", "IA_RWIS")
+ nt = NetworkTable(network, only_online=False)
+ if "_ALL" in stations:
+ stations = list(nt.sts.keys())
+ params = {
+ "tzname": tzname,
+ "ids": stations,
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ }
+ sql = text(
+ f"SELECT *, valid at time zone :tzname as obtime from {tbl} "
+ "WHERE station = ANY(:ids) and valid BETWEEN :sts and :ets "
+ "ORDER by valid ASC"
+ )
+ with get_sqlalchemy_conn("rwis") as conn:
+ df = pd.read_sql(sql, conn, params=params)
+ if df.empty:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return [b"Sorry, no results found for query!"]
+    if include_latlon:
+        myvars.insert(2, "longitude")
+        myvars.insert(3, "latitude")
+        # Attach coordinates from the network metadata table.
+        df["latitude"] = df["station"].map(lambda sid: nt.sts[sid]["lat"])
+        df["longitude"] = df["station"].map(lambda sid: nt.sts[sid]["lon"])
+
+ sio = StringIO()
+ if what in ["txt", "download"]:
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-disposition", "attachment; filename=rwis.txt"),
+ ]
+ start_response("200 OK", headers)
+ df.to_csv(sio, index=False, sep=delimiter, columns=myvars)
+ return [sio.getvalue().encode("ascii")]
+ if what == "html":
+ start_response("200 OK", [("Content-type", "text/html")])
+ df.to_html(sio, columns=myvars)
+ return [sio.getvalue().encode("ascii")]
+ if what == "excel":
+ if len(df.index) >= 1048576:
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return [b"Dataset too large for excel format."]
+ bio = BytesIO()
+ with pd.ExcelWriter(bio) as writer:
+ df.to_excel(writer, sheet_name="Data", index=False, columns=myvars)
+
+ headers = [
+ ("Content-type", EXL),
+ ("Content-disposition", "attachment; Filename=rwis.xlsx"),
+ ]
+ start_response("200 OK", headers)
+ return [bio.getvalue()]
+ start_response("200 OK", [("Content-type", "text/plain")])
+    df.to_csv(
+        sio,
+        index=False,
+        sep=delimiter,
+        columns=df.columns.intersection(myvars),
+    )
+ return [sio.getvalue().encode("ascii")]
diff --git a/pylib/iemweb/request/scan.py b/pylib/iemweb/request/scan.py
new file mode 100644
index 0000000000..46cd4fb628
--- /dev/null
+++ b/pylib/iemweb/request/scan.py
@@ -0,0 +1,55 @@
+"""SCAN download backend."""
+
+from io import StringIO
+
+import pandas as pd
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.util import get_sqlalchemy_conn
+from pyiem.webutil import ensure_list, iemapp
+from sqlalchemy import text
+
+DELIMITERS = {"comma": ",", "space": " ", "tab": "\t"}
+
+
+def get_df(stations, sts, ets):
+ """Get what the database has!"""
+ with get_sqlalchemy_conn("scan") as conn:
+ df = pd.read_sql(
+ text(
+ "select * from alldata where station = ANY(:ids) and "
+ "valid >= :sts and valid < :ets "
+ "order by station asc, valid asc"
+ ),
+ conn,
+ params={"ids": stations, "sts": sts, "ets": ets},
+ )
+ if not df.empty:
+ df["valid"] = df["valid"].dt.strftime("%Y-%m-%d %H:%M")
+ return df
+
+
+@iemapp(default_tz="UTC")
+def application(environ, start_response):
+ """
+ Do something!
+ """
+ if "sts" not in environ:
+ raise IncompleteWebRequest("GET start time parameters missing")
+ stations = ensure_list(environ, "stations")
+ varnames = ensure_list(environ, "vars")
+ varnames.insert(0, "valid")
+ varnames.insert(0, "station")
+ what = environ.get("what", "dl")
+    delimiter = DELIMITERS.get(environ.get("delim", "comma"), ",")
+ df = get_df(stations, environ["sts"], environ["ets"])
+ if what in ["txt", "download"]:
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-disposition", "attachment; filename=scan.txt"),
+ ]
+ else:
+ headers = [("Content-type", "text/plain")]
+ start_response("200 OK", headers)
+ sio = StringIO()
+ df.to_csv(sio, index=False, sep=delimiter, columns=varnames)
+ return [sio.getvalue().encode("ascii")]
diff --git a/pylib/iemweb/request/scp.py b/pylib/iemweb/request/scp.py
new file mode 100644
index 0000000000..575bafc412
--- /dev/null
+++ b/pylib/iemweb/request/scp.py
@@ -0,0 +1,130 @@
+""".. title:: Satellite Cloud Product (SCP) Request
+
+Documentation for /cgi-bin/request/scp.py
+--------------------------------------------
+
+This script is used to request Satellite Cloud Product (SCP) data from the
+IEM's ASOS database.
+
+Examples:
+---------
+
+Download all 2023 data for KBUR
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/scp.py?station=KBUR&sts=2023-01-01T00:00Z&ets=2024-01-01T00:00Z
+
+"""
+
+from io import StringIO
+
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+from sqlalchemy import text
+
+
+class Schema(CGIModel):
+ """Our schema for this request"""
+
+ ets: AwareDatetime = Field(
+ None,
+ description=(
+ "End timestamp with timezone included to request data for."
+ ),
+ )
+ station: ListOrCSVType = Field(
+ None,
+ description=(
+ "Four or Five character station identifier(s) to request data for."
+ ),
+ )
+ sts: AwareDatetime = Field(
+ None,
+ description=(
+ "Start timestamp with timezone included to request data for."
+ ),
+ )
+ year1: int = Field(
+ None,
+ description=(
+ "Year to request data for, this is an alternative to sts/ets."
+ ),
+ )
+ year2: int = Field(
+ None,
+ description=(
+ "Year to request data for, this is an alternative to sts/ets."
+ ),
+ )
+ month1: int = Field(
+ None,
+ description=(
+ "Month to request data for, this is an alternative to sts/ets."
+ ),
+ )
+ month2: int = Field(
+ None,
+ description=(
+ "Month to request data for, this is an alternative to sts/ets."
+ ),
+ )
+ day1: int = Field(
+ None,
+ description=(
+ "Day to request data for, this is an alternative to sts/ets."
+ ),
+ )
+ day2: int = Field(
+ None,
+ description=(
+ "Day to request data for, this is an alternative to sts/ets."
+ ),
+ )
+ hour1: int = Field(0, description="Hour to request data for.")
+ hour2: int = Field(0, description="Hour to request data for.")
+ minute1: int = Field(0, description="Minute to request data for.")
+ minute2: int = Field(0, description="Minute to request data for.")
+
+
+@iemapp(schema=Schema, help=__doc__)
+def application(environ, start_response):
+ """Do Something"""
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("Both start and end time must be provided!")
+ start_response("200 OK", [("Content-type", "text/plain")])
+ slimiter = ""
+ params = {
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ "station": environ["station"],
+ }
+ if environ["station"]:
+        slimiter = "and station = ANY(:station)"
+ sio = StringIO()
+ sio.write("station,utc_valid,mid,high,cldtop1,cldtop2,eca,source\n")
+ with get_sqlalchemy_conn("asos") as conn:
+ res = conn.execute(
+ text(f"""
+ SELECT station, valid at time zone 'UTC' as utc_valid, mid, high,
+ cldtop1, cldtop2, eca, source from scp_alldata
+ WHERE valid >= :sts and valid < :ets and {slimiter}
+ ORDER by valid ASC
+ """),
+ params,
+ )
+ for row in res:
+ sio.write(
+ ("%s,%s,%s,%s,%s,%s,%s,%s\n")
+ % (
+ row[0],
+ row[1].strftime("%Y-%m-%d %H:%M:%S"),
+ row[2],
+ row[3],
+ row[4],
+ row[5],
+ row[6],
+ row[7],
+ )
+ )
+ return [sio.getvalue().encode("ascii", "ignore")]
diff --git a/pylib/iemweb/request/ss.py b/pylib/iemweb/request/ss.py
new file mode 100644
index 0000000000..e9e71a56e6
--- /dev/null
+++ b/pylib/iemweb/request/ss.py
@@ -0,0 +1,165 @@
+"""
+Return a simple CSV of Stuart Smith data.
+
+Column mapping, with example values:
+
+- ch1_data_p: Levelogger Reading (ft), e.g. 7.20473
+- ch2_data_p: Barologger Reading, e.g. 2.68857
+- ch1_data_t: Temp (C), e.g. 21.1
+- ch2_data_t: Barologger Air Temp (C), e.g. 18.19
+- ch1_data_c: Conductivity (micro-S), e.g. 48
+"""
+
+from io import BytesIO
+
+import pandas as pd
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+from sqlalchemy import text
+
+LOOKUP = {
+ 9100104: "SSP #6",
+ 9100135: "SSP #8",
+ 9100131: "SSP #1",
+ 9100156: "SSP #7",
+}
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ excel: bool = Field(description="Return Excel File", default=False)
+ opt: str = Field(description="bubbler or gage", default="gage")
+ station: ListOrCSVType = Field(
+ default=[], description="Station ID to query"
+ )
+ sts: AwareDatetime = Field(description="Start Time", default=None)
+ ets: AwareDatetime = Field(description="End Time", default=None)
+ year1: int = Field(
+ description="Start year, when sts is not set.", default=None
+ )
+ month1: int = Field(
+ description="Start month, when sts is not set.", default=None
+ )
+ day1: int = Field(
+ description="Start day, when sts is not set.", default=None
+ )
+ year2: int = Field(
+ description="End year, when ets is not set.", default=None
+ )
+ month2: int = Field(
+ description="End month, when ets is not set.", default=None
+ )
+ day2: int = Field(
+ description="End day, when ets is not set.", default=None
+ )
+
+
+def gage_run(sts, ets, stations, excel, start_response):
+ """run()"""
+ if not stations:
+        stations = list(LOOKUP.keys())
+
+ sql = text(
+ """select date(valid) as date, to_char(valid, 'HH24:MI:SS') as time,
+ site_serial, ch1_data_p, ch2_data_p,
+ ch1_data_t, ch2_data_t, ch1_data_c
+ from ss_logger_data WHERE valid between :sts and :ets and
+ site_serial = ANY(:stations) ORDER by valid ASC"""
+ )
+ with get_sqlalchemy_conn("other") as conn:
+ df = pd.read_sql(
+ sql, conn, params={"sts": sts, "ets": ets, "stations": stations}
+ )
+ eheaders = [
+ "date",
+ "time",
+ "site_serial",
+ "Levelogger Reading (ft)",
+ "Barologger Reading",
+ "Water Temp (C)",
+ "Barologger Air Temp (C)",
+ "Conductivity (micro-S)",
+ ]
+
+ if excel:
+ headers = [
+ ("Content-type", EXL),
+ ("Content-disposition", "attachment; Filename=stuartsmith.xlsx"),
+ ]
+ start_response("200 OK", headers)
+ bio = BytesIO()
+ df.to_excel(bio, header=eheaders, index=False, engine="openpyxl")
+ return bio.getvalue()
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return df.to_csv(None, index=False).encode("ascii")
+
+
+def bubbler_run(sts, ets, excel, start_response):
+ """run()"""
+ sql = text(
+ """
+ WITH one as (SELECT valid, value from ss_bubbler WHERE
+ valid between :sts and :ets and field = 'Batt Voltage'),
+ two as (SELECT valid, value from ss_bubbler WHERE
+ valid between :sts and :ets and field = 'STAGE'),
+ three as (SELECT valid, value from ss_bubbler WHERE
+ valid between :sts and :ets and field = 'Water Temp')
+
+ SELECT date(coalesce(one.valid, two.valid, three.valid)) as date,
+ to_char(coalesce(one.valid, two.valid, three.valid), 'HH24:MI:SS') as time,
+ one.value as "batt voltage",
+ two.value as "stage",
+ three.value as "water temp"
+ from one FULL OUTER JOIN two on (one.valid = two.valid)
+ FULL OUTER JOIN three on (coalesce(two.valid,one.valid) = three.valid)
+ ORDER by date ASC, time ASC
+ """
+ )
+ with get_sqlalchemy_conn("other") as conn:
+ df = pd.read_sql(sql, conn, params={"sts": sts, "ets": ets})
+ if excel:
+ headers = [
+ ("Content-type", "application/vnd.ms-excel"),
+ ("Content-disposition", "attachment; Filename=stuartsmith.xls"),
+ ]
+ start_response("200 OK", headers)
+ bio = BytesIO()
+ df.to_excel(bio, index=False)
+ return bio.getvalue()
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return df.to_csv(None, index=False).encode("ascii")
+
+
+@iemapp(default_tz="America/Chicago", help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """Go Main Go"""
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("GET start time parameters missing")
+ opt = environ["opt"]
+
+ stations = environ["station"]
+ if opt == "bubbler":
+ return [
+ bubbler_run(
+ environ["sts"],
+ environ["ets"],
+ environ["excel"],
+ start_response,
+ )
+ ]
+ return [
+ gage_run(
+ environ["sts"],
+ environ["ets"],
+ stations,
+ environ["excel"],
+ start_response,
+ )
+ ]
diff --git a/pylib/iemweb/request/taf.py b/pylib/iemweb/request/taf.py
new file mode 100644
index 0000000000..98bc39ac4e
--- /dev/null
+++ b/pylib/iemweb/request/taf.py
@@ -0,0 +1,142 @@
+""".. title:: Terminal Aerodrome Forecast (TAF) Data
+
+Documentation for /cgi-bin/request/taf.py
+-----------------------------------------
+
+This service provides access to Terminal Aerodrome Forecast (TAF) data for
+specified stations and time ranges.
+
+Example Usage
+~~~~~~~~~~~~~
+
+Request all Des Moines TAFs for January 2024 in CSV format:
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/taf.py?station=DSM&sts=2024-01-01T00:00Z&ets=2024-02-01T00:00Z&fmt=csv
+
+Request the past 240 hours of TAF data for Chicago O'Hare in Excel format:
+
+ https://mesonet.agron.iastate.edu/cgi-bin/request/taf.py?station=ORD&hours=240&fmt=excel
+"""
+
+from datetime import timedelta
+from io import BytesIO
+from zoneinfo import ZoneInfo
+
+import pandas as pd
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.util import utc
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+from sqlalchemy import text
+
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+
+class MyModel(CGIModel):
+ """Our model"""
+
+ hours: int = Field(
+ None,
+ description=(
+ "Request data for the time period from now until this many hours "
+ "in the past. Overrides any sts or ets values."
+ ),
+ le=2400,
+ gt=0,
+ )
+ fmt: str = Field(
+ "csv",
+ description="The format of the output file, either 'csv' or 'excel'",
+ )
+ tz: str = Field("UTC", description="The timezone to use for timestamps")
+ sts: AwareDatetime = Field(
+ None, description="The start timestamp for the data"
+ )
+ ets: AwareDatetime = Field(
+ None, description="The end timestamp for the data"
+ )
+ station: ListOrCSVType = Field(
+ ...,
+ description=(
+ "The station(s) to request data for, "
+ "either multi params or comma separated"
+ ),
+ )
+ year1: int = Field(None, description="The start year, if not using sts")
+ month1: int = Field(None, description="The start month, if not using sts")
+ day1: int = Field(None, description="The start day, if not using sts")
+ hour1: int = Field(0, description="The start hour, if not using sts")
+ minute1: int = Field(0, description="The start minute, if not using sts")
+ year2: int = Field(None, description="The end year, if not using ets")
+ month2: int = Field(None, description="The end month, if not using ets")
+ day2: int = Field(None, description="The end day, if not using ets")
+ hour2: int = Field(0, description="The end hour, if not using ets")
+ minute2: int = Field(0, description="The end minute, if not using ets")
+
+
+def run(start_response, environ):
+ """Get data!"""
+ with get_sqlalchemy_conn("asos") as dbconn:
+ df = pd.read_sql(
+ text(
+ """
+ select t.station, t.valid at time zone 'UTC' as valid,
+ f.valid at time zone 'UTC' as fx_valid, raw, is_tempo,
+ end_valid at time zone 'UTC' as fx_valid_end,
+ sknt, drct, gust, visibility,
+ presentwx, skyc, skyl, ws_level, ws_drct, ws_sknt, product_id
+ from taf t JOIN taf_forecast f on (t.id = f.taf_id)
+ WHERE t.station = ANY(:stations) and f.valid >= :sts
+ and f.valid < :ets order by t.valid
+ """
+ ),
+ dbconn,
+ params={
+ "stations": environ["station"],
+ "sts": environ["sts"],
+ "ets": environ["ets"],
+ },
+ parse_dates=["valid", "fx_valid", "fx_valid_end"],
+ )
+    # Timestamps come back naive in UTC; localize them as UTC and then
+    # convert to the requested timezone before formatting.
+    if not df.empty:
+        tzinfo = ZoneInfo(environ["tz"])
+        for col in ["valid", "fx_valid", "fx_valid_end"]:
+            df[col] = (
+                df[col]
+                .dt.tz_localize(ZoneInfo("UTC"))
+                .dt.tz_convert(tzinfo)
+                .dt.strftime("%Y-%m-%d %H:%M")
+            )
+
+ bio = BytesIO()
+ if environ["fmt"] == "excel":
+ with pd.ExcelWriter(bio, engine="openpyxl") as writer:
+ df.to_excel(writer, sheet_name="TAF Data", index=False)
+ headers = [
+ ("Content-type", EXL),
+ ("Content-disposition", "attachment;Filename=taf.xlsx"),
+ ]
+ else:
+ df.to_csv(bio, index=False)
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-disposition", "attachment;Filename=taf.csv"),
+ ]
+ start_response("200 OK", headers)
+ return bio.getvalue()
+
+
+def rect(station):
+ """Cleanup."""
+ station = station.upper()
+ if len(station) == 3:
+ return f"K{station}"
+ return station
+
+
+@iemapp(help=__doc__, schema=MyModel)
+def application(environ, start_response):
+ """Get stuff"""
+ if environ["hours"] is not None:
+ environ["ets"] = utc()
+ environ["sts"] = environ["ets"] - timedelta(hours=environ["hours"])
+ environ["station"] = [rect(x) for x in environ["station"]]
+ return [run(start_response, environ)]
diff --git a/pylib/iemweb/request/talltowers.py b/pylib/iemweb/request/talltowers.py
new file mode 100644
index 0000000000..8c47092541
--- /dev/null
+++ b/pylib/iemweb/request/talltowers.py
@@ -0,0 +1,128 @@
+"""Process talltowers data request."""
+
+import datetime
+from io import BytesIO, StringIO
+from zoneinfo import ZoneInfo
+
+import pandas as pd
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.util import get_dbconn, get_sqlalchemy_conn
+from pyiem.webutil import ensure_list, iemapp
+
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+TOWERIDS = {0: "ETTI4", 1: "MCAI4"}
+
+
+def get_stations(environ):
+ """Figure out the requested station"""
+ stations = ensure_list(environ, "station")
+ towers = []
+ for tid, nwsli in TOWERIDS.items():
+ if nwsli in stations:
+ towers.append(tid)
+
+ return towers
+
+
+def get_time_bounds(form, tzinfo):
+ """Figure out the exact time bounds desired"""
+    try:
+        y1 = int(form.get("year1"))
+        y2 = int(form.get("year2"))
+        m1 = int(form.get("month1"))
+        m2 = int(form.get("month2"))
+        d1 = int(form.get("day1"))
+        d2 = int(form.get("day2"))
+        h1 = int(form.get("hour1"))
+        h2 = int(form.get("hour2"))
+        sts = datetime.datetime(y1, m1, d1, h1, tzinfo=tzinfo)
+        ets = datetime.datetime(y2, m2, d2, h2, tzinfo=tzinfo)
+    except (TypeError, ValueError) as exp:
+        raise IncompleteWebRequest(
+            "Missing or invalid time parameters"
+        ) from exp
+ if ets < sts:
+ sts, ets = ets, sts
+ ets = min([sts + datetime.timedelta(days=32), ets])
+
+ return sts, ets
+
+
+def get_columns(cursor):
+ """What have we here."""
+ cursor.execute(
+ "SELECT column_name FROM information_schema.columns "
+ "WHERE table_schema = 'public' AND table_name = 'data_analog'"
+ )
+ res = [row[0] for row in cursor]
+ return res
+
+
+@iemapp()
+def application(environ, start_response):
+ """Go main Go"""
+ pgconn = get_dbconn("talltowers", user="tt_web")
+ columns = get_columns(pgconn.cursor())
+ tzname = environ.get("tz", "Etc/UTC")
+ tzinfo = ZoneInfo(tzname)
+
+ stations = get_stations(environ)
+ if not stations:
+ raise IncompleteWebRequest("No stations")
+ sts, ets = get_time_bounds(environ, tzinfo)
+ fmt = environ.get("format")
+ # Build out our variable list
+ tokens = []
+ zz = ensure_list(environ, "z")
+ varnames = ensure_list(environ, "var")
+    # Guard against SQL injection: only interpolate whitelisted
+    # aggregate functions (an assumed set) into the statement below.
+    aggs = [
+        agg
+        for agg in ensure_list(environ, "agg")
+        if agg in ("avg", "min", "max")
+    ]
+ for z in zz:
+ for v in varnames:
+ v1 = v
+ v2 = ""
+ if v.find("_") > -1:
+ v1, v2 = v.split("_")
+ v2 = f"_{v2}"
+ colname = f"{v1}_{z}m{v2}"
+ if colname not in columns:
+ continue
+ for agg in aggs:
+ tokens.append(f"{agg}({colname}) as {colname}_{agg}") # noqa
+
+ tw = int(environ.get("window", 1))
+
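+    # Truncate each ob to the hour, then add back the minutes rounded
+    # down to a multiple of the tw-minute window, forming the
+    # aggregation buckets used in the GROUP BY.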
+ sql = f"""
+ SELECT tower,
+ (date_trunc('hour', valid) +
+ (((date_part('minute', valid)::integer / {tw}::integer) * {tw}::integer)
+ || ' minutes')::interval) at time zone %s as ts,
+ {','.join(tokens)} from
+ data_analog where tower = ANY(%s) and valid >= %s and valid < %s
+ GROUP by tower, ts ORDER by tower, ts
+ """
+ with get_sqlalchemy_conn("talltowers", user="tt_web") as conn:
+ df = pd.read_sql(
+ sql,
+ conn,
+ params=(tzname, stations, sts, ets),
+ )
+ df = df.rename(columns={"ts": "valid"})
+ df["tower"] = df["tower"].replace(TOWERIDS)
+ pgconn.close()
+ if fmt in ["tdf", "comma"]:
+ headers = [
+ ("Content-type", "application/octet-stream"),
+ ("Content-disposition", "attachment; filename=talltowers.txt"),
+ ]
+ start_response("200 OK", headers)
+ sio = StringIO()
+ df.to_csv(sio, sep="," if fmt == "comma" else "\t", index=False)
+ return [sio.getvalue().encode("utf8")]
+
+ # Excel
+ bio = BytesIO()
+ # pylint: disable=abstract-class-instantiated
+ with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
+ df.to_excel(writer, sheet_name="Data", index=False)
+ headers = [
+ ("Content-type", EXL),
+ ("Content-disposition", "attachment; Filename=talltowers.xlsx"),
+ ]
+ start_response("200 OK", headers)
+ return [bio.getvalue()]
diff --git a/pylib/iemweb/request/tempwind_aloft.py b/pylib/iemweb/request/tempwind_aloft.py
new file mode 100644
index 0000000000..041c803a41
--- /dev/null
+++ b/pylib/iemweb/request/tempwind_aloft.py
@@ -0,0 +1,149 @@
+""".. title:: Temperature and Wind Aloft Data Service
+
+Documentation for /cgi-bin/request/tempwind_aloft.py
+----------------------------------------------------
+
+This service emits processed data from a temperature and winds aloft product.
+
+Example Usage
+~~~~~~~~~~~~~
+
+Request all data for `KDSM` for 2023.
+
+https://mesonet.agron.iastate.edu/cgi-bin/request/tempwind_aloft.py?station=KDSM&sts=2023-01-01T00:00Z&ets=2024-01-01T00:00Z
+
+"""
+
+from io import BytesIO, StringIO
+
+import pandas as pd
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+from sqlalchemy import text
+
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+
+class Schema(CGIModel):
+ """See how we are called."""
+
+ ets: AwareDatetime = Field(
+ None,
+ description="The end time of the data request",
+ )
+ format: str = Field(
+ "csv",
+ description="The format of the output (csv json or excel)",
+ pattern="^(csv|json|excel)$",
+ )
+ na: str = Field(
+ "M",
+ description="The value to use for missing data",
+ pattern="^(M|None|blank)$",
+ )
+ sts: AwareDatetime = Field(
+ None,
+ description="The start time of the data request",
+ )
+ station: ListOrCSVType = Field(
+ ...,
+ description="The station identifier(s) to request data for",
+ )
+ tz: str = Field(
+ "UTC",
+ description=(
+ "The timezone to use for timestamps in request and response, it "
+ "should be something recognized by the pytz library."
+ ),
+ )
+ year1: int = Field(
+ None,
+ description="The year for the start time, if sts is not provided",
+ )
+ year2: int = Field(
+ None,
+ description="The year for the end time, if ets is not provided",
+ )
+ month1: int = Field(
+ None,
+ description="The month for the start time, if sts is not provided",
+ )
+ month2: int = Field(
+ None,
+ description="The month for the end time, if ets is not provided",
+ )
+ day1: int = Field(
+ None,
+ description="The day for the start time, if sts is not provided",
+ )
+ day2: int = Field(
+ None,
+ description="The day for the end time, if ets is not provided",
+ )
+
+
+def get_data(stations, sts, ets, tz, na, fmt):
+ """Go fetch data please"""
+ with get_sqlalchemy_conn("asos") as conn:
+ df = pd.read_sql(
+ text(
+ """
+ SELECT *,
+ to_char(obtime at time zone :tz, 'YYYY/MM/DD HH24:MI')
+ as obtime2,
+ to_char(ftime at time zone :tz, 'YYYY/MM/DD HH24:MI')
+ as ftime2
+ from alldata_tempwind_aloft WHERE ftime >= :sts and
+ ftime <= :ets and station = ANY(:stations) ORDER by obtime, ftime
+ """
+ ),
+ conn,
+ params={"sts": sts, "ets": ets, "stations": stations, "tz": tz},
+ )
+ df = df.drop(columns=["obtime", "ftime"]).rename(
+ columns={"obtime2": "obtime", "ftime2": "ftime"}
+ )
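+    # Move obtime and ftime to the front of the column listing.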
+ cols = df.columns.values.tolist()
+ cols.remove("ftime")
+ cols.remove("obtime")
+ cols.insert(1, "obtime")
+ cols.insert(2, "ftime")
+ df = df[cols].dropna(axis=1, how="all")
+ if na != "blank":
+ df = df.fillna(na)
+ if fmt == "json":
+ return df.to_json(orient="records")
+ if fmt == "excel":
+ bio = BytesIO()
+ with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
+ df.to_excel(writer, sheet_name="Data", index=False)
+ return bio.getvalue()
+
+ sio = StringIO()
+ df.to_csv(sio, index=False)
+ return sio.getvalue()
+
+
+@iemapp(help=__doc__, schema=Schema)
+def application(environ, start_response):
+ """See how we are called"""
+
+ fmt = environ["format"]
+ tz = environ["tz"]
+ stations = environ["station"]
+ na = environ["na"]
+ if fmt != "excel":
+ start_response("200 OK", [("Content-type", "text/plain")])
+ return [
+ get_data(
+ stations, environ["sts"], environ["ets"], tz, na, fmt
+ ).encode("ascii")
+ ]
+ lll = "stations" if len(stations) > 1 else stations[0]
+ headers = [
+ ("Content-type", EXL),
+ ("Content-disposition", f"attachment; Filename={lll}.xlsx"),
+ ]
+ start_response("200 OK", headers)
+ return [get_data(stations, environ["sts"], environ["ets"], tz, na, fmt)]
diff --git a/pylib/iemweb/request/wmo_bufr_srf.py b/pylib/iemweb/request/wmo_bufr_srf.py
new file mode 100644
index 0000000000..ff4c5f890d
--- /dev/null
+++ b/pylib/iemweb/request/wmo_bufr_srf.py
@@ -0,0 +1,125 @@
+""".. title:: WMO BUFR Surface Data
+
+Documentation for /cgi-bin/request/wmo_bufr_srf.py
+--------------------------------------------------
+
+This application provides access to the IEM's processed archives of
+stations reporting via WMO BUFR surface observation messages.
+
+"""
+
+from io import BytesIO, StringIO
+
+import pandas as pd
+from pydantic import AwareDatetime, Field
+from pyiem.database import get_sqlalchemy_conn
+from pyiem.exceptions import IncompleteWebRequest
+from pyiem.webutil import CGIModel, ListOrCSVType, iemapp
+from sqlalchemy import text
+
+EXL = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+
+
+class MyModel(CGIModel):
+ """Our model"""
+
+ format: str = Field(
+ "csv",
+ description="The format of the data response. csv, json, or excel",
+ pattern=r"^(csv|json|excel)$",
+ )
+ ets: AwareDatetime = Field(
+ None,
+ description="The end time for the data request",
+ )
+ stations: ListOrCSVType = Field(..., description="The station identifiers")
+ sts: AwareDatetime = Field(
+ None,
+ description="The start time for the data request",
+ )
+ year1: int = Field(
+ None,
+ description="The start year for the data request, when sts is not set",
+ )
+ month1: int = Field(
+ None,
+ description=(
+ "The start month for the data request, when sts is not set"
+ ),
+ )
+ day1: int = Field(
+ None,
+ description="The start day for the data request, when sts is not set",
+ )
+ hour1: int = Field(
+ None,
+ description="The start hour for the data request, when sts is not set",
+ )
+ year2: int = Field(
+ None,
+ description="The end year for the data request, when ets is not set",
+ )
+ month2: int = Field(
+ None,
+ description="The end month for the data request, when ets is not set",
+ )
+ day2: int = Field(
+ None,
+ description="The end day for the data request, when ets is not set",
+ )
+ hour2: int = Field(
+ None,
+ description="The end hour for the data request, when ets is not set",
+ )
+
+
+def get_data(sts, ets, stations, fmt):
+ """Go fetch data please"""
+ with get_sqlalchemy_conn("other") as conn:
+ df = pd.read_sql(
+ text(
+ """
+ select
+ valid at time zone 'UTC' as utc_valid, * from alldata
+ WHERE station = ANY(:stations)
+ and valid >= :sts and valid <= :ets
+ ORDER by valid, station ASC"""
+ ),
+ conn,
+ params={
+ "sts": sts,
+ "ets": ets,
+ "stations": stations,
+ },
+ )
+ df = df.drop(columns=["valid"])
+ if fmt == "json":
+ return df.to_json(orient="records")
+ if fmt == "excel":
+ bio = BytesIO()
+ # pylint: disable=abstract-class-instantiated
+ with pd.ExcelWriter(bio, engine="xlsxwriter") as writer:
+ df.to_excel(writer, sheet_name="Data", index=False)
+ return bio.getvalue()
+
+ sio = StringIO()
+ df.to_csv(sio, index=False)
+ return sio.getvalue()
+
+
+@iemapp(help=__doc__, schema=MyModel, default_tz="UTC")
+def application(environ, start_response):
+ """See how we are called"""
+ if environ["sts"] is None or environ["ets"] is None:
+ raise IncompleteWebRequest("Missing sts and/or ets")
+ stations = environ["stations"]
+ fmt = environ["format"]
+ if fmt != "excel":
+ start_response("200 OK", [("Content-type", "text/plain")])
+        return [
+            get_data(environ["sts"], environ["ets"], stations, fmt).encode(
+                "ascii"
+            )
+        ]
+ headers = [
+ ("Content-type", EXL),
+ ("Content-disposition", "attachment; Filename=mos.xlsx"),
+ ]
+ start_response("200 OK", headers)
+ return [get_data(environ["sts"], environ["ets"], stations, fmt)]
diff --git a/tests/iemweb/autoplot/urllist.txt b/tests/iemweb/autoplot/urllist.txt
index f2a473b3a7..441471809e 100644
--- a/tests/iemweb/autoplot/urllist.txt
+++ b/tests/iemweb/autoplot/urllist.txt
@@ -1,2 +1,3 @@
/plotting/auto/plot/109/by:state::sdate:2024-01-01%200000::edate:2024-07-17%202359::var:days::w:set::phenomenav1:TO::significancev1:W::e:all::cmap:jet::_r:t::dpi:100.png
/plotting/auto/plot/194/csector:midwest::sdate:2020-05-01::edate:2024-07-17::d:0::w:percent::cmap:plasma::_r:t::dpi:100.png
+/plotting/auto/plot/216/network:MA_ASOS::station:BOS::var:max_dwpf::dir:above::threshold:70::_r:43::dpi:100.png