diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/404.html b/404.html new file mode 100644 index 00000000..24e1443f --- /dev/null +++ b/404.html @@ -0,0 +1,1464 @@ + + + + + + + + + + + + + + + + + + + + + rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ +

404 - Not found

+ +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/advanced/benchmark.html b/advanced/benchmark.html new file mode 100644 index 00000000..1debf18b --- /dev/null +++ b/advanced/benchmark.html @@ -0,0 +1,292 @@ + + + + + + + Benchmarks + + + + +
+ + + + + + + diff --git a/advanced/custom_readers/custom_readers.md b/advanced/custom_readers/custom_readers.md new file mode 100644 index 00000000..9e9b4a7a --- /dev/null +++ b/advanced/custom_readers/custom_readers.md @@ -0,0 +1,352 @@ + +`rio-tiler` provides multiple [abstract base +classes](https://docs.python.org/3.7/library/abc.html) from which it derives its +main readers: [`Reader`](../readers.md#reader) and +[`STACReader`](../readers.md#stacreader). You can also use these classes to build +custom readers. + +## Abstract Base Classes + +### **BaseReader** + +Main `rio_tiler.io` Abstract Base Class. + +##### Minimal Arguments + +- **input**: Input +- **tms**: The TileMatrixSet define which default projection and map grid the reader uses. Defaults to WebMercatorQuad. + +- **bounds**: Dataset's bounding box. Not in the `__init__` method. +- **crs**: dataset's crs. Not in the `__init__` method. +- **geographic_crs**: CRS to use as geographic coordinate system. Defaults to WGS84. Not in the `__init__` method. + +!!! important + BaseClass Arguments outside the `__init__` method and without default value **HAVE TO** be set in the `__attrs_post_init__` step. + +#### Methods + +- **tile_exists**: Check if a given tile (for the input TMS) intersect the dataset bounds. + +##### Properties + +- **geographic_bounds**: dataset's bounds in WGS84 crs (calculated from `self.bounds` and `self.crs`). + +##### Abstract Methods + +Abstract methods, are method that **HAVE TO** be implemented in the child class. 
+ +- **info**: returns dataset info (`rio_tiler.models.Info`) +- **statistics**: returns dataset band statistics (`Dict[str, rio_tiler.models.BandStatistics]`) +- **tile**: reads data for a specific XYZ slippy map indexes (`rio_tiler.models.ImageData`) +- **part**: reads specific part of a dataset (`rio_tiler.models.ImageData`) +- **preview**: creates an overview of a dataset (`rio_tiler.models.ImageData`) +- **point**: reads pixel value for a specific point (`List`) +- **feature**: reads data for a geojson feature (`rio_tiler.models.ImageData`) + +Example: [`Reader`](../readers.md#reader) + +### **MultiBaseReader** + +The goal of the `MultiBaseReader` is to enable joining results from multiple files (e.g STAC). + +The `MultiBaseReader` has the same attributes/properties/methods as the `BaseReader`. + +Example: [`STACReader`](../readers.md#stacreader) + +```python +import os +import pathlib +from typing import Dict, Type + +import attr +from morecantile import TileMatrixSet +from rio_tiler.io.base import MultiBaseReader +from rio_tiler.io import Reader, BaseReader +from rio_tiler.constants import WEB_MERCATOR_TMS +from rio_tiler.models import Info + +@attr.s +class AssetFileReader(MultiBaseReader): + + input: str = attr.ib() + prefix: str = attr.ib() # we add a custom attribute + + # because we add another attribute (prefix) we need to + # re-specify the other attribute for the class + reader: Type[BaseReader] = attr.ib(default=Reader) + reader_options: Dict = attr.ib(factory=dict) + tms: TileMatrixSet = attr.ib(default=WEB_MERCATOR_TMS) + + # we place min/max zoom in __init__ + minzoom: int = attr.ib(default=None) + maxzoom: int = attr.ib(default=None) + + def __attrs_post_init__(self): + """Parse Sceneid and get grid bounds.""" + self.assets = sorted( + [p.stem.split("_")[1] for p in pathlib.Path(self.input).glob(f"*{self.prefix}*.tif")] + ) + with self.reader(self._get_asset_url(self.assets[0])) as cog: + self.bounds = cog.bounds + self.crs = cog.crs + + if 
self.minzoom is None: + self.minzoom = cog.minzoom + + if self.maxzoom is None: + self.maxzoom = cog.maxzoom + + def _get_asset_url(self, band: str) -> str: + """Validate band's name and return band's url.""" + return os.path.join(self.input, f"{self.prefix}{band}.tif") + +# we have a directoty with "scene_b1.tif", "scene_b2.tif" +with AssetFileReader(input="my_dir/", prefix="scene_") as cr: + print(cr.assets) + >>> ['band1', 'band2'] + + info = cr.info(assets=("band1", "band2")) + # MultiBaseReader returns a Dict + assert isinstance(info, dict) + print(list(info)) + >>> ['band1', 'band2'] + + assert isinstance(info["band1"], Info) + print(info["band1"].model_dump_json(exclude_none=True)) + >>> { + 'bounds': [-11.979244865430259, 24.296321392464325, -10.874546803397614, 25.304623891542263], + 'minzoom': 7, + 'maxzoom': 9, + 'band_metadata': [('b1', {})], + 'band_descriptions': [('b1', '')], + 'dtype': 'uint16', + 'nodata_type': 'Nodata', + 'colorinterp': ['gray'] + } + img = cr.tile(238, 218, 9, assets=("band1", "band2")) + + print(img.assets) + >>> ['my_dir/scene_band1.tif', 'my_dir/scene_band2.tif'] + + # Each assets have 1 bands, so when combining each img we get a (2, 256, 256) array. + print(img.data.shape) + >>> (2, 256, 256) +``` + +### **MultiBandsReader** + +Almost as the previous `MultiBaseReader`, the `MultiBandsReader` children will merge results extracted from different file but taking each file as individual bands. + +The `MultiBaseReader` has the same attributes/properties/methods as the `BaseReader`. 
+ +Example + +```python +import os +import pathlib +from typing import Dict, Type + +import attr +from morecantile import TileMatrixSet +from rio_tiler.io.base import MultiBandReader +from rio_tiler.io import COGReader, BaseReader +from rio_tiler.constants import WEB_MERCATOR_TMS + +@attr.s +class BandFileReader(MultiBandReader): + + input: str = attr.ib() + prefix: str = attr.ib() # we add a custom attribute + + # because we add another attribute (prefix) we need to + # re-specify the other attribute for the class + reader: Type[BaseReader] = attr.ib(default=COGReader) + reader_options: Dict = attr.ib(factory=dict) + tms: TileMatrixSet = attr.ib(default=WEB_MERCATOR_TMS) + + # we place min/max zoom in __init__ + minzoom: int = attr.ib(default=None) + maxzoom: int = attr.ib(default=None) + + def __attrs_post_init__(self): + """Parse Sceneid and get grid bounds.""" + self.bands = sorted( + [p.stem.split("_")[1] for p in pathlib.Path(self.input).glob(f"*{self.prefix}*.tif")] + ) + with self.reader(self._get_band_url(self.bands[0])) as cog: + self.bounds = cog.bounds + self.crs = cog.crs + + if self.minzoom is None: + self.minzoom = cog.minzoom + + if self.maxzoom is None: + self.maxzoom = cog.maxzoom + + def _get_band_url(self, band: str) -> str: + """Validate band's name and return band's url.""" + return os.path.join(self.input, f"{self.prefix}{band}.tif") + + +# we have a directoty with "scene_b1.tif", "scene_b2.tif" +with BandFileReader(input="my_dir/", prefix="scene_") as cr: + print(cr.bands) + >>> ['band1', 'band2'] + + print(cr.info(bands=("band1", "band2")).model_dump_json(exclude_none=True)) + >>> { + 'bounds': [-11.979244865430259, 24.296321392464325, -10.874546803397614, 25.304623891542263], + 'minzoom': 7, + 'maxzoom': 9, + 'band_metadata': [('band1', {}), ('band2', {})], + 'band_descriptions': [('band1', ''), ('band2', '')], + 'dtype': 'uint16', + 'nodata_type': 'Nodata', + 'colorinterp': ['gray', 'gray'] + } + + img = cr.tile(238, 218, 9, 
bands=("band1", "band2")) + + print(img.assets) + >>> ['my_dir/scene_band1.tif', 'my_dir/scene_band2.tif'] + + print(img.data.shape) + >>> (2, 256, 256) +``` + +Note: [`rio-tiler-pds`][rio-tiler-pds] readers are built using the `MultiBandReader` base class. + +[rio-tiler-pds]: https://github.com/cogeotiff/rio-tiler-pds + + +## Custom Reader subclass + +The example :point_down: was created as a response to https://github.com/developmentseed/titiler/discussions/235. In short, the user needed a way to keep metadata information from an asset within a STAC item. + +Sadly when we are using the STAC Reader we only keep the metadata about the item but not the assets metadata (because we built the STAC Reader with the idea that user might first want to merge assets together). + +But rio-tiler has been designed to be easily customizable. + +```python +import attr +from rasterio.io import DatasetReader +from rio_tiler.io.stac import fetch, _to_pystac_item +from rio_tiler.io import Reader +import pystac + +@attr.s +class CustomSTACReader(Reader): + """Custom Reader support.""" + + # This will keep the STAC item info within the instance + item: pystac.Item = attr.ib(default=None, init=False) + + def __attrs_post_init__(self): + """Define _kwargs, open dataset and get info.""" + # get STAC item URL and asset name + asset = self.input.split(":")[-1] + stac_url = self.input.replace(f":{asset}", "") + + # Fetch the STAC item + self.item = pystac.Item.from_dict(fetch(stac_url), stac_url) + + # Get asset url from the STAC Item + self.input = self.item.assets[asset].get_absolute_href() + super().__attrs_post_init__() + +with CustomSTACReader("https://canada-spot-ortho.s3.amazonaws.com/canada_spot_orthoimages/canada_spot5_orthoimages/S5_2007/S5_11055_6057_20070622/S5_11055_6057_20070622.json:pan") as cog: + print(type(cog.dataset)) + print(cog.input) + print(cog.nodata) + print(cog.bounds) + +>>> rasterio.io.DatasetReader +>>> 
"https://canada-spot-ortho.s3.amazonaws.com/canada_spot_orthoimages/canada_spot5_orthoimages/S5_2007/S5_11055_6057_20070622/s5_11055_6057_20070622_p10_1_lcc00_cog.tif" +>>> 0 +>>> (-869900.0, 1370200.0, -786360.0, 1453180.0) +``` + +In this `CustomSTACReader`, we are using a custom path `schema` in form of `{item-url}:{asset-name}`. When creating an instance of `CustomSTACReader`, we will do the following: + +1. Parse the input path to get the STAC url and asset name +2. Fetch and parse the STAC item +3. Construct a new `input` using the asset full url. +4. Fall back to the regular `Reader` initialization (using `super().__attrs_post_init__()`) + + +## Simple Reader + + +```python +from typing import Any, Dict + +import attr +import rasterio +from rasterio.io import DatasetReader +from rio_tiler.io import BaseReader +from rio_tiler.models import BandStatistics, Info, ImageData +from morecantile import TileMatrixSet + +from rio_tiler.constants import BBox, WEB_MERCATOR_TMS + +@attr.s +class SimpleReader(BaseReader): + + input: DatasetReader = attr.ib() + + # We force tms to be outside the class __init__ + tms: TileMatrixSet = attr.ib(init=False, default=WEB_MERCATOR_TMS) + + # We overwrite the abstract base class attribute definition and set default + minzoom: int = attr.ib(init=False, default=WEB_MERCATOR_TMS.minzoom) + maxzoom: int = attr.ib(init=False, default=WEB_MERCATOR_TMS.maxzoom) + + def __attrs_post_init__(self): + # Set bounds and crs variable + self.bounds = self.input.bounds + self.crs = self.input.crs + + # implement all mandatory methods + def info(self) -> Info: + raise NotImplemented + + def statistics(self, **kwargs: Any) -> Dict[str, BandStatistics]: + raise NotImplemented + + def part(self, bbox: BBox, **kwargs: Any) -> ImageData: + raise NotImplemented + + def preview(self, **kwargs: Any) -> ImageData: + raise NotImplemented + + def point(self, lon: float, lat: float, **kwargs: Any) -> List: + raise NotImplemented + + def feature(self, shape: 
Dict, **kwargs: Any) -> ImageData: + raise NotImplemented + + def tile(self, tile_x: int, tile_y: int, tile_z: int, **kwargs: Any) -> ImageData: + if not self.tile_exists(tile_x, tile_y, tile_z): + raise TileOutsideBounds( + f"Tile {tile_z}/{tile_x}/{tile_y} is outside bounds" + ) + + tile_bounds = self.tms.xy_bounds(Tile(x=tile_x, y=tile_y, z=tile_z)) + + data, mask = reader.part( + self.input, + tile_bounds, + width=256, + height=256, + bounds_crs=tms.rasterio_crs, + dst_crs=tms.rasterio_crs, + **kwargs, + ) + return ImageData( + data, mask, bounds=tile_bounds, crs=tms.rasterio_crs + ) + +with rasterio.open("file.tif") as src: + with SimpleReader(src) as cog: + img = cog.tile(1, 1, 1) +``` diff --git a/advanced/custom_readers/index.html b/advanced/custom_readers/index.html new file mode 100644 index 00000000..9abff1dd --- /dev/null +++ b/advanced/custom_readers/index.html @@ -0,0 +1,2036 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + Base classes and custom readers - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Base classes and custom readers

+ +

rio-tiler provides multiple abstract base +classes from which it derives its +main readers: Reader and +STACReader. You can also use these classes to build +custom readers.

+

Abstract Base Classes

+

BaseReader

+

Main rio_tiler.io Abstract Base Class.

+
Minimal Arguments
+
    +
  • input: Input
  • +
  • +

    tms: The TileMatrixSet define which default projection and map grid the reader uses. Defaults to WebMercatorQuad.

    +
  • +
  • +

    bounds: Dataset's bounding box. Not in the __init__ method.

    +
  • +
  • crs: dataset's crs. Not in the __init__ method.
  • +
  • geographic_crs: CRS to use as geographic coordinate system. Defaults to WGS84. Not in the __init__ method.
  • +
+
+

Important

+

BaseClass Arguments outside the __init__ method and without default value HAVE TO be set in the __attrs_post_init__ step.

+
+

Methods

+
    +
  • tile_exists: Check if a given tile (for the input TMS) intersect the dataset bounds.
  • +
+
Properties
+
    +
  • geographic_bounds: dataset's bounds in WGS84 crs (calculated from self.bounds and self.crs).
  • +
+
Abstract Methods
+

Abstract methods are methods that HAVE TO be implemented in the child class.

+
    +
  • info: returns dataset info (rio_tiler.models.Info)
  • +
  • statistics: returns dataset band statistics (Dict[str, rio_tiler.models.BandStatistics])
  • +
  • tile: reads data for a specific XYZ slippy map indexes (rio_tiler.models.ImageData)
  • +
  • part: reads specific part of a dataset (rio_tiler.models.ImageData)
  • +
  • preview: creates an overview of a dataset (rio_tiler.models.ImageData)
  • +
  • point: reads pixel value for a specific point (List)
  • +
  • feature: reads data for a geojson feature (rio_tiler.models.ImageData)
  • +
+

Example: Reader

+

MultiBaseReader

+

The goal of the MultiBaseReader is to enable joining results from multiple files (e.g STAC).

+

The MultiBaseReader has the same attributes/properties/methods as the BaseReader.

+

Example: STACReader

+
import os
+import pathlib
+from typing import Dict, Type
+
+import attr
+from morecantile import TileMatrixSet
+from rio_tiler.io.base import MultiBaseReader
+from rio_tiler.io import Reader, BaseReader
+from rio_tiler.constants import WEB_MERCATOR_TMS
+from rio_tiler.models import Info
+
+@attr.s
+class AssetFileReader(MultiBaseReader):
+
+    input: str = attr.ib()
+    prefix: str = attr.ib() # we add a custom attribute
+
+    # because we add another attribute (prefix) we need to
+    # re-specify the other attribute for the class
+    reader: Type[BaseReader] = attr.ib(default=Reader)
+    reader_options: Dict = attr.ib(factory=dict)
+    tms: TileMatrixSet = attr.ib(default=WEB_MERCATOR_TMS)
+
+    # we place min/max zoom in __init__
+    minzoom: int = attr.ib(default=None)
+    maxzoom: int = attr.ib(default=None)
+
+    def __attrs_post_init__(self):
+        """Parse Sceneid and get grid bounds."""
+        self.assets = sorted(
+            [p.stem.split("_")[1] for p in pathlib.Path(self.input).glob(f"*{self.prefix}*.tif")]
+        )
+        with self.reader(self._get_asset_url(self.assets[0])) as cog:
+            self.bounds = cog.bounds
+            self.crs = cog.crs
+
+            if self.minzoom is None:
+                self.minzoom = cog.minzoom
+
+            if self.maxzoom is None:
+                self.maxzoom = cog.maxzoom
+
+    def _get_asset_url(self, band: str) -> str:
+        """Validate band's name and return band's url."""
+        return os.path.join(self.input, f"{self.prefix}{band}.tif")
+
+# we have a directory with "scene_b1.tif", "scene_b2.tif"
+with AssetFileReader(input="my_dir/", prefix="scene_") as cr:
+    print(cr.assets)
+    >>> ['band1', 'band2']
+
+    info = cr.info(assets=("band1", "band2"))
+    # MultiBaseReader returns a Dict
+    assert isinstance(info, dict)
+    print(list(info))
+    >>> ['band1', 'band2']
+
+    assert isinstance(info["band1"], Info)
+    print(info["band1"].model_dump_json(exclude_none=True))
+    >>> {
+        'bounds': [-11.979244865430259, 24.296321392464325, -10.874546803397614, 25.304623891542263],
+        'minzoom': 7,
+        'maxzoom': 9,
+        'band_metadata': [('b1', {})],
+        'band_descriptions': [('b1', '')],
+        'dtype': 'uint16',
+        'nodata_type': 'Nodata',
+        'colorinterp': ['gray']
+    }
+    img = cr.tile(238, 218, 9, assets=("band1", "band2"))
+
+    print(img.assets)
+    >>> ['my_dir/scene_band1.tif', 'my_dir/scene_band2.tif']
+
+    # Each asset has 1 band, so when combining each img we get a (2, 256, 256) array.
+    print(img.data.shape)
+    >>> (2, 256, 256)
+
+

MultiBandsReader

+

Almost like the previous MultiBaseReader, the MultiBandsReader children will merge results extracted from different files, but taking each file as an individual band.

+

The MultiBandReader has the same attributes/properties/methods as the BaseReader.

+

Example

+
import os
+import pathlib
+from typing import Dict, Type
+
+import attr
+from morecantile import TileMatrixSet
+from rio_tiler.io.base import MultiBandReader
+from rio_tiler.io import COGReader, BaseReader
+from rio_tiler.constants import WEB_MERCATOR_TMS
+
+@attr.s
+class BandFileReader(MultiBandReader):
+
+    input: str = attr.ib()
+    prefix: str = attr.ib() # we add a custom attribute
+
+    # because we add another attribute (prefix) we need to
+    # re-specify the other attribute for the class
+    reader: Type[BaseReader] = attr.ib(default=COGReader)
+    reader_options: Dict = attr.ib(factory=dict)
+    tms: TileMatrixSet = attr.ib(default=WEB_MERCATOR_TMS)
+
+    # we place min/max zoom in __init__
+    minzoom: int = attr.ib(default=None)
+    maxzoom: int = attr.ib(default=None)
+
+    def __attrs_post_init__(self):
+        """Parse Sceneid and get grid bounds."""
+        self.bands = sorted(
+            [p.stem.split("_")[1] for p in pathlib.Path(self.input).glob(f"*{self.prefix}*.tif")]
+        )
+        with self.reader(self._get_band_url(self.bands[0])) as cog:
+            self.bounds = cog.bounds
+            self.crs = cog.crs
+
+            if self.minzoom is None:
+                self.minzoom = cog.minzoom
+
+            if self.maxzoom is None:
+                self.maxzoom = cog.maxzoom
+
+    def _get_band_url(self, band: str) -> str:
+        """Validate band's name and return band's url."""
+        return os.path.join(self.input, f"{self.prefix}{band}.tif")
+
+
+# we have a directory with "scene_b1.tif", "scene_b2.tif"
+with BandFileReader(input="my_dir/", prefix="scene_") as cr:
+    print(cr.bands)
+    >>> ['band1', 'band2']
+
+    print(cr.info(bands=("band1", "band2")).model_dump_json(exclude_none=True))
+    >>> {
+        'bounds': [-11.979244865430259, 24.296321392464325, -10.874546803397614, 25.304623891542263],
+        'minzoom': 7,
+        'maxzoom': 9,
+        'band_metadata': [('band1', {}), ('band2', {})],
+        'band_descriptions': [('band1', ''), ('band2', '')],
+        'dtype': 'uint16',
+        'nodata_type': 'Nodata',
+        'colorinterp': ['gray', 'gray']
+    }
+
+    img = cr.tile(238, 218, 9, bands=("band1", "band2"))
+
+    print(img.assets)
+    >>> ['my_dir/scene_band1.tif', 'my_dir/scene_band2.tif']
+
+    print(img.data.shape)
+    >>> (2, 256, 256)
+
+

Note: rio-tiler-pds readers are built using the MultiBandReader base class.

+

Custom Reader subclass

+

The example 👇 was created as a response to developmentseed/titiler#235. In short, the user needed a way to keep metadata information from an asset within a STAC item.

+

Sadly when we are using the STAC Reader we only keep the metadata about the item but not the assets metadata (because we built the STAC Reader with the idea that user might first want to merge assets together).

+

But rio-tiler has been designed to be easily customizable.

+
import attr
+from rasterio.io import DatasetReader
+from rio_tiler.io.stac import fetch, _to_pystac_item
+from rio_tiler.io import Reader
+import pystac
+
+@attr.s
+class CustomSTACReader(Reader):
+    """Custom Reader support."""
+
+    # This will keep the STAC item info within the instance
+    item: pystac.Item = attr.ib(default=None, init=False)
+
+    def __attrs_post_init__(self):
+        """Define _kwargs, open dataset and get info."""
+        # get STAC item URL and asset name
+        asset = self.input.split(":")[-1]
+        stac_url = self.input.replace(f":{asset}", "")
+
+        # Fetch the STAC item
+        self.item = pystac.Item.from_dict(fetch(stac_url), stac_url)
+
+        # Get asset url from the STAC Item
+        self.input = self.item.assets[asset].get_absolute_href()
+        super().__attrs_post_init__()
+
+with CustomSTACReader("https://canada-spot-ortho.s3.amazonaws.com/canada_spot_orthoimages/canada_spot5_orthoimages/S5_2007/S5_11055_6057_20070622/S5_11055_6057_20070622.json:pan") as cog:
+    print(type(cog.dataset))
+    print(cog.input)
+    print(cog.nodata)
+    print(cog.bounds)
+
+>>> rasterio.io.DatasetReader
+>>> "https://canada-spot-ortho.s3.amazonaws.com/canada_spot_orthoimages/canada_spot5_orthoimages/S5_2007/S5_11055_6057_20070622/s5_11055_6057_20070622_p10_1_lcc00_cog.tif"
+>>> 0
+>>> (-869900.0, 1370200.0, -786360.0, 1453180.0)
+
+

In this CustomSTACReader, we are using a custom path schema in form of {item-url}:{asset-name}. When creating an instance of CustomSTACReader, we will do the following:

+
    +
  1. Parse the input path to get the STAC url and asset name
  2. +
  3. Fetch and parse the STAC item
  4. +
  5. Construct a new input using the asset full url.
  6. +
  7. Fall back to the regular Reader initialization (using super().__attrs_post_init__())
  8. +
+

Simple Reader

+
from typing import Any, Dict
+
+import attr
+import rasterio
+from rasterio.io import DatasetReader
+from rio_tiler.io import BaseReader
+from rio_tiler.models import BandStatistics, Info, ImageData
+from morecantile import TileMatrixSet
+
+from rio_tiler.constants import BBox, WEB_MERCATOR_TMS
+
+@attr.s
+class SimpleReader(BaseReader):
+
+    input: DatasetReader = attr.ib()
+
+    # We force tms to be outside the class __init__
+    tms: TileMatrixSet = attr.ib(init=False, default=WEB_MERCATOR_TMS)
+
+    # We overwrite the abstract base class attribute definition and set default
+    minzoom: int = attr.ib(init=False, default=WEB_MERCATOR_TMS.minzoom)
+    maxzoom: int = attr.ib(init=False, default=WEB_MERCATOR_TMS.maxzoom)
+
+    def __attrs_post_init__(self):
+        # Set bounds and crs variable
+        self.bounds = self.input.bounds
+        self.crs = self.input.crs
+
+    # implement all mandatory methods
+    def info(self) -> Info:
+        raise NotImplemented
+
+    def statistics(self, **kwargs: Any) -> Dict[str, BandStatistics]:
+        raise NotImplemented
+
+    def part(self, bbox: BBox, **kwargs: Any) -> ImageData:
+        raise NotImplemented
+
+    def preview(self, **kwargs: Any) -> ImageData:
+        raise NotImplemented
+
+    def point(self, lon: float, lat: float, **kwargs: Any) -> List:
+        raise NotImplemented
+
+    def feature(self, shape: Dict, **kwargs: Any) -> ImageData:
+        raise NotImplemented
+
+    def tile(self, tile_x: int, tile_y: int, tile_z: int, **kwargs: Any) -> ImageData:
+        if not self.tile_exists(tile_x, tile_y, tile_z):
+            raise TileOutsideBounds(
+                f"Tile {tile_z}/{tile_x}/{tile_y} is outside bounds"
+            )
+
+        tile_bounds = self.tms.xy_bounds(Tile(x=tile_x, y=tile_y, z=tile_z))
+
+        data, mask = reader.part(
+            self.input,
+            tile_bounds,
+            width=256,
+            height=256,
+            bounds_crs=tms.rasterio_crs,
+            dst_crs=tms.rasterio_crs,
+            **kwargs,
+        )
+        return ImageData(
+            data, mask, bounds=tile_bounds, crs=tms.rasterio_crs
+        )
+
+with rasterio.open("file.tif") as src:
+    with SimpleReader(src) as cog:
+        img = cog.tile(1, 1, 1)
+
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/advanced/dynamic_tiler/dynamic_tiler.md b/advanced/dynamic_tiler/dynamic_tiler.md new file mode 100644 index 00000000..e52e648b --- /dev/null +++ b/advanced/dynamic_tiler/dynamic_tiler.md @@ -0,0 +1,104 @@ + +`rio-tiler` aims to be a lightweight plugin for `rasterio` to read [slippy map +tiles](https://en.wikipedia.org/wiki/Tiled_web_map) from a raster sources. + +Given that `rio-tiler` allows for simple, efficient reading of tiles, you can +then leverage `rio-tiler` to create a **dynamic tile server** to display raster +tiles on a web map. + +There are couple tile servers built on top of rio-tiler: + +- [`titiler`](https://github.com/developmentseed/titiler) +- [`rio-viz`](https://github.com/developmentseed/rio-viz) + +## Example Application + +To build a simple dynamic tiling application, we can use +[FastAPI](https://github.com/tiangolo/fastapi). Note that `titiler` uses +`FastAPI` internally, so you might consider using `titiler` instead of making +your own API. 
+ +### Requirements + +- `rio-tiler ~= 4.0` +- `fastapi` +- `uvicorn` + +Install with + +```bash +pip install fastapi uvicorn rio-tiler +``` + +### `app.py` + +```python +"""rio-tiler tile server.""" + +import os + +from fastapi import FastAPI, Query +from starlette.requests import Request +from starlette.responses import Response + +from rio_tiler.profiles import img_profiles +from rio_tiler.io import Reader + + +app = FastAPI( + title="rio-tiler", + description="A lightweight Cloud Optimized GeoTIFF tile server", +) + + +@app.get( + r"/{z}/{x}/{y}.png", + responses={ + 200: { + "content": {"image/png": {}}, "description": "Return an image.", + } + }, + response_class=Response, + description="Read COG and return a tile", +) +def tile( + z: int, + x: int, + y: int, + url: str = Query(..., description="Cloud Optimized GeoTIFF URL."), +): + """Handle tile requests.""" + with Reader(url) as cog: + img = cog.tile(x, y, z) + content = img.render(img_format="PNG", **img_profiles.get("png")) + return Response(content, media_type="image/png") + + +@app.get("/tilejson.json", responses={200: {"description": "Return a tilejson"}}) +def tilejson( + request: Request, + url: str = Query(..., description="Cloud Optimized GeoTIFF URL."), +): + """Return TileJSON document for a COG.""" + tile_url = str(request.url_for("tile", z="{z}", x="{x}", y="{y}")) + tile_url = f"{tile_url}?url={url}" + + with Reader(url) as cog: + return { + "bounds": cog.geographic_bounds, + "minzoom": cog.minzoom, + "maxzoom": cog.maxzoom, + "name": os.path.basename(url), + "tiles": [tile_url], + } +``` + +## Launch Example + +Use `uvicorn` to launch the application. Note that `app:app` tells `uvicorn` to +call the `app` function within `app.py`, so you must be in the same directory as +`app.py`. 
+ +``` +uvicorn app:app --reload +``` diff --git a/advanced/dynamic_tiler/index.html b/advanced/dynamic_tiler/index.html new file mode 100644 index 00000000..e79910b2 --- /dev/null +++ b/advanced/dynamic_tiler/index.html @@ -0,0 +1,1695 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + Create a Dynamic Tiler - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Create a Dynamic Tiler

+ +

rio-tiler aims to be a lightweight plugin for rasterio to read slippy map +tiles from raster sources.

+

Given that rio-tiler allows for simple, efficient reading of tiles, you can +then leverage rio-tiler to create a dynamic tile server to display raster +tiles on a web map.

+

There are a couple of tile servers built on top of rio-tiler:

+ +

Example Application

+

To build a simple dynamic tiling application, we can use +FastAPI. Note that titiler uses +FastAPI internally, so you might consider using titiler instead of making +your own API.

+

Requirements

+
    +
  • rio-tiler ~= 4.0
  • +
  • fastapi
  • +
  • uvicorn
  • +
+

Install with

+
pip install fastapi uvicorn rio-tiler
+
+

app.py

+
"""rio-tiler tile server."""
+
+import os
+
+from fastapi import FastAPI, Query
+from starlette.requests import Request
+from starlette.responses import Response
+
+from rio_tiler.profiles import img_profiles
+from rio_tiler.io import Reader
+
+
+app = FastAPI(
+    title="rio-tiler",
+    description="A lightweight Cloud Optimized GeoTIFF tile server",
+)
+
+
+@app.get(
+    r"/{z}/{x}/{y}.png",
+    responses={
+        200: {
+            "content": {"image/png": {}}, "description": "Return an image.",
+        }
+    },
+    response_class=Response,
+    description="Read COG and return a tile",
+)
+def tile(
+    z: int,
+    x: int,
+    y: int,
+    url: str = Query(..., description="Cloud Optimized GeoTIFF URL."),
+):
+    """Handle tile requests."""
+    with Reader(url) as cog:
+        img = cog.tile(x, y, z)
+    content = img.render(img_format="PNG", **img_profiles.get("png"))
+    return Response(content, media_type="image/png")
+
+
+@app.get("/tilejson.json", responses={200: {"description": "Return a tilejson"}})
+def tilejson(
+    request: Request,
+    url: str = Query(..., description="Cloud Optimized GeoTIFF URL."),
+):
+    """Return TileJSON document for a COG."""
+    tile_url = str(request.url_for("tile", z="{z}", x="{x}", y="{y}"))
+    tile_url = f"{tile_url}?url={url}"
+
+    with Reader(url) as cog:
+        return {
+            "bounds": cog.geographic_bounds,
+            "minzoom": cog.minzoom,
+            "maxzoom": cog.maxzoom,
+            "name": os.path.basename(url),
+            "tiles": [tile_url],
+        }
+
+

Launch Example

+

Use uvicorn to launch the application. Note that app:app tells uvicorn to +call the app function within app.py, so you must be in the same directory as +app.py.

+
uvicorn app:app --reload
+
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/advanced/feature/feature.md b/advanced/feature/feature.md new file mode 100644 index 00000000..54f8e199 --- /dev/null +++ b/advanced/feature/feature.md @@ -0,0 +1,66 @@ +![](https://user-images.githubusercontent.com/10407788/105767632-3f959e80-5f29-11eb-9331-969f3f53111e.png) + +Starting with `rio-tiler` v2, a `.feature()` method exists on `rio-tiler`'s readers (e.g `Reader`) which enables data reading for GeoJSON defined (polygon or multipolygon) shapes. + +```python +from rio_tiler.io import Reader +from rio_tiler.models import ImageData + +with Reader("my-tif.tif") as cog: + # Read data for a given geojson polygon + img: ImageData = cog.feature(geojson_feature, max_size=1024) # we limit the max_size to 1024 +``` + +Under the hood, the `.feature` method uses rasterio's [`rasterize`](https://rasterio.readthedocs.io/en/latest/api/rasterio.features.html#rasterio.features.rasterize) +function and the `.part()` method. The below process is roughly what `.feature` does for you. + +```python +from rasterio.features import rasterize, bounds as featureBounds + +from rio_tiler.io import Reader + +# Use Reader to open and read the dataset +with Reader("my_tif.tif") as cog: + + # Get BBOX of the polygon + bbox = featureBounds(feat) + + # Read part of the data overlapping with the geometry bbox + # assuming that the geometry coordinates are in web mercator + img = cog.part(bbox, bounds_crs=f"EPSG:3857", max_size=1024) + + # Rasterize geometry using the same geotransform parameters + cutline = rasterize( + [feat], + out_shape=(img.height, img.width), + transform=img.transform, + ... 
+ ) + + # Apply geometry mask to imagery + img.array.mask = numpy.where(~cutline, img.array.mask, True) +``` + +Another interesting way to cut features is to use the GDALWarpVRT's `cutline` +option with the .part(), .preview(), or .tile() methods: + +```python +from rio_tiler.utils import create_cutline + +bbox = featureBounds(feat) + +# Use Reader to open and read the dataset +with Reader("my_tif.tif") as cog: + # Create WTT Cutline + cutline = create_cutline(cog.dataset, feat, geometry_crs="epsg:4326") + + # Get a part of the geotiff but use the cutline to mask the data + bbox = featureBounds(feat) + img = cog.part(bbox, vrt_options={'cutline': cutline}) + + # Get a preview of the whole geotiff but use the cutline to mask the data + img = cog.preview(vrt_options={'cutline': cutline}) + + # Read a mercator tile and use the cutline to mask the data + img = cog.tile(1, 1, 1, vrt_options={'cutline': cutline}) +``` diff --git a/advanced/feature/index.html b/advanced/feature/index.html new file mode 100644 index 00000000..a084cd19 --- /dev/null +++ b/advanced/feature/index.html @@ -0,0 +1,1553 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + Read Polygon-shaped regions - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Read Polygon-shaped regions

+ +

+

Starting with rio-tiler v2, a .feature() method exists on rio-tiler's readers (e.g Reader) which enables data reading for GeoJSON defined (polygon or multipolygon) shapes.

+
from rio_tiler.io import Reader
+from rio_tiler.models import ImageData
+
+with Reader("my-tif.tif") as cog:
+    # Read data for a given geojson polygon
+    img: ImageData = cog.feature(geojson_feature, max_size=1024)  # we limit the max_size to 1024
+
+

Under the hood, the .feature method uses rasterio's rasterize +function and the .part() method. The below process is roughly what .feature does for you.

+
from rasterio.features import rasterize, bounds as featureBounds
+
+from rio_tiler.io import Reader
+
+# Use Reader to open and read the dataset
+with Reader("my_tif.tif") as cog:
+
+    # Get BBOX of the polygon
+    bbox = featureBounds(feat)
+
+    # Read part of the data overlapping with the geometry bbox
+    # assuming that the geometry coordinates are in web mercator
+    img = cog.part(bbox, bounds_crs="EPSG:3857", max_size=1024)
+
+    # Rasterize geometry using the same geotransform parameters
+    cutline = rasterize(
+        [feat],
+        out_shape=(img.height, img.width),
+        transform=img.transform,
+        ...
+    )
+
+    # Apply geometry mask to imagery
+    img.array.mask = numpy.where(~cutline, img.array.mask, True)
+
+

Another interesting way to cut features is to use the GDALWarpVRT's cutline +option with the .part(), .preview(), or .tile() methods:

+
from rio_tiler.utils import create_cutline
+
+bbox = featureBounds(feat)
+
+# Use Reader to open and read the dataset
+with Reader("my_tif.tif") as cog:
+    # Create WKT Cutline
+    cutline = create_cutline(cog.dataset, feat, geometry_crs="epsg:4326")
+
+    # Get a part of the geotiff but use the cutline to mask the data
+    bbox = featureBounds(feat)
+    img = cog.part(bbox, vrt_options={'cutline': cutline})
+
+    # Get a preview of the whole geotiff but use the cutline to mask the data
+    img = cog.preview(vrt_options={'cutline': cutline})
+
+    # Read a mercator tile and use the cutline to mask the data
+    img = cog.tile(1, 1, 1, vrt_options={'cutline': cutline})
+
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/advanced/tms/index.html b/advanced/tms/index.html new file mode 100644 index 00000000..b738c431 --- /dev/null +++ b/advanced/tms/index.html @@ -0,0 +1,1539 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + TileMatrixSet - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

TileMatrixSet

+ +

Starting with rio-tiler 2.0, we replaced mercantile with morecantile, enabling support for other TileMatrixSets than the default WebMercator grid.

+
import morecantile
+from rio_tiler.io import Reader
+from rasterio.crs import CRS
+from pyproj import CRS as projCRS
+
+# By default we use WebMercator TMS
+with Reader("my.tif") as cog:
+    img = cog.tile(1, 1, 1)
+    assert img.crs == CRS.from_epsg(3857)  # default image output is the TMS crs (WebMercator)
+
+# Print default grids
+for name in morecantile.tms.list():
+    print(name, "-", morecantile.tms.get(name).rasterio_crs)
+
+>>> CanadianNAD83_LCC - EPSG:3978
+    EuropeanETRS89_LAEAQuad - EPSG:3035
+    LINZAntarticaMapTilegrid - EPSG:5482
+    NZTM2000Quad - EPSG:2193
+    UPSAntarcticWGS84Quad - EPSG:5042
+    UPSArcticWGS84Quad - EPSG:5041
+    UTM31WGS84Quad - EPSG:32631
+    WGS1984Quad - EPSG:4326
+    WebMercatorQuad - EPSG:3857
+    WorldCRS84Quad - OGC:CRS84
+    WorldMercatorWGS84Quad - EPSG:3395
+
+
+# Use EPSG:4326 (WGS84) grid
+wgs84_grid = morecantile.tms.get("WorldCRS84Quad")
+with Reader("my.tif", tms=wgs84_grid) as cog:
+    img = cog.tile(1, 1, 1)
+    assert img.crs == CRS.from_epsg(4326)
+
+# Create Custom grid
+extent = [-948.75, -543592.47, 5817.41, -3333128.95]  # From https://epsg.io/3031
+epsg3031TMS = morecantile.TileMatrixSet.custom(
+    extent, projCRS.from_epsg(3031), identifier="MyCustomTmsEPSG3031"
+)
+with Reader("my.tif", tms=epsg3031TMS) as cog:
+    img = cog.tile(1, 1, 1)
+    assert img.crs == CRS.from_epsg(3031)
+
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/advanced/tms/tms.md b/advanced/tms/tms.md new file mode 100644 index 00000000..d219099d --- /dev/null +++ b/advanced/tms/tms.md @@ -0,0 +1,49 @@ + +Starting with rio-tiler 2.0, we replaced [`mercantile`][mercantile] with [_`morecantile`_][morecantile], enabling support for other [**TileMatrixSets**](http://docs.opengeospatial.org/is/17-083r2/17-083r2.html) than the default WebMercator grid. + +[mercantile]: https://github.com/mapbox/mercantile +[morecantile]: https://github.com/developmentseed/morecantile + +```python +import morecantile +from rio_tiler.io import Reader +from rasterio.crs import CRS +from pyproj import CRS as projCRS + +# By default we use WebMercator TMS +with Reader("my.tif") as cog: + img = cog.tile(1, 1, 1) + assert img.crs == CRS.from_epsg(3857) # default image output is the TMS crs (WebMercator) + +# Print default grids +for name in morecantile.tms.list(): + print(name, "-", morecantile.tms.get(name).rasterio_crs) + +>>> CanadianNAD83_LCC - EPSG:3978 + EuropeanETRS89_LAEAQuad - EPSG:3035 + LINZAntarticaMapTilegrid - EPSG:5482 + NZTM2000Quad - EPSG:2193 + UPSAntarcticWGS84Quad - EPSG:5042 + UPSArcticWGS84Quad - EPSG:5041 + UTM31WGS84Quad - EPSG:32631 + WGS1984Quad - EPSG:4326 + WebMercatorQuad - EPSG:3857 + WorldCRS84Quad - OGC:CRS84 + WorldMercatorWGS84Quad - EPSG:3395 + + +# Use EPSG:4326 (WGS84) grid +wgs84_grid = morecantile.tms.get("WorldCRS84Quad") +with Reader("my.tif", tms=wgs84_grid) as cog: + img = cog.tile(1, 1, 1) + assert img.crs == CRS.from_epsg(4326) + +# Create Custom grid +extent = [-948.75, -543592.47, 5817.41, -3333128.95] # From https:///epsg.io/3031 +epsg3031TMS = morecantile.TileMatrixSet.custom( + extent, projCRS.from_epsg(3031), identifier="MyCustomTmsEPSG3031" +) +with Reader("my.tif", tms=epsg3031TMS) as cog: + img = cog.tile(1, 1, 1) + assert img.crs == CRS.from_epsg(3031) +``` diff --git a/advanced/zonal_stats/index.html b/advanced/zonal_stats/index.html new 
file mode 100644 index 00000000..4fe474e8 --- /dev/null +++ b/advanced/zonal_stats/index.html @@ -0,0 +1,1575 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + Zonal statistics - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Zonal statistics

+ +

rio-tiler's Readers provide simple .statistics method to retrieve dataset statistics (min, max, histogram...). We can easily extend this to create a .zonal_statistics method which will accept input features to get statistics from.

+
import attr
+from typing import Any, Union, Optional, List, Dict
+
+from rio_tiler import io
+from rio_tiler.utils import get_array_statistics
+from rio_tiler.models import BandStatistics
+
+from geojson_pydantic.features import Feature, FeatureCollection
+from geojson_pydantic.geometries import Polygon
+
+class Reader(io.Reader):
+    """Custom Reader with zonal_statistics method."""
+
+    def zonal_statistics(
+            self,
+            geojson: Union[FeatureCollection, Feature],
+            categorical: bool = False,
+            categories: Optional[List[float]] = None,
+            percentiles: List[int] = [2, 98],
+            hist_options: Optional[Dict] = None,
+            max_size: Optional[int] = None,
+            **kwargs: Any,
+        ) -> FeatureCollection:
+            """Return statistics from GeoJSON features.
+
+            Args:
+                geojson (Feature or FeatureCollection): a GeoJSON Feature or FeatureCollection.
+                categorical (bool): treat input data as categorical data. Defaults to False.
+                categories (list of numbers, optional): list of categories to return value for.
+                percentiles (list of numbers, optional): list of percentile values to calculate. Defaults to `[2, 98]`.
+                hist_options (dict, optional): Options to forward to numpy.histogram function.
+                max_size (int, optional): Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to None.
+                kwargs (optional): Options to forward to `self.preview`.
+
+            Returns:
+                FeatureCollection
+
+            """
+            kwargs = {**self._kwargs, **kwargs}
+
+            hist_options = hist_options or {}
+
+            # We transform the input Feature to a FeatureCollection
+            if not isinstance(geojson, FeatureCollection):
+                geojson = FeatureCollection(features=[geojson])
+
+            for feature in geojson:
+                # Get data overlapping with the feature (using Reader.feature method)
+                data = self.feature(
+                    feature.model_dump(exclude_none=True),
+                    max_size=max_size,
+                    **kwargs,
+                )
+
+                # Get band statistics for the data
+                stats = get_array_statistics(
+                    data.as_masked(),
+                    categorical=categorical,
+                    categories=categories,
+                    percentiles=percentiles,
+                    **hist_options,
+                )
+
+                # Update input feature properties and add the statistics
+                feature.properties = feature.properties or {}
+                feature.properties.update(
+                    {
+                        "statistics": {
+                            f"{data.band_names[ix]}": BandStatistics(
+                                **stats[ix]
+                            )
+                            for ix in range(len(stats))
+                        }
+                    }
+                )
+
+            return geojson
+
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/advanced/zonal_stats/zonal_stats.md b/advanced/zonal_stats/zonal_stats.md new file mode 100644 index 00000000..6824fdad --- /dev/null +++ b/advanced/zonal_stats/zonal_stats.md @@ -0,0 +1,82 @@ +`rio-tiler`'s Readers provide simple `.statistics` method to retrieve dataset statistics (min, max, histogram...). We can easily extend this to create a `.zonal_statistics` method which will accept input features to get statistics from. + +```python + +import attr +from typing import Any, Union, Optional, List, Dict + +from rio_tiler import io +from rio_tiler.utils import get_array_statistics +from rio_tiler.models import BandStatistics + +from geojson_pydantic.features import Feature, FeatureCollection +from geojson_pydantic.geometries import Polygon + +class Reader(io.Reader): + """Custom Reader with zonal_statistics method.""" + + def zonal_statistics( + self, + geojson: Union[FeatureCollection, Feature], + categorical: bool = False, + categories: Optional[List[float]] = None, + percentiles: List[int] = [2, 98], + hist_options: Optional[Dict] = None, + max_size: int = None, + **kwargs: Any, + ) -> FeatureCollection: + """Return statistics from GeoJSON features. + + Args: + geojson (Feature or FeatureCollection): a GeoJSON Feature or FeatureCollection. + categorical (bool): treat input data as categorical data. Defaults to False. + categories (list of numbers, optional): list of categories to return value for. + percentiles (list of numbers, optional): list of percentile values to calculate. Defaults to `[2, 98]`. + hist_options (dict, optional): Options to forward to numpy.histogram function. + max_size (int, optional): Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to None. + kwargs (optional): Options to forward to `self.preview`. 
+ + Returns: + FeatureCollection + + """ + kwargs = {**self._kwargs, **kwargs} + + hist_options = hist_options or {} + + # We transform the input Feature to a FeatureCollection + if not isinstance(geojson, FeatureCollection): + geojson = FeatureCollection(features=[geojson]) + + for feature in geojson: + # Get data overlapping with the feature (using Reader.feature method) + data = self.feature( + feature.model_dump(exclude_none=True), + max_size=max_size, + **kwargs, + ) + + # Get band statistics for the data + stats = get_array_statistics( + data.as_masked(), + categorical=categorical, + categories=categories, + percentiles=percentiles, + **hist_options, + ) + + # Update input feature properties and add the statistics + feature.properties = feature.properties or {} + feature.properties.update( + { + "statistics": { + f"{data.band_names[ix]}": BandStatistics( + **stats[ix] + ) + for ix in range(len(stats)) + } + } + ) + + return geojson +``` diff --git a/api/rio_tiler/colormap/colormap.md b/api/rio_tiler/colormap/colormap.md new file mode 100644 index 00000000..adc003b5 --- /dev/null +++ b/api/rio_tiler/colormap/colormap.md @@ -0,0 +1,222 @@ +# Module rio_tiler.colormap + +rio-tiler colormap functions and classes. + +## Variables + +```python3 +DEFAULT_CMAPS_FILES +``` + +```python3 +EMPTY_COLORMAP +``` + +```python3 +USER_CMAPS_DIR +``` + +```python3 +cmap +``` + +## Functions + + +### apply_cmap + +```python3 +def apply_cmap( + data: numpy.ndarray, + colormap: Union[Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]]] +) -> Tuple[numpy.ndarray, numpy.ndarray] +``` + +Apply colormap on data. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| data | numpy.ndarray | 1D image array to translate to RGB. | None | +| colormap | dict or sequence | GDAL RGBA Color Table dictionary or sequence (for intervals). 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| tuple | Data (numpy.ndarray) and Mask (numpy.ndarray) values. | + +**Raises:** + +| Type | Description | +|---|---| +| InvalidFormat | If data is not a 1 band dataset (1, col, row). | + + +### apply_discrete_cmap + +```python3 +def apply_discrete_cmap( + data: numpy.ndarray, + colormap: Dict[int, Tuple[int, int, int, int]] +) -> Tuple[numpy.ndarray, numpy.ndarray] +``` + +Apply discrete colormap. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| data | numpy.ndarray | 1D image array to translate to RGB. | None | +| color_map | dict | Discrete ColorMap dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| tuple | Data (numpy.ndarray) and Alpha band (numpy.ndarray). | + + +### apply_intervals_cmap + +```python3 +def apply_intervals_cmap( + data: numpy.ndarray, + colormap: Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]] +) -> Tuple[numpy.ndarray, numpy.ndarray] +``` + +Apply intervals colormap. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| data | numpy.ndarray | 1D image array to translate to RGB. | None | +| color_map | Sequence | Sequence of intervals and color in form of [([min, max], [r, g, b, a]), ...]. | None | + +**Returns:** + +| Type | Description | +|---|---| +| tuple | Data (numpy.ndarray) and Alpha band (numpy.ndarray). | + + +### make_lut + +```python3 +def make_lut( + colormap: Dict[int, Tuple[int, int, int, int]] +) -> numpy.ndarray +``` + +Create a lookup table numpy.ndarray from a GDAL RGBA Color Table dictionary. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| colormap | dict | GDAL RGBA Color Table dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| numpy.ndarray | colormap lookup table. 
| + + +### parse_color + +```python3 +def parse_color( + rgba: Union[Sequence[int], str] +) -> Tuple[int, int, int, int] +``` + +Parse RGB/RGBA color and return valid rio-tiler compatible RGBA colormap entry. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| rgba | str or list of int | HEX encoded or list RGB or RGBA colors. | None | + +**Returns:** + +| Type | Description | +|---|---| +| tuple | RGBA values. | + +## Classes + +### ColorMaps + +```python3 +class ColorMaps( + data: Dict[str, Union[str, Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]]]] = NOTHING +) +``` + +Default Colormaps holder. + +#### Attributes + +| Name | Type | Description | Default | +|---|---|---|---| +| data | dict | colormaps. Defaults to `rio_tiler.colormap.DEFAULTS_CMAPS`. | `rio_tiler.colormap.DEFAULTS_CMAPS` | + +#### Methods + + +#### get + +```python3 +def get( + self, + name: str +) -> Union[Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]]] +``` + +Fetch a colormap. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| name | str | colormap name. | None | + + +#### list + +```python3 +def list( + self +) -> List[str] +``` + +List registered Colormaps. + +Returns + list: list of colormap names. + + +#### register + +```python3 +def register( + self, + custom_cmap: Dict[str, Union[str, Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]]]], + overwrite: bool = False +) -> 'ColorMaps' +``` + +Register a custom colormap. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| custom_cmap | dict | custom colormap(s) to register. | None | +| overwrite | bool | Overwrite existing colormap with same key. Defaults to False. 
| False | \ No newline at end of file diff --git a/api/rio_tiler/colormap/index.html b/api/rio_tiler/colormap/index.html new file mode 100644 index 00000000..0c5145d2 --- /dev/null +++ b/api/rio_tiler/colormap/index.html @@ -0,0 +1,2115 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.colormap - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.colormap

+

rio-tiler colormap functions and classes.

+

Variables

+
DEFAULT_CMAPS_FILES
+
+
EMPTY_COLORMAP
+
+
USER_CMAPS_DIR
+
+
cmap
+
+

Functions

+

apply_cmap

+
def apply_cmap(
+    data: numpy.ndarray,
+    colormap: Union[Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]]]
+) -> Tuple[numpy.ndarray, numpy.ndarray]
+
+

Apply colormap on data.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
datanumpy.ndarray1D image array to translate to RGB.None
colormapdict or sequenceGDAL RGBA Color Table dictionary or sequence (for intervals).None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
tupleData (numpy.ndarray) and Mask (numpy.ndarray) values.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
InvalidFormatIf data is not a 1 band dataset (1, col, row).
+

apply_discrete_cmap

+
def apply_discrete_cmap(
+    data: numpy.ndarray,
+    colormap: Dict[int, Tuple[int, int, int, int]]
+) -> Tuple[numpy.ndarray, numpy.ndarray]
+
+

Apply discrete colormap.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
datanumpy.ndarray1D image array to translate to RGB.None
color_mapdictDiscrete ColorMap dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
tupleData (numpy.ndarray) and Alpha band (numpy.ndarray).
+

apply_intervals_cmap

+
def apply_intervals_cmap(
+    data: numpy.ndarray,
+    colormap: Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]]
+) -> Tuple[numpy.ndarray, numpy.ndarray]
+
+

Apply intervals colormap.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
datanumpy.ndarray1D image array to translate to RGB.None
color_mapSequenceSequence of intervals and color in form of [([min, max], [r, g, b, a]), ...].None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
tupleData (numpy.ndarray) and Alpha band (numpy.ndarray).
+

make_lut

+
def make_lut(
+    colormap: Dict[int, Tuple[int, int, int, int]]
+) -> numpy.ndarray
+
+

Create a lookup table numpy.ndarray from a GDAL RGBA Color Table dictionary.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
colormapdictGDAL RGBA Color Table dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
numpy.ndarraycolormap lookup table.
+

parse_color

+
def parse_color(
+    rgba: Union[Sequence[int], str]
+) -> Tuple[int, int, int, int]
+
+

Parse RGB/RGBA color and return valid rio-tiler compatible RGBA colormap entry.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
rgbastr or list of intHEX encoded or list RGB or RGBA colors.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
tupleRGBA values.
+

Classes

+

ColorMaps

+
class ColorMaps(
+    data: Dict[str, Union[str, Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]]]] = NOTHING
+)
+
+

Default Colormaps holder.

+

Attributes

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
datadictcolormaps. Defaults to rio_tiler.colormap.DEFAULTS_CMAPS.rio_tiler.colormap.DEFAULTS_CMAPS
+

Methods

+

get

+
def get(
+    self,
+    name: str
+) -> Union[Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]]]
+
+

Fetch a colormap.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
namestrcolormap name.None
+

list

+
def list(
+    self
+) -> List[str]
+
+

List registered Colormaps.

+

Returns + list: list of colormap names.

+

register

+
def register(
+    self,
+    custom_cmap: Dict[str, Union[str, Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]]]],
+    overwrite: bool = False
+) -> 'ColorMaps'
+
+

Register a custom colormap.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
custom_cmapdictcustom colormap(s) to register.None
overwriteboolOverwrite existing colormap with same key. Defaults to False.False
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/constants/constants.md b/api/rio_tiler/constants/constants.md new file mode 100644 index 00000000..96afc3c5 --- /dev/null +++ b/api/rio_tiler/constants/constants.md @@ -0,0 +1,21 @@ +# Module rio_tiler.constants + +rio-tiler constant values. + +## Variables + +```python3 +MAX_THREADS +``` + +```python3 +WEB_MERCATOR_CRS +``` + +```python3 +WEB_MERCATOR_TMS +``` + +```python3 +WGS84_CRS +``` \ No newline at end of file diff --git a/api/rio_tiler/constants/index.html b/api/rio_tiler/constants/index.html new file mode 100644 index 00000000..0143b731 --- /dev/null +++ b/api/rio_tiler/constants/index.html @@ -0,0 +1,1566 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.constants - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.constants

+

rio-tiler constant values.

+

Variables

+
MAX_THREADS
+
+
WEB_MERCATOR_CRS
+
+
WEB_MERCATOR_TMS
+
+
WGS84_CRS
+
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/errors/errors.md b/api/rio_tiler/errors/errors.md new file mode 100644 index 00000000..1fc0733d --- /dev/null +++ b/api/rio_tiler/errors/errors.md @@ -0,0 +1,846 @@ +# Module rio_tiler.errors + +Errors and warnings. + +## Classes + +### AlphaBandWarning + +```python3 +class AlphaBandWarning( + /, + *args, + **kwargs +) +``` + +Automatically removed Alpha band from output array. + +#### Ancestors (in MRO) + +* builtins.UserWarning +* builtins.Warning +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### AssetAsBandError + +```python3 +class AssetAsBandError( + /, + *args, + **kwargs +) +``` + +Can't use asset_as_band with multiple bands. + +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### ColorMapAlreadyRegistered + +```python3 +class ColorMapAlreadyRegistered( + /, + *args, + **kwargs +) +``` + +ColorMap is already registered. + +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### EmptyMosaicError + +```python3 +class EmptyMosaicError( + /, + *args, + **kwargs +) +``` + +Mosaic method returned empty array. 
+ +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### ExpressionMixingWarning + +```python3 +class ExpressionMixingWarning( + /, + *args, + **kwargs +) +``` + +Expression and assets/indexes mixing. + +#### Ancestors (in MRO) + +* builtins.UserWarning +* builtins.Warning +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### InvalidAssetName + +```python3 +class InvalidAssetName( + /, + *args, + **kwargs +) +``` + +Invalid Asset name. + +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### InvalidBandName + +```python3 +class InvalidBandName( + /, + *args, + **kwargs +) +``` + +Invalid band name. + +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### InvalidBufferSize + +```python3 +class InvalidBufferSize( + /, + *args, + **kwargs +) +``` + +`buffer` must be a multiple of `0.5` (e.g: 0.5, 1, 1.5, ...). 
+ +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### InvalidColorFormat + +```python3 +class InvalidColorFormat( + /, + *args, + **kwargs +) +``` + +Invalid color format. + +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### InvalidColorMapName + +```python3 +class InvalidColorMapName( + /, + *args, + **kwargs +) +``` + +Invalid colormap name. + +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### InvalidDatatypeWarning + +```python3 +class InvalidDatatypeWarning( + /, + *args, + **kwargs +) +``` + +Invalid Output Datatype. + +#### Ancestors (in MRO) + +* builtins.UserWarning +* builtins.Warning +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### InvalidExpression + +```python3 +class InvalidExpression( + /, + *args, + **kwargs +) +``` + +Invalid Expression. 
+ +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### InvalidFormat + +```python3 +class InvalidFormat( + /, + *args, + **kwargs +) +``` + +Invalid image format. + +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### InvalidMosaicMethod + +```python3 +class InvalidMosaicMethod( + /, + *args, + **kwargs +) +``` + +Invalid Pixel Selection method for mosaic. + +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### InvalidPointDataError + +```python3 +class InvalidPointDataError( + /, + *args, + **kwargs +) +``` + +Invalid PointData. + +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### MissingAssets + +```python3 +class MissingAssets( + /, + *args, + **kwargs +) +``` + +Missing Assets. 
+ +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### MissingBands + +```python3 +class MissingBands( + /, + *args, + **kwargs +) +``` + +Missing bands. + +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### NoOverviewWarning + +```python3 +class NoOverviewWarning( + /, + *args, + **kwargs +) +``` + +Dataset has no overviews. + +#### Ancestors (in MRO) + +* builtins.UserWarning +* builtins.Warning +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### PointOutsideBounds + +```python3 +class PointOutsideBounds( + /, + *args, + **kwargs +) +``` + +Point is outside image bounds. + +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### RioTilerError + +```python3 +class RioTilerError( + /, + *args, + **kwargs +) +``` + +Base exception class. 
+ +#### Ancestors (in MRO) + +* builtins.Exception +* builtins.BaseException + +#### Descendants + +* rio_tiler.errors.InvalidFormat +* rio_tiler.errors.TileOutsideBounds +* rio_tiler.errors.InvalidBufferSize +* rio_tiler.errors.PointOutsideBounds +* rio_tiler.errors.InvalidBandName +* rio_tiler.errors.InvalidColorMapName +* rio_tiler.errors.InvalidAssetName +* rio_tiler.errors.InvalidExpression +* rio_tiler.errors.MissingAssets +* rio_tiler.errors.MissingBands +* rio_tiler.errors.InvalidMosaicMethod +* rio_tiler.errors.ColorMapAlreadyRegistered +* rio_tiler.errors.EmptyMosaicError +* rio_tiler.errors.InvalidColorFormat +* rio_tiler.errors.AssetAsBandError +* rio_tiler.errors.InvalidPointDataError + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. + +### TileOutsideBounds + +```python3 +class TileOutsideBounds( + /, + *args, + **kwargs +) +``` + +Z-X-Y Tile is outside image bounds. + +#### Ancestors (in MRO) + +* rio_tiler.errors.RioTilerError +* builtins.Exception +* builtins.BaseException + +#### Class variables + +```python3 +args +``` + +#### Methods + + +#### with_traceback + +```python3 +def with_traceback( + ... +) +``` + +Exception.with_traceback(tb) -- + +set self.__traceback__ to tb and return self. \ No newline at end of file diff --git a/api/rio_tiler/errors/index.html b/api/rio_tiler/errors/index.html new file mode 100644 index 00000000..de0deaf0 --- /dev/null +++ b/api/rio_tiler/errors/index.html @@ -0,0 +1,3853 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.errors - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.errors

+

Errors and warnings.

+

Classes

+

AlphaBandWarning

+
class AlphaBandWarning(
+    /,
+    *args,
+    **kwargs
+)
+
+

Automatically removed Alpha band from output array.

+

Ancestors (in MRO)

+
    +
  • builtins.UserWarning
  • +
  • builtins.Warning
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

AssetAsBandError

+
class AssetAsBandError(
+    /,
+    *args,
+    **kwargs
+)
+
+

Can't use asset_as_band with multiple bands.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

ColorMapAlreadyRegistered

+
class ColorMapAlreadyRegistered(
+    /,
+    *args,
+    **kwargs
+)
+
+

ColorMap is already registered.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

EmptyMosaicError

+
class EmptyMosaicError(
+    /,
+    *args,
+    **kwargs
+)
+
+

Mosaic method returned empty array.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

ExpressionMixingWarning

+
class ExpressionMixingWarning(
+    /,
+    *args,
+    **kwargs
+)
+
+

Expression and assets/indexes mixing.

+

Ancestors (in MRO)

+
    +
  • builtins.UserWarning
  • +
  • builtins.Warning
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

InvalidAssetName

+
class InvalidAssetName(
+    /,
+    *args,
+    **kwargs
+)
+
+

Invalid Asset name.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

InvalidBandName

+
class InvalidBandName(
+    /,
+    *args,
+    **kwargs
+)
+
+

Invalid band name.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

InvalidBufferSize

+
class InvalidBufferSize(
+    /,
+    *args,
+    **kwargs
+)
+
+

buffer must be a multiple of 0.5 (e.g: 0.5, 1, 1.5, ...).

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

InvalidColorFormat

+
class InvalidColorFormat(
+    /,
+    *args,
+    **kwargs
+)
+
+

Invalid color format.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

InvalidColorMapName

+
class InvalidColorMapName(
+    /,
+    *args,
+    **kwargs
+)
+
+

Invalid colormap name.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

InvalidDatatypeWarning

+
class InvalidDatatypeWarning(
+    /,
+    *args,
+    **kwargs
+)
+
+

Invalid Output Datatype.

+

Ancestors (in MRO)

+
    +
  • builtins.UserWarning
  • +
  • builtins.Warning
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

InvalidExpression

+
class InvalidExpression(
+    /,
+    *args,
+    **kwargs
+)
+
+

Invalid Expression.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

InvalidFormat

+
class InvalidFormat(
+    /,
+    *args,
+    **kwargs
+)
+
+

Invalid image format.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

InvalidMosaicMethod

+
class InvalidMosaicMethod(
+    /,
+    *args,
+    **kwargs
+)
+
+

Invalid Pixel Selection method for mosaic.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

InvalidPointDataError

+
class InvalidPointDataError(
+    /,
+    *args,
+    **kwargs
+)
+
+

Invalid PointData.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

MissingAssets

+
class MissingAssets(
+    /,
+    *args,
+    **kwargs
+)
+
+

Missing Assets.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

MissingBands

+
class MissingBands(
+    /,
+    *args,
+    **kwargs
+)
+
+

Missing bands.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

NoOverviewWarning

+
class NoOverviewWarning(
+    /,
+    *args,
+    **kwargs
+)
+
+

Dataset has no overviews.

+

Ancestors (in MRO)

+
    +
  • builtins.UserWarning
  • +
  • builtins.Warning
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

PointOutsideBounds

+
class PointOutsideBounds(
+    /,
+    *args,
+    **kwargs
+)
+
+

Point is outside image bounds.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

RioTilerError

+
class RioTilerError(
+    /,
+    *args,
+    **kwargs
+)
+
+

Base exception class.

+

Ancestors (in MRO)

+
    +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Descendants

+
    +
  • rio_tiler.errors.InvalidFormat
  • +
  • rio_tiler.errors.TileOutsideBounds
  • +
  • rio_tiler.errors.InvalidBufferSize
  • +
  • rio_tiler.errors.PointOutsideBounds
  • +
  • rio_tiler.errors.InvalidBandName
  • +
  • rio_tiler.errors.InvalidColorMapName
  • +
  • rio_tiler.errors.InvalidAssetName
  • +
  • rio_tiler.errors.InvalidExpression
  • +
  • rio_tiler.errors.MissingAssets
  • +
  • rio_tiler.errors.MissingBands
  • +
  • rio_tiler.errors.InvalidMosaicMethod
  • +
  • rio_tiler.errors.ColorMapAlreadyRegistered
  • +
  • rio_tiler.errors.EmptyMosaicError
  • +
  • rio_tiler.errors.InvalidColorFormat
  • +
  • rio_tiler.errors.AssetAsBandError
  • +
  • rio_tiler.errors.InvalidPointDataError
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+

TileOutsideBounds

+
class TileOutsideBounds(
+    /,
+    *args,
+    **kwargs
+)
+
+

Z-X-Y Tile is outside image bounds.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.errors.RioTilerError
  • +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+

Class variables

+
args
+
+

Methods

+

with_traceback

+
def with_traceback(
+    ...
+)
+
+

Exception.with_traceback(tb) --

+

set self.__traceback__ to tb and return self.

+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/expression/expression.md b/api/rio_tiler/expression/expression.md new file mode 100644 index 00000000..1458c1d3 --- /dev/null +++ b/api/rio_tiler/expression/expression.md @@ -0,0 +1,80 @@ +# Module rio_tiler.expression + +rio-tiler.expression: Parse and Apply expression. + +## Functions + + +### apply_expression + +```python3 +def apply_expression( + blocks: Sequence[str], + bands: Sequence[str], + data: numpy.ndarray +) -> numpy.ma.core.MaskedArray +``` + +Apply rio-tiler expression. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| blocks | sequence | expression for a specific layer. | None | +| bands | sequence | bands names. | None | +| data | numpy.array | array of bands. | None | + +**Returns:** + +| Type | Description | +|---|---| +| numpy.array | output data. | + + +### get_expression_blocks + +```python3 +def get_expression_blocks( + expression: str +) -> List[str] +``` + +Split expression in blocks. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| expression | str | band math/combination expression. | None | + +**Returns:** + +| Type | Description | +|---|---| +| list | expression blocks (str). | + + +### parse_expression + +```python3 +def parse_expression( + expression: str, + cast: bool = True +) -> Tuple +``` + +Parse rio-tiler band math expression. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| expression | str | band math/combination expression. | None | +| cast | bool | cast band names to integers (convert to index values). Defaults to True. | True | + +**Returns:** + +| Type | Description | +|---|---| +| tuple | band names/indexes. 
| \ No newline at end of file diff --git a/api/rio_tiler/expression/index.html b/api/rio_tiler/expression/index.html new file mode 100644 index 00000000..6d0156cb --- /dev/null +++ b/api/rio_tiler/expression/index.html @@ -0,0 +1,1753 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.expression - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.expression

+

rio-tiler.expression: Parse and Apply expression.

+

Functions

+

apply_expression

+
def apply_expression(
+    blocks: Sequence[str],
+    bands: Sequence[str],
+    data: numpy.ndarray
+) -> numpy.ma.core.MaskedArray
+
+

Apply rio-tiler expression.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
blockssequenceexpression for a specific layer.None
bandssequencebands names.None
datanumpy.arrayarray of bands.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
numpy.arrayoutput data.
+

get_expression_blocks

+
def get_expression_blocks(
+    expression: str
+) -> List[str]
+
+

Split expression in blocks.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
expressionstrband math/combination expression.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
listexpression blocks (str).
+

parse_expression

+
def parse_expression(
+    expression: str,
+    cast: bool = True
+) -> Tuple
+
+

Parse rio-tiler band math expression.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
expressionstrband math/combination expression.None
castboolcast band names to integers (convert to index values). Defaults to True.True
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
tupleband names/indexes.
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/io/base/base.md b/api/rio_tiler/io/base/base.md new file mode 100644 index 00000000..f46ea01a --- /dev/null +++ b/api/rio_tiler/io/base/base.md @@ -0,0 +1,934 @@ +# Module rio_tiler.io.base + +rio_tiler.io.base: ABC class for rio-tiler readers. + +## Variables + +```python3 +WGS84_CRS +``` + +## Classes + +### BaseReader + +```python3 +class BaseReader( + input: Any, + tms: morecantile.models.TileMatrixSet = rio_tiler.models.ImageData +``` + +Read a Dataset for a GeoJSON feature. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| shape | dict | Valid GeoJSON feature. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. | + + +#### info + +```python3 +def info( + self +) -> rio_tiler.models.Info +``` + +Return Dataset's info. + +**Returns:** + +| Type | Description | +|---|---| +| rio_tile.models.Info | Dataset info. | + + +#### part + +```python3 +def part( + self, + bbox: Tuple[float, float, float, float] +) -> rio_tiler.models.ImageData +``` + +Read a Part of a Dataset. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| bbox | tuple | Output bounds (left, bottom, right, top) in target crs. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. | + + +#### point + +```python3 +def point( + self, + lon: float, + lat: float +) -> rio_tiler.models.PointData +``` + +Read a value from a Dataset. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| lon | float | Longitude. | None | +| lat | float | Latitude. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.PointData | PointData instance with data, mask and spatial info. 
| + + +#### preview + +```python3 +def preview( + self +) -> rio_tiler.models.ImageData +``` + +Read a preview of a Dataset. + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. | + + +#### statistics + +```python3 +def statistics( + self +) -> Dict[str, rio_tiler.models.BandStatistics] +``` + +Return bands statistics from a dataset. + +**Returns:** + +| Type | Description | +|---|---| +| Dict[str, rio_tiler.models.BandStatistics] | bands statistics. | + + +#### tile + +```python3 +def tile( + self, + tile_x: int, + tile_y: int, + tile_z: int +) -> rio_tiler.models.ImageData +``` + +Read a Map tile from the Dataset. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### tile_exists + +```python3 +def tile_exists( + self, + tile_x: int, + tile_y: int, + tile_z: int +) -> bool +``` + +Check if a tile intersects the dataset bounds. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | + +**Returns:** + +| Type | Description | +|---|---| +| bool | True if the tile intersects the dataset bounds. | + +### MultiBandReader + +```python3 +class MultiBandReader( + input: Any, + tms: morecantile.models.TileMatrixSet = rio_tiler.models.ImageData +``` + +Read and merge parts defined by geojson feature from multiple bands. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| shape | dict | Valid GeoJSON feature. 
| None | +| bands | sequence of str or str | bands to fetch info from. | None | +| expression | str | rio-tiler expression for the band list (e.g. b1/b2+b3). | None | +| kwargs | optional | Options to forward to the `self.reader.feature` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### info + +```python3 +def info( + self, + bands: Union[Sequence[str], str] = None, + *args, + **kwargs: Any +) -> rio_tiler.models.Info +``` + +Return metadata from multiple bands. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| bands | sequence of str or str | band names to fetch info from. Required keyword argument. | None | + +**Returns:** + +| Type | Description | +|---|---| +| dict | Multiple bands info in form of {"band1": rio_tile.models.Info}. | + + +#### parse_expression + +```python3 +def parse_expression( + self, + expression: str +) -> Tuple +``` + +Parse rio-tiler band math expression. + + +#### part + +```python3 +def part( + self, + bbox: Tuple[float, float, float, float], + bands: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge parts from multiple bands. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| bbox | tuple | Output bounds (left, bottom, right, top) in target crs. | None | +| bands | sequence of str or str | bands to fetch info from. | None | +| expression | str | rio-tiler expression for the band list (e.g. b1/b2+b3). | None | +| kwargs | optional | Options to forward to the 'self.reader.part' method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. 
| + + +#### point + +```python3 +def point( + self, + lon: float, + lat: float, + bands: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.PointData +``` + +Read a pixel values from multiple bands. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| lon | float | Longitude. | None | +| lat | float | Latitude. | None | +| bands | sequence of str or str | bands to fetch info from. | None | +| expression | str | rio-tiler expression for the band list (e.g. b1/b2+b3). | None | +| kwargs | optional | Options to forward to the `self.reader.point` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | PointData | + + +#### preview + +```python3 +def preview( + self, + bands: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge previews from multiple bands. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| bands | sequence of str or str | bands to fetch info from. | None | +| expression | str | rio-tiler expression for the band list (e.g. b1/b2+b3). | None | +| kwargs | optional | Options to forward to the `self.reader.preview` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### statistics + +```python3 +def statistics( + self, + bands: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + categorical: bool = False, + categories: Union[List[float], NoneType] = None, + percentiles: Union[List[int], NoneType] = None, + hist_options: Union[Dict, NoneType] = None, + max_size: int = 1024, + **kwargs: Any +) -> Dict[str, rio_tiler.models.BandStatistics] +``` + +Return array statistics for multiple assets. 
+ +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| bands | sequence of str or str | bands to fetch info from. Required keyword argument. | None | +| expression | str | rio-tiler expression for the band list (e.g. b1/b2+b3). | None | +| categorical | bool | treat input data as categorical data. Defaults to False. | False | +| categories | list of numbers | list of categories to return value for. | None | +| percentiles | list of numbers | list of percentile values to calculate. Defaults to `[2, 98]`. | `[2, 98]` | +| hist_options | dict | Options to forward to numpy.histogram function. | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. | 1024 | +| kwargs | optional | Options to forward to the `self.preview` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| dict | Multiple assets statistics in form of {"{band}/{expression}": rio_tiler.models.BandStatistics, ...}. | + + +#### tile + +```python3 +def tile( + self, + tile_x: int, + tile_y: int, + tile_z: int, + bands: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge Web Map tiles multiple bands. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | +| bands | sequence of str or str | bands to fetch info from. | None | +| expression | str | rio-tiler expression for the band list (e.g. b1/b2+b3). | None | +| kwargs | optional | Options to forward to the `self.reader.tile` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. 
| + + +#### tile_exists + +```python3 +def tile_exists( + self, + tile_x: int, + tile_y: int, + tile_z: int +) -> bool +``` + +Check if a tile intersects the dataset bounds. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | + +**Returns:** + +| Type | Description | +|---|---| +| bool | True if the tile intersects the dataset bounds. | + +### MultiBaseReader + +```python3 +class MultiBaseReader( + input: Any, + tms: morecantile.models.TileMatrixSet = rio_tiler.models.ImageData +``` + +Read and merge parts defined by geojson feature from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| shape | dict | Valid GeoJSON feature. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.feature` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### info + +```python3 +def info( + self, + assets: Union[Sequence[str], str] = None, + **kwargs: Any +) -> Dict[str, rio_tiler.models.Info] +``` + +Return metadata from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. Required keyword argument. | None | + +**Returns:** + +| Type | Description | +|---|---| +| dict | Multiple assets info in form of {"asset1": rio_tile.models.Info}. 
| + + +#### merged_statistics + +```python3 +def merged_statistics( + self, + assets: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None, + categorical: bool = False, + categories: Union[List[float], NoneType] = None, + percentiles: Union[List[int], NoneType] = None, + hist_options: Union[Dict, NoneType] = None, + max_size: int = 1024, + **kwargs: Any +) -> Dict[str, rio_tiler.models.BandStatistics] +``` + +Return array statistics for multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| categorical | bool | treat input data as categorical data. Defaults to False. | False | +| categories | list of numbers | list of categories to return value for. | None | +| percentiles | list of numbers | list of percentile values to calculate. Defaults to `[2, 98]`. | `[2, 98]` | +| hist_options | dict | Options to forward to numpy.histogram function. | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. | 1024 | +| kwargs | optional | Options to forward to the `self.preview` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| Dict[str, rio_tiler.models.BandStatistics] | bands statistics. | + + +#### parse_expression + +```python3 +def parse_expression( + self, + expression: str, + asset_as_band: bool = False +) -> Tuple +``` + +Parse rio-tiler band math expression. 
+ + +#### part + +```python3 +def part( + self, + bbox: Tuple[float, float, float, float], + assets: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge parts from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| bbox | tuple | Output bounds (left, bottom, right, top) in target crs. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.part` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### point + +```python3 +def point( + self, + lon: float, + lat: float, + assets: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.PointData +``` + +Read pixel value from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| lon | float | Longitude. | None | +| lat | float | Latitude. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.point` method. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| None | PointData | + + +#### preview + +```python3 +def preview( + self, + assets: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge previews from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.preview` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### statistics + +```python3 +def statistics( + self, + assets: Union[Sequence[str], str] = None, + asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None, + asset_expression: Union[Dict[str, str], NoneType] = None, + **kwargs: Any +) -> Dict[str, Dict[str, rio_tiler.models.BandStatistics]] +``` + +Return array statistics for multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| asset_expression | dict | rio-tiler expression for each asset (e.g. {"asset1": "b1/b2+b3", "asset2": ...}). | None | +| kwargs | optional | Options to forward to the `self.reader.statistics` method. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| dict | Multiple assets statistics in form of {"asset1": {"1": rio_tiler.models.BandStatistics, ...}}. | + + +#### tile + +```python3 +def tile( + self, + tile_x: int, + tile_y: int, + tile_z: int, + assets: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge Wep Map tiles from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.tile` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### tile_exists + +```python3 +def tile_exists( + self, + tile_x: int, + tile_y: int, + tile_z: int +) -> bool +``` + +Check if a tile intersects the dataset bounds. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | + +**Returns:** + +| Type | Description | +|---|---| +| bool | True if the tile intersects the dataset bounds. 
| + +### SpatialMixin + +```python3 +class SpatialMixin( + tms: morecantile.models.TileMatrixSet = + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.io.base - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.io.base

+

rio_tiler.io.base: ABC class for rio-tiler readers.

+

Variables

+
WGS84_CRS
+
+

Classes

+

BaseReader

+
class BaseReader(
+    input: Any,
+    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857>
+)
+
+

Rio-tiler.io BaseReader.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
inputanyReader's input.None
tmsmorecantile.TileMatrixSetTileMatrixSet grid definition. Defaults to WebMercatorQuad.WebMercatorQuad
+

Ancestors (in MRO)

+
    +
  • rio_tiler.io.base.SpatialMixin
  • +
+

Descendants

+
    +
  • rio_tiler.io.rasterio.Reader
  • +
  • rio_tiler.io.xarray.XarrayReader
  • +
+

Instance variables

+
geographic_bounds
+
+

Return dataset bounds in geographic_crs.

+

Methods

+

feature

+
def feature(
+    self,
+    shape: Dict
+) -> rio_tiler.models.ImageData
+
+

Read a Dataset for a GeoJSON feature.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
shapedictValid GeoJSON feature.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

info

+
def info(
+    self
+) -> rio_tiler.models.Info
+
+

Return Dataset's info.

+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tile.models.InfoDataset info.
+

part

+
def part(
+    self,
+    bbox: Tuple[float, float, float, float]
+) -> rio_tiler.models.ImageData
+
+

Read a Part of a Dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bboxtupleOutput bounds (left, bottom, right, top) in target crs.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

point

+
def point(
+    self,
+    lon: float,
+    lat: float
+) -> rio_tiler.models.PointData
+
+

Read a value from a Dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
lonfloatLongitude.None
latfloatLatitude.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.PointDataPointData instance with data, mask and spatial info.
+

preview

+
def preview(
+    self
+) -> rio_tiler.models.ImageData
+
+

Read a preview of a Dataset.

+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

statistics

+
def statistics(
+    self
+) -> Dict[str, rio_tiler.models.BandStatistics]
+
+

Return bands statistics from a dataset.

+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Dict[str, rio_tiler.models.BandStatistics]bands statistics.
+

tile

+
def tile(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> rio_tiler.models.ImageData
+
+

Read a Map tile from the Dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

tile_exists

+
def tile_exists(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> bool
+
+

Check if a tile intersects the dataset bounds.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
boolTrue if the tile intersects the dataset bounds.
+

MultiBandReader

+
class MultiBandReader(
+    input: Any,
+    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857>,
+    minzoom: int = None,
+    maxzoom: int = None,
+    reader_options: Dict = NOTHING
+)
+
+

Multi Band Reader.

+

This Abstract Base Class Reader is suited for dataset that stores spectral bands as separate files (e.g. Sentinel 2).

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
inputanyinput data.None
tmsmorecantile.TileMatrixSetTileMatrixSet grid definition. Defaults to WebMercatorQuad.WebMercatorQuad
minzoomintSet dataset's minzoom.None
maxzoomintSet dataset's maxzoom.None
reader_optionsdict, optionoptions to forward to the reader. Defaults to {}.{}
+

Ancestors (in MRO)

+
    +
  • rio_tiler.io.base.SpatialMixin
  • +
+

Instance variables

+
geographic_bounds
+
+

Return dataset bounds in geographic_crs.

+

Methods

+

feature

+
def feature(
+    self,
+    shape: Dict,
+    bands: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge parts defined by geojson feature from multiple bands.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
shapedictValid GeoJSON feature.None
bandssequence of str or strbands to fetch info from.None
expressionstrrio-tiler expression for the band list (e.g. b1/b2+b3).None
kwargsoptionalOptions to forward to the self.reader.feature method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

info

+
def info(
+    self,
+    bands: Union[Sequence[str], str] = None,
+    *args,
+    **kwargs: Any
+) -> rio_tiler.models.Info
+
+

Return metadata from multiple bands.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bandssequence of str or strband names to fetch info from. Required keyword argument.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
dictMultiple bands info in form of {"band1": rio_tile.models.Info}.
+

parse_expression

+
def parse_expression(
+    self,
+    expression: str
+) -> Tuple
+
+

Parse rio-tiler band math expression.

+

part

+
def part(
+    self,
+    bbox: Tuple[float, float, float, float],
+    bands: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge parts from multiple bands.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bboxtupleOutput bounds (left, bottom, right, top) in target crs.None
bandssequence of str or strbands to fetch info from.None
expressionstrrio-tiler expression for the band list (e.g. b1/b2+b3).None
kwargsoptionalOptions to forward to the 'self.reader.part' method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

point

+
def point(
+    self,
+    lon: float,
+    lat: float,
+    bands: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.PointData
+
+

Read pixel values from multiple bands.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
lonfloatLongitude.None
latfloatLatitude.None
bandssequence of str or strbands to fetch info from.None
expressionstrrio-tiler expression for the band list (e.g. b1/b2+b3).None
kwargsoptionalOptions to forward to the self.reader.point method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NonePointData
+

preview

+
def preview(
+    self,
+    bands: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge previews from multiple bands.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bandssequence of str or strbands to fetch info from.None
expressionstrrio-tiler expression for the band list (e.g. b1/b2+b3).None
kwargsoptionalOptions to forward to the self.reader.preview method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

statistics

+
def statistics(
+    self,
+    bands: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    categorical: bool = False,
+    categories: Union[List[float], NoneType] = None,
+    percentiles: Union[List[int], NoneType] = None,
+    hist_options: Union[Dict, NoneType] = None,
+    max_size: int = 1024,
+    **kwargs: Any
+) -> Dict[str, rio_tiler.models.BandStatistics]
+
+

Return array statistics for multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bandssequence of str or strbands to fetch info from. Required keyword argument.None
expressionstrrio-tiler expression for the band list (e.g. b1/b2+b3).None
categoricalbooltreat input data as categorical data. Defaults to False.False
categorieslist of numberslist of categories to return value for.None
percentileslist of numberslist of percentile values to calculate. Defaults to [2, 98].[2, 98]
hist_optionsdictOptions to forward to numpy.histogram function.None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
kwargsoptionalOptions to forward to the self.preview method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
dictMultiple assets statistics in form of {"{band}/{expression}": rio_tiler.models.BandStatistics, ...}.
+

tile

+
def tile(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int,
+    bands: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge Web Map tiles from multiple bands.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
bandssequence of str or strbands to fetch info from.None
expressionstrrio-tiler expression for the band list (e.g. b1/b2+b3).None
kwargsoptionalOptions to forward to the self.reader.tile method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

tile_exists

+
def tile_exists(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> bool
+
+

Check if a tile intersects the dataset bounds.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
boolTrue if the tile intersects the dataset bounds.
+

MultiBaseReader

+
class MultiBaseReader(
+    input: Any,
+    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857>,
+    minzoom: int = None,
+    maxzoom: int = None,
+    reader_options: Dict = NOTHING
+)
+
+

MultiBaseReader Reader.

+

This Abstract Base Class Reader is suited for dataset that are composed of multiple assets (e.g. STAC).

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
inputanyinput data.None
tmsmorecantile.TileMatrixSetTileMatrixSet grid definition. Defaults to WebMercatorQuad.WebMercatorQuad
minzoomintSet dataset's minzoom.None
maxzoomintSet dataset's maxzoom.None
reader_optionsdict, optionoptions to forward to the reader. Defaults to {}.{}
+

Ancestors (in MRO)

+
    +
  • rio_tiler.io.base.SpatialMixin
  • +
+

Descendants

+
    +
  • rio_tiler.io.stac.STACReader
  • +
+

Instance variables

+
geographic_bounds
+
+

Return dataset bounds in geographic_crs.

+

Methods

+

feature

+
def feature(
+    self,
+    shape: Dict,
+    assets: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge parts defined by geojson feature from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
shapedictValid GeoJSON feature.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.feature method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

info

+
def info(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    **kwargs: Any
+) -> Dict[str, rio_tiler.models.Info]
+
+

Return metadata from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from. Required keyword argument.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
dictMultiple assets info in form of {"asset1": rio_tile.models.Info}.
+

merged_statistics

+
def merged_statistics(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    categorical: bool = False,
+    categories: Union[List[float], NoneType] = None,
+    percentiles: Union[List[int], NoneType] = None,
+    hist_options: Union[Dict, NoneType] = None,
+    max_size: int = 1024,
+    **kwargs: Any
+) -> Dict[str, rio_tiler.models.BandStatistics]
+
+

Return array statistics for multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
categoricalbooltreat input data as categorical data. Defaults to False.False
categorieslist of numberslist of categories to return value for.None
percentileslist of numberslist of percentile values to calculate. Defaults to [2, 98].[2, 98]
hist_optionsdictOptions to forward to numpy.histogram function.None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
kwargsoptionalOptions to forward to the self.preview method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Dict[str, rio_tiler.models.BandStatistics]bands statistics.
+

parse_expression

+
def parse_expression(
+    self,
+    expression: str,
+    asset_as_band: bool = False
+) -> Tuple
+
+

Parse rio-tiler band math expression.

+

part

+
def part(
+    self,
+    bbox: Tuple[float, float, float, float],
+    assets: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge parts from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bboxtupleOutput bounds (left, bottom, right, top) in target crs.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.part method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

point

+
def point(
+    self,
+    lon: float,
+    lat: float,
+    assets: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.PointData
+
+

Read pixel value from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
lonfloatLongitude.None
latfloatLatitude.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.point method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.PointDataPointData instance with data, mask and spatial info.
+

preview

+
def preview(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge previews from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.preview method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

statistics

+
def statistics(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    asset_expression: Union[Dict[str, str], NoneType] = None,
+    **kwargs: Any
+) -> Dict[str, Dict[str, rio_tiler.models.BandStatistics]]
+
+

Return array statistics for multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from.None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
asset_expressiondictrio-tiler expression for each asset (e.g. {"asset1": "b1/b2+b3", "asset2": ...}).None
kwargsoptionalOptions to forward to the self.reader.statistics method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
dictMultiple assets statistics in form of {"asset1": {"1": rio_tiler.models.BandStatistics, ...}}.
+

tile

+
def tile(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int,
+    assets: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge Web Map tiles from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.tile method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

tile_exists

+
def tile_exists(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> bool
+
+

Check if a tile intersects the dataset bounds.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
boolTrue if the tile intersects the dataset bounds.
+

SpatialMixin

+
class SpatialMixin(
+    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857'>
+)
+
+

Spatial Info Mixin.

+

Attributes

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tmsmorecantile.TileMatrixSetTileMatrixSet grid definition. Defaults to WebMercatorQuad.WebMercatorQuad
+

Descendants

+
    +
  • rio_tiler.io.base.BaseReader
  • +
  • rio_tiler.io.base.MultiBaseReader
  • +
  • rio_tiler.io.base.MultiBandReader
  • +
+

Instance variables

+
geographic_bounds
+
+

Return dataset bounds in geographic_crs.

+

Methods

+

tile_exists

+
def tile_exists(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> bool
+
+

Check if a tile intersects the dataset bounds.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
boolTrue if the tile intersects the dataset bounds.
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/io/cogeo/cogeo.md b/api/rio_tiler/io/cogeo/cogeo.md new file mode 100644 index 00000000..7fdb56f3 --- /dev/null +++ b/api/rio_tiler/io/cogeo/cogeo.md @@ -0,0 +1,758 @@ +# Module rio_tiler.io.cogeo + +rio_tiler.io.cogeo: raster processing. + +None + +## Classes + +### COGReader + +```python3 +class COGReader( + input: str, + dataset: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.io.MemoryFile, rasterio.vrt.WarpedVRT] = None, + tms: morecantile.models.TileMatrixSet = , + minzoom: int = None, + maxzoom: int = None, + geographic_crs: rasterio.crs.CRS = CRS.from_epsg(4326), + colormap: Dict = None, + nodata: Union[float, int, str, NoneType] = None, + unscale: Union[bool, NoneType] = None, + resampling_method: Union[rasterio.enums.Resampling, NoneType] = None, + vrt_options: Union[Dict, NoneType] = None, + post_process: Union[Callable[[numpy.ndarray, numpy.ndarray], Tuple[numpy.ndarray, numpy.ndarray]], NoneType] = None +) +``` + +#### Attributes + +| Name | Type | Description | Default | +|---|---|---|---| +| input | str | Cloud Optimized GeoTIFF path. | None | +| dataset | rasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRT | Rasterio dataset. | None | +| bounds | tuple | Dataset bounds (left, bottom, right, top). | None | +| crs | rasterio.crs.CRS | Dataset CRS. | None | +| tms | morecantile.TileMatrixSet | TileMatrixSet grid definition. Defaults to `WebMercatorQuad`. | `WebMercatorQuad` | +| minzoom | int | Set minzoom for the tiles. | None | +| maxzoom | int | Set maxzoom for the tiles. | None | +| geographic_crs | rasterio.crs.CRS | CRS to use as geographic coordinate system. Defaults to WGS84. | WGS84 | +| colormap | dict | Overwrite internal colormap. | None | +| nodata | int or float or str | Global options, overwrite internal nodata value. | None | +| unscale | bool | Global options, apply internal scale and offset on all read operations. 
| None | +| resampling_method | rasterio.enums.Resampling | Global options, resampling method to use for read operations. | None | +| vrt_options | dict | Global options, WarpedVRT options to use for read operations. | None | +| post_process | callable | Global options, Function to apply after all read operations. | None | + +#### Ancestors (in MRO) + +* rio_tiler.io.base.BaseReader +* rio_tiler.io.base.SpatialMixin + +#### Descendants + +* rio_tiler.io.cogeo.GCPCOGReader + +#### Instance variables + +```python3 +geographic_bounds +``` + +return bounds in WGS84. + +#### Methods + + +#### close + +```python3 +def close( + self +) +``` + + +Close rasterio dataset. + + +#### feature + +```python3 +def feature( + self, + shape: Dict, + dst_crs: Union[rasterio.crs.CRS, NoneType] = None, + shape_crs: rasterio.crs.CRS = CRS.from_epsg(4326), + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + max_size: Union[int, NoneType] = None, + height: Union[int, NoneType] = None, + width: Union[int, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + + +Read part of a COG defined by a geojson feature. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| shape | dict | Valid GeoJSON feature. | None | +| dst_crs | rasterio.crs.CRS | Overwrite target coordinate reference system. | None | +| shape_crs | rasterio.crs.CRS | Input geojson coordinate reference system. Defaults to `epsg:4326`. | `epsg:4326` | +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. | None | +| height | int | Output height of the array. | None | +| width | int | Output width of the array. | None | +| kwargs | optional | Options to forward to the `COGReader.part` method. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. | + + +#### get_zooms + +```python3 +def get_zooms( + self, + tilesize: int = 256 +) -> Tuple[int, int] +``` + + +Calculate raster min/max zoom level for input TMS. + + +#### info + +```python3 +def info( + self +) -> rio_tiler.models.Info +``` + + +Return COG info. + + +#### part + +```python3 +def part( + self, + bbox: Tuple[float, float, float, float], + dst_crs: Union[rasterio.crs.CRS, NoneType] = None, + bounds_crs: rasterio.crs.CRS = CRS.from_epsg(4326), + indexes: Union[int, Sequence, NoneType] = None, + expression: Union[str, NoneType] = None, + max_size: Union[int, NoneType] = None, + height: Union[int, NoneType] = None, + width: Union[int, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + + +Read part of a COG. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| bbox | tuple | Output bounds (left, bottom, right, top) in target crs ("dst_crs"). | None | +| dst_crs | rasterio.crs.CRS | Overwrite target coordinate reference system. | None | +| bounds_crs | rasterio.crs.CRS | Bounds Coordinate Reference System. Defaults to `epsg:4326`. | `epsg:4326` | +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. | None | +| height | int | Output height of the array. | None | +| width | int | Output width of the array. | None | +| kwargs | optional | Options to forward to the `rio_tiler.reader.part` function. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. 
| + + +#### point + +```python3 +def point( + self, + lon: float, + lat: float, + coord_crs: rasterio.crs.CRS = CRS.from_epsg(4326), + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + **kwargs: Any +) -> List +``` + + +Read a pixel value from a COG. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| lon | float | Longitude. | None | +| lat | float | Latitude. | None | +| coord_crs | rasterio.crs.CRS | Coordinate Reference System of the input coords. Defaults to `epsg:4326`. | `epsg:4326` | +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| kwargs | optional | Options to forward to the `rio_tiler.reader.point` function. | None | + +**Returns:** + +| Type | Description | +|---|---| +| list | Pixel value per band indexes. | + + +#### preview + +```python3 +def preview( + self, + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + max_size: int = 1024, + height: Union[int, NoneType] = None, + width: Union[int, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + + +Return a preview of a COG. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. | 1024 | +| height | int | Output height of the array. | None | +| width | int | Output width of the array. | None | +| kwargs | optional | Options to forward to the `rio_tiler.reader.preview` function. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. 
| + + +#### read + +```python3 +def read( + self, + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + + +Read the COG. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| kwargs | optional | Options to forward to the `rio_tiler.reader.read` function. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. | + + +#### statistics + +```python3 +def statistics( + self, + categorical: bool = False, + categories: Union[List[float], NoneType] = None, + percentiles: List[int] = [2, 98], + hist_options: Union[Dict, NoneType] = None, + max_size: int = 1024, + **kwargs: Any +) -> Dict[str, rio_tiler.models.BandStatistics] +``` + + +Return bands statistics from a dataset. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| categorical | bool | treat input data as categorical data. Defaults to False. | False | +| categories | list of numbers | list of categories to return value for. | None | +| percentiles | list of numbers | list of percentile values to calculate. Defaults to `[2, 98]`. | `[2, 98]` | +| hist_options | dict | Options to forward to numpy.histogram function. | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. | 1024 | +| kwargs | optional | Options to forward to `self.preview`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| Dict[str, rio_tiler.models.BandStatistics] | bands statistics. 
| + + +#### tile + +```python3 +def tile( + self, + tile_x: int, + tile_y: int, + tile_z: int, + tilesize: int = 256, + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + tile_buffer: Union[float, int, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + + +Read a Web Map tile from a COG. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | +| tilesize | int | Output image size. Defaults to `256`. | `256` | +| indexes | int or sequence of int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| tile_buffer | int or float | Buffer on each side of the given tile. It must be a multiple of `0.5`. Output **tilesize** will be expanded to `tilesize + 2 * tile_buffer` (e.g 0.5 = 257x257, 1.0 = 258x258). | None | +| kwargs | optional | Options to forward to the `COGReader.part` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### tile_exists + +```python3 +def tile_exists( + self, + tile_x: int, + tile_y: int, + tile_z: int +) -> bool +``` + + +Check if a tile intersects the dataset bounds. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | + +**Returns:** + +| Type | Description | +|---|---| +| bool | True if the tile intersects the dataset bounds. 
| + +### GCPCOGReader + +```python3 +class GCPCOGReader( + input: str, + src_dataset: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.io.MemoryFile, rasterio.vrt.WarpedVRT] = None, + tms: morecantile.models.TileMatrixSet = , + minzoom: int = None, + maxzoom: int = None, + geographic_crs: rasterio.crs.CRS = CRS.from_epsg(4326), + colormap: Dict = None, + nodata: Union[float, int, str, NoneType] = None, + unscale: Union[bool, NoneType] = None, + resampling_method: Union[rasterio.enums.Resampling, NoneType] = None, + vrt_options: Union[Dict, NoneType] = None, + post_process: Union[Callable[[numpy.ndarray, numpy.ndarray], Tuple[numpy.ndarray, numpy.ndarray]], NoneType] = None +) +``` + +#### Attributes + +| Name | Type | Description | Default | +|---|---|---|---| +| input | str | Cloud Optimized GeoTIFF path. | None | +| src_dataset | rasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRT | Rasterio dataset. | None | +| tms | morecantile.TileMatrixSet | TileMatrixSet grid definition. Defaults to `WebMercatorQuad`. | `WebMercatorQuad` | +| minzoom | int | Overwrite Min Zoom level. | None | +| maxzoom | int | Overwrite Max Zoom level. | None | +| colormap | dict | Overwrite internal colormap. | None | +| nodata | int or float or str | Global options, overwrite internal nodata value. | None | +| unscale | bool | Global options, apply internal scale and offset on all read operations. | None | +| resampling_method | rasterio.enums.Resampling | Global options, resampling method to use for read operations. | None | +| vrt_options | dict | Global options, WarpedVRT options to use for read operations. | None | +| post_process | callable | Global options, Function to apply after all read operations. | None | +| dataset | rasterio.vrtWarpedVRT | Warped VRT constructed with dataset GCPS info. **READ ONLY attribute**. 
| None | + +#### Ancestors (in MRO) + +* rio_tiler.io.cogeo.COGReader +* rio_tiler.io.base.BaseReader +* rio_tiler.io.base.SpatialMixin + +#### Instance variables + +```python3 +geographic_bounds +``` + +return bounds in WGS84. + +#### Methods + + +#### close + +```python3 +def close( + self +) +``` + + +Close rasterio dataset. + + +#### feature + +```python3 +def feature( + self, + shape: Dict, + dst_crs: Union[rasterio.crs.CRS, NoneType] = None, + shape_crs: rasterio.crs.CRS = CRS.from_epsg(4326), + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + max_size: Union[int, NoneType] = None, + height: Union[int, NoneType] = None, + width: Union[int, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + + +Read part of a COG defined by a geojson feature. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| shape | dict | Valid GeoJSON feature. | None | +| dst_crs | rasterio.crs.CRS | Overwrite target coordinate reference system. | None | +| shape_crs | rasterio.crs.CRS | Input geojson coordinate reference system. Defaults to `epsg:4326`. | `epsg:4326` | +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. | None | +| height | int | Output height of the array. | None | +| width | int | Output width of the array. | None | +| kwargs | optional | Options to forward to the `COGReader.part` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. | + + +#### get_zooms + +```python3 +def get_zooms( + self, + tilesize: int = 256 +) -> Tuple[int, int] +``` + + +Calculate raster min/max zoom level for input TMS. 
+ + +#### info + +```python3 +def info( + self +) -> rio_tiler.models.Info +``` + + +Return COG info. + + +#### part + +```python3 +def part( + self, + bbox: Tuple[float, float, float, float], + dst_crs: Union[rasterio.crs.CRS, NoneType] = None, + bounds_crs: rasterio.crs.CRS = CRS.from_epsg(4326), + indexes: Union[int, Sequence, NoneType] = None, + expression: Union[str, NoneType] = None, + max_size: Union[int, NoneType] = None, + height: Union[int, NoneType] = None, + width: Union[int, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + + +Read part of a COG. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| bbox | tuple | Output bounds (left, bottom, right, top) in target crs ("dst_crs"). | None | +| dst_crs | rasterio.crs.CRS | Overwrite target coordinate reference system. | None | +| bounds_crs | rasterio.crs.CRS | Bounds Coordinate Reference System. Defaults to `epsg:4326`. | `epsg:4326` | +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. | None | +| height | int | Output height of the array. | None | +| width | int | Output width of the array. | None | +| kwargs | optional | Options to forward to the `rio_tiler.reader.part` function. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. | + + +#### point + +```python3 +def point( + self, + lon: float, + lat: float, + coord_crs: rasterio.crs.CRS = CRS.from_epsg(4326), + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + **kwargs: Any +) -> List +``` + + +Read a pixel value from a COG. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| lon | float | Longitude. 
| None | +| lat | float | Latitude. | None | +| coord_crs | rasterio.crs.CRS | Coordinate Reference System of the input coords. Defaults to `epsg:4326`. | `epsg:4326` | +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| kwargs | optional | Options to forward to the `rio_tiler.reader.point` function. | None | + +**Returns:** + +| Type | Description | +|---|---| +| list | Pixel value per band indexes. | + + +#### preview + +```python3 +def preview( + self, + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + max_size: int = 1024, + height: Union[int, NoneType] = None, + width: Union[int, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + + +Return a preview of a COG. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. | 1024 | +| height | int | Output height of the array. | None | +| width | int | Output width of the array. | None | +| kwargs | optional | Options to forward to the `rio_tiler.reader.preview` function. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. | + + +#### read + +```python3 +def read( + self, + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + + +Read the COG. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). 
| None | +| kwargs | optional | Options to forward to the `rio_tiler.reader.read` function. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. | + + +#### statistics + +```python3 +def statistics( + self, + categorical: bool = False, + categories: Union[List[float], NoneType] = None, + percentiles: List[int] = [2, 98], + hist_options: Union[Dict, NoneType] = None, + max_size: int = 1024, + **kwargs: Any +) -> Dict[str, rio_tiler.models.BandStatistics] +``` + + +Return bands statistics from a dataset. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| categorical | bool | treat input data as categorical data. Defaults to False. | False | +| categories | list of numbers | list of categories to return value for. | None | +| percentiles | list of numbers | list of percentile values to calculate. Defaults to `[2, 98]`. | `[2, 98]` | +| hist_options | dict | Options to forward to numpy.histogram function. | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. | 1024 | +| kwargs | optional | Options to forward to `self.preview`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| Dict[str, rio_tiler.models.BandStatistics] | bands statistics. | + + +#### tile + +```python3 +def tile( + self, + tile_x: int, + tile_y: int, + tile_z: int, + tilesize: int = 256, + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + tile_buffer: Union[float, int, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + + +Read a Web Map tile from a COG. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. 
| None | +| tilesize | int | Output image size. Defaults to `256`. | `256` | +| indexes | int or sequence of int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| tile_buffer | int or float | Buffer on each side of the given tile. It must be a multiple of `0.5`. Output **tilesize** will be expanded to `tilesize + 2 * tile_buffer` (e.g 0.5 = 257x257, 1.0 = 258x258). | None | +| kwargs | optional | Options to forward to the `COGReader.part` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### tile_exists + +```python3 +def tile_exists( + self, + tile_x: int, + tile_y: int, + tile_z: int +) -> bool +``` + + +Check if a tile intersects the dataset bounds. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | + +**Returns:** + +| Type | Description | +|---|---| +| bool | True if the tile intersects the dataset bounds. | \ No newline at end of file diff --git a/api/rio_tiler/io/cogeo/index.html b/api/rio_tiler/io/cogeo/index.html new file mode 100644 index 00000000..1dc02708 --- /dev/null +++ b/api/rio_tiler/io/cogeo/index.html @@ -0,0 +1,3276 @@ + + + + + + + + + + + + + + + + + + + + + + + Module rio_tiler.io.cogeo - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.io.cogeo

+

rio_tiler.io.cogeo: raster processing.

+

None

+

Classes

+

COGReader

+
class COGReader(
+    input: str,
+    dataset: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.io.MemoryFile, rasterio.vrt.WarpedVRT] = None,
+    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' identifier='WebMercatorQuad'>,
+    minzoom: int = None,
+    maxzoom: int = None,
+    geographic_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    colormap: Dict = None,
+    nodata: Union[float, int, str, NoneType] = None,
+    unscale: Union[bool, NoneType] = None,
+    resampling_method: Union[rasterio.enums.Resampling, NoneType] = None,
+    vrt_options: Union[Dict, NoneType] = None,
+    post_process: Union[Callable[[numpy.ndarray, numpy.ndarray], Tuple[numpy.ndarray, numpy.ndarray]], NoneType] = None
+)
+
+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
inputstrCloud Optimized GeoTIFF path.None
datasetrasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRTRasterio dataset.None
boundstupleDataset bounds (left, bottom, right, top).None
crsrasterio.crs.CRSDataset CRS.None
tmsmorecantile.TileMatrixSetTileMatrixSet grid definition. Defaults to WebMercatorQuad.WebMercatorQuad
minzoomintSet minzoom for the tiles.None
maxzoomintSet maxzoom for the tiles.None
geographic_crsrasterio.crs.CRSCRS to use as geographic coordinate system. Defaults to WGS84.WGS84
colormapdictOverwrite internal colormap.None
nodataint or float or strGlobal options, overwrite internal nodata value.None
unscaleboolGlobal options, apply internal scale and offset on all read operations.None
resampling_methodrasterio.enums.ResamplingGlobal options, resampling method to use for read operations.None
vrt_optionsdictGlobal options, WarpedVRT options to use for read operations.None
post_processcallableGlobal options, Function to apply after all read operations.None
+

Ancestors (in MRO)

+
    +
  • rio_tiler.io.base.BaseReader
  • +
  • rio_tiler.io.base.SpatialMixin
  • +
+

Descendants

+
    +
  • rio_tiler.io.cogeo.GCPCOGReader
  • +
+

Instance variables

+
geographic_bounds
+
+

return bounds in WGS84.

+

Methods

+

close

+
def close(
+    self
+)
+
+

Close rasterio dataset.

+

feature

+
def feature(
+    self,
+    shape: Dict,
+    dst_crs: Union[rasterio.crs.CRS, NoneType] = None,
+    shape_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    max_size: Union[int, NoneType] = None,
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read part of a COG defined by a geojson feature.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
shapedictValid GeoJSON feature.None
dst_crsrasterio.crs.CRSOverwrite target coordinate reference system.None
shape_crsrasterio.crs.CRSInput geojson coordinate reference system. Defaults to epsg:4326.epsg:4326
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio.None
heightintOutput height of the array.None
widthintOutput width of the array.None
kwargsoptionalOptions to forward to the COGReader.part method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

get_zooms

+
def get_zooms(
+    self,
+    tilesize: int = 256
+) -> Tuple[int, int]
+
+

Calculate raster min/max zoom level for input TMS.

+

info

+
def info(
+    self
+) -> rio_tiler.models.Info
+
+

Return COG info.

+

part

+
def part(
+    self,
+    bbox: Tuple[float, float, float, float],
+    dst_crs: Union[rasterio.crs.CRS, NoneType] = None,
+    bounds_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    indexes: Union[int, Sequence, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    max_size: Union[int, NoneType] = None,
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read part of a COG.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bboxtupleOutput bounds (left, bottom, right, top) in target crs ("dst_crs").None
dst_crsrasterio.crs.CRSOverwrite target coordinate reference system.None
bounds_crsrasterio.crs.CRSBounds Coordinate Reference System. Defaults to epsg:4326.epsg:4326
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio.None
heightintOutput height of the array.None
widthintOutput width of the array.None
kwargsoptionalOptions to forward to the rio_tiler.reader.part function.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

point

+
def point(
+    self,
+    lon: float,
+    lat: float,
+    coord_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> List
+
+

Read a pixel value from a COG.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
lonfloatLongitude.None
latfloatLatitude.None
coord_crsrasterio.crs.CRSCoordinate Reference System of the input coords. Defaults to epsg:4326.epsg:4326
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
kwargsoptionalOptions to forward to the rio_tiler.reader.point function.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
listPixel value per band indexes.
+

preview

+
def preview(
+    self,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    max_size: int = 1024,
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Return a preview of a COG.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
heightintOutput height of the array.None
widthintOutput width of the array.None
kwargsoptionalOptions to forward to the rio_tiler.reader.preview function.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

read

+
def read(
+    self,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read the COG.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
kwargsoptionalOptions to forward to the rio_tiler.reader.read function.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

statistics

+
def statistics(
+    self,
+    categorical: bool = False,
+    categories: Union[List[float], NoneType] = None,
+    percentiles: List[int] = [2, 98],
+    hist_options: Union[Dict, NoneType] = None,
+    max_size: int = 1024,
+    **kwargs: Any
+) -> Dict[str, rio_tiler.models.BandStatistics]
+
+

Return bands statistics from a dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
categoricalbooltreat input data as categorical data. Defaults to False.False
categorieslist of numberslist of categories to return value for.None
percentileslist of numberslist of percentile values to calculate. Defaults to [2, 98].[2, 98]
hist_optionsdictOptions to forward to numpy.histogram function.None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
kwargsoptionalOptions to forward to self.preview.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Dict[str, rio_tiler.models.BandStatistics]bands statistics.
+

tile

+
def tile(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int,
+    tilesize: int = 256,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    tile_buffer: Union[float, int, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read a Web Map tile from a COG.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
tilesizeintOutput image size. Defaults to 256.256
indexesint or sequence of intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
tile_bufferint or floatBuffer on each side of the given tile. It must be a multiple of 0.5. Output tilesize will be expanded to tilesize + 2 * tile_buffer (e.g 0.5 = 257x257, 1.0 = 258x258).None
kwargsoptionalOptions to forward to the COGReader.part method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

tile_exists

+
def tile_exists(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> bool
+
+

Check if a tile intersects the dataset bounds.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
boolTrue if the tile intersects the dataset bounds.
+

GCPCOGReader

+
class GCPCOGReader(
+    input: str,
+    src_dataset: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.io.MemoryFile, rasterio.vrt.WarpedVRT] = None,
+    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' identifier='WebMercatorQuad'>,
+    minzoom: int = None,
+    maxzoom: int = None,
+    geographic_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    colormap: Dict = None,
+    nodata: Union[float, int, str, NoneType] = None,
+    unscale: Union[bool, NoneType] = None,
+    resampling_method: Union[rasterio.enums.Resampling, NoneType] = None,
+    vrt_options: Union[Dict, NoneType] = None,
+    post_process: Union[Callable[[numpy.ndarray, numpy.ndarray], Tuple[numpy.ndarray, numpy.ndarray]], NoneType] = None
+)
+
+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
inputstrCloud Optimized GeoTIFF path.None
src_datasetrasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRTRasterio dataset.None
tmsmorecantile.TileMatrixSetTileMatrixSet grid definition. Defaults to WebMercatorQuad.WebMercatorQuad
minzoomintOverwrite Min Zoom level.None
maxzoomintOverwrite Max Zoom level.None
colormapdictOverwrite internal colormap.None
nodataint or float or strGlobal options, overwrite internal nodata value.None
unscaleboolGlobal options, apply internal scale and offset on all read operations.None
resampling_methodrasterio.enums.ResamplingGlobal options, resampling method to use for read operations.None
vrt_optionsdictGlobal options, WarpedVRT options to use for read operations.None
post_processcallableGlobal options, Function to apply after all read operations.None
datasetrasterio.vrt.WarpedVRTWarped VRT constructed with dataset GCPS info. READ ONLY attribute.None
+

Ancestors (in MRO)

+
    +
  • rio_tiler.io.cogeo.COGReader
  • +
  • rio_tiler.io.base.BaseReader
  • +
  • rio_tiler.io.base.SpatialMixin
  • +
+

Instance variables

+
geographic_bounds
+
+

return bounds in WGS84.

+

Methods

+

close

+
def close(
+    self
+)
+
+

Close rasterio dataset.

+

feature

+
def feature(
+    self,
+    shape: Dict,
+    dst_crs: Union[rasterio.crs.CRS, NoneType] = None,
+    shape_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    max_size: Union[int, NoneType] = None,
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read part of a COG defined by a geojson feature.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
shapedictValid GeoJSON feature.None
dst_crsrasterio.crs.CRSOverwrite target coordinate reference system.None
shape_crsrasterio.crs.CRSInput geojson coordinate reference system. Defaults to epsg:4326.epsg:4326
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio.None
heightintOutput height of the array.None
widthintOutput width of the array.None
kwargsoptionalOptions to forward to the COGReader.part method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

get_zooms

+
def get_zooms(
+    self,
+    tilesize: int = 256
+) -> Tuple[int, int]
+
+

Calculate raster min/max zoom level for input TMS.

+

info

+
def info(
+    self
+) -> rio_tiler.models.Info
+
+

Return COG info.

+

part

+
def part(
+    self,
+    bbox: Tuple[float, float, float, float],
+    dst_crs: Union[rasterio.crs.CRS, NoneType] = None,
+    bounds_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    indexes: Union[int, Sequence, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    max_size: Union[int, NoneType] = None,
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read part of a COG.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bboxtupleOutput bounds (left, bottom, right, top) in target crs ("dst_crs").None
dst_crsrasterio.crs.CRSOverwrite target coordinate reference system.None
bounds_crsrasterio.crs.CRSBounds Coordinate Reference System. Defaults to epsg:4326.epsg:4326
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio.None
heightintOutput height of the array.None
widthintOutput width of the array.None
kwargsoptionalOptions to forward to the rio_tiler.reader.part function.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

point

+
def point(
+    self,
+    lon: float,
+    lat: float,
+    coord_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> List
+
+

Read a pixel value from a COG.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
lonfloatLongitude.None
latfloatLatitude.None
coord_crsrasterio.crs.CRSCoordinate Reference System of the input coords. Defaults to epsg:4326.epsg:4326
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
kwargsoptionalOptions to forward to the rio_tiler.reader.point function.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
listPixel value per band indexes.
+

preview

+
def preview(
+    self,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    max_size: int = 1024,
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Return a preview of a COG.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
heightintOutput height of the array.None
widthintOutput width of the array.None
kwargsoptionalOptions to forward to the rio_tiler.reader.preview function.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

read

+
def read(
+    self,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read the COG.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
kwargsoptionalOptions to forward to the rio_tiler.reader.read function.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

statistics

+
def statistics(
+    self,
+    categorical: bool = False,
+    categories: Union[List[float], NoneType] = None,
+    percentiles: List[int] = [2, 98],
+    hist_options: Union[Dict, NoneType] = None,
+    max_size: int = 1024,
+    **kwargs: Any
+) -> Dict[str, rio_tiler.models.BandStatistics]
+
+

Return bands statistics from a dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
categoricalbooltreat input data as categorical data. Defaults to False.False
categorieslist of numberslist of categories to return value for.None
percentileslist of numberslist of percentile values to calculate. Defaults to [2, 98].[2, 98]
hist_optionsdictOptions to forward to numpy.histogram function.None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
kwargsoptionalOptions to forward to self.preview.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Dict[str, rio_tiler.models.BandStatistics]bands statistics.
+

tile

+
def tile(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int,
+    tilesize: int = 256,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    tile_buffer: Union[float, int, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read a Web Map tile from a COG.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
tilesizeintOutput image size. Defaults to 256.256
indexesint or sequence of intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
tile_bufferint or floatBuffer on each side of the given tile. It must be a multiple of 0.5. Output tilesize will be expanded to tilesize + 2 * tile_buffer (e.g 0.5 = 257x257, 1.0 = 258x258).None
kwargsoptionalOptions to forward to the COGReader.part method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

tile_exists

+
def tile_exists(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> bool
+
+

Check if a tile intersects the dataset bounds.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
boolTrue if the tile intersects the dataset bounds.
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/io/rasterio/index.html b/api/rio_tiler/io/rasterio/index.html new file mode 100644 index 00000000..84e4651e --- /dev/null +++ b/api/rio_tiler/io/rasterio/index.html @@ -0,0 +1,3515 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.io.rasterio - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.io.rasterio

+

rio_tiler.io.rasterio: rio-tiler reader built on top of Rasterio

+

Variables

+
WGS84_CRS
+
+

Classes

+

ImageReader

+
class ImageReader(
+    input: str,
+    dataset: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.io.MemoryFile, rasterio.vrt.WarpedVRT] = None,
+    colormap: Dict = None,
+    options: rio_tiler.reader.Options = NOTHING
+)
+
+

Non Geo Image Reader

+

Ancestors (in MRO)

+
    +
  • rio_tiler.io.rasterio.Reader
  • +
  • rio_tiler.io.base.BaseReader
  • +
  • rio_tiler.io.base.SpatialMixin
  • +
+

Instance variables

+
geographic_bounds
+
+

Return dataset bounds in geographic_crs.

+
maxzoom
+
+

Return dataset maxzoom.

+
minzoom
+
+

Return dataset minzoom.

+

Methods

+

close

+
def close(
+    self
+)
+
+

Close rasterio dataset.

+

feature

+
def feature(
+    self,
+    shape: Dict,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    max_size: Union[int, NoneType] = None,
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    force_binary_mask: bool = True,
+    resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest',
+    unscale: bool = False,
+    post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None
+) -> rio_tiler.models.ImageData
+
+

Read part of an Image defined by a geojson feature.

+

get_maxzoom

+
def get_maxzoom(
+    self
+) -> int
+
+

Define dataset maximum zoom level.

+

get_minzoom

+
def get_minzoom(
+    self
+) -> int
+
+

Define dataset minimum zoom level.

+

info

+
def info(
+    self
+) -> rio_tiler.models.Info
+
+

Return Dataset info.

+

part

+
def part(
+    self,
+    bbox: Tuple[float, float, float, float],
+    indexes: Union[int, Sequence, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    max_size: Union[int, NoneType] = None,
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    force_binary_mask: bool = True,
+    resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest',
+    unscale: bool = False,
+    post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None
+) -> rio_tiler.models.ImageData
+
+

Read part of an Image.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bboxtupleOutput bounds (left, bottom, right, top).None
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio.None
heightintOutput height of the array.None
widthintOutput width of the array.None
force_binary_maskboolCast returned mask to binary values (0 or 255). Defaults to True.True
resampling_methodRIOResamplingRasterIO resampling algorithm. Defaults to nearest.nearest
unscaleboolApply 'scales' and 'offsets' on output data value. Defaults to False.False
post_processcallableFunction to apply on output data and mask values.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

point

+
def point(
+    self,
+    x: float,
+    y: float,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    unscale: bool = False,
+    post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None
+) -> rio_tiler.models.PointData
+
+

Read a pixel value from an Image.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
xfloatX coordinate.None
yfloatY coordinate.None
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
unscaleboolApply 'scales' and 'offsets' on output data value. Defaults to False.False
post_processcallableFunction to apply on output data and mask values.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NonePointData
+

preview

+
def preview(
+    self,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    max_size: int = 1024,
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Return a preview of a Dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
heightintOutput height of the array.None
widthintOutput width of the array.None
kwargsoptionalOptions to forward to the self.read method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

read

+
def read(
+    self,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read the Dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
kwargsoptionalOptions to forward to the rio_tiler.reader.read function.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

statistics

+
def statistics(
+    self,
+    categorical: bool = False,
+    categories: Union[List[float], NoneType] = None,
+    percentiles: Union[List[int], NoneType] = None,
+    hist_options: Union[Dict, NoneType] = None,
+    max_size: int = 1024,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> Dict[str, rio_tiler.models.BandStatistics]
+
+

Return bands statistics from a dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
categoricalbooltreat input data as categorical data. Defaults to False.False
categorieslist of numberslist of categories to return value for.None
percentileslist of numberslist of percentile values to calculate. Defaults to [2, 98].[2, 98]
hist_optionsdictOptions to forward to numpy.histogram function.None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
kwargsoptionalOptions to forward to self.read.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Dict[str, rio_tiler.models.BandStatistics]bands statistics.
+

tile

+
def tile(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int,
+    tilesize: int = 256,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    force_binary_mask: bool = True,
+    resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest',
+    unscale: bool = False,
+    post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None
+) -> rio_tiler.models.ImageData
+
+

Read a Web Map tile from an Image.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
tilesizeintOutput image size. Defaults to 256.256
indexesint or sequence of intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
force_binary_maskboolCast returned mask to binary values (0 or 255). Defaults to True.True
resampling_methodRIOResamplingRasterIO resampling algorithm. Defaults to nearest.nearest
unscaleboolApply 'scales' and 'offsets' on output data value. Defaults to False.False
post_processcallableFunction to apply on output data and mask values.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

tile_exists

+
def tile_exists(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> bool
+
+

Check if a tile intersects the dataset bounds.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
boolTrue if the tile intersects the dataset bounds.
+

LocalTileMatrixSet

+
class LocalTileMatrixSet(
+    width: int,
+    height: int,
+    tile_size: int = 256
+)
+
+

Fake TMS for non-geo image.

+

Methods

+

xy_bounds

+
def xy_bounds(
+    self,
+    *tile: morecantile.commons.Tile
+) -> morecantile.commons.BoundingBox
+
+

Return the bounding box of the (x, y, z) tile

+

Reader

+
class Reader(
+    input: str,
+    dataset: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.io.MemoryFile, rasterio.vrt.WarpedVRT] = None,
+    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857'>,
+    geographic_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    colormap: Dict = None,
+    options: rio_tiler.reader.Options = NOTHING
+)
+
+

Rasterio Reader.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
inputstrdataset path.None
datasetrasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRTRasterio dataset.None
tmsmorecantile.TileMatrixSetTileMatrixSet grid definition. Defaults to WebMercatorQuad.WebMercatorQuad
geographic_crsrasterio.crs.CRSCRS to use as geographic coordinate system. Defaults to WGS84.WGS84
colormapdictOverwrite internal colormap.None
optionsdictOptions to forward to low-level reader methods.None
+

Ancestors (in MRO)

+
    +
  • rio_tiler.io.base.BaseReader
  • +
  • rio_tiler.io.base.SpatialMixin
  • +
+

Descendants

+
    +
  • rio_tiler.io.rasterio.ImageReader
  • +
+

Instance variables

+
geographic_bounds
+
+

Return dataset bounds in geographic_crs.

+
maxzoom
+
+

Return dataset maxzoom.

+
minzoom
+
+

Return dataset minzoom.

+

Methods

+

close

+
def close(
+    self
+)
+
+

Close rasterio dataset.

+

feature

+
def feature(
+    self,
+    shape: Dict,
+    dst_crs: Union[rasterio.crs.CRS, NoneType] = None,
+    shape_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    max_size: Union[int, NoneType] = None,
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    buffer: Union[float, int, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read part of a Dataset defined by a geojson feature.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
shapedictValid GeoJSON feature.None
dst_crsrasterio.crs.CRSOverwrite target coordinate reference system.None
shape_crsrasterio.crs.CRSInput geojson coordinate reference system. Defaults to epsg:4326.epsg:4326
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio.None
heightintOutput height of the array.None
widthintOutput width of the array.None
bufferint or floatBuffer on each side of the given aoi. It must be a multiple of 0.5. Output image size will be expanded to output image size + 2 * buffer (e.g. 0.5 = 257x257, 1.0 = 258x258).None
kwargsoptionalOptions to forward to the Reader.part method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

get_maxzoom

+
def get_maxzoom(
+    self
+) -> int
+
+

Define dataset maximum zoom level.

+

get_minzoom

+
def get_minzoom(
+    self
+) -> int
+
+

Define dataset minimum zoom level.

+

info

+
def info(
+    self
+) -> rio_tiler.models.Info
+
+

Return Dataset info.

+

part

+
def part(
+    self,
+    bbox: Tuple[float, float, float, float],
+    dst_crs: Union[rasterio.crs.CRS, NoneType] = None,
+    bounds_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    indexes: Union[int, Sequence, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    max_size: Union[int, NoneType] = None,
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    buffer: Union[float, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read part of a Dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bboxtupleOutput bounds (left, bottom, right, top) in target crs ("dst_crs").None
dst_crsrasterio.crs.CRSOverwrite target coordinate reference system.None
bounds_crsrasterio.crs.CRSBounds Coordinate Reference System. Defaults to epsg:4326.epsg:4326
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio.None
heightintOutput height of the array.None
widthintOutput width of the array.None
bufferfloatBuffer on each side of the given aoi. It must be a multiple of 0.5. Output image size will be expanded to output image size + 2 * buffer (e.g. 0.5 = 257x257, 1.0 = 258x258).None
kwargsoptionalOptions to forward to the rio_tiler.reader.part function.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

point

+
def point(
+    self,
+    lon: float,
+    lat: float,
+    coord_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.PointData
+
+

Read a pixel value from a Dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
lonfloatLongitude.None
latfloatLatitude.None
coord_crsrasterio.crs.CRSCoordinate Reference System of the input coords. Defaults to epsg:4326.epsg:4326
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
kwargsoptionalOptions to forward to the rio_tiler.reader.point function.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NonePointData
+

preview

+
def preview(
+    self,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    max_size: int = 1024,
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Return a preview of a Dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
heightintOutput height of the array.None
widthintOutput width of the array.None
kwargsoptionalOptions to forward to the self.read method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

read

+
def read(
+    self,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read the Dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indexessequence of int or intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
kwargsoptionalOptions to forward to the rio_tiler.reader.read function.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

statistics

+
def statistics(
+    self,
+    categorical: bool = False,
+    categories: Union[List[float], NoneType] = None,
+    percentiles: Union[List[int], NoneType] = None,
+    hist_options: Union[Dict, NoneType] = None,
+    max_size: int = 1024,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> Dict[str, rio_tiler.models.BandStatistics]
+
+

Return bands statistics from a dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
categoricalbooltreat input data as categorical data. Defaults to False.False
categorieslist of numberslist of categories to return value for.None
percentileslist of numberslist of percentile values to calculate. Defaults to [2, 98].[2, 98]
hist_optionsdictOptions to forward to numpy.histogram function.None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
kwargsoptionalOptions to forward to self.read.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Dict[str, rio_tiler.models.BandStatistics]bands statistics.
+

tile

+
def tile(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int,
+    tilesize: int = 256,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    expression: Union[str, NoneType] = None,
+    buffer: Union[float, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read a Web Map tile from a Dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
tilesizeintOutput image size. Defaults to 256.256
indexesint or sequence of intBand indexes.None
expressionstrrio-tiler expression (e.g. b1/b2+b3).None
bufferfloatBuffer on each side of the given tile. It must be a multiple of 0.5. Output tilesize will be expanded to tilesize + 2 * tile_buffer (e.g. 0.5 = 257x257, 1.0 = 258x258).None
kwargsoptionalOptions to forward to the Reader.part method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

tile_exists

+
def tile_exists(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> bool
+
+

Check if a tile intersects the dataset bounds.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
boolTrue if the tile intersects the dataset bounds.
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/io/rasterio/rasterio.md b/api/rio_tiler/io/rasterio/rasterio.md new file mode 100644 index 00000000..74d5343f --- /dev/null +++ b/api/rio_tiler/io/rasterio/rasterio.md @@ -0,0 +1,770 @@ +# Module rio_tiler.io.rasterio + +rio_tiler.io.rasterio: rio-tiler reader built on top Rasterio + +## Variables + +```python3 +WGS84_CRS +``` + +## Classes + +### ImageReader + +```python3 +class ImageReader( + input: str, + dataset: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.io.MemoryFile, rasterio.vrt.WarpedVRT] = None, + colormap: Dict = None, + options: rio_tiler.reader.Options = NOTHING +) +``` + +Non Geo Image Reader + +#### Ancestors (in MRO) + +* rio_tiler.io.rasterio.Reader +* rio_tiler.io.base.BaseReader +* rio_tiler.io.base.SpatialMixin + +#### Instance variables + +```python3 +geographic_bounds +``` + +Return dataset bounds in geographic_crs. + +```python3 +maxzoom +``` + +Return dataset maxzoom. + +```python3 +minzoom +``` + +Return dataset minzoom. + +#### Methods + + +#### close + +```python3 +def close( + self +) +``` + +Close rasterio dataset. + + +#### feature + +```python3 +def feature( + self, + shape: Dict, + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + max_size: Union[int, NoneType] = None, + height: Union[int, NoneType] = None, + width: Union[int, NoneType] = None, + force_binary_mask: bool = True, + resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest', + unscale: bool = False, + post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None +) -> rio_tiler.models.ImageData +``` + +Read part of an Image defined by a geojson feature. + + +#### get_maxzoom + +```python3 +def get_maxzoom( + self +) -> int +``` + +Define dataset maximum zoom level. 
+ + +#### get_minzoom + +```python3 +def get_minzoom( + self +) -> int +``` + +Define dataset minimum zoom level. + + +#### info + +```python3 +def info( + self +) -> rio_tiler.models.Info +``` + +Return Dataset info. + + +#### part + +```python3 +def part( + self, + bbox: Tuple[float, float, float, float], + indexes: Union[int, Sequence, NoneType] = None, + expression: Union[str, NoneType] = None, + max_size: Union[int, NoneType] = None, + height: Union[int, NoneType] = None, + width: Union[int, NoneType] = None, + force_binary_mask: bool = True, + resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest', + unscale: bool = False, + post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None +) -> rio_tiler.models.ImageData +``` + +Read part of an Image. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| bbox | tuple | Output bounds (left, bottom, right, top). | None | +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. | None | +| height | int | Output height of the array. | None | +| width | int | Output width of the array. | None | +| force_binary_mask | bool | Cast returned mask to binary values (0 or 255). Defaults to `True`. | `True` | +| resampling_method | RIOResampling | RasterIO resampling algorithm. Defaults to `nearest`. | `nearest` | +| unscale | bool | Apply 'scales' and 'offsets' on output data value. Defaults to `False`. | `False` | +| post_process | callable | Function to apply on output data and mask values. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. 
| + + +#### point + +```python3 +def point( + self, + x: float, + y: float, + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + unscale: bool = False, + post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None +) -> rio_tiler.models.PointData +``` + +Read a pixel value from an Image. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| lon | float | X coordinate. | None | +| lat | float | Y coordinate. | None | +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| unscale | bool | Apply 'scales' and 'offsets' on output data value. Defaults to `False`. | `False` | +| post_process | callable | Function to apply on output data and mask values. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | PointData | + + +#### preview + +```python3 +def preview( + self, + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + max_size: int = 1024, + height: Union[int, NoneType] = None, + width: Union[int, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Return a preview of a Dataset. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. | 1024 | +| height | int | Output height of the array. | None | +| width | int | Output width of the array. | None | +| kwargs | optional | Options to forward to the `self.read` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. 
| + + +#### read + +```python3 +def read( + self, + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read the Dataset. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indexes | sequence of int or int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| kwargs | optional | Options to forward to the `rio_tiler.reader.read` function. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. | + + +#### statistics + +```python3 +def statistics( + self, + categorical: bool = False, + categories: Union[List[float], NoneType] = None, + percentiles: Union[List[int], NoneType] = None, + hist_options: Union[Dict, NoneType] = None, + max_size: int = 1024, + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + **kwargs: Any +) -> Dict[str, rio_tiler.models.BandStatistics] +``` + +Return bands statistics from a dataset. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| categorical | bool | treat input data as categorical data. Defaults to False. | False | +| categories | list of numbers | list of categories to return value for. | None | +| percentiles | list of numbers | list of percentile values to calculate. Defaults to `[2, 98]`. | `[2, 98]` | +| hist_options | dict | Options to forward to numpy.histogram function. | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. | 1024 | +| kwargs | optional | Options to forward to `self.read`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| Dict[str, rio_tiler.models.BandStatistics] | bands statistics. 
| + + +#### tile + +```python3 +def tile( + self, + tile_x: int, + tile_y: int, + tile_z: int, + tilesize: int = 256, + indexes: Union[Sequence[int], int, NoneType] = None, + expression: Union[str, NoneType] = None, + force_binary_mask: bool = True, + resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest', + unscale: bool = False, + post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None +) -> rio_tiler.models.ImageData +``` + +Read a Web Map tile from an Image. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | +| tilesize | int | Output image size. Defaults to `256`. | `256` | +| indexes | int or sequence of int | Band indexes. | None | +| expression | str | rio-tiler expression (e.g. b1/b2+b3). | None | +| force_binary_mask | bool | Cast returned mask to binary values (0 or 255). Defaults to `True`. | `True` | +| resampling_method | RIOResampling | RasterIO resampling algorithm. Defaults to `nearest`. | `nearest` | +| unscale | bool | Apply 'scales' and 'offsets' on output data value. Defaults to `False`. | `False` | +| post_process | callable | Function to apply on output data and mask values. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### tile_exists + +```python3 +def tile_exists( + self, + tile_x: int, + tile_y: int, + tile_z: int +) -> bool +``` + +Check if a tile intersects the dataset bounds. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| bool | True if the tile intersects the dataset bounds. | + +### LocalTileMatrixSet + +```python3 +class LocalTileMatrixSet( + width: int, + height: int, + tile_size: int = 256 +) +``` + +Fake TMS for non-geo image. + +#### Methods + + +#### xy_bounds + +```python3 +def xy_bounds( + self, + *tile: morecantile.commons.Tile +) -> morecantile.commons.BoundingBox +``` + +Return the bounding box of the (x, y, z) tile + +### Reader + +```python3 +class Reader( + input: str, + dataset: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.io.MemoryFile, rasterio.vrt.WarpedVRT] = None, + tms: morecantile.models.TileMatrixSet = bool +``` + +Check if a tile intersects the dataset bounds. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | + +**Returns:** + +| Type | Description | +|---|---| +| bool | True if the tile intersects the dataset bounds. | \ No newline at end of file diff --git a/api/rio_tiler/io/stac/index.html b/api/rio_tiler/io/stac/index.html new file mode 100644 index 00000000..b26a9913 --- /dev/null +++ b/api/rio_tiler/io/stac/index.html @@ -0,0 +1,2675 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.io.stac - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.io.stac

+

rio_tiler.io.stac: STAC reader.

+

Variables

+
DEFAULT_VALID_TYPE
+
+
WGS84_CRS
+
+
boto3_session
+
+

Functions

+

aws_get_object

+
def aws_get_object(
+    bucket: str,
+    key: str,
+    request_pays: bool = False,
+    client: 'boto3_session.client' = None
+) -> bytes
+
+

AWS s3 get object content.

+

fetch

+
def fetch(
+    filepath: str,
+    **kwargs: Any
+) -> Dict
+
+

Fetch STAC items.

+

An LRU cache is set on top of this function.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
filepathstrSTAC item URL.None
kwargsanyadditional options to pass to client.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
dictSTAC Item content.
+

Classes

+

STACReader

+
class STACReader(
+    input: str,
+    item: Union[NoneType, Dict, pystac.item.Item] = None,
+    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857'>,
+    minzoom: int = NOTHING,
+    maxzoom: int = NOTHING,
+    geographic_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    include_assets: Union[Set[str], NoneType] = None,
+    exclude_assets: Union[Set[str], NoneType] = None,
+    include_asset_types: Set[str] = {'image/tiff; profile=cloud-optimized; application=geotiff', 'image/jp2', 'image/x.geotiff', 'application/x-hdf', 'image/tiff; application=geotiff; profile=cloud-optimized', 'image/tiff; application=geotiff', 'image/tiff', 'image/vnd.stac.geotiff; cloud-optimized=true', 'application/x-hdf5'},
+    exclude_asset_types: Union[Set[str], NoneType] = None,
+    reader: Type[rio_tiler.io.base.BaseReader] = <class 'rio_tiler.io.rasterio.Reader'>,
+    reader_options: Dict = NOTHING,
+    fetch_options: Dict = NOTHING,
+    ctx: Any = <class 'rasterio.env.Env'>
+)
+
+

STAC Reader.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
inputstrSTAC Item path, URL or S3 URL.None
itemdict or pystac.Item, STACStac Item.None
tmsmorecantile.TileMatrixSetTileMatrixSet grid definition. Defaults to WebMercatorQuad.WebMercatorQuad
minzoomintSet minzoom for the tiles.None
maxzoomintSet maxzoom for the tiles.None
geographic_crsrasterio.crs.CRSCRS to use as geographic coordinate system. Defaults to WGS84.WGS84
include_assetsset of stringOnly Include specific assets.None
exclude_assetsset of stringExclude specific assets.None
include_asset_typesset of stringOnly include some assets based on their type.None
exclude_asset_typesset of stringExclude some assets based on their type.None
readerrio_tiler.io.BaseReaderrio-tiler Reader. Defaults to rio_tiler.io.Reader.rio_tiler.io.Reader
reader_optionsdictAdditional option to forward to the Reader. Defaults to {}.{}
fetch_optionsdictOptions to pass to rio_tiler.io.stac.fetch function fetching the STAC Items. Defaults to {}.{}
+

Ancestors (in MRO)

+
    +
  • rio_tiler.io.base.MultiBaseReader
  • +
  • rio_tiler.io.base.SpatialMixin
  • +
+

Instance variables

+
geographic_bounds
+
+

Return dataset bounds in geographic_crs.

+

Methods

+

feature

+
def feature(
+    self,
+    shape: Dict,
+    assets: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge parts defined by geojson feature from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
shapedictValid GeoJSON feature.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.feature method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

info

+
def info(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    **kwargs: Any
+) -> Dict[str, rio_tiler.models.Info]
+
+

Return metadata from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from. Required keyword argument.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
dictMultiple assets info in form of {"asset1": rio_tile.models.Info}.
+

merged_statistics

+
def merged_statistics(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    categorical: bool = False,
+    categories: Union[List[float], NoneType] = None,
+    percentiles: Union[List[int], NoneType] = None,
+    hist_options: Union[Dict, NoneType] = None,
+    max_size: int = 1024,
+    **kwargs: Any
+) -> Dict[str, rio_tiler.models.BandStatistics]
+
+

Return array statistics for multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
categoricalbooltreat input data as categorical data. Defaults to False.False
categorieslist of numberslist of categories to return value for.None
percentileslist of numberslist of percentile values to calculate. Defaults to [2, 98].[2, 98]
hist_optionsdictOptions to forward to numpy.histogram function.None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
kwargsoptionalOptions to forward to the self.preview method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Dict[str, rio_tiler.models.BandStatistics]bands statistics.
+

parse_expression

+
def parse_expression(
+    self,
+    expression: str,
+    asset_as_band: bool = False
+) -> Tuple
+
+

Parse rio-tiler band math expression.

+

part

+
def part(
+    self,
+    bbox: Tuple[float, float, float, float],
+    assets: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge parts from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bboxtupleOutput bounds (left, bottom, right, top) in target crs.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.part method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

point

+
def point(
+    self,
+    lon: float,
+    lat: float,
+    assets: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.PointData
+
+

Read pixel value from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
lonfloatLongitude.None
latfloatLatitude.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.point method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NonePointData
+

preview

+
def preview(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge previews from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.preview method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

statistics

+
def statistics(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    asset_expression: Union[Dict[str, str], NoneType] = None,
+    **kwargs: Any
+) -> Dict[str, Dict[str, rio_tiler.models.BandStatistics]]
+
+

Return array statistics for multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from.None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
asset_expressiondictrio-tiler expression for each asset (e.g. {"asset1": "b1/b2+b3", "asset2": ...}).None
kwargsoptionalOptions to forward to the self.reader.statistics method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
dictMultiple assets statistics in form of {"asset1": {"1": rio_tiler.models.BandStatistics, ...}}.
+

tile

+
def tile(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int,
+    assets: Union[Sequence[str], str] = None,
+    expression: Union[str, NoneType] = None,
+    asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge Web Map tiles from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.tile method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

tile_exists

+
def tile_exists(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> bool
+
+

Check if a tile intersects the dataset bounds.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
boolTrue if the tile intersects the dataset bounds.
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/io/stac/stac.md b/api/rio_tiler/io/stac/stac.md new file mode 100644 index 00000000..6e824c1b --- /dev/null +++ b/api/rio_tiler/io/stac/stac.md @@ -0,0 +1,423 @@ +# Module rio_tiler.io.stac + +rio_tiler.io.stac: STAC reader. + +## Variables + +```python3 +DEFAULT_VALID_TYPE +``` + +```python3 +WGS84_CRS +``` + +```python3 +boto3_session +``` + +## Functions + + +### aws_get_object + +```python3 +def aws_get_object( + bucket: str, + key: str, + request_pays: bool = False, + client: 'boto3_session.client' = None +) -> bytes +``` + +AWS s3 get object content. + + +### fetch + +```python3 +def fetch( + filepath: str, + **kwargs: Any +) -> Dict +``` + +Fetch STAC items. + +A LRU cache is set on top of this function. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| filepath | str | STAC item URL. | None | +| kwargs | any | additional options to pass to client. | None | + +**Returns:** + +| Type | Description | +|---|---| +| dict | STAC Item content. | + +## Classes + +### STACReader + +```python3 +class STACReader( + input: str, + item: Union[NoneType, Dict, pystac.item.Item] = None, + tms: morecantile.models.TileMatrixSet = , + reader_options: Dict = NOTHING, + fetch_options: Dict = NOTHING, + ctx: Any = +) +``` + +STAC Reader. + +#### Attributes + +| Name | Type | Description | Default | +|---|---|---|---| +| input | str | STAC Item path, URL or S3 URL. | None | +| item | dict or pystac.Item, STAC | Stac Item. | None | +| tms | morecantile.TileMatrixSet | TileMatrixSet grid definition. Defaults to `WebMercatorQuad`. | `WebMercatorQuad` | +| minzoom | int | Set minzoom for the tiles. | None | +| maxzoom | int | Set maxzoom for the tiles. | None | +| geographic_crs | rasterio.crs.CRS | CRS to use as geographic coordinate system. Defaults to WGS84. | WGS84 | +| include_assets | set of string | Only Include specific assets. 
| None | +| exclude_assets | set of string | Exclude specific assets. | None | +| include_asset_types | set of string | Only include some assets base on their type. | None | +| exclude_asset_types | set of string | Exclude some assets base on their type. | None | +| reader | rio_tiler.io.BaseReader | rio-tiler Reader. Defaults to `rio_tiler.io.Reader`. | `rio_tiler.io.Reader` | +| reader_options | dict | Additional option to forward to the Reader. Defaults to `{}`. | `{}` | +| fetch_options | dict | Options to pass to `rio_tiler.io.stac.fetch` function fetching the STAC Items. Defaults to `{}`. | `{}` | + +#### Ancestors (in MRO) + +* rio_tiler.io.base.MultiBaseReader +* rio_tiler.io.base.SpatialMixin + +#### Instance variables + +```python3 +geographic_bounds +``` + +Return dataset bounds in geographic_crs. + +#### Methods + + +#### feature + +```python3 +def feature( + self, + shape: Dict, + assets: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge parts defined by geojson feature from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| shape | dict | Valid GeoJSON feature. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.feature` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. 
| + + +#### info + +```python3 +def info( + self, + assets: Union[Sequence[str], str] = None, + **kwargs: Any +) -> Dict[str, rio_tiler.models.Info] +``` + +Return metadata from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. Required keyword argument. | None | + +**Returns:** + +| Type | Description | +|---|---| +| dict | Multiple assets info in form of {"asset1": rio_tile.models.Info}. | + + +#### merged_statistics + +```python3 +def merged_statistics( + self, + assets: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None, + categorical: bool = False, + categories: Union[List[float], NoneType] = None, + percentiles: Union[List[int], NoneType] = None, + hist_options: Union[Dict, NoneType] = None, + max_size: int = 1024, + **kwargs: Any +) -> Dict[str, rio_tiler.models.BandStatistics] +``` + +Return array statistics for multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| categorical | bool | treat input data as categorical data. Defaults to False. | False | +| categories | list of numbers | list of categories to return value for. | None | +| percentiles | list of numbers | list of percentile values to calculate. Defaults to `[2, 98]`. | `[2, 98]` | +| hist_options | dict | Options to forward to numpy.histogram function. | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. 
| 1024 | +| kwargs | optional | Options to forward to the `self.preview` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| Dict[str, rio_tiler.models.BandStatistics] | bands statistics. | + + +#### parse_expression + +```python3 +def parse_expression( + self, + expression: str, + asset_as_band: bool = False +) -> Tuple +``` + +Parse rio-tiler band math expression. + + +#### part + +```python3 +def part( + self, + bbox: Tuple[float, float, float, float], + assets: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge parts from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| bbox | tuple | Output bounds (left, bottom, right, top) in target crs. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.part` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### point + +```python3 +def point( + self, + lon: float, + lat: float, + assets: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.PointData +``` + +Read pixel value from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| lon | float | Longitude. | None | +| lat | float | Latitude. 
| None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.point` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | PointData | + + +#### preview + +```python3 +def preview( + self, + assets: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge previews from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.preview` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### statistics + +```python3 +def statistics( + self, + assets: Union[Sequence[str], str] = None, + asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None, + asset_expression: Union[Dict[str, str], NoneType] = None, + **kwargs: Any +) -> Dict[str, Dict[str, rio_tiler.models.BandStatistics]] +``` + +Return array statistics for multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. 
| None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| asset_expression | dict | rio-tiler expression for each asset (e.g. {"asset1": "b1/b2+b3", "asset2": ...}). | None | +| kwargs | optional | Options to forward to the `self.reader.statistics` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| dict | Multiple assets statistics in form of {"asset1": {"1": rio_tiler.models.BandStatistics, ...}}. | + + +#### tile + +```python3 +def tile( + self, + tile_x: int, + tile_y: int, + tile_z: int, + assets: Union[Sequence[str], str] = None, + expression: Union[str, NoneType] = None, + asset_indexes: Union[Dict[str, Union[Sequence[int], int]], NoneType] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge Wep Map tiles from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.tile` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### tile_exists + +```python3 +def tile_exists( + self, + tile_x: int, + tile_y: int, + tile_z: int +) -> bool +``` + +Check if a tile intersects the dataset bounds. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. 
| None | +| tile_z | int | Tile's zoom level index. | None | + +**Returns:** + +| Type | Description | +|---|---| +| bool | True if the tile intersects the dataset bounds. | \ No newline at end of file diff --git a/api/rio_tiler/io/xarray/index.html b/api/rio_tiler/io/xarray/index.html new file mode 100644 index 00000000..952d4c30 --- /dev/null +++ b/api/rio_tiler/io/xarray/index.html @@ -0,0 +1,2279 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.io.xarray - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.io.xarray

+

rio_tiler.io.xarray: Xarray Reader.

+

Variables

+
WGS84_CRS
+
+
rioxarray
+
+
xarray
+
+

Classes

+

XarrayReader

+
class XarrayReader(
+    input: 'xarray.DataArray',
+    tms: 'TileMatrixSet' = <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857'>,
+    geographic_crs: 'CRS' = CRS.from_epsg(4326)
+)
+
+

Xarray Reader.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
datasetxarray.DataArrayXarray DataArray dataset.None
tmsmorecantile.TileMatrixSetTileMatrixSet grid definition. Defaults to WebMercatorQuad.WebMercatorQuad
geographic_crsrasterio.crs.CRSCRS to use as geographic coordinate system. Defaults to WGS84.WGS84
+

Ancestors (in MRO)

+
    +
  • rio_tiler.io.base.BaseReader
  • +
  • rio_tiler.io.base.SpatialMixin
  • +
+

Instance variables

+
band_names
+
+

Return list of band names in DataArray.

+
geographic_bounds
+
+

Return dataset bounds in geographic_crs.

+
maxzoom
+
+

Return dataset maxzoom.

+
minzoom
+
+

Return dataset minzoom.

+

Methods

+

feature

+
def feature(
+    self,
+    shape: 'Dict',
+    dst_crs: 'Optional[CRS]' = None,
+    shape_crs: 'CRS' = CRS.from_epsg(4326),
+    resampling_method: 'WarpResampling' = 'nearest',
+    nodata: 'Optional[NoData]' = None
+) -> 'ImageData'
+
+

Read part of a dataset defined by a geojson feature.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
shapedictValid GeoJSON feature.None
dst_crsrasterio.crs.CRSOverwrite target coordinate reference system.None
shape_crsrasterio.crs.CRSInput geojson coordinate reference system. Defaults to epsg:4326.epsg:4326
resampling_methodWarpResamplingWarpKernel resampling algorithm. Defaults to nearest.nearest
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

get_maxzoom

+
def get_maxzoom(
+    self
+) -> 'int'
+
+

Define dataset maximum zoom level.

+

get_minzoom

+
def get_minzoom(
+    self
+) -> 'int'
+
+

Define dataset minimum zoom level.

+

info

+
def info(
+    self
+) -> 'Info'
+
+

Return xarray.DataArray info.

+

part

+
def part(
+    self,
+    bbox: 'BBox',
+    dst_crs: 'Optional[CRS]' = None,
+    bounds_crs: 'CRS' = CRS.from_epsg(4326),
+    resampling_method: 'WarpResampling' = 'nearest',
+    auto_expand: 'bool' = True,
+    nodata: 'Optional[NoData]' = None
+) -> 'ImageData'
+
+

Read part of a dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bboxtupleOutput bounds (left, bottom, right, top) in target crs ("dst_crs").None
dst_crsrasterio.crs.CRSOverwrite target coordinate reference system.None
bounds_crsrasterio.crs.CRSBounds Coordinate Reference System. Defaults to epsg:4326.epsg:4326
resampling_methodWarpResamplingWarpKernel resampling algorithm. Defaults to nearest.nearest
auto_expandbooleanWhen True, rioxarray's clip_box will expand clip search if only 1D raster found with clip. When False, will throw OneDimensionalRaster error if only 1 x or y data point is found. Defaults to True.True
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

point

+
def point(
+    self,
+    lon: 'float',
+    lat: 'float',
+    coord_crs: 'CRS' = CRS.from_epsg(4326),
+    nodata: 'Optional[NoData]' = None
+) -> 'PointData'
+
+

Read a pixel value from a dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
lonfloatLongitude.None
latfloatLatitude.None
coord_crsrasterio.crs.CRSCoordinate Reference System of the input coords. Defaults to epsg:4326.epsg:4326
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NonePointData
+

preview

+
def preview(
+    self,
+    max_size: 'int' = 1024,
+    height: 'Optional[int]' = None,
+    width: 'Optional[int]' = None
+) -> 'ImageData'
+
+

Return a preview of a dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
heightintOutput height of the array.None
widthintOutput width of the array.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and input spatial info.
+

statistics

+
def statistics(
+    self,
+    categorical: 'bool' = False,
+    categories: 'Optional[List[float]]' = None,
+    percentiles: 'Optional[List[int]]' = None,
+    hist_options: 'Optional[Dict]' = None,
+    max_size: 'int' = 1024,
+    **kwargs: 'Any'
+) -> 'Dict[str, BandStatistics]'
+
+

Return bands statistics from a dataset.

+

tile

+
def tile(
+    self,
+    tile_x: 'int',
+    tile_y: 'int',
+    tile_z: 'int',
+    tilesize: 'int' = 256,
+    resampling_method: 'WarpResampling' = 'nearest',
+    auto_expand: 'bool' = True,
+    nodata: 'Optional[NoData]' = None
+) -> 'ImageData'
+
+

Read a Web Map tile from a dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
tilesizeintOutput image size. Defaults to 256.256
resampling_methodWarpResamplingWarpKernel resampling algorithm. Defaults to nearest.nearest
auto_expandbooleanWhen True, rioxarray's clip_box will expand clip search if only 1D raster found with clip. When False, will throw OneDimensionalRaster error if only 1 x or y data point is found. Defaults to True.True
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

tile_exists

+
def tile_exists(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> bool
+
+

Check if a tile intersects the dataset bounds.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
boolTrue if the tile intersects the dataset bounds.
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/io/xarray/xarray.md b/api/rio_tiler/io/xarray/xarray.md new file mode 100644 index 00000000..d9608c41 --- /dev/null +++ b/api/rio_tiler/io/xarray/xarray.md @@ -0,0 +1,306 @@ +# Module rio_tiler.io.xarray + +rio_tiler.io.xarray: Xarray Reader. + +## Variables + +```python3 +WGS84_CRS +``` + +```python3 +rioxarray +``` + +```python3 +xarray +``` + +## Classes + +### XarrayReader + +```python3 +class XarrayReader( + input: 'xarray.DataArray', + tms: 'TileMatrixSet' = 'ImageData' +``` + +Read part of a dataset defined by a geojson feature. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| shape | dict | Valid GeoJSON feature. | None | +| dst_crs | rasterio.crs.CRS | Overwrite target coordinate reference system. | None | +| shape_crs | rasterio.crs.CRS | Input geojson coordinate reference system. Defaults to `epsg:4326`. | `epsg:4326` | +| resampling_method | WarpResampling | WarpKernel resampling algorithm. Defaults to `nearest`. | `nearest` | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. | + + +#### get_maxzoom + +```python3 +def get_maxzoom( + self +) -> 'int' +``` + +Define dataset maximum zoom level. + + +#### get_minzoom + +```python3 +def get_minzoom( + self +) -> 'int' +``` + +Define dataset minimum zoom level. + + +#### info + +```python3 +def info( + self +) -> 'Info' +``` + +Return xarray.DataArray info. + + +#### part + +```python3 +def part( + self, + bbox: 'BBox', + dst_crs: 'Optional[CRS]' = None, + bounds_crs: 'CRS' = CRS.from_epsg(4326), + resampling_method: 'WarpResampling' = 'nearest', + auto_expand: 'bool' = True, + nodata: 'Optional[NoData]' = None +) -> 'ImageData' +``` + +Read part of a dataset. 
+ +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| bbox | tuple | Output bounds (left, bottom, right, top) in target crs ("dst_crs"). | None | +| dst_crs | rasterio.crs.CRS | Overwrite target coordinate reference system. | None | +| bounds_crs | rasterio.crs.CRS | Bounds Coordinate Reference System. Defaults to `epsg:4326`. | `epsg:4326` | +| resampling_method | WarpResampling | WarpKernel resampling algorithm. Defaults to `nearest`. | `nearest` | +| auto_expand | boolean | When True, rioxarray's clip_box will expand clip search if only 1D raster found with clip. When False, will throw `OneDimensionalRaster` error if only 1 x or y data point is found. Defaults to True. | True | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. | + + +#### point + +```python3 +def point( + self, + lon: 'float', + lat: 'float', + coord_crs: 'CRS' = CRS.from_epsg(4326), + nodata: 'Optional[NoData]' = None +) -> 'PointData' +``` + +Read a pixel value from a dataset. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| lon | float | Longitude. | None | +| lat | float | Latitude. | None | +| coord_crs | rasterio.crs.CRS | Coordinate Reference System of the input coords. Defaults to `epsg:4326`. | `epsg:4326` | + +**Returns:** + +| Type | Description | +|---|---| +| None | PointData | + + +#### preview + +```python3 +def preview( + self, + max_size: 'int' = 1024, + height: 'Optional[int]' = None, + width: 'Optional[int]' = None +) -> 'ImageData' +``` + +Return a preview of a dataset. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. | 1024 | +| height | int | Output height of the array. | None | +| width | int | Output width of the array. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and input spatial info. | + + +#### statistics + +```python3 +def statistics( + self, + categorical: 'bool' = False, + categories: 'Optional[List[float]]' = None, + percentiles: 'Optional[List[int]]' = None, + hist_options: 'Optional[Dict]' = None, + max_size: 'int' = 1024, + **kwargs: 'Any' +) -> 'Dict[str, BandStatistics]' +``` + +Return bands statistics from a dataset. + + +#### tile + +```python3 +def tile( + self, + tile_x: 'int', + tile_y: 'int', + tile_z: 'int', + tilesize: 'int' = 256, + resampling_method: 'WarpResampling' = 'nearest', + auto_expand: 'bool' = True, + nodata: 'Optional[NoData]' = None +) -> 'ImageData' +``` + +Read a Web Map tile from a dataset. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | +| tilesize | int | Output image size. Defaults to `256`. | `256` | +| resampling_method | WarpResampling | WarpKernel resampling algorithm. Defaults to `nearest`. | `nearest` | +| auto_expand | boolean | When True, rioxarray's clip_box will expand clip search if only 1D raster found with clip. When False, will throw `OneDimensionalRaster` error if only 1 x or y data point is found. Defaults to True. | True | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### tile_exists + +```python3 +def tile_exists( + self, + tile_x: int, + tile_y: int, + tile_z: int +) -> bool +``` + +Check if a tile intersects the dataset bounds. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| bool | True if the tile intersects the dataset bounds. | \ No newline at end of file diff --git a/api/rio_tiler/models/index.html b/api/rio_tiler/models/index.html new file mode 100644 index 00000000..557954bb --- /dev/null +++ b/api/rio_tiler/models/index.html @@ -0,0 +1,8755 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.models - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.models

+

rio-tiler models.

+

Variables

+
dtype_ranges
+
+

Functions

+

masked_and_3d

+
def masked_and_3d(
+    array: numpy.ndarray
+) -> numpy.ma.core.MaskedArray
+
+

Makes sure we have a 3D array and mask.

+

rescale_image

+
def rescale_image(
+    array: numpy.ma.core.MaskedArray,
+    in_range: Sequence[Tuple[Union[float, int], Union[float, int]]],
+    out_range: Sequence[Tuple[Union[float, int], Union[float, int]]] = ((0, 255),),
+    out_dtype: Union[str, numpy.number] = 'uint8'
+) -> numpy.ma.core.MaskedArray
+
+

Rescale image data in-place.

+

to_coordsbbox

+
def to_coordsbbox(
+    bbox
+) -> Union[rasterio.coords.BoundingBox, NoneType]
+
+

Convert bbox to CoordsBbox nameTuple.

+

to_masked

+
def to_masked(
+    array: numpy.ndarray
+) -> numpy.ma.core.MaskedArray
+
+

Makes sure we have a MaskedArray.

+

Classes

+

BandStatistics

+
class BandStatistics(
+    __pydantic_self__,
+    **data: 'Any'
+)
+
+

Band statistics

+

Ancestors (in MRO)

+
    +
  • rio_tiler.models.RioTilerBaseModel
  • +
  • pydantic.main.BaseModel
  • +
+

Class variables

+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. +Default values are respected, but no other validation is performed. +Behaves as if Config.extra = 'allow' was set since it adds all passed values.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_fields_setNoneThe set of field names accepted for the Model instance.None
valuesNoneTrusted or pre-validated data dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA new instance of the Model class with validated data.
+

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object to validate.None
strictNoneWhether to raise an exception on invalid fields.None
from_attributesNoneWhether to extract data from object attributes.None
contextNoneAdditional context to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated model instance.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValidationErrorIf the object could not be validated.
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given JSON data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
json_dataNoneThe JSON data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValueErrorIf json_data is not a JSON string.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: '_deprecated_parse.Protocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: '_deprecated_parse.Protocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

Instance variables

+
model_computed_fields
+
+

Get the computed fields of this model instance.

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
includeNoneOptional set or mapping
specifying which fields to include in the copied model.
None
excludeNoneOptional set or mapping
specifying which fields to exclude in the copied model.
None
updateNoneOptional dictionary of field-value pairs to override field values
in the copied model.
None
deepNoneIf True, the values of fields that are Pydantic models will be deep copied.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA copy of the model with included, excluded and updated fields as specified.
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
updateNoneValues to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data.
None
deepNoneSet to True to make a deep copy of the model.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneNew model instance.
+

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal[('json', 'python')] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
modeNoneThe mode in which to_python should run.
If mode is 'json', the dictionary will only contain JSON serializable types.
If mode is 'python', the dictionary may contain any Python objects.
None
includeNoneA list of fields to include in the output.None
excludeNoneA list of fields to exclude from the output.None
by_aliasNoneWhether to use the field's alias in the dictionary key if defined.None
exclude_unsetNoneWhether to exclude fields that are unset or None from the output.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value from the output.None
exclude_noneNoneWhether to exclude fields that have a value of None from the output.None
round_tripNoneWhether to enable serialization and deserialization round-trip support.None
warningsNoneWhether to log warnings when invalid fields are encountered.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA dictionary representation of the model.
+

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: 'bool' = True
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indentNoneIndentation to use in the JSON output. If None is passed, the output will be compact.None
includeNoneField(s) to include in the JSON output. Can take either a string or set of strings.None
excludeNoneField(s) to exclude from the JSON output. Can take either a string or set of strings.None
by_aliasNoneWhether to serialize using field aliases.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that have the default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneWhether to use serialization/deserialization between JSON and class instance.None
warningsNoneWhether to show any warnings that occurred during serialization.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA JSON string representation of the model.
+

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+

Bounds

+
class Bounds(
+    __pydantic_self__,
+    **data: 'Any'
+)
+
+

Dataset Bounding box

+

Ancestors (in MRO)

+
    +
  • rio_tiler.models.RioTilerBaseModel
  • +
  • pydantic.main.BaseModel
  • +
+

Descendants

+
    +
  • rio_tiler.models.SpatialInfo
  • +
+

Class variables

+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. +Default values are respected, but no other validation is performed. +Behaves as if Config.extra = 'allow' was set since it adds all passed values

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_fields_setNoneThe set of field names accepted for the Model instance.None
valuesNoneTrusted or pre-validated data dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA new instance of the Model class with validated data.
+

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object to validate.None
strictNoneWhether to raise an exception on invalid fields.None
from_attributesNoneWhether to extract data from object attributes.None
contextNoneAdditional context to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated model instance.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValidationErrorIf the object could not be validated.
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given JSON data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
json_dataNoneThe JSON data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValueErrorIf json_data is not a JSON string.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: '_deprecated_parse.Protocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: '_deprecated_parse.Protocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

Instance variables

+
model_computed_fields
+
+

Get the computed fields of this model instance.

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
includeNoneOptional set or mapping
specifying which fields to include in the copied model.
None
excludeNoneOptional set or mapping
specifying which fields to exclude in the copied model.
None
updateNoneOptional dictionary of field-value pairs to override field values
in the copied model.
None
deepNoneIf True, the values of fields that are Pydantic models will be deep copied.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA copy of the model with included, excluded and updated fields as specified.
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
updateNoneValues to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data.
None
deepNoneSet to True to make a deep copy of the model.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneNew model instance.
+

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal[('json', 'python')] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
modeNoneThe mode in which to_python should run.
If mode is 'json', the dictionary will only contain JSON serializable types.
If mode is 'python', the dictionary may contain any Python objects.
None
includeNoneA list of fields to include in the output.None
excludeNoneA list of fields to exclude from the output.None
by_aliasNoneWhether to use the field's alias in the dictionary key if defined.None
exclude_unsetNoneWhether to exclude fields that are unset or None from the output.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value from the output.None
exclude_noneNoneWhether to exclude fields that have a value of None from the output.None
round_tripNoneWhether to enable serialization and deserialization round-trip support.None
warningsNoneWhether to log warnings when invalid fields are encountered.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA dictionary representation of the model.
+

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: 'bool' = True
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indentNoneIndentation to use in the JSON output. If None is passed, the output will be compact.None
includeNoneField(s) to include in the JSON output. Can take either a string or set of strings.None
excludeNoneField(s) to exclude from the JSON output. Can take either a string or set of strings.None
by_aliasNoneWhether to serialize using field aliases.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that have the default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneWhether to use serialization/deserialization between JSON and class instance.None
warningsNoneWhether to show any warnings that occurred during serialization.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA JSON string representation of the model.
+

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+

ImageData

+
class ImageData(
+    array: numpy.ndarray,
+    cutline_mask: Union[numpy.ndarray, NoneType] = None,
+    *,
+    assets: Union[List, NoneType] = None,
+    bounds=None,
+    crs: Union[rasterio.crs.CRS, NoneType] = None,
+    metadata: Union[Dict, NoneType] = NOTHING,
+    band_names: List[str] = NOTHING,
+    dataset_statistics: Union[Sequence[Tuple[float, float]], NoneType] = None
+)
+
+

Image Data class.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arraynumpy.ma.MaskedArrayimage values.None
assetslistlist of assets used to construct the data values.None
boundsBoundingBoxbounding box of the data.None
crsrasterio.crs.CRSCoordinates Reference System of the bounds.None
metadatadictAdditional metadata. Defaults to {}.{}
band_nameslistname of each band. Defaults to ["1", "2", "3"] for 3 bands image.["1", "2", "3"] for 3 bands image
dataset_statisticslistdataset statistics [(min, max), (min, max)]None
+

Static methods

+

create_from_list

+
def create_from_list(
+    data: Sequence[ForwardRef('ImageData')]
+) -> 'ImageData'
+
+

Create ImageData from a sequence of ImageData objects.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
datasequencesequence of ImageData.None
+

from_array

+
def from_array(
+    arr: numpy.ndarray
+) -> 'ImageData'
+
+

Create ImageData from a numpy array.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arrnumpy.ndarrayNumpy array or Numpy masked array.None
+

from_bytes

+
def from_bytes(
+    data: bytes
+) -> 'ImageData'
+
+

Create ImageData from bytes.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
databytesraster dataset as bytes.None
+

Instance variables

+
count
+
+

Number of band.

+
data
+
+

Return data part of the masked array.

+
height
+
+

Height of the data array.

+
mask
+
+

Return Mask in form of rasterio dataset mask.

+
transform
+
+

Returns the affine transform.

+
width
+
+

Width of the data array.

+

Methods

+

apply_color_formula

+
def apply_color_formula(
+    self,
+    color_formula: Union[str, NoneType]
+)
+
+

Apply color-operations formula in place.

+

apply_colormap

+
def apply_colormap(
+    self,
+    colormap: Union[Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]]]
+) -> 'ImageData'
+
+

Apply colormap to the image data.

+

apply_expression

+
def apply_expression(
+    self,
+    expression: str
+) -> 'ImageData'
+
+

Apply expression to the image data.

+

as_masked

+
def as_masked(
+    self
+) -> numpy.ma.core.MaskedArray
+
+

return a numpy masked array.

+

clip

+
def clip(
+    self,
+    bbox: Tuple[float, float, float, float]
+) -> 'ImageData'
+
+

Clip data and mask to a bbox.

+

data_as_image

+
def data_as_image(
+    self
+) -> numpy.ndarray
+
+

Return the data array reshaped into an image processing/visualization software friendly order.

+

(bands, rows, columns) -> (rows, columns, bands).

+

post_process

+
def post_process(
+    self,
+    in_range: Union[Sequence[Tuple[Union[float, int], Union[float, int]]], NoneType] = None,
+    out_dtype: Union[str, numpy.number] = 'uint8',
+    color_formula: Union[str, NoneType] = None,
+    **kwargs: Any
+) -> 'ImageData'
+
+

Post-process image data.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
in_rangetupleinput min/max bounds value to rescale from.None
out_dtypestroutput datatype after rescaling. Defaults to uint8.uint8
color_formulastrcolor-ops formula (see: vincentsarago/color-ops).None
kwargsoptionalkeyword arguments to forward to rio_tiler.utils.linear_rescale.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
ImageDatanew ImageData object with the updated data.
+

render

+
def render(
+    self,
+    add_mask: bool = True,
+    img_format: str = 'PNG',
+    colormap: Union[Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]], NoneType] = None,
+    **kwargs
+) -> bytes
+
+

Render data to image blob.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
add_maskbooladd mask to output image. Defaults to True.True
img_formatstroutput image format. Defaults to PNG.PNG
colormapdict or sequenceRGBA Color Table dictionary or sequence.None
kwargsoptionalkeyword arguments to forward to rio_tiler.utils.render.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
bytesimage.
+

rescale

+
def rescale(
+    self,
+    in_range: Sequence[Tuple[Union[float, int], Union[float, int]]],
+    out_range: Sequence[Tuple[Union[float, int], Union[float, int]]] = ((0, 255),),
+    out_dtype: Union[str, numpy.number] = 'uint8'
+)
+
+

Rescale data in place.

+

resize

+
def resize(
+    self,
+    height: int,
+    width: int,
+    resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest'
+) -> 'ImageData'
+
+

Resize data and mask.

+

statistics

+
def statistics(
+    self,
+    categorical: bool = False,
+    categories: Union[List[float], NoneType] = None,
+    percentiles: Union[List[int], NoneType] = None,
+    hist_options: Union[Dict, NoneType] = None
+) -> Dict[str, rio_tiler.models.BandStatistics]
+
+

Return statistics from ImageData.

+

Info

+
class Info(
+    __pydantic_self__,
+    **data: 'Any'
+)
+
+

Dataset Info.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.models.SpatialInfo
  • +
  • rio_tiler.models.Bounds
  • +
  • rio_tiler.models.RioTilerBaseModel
  • +
  • pydantic.main.BaseModel
  • +
+

Class variables

+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. +Default values are respected, but no other validation is performed. +Behaves as if Config.extra = 'allow' was set since it adds all passed values

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_fields_setNoneThe set of field names accepted for the Model instance.None
valuesNoneTrusted or pre-validated data dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA new instance of the Model class with validated data.
+

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object to validate.None
strictNoneWhether to raise an exception on invalid fields.None
from_attributesNoneWhether to extract data from object attributes.None
contextNoneAdditional context to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated model instance.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValidationErrorIf the object could not be validated.
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given JSON data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
json_dataNoneThe JSON data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValueErrorIf json_data is not a JSON string.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: '_deprecated_parse.Protocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: '_deprecated_parse.Protocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

Instance variables

+
model_computed_fields
+
+

Get the computed fields of this model instance.

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
includeNoneOptional set or mapping
specifying which fields to include in the copied model.
None
excludeNoneOptional set or mapping
specifying which fields to exclude in the copied model.
None
updateNoneOptional dictionary of field-value pairs to override field values
in the copied model.
None
deepNoneIf True, the values of fields that are Pydantic models will be deep copied.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA copy of the model with included, excluded and updated fields as specified.
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
updateNoneValues to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data.
None
deepNoneSet to True to make a deep copy of the model.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneNew model instance.
+

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal[('json', 'python')] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
modeNoneThe mode in which to_python should run.
If mode is 'json', the dictionary will only contain JSON serializable types.
If mode is 'python', the dictionary may contain any Python objects.
None
includeNoneA list of fields to include in the output.None
excludeNoneA list of fields to exclude from the output.None
by_aliasNoneWhether to use the field's alias in the dictionary key if defined.None
exclude_unsetNoneWhether to exclude fields that are unset or None from the output.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value from the output.None
exclude_noneNoneWhether to exclude fields that have a value of None from the output.None
round_tripNoneWhether to enable serialization and deserialization round-trip support.None
warningsNoneWhether to log warnings when invalid fields are encountered.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA dictionary representation of the model.
+

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: 'bool' = True
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indentNoneIndentation to use in the JSON output. If None is passed, the output will be compact.None
includeNoneField(s) to include in the JSON output. Can take either a string or set of strings.None
excludeNoneField(s) to exclude from the JSON output. Can take either a string or set of strings.None
by_aliasNoneWhether to serialize using field aliases.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that have the default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneWhether to use serialization/deserialization between JSON and class instance.None
warningsNoneWhether to show any warnings that occurred during serialization.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA JSON string representation of the model.
+

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+

PointData

+
class PointData(
+    array: numpy.ndarray,
+    *,
+    band_names: List[str] = NOTHING,
+    coordinates: Union[Tuple[float, float], NoneType] = None,
+    crs: Union[rasterio.crs.CRS, NoneType] = None,
+    assets: Union[List, NoneType] = None,
+    metadata: Union[Dict, NoneType] = NOTHING
+)
+
+

Point Data class.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arraynumpy.ma.MaskedArraypixel values.None
band_nameslistname of each band. Defaults to ["1", "2", "3"] for 3 bands image.["1", "2", "3"] for 3 bands image
coordinatestuplePoint's coordinates.None
crsrasterio.crs.CRSCoordinates Reference System of the bounds.None
assetslistlist of assets used to construct the data values.None
metadatadictAdditional metadata. Defaults to {}.{}
+

Static methods

+

create_from_list

+
def create_from_list(
+    data: Sequence[ForwardRef('PointData')]
+)
+
+

Create PointData from a sequence of PointData objects.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
datasequencesequence of PointData.None
+

Instance variables

+
count
+
+

Number of bands.

+
data
+
+

Return data part of the masked array.

+
mask
+
+

Return Mask in form of rasterio dataset mask.

+

Methods

+

apply_expression

+
def apply_expression(
+    self,
+    expression: str
+) -> 'PointData'
+
+

Apply expression to the image data.

+

as_masked

+
def as_masked(
+    self
+) -> numpy.ma.core.MaskedArray
+
+

return a numpy masked array.

+

RioTilerBaseModel

+
class RioTilerBaseModel(
+    __pydantic_self__,
+    **data: 'Any'
+)
+
+

Provides dictionary access for pydantic models, for backwards compatibility.

+

Ancestors (in MRO)

+
    +
  • pydantic.main.BaseModel
  • +
+

Descendants

+
    +
  • rio_tiler.models.Bounds
  • +
  • rio_tiler.models.BandStatistics
  • +
+

Class variables

+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. +Default values are respected, but no other validation is performed. +Behaves as if Config.extra = 'allow' was set since it adds all passed values

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_fields_setNoneThe set of field names accepted for the Model instance.None
valuesNoneTrusted or pre-validated data dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA new instance of the Model class with validated data.
+

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object to validate.None
strictNoneWhether to raise an exception on invalid fields.None
from_attributesNoneWhether to extract data from object attributes.None
contextNoneAdditional context to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated model instance.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValidationErrorIf the object could not be validated.
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given JSON data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
json_dataNoneThe JSON data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValueErrorIf json_data is not a JSON string.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: '_deprecated_parse.Protocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: '_deprecated_parse.Protocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

Instance variables

+
model_computed_fields
+
+

Get the computed fields of this model instance.

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
includeNoneOptional set or mapping
specifying which fields to include in the copied model.
None
excludeNoneOptional set or mapping
specifying which fields to exclude in the copied model.
None
updateNoneOptional dictionary of field-value pairs to override field values
in the copied model.
None
deepNoneIf True, the values of fields that are Pydantic models will be deep copied.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA copy of the model with included, excluded and updated fields as specified.
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
updateNoneValues to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data.
None
deepNoneSet to True to make a deep copy of the model.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneNew model instance.
+

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal[('json', 'python')] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
modeNoneThe mode in which to_python should run.
If mode is 'json', the dictionary will only contain JSON serializable types.
If mode is 'python', the dictionary may contain any Python objects.
None
includeNoneA list of fields to include in the output.None
excludeNoneA list of fields to exclude from the output.None
by_aliasNoneWhether to use the field's alias in the dictionary key if defined.None
exclude_unsetNoneWhether to exclude fields that are unset or None from the output.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value from the output.None
exclude_noneNoneWhether to exclude fields that have a value of None from the output.None
round_tripNoneWhether to enable serialization and deserialization round-trip support.None
warningsNoneWhether to log warnings when invalid fields are encountered.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA dictionary representation of the model.
+

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: 'bool' = True
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indentNoneIndentation to use in the JSON output. If None is passed, the output will be compact.None
includeNoneField(s) to include in the JSON output. Can take either a string or set of strings.None
excludeNoneField(s) to exclude from the JSON output. Can take either a string or set of strings.None
by_aliasNoneWhether to serialize using field aliases.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that have the default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneWhether to use serialization/deserialization between JSON and class instance.None
warningsNoneWhether to show any warnings that occurred during serialization.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA JSON string representation of the model.
+

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+

SpatialInfo

+
class SpatialInfo(
+    __pydantic_self__,
+    **data: 'Any'
+)
+
+

Dataset SpatialInfo

+

Ancestors (in MRO)

+
    +
  • rio_tiler.models.Bounds
  • +
  • rio_tiler.models.RioTilerBaseModel
  • +
  • pydantic.main.BaseModel
  • +
+

Descendants

+
    +
  • rio_tiler.models.Info
  • +
+

Class variables

+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. +Default values are respected, but no other validation is performed. +Behaves as if Config.extra = 'allow' was set since it adds all passed values

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_fields_setNoneThe set of field names accepted for the Model instance.None
valuesNoneTrusted or pre-validated data dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA new instance of the Model class with validated data.
+

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object to validate.None
strictNoneWhether to raise an exception on invalid fields.None
from_attributesNoneWhether to extract data from object attributes.None
contextNoneAdditional context to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated model instance.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValidationErrorIf the object could not be validated.
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given JSON data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
json_dataNoneThe JSON data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValueErrorIf json_data is not a JSON string.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: '_deprecated_parse.Protocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: '_deprecated_parse.Protocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

Instance variables

+
model_computed_fields
+
+

Get the computed fields of this model instance.

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
includeNoneOptional set or mapping
specifying which fields to include in the copied model.
None
excludeNoneOptional set or mapping
specifying which fields to exclude in the copied model.
None
updateNoneOptional dictionary of field-value pairs to override field values
in the copied model.
None
deepNoneIf True, the values of fields that are Pydantic models will be deep copied.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA copy of the model with included, excluded and updated fields as specified.
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
updateNoneValues to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data.
None
deepNoneSet to True to make a deep copy of the model.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneNew model instance.
+

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal[('json', 'python')] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
modeNoneThe mode in which to_python should run.
If mode is 'json', the dictionary will only contain JSON serializable types.
If mode is 'python', the dictionary may contain any Python objects.
None
includeNoneA list of fields to include in the output.None
excludeNoneA list of fields to exclude from the output.None
by_aliasNoneWhether to use the field's alias in the dictionary key if defined.None
exclude_unsetNoneWhether to exclude fields that are unset or None from the output.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value from the output.None
exclude_noneNoneWhether to exclude fields that have a value of None from the output.None
round_tripNoneWhether to enable serialization and deserialization round-trip support.None
warningsNoneWhether to log warnings when invalid fields are encountered.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA dictionary representation of the model.
+

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: 'bool' = True
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indentNoneIndentation to use in the JSON output. If None is passed, the output will be compact.None
includeNoneField(s) to include in the JSON output. Can take either a string or set of strings.None
excludeNoneField(s) to exclude from the JSON output. Can take either a string or set of strings.None
by_aliasNoneWhether to serialize using field aliases.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that have the default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneWhether to use serialization/deserialization between JSON and class instance.None
warningsNoneWhether to show any warnings that occurred during serialization.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA JSON string representation of the model.
+

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/models/models.md b/api/rio_tiler/models/models.md new file mode 100644 index 00000000..63ecf304 --- /dev/null +++ b/api/rio_tiler/models/models.md @@ -0,0 +1,3184 @@ +# Module rio_tiler.models + +rio-tiler models. + +## Variables + +```python3 +dtype_ranges +``` + +## Functions + + +### masked_and_3d + +```python3 +def masked_and_3d( + array: numpy.ndarray +) -> numpy.ma.core.MaskedArray +``` + +Makes sure we have a 3D array and mask + + +### rescale_image + +```python3 +def rescale_image( + array: numpy.ma.core.MaskedArray, + in_range: Sequence[Tuple[Union[float, int], Union[float, int]]], + out_range: Sequence[Tuple[Union[float, int], Union[float, int]]] = ((0, 255),), + out_dtype: Union[str, numpy.number] = 'uint8' +) -> numpy.ma.core.MaskedArray +``` + +Rescale image data in-place. + + +### to_coordsbbox + +```python3 +def to_coordsbbox( + bbox +) -> Union[rasterio.coords.BoundingBox, NoneType] +``` + +Convert bbox to CoordsBbox nameTuple. + + +### to_masked + +```python3 +def to_masked( + array: numpy.ndarray +) -> numpy.ma.core.MaskedArray +``` + +Makes sure we have a MaskedArray. + +## Classes + +### BandStatistics + +```python3 +class BandStatistics( + __pydantic_self__, + **data: 'Any' +) +``` + +Band statistics + +#### Ancestors (in MRO) + +* rio_tiler.models.RioTilerBaseModel +* pydantic.main.BaseModel + +#### Class variables + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. 
+ +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. +Behaves as if `Config.extra = 'allow'` was set since it adds all passed values + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to raise an exception on invalid fields. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. 
| + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: '_deprecated_parse.Protocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: '_deprecated_parse.Protocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + +#### Instance variables + +```python3 +model_computed_fields +``` + +Get the computed fields of this model instance. + +```python3 +model_extra +``` + +Get extra fields set during validation. + +```python3 +model_fields_set +``` + +Returns the set of fields that have been set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. 
+ +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping
specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping
specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values
in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep copied. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal[('json', 'python')] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: 'bool' = True +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the dictionary will only contain JSON serializable types.
If mode is 'python', the dictionary may contain any Python objects. | None | +| include | None | A list of fields to include in the output. | None | +| exclude | None | A list of fields to exclude from the output. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that are unset or None from the output. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value from the output. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None` from the output. | None | +| round_trip | None | Whether to enable serialization and deserialization round-trip support. | None | +| warnings | None | Whether to log warnings when invalid fields are encountered. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: 'bool' = True +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. Can take either a string or set of strings. | None | +| exclude | None | Field(s) to exclude from the JSON output. Can take either a string or set of strings. | None | +| by_alias | None | Whether to serialize using field aliases. 
| None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that have the default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | Whether to use serialization/deserialization between JSON and class instance. | None | +| warnings | None | Whether to show any warnings that occurred during serialization. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### Bounds + +```python3 +class Bounds( + __pydantic_self__, + **data: 'Any' +) +``` + +Dataset Bounding box + +#### Ancestors (in MRO) + +* rio_tiler.models.RioTilerBaseModel +* pydantic.main.BaseModel + +#### Descendants + +* rio_tiler.models.SpatialInfo + +#### Class variables + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. 
+Behaves as if `Config.extra = 'allow'` was set since it adds all passed values + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to raise an exception on invalid fields. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. 
| + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: '_deprecated_parse.Protocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: '_deprecated_parse.Protocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + +#### Instance variables + +```python3 +model_computed_fields +``` + +Get the computed fields of this model instance. + +```python3 +model_extra +``` + +Get extra fields set during validation. + +```python3 +model_fields_set +``` + +Returns the set of fields that have been set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. 
+ +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping
specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping
specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values
in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep copied. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal[('json', 'python')] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: 'bool' = True +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the dictionary will only contain JSON serializable types.
If mode is 'python', the dictionary may contain any Python objects. | None | +| include | None | A list of fields to include in the output. | None | +| exclude | None | A list of fields to exclude from the output. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that are unset or None from the output. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value from the output. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None` from the output. | None | +| round_trip | None | Whether to enable serialization and deserialization round-trip support. | None | +| warnings | None | Whether to log warnings when invalid fields are encountered. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: 'bool' = True +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. Can take either a string or set of strings. | None | +| exclude | None | Field(s) to exclude from the JSON output. Can take either a string or set of strings. | None | +| by_alias | None | Whether to serialize using field aliases. 
| None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that have the default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | Whether to use serialization/deserialization between JSON and class instance. | None | +| warnings | None | Whether to show any warnings that occurred during serialization. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### ImageData + +```python3 +class ImageData( + array: numpy.ndarray, + cutline_mask: Union[numpy.ndarray, NoneType] = None, + *, + assets: Union[List, NoneType] = None, + bounds=None, + crs: Union[rasterio.crs.CRS, NoneType] = None, + metadata: Union[Dict, NoneType] = NOTHING, + band_names: List[str] = NOTHING, + dataset_statistics: Union[Sequence[Tuple[float, float]], NoneType] = None +) +``` + +Image Data class. + +#### Attributes + +| Name | Type | Description | Default | +|---|---|---|---| +| array | numpy.ma.MaskedArray | image values. | None | +| assets | list | list of assets used to construct the data values. | None | +| bounds | BoundingBox | bounding box of the data. | None | +| crs | rasterio.crs.CRS | Coordinates Reference System of the bounds. | None | +| metadata | dict | Additional metadata. Defaults to `{}`. | `{}` | +| band_names | list | name of each band. Defaults to `["1", "2", "3"]` for 3 bands image. 
| `["1", "2", "3"]` for 3 bands image | +| dataset_statistics | list | dataset statistics `[(min, max), (min, max)]` | None | + +#### Static methods + + +#### create_from_list + +```python3 +def create_from_list( + data: Sequence[ForwardRef('ImageData')] +) -> 'ImageData' +``` + +Create ImageData from a sequence of ImageData objects. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| data | sequence | sequence of ImageData. | None | + + +#### from_array + +```python3 +def from_array( + arr: numpy.ndarray +) -> 'ImageData' +``` + +Create ImageData from a numpy array. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| arr | numpy.ndarray | Numpy array or Numpy masked array. | None | + + +#### from_bytes + +```python3 +def from_bytes( + data: bytes +) -> 'ImageData' +``` + +Create ImageData from bytes. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| data | bytes | raster dataset as bytes. | None | + +#### Instance variables + +```python3 +count +``` + +Number of band. + +```python3 +data +``` + +Return data part of the masked array. + +```python3 +height +``` + +Height of the data array. + +```python3 +mask +``` + +Return Mask in form of rasterio dataset mask. + +```python3 +transform +``` + +Returns the affine transform. + +```python3 +width +``` + +Width of the data array. + +#### Methods + + +#### apply_color_formula + +```python3 +def apply_color_formula( + self, + color_formula: Union[str, NoneType] +) +``` + +Apply color-operations formula in place. + + +#### apply_colormap + +```python3 +def apply_colormap( + self, + colormap: Union[Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]]] +) -> 'ImageData' +``` + +Apply colormap to the image data. 
+ + +#### apply_expression + +```python3 +def apply_expression( + self, + expression: str +) -> 'ImageData' +``` + +Apply expression to the image data. + + +#### as_masked + +```python3 +def as_masked( + self +) -> numpy.ma.core.MaskedArray +``` + +return a numpy masked array. + + +#### clip + +```python3 +def clip( + self, + bbox: Tuple[float, float, float, float] +) -> 'ImageData' +``` + +Clip data and mask to a bbox. + + +#### data_as_image + +```python3 +def data_as_image( + self +) -> numpy.ndarray +``` + +Return the data array reshaped into an image processing/visualization software friendly order. + +(bands, rows, columns) -> (rows, columns, bands). + + +#### post_process + +```python3 +def post_process( + self, + in_range: Union[Sequence[Tuple[Union[float, int], Union[float, int]]], NoneType] = None, + out_dtype: Union[str, numpy.number] = 'uint8', + color_formula: Union[str, NoneType] = None, + **kwargs: Any +) -> 'ImageData' +``` + +Post-process image data. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| in_range | tuple | input min/max bounds value to rescale from. | None | +| out_dtype | str | output datatype after rescaling. Defaults to `uint8`. | `uint8` | +| color_formula | str | color-ops formula (see: https://github.com/vincentsarago/color-ops). | None | +| kwargs | optional | keyword arguments to forward to `rio_tiler.utils.linear_rescale`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| ImageData | new ImageData object with the updated data. | + + +#### render + +```python3 +def render( + self, + add_mask: bool = True, + img_format: str = 'PNG', + colormap: Union[Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]], NoneType] = None, + **kwargs +) -> bytes +``` + +Render data to image blob. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| add_mask | bool | add mask to output image. 
Defaults to `True`. | `True` | +| img_format | str | output image format. Defaults to `PNG`. | `PNG` | +| colormap | dict or sequence | RGBA Color Table dictionary or sequence. | None | +| kwargs | optional | keyword arguments to forward to `rio_tiler.utils.render`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| bytes | image. | + + +#### rescale + +```python3 +def rescale( + self, + in_range: Sequence[Tuple[Union[float, int], Union[float, int]]], + out_range: Sequence[Tuple[Union[float, int], Union[float, int]]] = ((0, 255),), + out_dtype: Union[str, numpy.number] = 'uint8' +) +``` + +Rescale data in place. + + +#### resize + +```python3 +def resize( + self, + height: int, + width: int, + resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest' +) -> 'ImageData' +``` + +Resize data and mask. + + +#### statistics + +```python3 +def statistics( + self, + categorical: bool = False, + categories: Union[List[float], NoneType] = None, + percentiles: Union[List[int], NoneType] = None, + hist_options: Union[Dict, NoneType] = None +) -> Dict[str, rio_tiler.models.BandStatistics] +``` + +Return statistics from ImageData. + +### Info + +```python3 +class Info( + __pydantic_self__, + **data: 'Any' +) +``` + +Dataset Info. 
+ +#### Ancestors (in MRO) + +* rio_tiler.models.SpatialInfo +* rio_tiler.models.Bounds +* rio_tiler.models.RioTilerBaseModel +* pydantic.main.BaseModel + +#### Class variables + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. +Behaves as if `Config.extra = 'allow'` was set since it adds all passed values + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to raise an exception on invalid fields. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. 
| + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: '_deprecated_parse.Protocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: '_deprecated_parse.Protocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + +#### Instance variables + +```python3 +model_computed_fields +``` + +Get the computed fields of this model instance. + +```python3 +model_extra +``` + +Get extra fields set during validation. + +```python3 +model_fields_set +``` + +Returns the set of fields that have been set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. 
+ +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping
specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping
specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values
in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep copied. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal[('json', 'python')] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: 'bool' = True +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the dictionary will only contain JSON serializable types.
If mode is 'python', the dictionary may contain any Python objects. | None | +| include | None | A list of fields to include in the output. | None | +| exclude | None | A list of fields to exclude from the output. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that are unset or None from the output. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value from the output. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None` from the output. | None | +| round_trip | None | Whether to enable serialization and deserialization round-trip support. | None | +| warnings | None | Whether to log warnings when invalid fields are encountered. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: 'bool' = True +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. Can take either a string or set of strings. | None | +| exclude | None | Field(s) to exclude from the JSON output. Can take either a string or set of strings. | None | +| by_alias | None | Whether to serialize using field aliases. 
| None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that have the default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | Whether to use serialization/deserialization between JSON and class instance. | None | +| warnings | None | Whether to show any warnings that occurred during serialization. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### PointData + +```python3 +class PointData( + array: numpy.ndarray, + *, + band_names: List[str] = NOTHING, + coordinates: Union[Tuple[float, float], NoneType] = None, + crs: Union[rasterio.crs.CRS, NoneType] = None, + assets: Union[List, NoneType] = None, + metadata: Union[Dict, NoneType] = NOTHING +) +``` + +Point Data class. + +#### Attributes + +| Name | Type | Description | Default | +|---|---|---|---| +| array | numpy.ma.MaskedArray | pixel values. | None | +| band_names | list | name of each band. Defaults to `["1", "2", "3"]` for 3 bands image. | `["1", "2", "3"]` for 3 bands image | +| coordinates | tuple | Point's coordinates. | None | +| crs | rasterio.crs.CRS | Coordinates Reference System of the bounds. | None | +| assets | list | list of assets used to construct the data values. | None | +| metadata | dict | Additional metadata. Defaults to `{}`. 
| `{}` | + +#### Static methods + + +#### create_from_list + +```python3 +def create_from_list( + data: Sequence[ForwardRef('PointData')] +) +``` + +Create PointData from a sequence of PointsData objects. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| data | sequence | sequence of PointData. | None | + +#### Instance variables + +```python3 +count +``` + +Number of band. + +```python3 +data +``` + +Return data part of the masked array. + +```python3 +mask +``` + +Return Mask in form of rasterio dataset mask. + +#### Methods + + +#### apply_expression + +```python3 +def apply_expression( + self, + expression: str +) -> 'PointData' +``` + +Apply expression to the image data. + + +#### as_masked + +```python3 +def as_masked( + self +) -> numpy.ma.core.MaskedArray +``` + +return a numpy masked array. + +### RioTilerBaseModel + +```python3 +class RioTilerBaseModel( + __pydantic_self__, + **data: 'Any' +) +``` + +Provides dictionary access for pydantic models, for backwards compatability. + +#### Ancestors (in MRO) + +* pydantic.main.BaseModel + +#### Descendants + +* rio_tiler.models.Bounds +* rio_tiler.models.BandStatistics + +#### Class variables + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. 
+Behaves as if `Config.extra = 'allow'` was set since it adds all passed values + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to raise an exception on invalid fields. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. 
| + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: '_deprecated_parse.Protocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: '_deprecated_parse.Protocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + +#### Instance variables + +```python3 +model_computed_fields +``` + +Get the computed fields of this model instance. + +```python3 +model_extra +``` + +Get extra fields set during validation. + +```python3 +model_fields_set +``` + +Returns the set of fields that have been set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. 
+ +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping
specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping
specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values
in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep copied. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal[('json', 'python')] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: 'bool' = True +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the dictionary will only contain JSON serializable types.
If mode is 'python', the dictionary may contain any Python objects. | None | +| include | None | A list of fields to include in the output. | None | +| exclude | None | A list of fields to exclude from the output. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that are unset or None from the output. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value from the output. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None` from the output. | None | +| round_trip | None | Whether to enable serialization and deserialization round-trip support. | None | +| warnings | None | Whether to log warnings when invalid fields are encountered. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: 'bool' = True +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. Can take either a string or set of strings. | None | +| exclude | None | Field(s) to exclude from the JSON output. Can take either a string or set of strings. | None | +| by_alias | None | Whether to serialize using field aliases. 
| None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that have the default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | Whether to use serialization/deserialization between JSON and class instance. | None | +| warnings | None | Whether to show any warnings that occurred during serialization. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### SpatialInfo + +```python3 +class SpatialInfo( + __pydantic_self__, + **data: 'Any' +) +``` + +Dataset SpatialInfo + +#### Ancestors (in MRO) + +* rio_tiler.models.Bounds +* rio_tiler.models.RioTilerBaseModel +* pydantic.main.BaseModel + +#### Descendants + +* rio_tiler.models.Info + +#### Class variables + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. 
+Behaves as if `Config.extra = 'allow'` was set since it adds all passed values + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to raise an exception on invalid fields. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. 
| + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: '_deprecated_parse.Protocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: '_deprecated_parse.Protocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + +#### Instance variables + +```python3 +model_computed_fields +``` + +Get the computed fields of this model instance. + +```python3 +model_extra +``` + +Get extra fields set during validation. + +```python3 +model_fields_set +``` + +Returns the set of fields that have been set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. 
+ +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping
specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping
specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values
in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep copied. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal[('json', 'python')] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: 'bool' = True +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the dictionary will only contain JSON serializable types.
If mode is 'python', the dictionary may contain any Python objects. | None | +| include | None | A list of fields to include in the output. | None | +| exclude | None | A list of fields to exclude from the output. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that are unset or None from the output. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value from the output. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None` from the output. | None | +| round_trip | None | Whether to enable serialization and deserialization round-trip support. | None | +| warnings | None | Whether to log warnings when invalid fields are encountered. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: 'bool' = True +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.2/usage/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. Can take either a string or set of strings. | None | +| exclude | None | Field(s) to exclude from the JSON output. Can take either a string or set of strings. | None | +| by_alias | None | Whether to serialize using field aliases. 
| None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that have the default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | Whether to use serialization/deserialization between JSON and class instance. | None | +| warnings | None | Whether to show any warnings that occurred during serialization. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. \ No newline at end of file diff --git a/api/rio_tiler/mosaic/methods/base/base.md b/api/rio_tiler/mosaic/methods/base/base.md new file mode 100644 index 00000000..e2155765 --- /dev/null +++ b/api/rio_tiler/mosaic/methods/base/base.md @@ -0,0 +1,78 @@ +# Module rio_tiler.mosaic.methods.base + +rio-tiler.mosaic.methods abc class. + +## Classes + +### MosaicMethodBase + +```python3 +class MosaicMethodBase( + +) +``` + +Abstract base class for rio-tiler-mosaic methods objects. 
+ +#### Ancestors (in MRO) + +* abc.ABC + +#### Descendants + +* rio_tiler.mosaic.methods.defaults.FirstMethod +* rio_tiler.mosaic.methods.defaults.HighestMethod +* rio_tiler.mosaic.methods.defaults.LowestMethod +* rio_tiler.mosaic.methods.defaults.MeanMethod +* rio_tiler.mosaic.methods.defaults.MedianMethod +* rio_tiler.mosaic.methods.defaults.StdevMethod +* rio_tiler.mosaic.methods.defaults.LastBandHighMethod +* rio_tiler.mosaic.methods.defaults.LastBandLowMethod + +#### Class variables + +```python3 +cutline_mask +``` + +```python3 +exit_when_filled +``` + +```python3 +mosaic +``` + +#### Instance variables + +```python3 +data +``` + +Return data. + +```python3 +is_done +``` + +Check if the mosaic filling is done. + +#### Methods + + +#### feed + +```python3 +def feed( + self, + array: numpy.ma.core.MaskedArray +) +``` + +Fill mosaic array. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| array | numpy.ma.ndarray | data | None | \ No newline at end of file diff --git a/api/rio_tiler/mosaic/methods/base/index.html b/api/rio_tiler/mosaic/methods/base/index.html new file mode 100644 index 00000000..aaacc491 --- /dev/null +++ b/api/rio_tiler/mosaic/methods/base/index.html @@ -0,0 +1,1746 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.mosaic.methods.base - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.mosaic.methods.base

+

rio-tiler.mosaic.methods abc class.

+

Classes

+

MosaicMethodBase

+
class MosaicMethodBase(
+
+)
+
+

Abstract base class for rio-tiler-mosaic methods objects.

+

Ancestors (in MRO)

+
    +
  • abc.ABC
  • +
+

Descendants

+
    +
  • rio_tiler.mosaic.methods.defaults.FirstMethod
  • +
  • rio_tiler.mosaic.methods.defaults.HighestMethod
  • +
  • rio_tiler.mosaic.methods.defaults.LowestMethod
  • +
  • rio_tiler.mosaic.methods.defaults.MeanMethod
  • +
  • rio_tiler.mosaic.methods.defaults.MedianMethod
  • +
  • rio_tiler.mosaic.methods.defaults.StdevMethod
  • +
  • rio_tiler.mosaic.methods.defaults.LastBandHighMethod
  • +
  • rio_tiler.mosaic.methods.defaults.LastBandLowMethod
  • +
+

Class variables

+
cutline_mask
+
+
exit_when_filled
+
+
mosaic
+
+

Instance variables

+
data
+
+

Return data.

+
is_done
+
+

Check if the mosaic filling is done.

+

Methods

+

feed

+
def feed(
+    self,
+    array: numpy.ma.core.MaskedArray
+)
+
+

Fill mosaic array.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
arraynumpy.ma.ndarraydataNone
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/mosaic/methods/defaults/defaults.md b/api/rio_tiler/mosaic/methods/defaults/defaults.md new file mode 100644 index 00000000..b4d127b3 --- /dev/null +++ b/api/rio_tiler/mosaic/methods/defaults/defaults.md @@ -0,0 +1,469 @@ +# Module rio_tiler.mosaic.methods.defaults + +rio_tiler.mosaic.methods.defaults: default mosaic filling methods. + +## Classes + +### FirstMethod + +```python3 +class FirstMethod( + +) +``` + +Feed the mosaic array with the first pixel available. + +#### Ancestors (in MRO) + +* rio_tiler.mosaic.methods.base.MosaicMethodBase +* abc.ABC + +#### Class variables + +```python3 +cutline_mask +``` + +```python3 +exit_when_filled +``` + +```python3 +mosaic +``` + +#### Instance variables + +```python3 +data +``` + +Return data. + +```python3 +is_done +``` + +Check if the mosaic filling is done. + +#### Methods + + +#### feed + +```python3 +def feed( + self, + array: Union[numpy.ma.core.MaskedArray, NoneType] +) +``` + +Add data to the mosaic array. + +### HighestMethod + +```python3 +class HighestMethod( + +) +``` + +Feed the mosaic array with the highest pixel values. + +#### Ancestors (in MRO) + +* rio_tiler.mosaic.methods.base.MosaicMethodBase +* abc.ABC + +#### Class variables + +```python3 +cutline_mask +``` + +```python3 +exit_when_filled +``` + +```python3 +mosaic +``` + +#### Instance variables + +```python3 +data +``` + +Return data. + +```python3 +is_done +``` + +Check if the mosaic filling is done. + +#### Methods + + +#### feed + +```python3 +def feed( + self, + array: Union[numpy.ma.core.MaskedArray, NoneType] +) +``` + +Add data to the mosaic array. + +### LastBandHighMethod + +```python3 +class LastBandHighMethod( + +) +``` + +Feed the mosaic array using the last band as decision factor (highest value). 
+ +#### Ancestors (in MRO) + +* rio_tiler.mosaic.methods.base.MosaicMethodBase +* abc.ABC + +#### Class variables + +```python3 +cutline_mask +``` + +```python3 +exit_when_filled +``` + +```python3 +mosaic +``` + +#### Instance variables + +```python3 +data +``` + +Return data. + +```python3 +is_done +``` + +Check if the mosaic filling is done. + +#### Methods + + +#### feed + +```python3 +def feed( + self, + array: Union[numpy.ma.core.MaskedArray, NoneType] +) +``` + +Add data to the mosaic array. + +### LastBandLowMethod + +```python3 +class LastBandLowMethod( + +) +``` + +Feed the mosaic array using the last band as decision factor (lowest value). + +#### Ancestors (in MRO) + +* rio_tiler.mosaic.methods.base.MosaicMethodBase +* abc.ABC + +#### Class variables + +```python3 +cutline_mask +``` + +```python3 +exit_when_filled +``` + +```python3 +mosaic +``` + +#### Instance variables + +```python3 +data +``` + +Return data. + +```python3 +is_done +``` + +Check if the mosaic filling is done. + +#### Methods + + +#### feed + +```python3 +def feed( + self, + array: Union[numpy.ma.core.MaskedArray, NoneType] +) +``` + +Add data to the mosaic array. + +### LowestMethod + +```python3 +class LowestMethod( + +) +``` + +Feed the mosaic array with the lowest pixel values. + +#### Ancestors (in MRO) + +* rio_tiler.mosaic.methods.base.MosaicMethodBase +* abc.ABC + +#### Class variables + +```python3 +cutline_mask +``` + +```python3 +exit_when_filled +``` + +```python3 +mosaic +``` + +#### Instance variables + +```python3 +data +``` + +Return data. + +```python3 +is_done +``` + +Check if the mosaic filling is done. + +#### Methods + + +#### feed + +```python3 +def feed( + self, + array: Union[numpy.ma.core.MaskedArray, NoneType] +) +``` + +Add data to the mosaic array. + +### MeanMethod + +```python3 +class MeanMethod( + enforce_data_type: bool = True +) +``` + +Stack the arrays and return the Mean pixel value. 
+ +#### Ancestors (in MRO) + +* rio_tiler.mosaic.methods.base.MosaicMethodBase +* abc.ABC + +#### Class variables + +```python3 +cutline_mask +``` + +```python3 +enforce_data_type +``` + +```python3 +exit_when_filled +``` + +```python3 +mosaic +``` + +#### Instance variables + +```python3 +data +``` + +Return Mean of the data stack. + +```python3 +is_done +``` + +Check if the mosaic filling is done. + +#### Methods + + +#### feed + +```python3 +def feed( + self, + array: numpy.ma.core.MaskedArray +) +``` + +Add array to the stack. + +### MedianMethod + +```python3 +class MedianMethod( + enforce_data_type: bool = True +) +``` + +Stack the arrays and return the Median pixel value. + +#### Ancestors (in MRO) + +* rio_tiler.mosaic.methods.base.MosaicMethodBase +* abc.ABC + +#### Class variables + +```python3 +cutline_mask +``` + +```python3 +enforce_data_type +``` + +```python3 +exit_when_filled +``` + +```python3 +mosaic +``` + +#### Instance variables + +```python3 +data +``` + +Return Median of the data stack. + +```python3 +is_done +``` + +Check if the mosaic filling is done. + +#### Methods + + +#### feed + +```python3 +def feed( + self, + array: Union[numpy.ma.core.MaskedArray, NoneType] +) +``` + +Add array to the stack. + +### StdevMethod + +```python3 +class StdevMethod( + +) +``` + +Stack the arrays and return the Standard Deviation value. + +#### Ancestors (in MRO) + +* rio_tiler.mosaic.methods.base.MosaicMethodBase +* abc.ABC + +#### Class variables + +```python3 +cutline_mask +``` + +```python3 +exit_when_filled +``` + +```python3 +mosaic +``` + +#### Instance variables + +```python3 +data +``` + +Return STDDEV of the data stack. + +```python3 +is_done +``` + +Check if the mosaic filling is done. + +#### Methods + + +#### feed + +```python3 +def feed( + self, + array: Union[numpy.ma.core.MaskedArray, NoneType] +) +``` + +Add array to the stack. 
\ No newline at end of file diff --git a/api/rio_tiler/mosaic/methods/defaults/index.html b/api/rio_tiler/mosaic/methods/defaults/index.html new file mode 100644 index 00000000..8e36b00c --- /dev/null +++ b/api/rio_tiler/mosaic/methods/defaults/index.html @@ -0,0 +1,2610 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.mosaic.methods.defaults - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.mosaic.methods.defaults

+

rio_tiler.mosaic.methods.defaults: default mosaic filling methods.

+

Classes

+

FirstMethod

+
class FirstMethod(
+
+)
+
+

Feed the mosaic array with the first pixel available.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.mosaic.methods.base.MosaicMethodBase
  • +
  • abc.ABC
  • +
+

Class variables

+
cutline_mask
+
+
exit_when_filled
+
+
mosaic
+
+

Instance variables

+
data
+
+

Return data.

+
is_done
+
+

Check if the mosaic filling is done.

+

Methods

+

feed

+
def feed(
+    self,
+    array: Union[numpy.ma.core.MaskedArray, NoneType]
+)
+
+

Add data to the mosaic array.

+

HighestMethod

+
class HighestMethod(
+
+)
+
+

Feed the mosaic array with the highest pixel values.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.mosaic.methods.base.MosaicMethodBase
  • +
  • abc.ABC
  • +
+

Class variables

+
cutline_mask
+
+
exit_when_filled
+
+
mosaic
+
+

Instance variables

+
data
+
+

Return data.

+
is_done
+
+

Check if the mosaic filling is done.

+

Methods

+

feed

+
def feed(
+    self,
+    array: Union[numpy.ma.core.MaskedArray, NoneType]
+)
+
+

Add data to the mosaic array.

+

LastBandHighMethod

+
class LastBandHighMethod(
+
+)
+
+

Feed the mosaic array using the last band as decision factor (highest value).

+

Ancestors (in MRO)

+
    +
  • rio_tiler.mosaic.methods.base.MosaicMethodBase
  • +
  • abc.ABC
  • +
+

Class variables

+
cutline_mask
+
+
exit_when_filled
+
+
mosaic
+
+

Instance variables

+
data
+
+

Return data.

+
is_done
+
+

Check if the mosaic filling is done.

+

Methods

+

feed

+
def feed(
+    self,
+    array: Union[numpy.ma.core.MaskedArray, NoneType]
+)
+
+

Add data to the mosaic array.

+

LastBandLowMethod

+
class LastBandLowMethod(
+
+)
+
+

Feed the mosaic array using the last band as decision factor (lowest value).

+

Ancestors (in MRO)

+
    +
  • rio_tiler.mosaic.methods.base.MosaicMethodBase
  • +
  • abc.ABC
  • +
+

Class variables

+
cutline_mask
+
+
exit_when_filled
+
+
mosaic
+
+

Instance variables

+
data
+
+

Return data.

+
is_done
+
+

Check if the mosaic filling is done.

+

Methods

+

feed

+
def feed(
+    self,
+    array: Union[numpy.ma.core.MaskedArray, NoneType]
+)
+
+

Add data to the mosaic array.

+

LowestMethod

+
class LowestMethod(
+
+)
+
+

Feed the mosaic array with the lowest pixel values.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.mosaic.methods.base.MosaicMethodBase
  • +
  • abc.ABC
  • +
+

Class variables

+
cutline_mask
+
+
exit_when_filled
+
+
mosaic
+
+

Instance variables

+
data
+
+

Return data.

+
is_done
+
+

Check if the mosaic filling is done.

+

Methods

+

feed

+
def feed(
+    self,
+    array: Union[numpy.ma.core.MaskedArray, NoneType]
+)
+
+

Add data to the mosaic array.

+

MeanMethod

+
class MeanMethod(
+    enforce_data_type: bool = True
+)
+
+

Stack the arrays and return the Mean pixel value.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.mosaic.methods.base.MosaicMethodBase
  • +
  • abc.ABC
  • +
+

Class variables

+
cutline_mask
+
+
enforce_data_type
+
+
exit_when_filled
+
+
mosaic
+
+

Instance variables

+
data
+
+

Return Mean of the data stack.

+
is_done
+
+

Check if the mosaic filling is done.

+

Methods

+

feed

+
def feed(
+    self,
+    array: numpy.ma.core.MaskedArray
+)
+
+

Add array to the stack.

+

MedianMethod

+
class MedianMethod(
+    enforce_data_type: bool = True
+)
+
+

Stack the arrays and return the Median pixel value.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.mosaic.methods.base.MosaicMethodBase
  • +
  • abc.ABC
  • +
+

Class variables

+
cutline_mask
+
+
enforce_data_type
+
+
exit_when_filled
+
+
mosaic
+
+

Instance variables

+
data
+
+

Return Median of the data stack.

+
is_done
+
+

Check if the mosaic filling is done.

+

Methods

+

feed

+
def feed(
+    self,
+    array: Union[numpy.ma.core.MaskedArray, NoneType]
+)
+
+

Add array to the stack.

+

StdevMethod

+
class StdevMethod(
+
+)
+
+

Stack the arrays and return the Standard Deviation value.

+

Ancestors (in MRO)

+
    +
  • rio_tiler.mosaic.methods.base.MosaicMethodBase
  • +
  • abc.ABC
  • +
+

Class variables

+
cutline_mask
+
+
exit_when_filled
+
+
mosaic
+
+

Instance variables

+
data
+
+

Return STDDEV of the data stack.

+
is_done
+
+

Check if the mosaic filling is done.

+

Methods

+

feed

+
def feed(
+    self,
+    array: Union[numpy.ma.core.MaskedArray, NoneType]
+)
+
+

Add array to the stack.

+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/mosaic/reader/index.html b/api/rio_tiler/mosaic/reader/index.html new file mode 100644 index 00000000..ab293e62 --- /dev/null +++ b/api/rio_tiler/mosaic/reader/index.html @@ -0,0 +1,1795 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.mosaic.reader - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.mosaic.reader

+

rio_tiler.mosaic: create tile from multiple assets.

+

Variables

+
MAX_THREADS
+
+

Functions

+

mosaic_point_reader

+
def mosaic_point_reader(
+    mosaic_assets: Sequence,
+    reader: Callable[..., rio_tiler.models.PointData],
+    *args: Any,
+    pixel_selection: Union[Type[rio_tiler.mosaic.methods.base.MosaicMethodBase], rio_tiler.mosaic.methods.base.MosaicMethodBase] = <class 'rio_tiler.mosaic.methods.defaults.FirstMethod'>,
+    chunk_size: Union[int, NoneType] = None,
+    threads: int = 10,
+    allowed_exceptions: Tuple = (<class 'rio_tiler.errors.PointOutsideBounds'>,),
+    **kwargs
+) -> Tuple[rio_tiler.models.PointData, List]
+
+

Merge multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
mosaic_assetssequenceList of assets.None
readercallableReader function. The function MUST take (asset, *args, **kwargs) as arguments, and MUST return a PointData object.None
argsAnyArgument to forward to the reader function.None
pixel_selectionMosaicMethodInstance of MosaicMethodBase class. Defaults to rio_tiler.mosaic.methods.defaults.FirstMethod.rio_tiler.mosaic.methods.defaults.FirstMethod
chunk_sizeintControl the number of asset to process per loop.None
threadsintNumber of threads to use. If <= 1, runs single threaded without an event loop. By default reads from the MAX_THREADS environment variable, and if not found defaults to multiprocessing.cpu_count() * 5.None
allowed_exceptionstupleList of exceptions which will be ignored. Note: PointOutsideBounds is likely to be raised and should be included in the allowed_exceptions. Defaults to (TileOutsideBounds, ).(TileOutsideBounds, )
kwargsoptionalReader callable's keywords options.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
tuplePointData and assets (list).
+

mosaic_reader

+
def mosaic_reader(
+    mosaic_assets: Sequence,
+    reader: Callable[..., rio_tiler.models.ImageData],
+    *args: Any,
+    pixel_selection: Union[Type[rio_tiler.mosaic.methods.base.MosaicMethodBase], rio_tiler.mosaic.methods.base.MosaicMethodBase] = <class 'rio_tiler.mosaic.methods.defaults.FirstMethod'>,
+    chunk_size: Union[int, NoneType] = None,
+    threads: int = 10,
+    allowed_exceptions: Tuple = (<class 'rio_tiler.errors.TileOutsideBounds'>,),
+    **kwargs
+) -> Tuple[rio_tiler.models.ImageData, List]
+
+

Merge multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
mosaic_assetssequenceList of assets.None
readercallableReader function. The function MUST take (asset, *args, **kwargs) as arguments, and MUST return an ImageData.None
argsAnyArgument to forward to the reader function.None
pixel_selectionMosaicMethodInstance of MosaicMethodBase class. Defaults to rio_tiler.mosaic.methods.defaults.FirstMethod.rio_tiler.mosaic.methods.defaults.FirstMethod
chunk_sizeintControl the number of asset to process per loop.None
threadsintNumber of threads to use. If <= 1, runs single threaded without an event loop. By default reads from the MAX_THREADS environment variable, and if not found defaults to multiprocessing.cpu_count() * 5.None
allowed_exceptionstupleList of exceptions which will be ignored. Note: TileOutsideBounds is likely to be raised and should be included in the allowed_exceptions. Defaults to (TileOutsideBounds, ).(TileOutsideBounds, )
kwargsoptionalReader callable's keywords options.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
tupleImageData and assets (list).
+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/mosaic/reader/reader.md b/api/rio_tiler/mosaic/reader/reader.md new file mode 100644 index 00000000..81efec28 --- /dev/null +++ b/api/rio_tiler/mosaic/reader/reader.md @@ -0,0 +1,85 @@ +# Module rio_tiler.mosaic.reader + +rio_tiler.mosaic: create tile from multiple assets. + +## Variables + +```python3 +MAX_THREADS +``` + +## Functions + + +### mosaic_point_reader + +```python3 +def mosaic_point_reader( + mosaic_assets: Sequence, + reader: Callable[..., rio_tiler.models.PointData], + *args: Any, + pixel_selection: Union[Type[rio_tiler.mosaic.methods.base.MosaicMethodBase], rio_tiler.mosaic.methods.base.MosaicMethodBase] = , + chunk_size: Union[int, NoneType] = None, + threads: int = 10, + allowed_exceptions: Tuple = (,), + **kwargs +) -> Tuple[rio_tiler.models.PointData, List] +``` + +Merge multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mosaic_assets | sequence | List of assets. | None | +| reader | callable | Reader function. The function MUST take `(asset, *args, **kwargs)` as arguments, and MUST return a PointData object. | None | +| args | Any | Argument to forward to the reader function. | None | +| pixel_selection | MosaicMethod | Instance of MosaicMethodBase class. Defaults to `rio_tiler.mosaic.methods.defaults.FirstMethod`. | `rio_tiler.mosaic.methods.defaults.FirstMethod` | +| chunk_size | int | Control the number of asset to process per loop. | None | +| threads | int | Number of threads to use. If <= 1, runs single threaded without an event loop. By default reads from the MAX_THREADS environment variable, and if not found defaults to multiprocessing.cpu_count() * 5. | None | +| allowed_exceptions | tuple | List of exceptions which will be ignored. Note: `PointOutsideBounds` is likely to be raised and should be included in the allowed_exceptions. Defaults to `(TileOutsideBounds, )`. 
| `(TileOutsideBounds, )` | +| kwargs | optional | Reader callable's keywords options. | None | + +**Returns:** + +| Type | Description | +|---|---| +| tuple | PointData and assets (list). | + + +### mosaic_reader + +```python3 +def mosaic_reader( + mosaic_assets: Sequence, + reader: Callable[..., rio_tiler.models.ImageData], + *args: Any, + pixel_selection: Union[Type[rio_tiler.mosaic.methods.base.MosaicMethodBase], rio_tiler.mosaic.methods.base.MosaicMethodBase] = , + chunk_size: Union[int, NoneType] = None, + threads: int = 10, + allowed_exceptions: Tuple = (,), + **kwargs +) -> Tuple[rio_tiler.models.ImageData, List] +``` + +Merge multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mosaic_assets | sequence | List of assets. | None | +| reader | callable | Reader function. The function MUST take `(asset, *args, **kwargs)` as arguments, and MUST return an ImageData. | None | +| args | Any | Argument to forward to the reader function. | None | +| pixel_selection | MosaicMethod | Instance of MosaicMethodBase class. Defaults to `rio_tiler.mosaic.methods.defaults.FirstMethod`. | `rio_tiler.mosaic.methods.defaults.FirstMethod` | +| chunk_size | int | Control the number of asset to process per loop. | None | +| threads | int | Number of threads to use. If <= 1, runs single threaded without an event loop. By default reads from the MAX_THREADS environment variable, and if not found defaults to multiprocessing.cpu_count() * 5. | None | +| allowed_exceptions | tuple | List of exceptions which will be ignored. Note: `TileOutsideBounds` is likely to be raised and should be included in the allowed_exceptions. Defaults to `(TileOutsideBounds, )`. | `(TileOutsideBounds, )` | +| kwargs | optional | Reader callable's keywords options. | None | + +**Returns:** + +| Type | Description | +|---|---| +| tuple | ImageData and assets (list). 
| \ No newline at end of file diff --git a/api/rio_tiler/profiles/index.html b/api/rio_tiler/profiles/index.html new file mode 100644 index 00000000..f64ceffd --- /dev/null +++ b/api/rio_tiler/profiles/index.html @@ -0,0 +1,3259 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.profiles - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.profiles

+

Image file profiles.

+

Variables

+
img_profiles
+
+

Classes

+

ImagesProfiles

+
class ImagesProfiles(
+
+)
+
+

GDAL Image creation options.

+

ref: github.com/mapnik/mapnik/wiki/Image-IO#default-output-details.

+

Ancestors (in MRO)

+
    +
  • collections.UserDict
  • +
  • collections.abc.MutableMapping
  • +
  • collections.abc.Mapping
  • +
  • collections.abc.Collection
  • +
  • collections.abc.Sized
  • +
  • collections.abc.Iterable
  • +
  • collections.abc.Container
  • +
+

Static methods

+

fromkeys

+
def fromkeys(
+    iterable,
+    value=None
+)
+
+

Methods

+

clear

+
def clear(
+    self
+)
+
+

D.clear() -> None. Remove all items from D.

+

copy

+
def copy(
+    self
+)
+
+

get

+
def get(
+    self,
+    key,
+    default=None
+)
+
+

Like normal item access but return a copy of the key.

+

items

+
def items(
+    self
+)
+
+

D.items() -> a set-like object providing a view on D's items

+

keys

+
def keys(
+    self
+)
+
+

D.keys() -> a set-like object providing a view on D's keys

+

pop

+
def pop(
+    self,
+    key,
+    default=<object object at 0x7f03e50de150>
+)
+
+

D.pop(k[,d]) -> v, remove specified key and return the corresponding value.

+

If key is not found, d is returned if given, otherwise KeyError is raised.

+

popitem

+
def popitem(
+    self
+)
+
+

D.popitem() -> (k, v), remove and return some (key, value) pair

+

as a 2-tuple; but raise KeyError if D is empty.

+

setdefault

+
def setdefault(
+    self,
+    key,
+    default=None
+)
+
+

D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D

+

update

+
def update(
+    self,
+    other=(),
+    /,
+    **kwds
+)
+
+

D.update([E, ]**F) -> None. Update D from mapping/iterable E and F.

+

If E present and has a .keys() method, does: for k in E: D[k] = E[k] +If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v +In either case, this is followed by: for k, v in F.items(): D[k] = v

+

values

+
def values(
+    self
+)
+
+

D.values() -> an object providing a view on D's values

+

JPEGProfile

+
class JPEGProfile(
+    data={},
+    **kwds
+)
+
+

JPEG creation options ref: www.gdal.org/frmt_jpeg.html.

+

Ancestors (in MRO)

+
    +
  • rasterio.profiles.Profile
  • +
  • collections.UserDict
  • +
  • collections.abc.MutableMapping
  • +
  • collections.abc.Mapping
  • +
  • collections.abc.Collection
  • +
  • collections.abc.Sized
  • +
  • collections.abc.Iterable
  • +
  • collections.abc.Container
  • +
+

Class variables

+
defaults
+
+

Static methods

+

fromkeys

+
def fromkeys(
+    iterable,
+    value=None
+)
+
+

Methods

+

clear

+
def clear(
+    self
+)
+
+

D.clear() -> None. Remove all items from D.

+

copy

+
def copy(
+    self
+)
+
+

get

+
def get(
+    self,
+    key,
+    default=None
+)
+
+

D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.

+

items

+
def items(
+    self
+)
+
+

D.items() -> a set-like object providing a view on D's items

+

keys

+
def keys(
+    self
+)
+
+

D.keys() -> a set-like object providing a view on D's keys

+

pop

+
def pop(
+    self,
+    key,
+    default=<object object at 0x7f03e50de150>
+)
+
+

D.pop(k[,d]) -> v, remove specified key and return the corresponding value.

+

If key is not found, d is returned if given, otherwise KeyError is raised.

+

popitem

+
def popitem(
+    self
+)
+
+

D.popitem() -> (k, v), remove and return some (key, value) pair

+

as a 2-tuple; but raise KeyError if D is empty.

+

setdefault

+
def setdefault(
+    self,
+    key,
+    default=None
+)
+
+

D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D

+

update

+
def update(
+    self,
+    other=(),
+    /,
+    **kwds
+)
+
+

D.update([E, ]**F) -> None. Update D from mapping/iterable E and F.

+

If E present and has a .keys() method, does: for k in E: D[k] = E[k] +If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v +In either case, this is followed by: for k, v in F.items(): D[k] = v

+

values

+
def values(
+    self
+)
+
+

D.values() -> an object providing a view on D's values

+

PNGProfile

+
class PNGProfile(
+    data={},
+    **kwds
+)
+
+

PNG creation options ref: www.gdal.org/frmt_png.html.

+

Ancestors (in MRO)

+
    +
  • rasterio.profiles.Profile
  • +
  • collections.UserDict
  • +
  • collections.abc.MutableMapping
  • +
  • collections.abc.Mapping
  • +
  • collections.abc.Collection
  • +
  • collections.abc.Sized
  • +
  • collections.abc.Iterable
  • +
  • collections.abc.Container
  • +
+

Class variables

+
defaults
+
+

Static methods

+

fromkeys

+
def fromkeys(
+    iterable,
+    value=None
+)
+
+

Methods

+

clear

+
def clear(
+    self
+)
+
+

D.clear() -> None. Remove all items from D.

+

copy

+
def copy(
+    self
+)
+
+

get

+
def get(
+    self,
+    key,
+    default=None
+)
+
+

D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.

+

items

+
def items(
+    self
+)
+
+

D.items() -> a set-like object providing a view on D's items

+

keys

+
def keys(
+    self
+)
+
+

D.keys() -> a set-like object providing a view on D's keys

+

pop

+
def pop(
+    self,
+    key,
+    default=<object object at 0x7f03e50de150>
+)
+
+

D.pop(k[,d]) -> v, remove specified key and return the corresponding value.

+

If key is not found, d is returned if given, otherwise KeyError is raised.

+

popitem

+
def popitem(
+    self
+)
+
+

D.popitem() -> (k, v), remove and return some (key, value) pair

+

as a 2-tuple; but raise KeyError if D is empty.

+

setdefault

+
def setdefault(
+    self,
+    key,
+    default=None
+)
+
+

D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D

+

update

+
def update(
+    self,
+    other=(),
+    /,
+    **kwds
+)
+
+

D.update([E, ]**F) -> None. Update D from mapping/iterable E and F.

+

If E present and has a .keys() method, does: for k in E: D[k] = E[k] +If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v +In either case, this is followed by: for k, v in F.items(): D[k] = v

+

values

+
def values(
+    self
+)
+
+

D.values() -> an object providing a view on D's values

+

PNGRAWProfile

+
class PNGRAWProfile(
+    data={},
+    **kwds
+)
+
+

PNG creation options ref: www.gdal.org/frmt_png.html.

+

Ancestors (in MRO)

+
    +
  • rasterio.profiles.Profile
  • +
  • collections.UserDict
  • +
  • collections.abc.MutableMapping
  • +
  • collections.abc.Mapping
  • +
  • collections.abc.Collection
  • +
  • collections.abc.Sized
  • +
  • collections.abc.Iterable
  • +
  • collections.abc.Container
  • +
+

Class variables

+
defaults
+
+

Static methods

+

fromkeys

+
def fromkeys(
+    iterable,
+    value=None
+)
+
+

Methods

+

clear

+
def clear(
+    self
+)
+
+

D.clear() -> None. Remove all items from D.

+

copy

+
def copy(
+    self
+)
+
+

get

+
def get(
+    self,
+    key,
+    default=None
+)
+
+

D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.

+

items

+
def items(
+    self
+)
+
+

D.items() -> a set-like object providing a view on D's items

+

keys

+
def keys(
+    self
+)
+
+

D.keys() -> a set-like object providing a view on D's keys

+

pop

+
def pop(
+    self,
+    key,
+    default=<object object at 0x7f03e50de150>
+)
+
+

D.pop(k[,d]) -> v, remove specified key and return the corresponding value.

+

If key is not found, d is returned if given, otherwise KeyError is raised.

+

popitem

+
def popitem(
+    self
+)
+
+

D.popitem() -> (k, v), remove and return some (key, value) pair

+

as a 2-tuple; but raise KeyError if D is empty.

+

setdefault

+
def setdefault(
+    self,
+    key,
+    default=None
+)
+
+

D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D

+

update

+
def update(
+    self,
+    other=(),
+    /,
+    **kwds
+)
+
+

D.update([E, ]**F) -> None. Update D from mapping/iterable E and F.

+

If E present and has a .keys() method, does: for k in E: D[k] = E[k] +If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v +In either case, this is followed by: for k, v in F.items(): D[k] = v

+

values

+
def values(
+    self
+)
+
+

D.values() -> an object providing a view on D's values

+

WEBPProfile

+
class WEBPProfile(
+    data={},
+    **kwds
+)
+
+

WEBP creation options ref: www.gdal.org/frmt_webp.html.

+

Ancestors (in MRO)

+
    +
  • rasterio.profiles.Profile
  • +
  • collections.UserDict
  • +
  • collections.abc.MutableMapping
  • +
  • collections.abc.Mapping
  • +
  • collections.abc.Collection
  • +
  • collections.abc.Sized
  • +
  • collections.abc.Iterable
  • +
  • collections.abc.Container
  • +
+

Class variables

+
defaults
+
+

Static methods

+

fromkeys

+
def fromkeys(
+    iterable,
+    value=None
+)
+
+

Methods

+

clear

+
def clear(
+    self
+)
+
+

D.clear() -> None. Remove all items from D.

+

copy

+
def copy(
+    self
+)
+
+

get

+
def get(
+    self,
+    key,
+    default=None
+)
+
+

D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.

+

items

+
def items(
+    self
+)
+
+

D.items() -> a set-like object providing a view on D's items

+

keys

+
def keys(
+    self
+)
+
+

D.keys() -> a set-like object providing a view on D's keys

+

pop

+
def pop(
+    self,
+    key,
+    default=<object object at 0x7f03e50de150>
+)
+
+

D.pop(k[,d]) -> v, remove specified key and return the corresponding value.

+

If key is not found, d is returned if given, otherwise KeyError is raised.

+

popitem

+
def popitem(
+    self
+)
+
+

D.popitem() -> (k, v), remove and return some (key, value) pair

+

as a 2-tuple; but raise KeyError if D is empty.

+

setdefault

+
def setdefault(
+    self,
+    key,
+    default=None
+)
+
+

D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D

+

update

+
def update(
+    self,
+    other=(),
+    /,
+    **kwds
+)
+
+

D.update([E, ]**F) -> None. Update D from mapping/iterable E and F.

+

If E present and has a .keys() method, does: for k in E: D[k] = E[k] +If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v +In either case, this is followed by: for k, v in F.items(): D[k] = v

+

values

+
def values(
+    self
+)
+
+

D.values() -> an object providing a view on D's values

+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/profiles/profiles.md b/api/rio_tiler/profiles/profiles.md new file mode 100644 index 00000000..fe3e16c2 --- /dev/null +++ b/api/rio_tiler/profiles/profiles.md @@ -0,0 +1,840 @@ +# Module rio_tiler.profiles + +Image file profiles. + +## Variables + +```python3 +img_profiles +``` + +## Classes + +### ImagesProfiles + +```python3 +class ImagesProfiles( + +) +``` + +GDAL Image creation options. + +ref: https://github.com/mapnik/mapnik/wiki/Image-IO#default-output-details. + +#### Ancestors (in MRO) + +* collections.UserDict +* collections.abc.MutableMapping +* collections.abc.Mapping +* collections.abc.Collection +* collections.abc.Sized +* collections.abc.Iterable +* collections.abc.Container + +#### Static methods + + +#### fromkeys + +```python3 +def fromkeys( + iterable, + value=None +) +``` + +#### Methods + + +#### clear + +```python3 +def clear( + self +) +``` + +D.clear() -> None. Remove all items from D. + + +#### copy + +```python3 +def copy( + self +) +``` + + +#### get + +```python3 +def get( + self, + key, + default=None +) +``` + +Like normal item access but return a copy of the key. + + +#### items + +```python3 +def items( + self +) +``` + +D.items() -> a set-like object providing a view on D's items + + +#### keys + +```python3 +def keys( + self +) +``` + +D.keys() -> a set-like object providing a view on D's keys + + +#### pop + +```python3 +def pop( + self, + key, + default= +) +``` + +D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + +If key is not found, d is returned if given, otherwise KeyError is raised. + + +#### popitem + +```python3 +def popitem( + self +) +``` + +D.popitem() -> (k, v), remove and return some (key, value) pair + +as a 2-tuple; but raise KeyError if D is empty. 
+ + +#### setdefault + +```python3 +def setdefault( + self, + key, + default=None +) +``` + +D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D + + +#### update + +```python3 +def update( + self, + other=(), + /, + **kwds +) +``` + +D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. + +If E present and has a .keys() method, does: for k in E: D[k] = E[k] +If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v +In either case, this is followed by: for k, v in F.items(): D[k] = v + + +#### values + +```python3 +def values( + self +) +``` + +D.values() -> an object providing a view on D's values + +### JPEGProfile + +```python3 +class JPEGProfile( + data={}, + **kwds +) +``` + +JPEG creation options ref: https://www.gdal.org/frmt_jpeg.html. + +#### Ancestors (in MRO) + +* rasterio.profiles.Profile +* collections.UserDict +* collections.abc.MutableMapping +* collections.abc.Mapping +* collections.abc.Collection +* collections.abc.Sized +* collections.abc.Iterable +* collections.abc.Container + +#### Class variables + +```python3 +defaults +``` + +#### Static methods + + +#### fromkeys + +```python3 +def fromkeys( + iterable, + value=None +) +``` + +#### Methods + + +#### clear + +```python3 +def clear( + self +) +``` + +D.clear() -> None. Remove all items from D. + + +#### copy + +```python3 +def copy( + self +) +``` + + +#### get + +```python3 +def get( + self, + key, + default=None +) +``` + +D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None. + + +#### items + +```python3 +def items( + self +) +``` + +D.items() -> a set-like object providing a view on D's items + + +#### keys + +```python3 +def keys( + self +) +``` + +D.keys() -> a set-like object providing a view on D's keys + + +#### pop + +```python3 +def pop( + self, + key, + default= +) +``` + +D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + +If key is not found, d is returned if given, otherwise KeyError is raised. 
+ + +#### popitem + +```python3 +def popitem( + self +) +``` + +D.popitem() -> (k, v), remove and return some (key, value) pair + +as a 2-tuple; but raise KeyError if D is empty. + + +#### setdefault + +```python3 +def setdefault( + self, + key, + default=None +) +``` + +D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D + + +#### update + +```python3 +def update( + self, + other=(), + /, + **kwds +) +``` + +D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. + +If E present and has a .keys() method, does: for k in E: D[k] = E[k] +If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v +In either case, this is followed by: for k, v in F.items(): D[k] = v + + +#### values + +```python3 +def values( + self +) +``` + +D.values() -> an object providing a view on D's values + +### PNGProfile + +```python3 +class PNGProfile( + data={}, + **kwds +) +``` + +PNG creation options ref: https://www.gdal.org/frmt_png.html. + +#### Ancestors (in MRO) + +* rasterio.profiles.Profile +* collections.UserDict +* collections.abc.MutableMapping +* collections.abc.Mapping +* collections.abc.Collection +* collections.abc.Sized +* collections.abc.Iterable +* collections.abc.Container + +#### Class variables + +```python3 +defaults +``` + +#### Static methods + + +#### fromkeys + +```python3 +def fromkeys( + iterable, + value=None +) +``` + +#### Methods + + +#### clear + +```python3 +def clear( + self +) +``` + +D.clear() -> None. Remove all items from D. + + +#### copy + +```python3 +def copy( + self +) +``` + + +#### get + +```python3 +def get( + self, + key, + default=None +) +``` + +D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None. 
+ + +#### items + +```python3 +def items( + self +) +``` + +D.items() -> a set-like object providing a view on D's items + + +#### keys + +```python3 +def keys( + self +) +``` + +D.keys() -> a set-like object providing a view on D's keys + + +#### pop + +```python3 +def pop( + self, + key, + default= +) +``` + +D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + +If key is not found, d is returned if given, otherwise KeyError is raised. + + +#### popitem + +```python3 +def popitem( + self +) +``` + +D.popitem() -> (k, v), remove and return some (key, value) pair + +as a 2-tuple; but raise KeyError if D is empty. + + +#### setdefault + +```python3 +def setdefault( + self, + key, + default=None +) +``` + +D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D + + +#### update + +```python3 +def update( + self, + other=(), + /, + **kwds +) +``` + +D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. + +If E present and has a .keys() method, does: for k in E: D[k] = E[k] +If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v +In either case, this is followed by: for k, v in F.items(): D[k] = v + + +#### values + +```python3 +def values( + self +) +``` + +D.values() -> an object providing a view on D's values + +### PNGRAWProfile + +```python3 +class PNGRAWProfile( + data={}, + **kwds +) +``` + +PNG creation options ref: https://www.gdal.org/frmt_png.html. + +#### Ancestors (in MRO) + +* rasterio.profiles.Profile +* collections.UserDict +* collections.abc.MutableMapping +* collections.abc.Mapping +* collections.abc.Collection +* collections.abc.Sized +* collections.abc.Iterable +* collections.abc.Container + +#### Class variables + +```python3 +defaults +``` + +#### Static methods + + +#### fromkeys + +```python3 +def fromkeys( + iterable, + value=None +) +``` + +#### Methods + + +#### clear + +```python3 +def clear( + self +) +``` + +D.clear() -> None. Remove all items from D. 
+ + +#### copy + +```python3 +def copy( + self +) +``` + + +#### get + +```python3 +def get( + self, + key, + default=None +) +``` + +D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None. + + +#### items + +```python3 +def items( + self +) +``` + +D.items() -> a set-like object providing a view on D's items + + +#### keys + +```python3 +def keys( + self +) +``` + +D.keys() -> a set-like object providing a view on D's keys + + +#### pop + +```python3 +def pop( + self, + key, + default= +) +``` + +D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + +If key is not found, d is returned if given, otherwise KeyError is raised. + + +#### popitem + +```python3 +def popitem( + self +) +``` + +D.popitem() -> (k, v), remove and return some (key, value) pair + +as a 2-tuple; but raise KeyError if D is empty. + + +#### setdefault + +```python3 +def setdefault( + self, + key, + default=None +) +``` + +D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D + + +#### update + +```python3 +def update( + self, + other=(), + /, + **kwds +) +``` + +D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. + +If E present and has a .keys() method, does: for k in E: D[k] = E[k] +If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v +In either case, this is followed by: for k, v in F.items(): D[k] = v + + +#### values + +```python3 +def values( + self +) +``` + +D.values() -> an object providing a view on D's values + +### WEBPProfile + +```python3 +class WEBPProfile( + data={}, + **kwds +) +``` + +WEBP creation options ref: https://www.gdal.org/frmt_webp.html. 
+ +#### Ancestors (in MRO) + +* rasterio.profiles.Profile +* collections.UserDict +* collections.abc.MutableMapping +* collections.abc.Mapping +* collections.abc.Collection +* collections.abc.Sized +* collections.abc.Iterable +* collections.abc.Container + +#### Class variables + +```python3 +defaults +``` + +#### Static methods + + +#### fromkeys + +```python3 +def fromkeys( + iterable, + value=None +) +``` + +#### Methods + + +#### clear + +```python3 +def clear( + self +) +``` + +D.clear() -> None. Remove all items from D. + + +#### copy + +```python3 +def copy( + self +) +``` + + +#### get + +```python3 +def get( + self, + key, + default=None +) +``` + +D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None. + + +#### items + +```python3 +def items( + self +) +``` + +D.items() -> a set-like object providing a view on D's items + + +#### keys + +```python3 +def keys( + self +) +``` + +D.keys() -> a set-like object providing a view on D's keys + + +#### pop + +```python3 +def pop( + self, + key, + default= +) +``` + +D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + +If key is not found, d is returned if given, otherwise KeyError is raised. + + +#### popitem + +```python3 +def popitem( + self +) +``` + +D.popitem() -> (k, v), remove and return some (key, value) pair + +as a 2-tuple; but raise KeyError if D is empty. + + +#### setdefault + +```python3 +def setdefault( + self, + key, + default=None +) +``` + +D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D + + +#### update + +```python3 +def update( + self, + other=(), + /, + **kwds +) +``` + +D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. 
+ +If E present and has a .keys() method, does: for k in E: D[k] = E[k] +If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v +In either case, this is followed by: for k, v in F.items(): D[k] = v + + +#### values + +```python3 +def values( + self +) +``` + +D.values() -> an object providing a view on D's values \ No newline at end of file diff --git a/api/rio_tiler/reader/index.html b/api/rio_tiler/reader/index.html new file mode 100644 index 00000000..0f4ccd1f --- /dev/null +++ b/api/rio_tiler/reader/index.html @@ -0,0 +1,2347 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.reader - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.reader

+

rio-tiler.reader: low level reader.

+

Variables

+
WGS84_CRS
+
+

Functions

+

part

+
def part(
+    src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT],
+    bounds: Tuple[float, float, float, float],
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    max_size: Union[int, NoneType] = None,
+    dst_crs: Union[rasterio.crs.CRS, NoneType] = None,
+    bounds_crs: Union[rasterio.crs.CRS, NoneType] = None,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    minimum_overlap: Union[float, NoneType] = None,
+    padding: Union[int, NoneType] = None,
+    buffer: Union[float, NoneType] = None,
+    force_binary_mask: bool = True,
+    nodata: Union[float, int, str, NoneType] = None,
+    vrt_options: Union[Dict, NoneType] = None,
+    resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest',
+    reproject_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'sum', 'rms'] = 'nearest',
+    unscale: bool = False,
+    post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None
+) -> rio_tiler.models.ImageData
+
+

Read part of a dataset.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
src_dstrasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRTRasterio dataset.None
boundstupleOutput bounds (left, bottom, right, top). By default the coordinates are considered to be in either the dataset CRS or in the dst_crs if set. Use bounds_crs to set a specific CRS.None
heightintOutput height of the image.None
widthintOutput width of the image.None
max_sizeintLimit output size image if not width and height.None
dst_crsrasterio.crs.CRSTarget coordinate reference system.None
bounds_crsrasterio.crs.CRSOverwrite bounds Coordinate Reference System.None
indexessequence of int or intBand indexes.None
minimum_overlapfloatMinimum % overlap for which to raise an error with dataset not covering enough of the tile.None
paddingintPadding to apply to each bbox edge. Helps reduce resampling artefacts along edges. Defaults to 0.0
bufferfloatBuffer to apply to each bbox edge. Defaults to 0..0.
nodataint or floatOverwrite dataset internal nodata value.None
vrt_optionsdictOptions to be passed to the rasterio.warp.WarpedVRT class.None
resampling_methodRIOResamplingRasterIO resampling algorithm. Defaults to nearest.nearest
reproject_methodWarpResamplingWarpKernel resampling algorithm. Defaults to nearest.nearest
unscaleboolApply 'scales' and 'offsets' on output data value. Defaults to False.False
post_processcallableFunction to apply on output data and mask values.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneImageData
+

point

+
def point(
+    src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT],
+    coordinates: Tuple[float, float],
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    coord_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    force_binary_mask: bool = True,
+    nodata: Union[float, int, str, NoneType] = None,
+    vrt_options: Union[Dict, NoneType] = None,
+    resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest',
+    reproject_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'sum', 'rms'] = 'nearest',
+    unscale: bool = False,
+    post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None
+) -> rio_tiler.models.PointData
+
+

Read a pixel value for a point.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
src_dstrasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRTRasterio dataset.None
coordinatestupleCoordinates in form of (X, Y).None
indexessequence of int or intBand indexes.None
coord_crsrasterio.crs.CRSCoordinate Reference System of the input coords. Defaults to epsg:4326.epsg:4326
nodataint or floatOverwrite dataset internal nodata value.None
vrt_optionsdictOptions to be passed to the rasterio.warp.WarpedVRT class.None
resampling_methodRIOResamplingRasterIO resampling algorithm. Defaults to nearest.nearest
reproject_methodWarpResamplingWarpKernel resampling algorithm. Defaults to nearest.nearest
unscaleboolApply 'scales' and 'offsets' on output data value. Defaults to False.False
post_processcallableFunction to apply on output data and mask values.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NonePointData
+

read

+
def read(
+    src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT],
+    dst_crs: Union[rasterio.crs.CRS, NoneType] = None,
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    max_size: Union[int, NoneType] = None,
+    indexes: Union[Sequence[int], int, NoneType] = None,
+    window: Union[rasterio.windows.Window, NoneType] = None,
+    force_binary_mask: bool = True,
+    nodata: Union[float, int, str, NoneType] = None,
+    vrt_options: Union[Dict, NoneType] = None,
+    resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest',
+    reproject_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'sum', 'rms'] = 'nearest',
+    unscale: bool = False,
+    post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None
+) -> rio_tiler.models.ImageData
+
+

Low level read function.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
src_dstrasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRTRasterio dataset.None
dst_crsrasterio.crs.CRSTarget coordinate reference system.None
heightintOutput height of the image.None
widthintOutput width of the image.None
max_sizeintLimit output size image if not width and height.None
indexessequence of int or intBand indexes.None
windowrasterio.windows.WindowWindow to read.None
nodataint or floatOverwrite dataset internal nodata value.None
vrt_optionsdictOptions to be passed to the rasterio.warp.WarpedVRT class.None
resampling_methodRIOResamplingRasterIO resampling algorithm. Defaults to nearest.nearest
reproject_methodWarpResamplingWarpKernel resampling algorithm. Defaults to nearest.nearest
force_binary_maskboolCast returned mask to binary values (0 or 255). Defaults to True.True
unscaleboolApply 'scales' and 'offsets' on output data value. Defaults to False.False
post_processcallableFunction to apply on output data and mask values.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneImageData
+

Classes

+

Options

+
class Options(
+    /,
+    *args,
+    **kwargs
+)
+
+

Reader Options.

+

Ancestors (in MRO)

+
    +
  • builtins.dict
  • +
+

Methods

+

clear

+
def clear(
+    ...
+)
+
+

D.clear() -> None. Remove all items from D.

+

copy

+
def copy(
+    ...
+)
+
+

D.copy() -> a shallow copy of D

+

fromkeys

+
def fromkeys(
+    iterable,
+    value=None,
+    /
+)
+
+

Create a new dictionary with keys from iterable and values set to value.

+

get

+
def get(
+    self,
+    key,
+    default=None,
+    /
+)
+
+

Return the value for key if key is in the dictionary, else default.

+

items

+
def items(
+    ...
+)
+
+

D.items() -> a set-like object providing a view on D's items

+

keys

+
def keys(
+    ...
+)
+
+

D.keys() -> a set-like object providing a view on D's keys

+

pop

+
def pop(
+    ...
+)
+
+

D.pop(k[,d]) -> v, remove specified key and return the corresponding value.

+

If key is not found, d is returned if given, otherwise KeyError is raised

+

popitem

+
def popitem(
+    self,
+    /
+)
+
+

Remove and return a (key, value) pair as a 2-tuple.

+

Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.

+

setdefault

+
def setdefault(
+    self,
+    key,
+    default=None,
+    /
+)
+
+

Insert key with a value of default if key is not in the dictionary.

+

Return the value for key if key is in the dictionary, else default.

+

update

+
def update(
+    ...
+)
+
+

D.update([E, ]**F) -> None. Update D from dict/iterable E and F.

+

If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]

+

values

+
def values(
+    ...
+)
+
+

D.values() -> an object providing a view on D's values

+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/reader/reader.md b/api/rio_tiler/reader/reader.md new file mode 100644 index 00000000..99f2cffd --- /dev/null +++ b/api/rio_tiler/reader/reader.md @@ -0,0 +1,319 @@ +# Module rio_tiler.reader + +rio-tiler.reader: low level reader. + +## Variables + +```python3 +WGS84_CRS +``` + +## Functions + + +### part + +```python3 +def part( + src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT], + bounds: Tuple[float, float, float, float], + height: Union[int, NoneType] = None, + width: Union[int, NoneType] = None, + max_size: Union[int, NoneType] = None, + dst_crs: Union[rasterio.crs.CRS, NoneType] = None, + bounds_crs: Union[rasterio.crs.CRS, NoneType] = None, + indexes: Union[Sequence[int], int, NoneType] = None, + minimum_overlap: Union[float, NoneType] = None, + padding: Union[int, NoneType] = None, + buffer: Union[float, NoneType] = None, + force_binary_mask: bool = True, + nodata: Union[float, int, str, NoneType] = None, + vrt_options: Union[Dict, NoneType] = None, + resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest', + reproject_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'sum', 'rms'] = 'nearest', + unscale: bool = False, + post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None +) -> rio_tiler.models.ImageData +``` + +Read part of a dataset. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| src_dst | rasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRT | Rasterio dataset. | None | +| bounds | tuple | Output bounds (left, bottom, right, top). By default the coordinates are considered to be in either the dataset CRS or in the `dst_crs` if set. Use `bounds_crs` to set a specific CRS. 
| None | +| height | int | Output height of the image. | None | +| width | int | Output width of the image. | None | +| max_size | int | Limit output size image if not width and height. | None | +| dst_crs | rasterio.crs.CRS | Target coordinate reference system. | None | +| bounds_crs | rasterio.crs.CRS | Overwrite bounds Coordinate Reference System. | None | +| indexes | sequence of int or int | Band indexes. | None | +| minimum_overlap | float | Minimum % overlap for which to raise an error with dataset not covering enough of the tile. | None | +| padding | int | Padding to apply to each bbox edge. Helps reduce resampling artefacts along edges. Defaults to `0`. | `0` | +| buffer | float | Buffer to apply to each bbox edge. Defaults to `0.`. | `0.` | +| nodata | int or float | Overwrite dataset internal nodata value. | None | +| vrt_options | dict | Options to be passed to the rasterio.warp.WarpedVRT class. | None | +| resampling_method | RIOResampling | RasterIO resampling algorithm. Defaults to `nearest`. | `nearest` | +| reproject_method | WarpResampling | WarpKernel resampling algorithm. Defaults to `nearest`. | `nearest` | +| unscale | bool | Apply 'scales' and 'offsets' on output data value. Defaults to `False`. | `False` | +| post_process | callable | Function to apply on output data and mask values. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| None | ImageData | + + +### point + +```python3 +def point( + src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT], + coordinates: Tuple[float, float], + indexes: Union[Sequence[int], int, NoneType] = None, + coord_crs: rasterio.crs.CRS = CRS.from_epsg(4326), + force_binary_mask: bool = True, + nodata: Union[float, int, str, NoneType] = None, + vrt_options: Union[Dict, NoneType] = None, + resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest', + reproject_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'sum', 'rms'] = 'nearest', + unscale: bool = False, + post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None +) -> rio_tiler.models.PointData +``` + +Read a pixel value for a point. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| src_dst | rasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRT | Rasterio dataset. | None | +| coordinates | tuple | Coordinates in form of (X, Y). | None | +| indexes | sequence of int or int | Band indexes. | None | +| coord_crs | rasterio.crs.CRS | Coordinate Reference System of the input coords. Defaults to `epsg:4326`. | `epsg:4326` | +| nodata | int or float | Overwrite dataset internal nodata value. | None | +| vrt_options | dict | Options to be passed to the rasterio.warp.WarpedVRT class. | None | +| resampling_method | RIOResampling | RasterIO resampling algorithm. Defaults to `nearest`. | `nearest` | +| reproject_method | WarpResampling | WarpKernel resampling algorithm. Defaults to `nearest`. | `nearest` | +| unscale | bool | Apply 'scales' and 'offsets' on output data value. Defaults to `False`. | `False` | +| post_process | callable | Function to apply on output data and mask values. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| None | PointData | + + +### read + +```python3 +def read( + src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT], + dst_crs: Union[rasterio.crs.CRS, NoneType] = None, + height: Union[int, NoneType] = None, + width: Union[int, NoneType] = None, + max_size: Union[int, NoneType] = None, + indexes: Union[Sequence[int], int, NoneType] = None, + window: Union[rasterio.windows.Window, NoneType] = None, + force_binary_mask: bool = True, + nodata: Union[float, int, str, NoneType] = None, + vrt_options: Union[Dict, NoneType] = None, + resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest', + reproject_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'sum', 'rms'] = 'nearest', + unscale: bool = False, + post_process: Union[Callable[[numpy.ma.core.MaskedArray], numpy.ma.core.MaskedArray], NoneType] = None +) -> rio_tiler.models.ImageData +``` + +Low level read function. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| src_dst | rasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRT | Rasterio dataset. | None | +| dst_crs | rasterio.crs.CRS | Target coordinate reference system. | None | +| height | int | Output height of the image. | None | +| width | int | Output width of the image. | None | +| max_size | int | Limit output size image if not width and height. | None | +| indexes | sequence of int or int | Band indexes. | None | +| window | rasterio.windows.Window | Window to read. | None | +| nodata | int or float | Overwrite dataset internal nodata value. | None | +| vrt_options | dict | Options to be passed to the rasterio.warp.WarpedVRT class. | None | +| resampling_method | RIOResampling | RasterIO resampling algorithm. Defaults to `nearest`. 
| `nearest` | +| reproject_method | WarpResampling | WarpKernel resampling algorithm. Defaults to `nearest`. | `nearest` | +| force_binary_mask | bool | Cast returned mask to binary values (0 or 255). Defaults to `True`. | `True` | +| unscale | bool | Apply 'scales' and 'offsets' on output data value. Defaults to `False`. | `False` | +| post_process | callable | Function to apply on output data and mask values. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | ImageData | + +## Classes + +### Options + +```python3 +class Options( + /, + *args, + **kwargs +) +``` + +Reader Options. + +#### Ancestors (in MRO) + +* builtins.dict + +#### Methods + + +#### clear + +```python3 +def clear( + ... +) +``` + +D.clear() -> None. Remove all items from D. + + +#### copy + +```python3 +def copy( + ... +) +``` + +D.copy() -> a shallow copy of D + + +#### fromkeys + +```python3 +def fromkeys( + iterable, + value=None, + / +) +``` + +Create a new dictionary with keys from iterable and values set to value. + + +#### get + +```python3 +def get( + self, + key, + default=None, + / +) +``` + +Return the value for key if key is in the dictionary, else default. + + +#### items + +```python3 +def items( + ... +) +``` + +D.items() -> a set-like object providing a view on D's items + + +#### keys + +```python3 +def keys( + ... +) +``` + +D.keys() -> a set-like object providing a view on D's keys + + +#### pop + +```python3 +def pop( + ... +) +``` + +D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + +If key is not found, d is returned if given, otherwise KeyError is raised + + +#### popitem + +```python3 +def popitem( + self, + / +) +``` + +Remove and return a (key, value) pair as a 2-tuple. + +Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty. 
+ + +#### setdefault + +```python3 +def setdefault( + self, + key, + default=None, + / +) +``` + +Insert key with a value of default if key is not in the dictionary. + +Return the value for key if key is in the dictionary, else default. + + +#### update + +```python3 +def update( + ... +) +``` + +D.update([E, ]**F) -> None. Update D from dict/iterable E and F. + +If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k] + + +#### values + +```python3 +def values( + ... +) +``` + +D.values() -> an object providing a view on D's values \ No newline at end of file diff --git a/api/rio_tiler/tasks/index.html b/api/rio_tiler/tasks/index.html new file mode 100644 index 00000000..0b38d39d --- /dev/null +++ b/api/rio_tiler/tasks/index.html @@ -0,0 +1,1749 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.tasks - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.tasks

+

rio_tiler.tasks: tools for handling rio-tiler's future tasks.

+

Variables

+
MAX_THREADS
+
+
TaskType
+
+

Functions

+

create_tasks

+
def create_tasks(
+    reader: Callable,
+    asset_list: Sequence,
+    threads: int,
+    *args,
+    **kwargs
+) -> Sequence[Tuple[Union[concurrent.futures._base.Future, Callable], Any]]
+
+

Create Future Tasks.

+

filter_tasks

+
def filter_tasks(
+    tasks: Sequence[Tuple[Union[concurrent.futures._base.Future, Callable], Any]],
+    allowed_exceptions: Union[Tuple, NoneType] = None
+) -> Generator
+
+

Filter Tasks to remove Exceptions.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
taskssequenceSequence of 'concurrent.futures._base.Future' or 'Callable'None
allowed_exceptionstupleList of exceptions which won't be raised.None
+

Yields:

+ + + + + + + + + + + + + +
TypeDescription
NoneTask results.
+

multi_arrays

+
def multi_arrays(
+    asset_list: Sequence,
+    reader: Callable[..., rio_tiler.models.ImageData],
+    *args: Any,
+    threads: int = 10,
+    allowed_exceptions: Union[Tuple, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Merge arrays returned from tasks.

+

multi_points

+
def multi_points(
+    asset_list: Sequence,
+    reader: Callable[..., rio_tiler.models.PointData],
+    *args: Any,
+    threads: int = 10,
+    allowed_exceptions: Union[Tuple, NoneType] = None,
+    **kwargs: Any
+) -> rio_tiler.models.PointData
+
+

Merge points returned from tasks.

+

multi_values

+
def multi_values(
+    asset_list: Sequence,
+    reader: Callable,
+    *args: Any,
+    threads: int = 10,
+    allowed_exceptions: Union[Tuple, NoneType] = None,
+    **kwargs: Any
+) -> Dict
+
+

Merge values returned from tasks.

+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/tasks/tasks.md b/api/rio_tiler/tasks/tasks.md new file mode 100644 index 00000000..68d88faa --- /dev/null +++ b/api/rio_tiler/tasks/tasks.md @@ -0,0 +1,103 @@ +# Module rio_tiler.tasks + +rio_tiler.tasks: tools for handling rio-tiler's future tasks. + +## Variables + +```python3 +MAX_THREADS +``` + +```python3 +TaskType +``` + +## Functions + + +### create_tasks + +```python3 +def create_tasks( + reader: Callable, + asset_list: Sequence, + threads: int, + *args, + **kwargs +) -> Sequence[Tuple[Union[concurrent.futures._base.Future, Callable], Any]] +``` + +Create Future Tasks. + + +### filter_tasks + +```python3 +def filter_tasks( + tasks: Sequence[Tuple[Union[concurrent.futures._base.Future, Callable], Any]], + allowed_exceptions: Union[Tuple, NoneType] = None +) -> Generator +``` + +Filter Tasks to remove Exceptions. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tasks | sequence | Sequence of 'concurrent.futures._base.Future' or 'Callable' | None | +| allowed_exceptions | tuple | List of exceptions which won't be raised. | None | + +**Yields:** + +| Type | Description | +|---|---| +| None | Task results. | + + +### multi_arrays + +```python3 +def multi_arrays( + asset_list: Sequence, + reader: Callable[..., rio_tiler.models.ImageData], + *args: Any, + threads: int = 10, + allowed_exceptions: Union[Tuple, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Merge arrays returned from tasks. + + +### multi_points + +```python3 +def multi_points( + asset_list: Sequence, + reader: Callable[..., rio_tiler.models.PointData], + *args: Any, + threads: int = 10, + allowed_exceptions: Union[Tuple, NoneType] = None, + **kwargs: Any +) -> rio_tiler.models.PointData +``` + +Merge points returned from tasks. 
+ + +### multi_values + +```python3 +def multi_values( + asset_list: Sequence, + reader: Callable, + *args: Any, + threads: int = 10, + allowed_exceptions: Union[Tuple, NoneType] = None, + **kwargs: Any +) -> Dict +``` + +Merge values returned from tasks. \ No newline at end of file diff --git a/api/rio_tiler/utils/index.html b/api/rio_tiler/utils/index.html new file mode 100644 index 00000000..d2333627 --- /dev/null +++ b/api/rio_tiler/utils/index.html @@ -0,0 +1,2209 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rio_tiler.utils - rio-tiler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+ +
+
+ + + +
+
+ + + + + + + + + + +

Module rio_tiler.utils

+

rio_tiler.utils: utility functions.

+

Variables

+
WEB_MERCATOR_CRS
+
+

Functions

+

create_cutline

+
def create_cutline(
+    src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT],
+    geometry: Dict,
+    geometry_crs: rasterio.crs.CRS = None
+) -> str
+
+

Create WKT Polygon Cutline for GDALWarpOptions.

+

Ref: gdal.org/api/gdalwarp_cpp.html?highlight=vrt#_CPPv415GDALWarpOptions

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
src_dstrasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRTRasterio dataset.None
geometrydictGeoJSON feature or GeoJSON geometry. By default the coordinates are considered to be in the dataset CRS. Use geometry_crs to set a specific CRS.None
geometry_crsrasterio.crs.CRSInput geometry Coordinate Reference SystemNone
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
strWKT geometry in form of `POLYGON ((x y, x y, ...)))
+

get_array_statistics

+
def get_array_statistics(
+    data: numpy.ma.core.MaskedArray,
+    categorical: bool = False,
+    categories: Union[List[float], NoneType] = None,
+    percentiles: Union[List[int], NoneType] = None,
+    **kwargs: Any
+) -> List[Dict[Any, Any]]
+
+

Calculate per band array statistics.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
datanumpy.ma.MaskedArrayinput masked array data to get the statistics from.None
categoricalbooltreat input data as categorical data. Defaults to False.False
categorieslist of numberslist of categories to return value for.None
percentileslist of numberslist of percentile values to calculate. Defaults to [2, 98].[2, 98]
kwargsoptionaloptions to forward to numpy.histogram function (only applies for non-categorical data).None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
listlist of array statistics (dict)
+

get_overview_level

+
def get_overview_level(
+    src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT],
+    bounds: Tuple[float, float, float, float],
+    height: int,
+    width: int,
+    dst_crs: rasterio.crs.CRS = CRS.from_epsg(3857)
+) -> int
+
+

Return the overview level corresponding to the tile resolution.

+

Freely adapted from github.com/OSGeo/gdal/blob/41993f127e6e1669fbd9e944744b7c9b2bd6c400/gdal/apps/gdalwarp_lib.cpp#L2293-L2362

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
src_dstrasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRTRasterio dataset.None
boundstupleBounding box coordinates in target crs (dst_crs).None
heightintDesired output height of the array for the input bounds.None
widthintDesired output width of the array for the input bounds.None
dst_crsrasterio.crs.CRSTarget Coordinate Reference System. Defaults to epsg:3857.epsg:3857
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
intOverview level.
+

get_vrt_transform

+
def get_vrt_transform(
+    src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT],
+    bounds: Tuple[float, float, float, float],
+    height: Union[int, NoneType] = None,
+    width: Union[int, NoneType] = None,
+    dst_crs: rasterio.crs.CRS = CRS.from_epsg(3857),
+    window_precision: int = 6
+) -> Tuple[affine.Affine, int, int]
+
+

Calculate VRT transform.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
src_dstrasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRTRasterio dataset.None
boundstupleBounding box coordinates in target crs (dst_crs).None
heightintDesired output height of the array for the input bounds.None
widthintDesired output width of the array for the input bounds.None
dst_crsrasterio.crs.CRSTarget Coordinate Reference System. Defaults to epsg:3857.epsg:3857
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
tupleVRT transform (affine.Affine), width (int) and height (int)
+

has_alpha_band

+
def has_alpha_band(
+    src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT]
+) -> bool
+
+

Check for alpha band or mask in source.

+

has_mask_band

+
def has_mask_band(
+    src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT]
+) -> bool
+
+

Check for mask band in source.

+

linear_rescale

+
def linear_rescale(
+    image: numpy.ndarray,
+    in_range: Tuple[Union[float, int], Union[float, int]],
+    out_range: Tuple[Union[float, int], Union[float, int]] = (0, 255)
+) -> numpy.ndarray
+
+

Apply linear rescaling to a numpy array.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
imagenumpy.ndarrayarray to rescale.None
in_rangetuplearray min/max value to rescale from.None
out_rangetupleoutput min/max bounds to rescale to. Defaults to (0, 255).(0, 255)
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
numpy.ndarraylinear rescaled array.
+

mapzen_elevation_rgb

+
def mapzen_elevation_rgb(
+    data: numpy.ndarray
+) -> numpy.ndarray
+
+

Encode elevation value to RGB values compatible with Mapzen tangram.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
datanumpy.ndarrayImage array to encode.None
+

non_alpha_indexes

+
def non_alpha_indexes(
+    src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT]
+) -> Tuple
+
+

Return indexes of non-alpha bands.

+

normalize_bounds

+
def normalize_bounds(
+    bounds: Tuple[float, float, float, float]
+) -> Tuple[float, float, float, float]
+
+

Return BBox in correct minx, miny, maxx, maxy order.

+

pansharpening_brovey

+
def pansharpening_brovey(
+    rgb: numpy.ndarray,
+    pan: numpy.ndarray,
+    weight: float,
+    pan_dtype: str
+) -> numpy.ndarray
+
+

Apply Brovey pansharpening method.

+

Brovey Method: Each resampled, multispectral pixel is +multiplied by the ratio of the corresponding +panchromatic pixel intensity to the sum of all the +multispectral intensities.

+

Original code from mapbox/rio-pansharpen

+

render

+
def render(
+    data: numpy.ndarray,
+    mask: Union[numpy.ndarray, NoneType] = None,
+    img_format: str = 'PNG',
+    colormap: Union[Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]], NoneType] = None,
+    **creation_options: Any
+) -> bytes
+
+

Translate numpy.ndarray to image bytes.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
datanumpy.ndarrayImage array to encode.None
masknumpy.ndarrayMask array.None
img_formatstrImage format. See: for the list of supported format by GDAL: www.gdal.org/formats_list.html. Defaults to PNG.PNG
colormapdict or sequenceRGBA Color Table dictionary or sequence.None
creation_optionsoptionalImage driver creation options to forward to GDAL.None
+

resize_array

+
def resize_array(
+    data: numpy.ndarray,
+    height: int,
+    width: int,
+    resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest'
+) -> numpy.ndarray
+
+

resize array to a given height and width.

+ + + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/api/rio_tiler/utils/utils.md b/api/rio_tiler/utils/utils.md new file mode 100644 index 00000000..9661ed2c --- /dev/null +++ b/api/rio_tiler/utils/utils.md @@ -0,0 +1,284 @@ +# Module rio_tiler.utils + +rio_tiler.utils: utility functions. + +## Variables + +```python3 +WEB_MERCATOR_CRS +``` + +## Functions + + +### create_cutline + +```python3 +def create_cutline( + src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT], + geometry: Dict, + geometry_crs: rasterio.crs.CRS = None +) -> str +``` + +Create WKT Polygon Cutline for GDALWarpOptions. + +Ref: https://gdal.org/api/gdalwarp_cpp.html?highlight=vrt#_CPPv415GDALWarpOptions + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| src_dst | rasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRT | Rasterio dataset. | None | +| geometry | dict | GeoJSON feature or GeoJSON geometry. By default the coordinates are considered to be in the dataset CRS. Use `geometry_crs` to set a specific CRS. | None | +| geometry_crs | rasterio.crs.CRS | Input geometry Coordinate Reference System | None | + +**Returns:** + +| Type | Description | +|---|---| +| str | WKT geometry in form of `POLYGON ((x y, x y, ...))) | + + +### get_array_statistics + +```python3 +def get_array_statistics( + data: numpy.ma.core.MaskedArray, + categorical: bool = False, + categories: Union[List[float], NoneType] = None, + percentiles: Union[List[int], NoneType] = None, + **kwargs: Any +) -> List[Dict[Any, Any]] +``` + +Calculate per band array statistics. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| data | numpy.ma.MaskedArray | input masked array data to get the statistics from. | None | +| categorical | bool | treat input data as categorical data. Defaults to `False`. | `False` | +| categories | list of numbers | list of categories to return value for. 
| None | +| percentiles | list of numbers | list of percentile values to calculate. Defaults to `[2, 98]`. | `[2, 98]` | +| kwargs | optional | options to forward to `numpy.histogram` function (only applies for non-categorical data). | None | + +**Returns:** + +| Type | Description | +|---|---| +| list | list of array statistics (dict) | + + +### get_overview_level + +```python3 +def get_overview_level( + src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT], + bounds: Tuple[float, float, float, float], + height: int, + width: int, + dst_crs: rasterio.crs.CRS = CRS.from_epsg(3857) +) -> int +``` + +Return the overview level corresponding to the tile resolution. + +Freely adapted from https://github.com/OSGeo/gdal/blob/41993f127e6e1669fbd9e944744b7c9b2bd6c400/gdal/apps/gdalwarp_lib.cpp#L2293-L2362 + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| src_dst | rasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRT | Rasterio dataset. | None | +| bounds | tuple | Bounding box coordinates in target crs (**dst_crs**). | None | +| height | int | Desired output height of the array for the input bounds. | None | +| width | int | Desired output width of the array for the input bounds. | None | +| dst_crs | rasterio.crs.CRS | Target Coordinate Reference System. Defaults to `epsg:3857`. | `epsg:3857` | + +**Returns:** + +| Type | Description | +|---|---| +| int | Overview level. | + + +### get_vrt_transform + +```python3 +def get_vrt_transform( + src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT], + bounds: Tuple[float, float, float, float], + height: Union[int, NoneType] = None, + width: Union[int, NoneType] = None, + dst_crs: rasterio.crs.CRS = CRS.from_epsg(3857), + window_precision: int = 6 +) -> Tuple[affine.Affine, int, int] +``` + +Calculate VRT transform. 
+ +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| src_dst | rasterio.io.DatasetReader or rasterio.io.DatasetWriter or rasterio.vrt.WarpedVRT | Rasterio dataset. | None | +| bounds | tuple | Bounding box coordinates in target crs (**dst_crs**). | None | +| height | int | Desired output height of the array for the input bounds. | None | +| width | int | Desired output width of the array for the input bounds. | None | +| dst_crs | rasterio.crs.CRS | Target Coordinate Reference System. Defaults to `epsg:3857`. | `epsg:3857` | + +**Returns:** + +| Type | Description | +|---|---| +| tuple | VRT transform (affine.Affine), width (int) and height (int) | + + +### has_alpha_band + +```python3 +def has_alpha_band( + src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT] +) -> bool +``` + +Check for alpha band or mask in source. + + +### has_mask_band + +```python3 +def has_mask_band( + src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT] +) -> bool +``` + +Check for mask band in source. + + +### linear_rescale + +```python3 +def linear_rescale( + image: numpy.ndarray, + in_range: Tuple[Union[float, int], Union[float, int]], + out_range: Tuple[Union[float, int], Union[float, int]] = (0, 255) +) -> numpy.ndarray +``` + +Apply linear rescaling to a numpy array. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| image | numpy.ndarray | array to rescale. | None | +| in_range | tuple | array min/max value to rescale from. | None | +| out_range | tuple | output min/max bounds to rescale to. Defaults to `(0, 255)`. | `(0, 255)` | + +**Returns:** + +| Type | Description | +|---|---| +| numpy.ndarray | linear rescaled array. | + + +### mapzen_elevation_rgb + +```python3 +def mapzen_elevation_rgb( + data: numpy.ndarray +) -> numpy.ndarray +``` + +Encode elevation value to RGB values compatible with Mapzen tangram. 
+ +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| data | numpy.ndarray | Image array to encode. | None | + + +### non_alpha_indexes + +```python3 +def non_alpha_indexes( + src_dst: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter, rasterio.vrt.WarpedVRT] +) -> Tuple +``` + +Return indexes of non-alpha bands. + + +### normalize_bounds + +```python3 +def normalize_bounds( + bounds: Tuple[float, float, float, float] +) -> Tuple[float, float, float, float] +``` + +Return BBox in correct minx, miny, maxx, maxy order. + + +### pansharpening_brovey + +```python3 +def pansharpening_brovey( + rgb: numpy.ndarray, + pan: numpy.ndarray, + weight: float, + pan_dtype: str +) -> numpy.ndarray +``` + +Apply Brovey pansharpening method. + +Brovey Method: Each resampled, multispectral pixel is +multiplied by the ratio of the corresponding +panchromatic pixel intensity to the sum of all the +multispectral intensities. + +Original code from https://github.com/mapbox/rio-pansharpen + + +### render + +```python3 +def render( + data: numpy.ndarray, + mask: Union[numpy.ndarray, NoneType] = None, + img_format: str = 'PNG', + colormap: Union[Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]], NoneType] = None, + **creation_options: Any +) -> bytes +``` + +Translate numpy.ndarray to image bytes. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| data | numpy.ndarray | Image array to encode. | None | +| mask | numpy.ndarray | Mask array. | None | +| img_format | str | Image format. See: for the list of supported format by GDAL: https://www.gdal.org/formats_list.html. Defaults to `PNG`. | `PNG` | +| colormap | dict or sequence | RGBA Color Table dictionary or sequence. | None | +| creation_options | optional | Image driver creation options to forward to GDAL. 
| None | + + +### resize_array + +```python3 +def resize_array( + data: numpy.ndarray, + height: int, + width: int, + resampling_method: Literal['nearest', 'bilinear', 'cubic', 'cubic_spline', 'lanczos', 'average', 'mode', 'gauss', 'rms'] = 'nearest' +) -> numpy.ndarray +``` + +resize array to a given height and width. \ No newline at end of file diff --git a/assets/images/favicon.png b/assets/images/favicon.png new file mode 100644 index 00000000..1cf13b9f Binary files /dev/null and b/assets/images/favicon.png differ diff --git a/assets/javascripts/bundle.dff1b7c8.min.js b/assets/javascripts/bundle.dff1b7c8.min.js new file mode 100644 index 00000000..a89e799a --- /dev/null +++ b/assets/javascripts/bundle.dff1b7c8.min.js @@ -0,0 +1,29 @@ +"use strict";(()=>{var gi=Object.create;var dr=Object.defineProperty;var xi=Object.getOwnPropertyDescriptor;var yi=Object.getOwnPropertyNames,Ht=Object.getOwnPropertySymbols,Ei=Object.getPrototypeOf,hr=Object.prototype.hasOwnProperty,Xr=Object.prototype.propertyIsEnumerable;var Jr=(e,t,r)=>t in e?dr(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,I=(e,t)=>{for(var r in t||(t={}))hr.call(t,r)&&Jr(e,r,t[r]);if(Ht)for(var r of Ht(t))Xr.call(t,r)&&Jr(e,r,t[r]);return e};var Zr=(e,t)=>{var r={};for(var o in e)hr.call(e,o)&&t.indexOf(o)<0&&(r[o]=e[o]);if(e!=null&&Ht)for(var o of Ht(e))t.indexOf(o)<0&&Xr.call(e,o)&&(r[o]=e[o]);return r};var br=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports);var wi=(e,t,r,o)=>{if(t&&typeof t=="object"||typeof t=="function")for(let n of yi(t))!hr.call(e,n)&&n!==r&&dr(e,n,{get:()=>t[n],enumerable:!(o=xi(t,n))||o.enumerable});return e};var $t=(e,t,r)=>(r=e!=null?gi(Ei(e)):{},wi(t||!e||!e.__esModule?dr(r,"default",{value:e,enumerable:!0}):r,e));var to=br((vr,eo)=>{(function(e,t){typeof vr=="object"&&typeof eo!="undefined"?t():typeof define=="function"&&define.amd?define(t):t()})(vr,function(){"use strict";function e(r){var 
o=!0,n=!1,i=null,s={text:!0,search:!0,url:!0,tel:!0,email:!0,password:!0,number:!0,date:!0,month:!0,week:!0,time:!0,datetime:!0,"datetime-local":!0};function a(A){return!!(A&&A!==document&&A.nodeName!=="HTML"&&A.nodeName!=="BODY"&&"classList"in A&&"contains"in A.classList)}function c(A){var it=A.type,Ne=A.tagName;return!!(Ne==="INPUT"&&s[it]&&!A.readOnly||Ne==="TEXTAREA"&&!A.readOnly||A.isContentEditable)}function p(A){A.classList.contains("focus-visible")||(A.classList.add("focus-visible"),A.setAttribute("data-focus-visible-added",""))}function m(A){A.hasAttribute("data-focus-visible-added")&&(A.classList.remove("focus-visible"),A.removeAttribute("data-focus-visible-added"))}function f(A){A.metaKey||A.altKey||A.ctrlKey||(a(r.activeElement)&&p(r.activeElement),o=!0)}function u(A){o=!1}function d(A){a(A.target)&&(o||c(A.target))&&p(A.target)}function b(A){a(A.target)&&(A.target.classList.contains("focus-visible")||A.target.hasAttribute("data-focus-visible-added"))&&(n=!0,window.clearTimeout(i),i=window.setTimeout(function(){n=!1},100),m(A.target))}function _(A){document.visibilityState==="hidden"&&(n&&(o=!0),re())}function re(){document.addEventListener("mousemove",Y),document.addEventListener("mousedown",Y),document.addEventListener("mouseup",Y),document.addEventListener("pointermove",Y),document.addEventListener("pointerdown",Y),document.addEventListener("pointerup",Y),document.addEventListener("touchmove",Y),document.addEventListener("touchstart",Y),document.addEventListener("touchend",Y)}function Z(){document.removeEventListener("mousemove",Y),document.removeEventListener("mousedown",Y),document.removeEventListener("mouseup",Y),document.removeEventListener("pointermove",Y),document.removeEventListener("pointerdown",Y),document.removeEventListener("pointerup",Y),document.removeEventListener("touchmove",Y),document.removeEventListener("touchstart",Y),document.removeEventListener("touchend",Y)}function 
Y(A){A.target.nodeName&&A.target.nodeName.toLowerCase()==="html"||(o=!1,Z())}document.addEventListener("keydown",f,!0),document.addEventListener("mousedown",u,!0),document.addEventListener("pointerdown",u,!0),document.addEventListener("touchstart",u,!0),document.addEventListener("visibilitychange",_,!0),re(),r.addEventListener("focus",d,!0),r.addEventListener("blur",b,!0),r.nodeType===Node.DOCUMENT_FRAGMENT_NODE&&r.host?r.host.setAttribute("data-js-focus-visible",""):r.nodeType===Node.DOCUMENT_NODE&&(document.documentElement.classList.add("js-focus-visible"),document.documentElement.setAttribute("data-js-focus-visible",""))}if(typeof window!="undefined"&&typeof document!="undefined"){window.applyFocusVisiblePolyfill=e;var t;try{t=new CustomEvent("focus-visible-polyfill-ready")}catch(r){t=document.createEvent("CustomEvent"),t.initCustomEvent("focus-visible-polyfill-ready",!1,!1,{})}window.dispatchEvent(t)}typeof document!="undefined"&&e(document)})});var Vr=br((Mt,Dr)=>{/*! + * clipboard.js v2.0.11 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */(function(t,r){typeof Mt=="object"&&typeof Dr=="object"?Dr.exports=r():typeof define=="function"&&define.amd?define([],r):typeof Mt=="object"?Mt.ClipboardJS=r():t.ClipboardJS=r()})(Mt,function(){return function(){var e={686:function(o,n,i){"use strict";i.d(n,{default:function(){return vi}});var s=i(279),a=i.n(s),c=i(370),p=i.n(c),m=i(817),f=i.n(m);function u(F){try{return document.execCommand(F)}catch(S){return!1}}var d=function(S){var y=f()(S);return u("cut"),y},b=d;function _(F){var S=document.documentElement.getAttribute("dir")==="rtl",y=document.createElement("textarea");y.style.fontSize="12pt",y.style.border="0",y.style.padding="0",y.style.margin="0",y.style.position="absolute",y.style[S?"right":"left"]="-9999px";var R=window.pageYOffset||document.documentElement.scrollTop;return y.style.top="".concat(R,"px"),y.setAttribute("readonly",""),y.value=F,y}var re=function(S,y){var 
R=_(S);y.container.appendChild(R);var P=f()(R);return u("copy"),R.remove(),P},Z=function(S){var y=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body},R="";return typeof S=="string"?R=re(S,y):S instanceof HTMLInputElement&&!["text","search","url","tel","password"].includes(S==null?void 0:S.type)?R=re(S.value,y):(R=f()(S),u("copy")),R},Y=Z;function A(F){"@babel/helpers - typeof";return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?A=function(y){return typeof y}:A=function(y){return y&&typeof Symbol=="function"&&y.constructor===Symbol&&y!==Symbol.prototype?"symbol":typeof y},A(F)}var it=function(){var S=arguments.length>0&&arguments[0]!==void 0?arguments[0]:{},y=S.action,R=y===void 0?"copy":y,P=S.container,q=S.target,Me=S.text;if(R!=="copy"&&R!=="cut")throw new Error('Invalid "action" value, use either "copy" or "cut"');if(q!==void 0)if(q&&A(q)==="object"&&q.nodeType===1){if(R==="copy"&&q.hasAttribute("disabled"))throw new Error('Invalid "target" attribute. Please use "readonly" instead of "disabled" attribute');if(R==="cut"&&(q.hasAttribute("readonly")||q.hasAttribute("disabled")))throw new Error(`Invalid "target" attribute. 
You can't cut text from elements with "readonly" or "disabled" attributes`)}else throw new Error('Invalid "target" value, use a valid Element');if(Me)return Y(Me,{container:P});if(q)return R==="cut"?b(q):Y(q,{container:P})},Ne=it;function Ie(F){"@babel/helpers - typeof";return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?Ie=function(y){return typeof y}:Ie=function(y){return y&&typeof Symbol=="function"&&y.constructor===Symbol&&y!==Symbol.prototype?"symbol":typeof y},Ie(F)}function pi(F,S){if(!(F instanceof S))throw new TypeError("Cannot call a class as a function")}function Gr(F,S){for(var y=0;y0&&arguments[0]!==void 0?arguments[0]:{};this.action=typeof P.action=="function"?P.action:this.defaultAction,this.target=typeof P.target=="function"?P.target:this.defaultTarget,this.text=typeof P.text=="function"?P.text:this.defaultText,this.container=Ie(P.container)==="object"?P.container:document.body}},{key:"listenClick",value:function(P){var q=this;this.listener=p()(P,"click",function(Me){return q.onClick(Me)})}},{key:"onClick",value:function(P){var q=P.delegateTarget||P.currentTarget,Me=this.action(q)||"copy",kt=Ne({action:Me,container:this.container,target:this.target(q),text:this.text(q)});this.emit(kt?"success":"error",{action:Me,text:kt,trigger:q,clearSelection:function(){q&&q.focus(),window.getSelection().removeAllRanges()}})}},{key:"defaultAction",value:function(P){return ur("action",P)}},{key:"defaultTarget",value:function(P){var q=ur("target",P);if(q)return document.querySelector(q)}},{key:"defaultText",value:function(P){return ur("text",P)}},{key:"destroy",value:function(){this.listener.destroy()}}],[{key:"copy",value:function(P){var q=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body};return Y(P,q)}},{key:"cut",value:function(P){return b(P)}},{key:"isSupported",value:function(){var P=arguments.length>0&&arguments[0]!==void 0?arguments[0]:["copy","cut"],q=typeof 
P=="string"?[P]:P,Me=!!document.queryCommandSupported;return q.forEach(function(kt){Me=Me&&!!document.queryCommandSupported(kt)}),Me}}]),y}(a()),vi=bi},828:function(o){var n=9;if(typeof Element!="undefined"&&!Element.prototype.matches){var i=Element.prototype;i.matches=i.matchesSelector||i.mozMatchesSelector||i.msMatchesSelector||i.oMatchesSelector||i.webkitMatchesSelector}function s(a,c){for(;a&&a.nodeType!==n;){if(typeof a.matches=="function"&&a.matches(c))return a;a=a.parentNode}}o.exports=s},438:function(o,n,i){var s=i(828);function a(m,f,u,d,b){var _=p.apply(this,arguments);return m.addEventListener(u,_,b),{destroy:function(){m.removeEventListener(u,_,b)}}}function c(m,f,u,d,b){return typeof m.addEventListener=="function"?a.apply(null,arguments):typeof u=="function"?a.bind(null,document).apply(null,arguments):(typeof m=="string"&&(m=document.querySelectorAll(m)),Array.prototype.map.call(m,function(_){return a(_,f,u,d,b)}))}function p(m,f,u,d){return function(b){b.delegateTarget=s(b.target,f),b.delegateTarget&&d.call(m,b)}}o.exports=c},879:function(o,n){n.node=function(i){return i!==void 0&&i instanceof HTMLElement&&i.nodeType===1},n.nodeList=function(i){var s=Object.prototype.toString.call(i);return i!==void 0&&(s==="[object NodeList]"||s==="[object HTMLCollection]")&&"length"in i&&(i.length===0||n.node(i[0]))},n.string=function(i){return typeof i=="string"||i instanceof String},n.fn=function(i){var s=Object.prototype.toString.call(i);return s==="[object Function]"}},370:function(o,n,i){var s=i(879),a=i(438);function c(u,d,b){if(!u&&!d&&!b)throw new Error("Missing required arguments");if(!s.string(d))throw new TypeError("Second argument must be a String");if(!s.fn(b))throw new TypeError("Third argument must be a Function");if(s.node(u))return p(u,d,b);if(s.nodeList(u))return m(u,d,b);if(s.string(u))return f(u,d,b);throw new TypeError("First argument must be a String, HTMLElement, HTMLCollection, or NodeList")}function p(u,d,b){return 
u.addEventListener(d,b),{destroy:function(){u.removeEventListener(d,b)}}}function m(u,d,b){return Array.prototype.forEach.call(u,function(_){_.addEventListener(d,b)}),{destroy:function(){Array.prototype.forEach.call(u,function(_){_.removeEventListener(d,b)})}}}function f(u,d,b){return a(document.body,u,d,b)}o.exports=c},817:function(o){function n(i){var s;if(i.nodeName==="SELECT")i.focus(),s=i.value;else if(i.nodeName==="INPUT"||i.nodeName==="TEXTAREA"){var a=i.hasAttribute("readonly");a||i.setAttribute("readonly",""),i.select(),i.setSelectionRange(0,i.value.length),a||i.removeAttribute("readonly"),s=i.value}else{i.hasAttribute("contenteditable")&&i.focus();var c=window.getSelection(),p=document.createRange();p.selectNodeContents(i),c.removeAllRanges(),c.addRange(p),s=c.toString()}return s}o.exports=n},279:function(o){function n(){}n.prototype={on:function(i,s,a){var c=this.e||(this.e={});return(c[i]||(c[i]=[])).push({fn:s,ctx:a}),this},once:function(i,s,a){var c=this;function p(){c.off(i,p),s.apply(a,arguments)}return p._=s,this.on(i,p,a)},emit:function(i){var s=[].slice.call(arguments,1),a=((this.e||(this.e={}))[i]||[]).slice(),c=0,p=a.length;for(c;c{"use strict";/*! 
+ * escape-html + * Copyright(c) 2012-2013 TJ Holowaychuk + * Copyright(c) 2015 Andreas Lubbe + * Copyright(c) 2015 Tiancheng "Timothy" Gu + * MIT Licensed + */var _a=/["'&<>]/;Pn.exports=Aa;function Aa(e){var t=""+e,r=_a.exec(t);if(!r)return t;var o,n="",i=0,s=0;for(i=r.index;i0&&i[i.length-1])&&(p[0]===6||p[0]===2)){r=0;continue}if(p[0]===3&&(!i||p[1]>i[0]&&p[1]=e.length&&(e=void 0),{value:e&&e[o++],done:!e}}};throw new TypeError(t?"Object is not iterable.":"Symbol.iterator is not defined.")}function U(e,t){var r=typeof Symbol=="function"&&e[Symbol.iterator];if(!r)return e;var o=r.call(e),n,i=[],s;try{for(;(t===void 0||t-- >0)&&!(n=o.next()).done;)i.push(n.value)}catch(a){s={error:a}}finally{try{n&&!n.done&&(r=o.return)&&r.call(o)}finally{if(s)throw s.error}}return i}function D(e,t,r){if(r||arguments.length===2)for(var o=0,n=t.length,i;o1||a(u,d)})})}function a(u,d){try{c(o[u](d))}catch(b){f(i[0][3],b)}}function c(u){u.value instanceof Ze?Promise.resolve(u.value.v).then(p,m):f(i[0][2],u)}function p(u){a("next",u)}function m(u){a("throw",u)}function f(u,d){u(d),i.shift(),i.length&&a(i[0][0],i[0][1])}}function no(e){if(!Symbol.asyncIterator)throw new TypeError("Symbol.asyncIterator is not defined.");var t=e[Symbol.asyncIterator],r;return t?t.call(e):(e=typeof Ee=="function"?Ee(e):e[Symbol.iterator](),r={},o("next"),o("throw"),o("return"),r[Symbol.asyncIterator]=function(){return this},r);function o(i){r[i]=e[i]&&function(s){return new Promise(function(a,c){s=e[i](s),n(a,c,s.done,s.value)})}}function n(i,s,a,c){Promise.resolve(c).then(function(p){i({value:p,done:a})},s)}}function C(e){return typeof e=="function"}function at(e){var t=function(o){Error.call(o),o.stack=new Error().stack},r=e(t);return r.prototype=Object.create(Error.prototype),r.prototype.constructor=r,r}var It=at(function(e){return function(r){e(this),this.message=r?r.length+` errors occurred during unsubscription: +`+r.map(function(o,n){return n+1+") "+o.toString()}).join(` + 
`):"",this.name="UnsubscriptionError",this.errors=r}});function De(e,t){if(e){var r=e.indexOf(t);0<=r&&e.splice(r,1)}}var Pe=function(){function e(t){this.initialTeardown=t,this.closed=!1,this._parentage=null,this._finalizers=null}return e.prototype.unsubscribe=function(){var t,r,o,n,i;if(!this.closed){this.closed=!0;var s=this._parentage;if(s)if(this._parentage=null,Array.isArray(s))try{for(var a=Ee(s),c=a.next();!c.done;c=a.next()){var p=c.value;p.remove(this)}}catch(_){t={error:_}}finally{try{c&&!c.done&&(r=a.return)&&r.call(a)}finally{if(t)throw t.error}}else s.remove(this);var m=this.initialTeardown;if(C(m))try{m()}catch(_){i=_ instanceof It?_.errors:[_]}var f=this._finalizers;if(f){this._finalizers=null;try{for(var u=Ee(f),d=u.next();!d.done;d=u.next()){var b=d.value;try{io(b)}catch(_){i=i!=null?i:[],_ instanceof It?i=D(D([],U(i)),U(_.errors)):i.push(_)}}}catch(_){o={error:_}}finally{try{d&&!d.done&&(n=u.return)&&n.call(u)}finally{if(o)throw o.error}}}if(i)throw new It(i)}},e.prototype.add=function(t){var r;if(t&&t!==this)if(this.closed)io(t);else{if(t instanceof e){if(t.closed||t._hasParent(this))return;t._addParent(this)}(this._finalizers=(r=this._finalizers)!==null&&r!==void 0?r:[]).push(t)}},e.prototype._hasParent=function(t){var r=this._parentage;return r===t||Array.isArray(r)&&r.includes(t)},e.prototype._addParent=function(t){var r=this._parentage;this._parentage=Array.isArray(r)?(r.push(t),r):r?[r,t]:t},e.prototype._removeParent=function(t){var r=this._parentage;r===t?this._parentage=null:Array.isArray(r)&&De(r,t)},e.prototype.remove=function(t){var r=this._finalizers;r&&De(r,t),t instanceof e&&t._removeParent(this)},e.EMPTY=function(){var t=new e;return t.closed=!0,t}(),e}();var xr=Pe.EMPTY;function Pt(e){return e instanceof Pe||e&&"closed"in e&&C(e.remove)&&C(e.add)&&C(e.unsubscribe)}function io(e){C(e)?e():e.unsubscribe()}var Le={onUnhandledError:null,onStoppedNotification:null,Promise:void 
0,useDeprecatedSynchronousErrorHandling:!1,useDeprecatedNextContext:!1};var st={setTimeout:function(e,t){for(var r=[],o=2;o0},enumerable:!1,configurable:!0}),t.prototype._trySubscribe=function(r){return this._throwIfClosed(),e.prototype._trySubscribe.call(this,r)},t.prototype._subscribe=function(r){return this._throwIfClosed(),this._checkFinalizedStatuses(r),this._innerSubscribe(r)},t.prototype._innerSubscribe=function(r){var o=this,n=this,i=n.hasError,s=n.isStopped,a=n.observers;return i||s?xr:(this.currentObservers=null,a.push(r),new Pe(function(){o.currentObservers=null,De(a,r)}))},t.prototype._checkFinalizedStatuses=function(r){var o=this,n=o.hasError,i=o.thrownError,s=o.isStopped;n?r.error(i):s&&r.complete()},t.prototype.asObservable=function(){var r=new j;return r.source=this,r},t.create=function(r,o){return new uo(r,o)},t}(j);var uo=function(e){ie(t,e);function t(r,o){var n=e.call(this)||this;return n.destination=r,n.source=o,n}return t.prototype.next=function(r){var o,n;(n=(o=this.destination)===null||o===void 0?void 0:o.next)===null||n===void 0||n.call(o,r)},t.prototype.error=function(r){var o,n;(n=(o=this.destination)===null||o===void 0?void 0:o.error)===null||n===void 0||n.call(o,r)},t.prototype.complete=function(){var r,o;(o=(r=this.destination)===null||r===void 0?void 0:r.complete)===null||o===void 0||o.call(r)},t.prototype._subscribe=function(r){var o,n;return(n=(o=this.source)===null||o===void 0?void 0:o.subscribe(r))!==null&&n!==void 0?n:xr},t}(x);var yt={now:function(){return(yt.delegate||Date).now()},delegate:void 0};var Et=function(e){ie(t,e);function t(r,o,n){r===void 0&&(r=1/0),o===void 0&&(o=1/0),n===void 0&&(n=yt);var i=e.call(this)||this;return i._bufferSize=r,i._windowTime=o,i._timestampProvider=n,i._buffer=[],i._infiniteTimeWindow=!0,i._infiniteTimeWindow=o===1/0,i._bufferSize=Math.max(1,r),i._windowTime=Math.max(1,o),i}return t.prototype.next=function(r){var 
o=this,n=o.isStopped,i=o._buffer,s=o._infiniteTimeWindow,a=o._timestampProvider,c=o._windowTime;n||(i.push(r),!s&&i.push(a.now()+c)),this._trimBuffer(),e.prototype.next.call(this,r)},t.prototype._subscribe=function(r){this._throwIfClosed(),this._trimBuffer();for(var o=this._innerSubscribe(r),n=this,i=n._infiniteTimeWindow,s=n._buffer,a=s.slice(),c=0;c0?e.prototype.requestAsyncId.call(this,r,o,n):(r.actions.push(this),r._scheduled||(r._scheduled=mt.requestAnimationFrame(function(){return r.flush(void 0)})))},t.prototype.recycleAsyncId=function(r,o,n){var i;if(n===void 0&&(n=0),n!=null?n>0:this.delay>0)return e.prototype.recycleAsyncId.call(this,r,o,n);var s=r.actions;o!=null&&((i=s[s.length-1])===null||i===void 0?void 0:i.id)!==o&&(mt.cancelAnimationFrame(o),r._scheduled=void 0)},t}(Wt);var vo=function(e){ie(t,e);function t(){return e!==null&&e.apply(this,arguments)||this}return t.prototype.flush=function(r){this._active=!0;var o=this._scheduled;this._scheduled=void 0;var n=this.actions,i;r=r||n.shift();do if(i=r.execute(r.state,r.delay))break;while((r=n[0])&&r.id===o&&n.shift());if(this._active=!1,i){for(;(r=n[0])&&r.id===o&&n.shift();)r.unsubscribe();throw i}},t}(Ut);var Te=new vo(bo);var T=new j(function(e){return e.complete()});function Nt(e){return e&&C(e.schedule)}function Mr(e){return e[e.length-1]}function Qe(e){return C(Mr(e))?e.pop():void 0}function Oe(e){return Nt(Mr(e))?e.pop():void 0}function Dt(e,t){return typeof Mr(e)=="number"?e.pop():t}var lt=function(e){return e&&typeof e.length=="number"&&typeof e!="function"};function Vt(e){return C(e==null?void 0:e.then)}function zt(e){return C(e[pt])}function qt(e){return Symbol.asyncIterator&&C(e==null?void 0:e[Symbol.asyncIterator])}function Kt(e){return new TypeError("You provided "+(e!==null&&typeof e=="object"?"an invalid object":"'"+e+"'")+" where a stream was expected. 
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.")}function ki(){return typeof Symbol!="function"||!Symbol.iterator?"@@iterator":Symbol.iterator}var Qt=ki();function Yt(e){return C(e==null?void 0:e[Qt])}function Bt(e){return oo(this,arguments,function(){var r,o,n,i;return Rt(this,function(s){switch(s.label){case 0:r=e.getReader(),s.label=1;case 1:s.trys.push([1,,9,10]),s.label=2;case 2:return[4,Ze(r.read())];case 3:return o=s.sent(),n=o.value,i=o.done,i?[4,Ze(void 0)]:[3,5];case 4:return[2,s.sent()];case 5:return[4,Ze(n)];case 6:return[4,s.sent()];case 7:return s.sent(),[3,2];case 8:return[3,10];case 9:return r.releaseLock(),[7];case 10:return[2]}})})}function Gt(e){return C(e==null?void 0:e.getReader)}function W(e){if(e instanceof j)return e;if(e!=null){if(zt(e))return Hi(e);if(lt(e))return $i(e);if(Vt(e))return Ri(e);if(qt(e))return go(e);if(Yt(e))return Ii(e);if(Gt(e))return Pi(e)}throw Kt(e)}function Hi(e){return new j(function(t){var r=e[pt]();if(C(r.subscribe))return r.subscribe(t);throw new TypeError("Provided object does not correctly implement Symbol.observable")})}function $i(e){return new j(function(t){for(var r=0;r=2;return function(o){return o.pipe(e?L(function(n,i){return e(n,i,o)}):de,ge(1),r?He(t):Io(function(){return new Xt}))}}function Po(){for(var e=[],t=0;t=2,!0))}function le(e){e===void 0&&(e={});var t=e.connector,r=t===void 0?function(){return new x}:t,o=e.resetOnError,n=o===void 0?!0:o,i=e.resetOnComplete,s=i===void 0?!0:i,a=e.resetOnRefCountZero,c=a===void 0?!0:a;return function(p){var m,f,u,d=0,b=!1,_=!1,re=function(){f==null||f.unsubscribe(),f=void 0},Z=function(){re(),m=u=void 0,b=_=!1},Y=function(){var A=m;Z(),A==null||A.unsubscribe()};return g(function(A,it){d++,!_&&!b&&re();var Ne=u=u!=null?u:r();it.add(function(){d--,d===0&&!_&&!b&&(f=kr(Y,c))}),Ne.subscribe(it),!m&&d>0&&(m=new tt({next:function(Ie){return 
Ne.next(Ie)},error:function(Ie){_=!0,re(),f=kr(Z,n,Ie),Ne.error(Ie)},complete:function(){b=!0,re(),f=kr(Z,s),Ne.complete()}}),W(A).subscribe(m))})(p)}}function kr(e,t){for(var r=[],o=2;oe.next(document)),e}function z(e,t=document){return Array.from(t.querySelectorAll(e))}function N(e,t=document){let r=ce(e,t);if(typeof r=="undefined")throw new ReferenceError(`Missing element: expected "${e}" to be present`);return r}function ce(e,t=document){return t.querySelector(e)||void 0}function Re(){return document.activeElement instanceof HTMLElement&&document.activeElement||void 0}var ea=M(h(document.body,"focusin"),h(document.body,"focusout")).pipe(ke(1),V(void 0),l(()=>Re()||document.body),B(1));function er(e){return ea.pipe(l(t=>e.contains(t)),G())}function Je(e){return{x:e.offsetLeft,y:e.offsetTop}}function Uo(e){return M(h(window,"load"),h(window,"resize")).pipe(Ae(0,Te),l(()=>Je(e)),V(Je(e)))}function tr(e){return{x:e.scrollLeft,y:e.scrollTop}}function dt(e){return M(h(e,"scroll"),h(window,"resize")).pipe(Ae(0,Te),l(()=>tr(e)),V(tr(e)))}function No(e,t){if(typeof t=="string"||typeof t=="number")e.innerHTML+=t.toString();else if(t instanceof Node)e.appendChild(t);else if(Array.isArray(t))for(let r of t)No(e,r)}function O(e,t,...r){let o=document.createElement(e);if(t)for(let n of Object.keys(t))typeof t[n]!="undefined"&&(typeof t[n]!="boolean"?o.setAttribute(n,t[n]):o.setAttribute(n,""));for(let n of r)No(o,n);return o}function rr(e){if(e>999){let t=+((e-950)%1e3>99);return`${((e+1e-6)/1e3).toFixed(t)}k`}else return e.toString()}function ht(e){let t=O("script",{src:e});return $(()=>(document.head.appendChild(t),M(h(t,"load"),h(t,"error").pipe(v(()=>St(()=>new ReferenceError(`Invalid script: ${e}`))))).pipe(l(()=>{}),k(()=>document.head.removeChild(t)),ge(1))))}var Do=new x,ta=$(()=>typeof ResizeObserver=="undefined"?ht("https://unpkg.com/resize-observer-polyfill"):H(void 0)).pipe(l(()=>new ResizeObserver(e=>{for(let t of 
e)Do.next(t)})),v(e=>M(Ve,H(e)).pipe(k(()=>e.disconnect()))),B(1));function he(e){return{width:e.offsetWidth,height:e.offsetHeight}}function xe(e){return ta.pipe(w(t=>t.observe(e)),v(t=>Do.pipe(L(({target:r})=>r===e),k(()=>t.unobserve(e)),l(()=>he(e)))),V(he(e)))}function bt(e){return{width:e.scrollWidth,height:e.scrollHeight}}function or(e){let t=e.parentElement;for(;t&&(e.scrollWidth<=t.scrollWidth&&e.scrollHeight<=t.scrollHeight);)t=(e=t).parentElement;return t?e:void 0}var Vo=new x,ra=$(()=>H(new IntersectionObserver(e=>{for(let t of e)Vo.next(t)},{threshold:0}))).pipe(v(e=>M(Ve,H(e)).pipe(k(()=>e.disconnect()))),B(1));function nr(e){return ra.pipe(w(t=>t.observe(e)),v(t=>Vo.pipe(L(({target:r})=>r===e),k(()=>t.unobserve(e)),l(({isIntersecting:r})=>r))))}function zo(e,t=16){return dt(e).pipe(l(({y:r})=>{let o=he(e),n=bt(e);return r>=n.height-o.height-t}),G())}var ir={drawer:N("[data-md-toggle=drawer]"),search:N("[data-md-toggle=search]")};function qo(e){return ir[e].checked}function Ke(e,t){ir[e].checked!==t&&ir[e].click()}function We(e){let t=ir[e];return h(t,"change").pipe(l(()=>t.checked),V(t.checked))}function oa(e,t){switch(e.constructor){case HTMLInputElement:return e.type==="radio"?/^Arrow/.test(t):!0;case HTMLSelectElement:case HTMLTextAreaElement:return!0;default:return e.isContentEditable}}function na(){return M(h(window,"compositionstart").pipe(l(()=>!0)),h(window,"compositionend").pipe(l(()=>!1))).pipe(V(!1))}function Ko(){let e=h(window,"keydown").pipe(L(t=>!(t.metaKey||t.ctrlKey)),l(t=>({mode:qo("search")?"search":"global",type:t.key,claim(){t.preventDefault(),t.stopPropagation()}})),L(({mode:t,type:r})=>{if(t==="global"){let o=Re();if(typeof o!="undefined")return!oa(o,r)}return!0}),le());return na().pipe(v(t=>t?T:e))}function fe(){return new URL(location.href)}function ot(e){location.href=e.href}function Qo(){return new x}function Yo(){return location.hash.slice(1)}function Pr(e){let 
t=O("a",{href:e});t.addEventListener("click",r=>r.stopPropagation()),t.click()}function ia(e){return M(h(window,"hashchange"),e).pipe(l(Yo),V(Yo()),L(t=>t.length>0),B(1))}function Bo(e){return ia(e).pipe(l(t=>ce(`[id="${t}"]`)),L(t=>typeof t!="undefined"))}function Fr(e){let t=matchMedia(e);return Zt(r=>t.addListener(()=>r(t.matches))).pipe(V(t.matches))}function Go(){let e=matchMedia("print");return M(h(window,"beforeprint").pipe(l(()=>!0)),h(window,"afterprint").pipe(l(()=>!1))).pipe(V(e.matches))}function jr(e,t){return e.pipe(v(r=>r?t():T))}function ar(e,t={credentials:"same-origin"}){return me(fetch(`${e}`,t)).pipe(pe(()=>T),v(r=>r.status!==200?St(()=>new Error(r.statusText)):H(r)))}function Ue(e,t){return ar(e,t).pipe(v(r=>r.json()),B(1))}function Jo(e,t){let r=new DOMParser;return ar(e,t).pipe(v(o=>o.text()),l(o=>r.parseFromString(o,"text/xml")),B(1))}function Xo(){return{x:Math.max(0,scrollX),y:Math.max(0,scrollY)}}function Zo(){return M(h(window,"scroll",{passive:!0}),h(window,"resize",{passive:!0})).pipe(l(Xo),V(Xo()))}function en(){return{width:innerWidth,height:innerHeight}}function tn(){return h(window,"resize",{passive:!0}).pipe(l(en),V(en()))}function rn(){return Q([Zo(),tn()]).pipe(l(([e,t])=>({offset:e,size:t})),B(1))}function sr(e,{viewport$:t,header$:r}){let o=t.pipe(X("size")),n=Q([o,r]).pipe(l(()=>Je(e)));return Q([r,t,n]).pipe(l(([{height:i},{offset:s,size:a},{x:c,y:p}])=>({offset:{x:s.x-c,y:s.y-p+i},size:a})))}function aa(e){return h(e,"message",t=>t.data)}function sa(e){let t=new x;return t.subscribe(r=>e.postMessage(r)),t}function on(e,t=new Worker(e)){let r=aa(t),o=sa(t),n=new x;n.subscribe(o);let i=o.pipe(J(),ee(!0));return n.pipe(J(),qe(r.pipe(K(i))),le())}var ca=N("#__config"),vt=JSON.parse(ca.textContent);vt.base=`${new URL(vt.base,fe())}`;function ue(){return vt}function te(e){return vt.features.includes(e)}function be(e,t){return typeof t!="undefined"?vt.translations[e].replace("#",t.toString()):vt.translations[e]}function 
ye(e,t=document){return N(`[data-md-component=${e}]`,t)}function ne(e,t=document){return z(`[data-md-component=${e}]`,t)}function pa(e){let t=N(".md-typeset > :first-child",e);return h(t,"click",{once:!0}).pipe(l(()=>N(".md-typeset",e)),l(r=>({hash:__md_hash(r.innerHTML)})))}function nn(e){if(!te("announce.dismiss")||!e.childElementCount)return T;if(!e.hidden){let t=N(".md-typeset",e);__md_hash(t.innerHTML)===__md_get("__announce")&&(e.hidden=!0)}return $(()=>{let t=new x;return t.subscribe(({hash:r})=>{e.hidden=!0,__md_set("__announce",r)}),pa(e).pipe(w(r=>t.next(r)),k(()=>t.complete()),l(r=>I({ref:e},r)))})}function ma(e,{target$:t}){return t.pipe(l(r=>({hidden:r!==e})))}function an(e,t){let r=new x;return r.subscribe(({hidden:o})=>{e.hidden=o}),ma(e,t).pipe(w(o=>r.next(o)),k(()=>r.complete()),l(o=>I({ref:e},o)))}function la(e,t){let r=$(()=>Q([Uo(e),dt(t)])).pipe(l(([{x:o,y:n},i])=>{let{width:s,height:a}=he(e);return{x:o-i.x+s/2,y:n-i.y+a/2}}));return er(e).pipe(v(o=>r.pipe(l(n=>({active:o,offset:n})),ge(+!o||1/0))))}function sn(e,t,{target$:r}){let[o,n]=Array.from(e.children);return $(()=>{let i=new x,s=i.pipe(J(),ee(!0));return 
i.subscribe({next({offset:a}){e.style.setProperty("--md-tooltip-x",`${a.x}px`),e.style.setProperty("--md-tooltip-y",`${a.y}px`)},complete(){e.style.removeProperty("--md-tooltip-x"),e.style.removeProperty("--md-tooltip-y")}}),nr(e).pipe(K(s)).subscribe(a=>{e.toggleAttribute("data-md-visible",a)}),M(i.pipe(L(({active:a})=>a)),i.pipe(ke(250),L(({active:a})=>!a))).subscribe({next({active:a}){a?e.prepend(o):o.remove()},complete(){e.prepend(o)}}),i.pipe(Ae(16,Te)).subscribe(({active:a})=>{o.classList.toggle("md-tooltip--active",a)}),i.pipe(Rr(125,Te),L(()=>!!e.offsetParent),l(()=>e.offsetParent.getBoundingClientRect()),l(({x:a})=>a)).subscribe({next(a){a?e.style.setProperty("--md-tooltip-0",`${-a}px`):e.style.removeProperty("--md-tooltip-0")},complete(){e.style.removeProperty("--md-tooltip-0")}}),h(n,"click").pipe(K(s),L(a=>!(a.metaKey||a.ctrlKey))).subscribe(a=>{a.stopPropagation(),a.preventDefault()}),h(n,"mousedown").pipe(K(s),oe(i)).subscribe(([a,{active:c}])=>{var p;if(a.button!==0||a.metaKey||a.ctrlKey)a.preventDefault();else if(c){a.preventDefault();let m=e.parentElement.closest(".md-annotation");m instanceof HTMLElement?m.focus():(p=Re())==null||p.blur()}}),r.pipe(K(s),L(a=>a===o),ze(125)).subscribe(()=>e.focus()),la(e,t).pipe(w(a=>i.next(a)),k(()=>i.complete()),l(a=>I({ref:e},a)))})}function Wr(e){return O("div",{class:"md-tooltip",id:e},O("div",{class:"md-tooltip__inner md-typeset"}))}function cn(e,t){if(t=t?`${t}_annotation_${e}`:void 0,t){let r=t?`#${t}`:void 0;return O("aside",{class:"md-annotation",tabIndex:0},Wr(t),O("a",{href:r,class:"md-annotation__index",tabIndex:-1},O("span",{"data-md-annotation-id":e})))}else return O("aside",{class:"md-annotation",tabIndex:0},Wr(t),O("span",{class:"md-annotation__index",tabIndex:-1},O("span",{"data-md-annotation-id":e})))}function pn(e){return O("button",{class:"md-clipboard md-icon",title:be("clipboard.copy"),"data-clipboard-target":`#${e} > code`})}function Ur(e,t){let 
r=t&2,o=t&1,n=Object.keys(e.terms).filter(c=>!e.terms[c]).reduce((c,p)=>[...c,O("del",null,p)," "],[]).slice(0,-1),i=ue(),s=new URL(e.location,i.base);te("search.highlight")&&s.searchParams.set("h",Object.entries(e.terms).filter(([,c])=>c).reduce((c,[p])=>`${c} ${p}`.trim(),""));let{tags:a}=ue();return O("a",{href:`${s}`,class:"md-search-result__link",tabIndex:-1},O("article",{class:"md-search-result__article md-typeset","data-md-score":e.score.toFixed(2)},r>0&&O("div",{class:"md-search-result__icon md-icon"}),r>0&&O("h1",null,e.title),r<=0&&O("h2",null,e.title),o>0&&e.text.length>0&&e.text,e.tags&&e.tags.map(c=>{let p=a?c in a?`md-tag-icon md-tag--${a[c]}`:"md-tag-icon":"";return O("span",{class:`md-tag ${p}`},c)}),o>0&&n.length>0&&O("p",{class:"md-search-result__terms"},be("search.result.term.missing"),": ",...n)))}function mn(e){let t=e[0].score,r=[...e],o=ue(),n=r.findIndex(m=>!`${new URL(m.location,o.base)}`.includes("#")),[i]=r.splice(n,1),s=r.findIndex(m=>m.scoreUr(m,1)),...c.length?[O("details",{class:"md-search-result__more"},O("summary",{tabIndex:-1},O("div",null,c.length>0&&c.length===1?be("search.result.more.one"):be("search.result.more.other",c.length))),...c.map(m=>Ur(m,1)))]:[]];return O("li",{class:"md-search-result__item"},p)}function ln(e){return O("ul",{class:"md-source__facts"},Object.entries(e).map(([t,r])=>O("li",{class:`md-source__fact md-source__fact--${t}`},typeof r=="number"?rr(r):r)))}function Nr(e){let t=`tabbed-control tabbed-control--${e}`;return O("div",{class:t,hidden:!0},O("button",{class:"tabbed-button",tabIndex:-1,"aria-hidden":"true"}))}function fn(e){return O("div",{class:"md-typeset__scrollwrap"},O("div",{class:"md-typeset__table"},e))}function fa(e){let t=ue(),r=new URL(`../${e.version}/`,t.base);return O("li",{class:"md-version__item"},O("a",{href:`${r}`,class:"md-version__link"},e.title))}function un(e,t){return 
O("div",{class:"md-version"},O("button",{class:"md-version__current","aria-label":be("select.version")},t.title),O("ul",{class:"md-version__list"},e.map(fa)))}function ua(e){return e.tagName==="CODE"?z(".c, .c1, .cm",e):[e]}function da(e){let t=[];for(let r of ua(e)){let o=[],n=document.createNodeIterator(r,NodeFilter.SHOW_TEXT);for(let i=n.nextNode();i;i=n.nextNode())o.push(i);for(let i of o){let s;for(;s=/(\(\d+\))(!)?/.exec(i.textContent);){let[,a,c]=s;if(typeof c=="undefined"){let p=i.splitText(s.index);i=p.splitText(a.length),t.push(p)}else{i.textContent=a,t.push(i);break}}}}return t}function dn(e,t){t.append(...Array.from(e.childNodes))}function cr(e,t,{target$:r,print$:o}){let n=t.closest("[id]"),i=n==null?void 0:n.id,s=new Map;for(let a of da(t)){let[,c]=a.textContent.match(/\((\d+)\)/);ce(`:scope > li:nth-child(${c})`,e)&&(s.set(c,cn(c,i)),a.replaceWith(s.get(c)))}return s.size===0?T:$(()=>{let a=new x,c=a.pipe(J(),ee(!0)),p=[];for(let[m,f]of s)p.push([N(".md-typeset",f),N(`:scope > li:nth-child(${m})`,e)]);return o.pipe(K(c)).subscribe(m=>{e.hidden=!m,e.classList.toggle("md-annotation-list",m);for(let[f,u]of p)m?dn(f,u):dn(u,f)}),M(...[...s].map(([,m])=>sn(m,t,{target$:r}))).pipe(k(()=>a.complete()),le())})}function hn(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return hn(t)}}function bn(e,t){return $(()=>{let r=hn(e);return typeof r!="undefined"?cr(r,e,t):T})}var gn=$t(Vr());var ha=0;function xn(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return xn(t)}}function vn(e){return xe(e).pipe(l(({width:t})=>({scrollable:bt(e).width>t})),X("scrollable"))}function yn(e,t){let{matches:r}=matchMedia("(hover)"),o=$(()=>{let n=new 
x;if(n.subscribe(({scrollable:s})=>{s&&r?e.setAttribute("tabindex","0"):e.removeAttribute("tabindex")}),gn.default.isSupported()&&(e.closest(".copy")||te("content.code.copy")&&!e.closest(".no-copy"))){let s=e.closest("pre");s.id=`__code_${ha++}`,s.insertBefore(pn(s.id),e)}let i=e.closest(".highlight");if(i instanceof HTMLElement){let s=xn(i);if(typeof s!="undefined"&&(i.classList.contains("annotate")||te("content.code.annotate"))){let a=cr(s,e,t);return vn(e).pipe(w(c=>n.next(c)),k(()=>n.complete()),l(c=>I({ref:e},c)),qe(xe(i).pipe(l(({width:c,height:p})=>c&&p),G(),v(c=>c?a:T))))}}return vn(e).pipe(w(s=>n.next(s)),k(()=>n.complete()),l(s=>I({ref:e},s)))});return te("content.lazy")?nr(e).pipe(L(n=>n),ge(1),v(()=>o)):o}function ba(e,{target$:t,print$:r}){let o=!0;return M(t.pipe(l(n=>n.closest("details:not([open])")),L(n=>e===n),l(()=>({action:"open",reveal:!0}))),r.pipe(L(n=>n||!o),w(()=>o=e.open),l(n=>({action:n?"open":"close"}))))}function En(e,t){return $(()=>{let r=new x;return r.subscribe(({action:o,reveal:n})=>{e.toggleAttribute("open",o==="open"),n&&e.scrollIntoView()}),ba(e,t).pipe(w(o=>r.next(o)),k(()=>r.complete()),l(o=>I({ref:e},o)))})}var wn=".node circle,.node ellipse,.node path,.node polygon,.node rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}marker{fill:var(--md-mermaid-edge-color)!important}.edgeLabel .label rect{fill:#0000}.label{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.label foreignObject{line-height:normal;overflow:visible}.label div .edgeLabel{color:var(--md-mermaid-label-fg-color)}.edgeLabel,.edgeLabel rect,.label div .edgeLabel{background-color:var(--md-mermaid-label-bg-color)}.edgeLabel,.edgeLabel rect{fill:var(--md-mermaid-label-bg-color);color:var(--md-mermaid-edge-color)}.edgePath .path,.flowchart-link{stroke:var(--md-mermaid-edge-color);stroke-width:.05rem}.edgePath .arrowheadPath{fill:var(--md-mermaid-edge-color);stroke:none}.cluster 
rect{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}.cluster span{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}g #flowchart-circleEnd,g #flowchart-circleStart,g #flowchart-crossEnd,g #flowchart-crossStart,g #flowchart-pointEnd,g #flowchart-pointStart{stroke:none}g.classGroup line,g.classGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.classGroup text{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.classLabel .box{fill:var(--md-mermaid-label-bg-color);background-color:var(--md-mermaid-label-bg-color);opacity:1}.classLabel .label{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node .divider{stroke:var(--md-mermaid-node-fg-color)}.relation{stroke:var(--md-mermaid-edge-color)}.cardinality{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.cardinality text{fill:inherit!important}defs #classDiagram-compositionEnd,defs #classDiagram-compositionStart,defs #classDiagram-dependencyEnd,defs #classDiagram-dependencyStart,defs #classDiagram-extensionEnd,defs #classDiagram-extensionStart{fill:var(--md-mermaid-edge-color)!important;stroke:var(--md-mermaid-edge-color)!important}defs #classDiagram-aggregationEnd,defs #classDiagram-aggregationStart{fill:var(--md-mermaid-label-bg-color)!important;stroke:var(--md-mermaid-edge-color)!important}g.stateGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.stateGroup .state-title{fill:var(--md-mermaid-label-fg-color)!important;font-family:var(--md-mermaid-font-family)}g.stateGroup .composit{fill:var(--md-mermaid-label-bg-color)}.nodeLabel{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node circle.state-end,.node 
circle.state-start,.start-state{fill:var(--md-mermaid-edge-color);stroke:none}.end-state-inner,.end-state-outer{fill:var(--md-mermaid-edge-color)}.end-state-inner,.node circle.state-end{stroke:var(--md-mermaid-label-bg-color)}.transition{stroke:var(--md-mermaid-edge-color)}[id^=state-fork] rect,[id^=state-join] rect{fill:var(--md-mermaid-edge-color)!important;stroke:none!important}.statediagram-cluster.statediagram-cluster .inner{fill:var(--md-default-bg-color)}.statediagram-cluster rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}.statediagram-state rect.divider{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}defs #statediagram-barbEnd{stroke:var(--md-mermaid-edge-color)}.attributeBoxEven,.attributeBoxOdd{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}.entityBox{fill:var(--md-mermaid-label-bg-color);stroke:var(--md-mermaid-node-fg-color)}.entityLabel{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.relationshipLabelBox{fill:var(--md-mermaid-label-bg-color);fill-opacity:1;background-color:var(--md-mermaid-label-bg-color);opacity:1}.relationshipLabel{fill:var(--md-mermaid-label-fg-color)}.relationshipLine{stroke:var(--md-mermaid-edge-color)}defs #ONE_OR_MORE_END *,defs #ONE_OR_MORE_START *,defs #ONLY_ONE_END *,defs #ONLY_ONE_START *,defs #ZERO_OR_MORE_END *,defs #ZERO_OR_MORE_START *,defs #ZERO_OR_ONE_END *,defs #ZERO_OR_ONE_START *{stroke:var(--md-mermaid-edge-color)!important}defs #ZERO_OR_MORE_END circle,defs #ZERO_OR_MORE_START circle{fill:var(--md-mermaid-label-bg-color)}.actor{fill:var(--md-mermaid-sequence-actor-bg-color);stroke:var(--md-mermaid-sequence-actor-border-color)}text.actor>tspan{fill:var(--md-mermaid-sequence-actor-fg-color);font-family:var(--md-mermaid-font-family)}line{stroke:var(--md-mermaid-sequence-actor-line-color)}.actor-man circle,.actor-man 
line{fill:var(--md-mermaid-sequence-actorman-bg-color);stroke:var(--md-mermaid-sequence-actorman-line-color)}.messageLine0,.messageLine1{stroke:var(--md-mermaid-sequence-message-line-color)}.note{fill:var(--md-mermaid-sequence-note-bg-color);stroke:var(--md-mermaid-sequence-note-border-color)}.loopText,.loopText>tspan,.messageText,.noteText>tspan{stroke:none;font-family:var(--md-mermaid-font-family)!important}.messageText{fill:var(--md-mermaid-sequence-message-fg-color)}.loopText,.loopText>tspan{fill:var(--md-mermaid-sequence-loop-fg-color)}.noteText>tspan{fill:var(--md-mermaid-sequence-note-fg-color)}#arrowhead path{fill:var(--md-mermaid-sequence-message-line-color);stroke:none}.loopLine{fill:var(--md-mermaid-sequence-loop-bg-color);stroke:var(--md-mermaid-sequence-loop-border-color)}.labelBox{fill:var(--md-mermaid-sequence-label-bg-color);stroke:none}.labelText,.labelText>span{fill:var(--md-mermaid-sequence-label-fg-color);font-family:var(--md-mermaid-font-family)}.sequenceNumber{fill:var(--md-mermaid-sequence-number-fg-color)}rect.rect{fill:var(--md-mermaid-sequence-box-bg-color);stroke:none}rect.rect+text.text{fill:var(--md-mermaid-sequence-box-fg-color)}defs #sequencenumber{fill:var(--md-mermaid-sequence-number-bg-color)!important}";var zr,ga=0;function xa(){return typeof mermaid=="undefined"||mermaid instanceof Element?ht("https://unpkg.com/mermaid@9.4.3/dist/mermaid.min.js"):H(void 0)}function Sn(e){return e.classList.remove("mermaid"),zr||(zr=xa().pipe(w(()=>mermaid.initialize({startOnLoad:!1,themeCSS:wn,sequence:{actorFontSize:"16px",messageFontSize:"16px",noteFontSize:"16px"}})),l(()=>{}),B(1))),zr.subscribe(()=>{e.classList.add("mermaid");let t=`__mermaid_${ga++}`,r=O("div",{class:"mermaid"}),o=e.textContent;mermaid.mermaidAPI.render(t,o,(n,i)=>{let s=r.attachShadow({mode:"closed"});s.innerHTML=n,e.replaceWith(r),i==null||i(s)})}),zr.pipe(l(()=>({ref:e})))}var Tn=O("table");function On(e){return e.replaceWith(Tn),Tn.replaceWith(fn(e)),H({ref:e})}function 
ya(e){let t=z(":scope > input",e),r=t.find(o=>o.checked)||t[0];return M(...t.map(o=>h(o,"change").pipe(l(()=>N(`label[for="${o.id}"]`))))).pipe(V(N(`label[for="${r.id}"]`)),l(o=>({active:o})))}function Mn(e,{viewport$:t}){let r=Nr("prev");e.append(r);let o=Nr("next");e.append(o);let n=N(".tabbed-labels",e);return $(()=>{let i=new x,s=i.pipe(J(),ee(!0));return Q([i,xe(e)]).pipe(Ae(1,Te),K(s)).subscribe({next([{active:a},c]){let p=Je(a),{width:m}=he(a);e.style.setProperty("--md-indicator-x",`${p.x}px`),e.style.setProperty("--md-indicator-width",`${m}px`);let f=tr(n);(p.xf.x+c.width)&&n.scrollTo({left:Math.max(0,p.x-16),behavior:"smooth"})},complete(){e.style.removeProperty("--md-indicator-x"),e.style.removeProperty("--md-indicator-width")}}),Q([dt(n),xe(n)]).pipe(K(s)).subscribe(([a,c])=>{let p=bt(n);r.hidden=a.x<16,o.hidden=a.x>p.width-c.width-16}),M(h(r,"click").pipe(l(()=>-1)),h(o,"click").pipe(l(()=>1))).pipe(K(s)).subscribe(a=>{let{width:c}=he(n);n.scrollBy({left:c*a,behavior:"smooth"})}),te("content.tabs.link")&&i.pipe(je(1),oe(t)).subscribe(([{active:a},{offset:c}])=>{let p=a.innerText.trim();if(a.hasAttribute("data-md-switching"))a.removeAttribute("data-md-switching");else{let m=e.offsetTop-c.y;for(let u of z("[data-tabs]"))for(let d of z(":scope > input",u)){let b=N(`label[for="${d.id}"]`);if(b!==a&&b.innerText.trim()===p){b.setAttribute("data-md-switching",""),d.click();break}}window.scrollTo({top:e.offsetTop-m});let f=__md_get("__tabs")||[];__md_set("__tabs",[...new Set([p,...f])])}}),i.pipe(K(s)).subscribe(()=>{for(let a of z("audio, video",e))a.pause()}),ya(e).pipe(w(a=>i.next(a)),k(()=>i.complete()),l(a=>I({ref:e},a)))}).pipe(rt(ae))}function Ln(e,{viewport$:t,target$:r,print$:o}){return M(...z(".annotate:not(.highlight)",e).map(n=>bn(n,{target$:r,print$:o})),...z("pre:not(.mermaid) > 
code",e).map(n=>yn(n,{target$:r,print$:o})),...z("pre.mermaid",e).map(n=>Sn(n)),...z("table:not([class])",e).map(n=>On(n)),...z("details",e).map(n=>En(n,{target$:r,print$:o})),...z("[data-tabs]",e).map(n=>Mn(n,{viewport$:t})))}function Ea(e,{alert$:t}){return t.pipe(v(r=>M(H(!0),H(!1).pipe(ze(2e3))).pipe(l(o=>({message:r,active:o})))))}function _n(e,t){let r=N(".md-typeset",e);return $(()=>{let o=new x;return o.subscribe(({message:n,active:i})=>{e.classList.toggle("md-dialog--active",i),r.textContent=n}),Ea(e,t).pipe(w(n=>o.next(n)),k(()=>o.complete()),l(n=>I({ref:e},n)))})}function wa({viewport$:e}){if(!te("header.autohide"))return H(!1);let t=e.pipe(l(({offset:{y:n}})=>n),Ce(2,1),l(([n,i])=>[nMath.abs(i-n.y)>100),l(([,[n]])=>n),G()),o=We("search");return Q([e,o]).pipe(l(([{offset:n},i])=>n.y>400&&!i),G(),v(n=>n?r:H(!1)),V(!1))}function An(e,t){return $(()=>Q([xe(e),wa(t)])).pipe(l(([{height:r},o])=>({height:r,hidden:o})),G((r,o)=>r.height===o.height&&r.hidden===o.hidden),B(1))}function Cn(e,{header$:t,main$:r}){return $(()=>{let o=new x,n=o.pipe(J(),ee(!0));return o.pipe(X("active"),Ge(t)).subscribe(([{active:i},{hidden:s}])=>{e.classList.toggle("md-header--shadow",i&&!s),e.hidden=s}),r.subscribe(o),t.pipe(K(n),l(i=>I({ref:e},i)))})}function Sa(e,{viewport$:t,header$:r}){return sr(e,{viewport$:t,header$:r}).pipe(l(({offset:{y:o}})=>{let{height:n}=he(e);return{active:o>=n}}),X("active"))}function kn(e,t){return $(()=>{let r=new x;r.subscribe({next({active:n}){e.classList.toggle("md-header__title--active",n)},complete(){e.classList.remove("md-header__title--active")}});let o=ce(".md-content h1");return typeof o=="undefined"?T:Sa(o,t).pipe(w(n=>r.next(n)),k(()=>r.complete()),l(n=>I({ref:e},n)))})}function Hn(e,{viewport$:t,header$:r}){let o=r.pipe(l(({height:i})=>i),G()),n=o.pipe(v(()=>xe(e).pipe(l(({height:i})=>({top:e.offsetTop,bottom:e.offsetTop+i})),X("bottom"))));return 
Q([o,n,t]).pipe(l(([i,{top:s,bottom:a},{offset:{y:c},size:{height:p}}])=>(p=Math.max(0,p-Math.max(0,s-c,i)-Math.max(0,p+c-a)),{offset:s-i,height:p,active:s-i<=c})),G((i,s)=>i.offset===s.offset&&i.height===s.height&&i.active===s.active))}function Ta(e){let t=__md_get("__palette")||{index:e.findIndex(r=>matchMedia(r.getAttribute("data-md-color-media")).matches)};return H(...e).pipe(se(r=>h(r,"change").pipe(l(()=>r))),V(e[Math.max(0,t.index)]),l(r=>({index:e.indexOf(r),color:{scheme:r.getAttribute("data-md-color-scheme"),primary:r.getAttribute("data-md-color-primary"),accent:r.getAttribute("data-md-color-accent")}})),B(1))}function $n(e){let t=O("meta",{name:"theme-color"});document.head.appendChild(t);let r=O("meta",{name:"color-scheme"});return document.head.appendChild(r),$(()=>{let o=new x;o.subscribe(i=>{document.body.setAttribute("data-md-color-switching","");for(let[s,a]of Object.entries(i.color))document.body.setAttribute(`data-md-color-${s}`,a);for(let s=0;s{let i=ye("header"),s=window.getComputedStyle(i);return r.content=s.colorScheme,s.backgroundColor.match(/\d+/g).map(a=>(+a).toString(16).padStart(2,"0")).join("")})).subscribe(i=>t.content=`#${i}`),o.pipe(_e(ae)).subscribe(()=>{document.body.removeAttribute("data-md-color-switching")});let n=z("input",e);return Ta(n).pipe(w(i=>o.next(i)),k(()=>o.complete()),l(i=>I({ref:e},i)))})}var qr=$t(Vr());function Oa(e){e.setAttribute("data-md-copying","");let t=e.innerText;return e.removeAttribute("data-md-copying"),t}function Rn({alert$:e}){qr.default.isSupported()&&new j(t=>{new qr.default("[data-clipboard-target], [data-clipboard-text]",{text:r=>r.getAttribute("data-clipboard-text")||Oa(N(r.getAttribute("data-clipboard-target")))}).on("success",r=>t.next(r))}).pipe(w(t=>{t.trigger.focus()}),l(()=>be("clipboard.copied"))).subscribe(e)}function Ma(e){if(e.length<2)return[""];let[t,r]=[...e].sort((n,i)=>n.length-i.length).map(n=>n.replace(/[^/]+$/,"")),o=0;if(t===r)o=t.length;else 
for(;t.charCodeAt(o)===r.charCodeAt(o);)o++;return e.map(n=>n.replace(t.slice(0,o),""))}function pr(e){let t=__md_get("__sitemap",sessionStorage,e);if(t)return H(t);{let r=ue();return Jo(new URL("sitemap.xml",e||r.base)).pipe(l(o=>Ma(z("loc",o).map(n=>n.textContent))),pe(()=>T),He([]),w(o=>__md_set("__sitemap",o,sessionStorage,e)))}}function In({location$:e,viewport$:t}){let r=ue();if(location.protocol==="file:")return T;let o=pr().pipe(l(p=>p.map(m=>`${new URL(m,r.base)}`))),n=h(document.body,"click").pipe(oe(o),v(([p,m])=>{if(!(p.target instanceof Element))return T;let f=p.target.closest("a");if(f===null)return T;if(f.target||p.metaKey||p.ctrlKey)return T;let u=new URL(f.href);return u.search=u.hash="",m.includes(`${u}`)?(p.preventDefault(),H(new URL(f.href))):T}),le());n.pipe(ge(1)).subscribe(()=>{let p=ce("link[rel=icon]");typeof p!="undefined"&&(p.href=p.href)}),h(window,"beforeunload").subscribe(()=>{history.scrollRestoration="auto"}),n.pipe(oe(t)).subscribe(([p,{offset:m}])=>{history.scrollRestoration="manual",history.replaceState(m,""),history.pushState(null,"",p)}),n.subscribe(e);let i=e.pipe(V(fe()),X("pathname"),je(1),v(p=>ar(p).pipe(pe(()=>(ot(p),T))))),s=new DOMParser,a=i.pipe(v(p=>p.text()),v(p=>{let m=s.parseFromString(p,"text/html");for(let u of["title","link[rel=canonical]","meta[name=author]","meta[name=description]","[data-md-component=announce]","[data-md-component=container]","[data-md-component=header-topic]","[data-md-component=outdated]","[data-md-component=logo]","[data-md-component=skip]",...te("navigation.tabs.sticky")?["[data-md-component=tabs]"]:[]]){let d=ce(u),b=ce(u,m);typeof d!="undefined"&&typeof b!="undefined"&&d.replaceWith(b)}let f=ye("container");return Fe(z("script",f)).pipe(v(u=>{let d=m.createElement("script");if(u.src){for(let b of u.getAttributeNames())d.setAttribute(b,u.getAttribute(b));return u.replaceWith(d),new j(b=>{d.onload=()=>b.complete()})}else return 
d.textContent=u.textContent,u.replaceWith(d),T}),J(),ee(m))}),le());return h(window,"popstate").pipe(l(fe)).subscribe(e),e.pipe(V(fe()),Ce(2,1),v(([p,m])=>p.pathname===m.pathname&&p.hash!==m.hash?H(m):T)).subscribe(p=>{var m,f;history.state!==null||!p.hash?window.scrollTo(0,(f=(m=history.state)==null?void 0:m.y)!=null?f:0):(history.scrollRestoration="auto",Pr(p.hash),history.scrollRestoration="manual")}),a.pipe(oe(e)).subscribe(([,p])=>{var m,f;history.state!==null||!p.hash?window.scrollTo(0,(f=(m=history.state)==null?void 0:m.y)!=null?f:0):Pr(p.hash)}),a.pipe(v(()=>t),X("offset"),ke(100)).subscribe(({offset:p})=>{history.replaceState(p,"")}),a}var jn=$t(Fn());function Wn(e){let t=e.separator.split("|").map(n=>n.replace(/(\(\?[!=<][^)]+\))/g,"").length===0?"\uFFFD":n).join("|"),r=new RegExp(t,"img"),o=(n,i,s)=>`${i}${s}`;return n=>{n=n.replace(/[\s*+\-:~^]+/g," ").trim();let i=new RegExp(`(^|${e.separator}|)(${n.replace(/[|\\{}()[\]^$+*?.-]/g,"\\$&").replace(r,"|")})`,"img");return s=>(0,jn.default)(s).replace(i,o).replace(/<\/mark>(\s+)]*>/img,"$1")}}function Lt(e){return e.type===1}function mr(e){return e.type===3}function Un(e,t){let r=on(e);return M(H(location.protocol!=="file:"),We("search")).pipe($e(o=>o),v(()=>t)).subscribe(({config:o,docs:n})=>r.next({type:0,data:{config:o,docs:n,options:{suggest:te("search.suggest")}}})),r}function Nn({document$:e}){let t=ue(),r=Ue(new URL("../versions.json",t.base)).pipe(pe(()=>T)),o=r.pipe(l(n=>{let[,i]=t.base.match(/([^/]+)\/?$/);return n.find(({version:s,aliases:a})=>s===i||a.includes(i))||n[0]}));r.pipe(l(n=>new Map(n.map(i=>[`${new URL(`../${i.version}/`,t.base)}`,i]))),v(n=>h(document.body,"click").pipe(L(i=>!i.metaKey&&!i.ctrlKey),oe(o),v(([i,s])=>{if(i.target instanceof Element){let a=i.target.closest("a");if(a&&!a.target&&n.has(a.href)){let c=a.href;return!i.target.closest(".md-version")&&n.get(c)===s?T:(i.preventDefault(),H(c))}}return T}),v(i=>{let{version:s}=n.get(i);return pr(new URL(i)).pipe(l(a=>{let 
p=fe().href.replace(t.base,"");return a.includes(p.split("#")[0])?new URL(`../${s}/${p}`,t.base):new URL(i)}))})))).subscribe(n=>ot(n)),Q([r,o]).subscribe(([n,i])=>{N(".md-header__topic").appendChild(un(n,i))}),e.pipe(v(()=>o)).subscribe(n=>{var s;let i=__md_get("__outdated",sessionStorage);if(i===null){i=!0;let a=((s=t.version)==null?void 0:s.default)||"latest";Array.isArray(a)||(a=[a]);e:for(let c of a)for(let p of n.aliases)if(new RegExp(c,"i").test(p)){i=!1;break e}__md_set("__outdated",i,sessionStorage)}if(i)for(let a of ne("outdated"))a.hidden=!1})}function ka(e,{worker$:t}){let{searchParams:r}=fe();r.has("q")&&(Ke("search",!0),e.value=r.get("q"),e.focus(),We("search").pipe($e(i=>!i)).subscribe(()=>{let i=new URL(location.href);i.searchParams.delete("q"),history.replaceState({},"",`${i}`)}));let o=er(e),n=M(t.pipe($e(Lt)),h(e,"keyup"),o).pipe(l(()=>e.value),G());return Q([n,o]).pipe(l(([i,s])=>({value:i,focus:s})),B(1))}function Dn(e,{worker$:t}){let r=new x,o=r.pipe(J(),ee(!0));Q([t.pipe($e(Lt)),r],(i,s)=>s).pipe(X("value")).subscribe(({value:i})=>t.next({type:2,data:i})),r.pipe(X("focus")).subscribe(({focus:i})=>{i&&Ke("search",i)}),h(e.form,"reset").pipe(K(o)).subscribe(()=>e.focus());let n=N("header [for=__search]");return h(n,"click").subscribe(()=>e.focus()),ka(e,{worker$:t}).pipe(w(i=>r.next(i)),k(()=>r.complete()),l(i=>I({ref:e},i)),B(1))}function Vn(e,{worker$:t,query$:r}){let o=new x,n=zo(e.parentElement).pipe(L(Boolean)),i=e.parentElement,s=N(":scope > :first-child",e),a=N(":scope > :last-child",e);We("search").subscribe(m=>a.setAttribute("role",m?"list":"presentation")),o.pipe(oe(r),Hr(t.pipe($e(Lt)))).subscribe(([{items:m},{value:f}])=>{switch(m.length){case 0:s.textContent=f.length?be("search.result.none"):be("search.result.placeholder");break;case 1:s.textContent=be("search.result.one");break;default:let u=rr(m.length);s.textContent=be("search.result.other",u)}});let 
c=o.pipe(w(()=>a.innerHTML=""),v(({items:m})=>M(H(...m.slice(0,10)),H(...m.slice(10)).pipe(Ce(4),Ir(n),v(([f])=>f)))),l(mn),le());return c.subscribe(m=>a.appendChild(m)),c.pipe(se(m=>{let f=ce("details",m);return typeof f=="undefined"?T:h(f,"toggle").pipe(K(o),l(()=>f))})).subscribe(m=>{m.open===!1&&m.offsetTop<=i.scrollTop&&i.scrollTo({top:m.offsetTop})}),t.pipe(L(mr),l(({data:m})=>m)).pipe(w(m=>o.next(m)),k(()=>o.complete()),l(m=>I({ref:e},m)))}function Ha(e,{query$:t}){return t.pipe(l(({value:r})=>{let o=fe();return o.hash="",r=r.replace(/\s+/g,"+").replace(/&/g,"%26").replace(/=/g,"%3D"),o.search=`q=${r}`,{url:o}}))}function zn(e,t){let r=new x,o=r.pipe(J(),ee(!0));return r.subscribe(({url:n})=>{e.setAttribute("data-clipboard-text",e.href),e.href=`${n}`}),h(e,"click").pipe(K(o)).subscribe(n=>n.preventDefault()),Ha(e,t).pipe(w(n=>r.next(n)),k(()=>r.complete()),l(n=>I({ref:e},n)))}function qn(e,{worker$:t,keyboard$:r}){let o=new x,n=ye("search-query"),i=M(h(n,"keydown"),h(n,"focus")).pipe(_e(ae),l(()=>n.value),G());return o.pipe(Ge(i),l(([{suggest:a},c])=>{let p=c.split(/([\s-]+)/);if(a!=null&&a.length&&p[p.length-1]){let m=a[a.length-1];m.startsWith(p[p.length-1])&&(p[p.length-1]=m)}else p.length=0;return p})).subscribe(a=>e.innerHTML=a.join("").replace(/\s/g," ")),r.pipe(L(({mode:a})=>a==="search")).subscribe(a=>{switch(a.type){case"ArrowRight":e.innerText.length&&n.selectionStart===n.value.length&&(n.value=e.innerText);break}}),t.pipe(L(mr),l(({data:a})=>a)).pipe(w(a=>o.next(a)),k(()=>o.complete()),l(()=>({ref:e})))}function Kn(e,{index$:t,keyboard$:r}){let o=ue();try{let n=Un(o.search,t),i=ye("search-query",e),s=ye("search-result",e);h(e,"click").pipe(L(({target:c})=>c instanceof Element&&!!c.closest("a"))).subscribe(()=>Ke("search",!1)),r.pipe(L(({mode:c})=>c==="search")).subscribe(c=>{let p=Re();switch(c.type){case"Enter":if(p===i){let m=new Map;for(let f of z(":first-child [href]",s)){let 
u=f.firstElementChild;m.set(f,parseFloat(u.getAttribute("data-md-score")))}if(m.size){let[[f]]=[...m].sort(([,u],[,d])=>d-u);f.click()}c.claim()}break;case"Escape":case"Tab":Ke("search",!1),i.blur();break;case"ArrowUp":case"ArrowDown":if(typeof p=="undefined")i.focus();else{let m=[i,...z(":not(details) > [href], summary, details[open] [href]",s)],f=Math.max(0,(Math.max(0,m.indexOf(p))+m.length+(c.type==="ArrowUp"?-1:1))%m.length);m[f].focus()}c.claim();break;default:i!==Re()&&i.focus()}}),r.pipe(L(({mode:c})=>c==="global")).subscribe(c=>{switch(c.type){case"f":case"s":case"/":i.focus(),i.select(),c.claim();break}});let a=Dn(i,{worker$:n});return M(a,Vn(s,{worker$:n,query$:a})).pipe(qe(...ne("search-share",e).map(c=>zn(c,{query$:a})),...ne("search-suggest",e).map(c=>qn(c,{worker$:n,keyboard$:r}))))}catch(n){return e.hidden=!0,Ve}}function Qn(e,{index$:t,location$:r}){return Q([t,r.pipe(V(fe()),L(o=>!!o.searchParams.get("h")))]).pipe(l(([o,n])=>Wn(o.config)(n.searchParams.get("h"))),l(o=>{var s;let n=new Map,i=document.createNodeIterator(e,NodeFilter.SHOW_TEXT);for(let a=i.nextNode();a;a=i.nextNode())if((s=a.parentElement)!=null&&s.offsetHeight){let c=a.textContent,p=o(c);p.length>c.length&&n.set(a,p)}for(let[a,c]of n){let{childNodes:p}=O("span",null,c);a.replaceWith(...Array.from(p))}return{ref:e,nodes:n}}))}function $a(e,{viewport$:t,main$:r}){let o=e.closest(".md-grid"),n=o.offsetTop-o.parentElement.offsetTop;return Q([r,t]).pipe(l(([{offset:i,height:s},{offset:{y:a}}])=>(s=s+Math.min(n,Math.max(0,a-i))-n,{height:s,locked:a>=i+n})),G((i,s)=>i.height===s.height&&i.locked===s.locked))}function Kr(e,o){var n=o,{header$:t}=n,r=Zr(n,["header$"]);let i=N(".md-sidebar__scrollwrap",e),{y:s}=Je(i);return $(()=>{let a=new x,c=a.pipe(J(),ee(!0)),p=a.pipe(Ae(0,Te));return p.pipe(oe(t)).subscribe({next([{height:m},{height:f}]){i.style.height=`${m-2*s}px`,e.style.top=`${f}px`},complete(){i.style.height="",e.style.top=""}}),p.pipe($e()).subscribe(()=>{for(let m of 
z(".md-nav__link--active[href]",e)){let f=or(m);if(typeof f!="undefined"){let u=m.offsetTop-f.offsetTop,{height:d}=he(f);f.scrollTo({top:u-d/2})}}}),me(z("label[tabindex]",e)).pipe(se(m=>h(m,"click").pipe(l(()=>m),K(c)))).subscribe(m=>{let f=N(`[id="${m.htmlFor}"]`);N(`[aria-labelledby="${m.id}"]`).setAttribute("aria-expanded",`${f.checked}`)}),$a(e,r).pipe(w(m=>a.next(m)),k(()=>a.complete()),l(m=>I({ref:e},m)))})}function Yn(e,t){if(typeof t!="undefined"){let r=`https://api.github.com/repos/${e}/${t}`;return Tt(Ue(`${r}/releases/latest`).pipe(pe(()=>T),l(o=>({version:o.tag_name})),He({})),Ue(r).pipe(pe(()=>T),l(o=>({stars:o.stargazers_count,forks:o.forks_count})),He({}))).pipe(l(([o,n])=>I(I({},o),n)))}else{let r=`https://api.github.com/users/${e}`;return Ue(r).pipe(l(o=>({repositories:o.public_repos})),He({}))}}function Bn(e,t){let r=`https://${e}/api/v4/projects/${encodeURIComponent(t)}`;return Ue(r).pipe(pe(()=>T),l(({star_count:o,forks_count:n})=>({stars:o,forks:n})),He({}))}function Gn(e){let t=e.match(/^.+github\.com\/([^/]+)\/?([^/]+)?/i);if(t){let[,r,o]=t;return Yn(r,o)}if(t=e.match(/^.+?([^/]*gitlab[^/]+)\/(.+?)\/?$/i),t){let[,r,o]=t;return Bn(r,o)}return T}var Ra;function Ia(e){return Ra||(Ra=$(()=>{let t=__md_get("__source",sessionStorage);if(t)return H(t);if(ne("consent").length){let o=__md_get("__consent");if(!(o&&o.github))return T}return Gn(e.href).pipe(w(o=>__md_set("__source",o,sessionStorage)))}).pipe(pe(()=>T),L(t=>Object.keys(t).length>0),l(t=>({facts:t})),B(1)))}function Jn(e){let t=N(":scope > :last-child",e);return $(()=>{let r=new x;return r.subscribe(({facts:o})=>{t.appendChild(ln(o)),t.classList.add("md-source__repository--active")}),Ia(e).pipe(w(o=>r.next(o)),k(()=>r.complete()),l(o=>I({ref:e},o)))})}function Pa(e,{viewport$:t,header$:r}){return xe(document.body).pipe(v(()=>sr(e,{header$:r,viewport$:t})),l(({offset:{y:o}})=>({hidden:o>=10})),X("hidden"))}function Xn(e,t){return $(()=>{let r=new x;return 
r.subscribe({next({hidden:o}){e.hidden=o},complete(){e.hidden=!1}}),(te("navigation.tabs.sticky")?H({hidden:!1}):Pa(e,t)).pipe(w(o=>r.next(o)),k(()=>r.complete()),l(o=>I({ref:e},o)))})}function Fa(e,{viewport$:t,header$:r}){let o=new Map,n=z("[href^=\\#]",e);for(let a of n){let c=decodeURIComponent(a.hash.substring(1)),p=ce(`[id="${c}"]`);typeof p!="undefined"&&o.set(a,p)}let i=r.pipe(X("height"),l(({height:a})=>{let c=ye("main"),p=N(":scope > :first-child",c);return a+.8*(p.offsetTop-c.offsetTop)}),le());return xe(document.body).pipe(X("height"),v(a=>$(()=>{let c=[];return H([...o].reduce((p,[m,f])=>{for(;c.length&&o.get(c[c.length-1]).tagName>=f.tagName;)c.pop();let u=f.offsetTop;for(;!u&&f.parentElement;)f=f.parentElement,u=f.offsetTop;let d=f.offsetParent;for(;d;d=d.offsetParent)u+=d.offsetTop;return p.set([...c=[...c,m]].reverse(),u)},new Map))}).pipe(l(c=>new Map([...c].sort(([,p],[,m])=>p-m))),Ge(i),v(([c,p])=>t.pipe(Cr(([m,f],{offset:{y:u},size:d})=>{let b=u+d.height>=Math.floor(a.height);for(;f.length;){let[,_]=f[0];if(_-p=u&&!b)f=[m.pop(),...f];else break}return[m,f]},[[],[...c]]),G((m,f)=>m[0]===f[0]&&m[1]===f[1])))))).pipe(l(([a,c])=>({prev:a.map(([p])=>p),next:c.map(([p])=>p)})),V({prev:[],next:[]}),Ce(2,1),l(([a,c])=>a.prev.length{let i=new x,s=i.pipe(J(),ee(!0));if(i.subscribe(({prev:a,next:c})=>{for(let[p]of c)p.classList.remove("md-nav__link--passed"),p.classList.remove("md-nav__link--active");for(let[p,[m]]of a.entries())m.classList.add("md-nav__link--passed"),m.classList.toggle("md-nav__link--active",p===a.length-1)}),te("toc.follow")){let a=M(t.pipe(ke(1),l(()=>{})),t.pipe(ke(250),l(()=>"smooth")));i.pipe(L(({prev:c})=>c.length>0),Ge(o.pipe(_e(ae))),oe(a)).subscribe(([[{prev:c}],p])=>{let[m]=c[c.length-1];if(m.offsetHeight){let f=or(m);if(typeof f!="undefined"){let u=m.offsetTop-f.offsetTop,{height:d}=he(f);f.scrollTo({top:u-d/2,behavior:p})}}})}return 
te("navigation.tracking")&&t.pipe(K(s),X("offset"),ke(250),je(1),K(n.pipe(je(1))),Ot({delay:250}),oe(i)).subscribe(([,{prev:a}])=>{let c=fe(),p=a[a.length-1];if(p&&p.length){let[m]=p,{hash:f}=new URL(m.href);c.hash!==f&&(c.hash=f,history.replaceState({},"",`${c}`))}else c.hash="",history.replaceState({},"",`${c}`)}),Fa(e,{viewport$:t,header$:r}).pipe(w(a=>i.next(a)),k(()=>i.complete()),l(a=>I({ref:e},a)))})}function ja(e,{viewport$:t,main$:r,target$:o}){let n=t.pipe(l(({offset:{y:s}})=>s),Ce(2,1),l(([s,a])=>s>a&&a>0),G()),i=r.pipe(l(({active:s})=>s));return Q([i,n]).pipe(l(([s,a])=>!(s&&a)),G(),K(o.pipe(je(1))),ee(!0),Ot({delay:250}),l(s=>({hidden:s})))}function ei(e,{viewport$:t,header$:r,main$:o,target$:n}){let i=new x,s=i.pipe(J(),ee(!0));return i.subscribe({next({hidden:a}){e.hidden=a,a?(e.setAttribute("tabindex","-1"),e.blur()):e.removeAttribute("tabindex")},complete(){e.style.top="",e.hidden=!0,e.removeAttribute("tabindex")}}),r.pipe(K(s),X("height")).subscribe(({height:a})=>{e.style.top=`${a+16}px`}),h(e,"click").subscribe(a=>{a.preventDefault(),window.scrollTo({top:0})}),ja(e,{viewport$:t,main$:o,target$:n}).pipe(w(a=>i.next(a)),k(()=>i.complete()),l(a=>I({ref:e},a)))}function ti({document$:e,tablet$:t}){e.pipe(v(()=>z(".md-toggle--indeterminate")),w(r=>{r.indeterminate=!0,r.checked=!1}),se(r=>h(r,"change").pipe($r(()=>r.classList.contains("md-toggle--indeterminate")),l(()=>r))),oe(t)).subscribe(([r,o])=>{r.classList.remove("md-toggle--indeterminate"),o&&(r.checked=!1)})}function Wa(){return/(iPad|iPhone|iPod)/.test(navigator.userAgent)}function ri({document$:e}){e.pipe(v(()=>z("[data-md-scrollfix]")),w(t=>t.removeAttribute("data-md-scrollfix")),L(Wa),se(t=>h(t,"touchstart").pipe(l(()=>t)))).subscribe(t=>{let r=t.scrollTop;r===0?t.scrollTop=1:r+t.offsetHeight===t.scrollHeight&&(t.scrollTop=r-1)})}function 
oi({viewport$:e,tablet$:t}){Q([We("search"),t]).pipe(l(([r,o])=>r&&!o),v(r=>H(r).pipe(ze(r?400:100))),oe(e)).subscribe(([r,{offset:{y:o}}])=>{if(r)document.body.setAttribute("data-md-scrolllock",""),document.body.style.top=`-${o}px`;else{let n=-1*parseInt(document.body.style.top,10);document.body.removeAttribute("data-md-scrolllock"),document.body.style.top="",n&&window.scrollTo(0,n)}})}Object.entries||(Object.entries=function(e){let t=[];for(let r of Object.keys(e))t.push([r,e[r]]);return t});Object.values||(Object.values=function(e){let t=[];for(let r of Object.keys(e))t.push(e[r]);return t});typeof Element!="undefined"&&(Element.prototype.scrollTo||(Element.prototype.scrollTo=function(e,t){typeof e=="object"?(this.scrollLeft=e.left,this.scrollTop=e.top):(this.scrollLeft=e,this.scrollTop=t)}),Element.prototype.replaceWith||(Element.prototype.replaceWith=function(...e){let t=this.parentNode;if(t){e.length===0&&t.removeChild(this);for(let r=e.length-1;r>=0;r--){let o=e[r];typeof o=="string"?o=document.createTextNode(o):o.parentNode&&o.parentNode.removeChild(o),r?t.insertBefore(this.previousSibling,o):t.replaceChild(o,this)}}}));function Ua(){return location.protocol==="file:"?ht(`${new URL("search/search_index.js",Qr.base)}`).pipe(l(()=>__index),B(1)):Ue(new URL("search/search_index.json",Qr.base))}document.documentElement.classList.remove("no-js");document.documentElement.classList.add("js");var nt=Wo(),At=Qo(),gt=Bo(At),Yr=Ko(),Se=rn(),lr=Fr("(min-width: 960px)"),ii=Fr("(min-width: 1220px)"),ai=Go(),Qr=ue(),si=document.forms.namedItem("search")?Ua():Ve,Br=new x;Rn({alert$:Br});te("navigation.instant")&&In({location$:At,viewport$:Se}).subscribe(nt);var ni;((ni=Qr.version)==null?void 0:ni.provider)==="mike"&&Nn({document$:nt});M(At,gt).pipe(ze(125)).subscribe(()=>{Ke("drawer",!1),Ke("search",!1)});Yr.pipe(L(({mode:e})=>e==="global")).subscribe(e=>{switch(e.type){case"p":case",":let t=ce("link[rel=prev]");typeof t!="undefined"&&ot(t);break;case"n":case".":let 
r=ce("link[rel=next]");typeof r!="undefined"&&ot(r);break;case"Enter":let o=Re();o instanceof HTMLLabelElement&&o.click()}});ti({document$:nt,tablet$:lr});ri({document$:nt});oi({viewport$:Se,tablet$:lr});var Xe=An(ye("header"),{viewport$:Se}),_t=nt.pipe(l(()=>ye("main")),v(e=>Hn(e,{viewport$:Se,header$:Xe})),B(1)),Na=M(...ne("consent").map(e=>an(e,{target$:gt})),...ne("dialog").map(e=>_n(e,{alert$:Br})),...ne("header").map(e=>Cn(e,{viewport$:Se,header$:Xe,main$:_t})),...ne("palette").map(e=>$n(e)),...ne("search").map(e=>Kn(e,{index$:si,keyboard$:Yr})),...ne("source").map(e=>Jn(e))),Da=$(()=>M(...ne("announce").map(e=>nn(e)),...ne("content").map(e=>Ln(e,{viewport$:Se,target$:gt,print$:ai})),...ne("content").map(e=>te("search.highlight")?Qn(e,{index$:si,location$:At}):T),...ne("header-title").map(e=>kn(e,{viewport$:Se,header$:Xe})),...ne("sidebar").map(e=>e.getAttribute("data-md-type")==="navigation"?jr(ii,()=>Kr(e,{viewport$:Se,header$:Xe,main$:_t})):jr(lr,()=>Kr(e,{viewport$:Se,header$:Xe,main$:_t}))),...ne("tabs").map(e=>Xn(e,{viewport$:Se,header$:Xe})),...ne("toc").map(e=>Zn(e,{viewport$:Se,header$:Xe,main$:_t,target$:gt})),...ne("top").map(e=>ei(e,{viewport$:Se,header$:Xe,main$:_t,target$:gt})))),ci=nt.pipe(v(()=>Da),qe(Na),B(1));ci.subscribe();window.document$=nt;window.location$=At;window.target$=gt;window.keyboard$=Yr;window.viewport$=Se;window.tablet$=lr;window.screen$=ii;window.print$=ai;window.alert$=Br;window.component$=ci;})(); +//# sourceMappingURL=bundle.dff1b7c8.min.js.map + diff --git a/assets/javascripts/bundle.dff1b7c8.min.js.map b/assets/javascripts/bundle.dff1b7c8.min.js.map new file mode 100644 index 00000000..82d90238 --- /dev/null +++ b/assets/javascripts/bundle.dff1b7c8.min.js.map @@ -0,0 +1,8 @@ +{ + "version": 3, + "sources": ["node_modules/focus-visible/dist/focus-visible.js", "node_modules/clipboard/dist/clipboard.js", "node_modules/escape-html/index.js", "src/assets/javascripts/bundle.ts", 
"node_modules/rxjs/node_modules/tslib/tslib.es6.js", "node_modules/rxjs/src/internal/util/isFunction.ts", "node_modules/rxjs/src/internal/util/createErrorClass.ts", "node_modules/rxjs/src/internal/util/UnsubscriptionError.ts", "node_modules/rxjs/src/internal/util/arrRemove.ts", "node_modules/rxjs/src/internal/Subscription.ts", "node_modules/rxjs/src/internal/config.ts", "node_modules/rxjs/src/internal/scheduler/timeoutProvider.ts", "node_modules/rxjs/src/internal/util/reportUnhandledError.ts", "node_modules/rxjs/src/internal/util/noop.ts", "node_modules/rxjs/src/internal/NotificationFactories.ts", "node_modules/rxjs/src/internal/util/errorContext.ts", "node_modules/rxjs/src/internal/Subscriber.ts", "node_modules/rxjs/src/internal/symbol/observable.ts", "node_modules/rxjs/src/internal/util/identity.ts", "node_modules/rxjs/src/internal/util/pipe.ts", "node_modules/rxjs/src/internal/Observable.ts", "node_modules/rxjs/src/internal/util/lift.ts", "node_modules/rxjs/src/internal/operators/OperatorSubscriber.ts", "node_modules/rxjs/src/internal/scheduler/animationFrameProvider.ts", "node_modules/rxjs/src/internal/util/ObjectUnsubscribedError.ts", "node_modules/rxjs/src/internal/Subject.ts", "node_modules/rxjs/src/internal/scheduler/dateTimestampProvider.ts", "node_modules/rxjs/src/internal/ReplaySubject.ts", "node_modules/rxjs/src/internal/scheduler/Action.ts", "node_modules/rxjs/src/internal/scheduler/intervalProvider.ts", "node_modules/rxjs/src/internal/scheduler/AsyncAction.ts", "node_modules/rxjs/src/internal/Scheduler.ts", "node_modules/rxjs/src/internal/scheduler/AsyncScheduler.ts", "node_modules/rxjs/src/internal/scheduler/async.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameAction.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameScheduler.ts", "node_modules/rxjs/src/internal/scheduler/animationFrame.ts", "node_modules/rxjs/src/internal/observable/empty.ts", "node_modules/rxjs/src/internal/util/isScheduler.ts", 
"node_modules/rxjs/src/internal/util/args.ts", "node_modules/rxjs/src/internal/util/isArrayLike.ts", "node_modules/rxjs/src/internal/util/isPromise.ts", "node_modules/rxjs/src/internal/util/isInteropObservable.ts", "node_modules/rxjs/src/internal/util/isAsyncIterable.ts", "node_modules/rxjs/src/internal/util/throwUnobservableError.ts", "node_modules/rxjs/src/internal/symbol/iterator.ts", "node_modules/rxjs/src/internal/util/isIterable.ts", "node_modules/rxjs/src/internal/util/isReadableStreamLike.ts", "node_modules/rxjs/src/internal/observable/innerFrom.ts", "node_modules/rxjs/src/internal/util/executeSchedule.ts", "node_modules/rxjs/src/internal/operators/observeOn.ts", "node_modules/rxjs/src/internal/operators/subscribeOn.ts", "node_modules/rxjs/src/internal/scheduled/scheduleObservable.ts", "node_modules/rxjs/src/internal/scheduled/schedulePromise.ts", "node_modules/rxjs/src/internal/scheduled/scheduleArray.ts", "node_modules/rxjs/src/internal/scheduled/scheduleIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleAsyncIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleReadableStreamLike.ts", "node_modules/rxjs/src/internal/scheduled/scheduled.ts", "node_modules/rxjs/src/internal/observable/from.ts", "node_modules/rxjs/src/internal/observable/of.ts", "node_modules/rxjs/src/internal/observable/throwError.ts", "node_modules/rxjs/src/internal/util/EmptyError.ts", "node_modules/rxjs/src/internal/util/isDate.ts", "node_modules/rxjs/src/internal/operators/map.ts", "node_modules/rxjs/src/internal/util/mapOneOrManyArgs.ts", "node_modules/rxjs/src/internal/util/argsArgArrayOrObject.ts", "node_modules/rxjs/src/internal/util/createObject.ts", "node_modules/rxjs/src/internal/observable/combineLatest.ts", "node_modules/rxjs/src/internal/operators/mergeInternals.ts", "node_modules/rxjs/src/internal/operators/mergeMap.ts", "node_modules/rxjs/src/internal/operators/mergeAll.ts", "node_modules/rxjs/src/internal/operators/concatAll.ts", 
"node_modules/rxjs/src/internal/observable/concat.ts", "node_modules/rxjs/src/internal/observable/defer.ts", "node_modules/rxjs/src/internal/observable/fromEvent.ts", "node_modules/rxjs/src/internal/observable/fromEventPattern.ts", "node_modules/rxjs/src/internal/observable/timer.ts", "node_modules/rxjs/src/internal/observable/merge.ts", "node_modules/rxjs/src/internal/observable/never.ts", "node_modules/rxjs/src/internal/util/argsOrArgArray.ts", "node_modules/rxjs/src/internal/operators/filter.ts", "node_modules/rxjs/src/internal/observable/zip.ts", "node_modules/rxjs/src/internal/operators/audit.ts", "node_modules/rxjs/src/internal/operators/auditTime.ts", "node_modules/rxjs/src/internal/operators/bufferCount.ts", "node_modules/rxjs/src/internal/operators/catchError.ts", "node_modules/rxjs/src/internal/operators/scanInternals.ts", "node_modules/rxjs/src/internal/operators/combineLatest.ts", "node_modules/rxjs/src/internal/operators/combineLatestWith.ts", "node_modules/rxjs/src/internal/operators/debounceTime.ts", "node_modules/rxjs/src/internal/operators/defaultIfEmpty.ts", "node_modules/rxjs/src/internal/operators/take.ts", "node_modules/rxjs/src/internal/operators/ignoreElements.ts", "node_modules/rxjs/src/internal/operators/mapTo.ts", "node_modules/rxjs/src/internal/operators/delayWhen.ts", "node_modules/rxjs/src/internal/operators/delay.ts", "node_modules/rxjs/src/internal/operators/distinctUntilChanged.ts", "node_modules/rxjs/src/internal/operators/distinctUntilKeyChanged.ts", "node_modules/rxjs/src/internal/operators/throwIfEmpty.ts", "node_modules/rxjs/src/internal/operators/endWith.ts", "node_modules/rxjs/src/internal/operators/finalize.ts", "node_modules/rxjs/src/internal/operators/first.ts", "node_modules/rxjs/src/internal/operators/merge.ts", "node_modules/rxjs/src/internal/operators/mergeWith.ts", "node_modules/rxjs/src/internal/operators/repeat.ts", "node_modules/rxjs/src/internal/operators/scan.ts", 
"node_modules/rxjs/src/internal/operators/share.ts", "node_modules/rxjs/src/internal/operators/shareReplay.ts", "node_modules/rxjs/src/internal/operators/skip.ts", "node_modules/rxjs/src/internal/operators/skipUntil.ts", "node_modules/rxjs/src/internal/operators/startWith.ts", "node_modules/rxjs/src/internal/operators/switchMap.ts", "node_modules/rxjs/src/internal/operators/takeUntil.ts", "node_modules/rxjs/src/internal/operators/takeWhile.ts", "node_modules/rxjs/src/internal/operators/tap.ts", "node_modules/rxjs/src/internal/operators/throttle.ts", "node_modules/rxjs/src/internal/operators/throttleTime.ts", "node_modules/rxjs/src/internal/operators/withLatestFrom.ts", "node_modules/rxjs/src/internal/operators/zip.ts", "node_modules/rxjs/src/internal/operators/zipWith.ts", "src/assets/javascripts/browser/document/index.ts", "src/assets/javascripts/browser/element/_/index.ts", "src/assets/javascripts/browser/element/focus/index.ts", "src/assets/javascripts/browser/element/offset/_/index.ts", "src/assets/javascripts/browser/element/offset/content/index.ts", "src/assets/javascripts/utilities/h/index.ts", "src/assets/javascripts/utilities/round/index.ts", "src/assets/javascripts/browser/script/index.ts", "src/assets/javascripts/browser/element/size/_/index.ts", "src/assets/javascripts/browser/element/size/content/index.ts", "src/assets/javascripts/browser/element/visibility/index.ts", "src/assets/javascripts/browser/toggle/index.ts", "src/assets/javascripts/browser/keyboard/index.ts", "src/assets/javascripts/browser/location/_/index.ts", "src/assets/javascripts/browser/location/hash/index.ts", "src/assets/javascripts/browser/media/index.ts", "src/assets/javascripts/browser/request/index.ts", "src/assets/javascripts/browser/viewport/offset/index.ts", "src/assets/javascripts/browser/viewport/size/index.ts", "src/assets/javascripts/browser/viewport/_/index.ts", "src/assets/javascripts/browser/viewport/at/index.ts", "src/assets/javascripts/browser/worker/index.ts", 
"src/assets/javascripts/_/index.ts", "src/assets/javascripts/components/_/index.ts", "src/assets/javascripts/components/announce/index.ts", "src/assets/javascripts/components/consent/index.ts", "src/assets/javascripts/components/content/annotation/_/index.ts", "src/assets/javascripts/templates/tooltip/index.tsx", "src/assets/javascripts/templates/annotation/index.tsx", "src/assets/javascripts/templates/clipboard/index.tsx", "src/assets/javascripts/templates/search/index.tsx", "src/assets/javascripts/templates/source/index.tsx", "src/assets/javascripts/templates/tabbed/index.tsx", "src/assets/javascripts/templates/table/index.tsx", "src/assets/javascripts/templates/version/index.tsx", "src/assets/javascripts/components/content/annotation/list/index.ts", "src/assets/javascripts/components/content/annotation/block/index.ts", "src/assets/javascripts/components/content/code/_/index.ts", "src/assets/javascripts/components/content/details/index.ts", "src/assets/javascripts/components/content/mermaid/index.css", "src/assets/javascripts/components/content/mermaid/index.ts", "src/assets/javascripts/components/content/table/index.ts", "src/assets/javascripts/components/content/tabs/index.ts", "src/assets/javascripts/components/content/_/index.ts", "src/assets/javascripts/components/dialog/index.ts", "src/assets/javascripts/components/header/_/index.ts", "src/assets/javascripts/components/header/title/index.ts", "src/assets/javascripts/components/main/index.ts", "src/assets/javascripts/components/palette/index.ts", "src/assets/javascripts/integrations/clipboard/index.ts", "src/assets/javascripts/integrations/sitemap/index.ts", "src/assets/javascripts/integrations/instant/index.ts", "src/assets/javascripts/integrations/search/highlighter/index.ts", "src/assets/javascripts/integrations/search/worker/message/index.ts", "src/assets/javascripts/integrations/search/worker/_/index.ts", "src/assets/javascripts/integrations/version/index.ts", 
"src/assets/javascripts/components/search/query/index.ts", "src/assets/javascripts/components/search/result/index.ts", "src/assets/javascripts/components/search/share/index.ts", "src/assets/javascripts/components/search/suggest/index.ts", "src/assets/javascripts/components/search/_/index.ts", "src/assets/javascripts/components/search/highlight/index.ts", "src/assets/javascripts/components/sidebar/index.ts", "src/assets/javascripts/components/source/facts/github/index.ts", "src/assets/javascripts/components/source/facts/gitlab/index.ts", "src/assets/javascripts/components/source/facts/_/index.ts", "src/assets/javascripts/components/source/_/index.ts", "src/assets/javascripts/components/tabs/index.ts", "src/assets/javascripts/components/toc/index.ts", "src/assets/javascripts/components/top/index.ts", "src/assets/javascripts/patches/indeterminate/index.ts", "src/assets/javascripts/patches/scrollfix/index.ts", "src/assets/javascripts/patches/scrolllock/index.ts", "src/assets/javascripts/polyfills/index.ts"], + "sourceRoot": "../../..", + "sourcesContent": ["(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory() :\n typeof define === 'function' && define.amd ? 
define(factory) :\n (factory());\n}(this, (function () { 'use strict';\n\n /**\n * Applies the :focus-visible polyfill at the given scope.\n * A scope in this case is either the top-level Document or a Shadow Root.\n *\n * @param {(Document|ShadowRoot)} scope\n * @see https://github.com/WICG/focus-visible\n */\n function applyFocusVisiblePolyfill(scope) {\n var hadKeyboardEvent = true;\n var hadFocusVisibleRecently = false;\n var hadFocusVisibleRecentlyTimeout = null;\n\n var inputTypesAllowlist = {\n text: true,\n search: true,\n url: true,\n tel: true,\n email: true,\n password: true,\n number: true,\n date: true,\n month: true,\n week: true,\n time: true,\n datetime: true,\n 'datetime-local': true\n };\n\n /**\n * Helper function for legacy browsers and iframes which sometimes focus\n * elements like document, body, and non-interactive SVG.\n * @param {Element} el\n */\n function isValidFocusTarget(el) {\n if (\n el &&\n el !== document &&\n el.nodeName !== 'HTML' &&\n el.nodeName !== 'BODY' &&\n 'classList' in el &&\n 'contains' in el.classList\n ) {\n return true;\n }\n return false;\n }\n\n /**\n * Computes whether the given element should automatically trigger the\n * `focus-visible` class being added, i.e. 
whether it should always match\n * `:focus-visible` when focused.\n * @param {Element} el\n * @return {boolean}\n */\n function focusTriggersKeyboardModality(el) {\n var type = el.type;\n var tagName = el.tagName;\n\n if (tagName === 'INPUT' && inputTypesAllowlist[type] && !el.readOnly) {\n return true;\n }\n\n if (tagName === 'TEXTAREA' && !el.readOnly) {\n return true;\n }\n\n if (el.isContentEditable) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Add the `focus-visible` class to the given element if it was not added by\n * the author.\n * @param {Element} el\n */\n function addFocusVisibleClass(el) {\n if (el.classList.contains('focus-visible')) {\n return;\n }\n el.classList.add('focus-visible');\n el.setAttribute('data-focus-visible-added', '');\n }\n\n /**\n * Remove the `focus-visible` class from the given element if it was not\n * originally added by the author.\n * @param {Element} el\n */\n function removeFocusVisibleClass(el) {\n if (!el.hasAttribute('data-focus-visible-added')) {\n return;\n }\n el.classList.remove('focus-visible');\n el.removeAttribute('data-focus-visible-added');\n }\n\n /**\n * If the most recent user interaction was via the keyboard;\n * and the key press did not include a meta, alt/option, or control key;\n * then the modality is keyboard. 
Otherwise, the modality is not keyboard.\n * Apply `focus-visible` to any current active element and keep track\n * of our keyboard modality state with `hadKeyboardEvent`.\n * @param {KeyboardEvent} e\n */\n function onKeyDown(e) {\n if (e.metaKey || e.altKey || e.ctrlKey) {\n return;\n }\n\n if (isValidFocusTarget(scope.activeElement)) {\n addFocusVisibleClass(scope.activeElement);\n }\n\n hadKeyboardEvent = true;\n }\n\n /**\n * If at any point a user clicks with a pointing device, ensure that we change\n * the modality away from keyboard.\n * This avoids the situation where a user presses a key on an already focused\n * element, and then clicks on a different element, focusing it with a\n * pointing device, while we still think we're in keyboard modality.\n * @param {Event} e\n */\n function onPointerDown(e) {\n hadKeyboardEvent = false;\n }\n\n /**\n * On `focus`, add the `focus-visible` class to the target if:\n * - the target received focus as a result of keyboard navigation, or\n * - the event target is an element that will likely require interaction\n * via the keyboard (e.g. 
a text box)\n * @param {Event} e\n */\n function onFocus(e) {\n // Prevent IE from focusing the document or HTML element.\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (hadKeyboardEvent || focusTriggersKeyboardModality(e.target)) {\n addFocusVisibleClass(e.target);\n }\n }\n\n /**\n * On `blur`, remove the `focus-visible` class from the target.\n * @param {Event} e\n */\n function onBlur(e) {\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (\n e.target.classList.contains('focus-visible') ||\n e.target.hasAttribute('data-focus-visible-added')\n ) {\n // To detect a tab/window switch, we look for a blur event followed\n // rapidly by a visibility change.\n // If we don't see a visibility change within 100ms, it's probably a\n // regular focus change.\n hadFocusVisibleRecently = true;\n window.clearTimeout(hadFocusVisibleRecentlyTimeout);\n hadFocusVisibleRecentlyTimeout = window.setTimeout(function() {\n hadFocusVisibleRecently = false;\n }, 100);\n removeFocusVisibleClass(e.target);\n }\n }\n\n /**\n * If the user changes tabs, keep track of whether or not the previously\n * focused element had .focus-visible.\n * @param {Event} e\n */\n function onVisibilityChange(e) {\n if (document.visibilityState === 'hidden') {\n // If the tab becomes active again, the browser will handle calling focus\n // on the element (Safari actually calls it twice).\n // If this tab change caused a blur on an element with focus-visible,\n // re-apply the class when the user switches back to the tab.\n if (hadFocusVisibleRecently) {\n hadKeyboardEvent = true;\n }\n addInitialPointerMoveListeners();\n }\n }\n\n /**\n * Add a group of listeners to detect usage of any pointing devices.\n * These listeners will be added when the polyfill first loads, and anytime\n * the window is blurred, so that they are active when the window regains\n * focus.\n */\n function addInitialPointerMoveListeners() {\n document.addEventListener('mousemove', onInitialPointerMove);\n 
document.addEventListener('mousedown', onInitialPointerMove);\n document.addEventListener('mouseup', onInitialPointerMove);\n document.addEventListener('pointermove', onInitialPointerMove);\n document.addEventListener('pointerdown', onInitialPointerMove);\n document.addEventListener('pointerup', onInitialPointerMove);\n document.addEventListener('touchmove', onInitialPointerMove);\n document.addEventListener('touchstart', onInitialPointerMove);\n document.addEventListener('touchend', onInitialPointerMove);\n }\n\n function removeInitialPointerMoveListeners() {\n document.removeEventListener('mousemove', onInitialPointerMove);\n document.removeEventListener('mousedown', onInitialPointerMove);\n document.removeEventListener('mouseup', onInitialPointerMove);\n document.removeEventListener('pointermove', onInitialPointerMove);\n document.removeEventListener('pointerdown', onInitialPointerMove);\n document.removeEventListener('pointerup', onInitialPointerMove);\n document.removeEventListener('touchmove', onInitialPointerMove);\n document.removeEventListener('touchstart', onInitialPointerMove);\n document.removeEventListener('touchend', onInitialPointerMove);\n }\n\n /**\n * When the polfyill first loads, assume the user is in keyboard modality.\n * If any event is received from a pointing device (e.g. mouse, pointer,\n * touch), turn off keyboard modality.\n * This accounts for situations where focus enters the page from the URL bar.\n * @param {Event} e\n */\n function onInitialPointerMove(e) {\n // Work around a Safari quirk that fires a mousemove on whenever the\n // window blurs, even if you're tabbing out of the page. \u00AF\\_(\u30C4)_/\u00AF\n if (e.target.nodeName && e.target.nodeName.toLowerCase() === 'html') {\n return;\n }\n\n hadKeyboardEvent = false;\n removeInitialPointerMoveListeners();\n }\n\n // For some kinds of state, we are interested in changes at the global scope\n // only. 
For example, global pointer input, global key presses and global\n // visibility change should affect the state at every scope:\n document.addEventListener('keydown', onKeyDown, true);\n document.addEventListener('mousedown', onPointerDown, true);\n document.addEventListener('pointerdown', onPointerDown, true);\n document.addEventListener('touchstart', onPointerDown, true);\n document.addEventListener('visibilitychange', onVisibilityChange, true);\n\n addInitialPointerMoveListeners();\n\n // For focus and blur, we specifically care about state changes in the local\n // scope. This is because focus / blur events that originate from within a\n // shadow root are not re-dispatched from the host element if it was already\n // the active element in its own scope:\n scope.addEventListener('focus', onFocus, true);\n scope.addEventListener('blur', onBlur, true);\n\n // We detect that a node is a ShadowRoot by ensuring that it is a\n // DocumentFragment and also has a host property. This check covers native\n // implementation and polyfill implementation transparently. If we only cared\n // about the native implementation, we could just check if the scope was\n // an instance of a ShadowRoot.\n if (scope.nodeType === Node.DOCUMENT_FRAGMENT_NODE && scope.host) {\n // Since a ShadowRoot is a special kind of DocumentFragment, it does not\n // have a root element to add a class to. 
So, we add this attribute to the\n // host element instead:\n scope.host.setAttribute('data-js-focus-visible', '');\n } else if (scope.nodeType === Node.DOCUMENT_NODE) {\n document.documentElement.classList.add('js-focus-visible');\n document.documentElement.setAttribute('data-js-focus-visible', '');\n }\n }\n\n // It is important to wrap all references to global window and document in\n // these checks to support server-side rendering use cases\n // @see https://github.com/WICG/focus-visible/issues/199\n if (typeof window !== 'undefined' && typeof document !== 'undefined') {\n // Make the polyfill helper globally available. This can be used as a signal\n // to interested libraries that wish to coordinate with the polyfill for e.g.,\n // applying the polyfill to a shadow root:\n window.applyFocusVisiblePolyfill = applyFocusVisiblePolyfill;\n\n // Notify interested libraries of the polyfill's presence, in case the\n // polyfill was loaded lazily:\n var event;\n\n try {\n event = new CustomEvent('focus-visible-polyfill-ready');\n } catch (error) {\n // IE11 does not support using CustomEvent as a constructor directly:\n event = document.createEvent('CustomEvent');\n event.initCustomEvent('focus-visible-polyfill-ready', false, false, {});\n }\n\n window.dispatchEvent(event);\n }\n\n if (typeof document !== 'undefined') {\n // Apply the polyfill to the global document, so that no JavaScript\n // coordination is required to use the polyfill in the top-level document:\n applyFocusVisiblePolyfill(document);\n }\n\n})));\n", "/*!\n * clipboard.js v2.0.11\n * https://clipboardjs.com/\n *\n * Licensed MIT \u00A9 Zeno Rocha\n */\n(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"ClipboardJS\"] = 
factory();\n\telse\n\t\troot[\"ClipboardJS\"] = factory();\n})(this, function() {\nreturn /******/ (function() { // webpackBootstrap\n/******/ \tvar __webpack_modules__ = ({\n\n/***/ 686:\n/***/ (function(__unused_webpack_module, __webpack_exports__, __webpack_require__) {\n\n\"use strict\";\n\n// EXPORTS\n__webpack_require__.d(__webpack_exports__, {\n \"default\": function() { return /* binding */ clipboard; }\n});\n\n// EXTERNAL MODULE: ./node_modules/tiny-emitter/index.js\nvar tiny_emitter = __webpack_require__(279);\nvar tiny_emitter_default = /*#__PURE__*/__webpack_require__.n(tiny_emitter);\n// EXTERNAL MODULE: ./node_modules/good-listener/src/listen.js\nvar listen = __webpack_require__(370);\nvar listen_default = /*#__PURE__*/__webpack_require__.n(listen);\n// EXTERNAL MODULE: ./node_modules/select/src/select.js\nvar src_select = __webpack_require__(817);\nvar select_default = /*#__PURE__*/__webpack_require__.n(src_select);\n;// CONCATENATED MODULE: ./src/common/command.js\n/**\n * Executes a given operation type.\n * @param {String} type\n * @return {Boolean}\n */\nfunction command(type) {\n try {\n return document.execCommand(type);\n } catch (err) {\n return false;\n }\n}\n;// CONCATENATED MODULE: ./src/actions/cut.js\n\n\n/**\n * Cut action wrapper.\n * @param {String|HTMLElement} target\n * @return {String}\n */\n\nvar ClipboardActionCut = function ClipboardActionCut(target) {\n var selectedText = select_default()(target);\n command('cut');\n return selectedText;\n};\n\n/* harmony default export */ var actions_cut = (ClipboardActionCut);\n;// CONCATENATED MODULE: ./src/common/create-fake-element.js\n/**\n * Creates a fake textarea element with a value.\n * @param {String} value\n * @return {HTMLElement}\n */\nfunction createFakeElement(value) {\n var isRTL = document.documentElement.getAttribute('dir') === 'rtl';\n var fakeElement = document.createElement('textarea'); // Prevent zooming on iOS\n\n fakeElement.style.fontSize = '12pt'; // Reset box 
model\n\n fakeElement.style.border = '0';\n fakeElement.style.padding = '0';\n fakeElement.style.margin = '0'; // Move element out of screen horizontally\n\n fakeElement.style.position = 'absolute';\n fakeElement.style[isRTL ? 'right' : 'left'] = '-9999px'; // Move element to the same position vertically\n\n var yPosition = window.pageYOffset || document.documentElement.scrollTop;\n fakeElement.style.top = \"\".concat(yPosition, \"px\");\n fakeElement.setAttribute('readonly', '');\n fakeElement.value = value;\n return fakeElement;\n}\n;// CONCATENATED MODULE: ./src/actions/copy.js\n\n\n\n/**\n * Create fake copy action wrapper using a fake element.\n * @param {String} target\n * @param {Object} options\n * @return {String}\n */\n\nvar fakeCopyAction = function fakeCopyAction(value, options) {\n var fakeElement = createFakeElement(value);\n options.container.appendChild(fakeElement);\n var selectedText = select_default()(fakeElement);\n command('copy');\n fakeElement.remove();\n return selectedText;\n};\n/**\n * Copy action wrapper.\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @return {String}\n */\n\n\nvar ClipboardActionCopy = function ClipboardActionCopy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n var selectedText = '';\n\n if (typeof target === 'string') {\n selectedText = fakeCopyAction(target, options);\n } else if (target instanceof HTMLInputElement && !['text', 'search', 'url', 'tel', 'password'].includes(target === null || target === void 0 ? void 0 : target.type)) {\n // If input type doesn't support `setSelectionRange`. Simulate it. 
https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/setSelectionRange\n selectedText = fakeCopyAction(target.value, options);\n } else {\n selectedText = select_default()(target);\n command('copy');\n }\n\n return selectedText;\n};\n\n/* harmony default export */ var actions_copy = (ClipboardActionCopy);\n;// CONCATENATED MODULE: ./src/actions/default.js\nfunction _typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\n\n\n/**\n * Inner function which performs selection from either `text` or `target`\n * properties and then executes copy or cut operations.\n * @param {Object} options\n */\n\nvar ClipboardActionDefault = function ClipboardActionDefault() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n // Defines base properties passed from constructor.\n var _options$action = options.action,\n action = _options$action === void 0 ? 'copy' : _options$action,\n container = options.container,\n target = options.target,\n text = options.text; // Sets the `action` to be performed which can be either 'copy' or 'cut'.\n\n if (action !== 'copy' && action !== 'cut') {\n throw new Error('Invalid \"action\" value, use either \"copy\" or \"cut\"');\n } // Sets the `target` property using an element that will be have its content copied.\n\n\n if (target !== undefined) {\n if (target && _typeof(target) === 'object' && target.nodeType === 1) {\n if (action === 'copy' && target.hasAttribute('disabled')) {\n throw new Error('Invalid \"target\" attribute. 
Please use \"readonly\" instead of \"disabled\" attribute');\n }\n\n if (action === 'cut' && (target.hasAttribute('readonly') || target.hasAttribute('disabled'))) {\n throw new Error('Invalid \"target\" attribute. You can\\'t cut text from elements with \"readonly\" or \"disabled\" attributes');\n }\n } else {\n throw new Error('Invalid \"target\" value, use a valid Element');\n }\n } // Define selection strategy based on `text` property.\n\n\n if (text) {\n return actions_copy(text, {\n container: container\n });\n } // Defines which selection strategy based on `target` property.\n\n\n if (target) {\n return action === 'cut' ? actions_cut(target) : actions_copy(target, {\n container: container\n });\n }\n};\n\n/* harmony default export */ var actions_default = (ClipboardActionDefault);\n;// CONCATENATED MODULE: ./src/clipboard.js\nfunction clipboard_typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { clipboard_typeof = function _typeof(obj) { return typeof obj; }; } else { clipboard_typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
\"symbol\" : typeof obj; }; } return clipboard_typeof(obj); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (clipboard_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if 
(self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\n\n\n\n\n\n/**\n * Helper function to retrieve attribute value.\n * @param {String} suffix\n * @param {Element} element\n */\n\nfunction getAttributeValue(suffix, element) {\n var attribute = \"data-clipboard-\".concat(suffix);\n\n if (!element.hasAttribute(attribute)) {\n return;\n }\n\n return element.getAttribute(attribute);\n}\n/**\n * Base class which takes one or more elements, adds event listeners to them,\n * and instantiates a new `ClipboardAction` on each click.\n */\n\n\nvar Clipboard = /*#__PURE__*/function (_Emitter) {\n _inherits(Clipboard, _Emitter);\n\n var _super = _createSuper(Clipboard);\n\n /**\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n * @param {Object} options\n */\n function Clipboard(trigger, options) {\n var _this;\n\n _classCallCheck(this, Clipboard);\n\n _this = _super.call(this);\n\n _this.resolveOptions(options);\n\n _this.listenClick(trigger);\n\n return _this;\n }\n /**\n * Defines if attributes would be resolved using internal setter functions\n * or custom functions that were passed in the constructor.\n * @param {Object} options\n */\n\n\n _createClass(Clipboard, [{\n key: \"resolveOptions\",\n value: function resolveOptions() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : {};\n this.action = typeof options.action === 'function' ? options.action : this.defaultAction;\n this.target = typeof options.target === 'function' ? options.target : this.defaultTarget;\n this.text = typeof options.text === 'function' ? options.text : this.defaultText;\n this.container = clipboard_typeof(options.container) === 'object' ? options.container : document.body;\n }\n /**\n * Adds a click event listener to the passed trigger.\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n */\n\n }, {\n key: \"listenClick\",\n value: function listenClick(trigger) {\n var _this2 = this;\n\n this.listener = listen_default()(trigger, 'click', function (e) {\n return _this2.onClick(e);\n });\n }\n /**\n * Defines a new `ClipboardAction` on each click event.\n * @param {Event} e\n */\n\n }, {\n key: \"onClick\",\n value: function onClick(e) {\n var trigger = e.delegateTarget || e.currentTarget;\n var action = this.action(trigger) || 'copy';\n var text = actions_default({\n action: action,\n container: this.container,\n target: this.target(trigger),\n text: this.text(trigger)\n }); // Fires an event based on the copy operation result.\n\n this.emit(text ? 
'success' : 'error', {\n action: action,\n text: text,\n trigger: trigger,\n clearSelection: function clearSelection() {\n if (trigger) {\n trigger.focus();\n }\n\n window.getSelection().removeAllRanges();\n }\n });\n }\n /**\n * Default `action` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultAction\",\n value: function defaultAction(trigger) {\n return getAttributeValue('action', trigger);\n }\n /**\n * Default `target` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultTarget\",\n value: function defaultTarget(trigger) {\n var selector = getAttributeValue('target', trigger);\n\n if (selector) {\n return document.querySelector(selector);\n }\n }\n /**\n * Allow fire programmatically a copy action\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @returns Text copied.\n */\n\n }, {\n key: \"defaultText\",\n\n /**\n * Default `text` lookup function.\n * @param {Element} trigger\n */\n value: function defaultText(trigger) {\n return getAttributeValue('text', trigger);\n }\n /**\n * Destroy lifecycle.\n */\n\n }, {\n key: \"destroy\",\n value: function destroy() {\n this.listener.destroy();\n }\n }], [{\n key: \"copy\",\n value: function copy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n return actions_copy(target, options);\n }\n /**\n * Allow fire programmatically a cut action\n * @param {String|HTMLElement} target\n * @returns Text cutted.\n */\n\n }, {\n key: \"cut\",\n value: function cut(target) {\n return actions_cut(target);\n }\n /**\n * Returns the support of the given action, or all actions if no action is\n * given.\n * @param {String} [action]\n */\n\n }, {\n key: \"isSupported\",\n value: function isSupported() {\n var action = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ['copy', 'cut'];\n var actions = typeof action === 'string' ? 
[action] : action;\n var support = !!document.queryCommandSupported;\n actions.forEach(function (action) {\n support = support && !!document.queryCommandSupported(action);\n });\n return support;\n }\n }]);\n\n return Clipboard;\n}((tiny_emitter_default()));\n\n/* harmony default export */ var clipboard = (Clipboard);\n\n/***/ }),\n\n/***/ 828:\n/***/ (function(module) {\n\nvar DOCUMENT_NODE_TYPE = 9;\n\n/**\n * A polyfill for Element.matches()\n */\nif (typeof Element !== 'undefined' && !Element.prototype.matches) {\n var proto = Element.prototype;\n\n proto.matches = proto.matchesSelector ||\n proto.mozMatchesSelector ||\n proto.msMatchesSelector ||\n proto.oMatchesSelector ||\n proto.webkitMatchesSelector;\n}\n\n/**\n * Finds the closest parent that matches a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @return {Function}\n */\nfunction closest (element, selector) {\n while (element && element.nodeType !== DOCUMENT_NODE_TYPE) {\n if (typeof element.matches === 'function' &&\n element.matches(selector)) {\n return element;\n }\n element = element.parentNode;\n }\n}\n\nmodule.exports = closest;\n\n\n/***/ }),\n\n/***/ 438:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar closest = __webpack_require__(828);\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction _delegate(element, selector, type, callback, useCapture) {\n var listenerFn = listener.apply(this, arguments);\n\n element.addEventListener(type, listenerFn, useCapture);\n\n return {\n destroy: function() {\n element.removeEventListener(type, listenerFn, useCapture);\n }\n }\n}\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element|String|Array} [elements]\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} 
useCapture\n * @return {Object}\n */\nfunction delegate(elements, selector, type, callback, useCapture) {\n // Handle the regular Element usage\n if (typeof elements.addEventListener === 'function') {\n return _delegate.apply(null, arguments);\n }\n\n // Handle Element-less usage, it defaults to global delegation\n if (typeof type === 'function') {\n // Use `document` as the first parameter, then apply arguments\n // This is a short way to .unshift `arguments` without running into deoptimizations\n return _delegate.bind(null, document).apply(null, arguments);\n }\n\n // Handle Selector-based usage\n if (typeof elements === 'string') {\n elements = document.querySelectorAll(elements);\n }\n\n // Handle Array-like based usage\n return Array.prototype.map.call(elements, function (element) {\n return _delegate(element, selector, type, callback, useCapture);\n });\n}\n\n/**\n * Finds closest match and invokes callback.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Function}\n */\nfunction listener(element, selector, type, callback) {\n return function(e) {\n e.delegateTarget = closest(e.target, selector);\n\n if (e.delegateTarget) {\n callback.call(element, e);\n }\n }\n}\n\nmodule.exports = delegate;\n\n\n/***/ }),\n\n/***/ 879:\n/***/ (function(__unused_webpack_module, exports) {\n\n/**\n * Check if argument is a HTML element.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.node = function(value) {\n return value !== undefined\n && value instanceof HTMLElement\n && value.nodeType === 1;\n};\n\n/**\n * Check if argument is a list of HTML elements.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.nodeList = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return value !== undefined\n && (type === '[object NodeList]' || type === '[object HTMLCollection]')\n && ('length' in value)\n && (value.length === 0 || 
exports.node(value[0]));\n};\n\n/**\n * Check if argument is a string.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.string = function(value) {\n return typeof value === 'string'\n || value instanceof String;\n};\n\n/**\n * Check if argument is a function.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.fn = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return type === '[object Function]';\n};\n\n\n/***/ }),\n\n/***/ 370:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar is = __webpack_require__(879);\nvar delegate = __webpack_require__(438);\n\n/**\n * Validates all params and calls the right\n * listener function based on its target type.\n *\n * @param {String|HTMLElement|HTMLCollection|NodeList} target\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listen(target, type, callback) {\n if (!target && !type && !callback) {\n throw new Error('Missing required arguments');\n }\n\n if (!is.string(type)) {\n throw new TypeError('Second argument must be a String');\n }\n\n if (!is.fn(callback)) {\n throw new TypeError('Third argument must be a Function');\n }\n\n if (is.node(target)) {\n return listenNode(target, type, callback);\n }\n else if (is.nodeList(target)) {\n return listenNodeList(target, type, callback);\n }\n else if (is.string(target)) {\n return listenSelector(target, type, callback);\n }\n else {\n throw new TypeError('First argument must be a String, HTMLElement, HTMLCollection, or NodeList');\n }\n}\n\n/**\n * Adds an event listener to a HTML element\n * and returns a remove listener function.\n *\n * @param {HTMLElement} node\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNode(node, type, callback) {\n node.addEventListener(type, callback);\n\n return {\n destroy: function() {\n node.removeEventListener(type, callback);\n }\n }\n}\n\n/**\n * Add an event listener 
to a list of HTML elements\n * and returns a remove listener function.\n *\n * @param {NodeList|HTMLCollection} nodeList\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNodeList(nodeList, type, callback) {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.addEventListener(type, callback);\n });\n\n return {\n destroy: function() {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.removeEventListener(type, callback);\n });\n }\n }\n}\n\n/**\n * Add an event listener to a selector\n * and returns a remove listener function.\n *\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenSelector(selector, type, callback) {\n return delegate(document.body, selector, type, callback);\n}\n\nmodule.exports = listen;\n\n\n/***/ }),\n\n/***/ 817:\n/***/ (function(module) {\n\nfunction select(element) {\n var selectedText;\n\n if (element.nodeName === 'SELECT') {\n element.focus();\n\n selectedText = element.value;\n }\n else if (element.nodeName === 'INPUT' || element.nodeName === 'TEXTAREA') {\n var isReadOnly = element.hasAttribute('readonly');\n\n if (!isReadOnly) {\n element.setAttribute('readonly', '');\n }\n\n element.select();\n element.setSelectionRange(0, element.value.length);\n\n if (!isReadOnly) {\n element.removeAttribute('readonly');\n }\n\n selectedText = element.value;\n }\n else {\n if (element.hasAttribute('contenteditable')) {\n element.focus();\n }\n\n var selection = window.getSelection();\n var range = document.createRange();\n\n range.selectNodeContents(element);\n selection.removeAllRanges();\n selection.addRange(range);\n\n selectedText = selection.toString();\n }\n\n return selectedText;\n}\n\nmodule.exports = select;\n\n\n/***/ }),\n\n/***/ 279:\n/***/ (function(module) {\n\nfunction E () {\n // Keep this empty so it's easier to inherit from\n // (via https://github.com/lipsmack from 
https://github.com/scottcorgan/tiny-emitter/issues/3)\n}\n\nE.prototype = {\n on: function (name, callback, ctx) {\n var e = this.e || (this.e = {});\n\n (e[name] || (e[name] = [])).push({\n fn: callback,\n ctx: ctx\n });\n\n return this;\n },\n\n once: function (name, callback, ctx) {\n var self = this;\n function listener () {\n self.off(name, listener);\n callback.apply(ctx, arguments);\n };\n\n listener._ = callback\n return this.on(name, listener, ctx);\n },\n\n emit: function (name) {\n var data = [].slice.call(arguments, 1);\n var evtArr = ((this.e || (this.e = {}))[name] || []).slice();\n var i = 0;\n var len = evtArr.length;\n\n for (i; i < len; i++) {\n evtArr[i].fn.apply(evtArr[i].ctx, data);\n }\n\n return this;\n },\n\n off: function (name, callback) {\n var e = this.e || (this.e = {});\n var evts = e[name];\n var liveEvents = [];\n\n if (evts && callback) {\n for (var i = 0, len = evts.length; i < len; i++) {\n if (evts[i].fn !== callback && evts[i].fn._ !== callback)\n liveEvents.push(evts[i]);\n }\n }\n\n // Remove event from queue to prevent memory leak\n // Suggested by https://github.com/lazd\n // Ref: https://github.com/scottcorgan/tiny-emitter/commit/c6ebfaa9bc973b33d110a84a307742b7cf94c953#commitcomment-5024910\n\n (liveEvents.length)\n ? 
e[name] = liveEvents\n : delete e[name];\n\n return this;\n }\n};\n\nmodule.exports = E;\nmodule.exports.TinyEmitter = E;\n\n\n/***/ })\n\n/******/ \t});\n/************************************************************************/\n/******/ \t// The module cache\n/******/ \tvar __webpack_module_cache__ = {};\n/******/ \t\n/******/ \t// The require function\n/******/ \tfunction __webpack_require__(moduleId) {\n/******/ \t\t// Check if module is in cache\n/******/ \t\tif(__webpack_module_cache__[moduleId]) {\n/******/ \t\t\treturn __webpack_module_cache__[moduleId].exports;\n/******/ \t\t}\n/******/ \t\t// Create a new module (and put it into the cache)\n/******/ \t\tvar module = __webpack_module_cache__[moduleId] = {\n/******/ \t\t\t// no module.id needed\n/******/ \t\t\t// no module.loaded needed\n/******/ \t\t\texports: {}\n/******/ \t\t};\n/******/ \t\n/******/ \t\t// Execute the module function\n/******/ \t\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n/******/ \t\n/******/ \t\t// Return the exports of the module\n/******/ \t\treturn module.exports;\n/******/ \t}\n/******/ \t\n/************************************************************************/\n/******/ \t/* webpack/runtime/compat get default export */\n/******/ \t!function() {\n/******/ \t\t// getDefaultExport function for compatibility with non-harmony modules\n/******/ \t\t__webpack_require__.n = function(module) {\n/******/ \t\t\tvar getter = module && module.__esModule ?\n/******/ \t\t\t\tfunction() { return module['default']; } :\n/******/ \t\t\t\tfunction() { return module; };\n/******/ \t\t\t__webpack_require__.d(getter, { a: getter });\n/******/ \t\t\treturn getter;\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/define property getters */\n/******/ \t!function() {\n/******/ \t\t// define getter functions for harmony exports\n/******/ \t\t__webpack_require__.d = function(exports, definition) {\n/******/ \t\t\tfor(var key in definition) 
{\n/******/ \t\t\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n/******/ \t\t\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n/******/ \t\t\t\t}\n/******/ \t\t\t}\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/hasOwnProperty shorthand */\n/******/ \t!function() {\n/******/ \t\t__webpack_require__.o = function(obj, prop) { return Object.prototype.hasOwnProperty.call(obj, prop); }\n/******/ \t}();\n/******/ \t\n/************************************************************************/\n/******/ \t// module exports must be returned from runtime so entry inlining is disabled\n/******/ \t// startup\n/******/ \t// Load entry module and return exports\n/******/ \treturn __webpack_require__(686);\n/******/ })()\n.default;\n});", "/*!\n * escape-html\n * Copyright(c) 2012-2013 TJ Holowaychuk\n * Copyright(c) 2015 Andreas Lubbe\n * Copyright(c) 2015 Tiancheng \"Timothy\" Gu\n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module variables.\n * @private\n */\n\nvar matchHtmlRegExp = /[\"'&<>]/;\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = escapeHtml;\n\n/**\n * Escape special characters in the given string of html.\n *\n * @param {string} string The string to escape for inserting into HTML\n * @return {string}\n * @public\n */\n\nfunction escapeHtml(string) {\n var str = '' + string;\n var match = matchHtmlRegExp.exec(str);\n\n if (!match) {\n return str;\n }\n\n var escape;\n var html = '';\n var index = 0;\n var lastIndex = 0;\n\n for (index = match.index; index < str.length; index++) {\n switch (str.charCodeAt(index)) {\n case 34: // \"\n escape = '"';\n break;\n case 38: // &\n escape = '&';\n break;\n case 39: // '\n escape = ''';\n break;\n case 60: // <\n escape = '<';\n break;\n case 62: // >\n escape = '>';\n break;\n default:\n continue;\n }\n\n if (lastIndex !== index) {\n html += str.substring(lastIndex, index);\n }\n\n lastIndex = 
index + 1;\n html += escape;\n }\n\n return lastIndex !== index\n ? html + str.substring(lastIndex, index)\n : html;\n}\n", "/*\n * Copyright (c) 2016-2023 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport \"focus-visible\"\n\nimport {\n EMPTY,\n NEVER,\n Observable,\n Subject,\n defer,\n delay,\n filter,\n map,\n merge,\n mergeWith,\n shareReplay,\n switchMap\n} from \"rxjs\"\n\nimport { configuration, feature } from \"./_\"\nimport {\n at,\n getActiveElement,\n getOptionalElement,\n requestJSON,\n setLocation,\n setToggle,\n watchDocument,\n watchKeyboard,\n watchLocation,\n watchLocationTarget,\n watchMedia,\n watchPrint,\n watchScript,\n watchViewport\n} from \"./browser\"\nimport {\n getComponentElement,\n getComponentElements,\n mountAnnounce,\n mountBackToTop,\n mountConsent,\n mountContent,\n mountDialog,\n mountHeader,\n mountHeaderTitle,\n mountPalette,\n mountSearch,\n mountSearchHiglight,\n 
mountSidebar,\n mountSource,\n mountTableOfContents,\n mountTabs,\n watchHeader,\n watchMain\n} from \"./components\"\nimport {\n SearchIndex,\n setupClipboardJS,\n setupInstantLoading,\n setupVersionSelector\n} from \"./integrations\"\nimport {\n patchIndeterminate,\n patchScrollfix,\n patchScrolllock\n} from \"./patches\"\nimport \"./polyfills\"\n\n/* ----------------------------------------------------------------------------\n * Functions - @todo refactor\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch search index\n *\n * @returns Search index observable\n */\nfunction fetchSearchIndex(): Observable {\n if (location.protocol === \"file:\") {\n return watchScript(\n `${new URL(\"search/search_index.js\", config.base)}`\n )\n .pipe(\n // @ts-ignore - @todo fix typings\n map(() => __index),\n shareReplay(1)\n )\n } else {\n return requestJSON(\n new URL(\"search/search_index.json\", config.base)\n )\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Application\n * ------------------------------------------------------------------------- */\n\n/* Yay, JavaScript is available */\ndocument.documentElement.classList.remove(\"no-js\")\ndocument.documentElement.classList.add(\"js\")\n\n/* Set up navigation observables and subjects */\nconst document$ = watchDocument()\nconst location$ = watchLocation()\nconst target$ = watchLocationTarget(location$)\nconst keyboard$ = watchKeyboard()\n\n/* Set up media observables */\nconst viewport$ = watchViewport()\nconst tablet$ = watchMedia(\"(min-width: 960px)\")\nconst screen$ = watchMedia(\"(min-width: 1220px)\")\nconst print$ = watchPrint()\n\n/* Retrieve search index, if search is enabled */\nconst config = configuration()\nconst index$ = document.forms.namedItem(\"search\")\n ? 
fetchSearchIndex()\n : NEVER\n\n/* Set up Clipboard.js integration */\nconst alert$ = new Subject()\nsetupClipboardJS({ alert$ })\n\n/* Set up instant loading, if enabled */\nif (feature(\"navigation.instant\"))\n setupInstantLoading({ location$, viewport$ })\n .subscribe(document$)\n\n/* Set up version selector */\nif (config.version?.provider === \"mike\")\n setupVersionSelector({ document$ })\n\n/* Always close drawer and search on navigation */\nmerge(location$, target$)\n .pipe(\n delay(125)\n )\n .subscribe(() => {\n setToggle(\"drawer\", false)\n setToggle(\"search\", false)\n })\n\n/* Set up global keyboard handlers */\nkeyboard$\n .pipe(\n filter(({ mode }) => mode === \"global\")\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Go to previous page */\n case \"p\":\n case \",\":\n const prev = getOptionalElement(\"link[rel=prev]\")\n if (typeof prev !== \"undefined\")\n setLocation(prev)\n break\n\n /* Go to next page */\n case \"n\":\n case \".\":\n const next = getOptionalElement(\"link[rel=next]\")\n if (typeof next !== \"undefined\")\n setLocation(next)\n break\n\n /* Expand navigation, see https://bit.ly/3ZjG5io */\n case \"Enter\":\n const active = getActiveElement()\n if (active instanceof HTMLLabelElement)\n active.click()\n }\n })\n\n/* Set up patches */\npatchIndeterminate({ document$, tablet$ })\npatchScrollfix({ document$ })\npatchScrolllock({ viewport$, tablet$ })\n\n/* Set up header and main area observable */\nconst header$ = watchHeader(getComponentElement(\"header\"), { viewport$ })\nconst main$ = document$\n .pipe(\n map(() => getComponentElement(\"main\")),\n switchMap(el => watchMain(el, { viewport$, header$ })),\n shareReplay(1)\n )\n\n/* Set up control component observables */\nconst control$ = merge(\n\n /* Consent */\n ...getComponentElements(\"consent\")\n .map(el => mountConsent(el, { target$ })),\n\n /* Dialog */\n ...getComponentElements(\"dialog\")\n .map(el => mountDialog(el, { alert$ })),\n\n /* Header */\n 
...getComponentElements(\"header\")\n .map(el => mountHeader(el, { viewport$, header$, main$ })),\n\n /* Color palette */\n ...getComponentElements(\"palette\")\n .map(el => mountPalette(el)),\n\n /* Search */\n ...getComponentElements(\"search\")\n .map(el => mountSearch(el, { index$, keyboard$ })),\n\n /* Repository information */\n ...getComponentElements(\"source\")\n .map(el => mountSource(el))\n)\n\n/* Set up content component observables */\nconst content$ = defer(() => merge(\n\n /* Announcement bar */\n ...getComponentElements(\"announce\")\n .map(el => mountAnnounce(el)),\n\n /* Content */\n ...getComponentElements(\"content\")\n .map(el => mountContent(el, { viewport$, target$, print$ })),\n\n /* Search highlighting */\n ...getComponentElements(\"content\")\n .map(el => feature(\"search.highlight\")\n ? mountSearchHiglight(el, { index$, location$ })\n : EMPTY\n ),\n\n /* Header title */\n ...getComponentElements(\"header-title\")\n .map(el => mountHeaderTitle(el, { viewport$, header$ })),\n\n /* Sidebar */\n ...getComponentElements(\"sidebar\")\n .map(el => el.getAttribute(\"data-md-type\") === \"navigation\"\n ? 
at(screen$, () => mountSidebar(el, { viewport$, header$, main$ }))\n : at(tablet$, () => mountSidebar(el, { viewport$, header$, main$ }))\n ),\n\n /* Navigation tabs */\n ...getComponentElements(\"tabs\")\n .map(el => mountTabs(el, { viewport$, header$ })),\n\n /* Table of contents */\n ...getComponentElements(\"toc\")\n .map(el => mountTableOfContents(el, {\n viewport$, header$, main$, target$\n })),\n\n /* Back-to-top button */\n ...getComponentElements(\"top\")\n .map(el => mountBackToTop(el, { viewport$, header$, main$, target$ }))\n))\n\n/* Set up component observables */\nconst component$ = document$\n .pipe(\n switchMap(() => content$),\n mergeWith(control$),\n shareReplay(1)\n )\n\n/* Subscribe to all components */\ncomponent$.subscribe()\n\n/* ----------------------------------------------------------------------------\n * Exports\n * ------------------------------------------------------------------------- */\n\nwindow.document$ = document$ /* Document observable */\nwindow.location$ = location$ /* Location subject */\nwindow.target$ = target$ /* Location target observable */\nwindow.keyboard$ = keyboard$ /* Keyboard observable */\nwindow.viewport$ = viewport$ /* Viewport observable */\nwindow.tablet$ = tablet$ /* Media tablet observable */\nwindow.screen$ = screen$ /* Media screen observable */\nwindow.print$ = print$ /* Media print observable */\nwindow.alert$ = alert$ /* Alert subject */\nwindow.component$ = component$ /* Component observable */\n", "/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n});\r\n\r\nexport function __exportStar(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n}\r\n\r\nexport function __spreadArray(to, from, pack) {\r\n if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {\r\n if (ar || !(i in from)) {\r\n if (!ar) ar = Array.prototype.slice.call(from, 0, i);\r\n ar[i] = from[i];\r\n }\r\n }\r\n return to.concat(ar || Array.prototype.slice.call(from));\r\n}\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? 
(this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nvar __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\r\n}\r\n", "/**\n * Returns true if the object is a function.\n * @param value The value to check\n */\nexport function isFunction(value: any): value is (...args: any[]) => any {\n return typeof value === 'function';\n}\n", "/**\n * Used to create Error subclasses until the community moves away from ES5.\n *\n * This is because compiling from TypeScript down to ES5 has issues with subclassing Errors\n * as well as other built-in types: https://github.com/Microsoft/TypeScript/issues/12123\n *\n * @param createImpl A factory function to create the actual constructor implementation. The returned\n * function should be a named function that calls `_super` internally.\n */\nexport function createErrorClass(createImpl: (_super: any) => any): T {\n const _super = (instance: any) => {\n Error.call(instance);\n instance.stack = new Error().stack;\n };\n\n const ctorFunc = createImpl(_super);\n ctorFunc.prototype = Object.create(Error.prototype);\n ctorFunc.prototype.constructor = ctorFunc;\n return ctorFunc;\n}\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface UnsubscriptionError extends Error {\n readonly errors: any[];\n}\n\nexport interface UnsubscriptionErrorCtor {\n /**\n * @deprecated Internal implementation detail. 
Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (errors: any[]): UnsubscriptionError;\n}\n\n/**\n * An error thrown when one or more errors have occurred during the\n * `unsubscribe` of a {@link Subscription}.\n */\nexport const UnsubscriptionError: UnsubscriptionErrorCtor = createErrorClass(\n (_super) =>\n function UnsubscriptionErrorImpl(this: any, errors: (Error | string)[]) {\n _super(this);\n this.message = errors\n ? `${errors.length} errors occurred during unsubscription:\n${errors.map((err, i) => `${i + 1}) ${err.toString()}`).join('\\n ')}`\n : '';\n this.name = 'UnsubscriptionError';\n this.errors = errors;\n }\n);\n", "/**\n * Removes an item from an array, mutating it.\n * @param arr The array to remove the item from\n * @param item The item to remove\n */\nexport function arrRemove(arr: T[] | undefined | null, item: T) {\n if (arr) {\n const index = arr.indexOf(item);\n 0 <= index && arr.splice(index, 1);\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { UnsubscriptionError } from './util/UnsubscriptionError';\nimport { SubscriptionLike, TeardownLogic, Unsubscribable } from './types';\nimport { arrRemove } from './util/arrRemove';\n\n/**\n * Represents a disposable resource, such as the execution of an Observable. 
A\n * Subscription has one important method, `unsubscribe`, that takes no argument\n * and just disposes the resource held by the subscription.\n *\n * Additionally, subscriptions may be grouped together through the `add()`\n * method, which will attach a child Subscription to the current Subscription.\n * When a Subscription is unsubscribed, all its children (and its grandchildren)\n * will be unsubscribed as well.\n *\n * @class Subscription\n */\nexport class Subscription implements SubscriptionLike {\n /** @nocollapse */\n public static EMPTY = (() => {\n const empty = new Subscription();\n empty.closed = true;\n return empty;\n })();\n\n /**\n * A flag to indicate whether this Subscription has already been unsubscribed.\n */\n public closed = false;\n\n private _parentage: Subscription[] | Subscription | null = null;\n\n /**\n * The list of registered finalizers to execute upon unsubscription. Adding and removing from this\n * list occurs in the {@link #add} and {@link #remove} methods.\n */\n private _finalizers: Exclude[] | null = null;\n\n /**\n * @param initialTeardown A function executed first as part of the finalization\n * process that is kicked off when {@link #unsubscribe} is called.\n */\n constructor(private initialTeardown?: () => void) {}\n\n /**\n * Disposes the resources held by the subscription. 
May, for instance, cancel\n * an ongoing Observable execution or cancel any other type of work that\n * started when the Subscription was created.\n * @return {void}\n */\n unsubscribe(): void {\n let errors: any[] | undefined;\n\n if (!this.closed) {\n this.closed = true;\n\n // Remove this from it's parents.\n const { _parentage } = this;\n if (_parentage) {\n this._parentage = null;\n if (Array.isArray(_parentage)) {\n for (const parent of _parentage) {\n parent.remove(this);\n }\n } else {\n _parentage.remove(this);\n }\n }\n\n const { initialTeardown: initialFinalizer } = this;\n if (isFunction(initialFinalizer)) {\n try {\n initialFinalizer();\n } catch (e) {\n errors = e instanceof UnsubscriptionError ? e.errors : [e];\n }\n }\n\n const { _finalizers } = this;\n if (_finalizers) {\n this._finalizers = null;\n for (const finalizer of _finalizers) {\n try {\n execFinalizer(finalizer);\n } catch (err) {\n errors = errors ?? [];\n if (err instanceof UnsubscriptionError) {\n errors = [...errors, ...err.errors];\n } else {\n errors.push(err);\n }\n }\n }\n }\n\n if (errors) {\n throw new UnsubscriptionError(errors);\n }\n }\n }\n\n /**\n * Adds a finalizer to this subscription, so that finalization will be unsubscribed/called\n * when this subscription is unsubscribed. If this subscription is already {@link #closed},\n * because it has already been unsubscribed, then whatever finalizer is passed to it\n * will automatically be executed (unless the finalizer itself is also a closed subscription).\n *\n * Closed Subscriptions cannot be added as finalizers to any subscription. Adding a closed\n * subscription to a any subscription will result in no operation. (A noop).\n *\n * Adding a subscription to itself, or adding `null` or `undefined` will not perform any\n * operation at all. (A noop).\n *\n * `Subscription` instances that are added to this instance will automatically remove themselves\n * if they are unsubscribed. 
Functions and {@link Unsubscribable} objects that you wish to remove\n * will need to be removed manually with {@link #remove}\n *\n * @param teardown The finalization logic to add to this subscription.\n */\n add(teardown: TeardownLogic): void {\n // Only add the finalizer if it's not undefined\n // and don't add a subscription to itself.\n if (teardown && teardown !== this) {\n if (this.closed) {\n // If this subscription is already closed,\n // execute whatever finalizer is handed to it automatically.\n execFinalizer(teardown);\n } else {\n if (teardown instanceof Subscription) {\n // We don't add closed subscriptions, and we don't add the same subscription\n // twice. Subscription unsubscribe is idempotent.\n if (teardown.closed || teardown._hasParent(this)) {\n return;\n }\n teardown._addParent(this);\n }\n (this._finalizers = this._finalizers ?? []).push(teardown);\n }\n }\n }\n\n /**\n * Checks to see if a this subscription already has a particular parent.\n * This will signal that this subscription has already been added to the parent in question.\n * @param parent the parent to check for\n */\n private _hasParent(parent: Subscription) {\n const { _parentage } = this;\n return _parentage === parent || (Array.isArray(_parentage) && _parentage.includes(parent));\n }\n\n /**\n * Adds a parent to this subscription so it can be removed from the parent if it\n * unsubscribes on it's own.\n *\n * NOTE: THIS ASSUMES THAT {@link _hasParent} HAS ALREADY BEEN CHECKED.\n * @param parent The parent subscription to add\n */\n private _addParent(parent: Subscription) {\n const { _parentage } = this;\n this._parentage = Array.isArray(_parentage) ? (_parentage.push(parent), _parentage) : _parentage ? 
[_parentage, parent] : parent;\n }\n\n /**\n * Called on a child when it is removed via {@link #remove}.\n * @param parent The parent to remove\n */\n private _removeParent(parent: Subscription) {\n const { _parentage } = this;\n if (_parentage === parent) {\n this._parentage = null;\n } else if (Array.isArray(_parentage)) {\n arrRemove(_parentage, parent);\n }\n }\n\n /**\n * Removes a finalizer from this subscription that was previously added with the {@link #add} method.\n *\n * Note that `Subscription` instances, when unsubscribed, will automatically remove themselves\n * from every other `Subscription` they have been added to. This means that using the `remove` method\n * is not a common thing and should be used thoughtfully.\n *\n * If you add the same finalizer instance of a function or an unsubscribable object to a `Subscription` instance\n * more than once, you will need to call `remove` the same number of times to remove all instances.\n *\n * All finalizer instances are removed to free up memory upon unsubscription.\n *\n * @param teardown The finalizer to remove from this subscription\n */\n remove(teardown: Exclude): void {\n const { _finalizers } = this;\n _finalizers && arrRemove(_finalizers, teardown);\n\n if (teardown instanceof Subscription) {\n teardown._removeParent(this);\n }\n }\n}\n\nexport const EMPTY_SUBSCRIPTION = Subscription.EMPTY;\n\nexport function isSubscription(value: any): value is Subscription {\n return (\n value instanceof Subscription ||\n (value && 'closed' in value && isFunction(value.remove) && isFunction(value.add) && isFunction(value.unsubscribe))\n );\n}\n\nfunction execFinalizer(finalizer: Unsubscribable | (() => void)) {\n if (isFunction(finalizer)) {\n finalizer();\n } else {\n finalizer.unsubscribe();\n }\n}\n", "import { Subscriber } from './Subscriber';\nimport { ObservableNotification } from './types';\n\n/**\n * The {@link GlobalConfig} object for RxJS. 
It is used to configure things\n * like how to react on unhandled errors.\n */\nexport const config: GlobalConfig = {\n onUnhandledError: null,\n onStoppedNotification: null,\n Promise: undefined,\n useDeprecatedSynchronousErrorHandling: false,\n useDeprecatedNextContext: false,\n};\n\n/**\n * The global configuration object for RxJS, used to configure things\n * like how to react on unhandled errors. Accessible via {@link config}\n * object.\n */\nexport interface GlobalConfig {\n /**\n * A registration point for unhandled errors from RxJS. These are errors that\n * cannot were not handled by consuming code in the usual subscription path. For\n * example, if you have this configured, and you subscribe to an observable without\n * providing an error handler, errors from that subscription will end up here. This\n * will _always_ be called asynchronously on another job in the runtime. This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onUnhandledError: ((err: any) => void) | null;\n\n /**\n * A registration point for notifications that cannot be sent to subscribers because they\n * have completed, errored or have been explicitly unsubscribed. By default, next, complete\n * and error notifications sent to stopped subscribers are noops. However, sometimes callers\n * might want a different behavior. For example, with sources that attempt to report errors\n * to stopped subscribers, a caller can configure RxJS to throw an unhandled error instead.\n * This will _always_ be called asynchronously on another job in the runtime. 
This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onStoppedNotification: ((notification: ObservableNotification, subscriber: Subscriber) => void) | null;\n\n /**\n * The promise constructor used by default for {@link Observable#toPromise toPromise} and {@link Observable#forEach forEach}\n * methods.\n *\n * @deprecated As of version 8, RxJS will no longer support this sort of injection of a\n * Promise constructor. If you need a Promise implementation other than native promises,\n * please polyfill/patch Promise as you see appropriate. Will be removed in v8.\n */\n Promise?: PromiseConstructorLike;\n\n /**\n * If true, turns on synchronous error rethrowing, which is a deprecated behavior\n * in v6 and higher. This behavior enables bad patterns like wrapping a subscribe\n * call in a try/catch block. It also enables producer interference, a nasty bug\n * where a multicast can be broken for all observers by a downstream consumer with\n * an unhandled error. DO NOT USE THIS FLAG UNLESS IT'S NEEDED TO BUY TIME\n * FOR MIGRATION REASONS.\n *\n * @deprecated As of version 8, RxJS will no longer support synchronous throwing\n * of unhandled errors. All errors will be thrown on a separate call stack to prevent bad\n * behaviors described above. 
Will be removed in v8.\n */\n useDeprecatedSynchronousErrorHandling: boolean;\n\n /**\n * If true, enables an as-of-yet undocumented feature from v5: The ability to access\n * `unsubscribe()` via `this` context in `next` functions created in observers passed\n * to `subscribe`.\n *\n * This is being removed because the performance was severely problematic, and it could also cause\n * issues when types other than POJOs are passed to subscribe as subscribers, as they will likely have\n * their `this` context overwritten.\n *\n * @deprecated As of version 8, RxJS will no longer support altering the\n * context of next functions provided as part of an observer to Subscribe. Instead,\n * you will have access to a subscription or a signal or token that will allow you to do things like\n * unsubscribe and test closed status. Will be removed in v8.\n */\n useDeprecatedNextContext: boolean;\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetTimeoutFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearTimeoutFunction = (handle: TimerHandle) => void;\n\ninterface TimeoutProvider {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n delegate:\n | {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n }\n | undefined;\n}\n\nexport const timeoutProvider: TimeoutProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setTimeout(handler: () => void, timeout?: number, ...args) {\n const { delegate } = timeoutProvider;\n if (delegate?.setTimeout) {\n return delegate.setTimeout(handler, timeout, ...args);\n }\n return setTimeout(handler, timeout, ...args);\n },\n clearTimeout(handle) {\n const { delegate } = timeoutProvider;\n return (delegate?.clearTimeout || clearTimeout)(handle as any);\n },\n delegate: undefined,\n};\n", "import { config } from '../config';\nimport { 
timeoutProvider } from '../scheduler/timeoutProvider';\n\n/**\n * Handles an error on another job either with the user-configured {@link onUnhandledError},\n * or by throwing it on that new job so it can be picked up by `window.onerror`, `process.on('error')`, etc.\n *\n * This should be called whenever there is an error that is out-of-band with the subscription\n * or when an error hits a terminal boundary of the subscription and no error handler was provided.\n *\n * @param err the error to report\n */\nexport function reportUnhandledError(err: any) {\n timeoutProvider.setTimeout(() => {\n const { onUnhandledError } = config;\n if (onUnhandledError) {\n // Execute the user-configured error handler.\n onUnhandledError(err);\n } else {\n // Throw so it is picked up by the runtime's uncaught error mechanism.\n throw err;\n }\n });\n}\n", "/* tslint:disable:no-empty */\nexport function noop() { }\n", "import { CompleteNotification, NextNotification, ErrorNotification } from './types';\n\n/**\n * A completion object optimized for memory use and created to be the\n * same \"shape\" as other notifications in v8.\n * @internal\n */\nexport const COMPLETE_NOTIFICATION = (() => createNotification('C', undefined, undefined) as CompleteNotification)();\n\n/**\n * Internal use only. Creates an optimized error notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function errorNotification(error: any): ErrorNotification {\n return createNotification('E', undefined, error) as any;\n}\n\n/**\n * Internal use only. 
Creates an optimized next notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function nextNotification(value: T) {\n return createNotification('N', value, undefined) as NextNotification;\n}\n\n/**\n * Ensures that all notifications created internally have the same \"shape\" in v8.\n *\n * TODO: This is only exported to support a crazy legacy test in `groupBy`.\n * @internal\n */\nexport function createNotification(kind: 'N' | 'E' | 'C', value: any, error: any) {\n return {\n kind,\n value,\n error,\n };\n}\n", "import { config } from '../config';\n\nlet context: { errorThrown: boolean; error: any } | null = null;\n\n/**\n * Handles dealing with errors for super-gross mode. Creates a context, in which\n * any synchronously thrown errors will be passed to {@link captureError}. Which\n * will record the error such that it will be rethrown after the call back is complete.\n * TODO: Remove in v8\n * @param cb An immediately executed function.\n */\nexport function errorContext(cb: () => void) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n const isRoot = !context;\n if (isRoot) {\n context = { errorThrown: false, error: null };\n }\n cb();\n if (isRoot) {\n const { errorThrown, error } = context!;\n context = null;\n if (errorThrown) {\n throw error;\n }\n }\n } else {\n // This is the general non-deprecated path for everyone that\n // isn't crazy enough to use super-gross mode (useDeprecatedSynchronousErrorHandling)\n cb();\n }\n}\n\n/**\n * Captures errors only in super-gross mode.\n * @param err the error to capture\n */\nexport function captureError(err: any) {\n if (config.useDeprecatedSynchronousErrorHandling && context) {\n context.errorThrown = true;\n context.error = err;\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { Observer, ObservableNotification } from './types';\nimport { isSubscription, Subscription } from './Subscription';\nimport { config } from './config';\nimport { 
reportUnhandledError } from './util/reportUnhandledError';\nimport { noop } from './util/noop';\nimport { nextNotification, errorNotification, COMPLETE_NOTIFICATION } from './NotificationFactories';\nimport { timeoutProvider } from './scheduler/timeoutProvider';\nimport { captureError } from './util/errorContext';\n\n/**\n * Implements the {@link Observer} interface and extends the\n * {@link Subscription} class. While the {@link Observer} is the public API for\n * consuming the values of an {@link Observable}, all Observers get converted to\n * a Subscriber, in order to provide Subscription-like capabilities such as\n * `unsubscribe`. Subscriber is a common type in RxJS, and crucial for\n * implementing operators, but it is rarely used as a public API.\n *\n * @class Subscriber\n */\nexport class Subscriber extends Subscription implements Observer {\n /**\n * A static factory for a Subscriber, given a (potentially partial) definition\n * of an Observer.\n * @param next The `next` callback of an Observer.\n * @param error The `error` callback of an\n * Observer.\n * @param complete The `complete` callback of an\n * Observer.\n * @return A Subscriber wrapping the (partially defined)\n * Observer represented by the given arguments.\n * @nocollapse\n * @deprecated Do not use. Will be removed in v8. There is no replacement for this\n * method, and there is no reason to be creating instances of `Subscriber` directly.\n * If you have a specific use case, please file an issue.\n */\n static create(next?: (x?: T) => void, error?: (e?: any) => void, complete?: () => void): Subscriber {\n return new SafeSubscriber(next, error, complete);\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n protected isStopped: boolean = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. 
*/\n protected destination: Subscriber | Observer; // this `any` is the escape hatch to erase extra type param (e.g. R)\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * There is no reason to directly create an instance of Subscriber. This type is exported for typings reasons.\n */\n constructor(destination?: Subscriber | Observer) {\n super();\n if (destination) {\n this.destination = destination;\n // Automatically chain subscriptions together here.\n // if destination is a Subscription, then it is a Subscriber.\n if (isSubscription(destination)) {\n destination.add(this);\n }\n } else {\n this.destination = EMPTY_OBSERVER;\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `next` from\n * the Observable, with a value. The Observable may call this method 0 or more\n * times.\n * @param {T} [value] The `next` value.\n * @return {void}\n */\n next(value?: T): void {\n if (this.isStopped) {\n handleStoppedNotification(nextNotification(value), this);\n } else {\n this._next(value!);\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `error` from\n * the Observable, with an attached `Error`. Notifies the Observer that\n * the Observable has experienced an error condition.\n * @param {any} [err] The `error` exception.\n * @return {void}\n */\n error(err?: any): void {\n if (this.isStopped) {\n handleStoppedNotification(errorNotification(err), this);\n } else {\n this.isStopped = true;\n this._error(err);\n }\n }\n\n /**\n * The {@link Observer} callback to receive a valueless notification of type\n * `complete` from the Observable. 
Notifies the Observer that the Observable\n * has finished sending push-based notifications.\n * @return {void}\n */\n complete(): void {\n if (this.isStopped) {\n handleStoppedNotification(COMPLETE_NOTIFICATION, this);\n } else {\n this.isStopped = true;\n this._complete();\n }\n }\n\n unsubscribe(): void {\n if (!this.closed) {\n this.isStopped = true;\n super.unsubscribe();\n this.destination = null!;\n }\n }\n\n protected _next(value: T): void {\n this.destination.next(value);\n }\n\n protected _error(err: any): void {\n try {\n this.destination.error(err);\n } finally {\n this.unsubscribe();\n }\n }\n\n protected _complete(): void {\n try {\n this.destination.complete();\n } finally {\n this.unsubscribe();\n }\n }\n}\n\n/**\n * This bind is captured here because we want to be able to have\n * compatibility with monoid libraries that tend to use a method named\n * `bind`. In particular, a library called Monio requires this.\n */\nconst _bind = Function.prototype.bind;\n\nfunction bind any>(fn: Fn, thisArg: any): Fn {\n return _bind.call(fn, thisArg);\n}\n\n/**\n * Internal optimization only, DO NOT EXPOSE.\n * @internal\n */\nclass ConsumerObserver implements Observer {\n constructor(private partialObserver: Partial>) {}\n\n next(value: T): void {\n const { partialObserver } = this;\n if (partialObserver.next) {\n try {\n partialObserver.next(value);\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n\n error(err: any): void {\n const { partialObserver } = this;\n if (partialObserver.error) {\n try {\n partialObserver.error(err);\n } catch (error) {\n handleUnhandledError(error);\n }\n } else {\n handleUnhandledError(err);\n }\n }\n\n complete(): void {\n const { partialObserver } = this;\n if (partialObserver.complete) {\n try {\n partialObserver.complete();\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n}\n\nexport class SafeSubscriber extends Subscriber {\n constructor(\n observerOrNext?: Partial> | ((value: T) => void) | 
null,\n error?: ((e?: any) => void) | null,\n complete?: (() => void) | null\n ) {\n super();\n\n let partialObserver: Partial>;\n if (isFunction(observerOrNext) || !observerOrNext) {\n // The first argument is a function, not an observer. The next\n // two arguments *could* be observers, or they could be empty.\n partialObserver = {\n next: (observerOrNext ?? undefined) as (((value: T) => void) | undefined),\n error: error ?? undefined,\n complete: complete ?? undefined,\n };\n } else {\n // The first argument is a partial observer.\n let context: any;\n if (this && config.useDeprecatedNextContext) {\n // This is a deprecated path that made `this.unsubscribe()` available in\n // next handler functions passed to subscribe. This only exists behind a flag\n // now, as it is *very* slow.\n context = Object.create(observerOrNext);\n context.unsubscribe = () => this.unsubscribe();\n partialObserver = {\n next: observerOrNext.next && bind(observerOrNext.next, context),\n error: observerOrNext.error && bind(observerOrNext.error, context),\n complete: observerOrNext.complete && bind(observerOrNext.complete, context),\n };\n } else {\n // The \"normal\" path. 
Just use the partial observer directly.\n partialObserver = observerOrNext;\n }\n }\n\n // Wrap the partial observer to ensure it's a full observer, and\n // make sure proper error handling is accounted for.\n this.destination = new ConsumerObserver(partialObserver);\n }\n}\n\nfunction handleUnhandledError(error: any) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n captureError(error);\n } else {\n // Ideal path, we report this as an unhandled error,\n // which is thrown on a new call stack.\n reportUnhandledError(error);\n }\n}\n\n/**\n * An error handler used when no error handler was supplied\n * to the SafeSubscriber -- meaning no error handler was supplied\n * do the `subscribe` call on our observable.\n * @param err The error to handle\n */\nfunction defaultErrorHandler(err: any) {\n throw err;\n}\n\n/**\n * A handler for notifications that cannot be sent to a stopped subscriber.\n * @param notification The notification being sent\n * @param subscriber The stopped subscriber\n */\nfunction handleStoppedNotification(notification: ObservableNotification, subscriber: Subscriber) {\n const { onStoppedNotification } = config;\n onStoppedNotification && timeoutProvider.setTimeout(() => onStoppedNotification(notification, subscriber));\n}\n\n/**\n * The observer used as a stub for subscriptions where the user did not\n * pass any arguments to `subscribe`. Comes with the default error handling\n * behavior.\n */\nexport const EMPTY_OBSERVER: Readonly> & { closed: true } = {\n closed: true,\n next: noop,\n error: defaultErrorHandler,\n complete: noop,\n};\n", "/**\n * Symbol.observable or a string \"@@observable\". 
Used for interop\n *\n * @deprecated We will no longer be exporting this symbol in upcoming versions of RxJS.\n * Instead polyfill and use Symbol.observable directly *or* use https://www.npmjs.com/package/symbol-observable\n */\nexport const observable: string | symbol = (() => (typeof Symbol === 'function' && Symbol.observable) || '@@observable')();\n", "/**\n * This function takes one parameter and just returns it. Simply put,\n * this is like `(x: T): T => x`.\n *\n * ## Examples\n *\n * This is useful in some cases when using things like `mergeMap`\n *\n * ```ts\n * import { interval, take, map, range, mergeMap, identity } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(5));\n *\n * const result$ = source$.pipe(\n * map(i => range(i)),\n * mergeMap(identity) // same as mergeMap(x => x)\n * );\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * Or when you want to selectively apply an operator\n *\n * ```ts\n * import { interval, take, identity } from 'rxjs';\n *\n * const shouldLimit = () => Math.random() < 0.5;\n *\n * const source$ = interval(1000);\n *\n * const result$ = source$.pipe(shouldLimit() ? 
take(5) : identity);\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * @param x Any value that is returned by this function\n * @returns The value passed as the first parameter to this function\n */\nexport function identity(x: T): T {\n return x;\n}\n", "import { identity } from './identity';\nimport { UnaryFunction } from '../types';\n\nexport function pipe(): typeof identity;\nexport function pipe(fn1: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction, fn3: UnaryFunction): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction,\n ...fns: UnaryFunction[]\n): 
UnaryFunction;\n\n/**\n * pipe() can be called on one or more functions, each of which can take one argument (\"UnaryFunction\")\n * and uses it to return a value.\n * It returns a function that takes one argument, passes it to the first UnaryFunction, and then\n * passes the result to the next one, passes that result to the next one, and so on. \n */\nexport function pipe(...fns: Array>): UnaryFunction {\n return pipeFromArray(fns);\n}\n\n/** @internal */\nexport function pipeFromArray(fns: Array>): UnaryFunction {\n if (fns.length === 0) {\n return identity as UnaryFunction;\n }\n\n if (fns.length === 1) {\n return fns[0];\n }\n\n return function piped(input: T): R {\n return fns.reduce((prev: any, fn: UnaryFunction) => fn(prev), input as any);\n };\n}\n", "import { Operator } from './Operator';\nimport { SafeSubscriber, Subscriber } from './Subscriber';\nimport { isSubscription, Subscription } from './Subscription';\nimport { TeardownLogic, OperatorFunction, Subscribable, Observer } from './types';\nimport { observable as Symbol_observable } from './symbol/observable';\nimport { pipeFromArray } from './util/pipe';\nimport { config } from './config';\nimport { isFunction } from './util/isFunction';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A representation of any set of values over any amount of time. This is the most basic building block\n * of RxJS.\n *\n * @class Observable\n */\nexport class Observable implements Subscribable {\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n */\n source: Observable | undefined;\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n */\n operator: Operator | undefined;\n\n /**\n * @constructor\n * @param {Function} subscribe the function that is called when the Observable is\n * initially subscribed to. 
This function is given a Subscriber, to which new values\n * can be `next`ed, or an `error` method can be called to raise an error, or\n * `complete` can be called to notify of a successful completion.\n */\n constructor(subscribe?: (this: Observable, subscriber: Subscriber) => TeardownLogic) {\n if (subscribe) {\n this._subscribe = subscribe;\n }\n }\n\n // HACK: Since TypeScript inherits static properties too, we have to\n // fight against TypeScript here so Subject can have a different static create signature\n /**\n * Creates a new Observable by calling the Observable constructor\n * @owner Observable\n * @method create\n * @param {Function} subscribe? the subscriber function to be passed to the Observable constructor\n * @return {Observable} a new observable\n * @nocollapse\n * @deprecated Use `new Observable()` instead. Will be removed in v8.\n */\n static create: (...args: any[]) => any = (subscribe?: (subscriber: Subscriber) => TeardownLogic) => {\n return new Observable(subscribe);\n };\n\n /**\n * Creates a new Observable, with this Observable instance as the source, and the passed\n * operator defined as the new observable's operator.\n * @method lift\n * @param operator the operator defining the operation to take on the observable\n * @return a new observable with the Operator applied\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * If you have implemented an operator using `lift`, it is recommended that you create an\n * operator by simply returning `new Observable()` directly. 
See \"Creating new operators from\n * scratch\" section here: https://rxjs.dev/guide/operators\n */\n lift(operator?: Operator): Observable {\n const observable = new Observable();\n observable.source = this;\n observable.operator = operator;\n return observable;\n }\n\n subscribe(observerOrNext?: Partial> | ((value: T) => void)): Subscription;\n /** @deprecated Instead of passing separate callback arguments, use an observer argument. Signatures taking separate callback arguments will be removed in v8. Details: https://rxjs.dev/deprecations/subscribe-arguments */\n subscribe(next?: ((value: T) => void) | null, error?: ((error: any) => void) | null, complete?: (() => void) | null): Subscription;\n /**\n * Invokes an execution of an Observable and registers Observer handlers for notifications it will emit.\n *\n * Use it when you have all these Observables, but still nothing is happening.\n *\n * `subscribe` is not a regular operator, but a method that calls Observable's internal `subscribe` function. It\n * might be for example a function that you passed to Observable's constructor, but most of the time it is\n * a library implementation, which defines what will be emitted by an Observable, and when it be will emitted. This means\n * that calling `subscribe` is actually the moment when Observable starts its work, not when it is created, as it is often\n * the thought.\n *\n * Apart from starting the execution of an Observable, this method allows you to listen for values\n * that an Observable emits, as well as for when it completes or errors. You can achieve this in two\n * of the following ways.\n *\n * The first way is creating an object that implements {@link Observer} interface. It should have methods\n * defined by that interface, but note that it should be just a regular JavaScript object, which you can create\n * yourself in any way you want (ES6 class, classic function constructor, object literal etc.). 
In particular, do\n * not attempt to use any RxJS implementation details to create Observers - you don't need them. Remember also\n * that your object does not have to implement all methods. If you find yourself creating a method that doesn't\n * do anything, you can simply omit it. Note however, if the `error` method is not provided and an error happens,\n * it will be thrown asynchronously. Errors thrown asynchronously cannot be caught using `try`/`catch`. Instead,\n * use the {@link onUnhandledError} configuration option or use a runtime handler (like `window.onerror` or\n * `process.on('error)`) to be notified of unhandled errors. Because of this, it's recommended that you provide\n * an `error` method to avoid missing thrown errors.\n *\n * The second way is to give up on Observer object altogether and simply provide callback functions in place of its methods.\n * This means you can provide three functions as arguments to `subscribe`, where the first function is equivalent\n * of a `next` method, the second of an `error` method and the third of a `complete` method. Just as in case of an Observer,\n * if you do not need to listen for something, you can omit a function by passing `undefined` or `null`,\n * since `subscribe` recognizes these functions by where they were placed in function call. When it comes\n * to the `error` function, as with an Observer, if not provided, errors emitted by an Observable will be thrown asynchronously.\n *\n * You can, however, subscribe with no parameters at all. This may be the case where you're not interested in terminal events\n * and you also handled emissions internally by using operators (e.g. using `tap`).\n *\n * Whichever style of calling `subscribe` you use, in both cases it returns a Subscription object.\n * This object allows you to call `unsubscribe` on it, which in turn will stop the work that an Observable does and will clean\n * up all resources that an Observable used. 
Note that cancelling a subscription will not call `complete` callback\n * provided to `subscribe` function, which is reserved for a regular completion signal that comes from an Observable.\n *\n * Remember that callbacks provided to `subscribe` are not guaranteed to be called asynchronously.\n * It is an Observable itself that decides when these functions will be called. For example {@link of}\n * by default emits all its values synchronously. Always check documentation for how given Observable\n * will behave when subscribed and if its default behavior can be modified with a `scheduler`.\n *\n * #### Examples\n *\n * Subscribe with an {@link guide/observer Observer}\n *\n * ```ts\n * import { of } from 'rxjs';\n *\n * const sumObserver = {\n * sum: 0,\n * next(value) {\n * console.log('Adding: ' + value);\n * this.sum = this.sum + value;\n * },\n * error() {\n * // We actually could just remove this method,\n * // since we do not really care about errors right now.\n * },\n * complete() {\n * console.log('Sum equals: ' + this.sum);\n * }\n * };\n *\n * of(1, 2, 3) // Synchronously emits 1, 2, 3 and then completes.\n * .subscribe(sumObserver);\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Subscribe with functions ({@link deprecations/subscribe-arguments deprecated})\n *\n * ```ts\n * import { of } from 'rxjs'\n *\n * let sum = 0;\n *\n * of(1, 2, 3).subscribe(\n * value => {\n * console.log('Adding: ' + value);\n * sum = sum + value;\n * },\n * undefined,\n * () => console.log('Sum equals: ' + sum)\n * );\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Cancel a subscription\n *\n * ```ts\n * import { interval } from 'rxjs';\n *\n * const subscription = interval(1000).subscribe({\n * next(num) {\n * console.log(num)\n * },\n * complete() {\n * // Will not be called, even when cancelling subscription.\n * console.log('completed!');\n * 
}\n * });\n *\n * setTimeout(() => {\n * subscription.unsubscribe();\n * console.log('unsubscribed!');\n * }, 2500);\n *\n * // Logs:\n * // 0 after 1s\n * // 1 after 2s\n * // 'unsubscribed!' after 2.5s\n * ```\n *\n * @param {Observer|Function} observerOrNext (optional) Either an observer with methods to be called,\n * or the first of three possible handlers, which is the handler for each value emitted from the subscribed\n * Observable.\n * @param {Function} error (optional) A handler for a terminal event resulting from an error. If no error handler is provided,\n * the error will be thrown asynchronously as unhandled.\n * @param {Function} complete (optional) A handler for a terminal event resulting from successful completion.\n * @return {Subscription} a subscription reference to the registered handlers\n * @method subscribe\n */\n subscribe(\n observerOrNext?: Partial> | ((value: T) => void) | null,\n error?: ((error: any) => void) | null,\n complete?: (() => void) | null\n ): Subscription {\n const subscriber = isSubscriber(observerOrNext) ? observerOrNext : new SafeSubscriber(observerOrNext, error, complete);\n\n errorContext(() => {\n const { operator, source } = this;\n subscriber.add(\n operator\n ? // We're dealing with a subscription in the\n // operator chain to one of our lifted operators.\n operator.call(subscriber, source)\n : source\n ? // If `source` has a value, but `operator` does not, something that\n // had intimate knowledge of our API, like our `Subject`, must have\n // set it. 
We're going to just call `_subscribe` directly.\n this._subscribe(subscriber)\n : // In all other cases, we're likely wrapping a user-provided initializer\n // function, so we need to catch errors and handle them appropriately.\n this._trySubscribe(subscriber)\n );\n });\n\n return subscriber;\n }\n\n /** @internal */\n protected _trySubscribe(sink: Subscriber): TeardownLogic {\n try {\n return this._subscribe(sink);\n } catch (err) {\n // We don't need to return anything in this case,\n // because it's just going to try to `add()` to a subscription\n // above.\n sink.error(err);\n }\n }\n\n /**\n * Used as a NON-CANCELLABLE means of subscribing to an observable, for use with\n * APIs that expect promises, like `async/await`. You cannot unsubscribe from this.\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. 
To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * #### Example\n *\n * ```ts\n * import { interval, take } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(4));\n *\n * async function getTotal() {\n * let total = 0;\n *\n * await source$.forEach(value => {\n * total += value;\n * console.log('observable -> ' + value);\n * });\n *\n * return total;\n * }\n *\n * getTotal().then(\n * total => console.log('Total: ' + total)\n * );\n *\n * // Expected:\n * // 'observable -> 0'\n * // 'observable -> 1'\n * // 'observable -> 2'\n * // 'observable -> 3'\n * // 'Total: 6'\n * ```\n *\n * @param next a handler for each value emitted by the observable\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n */\n forEach(next: (value: T) => void): Promise;\n\n /**\n * @param next a handler for each value emitted by the observable\n * @param promiseCtor a constructor function used to instantiate the Promise\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n * @deprecated Passing a Promise constructor will no longer be available\n * in upcoming versions of RxJS. This is because it adds weight to the library, for very\n * little benefit. If you need this functionality, it is recommended that you either\n * polyfill Promise, or you create an adapter to convert the returned native promise\n * to whatever promise implementation you wanted. 
Will be removed in v8.\n */\n forEach(next: (value: T) => void, promiseCtor: PromiseConstructorLike): Promise;\n\n forEach(next: (value: T) => void, promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n const subscriber = new SafeSubscriber({\n next: (value) => {\n try {\n next(value);\n } catch (err) {\n reject(err);\n subscriber.unsubscribe();\n }\n },\n error: reject,\n complete: resolve,\n });\n this.subscribe(subscriber);\n }) as Promise;\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): TeardownLogic {\n return this.source?.subscribe(subscriber);\n }\n\n /**\n * An interop point defined by the es7-observable spec https://github.com/zenparsing/es-observable\n * @method Symbol.observable\n * @return {Observable} this instance of the observable\n */\n [Symbol_observable]() {\n return this;\n }\n\n /* tslint:disable:max-line-length */\n pipe(): Observable;\n pipe(op1: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction, op3: OperatorFunction): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: 
OperatorFunction,\n op8: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction,\n ...operations: OperatorFunction[]\n ): Observable;\n /* tslint:enable:max-line-length */\n\n /**\n * Used to stitch together functional operators into a chain.\n * @method pipe\n * @return {Observable} the Observable result of all of the operators having\n * been called in the order they were passed in.\n *\n * ## Example\n *\n * ```ts\n * import { interval, filter, map, scan } from 'rxjs';\n *\n * interval(1000)\n * .pipe(\n * filter(x => x % 2 === 0),\n * map(x => x + x),\n * scan((acc, x) => acc + x)\n * )\n * .subscribe(x => console.log(x));\n * ```\n */\n pipe(...operations: OperatorFunction[]): Observable {\n return pipeFromArray(operations)(this);\n }\n\n /* tslint:disable:max-line-length */\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: typeof Promise): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. 
Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: PromiseConstructorLike): Promise;\n /* tslint:enable:max-line-length */\n\n /**\n * Subscribe to this Observable and get a Promise resolving on\n * `complete` with the last emission (if any).\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * @method toPromise\n * @param [promiseCtor] a constructor function used to instantiate\n * the Promise\n * @return A Promise that resolves with the last value emit, or\n * rejects on an error. If there were no emissions, Promise\n * resolves with undefined.\n * @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise\n */\n toPromise(promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n let value: T | undefined;\n this.subscribe(\n (x: T) => (value = x),\n (err: any) => reject(err),\n () => resolve(value)\n );\n }) as Promise;\n }\n}\n\n/**\n * Decides between a passed promise constructor from consuming code,\n * A default configured promise constructor, and the native promise\n * constructor and returns it. If nothing can be found, it will throw\n * an error.\n * @param promiseCtor The optional promise constructor to passed by consuming code\n */\nfunction getPromiseCtor(promiseCtor: PromiseConstructorLike | undefined) {\n return promiseCtor ?? config.Promise ?? 
Promise;\n}\n\nfunction isObserver(value: any): value is Observer {\n return value && isFunction(value.next) && isFunction(value.error) && isFunction(value.complete);\n}\n\nfunction isSubscriber(value: any): value is Subscriber {\n return (value && value instanceof Subscriber) || (isObserver(value) && isSubscription(value));\n}\n", "import { Observable } from '../Observable';\nimport { Subscriber } from '../Subscriber';\nimport { OperatorFunction } from '../types';\nimport { isFunction } from './isFunction';\n\n/**\n * Used to determine if an object is an Observable with a lift function.\n */\nexport function hasLift(source: any): source is { lift: InstanceType['lift'] } {\n return isFunction(source?.lift);\n}\n\n/**\n * Creates an `OperatorFunction`. Used to define operators throughout the library in a concise way.\n * @param init The logic to connect the liftedSource to the subscriber at the moment of subscription.\n */\nexport function operate(\n init: (liftedSource: Observable, subscriber: Subscriber) => (() => void) | void\n): OperatorFunction {\n return (source: Observable) => {\n if (hasLift(source)) {\n return source.lift(function (this: Subscriber, liftedSource: Observable) {\n try {\n return init(liftedSource, this);\n } catch (err) {\n this.error(err);\n }\n });\n }\n throw new TypeError('Unable to lift unknown Observable type');\n };\n}\n", "import { Subscriber } from '../Subscriber';\n\n/**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. 
Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional teardown logic here. This will only be called on teardown if the\n * subscriber itself is not already closed. This is called after all other teardown logic is executed.\n */\nexport function createOperatorSubscriber(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n onFinalize?: () => void\n): Subscriber {\n return new OperatorSubscriber(destination, onNext, onComplete, onError, onFinalize);\n}\n\n/**\n * A generic helper for allowing operators to be created with a Subscriber and\n * use closures to capture necessary state from the operator function itself.\n */\nexport class OperatorSubscriber extends Subscriber {\n /**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional finalization logic here. This will only be called on finalization if the\n * subscriber itself is not already closed. 
This is called after all other finalization logic is executed.\n * @param shouldUnsubscribe An optional check to see if an unsubscribe call should truly unsubscribe.\n * NOTE: This currently **ONLY** exists to support the strange behavior of {@link groupBy}, where unsubscription\n * to the resulting observable does not actually disconnect from the source if there are active subscriptions\n * to any grouped observable. (DO NOT EXPOSE OR USE EXTERNALLY!!!)\n */\n constructor(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n private onFinalize?: () => void,\n private shouldUnsubscribe?: () => boolean\n ) {\n // It's important - for performance reasons - that all of this class's\n // members are initialized and that they are always initialized in the same\n // order. This will ensure that all OperatorSubscriber instances have the\n // same hidden class in V8. This, in turn, will help keep the number of\n // hidden classes involved in property accesses within the base class as\n // low as possible. If the number of hidden classes involved exceeds four,\n // the property accesses will become megamorphic and performance penalties\n // will be incurred - i.e. inline caches won't be used.\n //\n // The reasons for ensuring all instances have the same hidden class are\n // further discussed in this blog post from Benedikt Meurer:\n // https://benediktmeurer.de/2018/03/23/impact-of-polymorphism-on-component-based-frameworks-like-react/\n super(destination);\n this._next = onNext\n ? function (this: OperatorSubscriber, value: T) {\n try {\n onNext(value);\n } catch (err) {\n destination.error(err);\n }\n }\n : super._next;\n this._error = onError\n ? 
function (this: OperatorSubscriber, err: any) {\n try {\n onError(err);\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._error;\n this._complete = onComplete\n ? function (this: OperatorSubscriber) {\n try {\n onComplete();\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._complete;\n }\n\n unsubscribe() {\n if (!this.shouldUnsubscribe || this.shouldUnsubscribe()) {\n const { closed } = this;\n super.unsubscribe();\n // Execute additional teardown if we have any and we didn't already do so.\n !closed && this.onFinalize?.();\n }\n }\n}\n", "import { Subscription } from '../Subscription';\n\ninterface AnimationFrameProvider {\n schedule(callback: FrameRequestCallback): Subscription;\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n delegate:\n | {\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n }\n | undefined;\n}\n\nexport const animationFrameProvider: AnimationFrameProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n schedule(callback) {\n let request = requestAnimationFrame;\n let cancel: typeof cancelAnimationFrame | undefined = cancelAnimationFrame;\n const { delegate } = animationFrameProvider;\n if (delegate) {\n request = delegate.requestAnimationFrame;\n cancel = delegate.cancelAnimationFrame;\n }\n const handle = request((timestamp) => {\n // Clear the cancel function. 
The request has been fulfilled, so\n // attempting to cancel the request upon unsubscription would be\n // pointless.\n cancel = undefined;\n callback(timestamp);\n });\n return new Subscription(() => cancel?.(handle));\n },\n requestAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.requestAnimationFrame || requestAnimationFrame)(...args);\n },\n cancelAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.cancelAnimationFrame || cancelAnimationFrame)(...args);\n },\n delegate: undefined,\n};\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface ObjectUnsubscribedError extends Error {}\n\nexport interface ObjectUnsubscribedErrorCtor {\n /**\n * @deprecated Internal implementation detail. Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (): ObjectUnsubscribedError;\n}\n\n/**\n * An error thrown when an action is invalid because the object has been\n * unsubscribed.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n *\n * @class ObjectUnsubscribedError\n */\nexport const ObjectUnsubscribedError: ObjectUnsubscribedErrorCtor = createErrorClass(\n (_super) =>\n function ObjectUnsubscribedErrorImpl(this: any) {\n _super(this);\n this.name = 'ObjectUnsubscribedError';\n this.message = 'object unsubscribed';\n }\n);\n", "import { Operator } from './Operator';\nimport { Observable } from './Observable';\nimport { Subscriber } from './Subscriber';\nimport { Subscription, EMPTY_SUBSCRIPTION } from './Subscription';\nimport { Observer, SubscriptionLike, TeardownLogic } from './types';\nimport { ObjectUnsubscribedError } from './util/ObjectUnsubscribedError';\nimport { arrRemove } from './util/arrRemove';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A Subject is a special type of Observable that allows values to be\n * multicasted to many Observers. 
Subjects are like EventEmitters.\n *\n * Every Subject is an Observable and an Observer. You can subscribe to a\n * Subject, and you can call next to feed values as well as error and complete.\n */\nexport class Subject extends Observable implements SubscriptionLike {\n closed = false;\n\n private currentObservers: Observer[] | null = null;\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n observers: Observer[] = [];\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n isStopped = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n hasError = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n thrownError: any = null;\n\n /**\n * Creates a \"subject\" by basically gluing an observer to an observable.\n *\n * @nocollapse\n * @deprecated Recommended you do not use. Will be removed at some point in the future. Plans for replacement still under discussion.\n */\n static create: (...args: any[]) => any = (destination: Observer, source: Observable): AnonymousSubject => {\n return new AnonymousSubject(destination, source);\n };\n\n constructor() {\n // NOTE: This must be here to obscure Observable's constructor.\n super();\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. 
*/\n lift(operator: Operator): Observable {\n const subject = new AnonymousSubject(this, this);\n subject.operator = operator as any;\n return subject as any;\n }\n\n /** @internal */\n protected _throwIfClosed() {\n if (this.closed) {\n throw new ObjectUnsubscribedError();\n }\n }\n\n next(value: T) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n if (!this.currentObservers) {\n this.currentObservers = Array.from(this.observers);\n }\n for (const observer of this.currentObservers) {\n observer.next(value);\n }\n }\n });\n }\n\n error(err: any) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.hasError = this.isStopped = true;\n this.thrownError = err;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.error(err);\n }\n }\n });\n }\n\n complete() {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.isStopped = true;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.complete();\n }\n }\n });\n }\n\n unsubscribe() {\n this.isStopped = this.closed = true;\n this.observers = this.currentObservers = null!;\n }\n\n get observed() {\n return this.observers?.length > 0;\n }\n\n /** @internal */\n protected _trySubscribe(subscriber: Subscriber): TeardownLogic {\n this._throwIfClosed();\n return super._trySubscribe(subscriber);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._checkFinalizedStatuses(subscriber);\n return this._innerSubscribe(subscriber);\n }\n\n /** @internal */\n protected _innerSubscribe(subscriber: Subscriber) {\n const { hasError, isStopped, observers } = this;\n if (hasError || isStopped) {\n return EMPTY_SUBSCRIPTION;\n }\n this.currentObservers = null;\n observers.push(subscriber);\n return new Subscription(() => {\n this.currentObservers = null;\n arrRemove(observers, subscriber);\n });\n }\n\n /** @internal */\n protected 
_checkFinalizedStatuses(subscriber: Subscriber) {\n const { hasError, thrownError, isStopped } = this;\n if (hasError) {\n subscriber.error(thrownError);\n } else if (isStopped) {\n subscriber.complete();\n }\n }\n\n /**\n * Creates a new Observable with this Subject as the source. You can do this\n * to create custom Observer-side logic of the Subject and conceal it from\n * code that uses the Observable.\n * @return {Observable} Observable that the Subject casts to\n */\n asObservable(): Observable {\n const observable: any = new Observable();\n observable.source = this;\n return observable;\n }\n}\n\n/**\n * @class AnonymousSubject\n */\nexport class AnonymousSubject extends Subject {\n constructor(\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n public destination?: Observer,\n source?: Observable\n ) {\n super();\n this.source = source;\n }\n\n next(value: T) {\n this.destination?.next?.(value);\n }\n\n error(err: any) {\n this.destination?.error?.(err);\n }\n\n complete() {\n this.destination?.complete?.();\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n return this.source?.subscribe(subscriber) ?? 
EMPTY_SUBSCRIPTION;\n }\n}\n", "import { TimestampProvider } from '../types';\n\ninterface DateTimestampProvider extends TimestampProvider {\n delegate: TimestampProvider | undefined;\n}\n\nexport const dateTimestampProvider: DateTimestampProvider = {\n now() {\n // Use the variable rather than `this` so that the function can be called\n // without being bound to the provider.\n return (dateTimestampProvider.delegate || Date).now();\n },\n delegate: undefined,\n};\n", "import { Subject } from './Subject';\nimport { TimestampProvider } from './types';\nimport { Subscriber } from './Subscriber';\nimport { Subscription } from './Subscription';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * A variant of {@link Subject} that \"replays\" old values to new subscribers by emitting them when they first subscribe.\n *\n * `ReplaySubject` has an internal buffer that will store a specified number of values that it has observed. Like `Subject`,\n * `ReplaySubject` \"observes\" values by having them passed to its `next` method. When it observes a value, it will store that\n * value for a time determined by the configuration of the `ReplaySubject`, as passed to its constructor.\n *\n * When a new subscriber subscribes to the `ReplaySubject` instance, it will synchronously emit all values in its buffer in\n * a First-In-First-Out (FIFO) manner. The `ReplaySubject` will also complete, if it has observed completion; and it will\n * error if it has observed an error.\n *\n * There are two main configuration items to be concerned with:\n *\n * 1. `bufferSize` - This will determine how many items are stored in the buffer, defaults to infinite.\n * 2. `windowTime` - The amount of time to hold a value in the buffer before removing it from the buffer.\n *\n * Both configurations may exist simultaneously. 
So if you would like to buffer a maximum of 3 values, as long as the values\n * are less than 2 seconds old, you could do so with a `new ReplaySubject(3, 2000)`.\n *\n * ### Differences with BehaviorSubject\n *\n * `BehaviorSubject` is similar to `new ReplaySubject(1)`, with a couple of exceptions:\n *\n * 1. `BehaviorSubject` comes \"primed\" with a single value upon construction.\n * 2. `ReplaySubject` will replay values, even after observing an error, where `BehaviorSubject` will not.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n * @see {@link shareReplay}\n */\nexport class ReplaySubject extends Subject {\n private _buffer: (T | number)[] = [];\n private _infiniteTimeWindow = true;\n\n /**\n * @param bufferSize The size of the buffer to replay on subscription\n * @param windowTime The amount of time the buffered items will stay buffered\n * @param timestampProvider An object with a `now()` method that provides the current timestamp. This is used to\n * calculate the amount of time something has been buffered.\n */\n constructor(\n private _bufferSize = Infinity,\n private _windowTime = Infinity,\n private _timestampProvider: TimestampProvider = dateTimestampProvider\n ) {\n super();\n this._infiniteTimeWindow = _windowTime === Infinity;\n this._bufferSize = Math.max(1, _bufferSize);\n this._windowTime = Math.max(1, _windowTime);\n }\n\n next(value: T): void {\n const { isStopped, _buffer, _infiniteTimeWindow, _timestampProvider, _windowTime } = this;\n if (!isStopped) {\n _buffer.push(value);\n !_infiniteTimeWindow && _buffer.push(_timestampProvider.now() + _windowTime);\n }\n this._trimBuffer();\n super.next(value);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._trimBuffer();\n\n const subscription = this._innerSubscribe(subscriber);\n\n const { _infiniteTimeWindow, _buffer } = this;\n // We use a copy here, so reentrant code does not mutate our array while we're\n // 
emitting it to a new subscriber.\n const copy = _buffer.slice();\n for (let i = 0; i < copy.length && !subscriber.closed; i += _infiniteTimeWindow ? 1 : 2) {\n subscriber.next(copy[i] as T);\n }\n\n this._checkFinalizedStatuses(subscriber);\n\n return subscription;\n }\n\n private _trimBuffer() {\n const { _bufferSize, _timestampProvider, _buffer, _infiniteTimeWindow } = this;\n // If we don't have an infinite buffer size, and we're over the length,\n // use splice to truncate the old buffer values off. Note that we have to\n // double the size for instances where we're not using an infinite time window\n // because we're storing the values and the timestamps in the same array.\n const adjustedBufferSize = (_infiniteTimeWindow ? 1 : 2) * _bufferSize;\n _bufferSize < Infinity && adjustedBufferSize < _buffer.length && _buffer.splice(0, _buffer.length - adjustedBufferSize);\n\n // Now, if we're not in an infinite time window, remove all values where the time is\n // older than what is allowed.\n if (!_infiniteTimeWindow) {\n const now = _timestampProvider.now();\n let last = 0;\n // Search the array for the first timestamp that isn't expired and\n // truncate the buffer up to that point.\n for (let i = 1; i < _buffer.length && (_buffer[i] as number) <= now; i += 2) {\n last = i;\n }\n last && _buffer.splice(0, last + 1);\n }\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Subscription } from '../Subscription';\nimport { SchedulerAction } from '../types';\n\n/**\n * A unit of work to be executed in a `scheduler`. 
An action is typically\n * created from within a {@link SchedulerLike} and an RxJS user does not need to concern\n * themselves about creating and manipulating an Action.\n *\n * ```ts\n * class Action extends Subscription {\n * new (scheduler: Scheduler, work: (state?: T) => void);\n * schedule(state?: T, delay: number = 0): Subscription;\n * }\n * ```\n *\n * @class Action\n */\nexport class Action extends Subscription {\n constructor(scheduler: Scheduler, work: (this: SchedulerAction, state?: T) => void) {\n super();\n }\n /**\n * Schedules this action on its parent {@link SchedulerLike} for execution. May be passed\n * some context object, `state`. May happen at some point in the future,\n * according to the `delay` parameter, if specified.\n * @param {T} [state] Some contextual data that the `work` function uses when\n * called by the Scheduler.\n * @param {number} [delay] Time to wait before executing the work, where the\n * time unit is implicit and defined by the Scheduler.\n * @return {void}\n */\n public schedule(state?: T, delay: number = 0): Subscription {\n return this;\n }\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetIntervalFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearIntervalFunction = (handle: TimerHandle) => void;\n\ninterface IntervalProvider {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n delegate:\n | {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n }\n | undefined;\n}\n\nexport const intervalProvider: IntervalProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setInterval(handler: () => void, timeout?: number, ...args) {\n const { delegate } = intervalProvider;\n if (delegate?.setInterval) {\n return delegate.setInterval(handler, timeout, ...args);\n }\n return setInterval(handler, timeout, ...args);\n 
},\n clearInterval(handle) {\n const { delegate } = intervalProvider;\n return (delegate?.clearInterval || clearInterval)(handle as any);\n },\n delegate: undefined,\n};\n", "import { Action } from './Action';\nimport { SchedulerAction } from '../types';\nimport { Subscription } from '../Subscription';\nimport { AsyncScheduler } from './AsyncScheduler';\nimport { intervalProvider } from './intervalProvider';\nimport { arrRemove } from '../util/arrRemove';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncAction extends Action {\n public id: TimerHandle | undefined;\n public state?: T;\n // @ts-ignore: Property has no initializer and is not definitely assigned\n public delay: number;\n protected pending: boolean = false;\n\n constructor(protected scheduler: AsyncScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n public schedule(state?: T, delay: number = 0): Subscription {\n if (this.closed) {\n return this;\n }\n\n // Always replace the current state with the new state.\n this.state = state;\n\n const id = this.id;\n const scheduler = this.scheduler;\n\n //\n // Important implementation note:\n //\n // Actions only execute once by default, unless rescheduled from within the\n // scheduled callback. This allows us to implement single and repeat\n // actions via the same code path, without adding API surface area, as well\n // as mimic traditional recursion but across asynchronous boundaries.\n //\n // However, JS runtimes and timers distinguish between intervals achieved by\n // serial `setTimeout` calls vs. a single `setInterval` call. An interval of\n // serial `setTimeout` calls can be individually delayed, which delays\n // scheduling the next `setTimeout`, and so on. 
`setInterval` attempts to\n // guarantee the interval callback will be invoked more precisely to the\n // interval period, regardless of load.\n //\n // Therefore, we use `setInterval` to schedule single and repeat actions.\n // If the action reschedules itself with the same delay, the interval is not\n // canceled. If the action doesn't reschedule, or reschedules with a\n // different delay, the interval will be canceled after scheduled callback\n // execution.\n //\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, delay);\n }\n\n // Set the pending flag indicating that this action has been scheduled, or\n // has recursively rescheduled itself.\n this.pending = true;\n\n this.delay = delay;\n // If this action has already an async Id, don't request a new one.\n this.id = this.id ?? this.requestAsyncId(scheduler, this.id, delay);\n\n return this;\n }\n\n protected requestAsyncId(scheduler: AsyncScheduler, _id?: TimerHandle, delay: number = 0): TimerHandle {\n return intervalProvider.setInterval(scheduler.flush.bind(scheduler, this), delay);\n }\n\n protected recycleAsyncId(_scheduler: AsyncScheduler, id?: TimerHandle, delay: number | null = 0): TimerHandle | undefined {\n // If this action is rescheduled with the same delay time, don't clear the interval id.\n if (delay != null && this.delay === delay && this.pending === false) {\n return id;\n }\n // Otherwise, if the action's delay time is different from the current delay,\n // or the action has been rescheduled before it's executed, clear the interval id\n if (id != null) {\n intervalProvider.clearInterval(id);\n }\n\n return undefined;\n }\n\n /**\n * Immediately executes this action and the `work` it contains.\n * @return {any}\n */\n public execute(state: T, delay: number): any {\n if (this.closed) {\n return new Error('executing a cancelled action');\n }\n\n this.pending = false;\n const error = this._execute(state, delay);\n if (error) {\n return error;\n } else if (this.pending === false 
&& this.id != null) {\n // Dequeue if the action didn't reschedule itself. Don't call\n // unsubscribe(), because the action could reschedule later.\n // For example:\n // ```\n // scheduler.schedule(function doWork(counter) {\n // /* ... I'm a busy worker bee ... */\n // var originalAction = this;\n // /* wait 100ms before rescheduling the action */\n // setTimeout(function () {\n // originalAction.schedule(counter + 1);\n // }, 100);\n // }, 1000);\n // ```\n this.id = this.recycleAsyncId(this.scheduler, this.id, null);\n }\n }\n\n protected _execute(state: T, _delay: number): any {\n let errored: boolean = false;\n let errorValue: any;\n try {\n this.work(state);\n } catch (e) {\n errored = true;\n // HACK: Since code elsewhere is relying on the \"truthiness\" of the\n // return here, we can't have it return \"\" or 0 or false.\n // TODO: Clean this up when we refactor schedulers mid-version-8 or so.\n errorValue = e ? e : new Error('Scheduled action threw falsy error');\n }\n if (errored) {\n this.unsubscribe();\n return errorValue;\n }\n }\n\n unsubscribe() {\n if (!this.closed) {\n const { id, scheduler } = this;\n const { actions } = scheduler;\n\n this.work = this.state = this.scheduler = null!;\n this.pending = false;\n\n arrRemove(actions, this);\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, null);\n }\n\n this.delay = null!;\n super.unsubscribe();\n }\n }\n}\n", "import { Action } from './scheduler/Action';\nimport { Subscription } from './Subscription';\nimport { SchedulerLike, SchedulerAction } from './types';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * An execution context and a data structure to order tasks and schedule their\n * execution. 
Provides a notion of (potentially virtual) time, through the\n * `now()` getter method.\n *\n * Each unit of work in a Scheduler is called an `Action`.\n *\n * ```ts\n * class Scheduler {\n * now(): number;\n * schedule(work, delay?, state?): Subscription;\n * }\n * ```\n *\n * @class Scheduler\n * @deprecated Scheduler is an internal implementation detail of RxJS, and\n * should not be used directly. Rather, create your own class and implement\n * {@link SchedulerLike}. Will be made internal in v8.\n */\nexport class Scheduler implements SchedulerLike {\n public static now: () => number = dateTimestampProvider.now;\n\n constructor(private schedulerActionCtor: typeof Action, now: () => number = Scheduler.now) {\n this.now = now;\n }\n\n /**\n * A getter method that returns a number representing the current time\n * (at the time this function was called) according to the scheduler's own\n * internal clock.\n * @return {number} A number that represents the current time. May or may not\n * have a relation to wall-clock time. May or may not refer to a time unit\n * (e.g. milliseconds).\n */\n public now: () => number;\n\n /**\n * Schedules a function, `work`, for execution. May happen at some point in\n * the future, according to the `delay` parameter, if specified. 
May be passed\n * some context object, `state`, which will be passed to the `work` function.\n *\n * The given arguments will be processed an stored as an Action object in a\n * queue of actions.\n *\n * @param {function(state: ?T): ?Subscription} work A function representing a\n * task, or some unit of work to be executed by the Scheduler.\n * @param {number} [delay] Time to wait before executing the work, where the\n * time unit is implicit and defined by the Scheduler itself.\n * @param {T} [state] Some contextual data that the `work` function uses when\n * called by the Scheduler.\n * @return {Subscription} A subscription in order to be able to unsubscribe\n * the scheduled work.\n */\n public schedule(work: (this: SchedulerAction, state?: T) => void, delay: number = 0, state?: T): Subscription {\n return new this.schedulerActionCtor(this, work).schedule(state, delay);\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Action } from './Action';\nimport { AsyncAction } from './AsyncAction';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncScheduler extends Scheduler {\n public actions: Array> = [];\n /**\n * A flag to indicate whether the Scheduler is currently executing a batch of\n * queued actions.\n * @type {boolean}\n * @internal\n */\n public _active: boolean = false;\n /**\n * An internal ID used to track the latest asynchronous task such as those\n * coming from `setTimeout`, `setInterval`, `requestAnimationFrame`, and\n * others.\n * @type {any}\n * @internal\n */\n public _scheduled: TimerHandle | undefined;\n\n constructor(SchedulerAction: typeof Action, now: () => number = Scheduler.now) {\n super(SchedulerAction, now);\n }\n\n public flush(action: AsyncAction): void {\n const { actions } = this;\n\n if (this._active) {\n actions.push(action);\n return;\n }\n\n let error: any;\n this._active = true;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = 
actions.shift()!)); // exhaust the scheduler queue\n\n this._active = false;\n\n if (error) {\n while ((action = actions.shift()!)) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\n/**\n *\n * Async Scheduler\n *\n * Schedule task as if you used setTimeout(task, duration)\n *\n * `async` scheduler schedules tasks asynchronously, by putting them on the JavaScript\n * event loop queue. It is best used to delay tasks in time or to schedule tasks repeating\n * in intervals.\n *\n * If you just want to \"defer\" task, that is to perform it right after currently\n * executing synchronous code ends (commonly achieved by `setTimeout(deferredTask, 0)`),\n * better choice will be the {@link asapScheduler} scheduler.\n *\n * ## Examples\n * Use async scheduler to delay task\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * const task = () => console.log('it works!');\n *\n * asyncScheduler.schedule(task, 2000);\n *\n * // After 2 seconds logs:\n * // \"it works!\"\n * ```\n *\n * Use async scheduler to repeat task in intervals\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * function task(state) {\n * console.log(state);\n * this.schedule(state + 1, 1000); // `this` references currently executing Action,\n * // which we reschedule with new state and delay\n * }\n *\n * asyncScheduler.schedule(task, 3000, 0);\n *\n * // Logs:\n * // 0 after 3s\n * // 1 after 4s\n * // 2 after 5s\n * // 3 after 6s\n * ```\n */\n\nexport const asyncScheduler = new AsyncScheduler(AsyncAction);\n\n/**\n * @deprecated Renamed to {@link asyncScheduler}. 
Will be removed in v8.\n */\nexport const async = asyncScheduler;\n", "import { AsyncAction } from './AsyncAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\nimport { SchedulerAction } from '../types';\nimport { animationFrameProvider } from './animationFrameProvider';\nimport { TimerHandle } from './timerHandle';\n\nexport class AnimationFrameAction extends AsyncAction {\n constructor(protected scheduler: AnimationFrameScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n protected requestAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle {\n // If delay is greater than 0, request as an async action.\n if (delay !== null && delay > 0) {\n return super.requestAsyncId(scheduler, id, delay);\n }\n // Push the action to the end of the scheduler queue.\n scheduler.actions.push(this);\n // If an animation frame has already been requested, don't request another\n // one. If an animation frame hasn't been requested yet, request one. Return\n // the current animation frame request id.\n return scheduler._scheduled || (scheduler._scheduled = animationFrameProvider.requestAnimationFrame(() => scheduler.flush(undefined)));\n }\n\n protected recycleAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle | undefined {\n // If delay exists and is greater than 0, or if the delay is null (the\n // action wasn't rescheduled) but was originally scheduled as an async\n // action, then recycle as an async action.\n if (delay != null ? 
delay > 0 : this.delay > 0) {\n return super.recycleAsyncId(scheduler, id, delay);\n }\n // If the scheduler queue has no remaining actions with the same async id,\n // cancel the requested animation frame and set the scheduled flag to\n // undefined so the next AnimationFrameAction will request its own.\n const { actions } = scheduler;\n if (id != null && actions[actions.length - 1]?.id !== id) {\n animationFrameProvider.cancelAnimationFrame(id as number);\n scheduler._scheduled = undefined;\n }\n // Return undefined so the action knows to request a new async id if it's rescheduled.\n return undefined;\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\nexport class AnimationFrameScheduler extends AsyncScheduler {\n public flush(action?: AsyncAction): void {\n this._active = true;\n // The async id that effects a call to flush is stored in _scheduled.\n // Before executing an action, it's necessary to check the action's async\n // id to determine whether it's supposed to be executed in the current\n // flush.\n // Previous implementations of this method used a count to determine this,\n // but that was unsound, as actions that are unsubscribed - i.e. 
cancelled -\n // are removed from the actions array and that can shift actions that are\n // scheduled to be executed in a subsequent flush into positions at which\n // they are executed within the current flush.\n const flushId = this._scheduled;\n this._scheduled = undefined;\n\n const { actions } = this;\n let error: any;\n action = action || actions.shift()!;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = actions[0]) && action.id === flushId && actions.shift());\n\n this._active = false;\n\n if (error) {\n while ((action = actions[0]) && action.id === flushId && actions.shift()) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AnimationFrameAction } from './AnimationFrameAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\n\n/**\n *\n * Animation Frame Scheduler\n *\n * Perform task when `window.requestAnimationFrame` would fire\n *\n * When `animationFrame` scheduler is used with delay, it will fall back to {@link asyncScheduler} scheduler\n * behaviour.\n *\n * Without delay, `animationFrame` scheduler can be used to create smooth browser animations.\n * It makes sure scheduled task will happen just before next browser content repaint,\n * thus performing animations as efficiently as possible.\n *\n * ## Example\n * Schedule div height animation\n * ```ts\n * // html:
\n * import { animationFrameScheduler } from 'rxjs';\n *\n * const div = document.querySelector('div');\n *\n * animationFrameScheduler.schedule(function(height) {\n * div.style.height = height + \"px\";\n *\n * this.schedule(height + 1); // `this` references currently executing Action,\n * // which we reschedule with new state\n * }, 0, 0);\n *\n * // You will see a div element growing in height\n * ```\n */\n\nexport const animationFrameScheduler = new AnimationFrameScheduler(AnimationFrameAction);\n\n/**\n * @deprecated Renamed to {@link animationFrameScheduler}. Will be removed in v8.\n */\nexport const animationFrame = animationFrameScheduler;\n", "import { Observable } from '../Observable';\nimport { SchedulerLike } from '../types';\n\n/**\n * A simple Observable that emits no items to the Observer and immediately\n * emits a complete notification.\n *\n * Just emits 'complete', and nothing else.\n *\n * ![](empty.png)\n *\n * A simple Observable that only emits the complete notification. It can be used\n * for composing with other Observables, such as in a {@link mergeMap}.\n *\n * ## Examples\n *\n * Log complete notification\n *\n * ```ts\n * import { EMPTY } from 'rxjs';\n *\n * EMPTY.subscribe({\n * next: () => console.log('Next'),\n * complete: () => console.log('Complete!')\n * });\n *\n * // Outputs\n * // Complete!\n * ```\n *\n * Emit the number 7, then complete\n *\n * ```ts\n * import { EMPTY, startWith } from 'rxjs';\n *\n * const result = EMPTY.pipe(startWith(7));\n * result.subscribe(x => console.log(x));\n *\n * // Outputs\n * // 7\n * ```\n *\n * Map and flatten only odd numbers to the sequence `'a'`, `'b'`, `'c'`\n *\n * ```ts\n * import { interval, mergeMap, of, EMPTY } from 'rxjs';\n *\n * const interval$ = interval(1000);\n * const result = interval$.pipe(\n * mergeMap(x => x % 2 === 1 ? 
of('a', 'b', 'c') : EMPTY),\n * );\n * result.subscribe(x => console.log(x));\n *\n * // Results in the following to the console:\n * // x is equal to the count on the interval, e.g. (0, 1, 2, 3, ...)\n * // x will occur every 1000ms\n * // if x % 2 is equal to 1, print a, b, c (each on its own)\n * // if x % 2 is not equal to 1, nothing will be output\n * ```\n *\n * @see {@link Observable}\n * @see {@link NEVER}\n * @see {@link of}\n * @see {@link throwError}\n */\nexport const EMPTY = new Observable((subscriber) => subscriber.complete());\n\n/**\n * @param scheduler A {@link SchedulerLike} to use for scheduling\n * the emission of the complete notification.\n * @deprecated Replaced with the {@link EMPTY} constant or {@link scheduled} (e.g. `scheduled([], scheduler)`). Will be removed in v8.\n */\nexport function empty(scheduler?: SchedulerLike) {\n return scheduler ? emptyScheduled(scheduler) : EMPTY;\n}\n\nfunction emptyScheduled(scheduler: SchedulerLike) {\n return new Observable((subscriber) => scheduler.schedule(() => subscriber.complete()));\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport function isScheduler(value: any): value is SchedulerLike {\n return value && isFunction(value.schedule);\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\nimport { isScheduler } from './isScheduler';\n\nfunction last(arr: T[]): T | undefined {\n return arr[arr.length - 1];\n}\n\nexport function popResultSelector(args: any[]): ((...args: unknown[]) => unknown) | undefined {\n return isFunction(last(args)) ? args.pop() : undefined;\n}\n\nexport function popScheduler(args: any[]): SchedulerLike | undefined {\n return isScheduler(last(args)) ? args.pop() : undefined;\n}\n\nexport function popNumber(args: any[], defaultValue: number): number {\n return typeof last(args) === 'number' ? args.pop()! 
: defaultValue;\n}\n", "export const isArrayLike = ((x: any): x is ArrayLike => x && typeof x.length === 'number' && typeof x !== 'function');", "import { isFunction } from \"./isFunction\";\n\n/**\n * Tests to see if the object is \"thennable\".\n * @param value the object to test\n */\nexport function isPromise(value: any): value is PromiseLike {\n return isFunction(value?.then);\n}\n", "import { InteropObservable } from '../types';\nimport { observable as Symbol_observable } from '../symbol/observable';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being Observable (but not necessary an Rx Observable) */\nexport function isInteropObservable(input: any): input is InteropObservable {\n return isFunction(input[Symbol_observable]);\n}\n", "import { isFunction } from './isFunction';\n\nexport function isAsyncIterable(obj: any): obj is AsyncIterable {\n return Symbol.asyncIterator && isFunction(obj?.[Symbol.asyncIterator]);\n}\n", "/**\n * Creates the TypeError to throw if an invalid object is passed to `from` or `scheduled`.\n * @param input The object that was passed.\n */\nexport function createInvalidObservableTypeError(input: any) {\n // TODO: We should create error codes that can be looked up, so this can be less verbose.\n return new TypeError(\n `You provided ${\n input !== null && typeof input === 'object' ? 'an invalid object' : `'${input}'`\n } where a stream was expected. 
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.`\n );\n}\n", "export function getSymbolIterator(): symbol {\n if (typeof Symbol !== 'function' || !Symbol.iterator) {\n return '@@iterator' as any;\n }\n\n return Symbol.iterator;\n}\n\nexport const iterator = getSymbolIterator();\n", "import { iterator as Symbol_iterator } from '../symbol/iterator';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being an Iterable */\nexport function isIterable(input: any): input is Iterable {\n return isFunction(input?.[Symbol_iterator]);\n}\n", "import { ReadableStreamLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport async function* readableStreamLikeToAsyncGenerator(readableStream: ReadableStreamLike): AsyncGenerator {\n const reader = readableStream.getReader();\n try {\n while (true) {\n const { value, done } = await reader.read();\n if (done) {\n return;\n }\n yield value!;\n }\n } finally {\n reader.releaseLock();\n }\n}\n\nexport function isReadableStreamLike(obj: any): obj is ReadableStreamLike {\n // We don't want to use instanceof checks because they would return\n // false for instances from another Realm, like an