diff --git a/.nojekyll b/.nojekyll
new file mode 100644
index 00000000..e69de29b
diff --git a/404.html b/404.html
new file mode 100644
index 00000000..0033ad99
--- /dev/null
+++ b/404.html
@@ -0,0 +1,480 @@
+
ALLOW_BIND_ZIP_FILTER
+
DEFAULT_HARD_TASK_LIMIT
+
DEFAULT_README_TEXT
+
DEFAULT_SOFT_TASK_LIMIT
+
ENABLE_SOZIP
+
ENABLE_TILES
+
HDX_HARD_TASK_LIMIT
+
HDX_SOFT_TASK_LIMIT
+
WORKER_PREFETCH_MULTIPLIER
+
celery
+
celery_backend
+
celery_broker_uri
+
use_s3_to_upload
+
def create_readme_content(
+ default_readme,
+ polygon_stats
+)
+
def remove_file(
+ path: str
+) -> None
+
def zip_binding(
+ working_dir,
+ exportname_parts,
+ geom_dump,
+ polygon_stats,
+ default_readme
+)
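The helpers above bundle export outputs together with a generated readme into a zip. A rough standard-library sketch of that zipping step; the function name, paths, and readme text are placeholders, not the project's implementation:

```python
# Rough sketch of zipping an export directory together with a generated
# readme; the function name, paths, and readme text are placeholders.
import os
import zipfile

def zip_export(working_dir: str, zip_path: str, readme_text: str) -> str:
    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf:
        zf.writestr("Readme.txt", readme_text)
        for root, _dirs, files in os.walk(working_dir):
            for name in files:
                full = os.path.join(root, name)
                zf.write(full, arcname=os.path.relpath(full, working_dir))
    return zip_path
```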
+
osm_auth
+
def admin_required(
+ user: API.auth.AuthUser = Depends(login_required)
+)
+
def get_optional_user(
+ access_token: str = Header(None)
+) -> API.auth.AuthUser
+
def get_osm_auth_user(
+ access_token
+)
+
def get_user_from_db(
+ osm_id: int
+)
+
def login_required(
+ access_token: str = Header(PydanticUndefined)
+)
+
def staff_required(
+ user: API.auth.AuthUser = Depends(login_required)
+)
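These dependencies plug into FastAPI's Depends mechanism: login_required validates the access token, while staff_required and admin_required additionally enforce the user's role. A minimal usage sketch, assuming the API.auth import path shown in the signatures above; the /demo routes are purely illustrative:

```python
# Illustrative sketch only: the /demo routes are not part of the API.
from fastapi import APIRouter, Depends

from API.auth import AuthUser, admin_required, login_required

router = APIRouter(prefix="/demo")


@router.get("/me")
def whoami(user: AuthUser = Depends(login_required)):
    # Any user with a valid access token reaches this point.
    return {"user": user}


@router.delete("/dangerous")
def dangerous_action(user: AuthUser = Depends(admin_required)):
    # admin_required itself depends on login_required, so the token is
    # validated first and the ADMIN role is then enforced.
    return {"ok": True}
```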
+
class AuthUser(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
Attributes:
    __class_vars__: The names of classvars defined on the model.
    __private_attributes__: Metadata about the private attributes of the model.
    __signature__: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
Creates a new instance of the Model class with validated data.
Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
Default values are respected, but no other validation is performed.
Behaves as if Config.extra = 'allow' was set since it adds all passed values.
Args:
    _fields_set: The set of field names accepted for the Model instance.
    values: Trusted or pre-validated data dictionary.
Returns:
    A new instance of the Model class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Args:
+ by_alias: Whether to use attribute aliases or not.
+ ref_template: The reference template.
+ schema_generator: To override the logic used to generate the JSON schema, as a subclass of
+ GenerateJsonSchema
with your desired modifications
+ mode: The mode in which to generate the schema.
Returns: + The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Args:
+ params: Tuple of types of the class. Given a generic class
+ Model
with 2 type variables and a concrete model Model[str, int]
,
+ the value (str, int)
would be passed to params
.
Returns:
+ String representing the new class where params
are passed to cls
as type variables.
Raises: + TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Args:
+ force: Whether to force the rebuilding of the model schema, defaults to False
.
+ raise_errors: Whether to raise errors, defaults to True
.
+ _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
+ _types_namespace: The types namespace, defaults to None
.
Returns:
+ Returns None
if the schema is already "complete" and rebuilding was not required.
+ If rebuilding was required, returns True
if rebuilding was successful, otherwise False
.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object to validate. + strict: Whether to enforce types strictly. + from_attributes: Whether to extract data from object attributes. + context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+Raises:
+ ValueError: If json_data
is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object contains string data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
model_extra
+
Returns:
    A dictionary of extra fields, or None if config.extra is not set to "allow".
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
!!! warning "Deprecated"
+ This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True
to make a deep copy of the model.
Returns: + New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json
method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__ and model_construct.
This is useful if you want to do some validation that requires the entire model to be initialized.
+class UserRole(
+ /,
+ *args,
+ **kwargs
+)
+
An enumeration.
+ADMIN
+
GUEST
+
STAFF
+
name
+
value
+
router
+
def callback(
+ request: starlette.requests.Request
+)
+
Core will use the OAuth secret key from the configuration while deserializing the token, and provides an access token that can be used for authorized endpoints.
+Parameters: None
+Returns: +- access_token (string)
+def create_user(
+ params: API.auth.routers.User,
+ user_data: API.auth.AuthUser = Depends(admin_required)
+)
+
Args: +- params (User): The user data including osm_id and role.
+Returns: +- Dict[str, Any]: A dictionary containing the osm_id of the newly created user.
+Raises: +- HTTPException: If the user creation fails.
+def delete_user(
+ osm_id: int,
+ user_data: API.auth.AuthUser = Depends(admin_required)
+)
+
Args: +- osm_id (int): The OSM ID of the user to delete.
+Returns: +- Dict[str, Any]: A dictionary containing the deleted user information.
+Raises: +- HTTPException: If the user with the given osm_id is not found.
+def login_url(
+ request: starlette.requests.Request
+)
+
Parameters: None
Returns:
- login_url (dict): URL to authorize the user to the application via OpenStreetMap OAuth2, with client_id, redirect_uri, and permission scope as query string parameters
+def my_data(
+ user_data: API.auth.AuthUser = Depends(login_required)
+)
+
Parameters: None
Returns: user_data
User Role:
    ADMIN = 1
    STAFF = 2
    GUEST = 3
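Taken together, login_url, callback, and my_data form a standard OAuth2 login round trip. A rough client-side sketch, assuming the router is mounted under an /auth prefix and a locally running API; the exact paths may differ in your deployment:

```python
# Rough client-side sketch of the OAuth2 round trip; the /auth/* paths and
# BASE_URL are assumptions and may differ in your deployment.
import webbrowser

import requests

BASE_URL = "http://127.0.0.1:8000"

# 1. Ask the API for the OSM authorization URL and open it in a browser.
login = requests.get(f"{BASE_URL}/auth/login/").json()
webbrowser.open(login["login_url"])

# 2. After authorizing on openstreetmap.org, OSM redirects to the callback
#    endpoint, which returns the access_token described above.
redirected = input("Paste the full callback URL you were redirected to: ")
token = requests.get(redirected).json()["access_token"]

# 3. Use the token on authorized endpoints via the access-token header
#    (FastAPI maps the access_token Header parameter to this header name).
me = requests.get(f"{BASE_URL}/auth/me/", headers={"access-token": token})
print(me.json())
```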
+def read_user(
+ osm_id: int,
+ user_data: API.auth.AuthUser = Depends(staff_required)
+)
+
Args: +- osm_id (int): The OSM ID of the user to retrieve.
+Returns: +- Dict[str, Any]: A dictionary containing user information.
+Raises: +- HTTPException: If the user with the given osm_id is not found.
+def read_users(
+ skip: int = 0,
+ limit: int = 10,
+ user_data: API.auth.AuthUser = Depends(staff_required)
+)
+
Args: +- skip (int): The number of users to skip (for pagination). +- limit (int): The maximum number of users to retrieve (for pagination).
+Returns: +- List[Dict[str, Any]]: A list of dictionaries containing user information.
+def update_user(
+ osm_id: int,
+ update_data: API.auth.routers.User,
+ user_data: API.auth.AuthUser = Depends(admin_required)
+)
+
Returns: +- Dict[str, Any]: A dictionary containing the updated user information.
+Raises: +- HTTPException: If the user with the given osm_id is not found.
+class User(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
Attributes:
    __class_vars__: The names of classvars defined on the model.
    __private_attributes__: Metadata about the private attributes of the model.
    __signature__: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
Creates a new instance of the Model class with validated data.
Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
Default values are respected, but no other validation is performed.
Behaves as if Config.extra = 'allow' was set since it adds all passed values.
Args:
    _fields_set: The set of field names accepted for the Model instance.
    values: Trusted or pre-validated data dictionary.
Returns:
    A new instance of the Model class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Args:
+ by_alias: Whether to use attribute aliases or not.
+ ref_template: The reference template.
+ schema_generator: To override the logic used to generate the JSON schema, as a subclass of
+ GenerateJsonSchema
with your desired modifications
+ mode: The mode in which to generate the schema.
Returns: + The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Args:
+ params: Tuple of types of the class. Given a generic class
+ Model
with 2 type variables and a concrete model Model[str, int]
,
+ the value (str, int)
would be passed to params
.
Returns:
+ String representing the new class where params
are passed to cls
as type variables.
Raises: + TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Args:
+ force: Whether to force the rebuilding of the model schema, defaults to False
.
+ raise_errors: Whether to raise errors, defaults to True
.
+ _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
+ _types_namespace: The types namespace, defaults to None
.
Returns:
+ Returns None
if the schema is already "complete" and rebuilding was not required.
+ If rebuilding was required, returns True
if rebuilding was successful, otherwise False
.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object to validate. + strict: Whether to enforce types strictly. + from_attributes: Whether to extract data from object attributes. + context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+Raises:
+ ValueError: If json_data
is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object contains string data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
model_extra
+
Returns:
    A dictionary of extra fields, or None if config.extra is not set to "allow".
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
!!! warning "Deprecated"
+ This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True
to make a deep copy of the model.
Returns: + New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json
method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__ and model_construct.
This is useful if you want to do some validation that requires the entire model to be initialized.
+
DEFAULT_QUEUE_NAME
+
RATE_LIMIT_PER_MIN
+
router
+
def process_custom_requests(
+ request: starlette.requests.Request,
+ user: API.auth.AuthUser = Depends(staff_required),
+ params: src.validation.models.DynamicCategoriesModel = Body(PydanticUndefined)
+)
+
Args: + request: FastAPI Request object. + params (DynamicCategoriesModel): Input parameters including ISO3 country code and dynamic categories.
+Returns: + dict: Result message.
RATE_LIMIT_PER_MIN
+
router
+
def create_hdx(
+ request: starlette.requests.Request,
+ hdx_data: dict,
+ user_data: API.auth.AuthUser = Depends(staff_required)
+)
+
Args: + request (Request): The request object. + hdx_data (dict): Data for creating the HDX entry. + user_data (AuthUser): User authentication data.
+Returns: + dict: Result of the HDX creation process.
+def delete_hdx(
+ request: starlette.requests.Request,
+ hdx_id: int,
+ user_data: API.auth.AuthUser = Depends(admin_required)
+)
+
Args: + request (Request): The request object. + hdx_id (int): ID of the HDX entry to delete. + user_data (AuthUser): User authentication data.
+Returns: + dict: Result of the HDX deletion process.
+Raises: + HTTPException: If the HDX entry is not found.
+def patch_hdx(
+ request: starlette.requests.Request,
+ hdx_id: int,
+ hdx_data: Dict,
+ user_data: API.auth.AuthUser = Depends(staff_required)
+)
+
Args: + request (Request): The request object. + hdx_id (int): ID of the HDX entry to update. + hdx_data (Dict): Data for partially updating the HDX entry. + user_data (AuthUser): User authentication data.
+Returns: + Dict: Result of the HDX update process.
+Raises: + HTTPException: If the HDX entry is not found.
+def read_hdx(
+ request: starlette.requests.Request,
+ hdx_id: int
+)
+
Args: + request (Request): The request object. + hdx_id (int): ID of the HDX entry to retrieve.
+Returns: + dict: Details of the requested HDX entry.
+Raises: + HTTPException: If the HDX entry is not found.
+def read_hdx_list(
+ request: starlette.requests.Request,
+ skip: int = 0,
+ limit: int = 10
+)
+
Args: + request (Request): The request object. + skip (int): Number of entries to skip. + limit (int): Maximum number of entries to retrieve.
+Returns: + List[dict]: List of HDX entries.
+def search_hdx(
+ request: starlette.requests.Request,
+ dataset_title: str = Query(PydanticUndefined),
+ skip: int = Query(0),
+ limit: int = Query(10)
+)
+
Args: + request (Request): The request object. + dataset_title (str): The title of the dataset to search for. + skip (int): Number of entries to skip. + limit (int): Maximum number of entries to retrieve.
+Returns: + List[dict]: List of HDX entries matching the dataset title.
+def update_hdx(
+ request: starlette.requests.Request,
+ hdx_id: int,
+ hdx_data: dict,
+ user_data: API.auth.AuthUser = Depends(staff_required)
+)
+
Args: + request (Request): The request object. + hdx_id (int): ID of the HDX entry to update. + hdx_data (dict): Data for updating the HDX entry. + user_data (AuthUser): User authentication data.
+Returns: + dict: Result of the HDX update process.
+Raises: + HTTPException: If the HDX entry is not found.
ENABLE_CUSTOM_EXPORTS
+
ENABLE_HDX_EXPORTS
+
ENABLE_POLYGON_STATISTICS_ENDPOINTS
+
EXPORT_PATH
+
LOG_LEVEL
+
SENTRY_DSN
+
SENTRY_RATE
+
USE_CONNECTION_POOLING
+
USE_S3_TO_UPLOAD
+
app
+
origins
+
def add_process_time_header(
+ request,
+ call_next
+)
+
Args: + request (type): description + call_next (type): description
+Returns: + header with process time
+def on_shutdown(
+
+)
+
def on_startup(
+
+)
+
Raises:
    e: if the connection to the database is rejected
[Router Responsible for Raw data API]
+ALLOW_BIND_ZIP_FILTER
+
CELERY_BROKER_URL
+
DEFAULT_QUEUE_NAME
+
EXPORT_MAX_AREA_SQKM
+
export_rate_limit
+
redis_client
+
router
+
def check_database_last_updated(
+
+)
+
def get_countries(
+ q: str = ''
+)
+
def get_osm_current_snapshot_as_file(
+ request: starlette.requests.Request,
+ params: src.validation.models.RawDataCurrentParams = Body({}),
+ user: API.auth.AuthUser = Depends(get_optional_user)
+)
+
Steps to Run Snapshot :
+def get_osm_current_snapshot_as_plain_geojson(
+ request: starlette.requests.Request,
+ params: src.validation.models.RawDataCurrentParamsBase,
+ user: API.auth.AuthUser = Depends(get_optional_user)
+)
+
Args:
    request (Request): description
    params (RawDataCurrentParamsBase): Same as /snapshot, except for multiple output format options and configurations
+Returns: + Featurecollection: Geojson
+def get_osm_feature(
+ osm_id: int
+)
+
AWS_ACCESS_KEY_ID
+
AWS_REGION
+
AWS_SECRET_ACCESS_KEY
+
BUCKET_NAME
+
RATE_LIMIT_PER_MIN
+
paginator
+
router
+
s3
+
session
+
def check_object_existence(
+ bucket_name,
+ file_path
+)
+
def get_s3_file(
+ request: starlette.requests.Request,
+ file_path: str = Path(PydanticUndefined),
+ expiry: int = Query(3600),
+ read_meta: bool = Query(True)
+)
+
def head_s3_file(
+ request: starlette.requests.Request,
+ file_path: str = Path(PydanticUndefined)
+)
+
def list_s3_files(
+ request: starlette.requests.Request,
+ folder: str = Query(/HDX),
+ prettify: bool = Query(False)
+)
+
def read_meta_json(
+ bucket_name,
+ file_path
+)
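check_object_existence and read_meta_json wrap basic S3 lookups. A hedged boto3 sketch of the same idea; the function names, bucket, and key names are placeholders:

```python
# Hedged boto3 sketch of checking object existence and reading a JSON file
# from S3; function names, bucket, and key names are placeholders.
import json

import boto3
from botocore.exceptions import ClientError

s3 = boto3.client("s3")

def object_exists(bucket_name: str, file_path: str) -> bool:
    try:
        s3.head_object(Bucket=bucket_name, Key=file_path)
        return True
    except ClientError:
        return False

def read_json(bucket_name: str, file_path: str) -> dict:
    obj = s3.get_object(Bucket=bucket_name, Key=file_path)
    return json.loads(obj["Body"].read())
```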
+
POLYGON_STATISTICS_API_RATE_LIMIT
+
router
+
def get_polygon_stats(
+ request: starlette.requests.Request,
+ params: src.validation.models.StatsRequestParams = Body(PydanticUndefined)
+)
+
Args: + request (Request): An HTTP request object. + params (StatsRequestParams): Parameters for the statistics request, including the polygon geometry.
+Returns: + dict: A dictionary containing statistics for the specified polygon.
CELERY_BROKER_URL
+
DAEMON_QUEUE_NAME
+
DEFAULT_QUEUE_NAME
+
queues
+
router
+
def discard_all_waiting_tasks(
+ user: API.auth.AuthUser = Depends(admin_required)
+)
+
def get_list_details(
+ queue_name: str,
+ args: bool = Query(False)
+)
+
def get_queue_info(
+
+)
+
def get_task_status(
+ task_id,
+ only_args: bool = Query(False)
+)
+
Args:
+task_id ([type]): [Unique id provided on response from /snapshot/]
+
+Returns:
+id: Id of the task
status: Possible values include:
+
+ PENDING
+
+ The task is waiting for execution.
+
+ STARTED
+
+ The task has been started.
+
+ RETRY
+
+ The task is to be retried, possibly because of failure.
+
+ FAILURE
+
+ The task raised an exception, or has exceeded the retry limit. The result attribute then contains the exception raised by the task.
+
+ SUCCESS
+
+ The task executed successfully. The result attribute then contains the tasks return value.
+
+result : Result of task
+
A successful task will have additional nested JSON inside.
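Because SUCCESS and FAILURE are the terminal states, clients usually poll this endpoint until one of them is reached. A minimal polling sketch, assuming a locally running API and the track_link format (/tasks/status/{task_id}/) returned by /snapshot/:

```python
# Minimal polling sketch; BASE_URL, interval, and timeout are illustrative.
import time

import requests

BASE_URL = "http://127.0.0.1:8000"

def wait_for_task(track_link: str, interval: int = 5, timeout: int = 600) -> dict:
    """Poll the task status endpoint until it reports SUCCESS or FAILURE."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        resp = requests.get(f"{BASE_URL}{track_link}")
        resp.raise_for_status()
        body = resp.json()
        if body["status"] in ("SUCCESS", "FAILURE"):
            return body  # on success, result holds the download_url
        time.sleep(interval)
    raise TimeoutError(f"Task did not finish within {timeout} seconds")
```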
+def inspect_workers(
+ request: starlette.requests.Request,
+ summary: bool = Query(True)
+)
+
Returns: + active: Current Active tasks ongoing on workers
+def ping_workers(
+
+)
+
Returns: {worker_name : return_result}
+def revoke_task(
+ task_id,
+ user: API.auth.AuthUser = Depends(staff_required)
+)
+
Args: + task_id (type): task id of raw data task
+Returns: + id: id of revoked task
Description | URL
---|---
/latest | /latest
Check Database Last Updated
Gives the status of how recent the OSM data is; it returns the last time the database was completely updated.
++ Response 200 OK +
+ +{
+ "lastUpdated": "2022-06-27 19:59:24+05:45"
+}
+
{
+ "title": "StatusResponse",
+ "required": [
+ "lastUpdated"
+ ],
+ "type": "object",
+ "properties": {
+ "lastUpdated": {
+ "title": "Lastupdated",
+ "type": "string"
+ }
+ },
+ "additionalProperties": false,
+ "example": {
+ "lastUpdated": "2022-06-27 19:59:24+05:45"
+ }
+}
+
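A quick client-side freshness check against /latest, assuming a locally running API instance:

```python
# Quick freshness check against the /latest endpoint; the base URL is illustrative.
import requests

resp = requests.get("http://127.0.0.1:8000/latest")
resp.raise_for_status()
print(resp.json()["lastUpdated"])  # e.g. "2022-06-27 19:59:24+05:45"
```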
Get Osm Current Snapshot As File
Generates the current raw OpenStreetMap data available in the database, based on the input geometry, query, and spatial features.
+Steps to Run Snapshot :
+Request body
+ +{
+ "geometry": {
+ "type": "Polygon",
+ "coordinates": [
+ [
+ [
+ 83.96919250488281,
+ 28.194446860487773
+ ],
+ [
+ 83.99751663208006,
+ 28.194446860487773
+ ],
+ [
+ 83.99751663208006,
+ 28.214869548073377
+ ],
+ [
+ 83.96919250488281,
+ 28.214869548073377
+ ],
+ [
+ 83.96919250488281,
+ 28.194446860487773
+ ]
+ ]
+ ]
+ }
+}
+
{
+ "outputType": "shp",
+ "fileName": "Pokhara_all_features",
+ "geometry": {
+ "type": "Polygon",
+ "coordinates": [
+ [
+ [
+ 83.96919250488281,
+ 28.194446860487773
+ ],
+ [
+ 83.99751663208006,
+ 28.194446860487773
+ ],
+ [
+ 83.99751663208006,
+ 28.214869548073377
+ ],
+ [
+ 83.96919250488281,
+ 28.214869548073377
+ ],
+ [
+ 83.96919250488281,
+ 28.194446860487773
+ ]
+ ]
+ ]
+ }
+}
+
{
+ "outputType": "geojson",
+ "fileName": "Pokhara_buildings",
+ "geometry": {
+ "type": "Polygon",
+ "coordinates": [
+ [
+ [
+ 83.96919250488281,
+ 28.194446860487773
+ ],
+ [
+ 83.99751663208006,
+ 28.194446860487773
+ ],
+ [
+ 83.99751663208006,
+ 28.214869548073377
+ ],
+ [
+ 83.96919250488281,
+ 28.214869548073377
+ ],
+ [
+ 83.96919250488281,
+ 28.194446860487773
+ ]
+ ]
+ ]
+ },
+ "filters": {
+ "tags": {
+ "all_geometry": {
+ "building": []
+ }
+ },
+ "attributes": {
+ "all_geometry": [
+ "name"
+ ]
+ }
+ },
+ "geometryType": [
+ "point",
+ "polygon"
+ ]
+}
+
{
+ "geometry": {
+ "type": "Polygon",
+ "coordinates": [
+ [
+ [
+ 83.585701,
+ 28.046607
+ ],
+ [
+ 83.585701,
+ 28.382561
+ ],
+ [
+ 84.391823,
+ 28.382561
+ ],
+ [
+ 84.391823,
+ 28.046607
+ ],
+ [
+ 83.585701,
+ 28.046607
+ ]
+ ]
+ ]
+ },
+ "fileName": "my export",
+ "outputType": "geojson",
+ "geometryType": [
+ "point",
+ "polygon"
+ ],
+ "filters": {
+ "tags": {
+ "all_geometry": {
+ "building": [],
+ "amenity": [
+ "cafe",
+ "restaurant",
+ "pub"
+ ]
+ }
+ },
+ "attributes": {
+ "all_geometry": [
+ "name",
+ "addr"
+ ]
+ }
+ },
+ "joinFilterType": "OR"
+}
+
{
+ "fileName": "Example export with all features",
+ "geometry": {
+ "type": "Polygon",
+ "coordinates": [
+ [
+ [
+ 83.585701,
+ 28.046607
+ ],
+ [
+ 83.585701,
+ 28.382561
+ ],
+ [
+ 84.391823,
+ 28.382561
+ ],
+ [
+ 84.391823,
+ 28.046607
+ ],
+ [
+ 83.585701,
+ 28.046607
+ ]
+ ]
+ ]
+ },
+ "outputType": "geojson",
+ "geometryType": [
+ "point",
+ "line",
+ "polygon"
+ ],
+ "filters": {
+ "tags": {
+ "point": {
+ "amenity": [
+ "bank",
+ "ferry_terminal",
+ "bus_station",
+ "fuel",
+ "kindergarten",
+ "school",
+ "college",
+ "university",
+ "place_of_worship",
+ "marketplace",
+ "clinic",
+ "hospital",
+ "police",
+ "fire_station"
+ ],
+ "building": [
+ "bank",
+ "aerodrome",
+ "ferry_terminal",
+ "train_station",
+ "bus_station",
+ "pumping_station",
+ "power_substation",
+ "kindergarten",
+ "school",
+ "college",
+ "university",
+ "mosque ",
+ " church ",
+ " temple",
+ "supermarket",
+ "marketplace",
+ "clinic",
+ "hospital",
+ "police",
+ "fire_station",
+ "stadium ",
+ " sports_centre",
+ "governor_office ",
+ " townhall ",
+ " subdistrict_office ",
+ " village_office ",
+ " community_group_office",
+ "government_office"
+ ],
+ "man_made": [
+ "tower",
+ "water_tower",
+ "pumping_station"
+ ],
+ "tower:type": [
+ "communication"
+ ],
+ "aeroway": [
+ "aerodrome"
+ ],
+ "railway": [
+ "station"
+ ],
+ "emergency": [
+ "fire_hydrant"
+ ],
+ "landuse": [
+ "reservoir",
+ "recreation_gound"
+ ],
+ "waterway": [
+ "floodgate"
+ ],
+ "natural": [
+ "spring"
+ ],
+ "power": [
+ "tower",
+ "substation"
+ ],
+ "shop": [
+ "supermarket"
+ ],
+ "leisure": [
+ "stadium ",
+ " sports_centre ",
+ " pitch ",
+ " swimming_pool",
+ "park"
+ ],
+ "office": [
+ "government"
+ ]
+ },
+ "line": {
+ "highway": [
+ "motorway ",
+ " trunk ",
+ " primary ",
+ " secondary ",
+ " tertiary ",
+ " service ",
+ " residential ",
+ " pedestrian ",
+ " path ",
+ " living_street ",
+ " track"
+ ],
+ "railway": [
+ "rail"
+ ],
+ "man_made": [
+ "embankment"
+ ],
+ "waterway": []
+ },
+ "polygon": {
+ "amenity": [
+ "bank",
+ "ferry_terminal",
+ "bus_station",
+ "fuel",
+ "kindergarten",
+ "school",
+ "college",
+ "university",
+ "place_of_worship",
+ "marketplace",
+ "clinic",
+ "hospital",
+ "police",
+ "fire_station"
+ ],
+ "building": [
+ "bank",
+ "aerodrome",
+ "ferry_terminal",
+ "train_station",
+ "bus_station",
+ "pumping_station",
+ "power_substation",
+ "power_plant",
+ "kindergarten",
+ "school",
+ "college",
+ "university",
+ "mosque ",
+ " church ",
+ " temple",
+ "supermarket",
+ "marketplace",
+ "clinic",
+ "hospital",
+ "police",
+ "fire_station",
+ "stadium ",
+ " sports_centre",
+ "governor_office ",
+ " townhall ",
+ " subdistrict_office ",
+ " village_office ",
+ " community_group_office",
+ "government_office"
+ ],
+ "man_made": [
+ "tower",
+ "water_tower",
+ "pumping_station"
+ ],
+ "tower:type": [
+ "communication"
+ ],
+ "aeroway": [
+ "aerodrome"
+ ],
+ "railway": [
+ "station"
+ ],
+ "landuse": [
+ "reservoir",
+ "recreation_gound"
+ ],
+ "waterway": [],
+ "natural": [
+ "spring"
+ ],
+ "power": [
+ "substation",
+ "plant"
+ ],
+ "shop": [
+ "supermarket"
+ ],
+ "leisure": [
+ "stadium ",
+ " sports_centre ",
+ " pitch ",
+ " swimming_pool",
+ "park"
+ ],
+ "office": [
+ "government"
+ ],
+ "type": [
+ "boundary"
+ ],
+ "boundary": [
+ "administrative"
+ ]
+ }
+ },
+ "attributes": {
+ "point": [
+ "building",
+ "ground_floor:height",
+ "capacity:persons",
+ "building:structure",
+ "building:condition",
+ "name",
+ "admin_level",
+ "building:material",
+ "office",
+ "building:roof",
+ "backup_generator",
+ "access:roof",
+ "building:levels",
+ "building:floor",
+ "addr:full",
+ "addr:city",
+ "source"
+ ],
+ "line": [
+ "width",
+ "source",
+ "waterway",
+ "name"
+ ],
+ "polygon": [
+ "landslide_prone",
+ "name",
+ "admin_level",
+ "type",
+ "is_in:town",
+ "flood_prone",
+ "is_in:province",
+ "is_in:city",
+ "is_in:municipality",
+ "is_in:RW",
+ "is_in:village",
+ "source",
+ "boundary"
+ ]
+ }
+ }
+}
+
{
+ "title": "Params",
+ "allOf": [
+ {
+ "$ref": "#/components/schemas/RawDataCurrentParams"
+ }
+ ],
+ "default": {}
+}
+
+ Response 200 OK +
+ +{
+ "task_id": "aa539af6-83d4-4aa3-879e-abf14fffa03f",
+ "track_link": "/tasks/status/aa539af6-83d4-4aa3-879e-abf14fffa03f/"
+}
+
{
+ "title": "SnapshotResponse",
+ "required": [
+ "taskId",
+ "trackLink"
+ ],
+ "type": "object",
+ "properties": {
+ "taskId": {
+ "title": "Taskid",
+ "type": "string"
+ },
+ "trackLink": {
+ "title": "Tracklink",
+ "type": "string"
+ }
+ },
+ "additionalProperties": false,
+ "example": {
+ "task_id": "aa539af6-83d4-4aa3-879e-abf14fffa03f",
+ "track_link": "/tasks/status/aa539af6-83d4-4aa3-879e-abf14fffa03f/"
+ }
+}
+
+ Response 422 Unprocessable Entity +
+ +{
+ "detail": [
+ {
+ "loc": [
+ "string"
+ ],
+ "msg": "string",
+ "type": "string"
+ }
+ ]
+}
+
{
+ "title": "HTTPValidationError",
+ "type": "object",
+ "properties": {
+ "detail": {
+ "title": "Detail",
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ValidationError"
+ }
+ }
+ }
+}
+
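Putting the pieces together: a client posts one of the request bodies shown above to /snapshot/, receives a task_id and track_link, and then polls the task status endpoint for a download URL. A hedged end-to-end sketch with an illustrative base URL:

```python
# End-to-end sketch for /snapshot/; BASE_URL and the polling interval are
# illustrative, and the request body mirrors the Pokhara_buildings example.
import time

import requests

BASE_URL = "http://127.0.0.1:8000"

request_body = {
    "fileName": "Pokhara_buildings",
    "outputType": "geojson",
    "geometry": {
        "type": "Polygon",
        "coordinates": [[
            [83.96919250488281, 28.194446860487773],
            [83.99751663208006, 28.194446860487773],
            [83.99751663208006, 28.214869548073377],
            [83.96919250488281, 28.214869548073377],
            [83.96919250488281, 28.194446860487773],
        ]],
    },
    "filters": {"tags": {"all_geometry": {"building": []}}},
}

submit = requests.post(f"{BASE_URL}/snapshot/", json=request_body)
submit.raise_for_status()
track_link = submit.json()["track_link"]

while True:
    status = requests.get(f"{BASE_URL}{track_link}").json()
    if status["status"] in ("SUCCESS", "FAILURE"):
        break
    time.sleep(5)

if status["status"] == "SUCCESS":
    print("Download:", status["result"]["download_url"])
```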
Get Current Snapshot As Plain Geojson
Simple API to get OSM features as GeoJSON for a small region. It is designed only for querying small amounts of data; for large data, use /snapshot/ instead.
Params ::
bbox: Optional[List] = takes xmin, ymin, xmax, ymax; uses srid=4326
select: List = the select query; you can pass [*] to select all attributes
where: List[WhereCondition] = [{'key': 'building', 'value': ['*']}, {'key': 'amenity', 'value': ['school', 'college']}]
join_by: Optional[JoinFilterType] = or / and
look_in: Optional[List[OsmFeatureType]] = ["nodes", "ways_poly", "ways_line", "relations"] : table names
+Request body
+ +{
+ "select": [
+ "name"
+ ],
+ "where": [
+ {
+ "key": "admin_level",
+ "value": [
+ "2"
+ ]
+ },
+ {
+ "key": "boundary",
+ "value": [
+ "administrative"
+ ]
+ },
+ {
+ "key": "name:en",
+ "value": [
+ "Nepal"
+ ]
+ }
+ ],
+ "joinBy": "AND",
+ "lookIn": [
+ "relations"
+ ]
+}
+
{
+ "select": [
+ "name"
+ ],
+ "where": [
+ {
+ "key": "admin_level",
+ "value": [
+ "7"
+ ]
+ },
+ {
+ "key": "boundary",
+ "value": [
+ "administrative"
+ ]
+ },
+ {
+ "key": "name",
+ "value": [
+ "Pokhara"
+ ]
+ }
+ ],
+ "joinBy": "AND",
+ "lookIn": [
+ "relations"
+ ]
+}
+
{
+ "title": "Params",
+ "allOf": [
+ {
+ "$ref": "#/components/schemas/SnapshotParamsPlain"
+ }
+ ],
+ "default": {}
+}
+
+ Response 200 OK +
+ +{
+ "title": "Response Get Current Snapshot As Plain Geojson Snapshot Plain Post",
+ "type": "object"
+}
+
+ Response 422 Unprocessable Entity +
+ +{
+ "detail": [
+ {
+ "loc": [
+ "string"
+ ],
+ "msg": "string",
+ "type": "string"
+ }
+ ]
+}
+
{
+ "title": "HTTPValidationError",
+ "type": "object",
+ "properties": {
+ "detail": {
+ "title": "Detail",
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ValidationError"
+ }
+ }
+ }
+}
+
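A short client sketch for this endpoint, reusing the Pokhara request body above; the /snapshot/plain/ path is inferred from the response title shown earlier and may differ in your deployment:

```python
# Sketch for the plain-GeoJSON endpoint; the /snapshot/plain/ path is inferred
# from the response title above and BASE_URL is illustrative.
import requests

BASE_URL = "http://127.0.0.1:8000"

body = {
    "select": ["name"],
    "where": [
        {"key": "admin_level", "value": ["7"]},
        {"key": "boundary", "value": ["administrative"]},
        {"key": "name", "value": ["Pokhara"]},
    ],
    "joinBy": "AND",
    "lookIn": ["relations"],
}

resp = requests.post(f"{BASE_URL}/snapshot/plain/", json=body)
resp.raise_for_status()
feature_collection = resp.json()  # returned directly as GeoJSON, no task queue
print(len(feature_collection.get("features", [])), "features")
```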
Get Task Status
Tracks the request using the task id provided by the Raw Data API.
+Args:
+task_id ([type]): [Unique id provided on response from /snapshot/]
+
+Returns:
+id: Id of the task
+status : SUCCESS / PENDING
+result : Result of task
+
A successful task will have additional nested JSON inside.
+Input parameters
Parameter | In | Type | Default | Nullable | Description
---|---|---|---|---|---
task_id | path | None |  | No | 
+ Response 200 OK +
+ +{
+ "id": "3fded368-456f-4ef4-a1b8-c099a7f77ca4",
+ "status": "SUCCESS",
+ "result": {
+ "download_url": "https://s3.us-east-1.amazonaws.com/exports-stage.hotosm.org/Raw_Export_3fded368-456f-4ef4-a1b8-c099a7f77ca4_GeoJSON.zip",
+ "file_name": "Raw_Export_3fded368-456f-4ef4-a1b8-c099a7f77ca4_GeoJSON",
+ "response_time": "0:00:12.175976",
+ "query_area": "6 Sq Km ",
+ "binded_file_size": "7 MB",
+ "zip_file_size_bytes": 1331601
+ }
+}
+
{
+ "title": "SnapshotTaskResponse",
+ "required": [
+ "id",
+ "status",
+ "result"
+ ],
+ "type": "object",
+ "properties": {
+ "id": {
+ "title": "Id",
+ "type": "string"
+ },
+ "status": {
+ "title": "Status",
+ "type": "string"
+ },
+ "result": {
+ "$ref": "#/components/schemas/SnapshotTaskResult"
+ }
+ },
+ "additionalProperties": false,
+ "example": {
+ "id": "3fded368-456f-4ef4-a1b8-c099a7f77ca4",
+ "status": "SUCCESS",
+ "result": {
+ "download_url": "https://s3.us-east-1.amazonaws.com/exports-stage.hotosm.org/Raw_Export_3fded368-456f-4ef4-a1b8-c099a7f77ca4_GeoJSON.zip",
+ "file_name": "Raw_Export_3fded368-456f-4ef4-a1b8-c099a7f77ca4_GeoJSON",
+ "response_time": "0:00:12.175976",
+ "query_area": "6 Sq Km ",
+ "binded_file_size": "7 MB",
+ "zip_file_size_bytes": 1331601
+ }
+ }
+}
+
+ Response 422 Unprocessable Entity +
+ +{
+ "detail": [
+ {
+ "loc": [
+ "string"
+ ],
+ "msg": "string",
+ "type": "string"
+ }
+ ]
+}
+
{
+ "title": "HTTPValidationError",
+ "type": "object",
+ "properties": {
+ "detail": {
+ "title": "Detail",
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ValidationError"
+ }
+ }
+ }
+}
+
Name | Type
---|---
allGeometry | Array<string>
line | Array<string>
point | Array<string>
polygon | Array<string>

Name | Type
---|---
attributes | AttributeFilter
tags | TagsFilter

Name | Type
---|---
detail | Array<ValidationError>

Type:

Name | Type
---|---
coordinates | Array<Array<Array<>>>
type | string

Type:

Name | Type
---|---
coordinates | Array<Array<>>
type | string

Name | Type
---|---
countryExport | boolean
fileName | string
filters | 
geometry | 
geometryType | Array<SupportedGeometryFilters>
joinFilterType | 
maxZoom | integer
minZoom | integer
outputType | 

Type:

Name | Type
---|---
bbox | 
geometryType | SupportedGeometryFilters
joinBy | 
lookIn | Array<OsmFeatureType>
select | Array<string>
where | Array<WhereCondition>

Name | Type
---|---
taskId | string
trackLink | string

Name | Type
---|---
id | string
result | SnapshotTaskResult
status | string

Name | Type
---|---
bindedFileSize | string
downloadUrl | string
fileName | string
queryArea | string
responseTime | string
zipFileSizeBytes | integer

Name | Type
---|---
lastUpdated | string

Type:

Name | Type
---|---
allGeometry | 
line | 
point | 
polygon | 

Name | Type
---|---
loc | Array<string>
msg | string
type | string

Name | Type
---|---
key | string
value | Array<string>
Page contains the main core logic of the app
+AWS_ACCESS_KEY_ID
+
AWS_SECRET_ACCESS_KEY
+
BUCKET_NAME
+
DEFAULT_README_TEXT
+
ENABLE_CUSTOM_EXPORTS
+
ENABLE_HDX_EXPORTS
+
ENABLE_POLYGON_STATISTICS_ENDPOINTS
+
ENABLE_SOZIP
+
ENABLE_TILES
+
EXPORT_MAX_AREA_SQKM
+
EXPORT_TYPE_MAPPING
+
HDX_FILTER_CRITERIA
+
HDX_MARKDOWN
+
LOCAL_CON_POOL
+
PARALLEL_PROCESSING_CATEGORIES
+
POLYGON_STATISTICS_API_URL
+
PROCESS_SINGLE_CATEGORY_IN_POSTGRES
+
USE_DUCK_DB_FOR_CUSTOM_EXPORTS
+
USE_S3_TO_UPLOAD
+
database_instance
+
export_path
+
index_threshold
+
level
+
use_connection_pooling
+
def check_for_json(
+ result_str
+)
+
Return: bool: + True in case of success, False otherwise
+def convert_dict_to_conn_str(
+ db_dict
+)
+
def dict_none_clean(
+ to_clean
+)
+
def generate_ogr2ogr_cmd_from_psql(
+ export_file_path,
+ export_file_format_driver,
+ postgres_query,
+ layer_creation_options,
+ query_dump_path
+)
+
def print_psycopg2_exception(
+ err
+)
+
def run_ogr2ogr_cmd(
+ cmd
+)
+
Args:
    cmd (type): Command to run for subprocess
    binding_file_dir (type): description
Raises:
    Exception: If the process fails
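A hedged sketch of how such a command can be run via subprocess with failures surfaced as exceptions; the ogr2ogr invocation, connection string, and column names are illustrative only:

```python
# Hedged sketch of running an ogr2ogr command via subprocess and raising on
# failure; the command, connection string, and column names are illustrative.
import subprocess

def run_cmd(cmd: str) -> None:
    process = subprocess.run(cmd, shell=True, capture_output=True, text=True)
    if process.returncode != 0:
        raise Exception(f"ogr2ogr failed: {process.stderr.strip()}")

run_cmd(
    'ogr2ogr -f GPKG /tmp/export.gpkg "PG:host=localhost dbname=osm" '
    '-sql "SELECT osm_id, geom FROM ways_poly LIMIT 10"'
)
```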
+class CustomExport(
+ params
+)
+
Constructor for the custom export class.
+Parameters: +- params (DynamicCategoriesModel): An instance of DynamicCategoriesModel containing configuration settings.
+def clean_resources(
+ self
+)
+
def file_to_zip(
+ self,
+ working_dir,
+ zip_path
+)
+
Parameters: +- working_dir (str): Path to the directory containing files to be zipped. +- zip_path (str): Path to the resulting ZIP file.
+Returns: +- Path to the created ZIP file.
+def format_where_clause_duckdb(
+ self,
+ where_clause
+)
+
Parameters: +- where_clause (str): SQL-like condition to filter features.
+Returns: +- Formatted where_clause.
+def process_category(
+ self,
+ category
+)
+
Parameters: +- category (Dict[str, CategoryModel]): Dictionary representing a category.
+Returns: +- List of resource dictionaries containing export information.
+def process_category_result(
+ self,
+ category_result
+)
+
Parameters: +- category_result (CategoryResult): Instance of CategoryResult.
+Returns: +- Dictionary containing processed category result.
+def process_custom_categories(
+ self
+)
+
Returns: +- Dictionary containing the processed dataset information.
+def query_to_file(
+ self,
+ query,
+ category_name,
+ feature_type,
+ export_formats
+)
+
Parameters: +- query (str): SQL query to execute. +- category_name (str): Name of the category. +- feature_type (str): Feature type. +- export_formats (List[ExportTypeInfo]): List of export formats.
+Returns: +- List of resource dictionaries containing export information.
+def resource_to_hdx(
+ self,
+ uploaded_resources,
+ dataset_config,
+ category
+)
+
Parameters: +- uploaded_resources (List[Dict[str, Any]]): List of resource dictionaries. +- dataset_config (DatasetConfig): Instance of DatasetConfig. +- category (Dict[str, CategoryModel]): Dictionary representing a category.
+Returns: +- Dictionary containing the HDX upload information.
+def resource_to_response(
+ self,
+ uploaded_resources,
+ category
+)
+
Parameters: +- uploaded_resources (List[Dict[str, Any]]): List of resource dictionaries. +- category (Dict[str, CategoryModel]): Dictionary representing a category.
+Returns: +- Dictionary containing the response information.
+def types_to_tables(
+ self,
+ type_list: list
+)
+
Parameters: +- type_list (List[str]): List of feature types.
+Returns: +- List of database tables associated with the given feature types.
+def upload_resources(
+ self,
+ resource_path
+)
+
Parameters: +- resource_path (str): Path to the resource file on the local filesystem.
+Returns: +- Download URL for the uploaded resource.
+def zip_to_s3(
+ self,
+ resources
+)
+
Parameters: +- resources (List[Dict[str, Any]]): List of resource dictionaries.
+Returns: +- List of resource dictionaries with added download URLs.
+class Database(
+ db_params
+)
+
Database class is used to connect to your database, run queries, and get results from it. It has all tests and validation inside the class.
+def close_conn(
+ self
+)
+
def connect(
+ self
+)
+
def executequery(
+ self,
+ query
+)
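A minimal usage sketch of the Database helper; the module paths (src.app, src.config) and the query are assumptions for illustration:

```python
# Minimal usage sketch; the module paths and the query are illustrative.
from src.app import Database
from src.config import get_db_connection_params

db = Database(get_db_connection_params())
db.connect()
try:
    result = db.executequery("SELECT 1;")
    print(result)
finally:
    db.close_conn()
```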
+
class DuckDB(
+ db_path,
+ temp_dir=None
+)
+
Constructor for the DuckDB class.
+Parameters: +- db_path (str): The path to the DuckDB database file.
+def run_query(
+ self,
+ query,
+ attach_pgsql=False,
+ load_spatial=False
+)
+
Parameters: +- query (str): The SQL query to execute. +- attach_pgsql (bool): Flag to indicate whether to attach a PostgreSQL database. +- load_spatial (bool): Flag to indicate whether to load the spatial extension.
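A sketch of driving the DuckDB wrapper; the module path, database path, and query below are assumptions for illustration:

```python
# Illustrative sketch of the DuckDB wrapper; the database path and query are
# placeholders and the module path is assumed.
from src.app import DuckDB

duck = DuckDB(db_path="/tmp/custom_export.duckdb")

# load_spatial loads the spatial extension before running the query;
# attach_pgsql would additionally attach the configured PostgreSQL database.
duck.run_query(
    "CREATE TABLE IF NOT EXISTS demo AS SELECT 1 AS id;",
    attach_pgsql=False,
    load_spatial=False,
)
```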
+class HDX(
+
+)
+
def create_hdx(
+ self,
+ hdx_data
+)
+
Args: + hdx_data (dict): Data for creating the HDX entry.
+Returns: + dict: Result of the HDX creation process.
+def delete_hdx(
+ self,
+ hdx_id: int
+)
+
Args: + hdx_id (int): ID of the HDX entry to delete.
+Returns: + dict: Result of the HDX deletion process.
+Raises: + HTTPException: If the HDX entry is not found.
+def get_hdx_by_id(
+ self,
+ hdx_id: int
+)
+
Args: + hdx_id (int): ID of the HDX entry to retrieve.
+Returns: + dict: Details of the requested HDX entry.
+Raises: + HTTPException: If the HDX entry is not found.
+def get_hdx_list_with_filters(
+ self,
+ skip: int = 0,
+ limit: int = 10,
+ filters: dict = {}
+)
+
Args: + skip (int): Number of entries to skip. + limit (int): Maximum number of entries to retrieve. + filters (dict): Filtering criteria.
+Returns: + List[dict]: List of HDX entries.
+def patch_hdx(
+ self,
+ hdx_id: int,
+ hdx_data: dict
+)
+
Args: + hdx_id (int): ID of the HDX entry to update. + hdx_data (dict): Data for partially updating the HDX entry.
+Returns: + dict: Result of the HDX update process.
+Raises: + HTTPException: If the HDX entry is not found.
+def search_hdx_by_dataset_title(
+ self,
+ dataset_title: str,
+ skip: int = 0,
+ limit: int = 10
+)
+
Args: + dataset_title (str): The title of the dataset to search for. + skip (int): Number of entries to skip. + limit (int): Maximum number of entries to retrieve.
+Returns: + List[dict]: List of HDX entries matching the dataset title.
+def update_hdx(
+ self,
+ hdx_id: int,
+ hdx_data
+)
+
Args: + hdx_id (int): ID of the HDX entry to update. + hdx_data (dict): Data for updating the HDX entry.
+Returns: + dict: Result of the HDX update process.
+Raises: + HTTPException: If the HDX entry is not found.
+class HDXUploader(
+ category,
+ hdx,
+ uuid,
+ default_category_path,
+ completeness_metadata=None
+)
+
Constructor for the HDXUploader class.
+Parameters: +- category (Dict[str, CategoryModel]): Dictionary representing a category. +- hdx (HDX): Instance of the HDX class. +- uuid (str): Universally unique identifier. +- default_category_path (str): Default path for the category. +- completeness_metadata (Optional[Dict[str, Any]]): Metadata for completeness.
+def add_notes(
+ self
+)
+
Returns: +- Notes string.
+def add_resource(
+ self,
+ resource_meta
+)
+
Parameters: +- resource_meta (Dict[str, Any]): Metadata for the resource.
+def init_dataset(
+ self
+)
+
def slugify(
+ self,
+ name
+)
+
Parameters: +- name (str): Input string.
+Returns: +- Slugified string.
+def upload_dataset(
+ self,
+ dump_config_to_s3=False
+)
+
Parameters: +- dump_config_to_s3 (bool): Flag to indicate whether to dump configuration to S3.
+Returns: +- Tuple containing category name and dataset information.
+class PolygonStats(
+ geojson=None,
+ iso3=None
+)
+
Generates stats for polygon
+def get_building_pattern_statement(
+ osm_building_count,
+ ai_building_count,
+ avg_timestamp,
+ last_edit_timestamp,
+ osm_building_count_6_months
+)
+
Args: + osm_building_count (int): Count of buildings from OpenStreetMap. + ai_building_count (int): Count of buildings from AI estimates. + avg_timestamp (timestamp): Average timestamp of data. + last_edit_timestamp(timestamp): Last edit timestamp of an area + osm_building_count_6_months (int): Count of buildings updated in the last 6 months.
+Returns: + str: Human-readable building statement.
+def get_road_pattern_statement(
+ osm_highway_length,
+ ai_highway_length,
+ avg_timestamp,
+ last_edit_timestamp,
+ osm_highway_length_6_months
+)
+
Args: + osm_highway_length (float): Length of roads from OpenStreetMap. + ai_highway_length (float): Length of roads from AI estimates. + avg_timestamp (str): Average timestamp of data. + osm_highway_length_6_months (float): Length of roads updated in the last 6 months.
+Returns: + str: Human-readable road statement.
+def get_osm_analytics_meta_stats(
+ self
+)
+
Returns: + dict: Raw statistics translated into JSON.
+def get_summary_stats(
+ self
+)
+
Returns: + dict: Summary statistics including building and road statements.
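A usage sketch for PolygonStats, assuming the same module path as in the earlier sketches and reusing a polygon from the /snapshot/ examples:

```python
# Usage sketch for PolygonStats; the module path and geometry are illustrative.
from src.app import PolygonStats

aoi = {
    "type": "Polygon",
    "coordinates": [[
        [83.96919250488281, 28.194446860487773],
        [83.99751663208006, 28.194446860487773],
        [83.99751663208006, 28.214869548073377],
        [83.96919250488281, 28.214869548073377],
        [83.96919250488281, 28.194446860487773],
    ]],
}

stats = PolygonStats(geojson=aoi)
summary = stats.get_summary_stats()
print(summary)  # building and road statements plus raw counts
```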
+class RawData(
+ parameters=None,
+ dbdict=None
+)
+
Class responsible for raw data extraction from available sources; currently works for the Underpass source (current snapshot).
Returns:
    GeoJSON zip file
Supports:
    - Any key-value pair of OSM tags
    - A polygon
    - OSM element type (optional)
+def close_con(
+ con
+)
+
def geojson2tiles(
+ geojson_path,
+ tile_path,
+ tile_layer_name
+)
+
def get_grid_id(
+ geom,
+ cur
+)
+
Args: + geom (type): description + cur (type): description
+Returns: + type: grid id , geometry dump and the area of geometry
+def ogr_export(
+ query,
+ outputtype,
+ working_dir,
+ dump_temp_path,
+ params
+)
+
Args: + query (type): Postgresql query to extract + outputtype (type): description + working_dir (type): description + dump_temp_path (type): temp file path for metadata gen + params (type): description
+def ogr_export_shp(
+ point_query,
+ line_query,
+ poly_query,
+ working_dir,
+ file_name
+)
+
def query2geojson(
+ con,
+ extraction_query,
+ dump_temp_file_path
+)
+
def check_status(
+ self
+)
+
def extract_current_data(
+ self,
+ exportname
+)
+
Returns: + geom_area: area of polygon supplied + working_dir: dir where results are saved
+def extract_plain_geojson(
+ self
+)
+
def get_countries_list(
+ self,
+ q
+)
+
Args: + q (type): list filter query string
+Returns: + featurecollection: geojson of country
+def get_osm_feature(
+ self,
+ osm_id
+)
+
Args: + osm_id (type): osm_id of feature
+Returns: + featurecollection: Geojson
+class S3FileTransfer(
+
+)
+
Responsible for the file transfer to S3 from the API machine
+def get_bucket_location(
+ self,
+ bucket_name
+)
+
def list_buckets(
+ self
+)
+
def upload(
+ self,
+ file_path,
+ file_name,
+ file_suffix=None
+)
+
class Users(
+
+)
+
Users class provides CRUD operations for interacting with the 'users' table in the database.
+Methods: +- create_user(osm_id: int, role: int) -> Dict[str, Any]: Inserts a new user into the database. +- read_user(osm_id: int) -> Dict[str, Any]: Retrieves user information based on the given osm_id. +- update_user(osm_id: int, update_data: UserUpdate) -> Dict[str, Any]: Updates user information based on the given osm_id. +- delete_user(osm_id: int) -> Dict[str, Any]: Deletes a user based on the given osm_id. +- read_users(skip: int = 0, limit: int = 10) -> List[Dict[str, Any]]: Retrieves a list of users with optional pagination.
+Usage: +users = Users()
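+Expanding on that usage line, a hedged sketch of the CRUD methods listed above (role values and the UserUpdate field names are assumptions):
+users = Users()
+users.create_user(osm_id=123456, role=2)                            # returns {'osm_id': 123456, ...}
+record = users.read_user(osm_id=123456)                             # falls back to a default user with role 3 if missing
+users.update_user(osm_id=123456, update_data=UserUpdate(role=1))    # UserUpdate is the API's update model
+page = users.read_users(skip=0, limit=10)
+users.delete_user(osm_id=123456)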
+def create_user(
+ self,
+ osm_id,
+ role
+)
+
Args: +- osm_id (int): The OSM ID of the new user. +- role (int): The role of the new user.
+Returns: +- Dict[str, Any]: A dictionary containing the osm_id of the newly created user.
+Raises: +- HTTPException: If the user creation fails.
+def delete_user(
+ self,
+ osm_id
+)
+
Args: +- osm_id (int): The OSM ID of the user to delete.
+Returns: +- Dict[str, Any]: A dictionary containing the deleted user information.
+Raises: +- HTTPException: If the user with the given osm_id is not found.
+def read_user(
+ self,
+ osm_id
+)
+
Args: +- osm_id (int): The OSM ID of the user to retrieve.
+Returns: +- Dict[str, Any]: A dictionary containing user information if the user is found. + If the user is not found, returns a default user with 'role' set to 3.
+Raises: +- HTTPException: If there's an issue with the database query.
+def read_users(
+ self,
+ skip=0,
+ limit=10
+)
+
Args: +- skip (int): The number of users to skip (for pagination). +- limit (int): The maximum number of users to retrieve (for pagination).
+Returns: +- List[Dict[str, Any]]: A list of dictionaries containing user information.
+def update_user(
+ self,
+ osm_id,
+ update_data
+)
+
Args: +- osm_id (int): The OSM ID of the user to update. +- update_data (UserUpdate): The data to update for the user.
+Returns: +- Dict[str, Any]: A dictionary containing the updated user information.
+Raises: +- HTTPException: If the user with the given osm_id is not found.
+ALLOW_BIND_ZIP_FILTER
+
AWS_ACCESS_KEY_ID
+
AWS_SECRET_ACCESS_KEY
+
BUCKET_NAME
+
CELERY_BROKER_URL
+
CELERY_RESULT_BACKEND
+
CONFIG_FILE_PATH
+
DAEMON_QUEUE_NAME
+
DEFAULT_HARD_TASK_LIMIT
+
DEFAULT_QUEUE_NAME
+
DEFAULT_README_TEXT
+
DEFAULT_SOFT_TASK_LIMIT
+
ENABLE_CUSTOM_EXPORTS
+
ENABLE_HDX_EXPORTS
+
ENABLE_POLYGON_STATISTICS_ENDPOINTS
+
ENABLE_SOZIP
+
ENABLE_TILES
+
EXPORT_MAX_AREA_SQKM
+
EXPORT_PATH
+
EXTRA_README_TXT
+
FILE_UPLOAD_METHOD
+
HDX_HARD_TASK_LIMIT
+
HDX_SOFT_TASK_LIMIT
+
INDEX_THRESHOLD
+
LIMITER
+
LOG_LEVEL
+
PARALLEL_PROCESSING_CATEGORIES
+
POLYGON_STATISTICS_API_RATE_LIMIT
+
POLYGON_STATISTICS_API_URL
+
PROCESS_SINGLE_CATEGORY_IN_POSTGRES
+
RATE_LIMITER_STORAGE_URI
+
RATE_LIMIT_PER_MIN
+
SENTRY_DSN
+
SENTRY_RATE
+
USE_CONNECTION_POOLING
+
USE_DUCK_DB_FOR_CUSTOM_EXPORTS
+
USE_S3_TO_UPLOAD
+
WORKER_PREFETCH_MULTIPLIER
+
config
+
level
+
logger
+
def get_bool_env_var(
+ key,
+ default=False
+)
+
def get_db_connection_params(
+
+) -> dict
+
Returns: connection_params (dict): PostgreSQL connection parameters + corresponding to the configuration section.
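+For instance (the key names shown are an assumption based on typical PostgreSQL connection parameters):
+db_params = get_db_connection_params()
+# e.g. {'host': 'localhost', 'port': '5432', 'dbname': 'raw', 'user': 'postgres', 'password': '...'}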
+def get_oauth_credentials(
+
+) -> tuple
+
Return an ordered Python tuple that can be passed to functions that +authenticate to OSM.
+Order of precedence: +1. Environment Variables +2. Config File +3. Default fallback
+Returns: oauth2_credentials (tuple): Tuple containing OAuth2 client + secret, client ID, and redirect URL.
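+The precedence above can be illustrated with a small, self-contained sketch; the helper, environment variable, and config section names here are invented for the example:
+import os
+from configparser import ConfigParser
+
+def _get_setting(env_key, section, option, default=None, config_path="config.txt"):
+    # 1. environment variables win, 2. then the config file, 3. then the default fallback
+    if os.environ.get(env_key):
+        return os.environ[env_key]
+    parser = ConfigParser()
+    parser.read(config_path)                  # silently ignored if the file does not exist
+    if parser.has_option(section, option):
+        return parser.get(section, option)
+    return default
+
+client_id = _get_setting("OSM_CLIENT_ID", "OAUTH", "client_id")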
+def not_raises(
+ func,
+ *args,
+ **kwargs
+)
+
class Database(
+
+)
+
Handles all work related to connection pooling
+def close_all_connection_pool(
+ self
+)
+
def connect(
+ self
+)
+
def get_conn_from_pool(
+ self
+)
+
Returns: + connection
+def release_conn_from_pool(
+ self,
+ pool_con
+)
+
Args: + pool_con (type): define which connection to remove from pool
+Raises: + ex: error if the connection does not exist or the function misbehaves
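+A hedged sketch of the pooling lifecycle documented above (whether the constructor takes connection parameters, and the cursor API of the underlying driver, are assumptions):
+db = Database()                                  # may require get_db_connection_params() in practice
+db.connect()
+conn = db.get_conn_from_pool()
+try:
+    with conn.cursor() as cur:                   # psycopg2-style cursor
+        cur.execute("SELECT 1;")
+        print(cur.fetchone())
+finally:
+    db.release_conn_from_pool(conn)
+db.close_all_connection_pool()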
+database_instance
+
Page contains the query logic required for the application
+HDX_FILTER_CRITERIA
+
HDX_MARKDOWN
+
USE_DUCK_DB_FOR_CUSTOM_EXPORTS
+
def check_exisiting_country(
+ geom
+)
+
def check_last_updated_rawdata(
+
+)
+
def convert_tags_pattern_to_postgres(
+ query_string
+)
+
def create_column_filter(
+ columns,
+ create_schema=False,
+ output_type='geojson',
+ use_centroid=False,
+ include_osm_type=True
+)
+
def create_geom_filter(
+ geom,
+ geom_lookup_by='ST_intersects'
+)
+
def create_tag_sql_logic(
+ key,
+ value,
+ filter_list
+)
+
def extract_attributes_tags(
+ filters
+)
+
def extract_custom_features_from_postgres(
+ select_q,
+ from_q,
+ where_q,
+ geom=None,
+ cid=None
+)
+
def extract_features_custom_exports(
+ base_table_name,
+ select,
+ feature_type,
+ where,
+ geometry=None,
+ cid=None
+)
+
Args: +- base_table_name (str): Base table name. +- select (List[str]): List of selected fields. +- feature_type (str): Type of feature (points, lines, polygons). +- where (str): SQL-like condition to filter features.
+Returns: +str: Extraction query to extract features.
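+For example, a hedged call that builds the extraction query for one category (the values and the where syntax shown are illustrative assumptions):
+query = extract_features_custom_exports(
+    base_table_name="raw_custom_abc123",
+    select=["name", "building"],
+    feature_type="polygons",
+    where="tags['building'] IS NOT NULL",
+)
+print(query)   # SQL string to run against the prepared table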
+def extract_geometry_type_query(
+ params,
+ ogr_export=False,
+ g_id=None,
+ c_id=None,
+ country_export=False
+)
+
def format_file_name_str(
+ input_str
+)
+
def generate_polygon_stats_graphql_query(
+ geojson_feature
+)
+
def generate_tag_filter_query(
+ filter,
+ join_by=' OR ',
+ plain_query_filter=False
+)
+
def generate_where_clause_indexes_case(
+ geom_filter,
+ g_id,
+ c_id,
+ country_export,
+ table_name='ways_poly'
+)
+
def get_countries_query(
+ q
+)
+
def get_country_from_iso(
+ iso3
+)
+
Args: +- iso3 (str): ISO3 Country Code.
+Returns: +str: SQL query to fetch country information.
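+For example:
+sql = get_country_from_iso("NPL")   # returns the SQL string; execute it via the Database helpers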
+def get_country_geojson(
+ c_id
+)
+
def get_country_geom_from_iso(
+ iso3
+)
+
Args: +- iso3 (str): ISO3 Country Code.
+Returns: +str: SQL query to fetch country geometry.
+def get_country_id_query(
+ geom_dump
+)
+
def get_grid_id_query(
+ geometry_dump
+)
+
def get_osm_feature_query(
+ osm_id
+)
+
def get_query_as_geojson(
+ query_list,
+ ogr_export=None
+)
+
def postgres2duckdb_query(
+ base_table_name,
+ table,
+ cid=None,
+ geometry=None,
+ single_category_where=None,
+ enable_users_detail=False
+)
+
Args: +- base_table_name (str): Base table name. +- table (str): PostgreSQL table name. +- cid (int, optional): Country ID for filtering. Defaults to None. +- geometry (Polygon, optional): Custom polygon geometry. Defaults to None. +- single_category_where (str, optional): Where clause for single category to fetch it from postgres +- enable_users_detail (bool, optional): Enable user details. Defaults to False.
+Returns: +str: DuckDB query for creating a table.
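+A hedged example call (argument values are illustrative, and the resulting table name pattern is an assumption):
+duck_query = postgres2duckdb_query(
+    base_table_name="raw_custom_abc123",
+    table="ways_poly",
+    cid=None,
+    geometry=request_geometry,        # assumed to be the request's Polygon/MultiPolygon
+    single_category_where=None,
+    enable_users_detail=False,
+)
+print(duck_query)                     # run inside DuckDB to create the staging table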
+def raw_currentdata_extraction_query(
+ params,
+ g_id=None,
+ c_id=None,
+ ogr_export=False,
+ select_all=False,
+ country_export=False
+)
+
def raw_extract_plain_geojson(
+ params,
+ inspect_only=False
+)
+
def remove_spaces(
+ input_str
+)
+
Page contains the validation models for the application
+ALLOW_BIND_ZIP_FILTER
+
ENABLE_HDX_EXPORTS
+
ENABLE_POLYGON_STATISTICS_ENDPOINTS
+
ENABLE_TILES
+
EXPORT_TYPE_MAPPING
+
def to_camel(
+ string: str
+) -> str
+
class AttributeFilter(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
+Attributes: + class_vars: The names of classvars defined on the model. + private_attributes: Metadata about the private attributes of the model. + signature: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
Creates a new instance of the Model class with validated data.
+Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
+Behaves as if Config.extra = 'allow'
was set since it adds all passed values
Args: + _fields_set: The set of field names accepted for the Model instance. + values: Trusted or pre-validated data dictionary.
+Returns:
+ A new instance of the Model
class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Args:
+ by_alias: Whether to use attribute aliases or not.
+ ref_template: The reference template.
+ schema_generator: To override the logic used to generate the JSON schema, as a subclass of
+ GenerateJsonSchema
with your desired modifications
+ mode: The mode in which to generate the schema.
Returns: + The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Args:
+ params: Tuple of types of the class. Given a generic class
+ Model
with 2 type variables and a concrete model Model[str, int]
,
+ the value (str, int)
would be passed to params
.
Returns:
+ String representing the new class where params
are passed to cls
as type variables.
Raises: + TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Args:
+ force: Whether to force the rebuilding of the model schema, defaults to False
.
+ raise_errors: Whether to raise errors, defaults to True
.
+ _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
+ _types_namespace: The types namespace, defaults to None
.
Returns:
+ Returns None
if the schema is already "complete" and rebuilding was not required.
+ If rebuilding was required, returns True
if rebuilding was successful, otherwise False
.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object to validate. + strict: Whether to enforce types strictly. + from_attributes: Whether to extract data from object attributes. + context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+Raises:
+ ValueError: If json_data
is not a JSON string.
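+As a quick, self-contained illustration of model_validate_json (the demo model below is invented for the example and is not part of this API):
+from pydantic import BaseModel
+
+class _Demo(BaseModel):          # throwaway model for demonstration only
+    name: str
+
+print(_Demo.model_validate_json('{"name": "kathmandu"}'))   # name='kathmandu'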
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object contains string data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
model_extra
+
Returns:
+ A dictionary of extra fields, or None
if config.extra
is not set to "allow"
.
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
!!! warning "Deprecated"
+ This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True
to make a deep copy of the model.
Returns: + New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json
method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
+class BaseModel(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
+Attributes: + class_vars: The names of classvars defined on the model. + private_attributes: Metadata about the private attributes of the model. + signature: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
Creates a new instance of the Model class with validated data.
+Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
+Behaves as if Config.extra = 'allow'
was set since it adds all passed values
Args: + _fields_set: The set of field names accepted for the Model instance. + values: Trusted or pre-validated data dictionary.
+Returns:
+ A new instance of the Model
class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Args:
+ by_alias: Whether to use attribute aliases or not.
+ ref_template: The reference template.
+ schema_generator: To override the logic used to generate the JSON schema, as a subclass of
+ GenerateJsonSchema
with your desired modifications
+ mode: The mode in which to generate the schema.
Returns: + The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Args:
+ params: Tuple of types of the class. Given a generic class
+ Model
with 2 type variables and a concrete model Model[str, int]
,
+ the value (str, int)
would be passed to params
.
Returns:
+ String representing the new class where params
are passed to cls
as type variables.
Raises: + TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Args:
+ force: Whether to force the rebuilding of the model schema, defaults to False
.
+ raise_errors: Whether to raise errors, defaults to True
.
+ _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
+ _types_namespace: The types namespace, defaults to None
.
Returns:
+ Returns None
if the schema is already "complete" and rebuilding was not required.
+ If rebuilding was required, returns True
if rebuilding was successful, otherwise False
.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object to validate. + strict: Whether to enforce types strictly. + from_attributes: Whether to extract data from object attributes. + context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+Raises:
+ ValueError: If json_data
is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object contains string data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
model_extra
+
Returns:
+ A dictionary of extra fields, or None
if config.extra
is not set to "allow"
.
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
!!! warning "Deprecated"
+ This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True
to make a deep copy of the model.
Returns: + New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json
method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
+class CategoryModel(
+ /,
+ **data: 'Any'
+)
+
Model for category configuration settings.
+Fields: +- hdx (HDXModel): HDX configuration model. +- types (List[str]): List of feature types (points, lines, polygons). +- select (List[str]): List of selected fields. +- where (str): SQL-like condition to filter features. +- formats (List[str]): List of Export Formats (suffixes).
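+A hedged sketch of constructing a category matching the fields above (the HDXModel field names and the where syntax are assumptions):
+category = CategoryModel(
+    hdx=HDXModel(tags=["buildings"], caveats=""),   # assumed HDXModel fields
+    types=["polygons"],
+    select=["name", "building"],
+    where="tags['building'] IS NOT NULL",
+    formats=["geojson", "gpkg"],
+)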
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
Creates a new instance of the Model class with validated data.
+Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
+Behaves as if Config.extra = 'allow'
was set since it adds all passed values
Args: + _fields_set: The set of field names accepted for the Model instance. + values: Trusted or pre-validated data dictionary.
+Returns:
+ A new instance of the Model
class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Args:
+ by_alias: Whether to use attribute aliases or not.
+ ref_template: The reference template.
+ schema_generator: To override the logic used to generate the JSON schema, as a subclass of
+ GenerateJsonSchema
with your desired modifications
+ mode: The mode in which to generate the schema.
Returns: + The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Args:
+ params: Tuple of types of the class. Given a generic class
+ Model
with 2 type variables and a concrete model Model[str, int]
,
+ the value (str, int)
would be passed to params
.
Returns:
+ String representing the new class where params
are passed to cls
as type variables.
Raises: + TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Args:
+ force: Whether to force the rebuilding of the model schema, defaults to False
.
+ raise_errors: Whether to raise errors, defaults to True
.
+ _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
+ _types_namespace: The types namespace, defaults to None
.
Returns:
+ Returns None
if the schema is already "complete" and rebuilding was not required.
+ If rebuilding was required, returns True
if rebuilding was successful, otherwise False
.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object to validate. + strict: Whether to enforce types strictly. + from_attributes: Whether to extract data from object attributes. + context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+Raises:
+ ValueError: If json_data
is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object contains string data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
def validate_export_types(
+ value
+)
+
Args: + value (type): description
+Raises: + ValueError: description
+Returns: + type: description
+def validate_types(
+ value
+)
+
Args: + value (type): description
+Raises: + ValueError: description
+Returns: + type: description
+model_extra
+
Returns:
+ A dictionary of extra fields, or None
if config.extra
is not set to "allow"
.
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
!!! warning "Deprecated"
+ This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True
to make a deep copy of the model.
Returns: + New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json
method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
+class DatasetConfig(
+ /,
+ **data: 'Any'
+)
+
Model for dataset configuration settings.
+Fields: +- private (bool): Make dataset private. By default False, public is recommended. +- subnational (bool): Make it true if the dataset doesn't cover the nation/country. +- update_frequency (str): Update frequency to be added on uploads. +- dataset_title (str): Dataset title that appears at the top of the page. +- dataset_prefix (str): Dataset prefix to be appended before the category name. Ignored if iso3 is supplied. +- dataset_locations (List[str]): Valid dataset locations iso3.
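+A hedged sketch matching the fields above (the update_frequency value and other field values are illustrative):
+dataset = DatasetConfig(
+    private=False,
+    subnational=False,
+    update_frequency="as needed",
+    dataset_title="Nepal OSM Exports",
+    dataset_prefix="hotosm_npl",
+    dataset_locations=["npl"],
+)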
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
Creates a new instance of the Model class with validated data.
+Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
+Behaves as if Config.extra = 'allow'
was set since it adds all passed values
Args: + _fields_set: The set of field names accepted for the Model instance. + values: Trusted or pre-validated data dictionary.
+Returns:
+ A new instance of the Model
class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Args:
+ by_alias: Whether to use attribute aliases or not.
+ ref_template: The reference template.
+ schema_generator: To override the logic used to generate the JSON schema, as a subclass of
+ GenerateJsonSchema
with your desired modifications
+ mode: The mode in which to generate the schema.
Returns: + The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Args:
+ params: Tuple of types of the class. Given a generic class
+ Model
with 2 type variables and a concrete model Model[str, int]
,
+ the value (str, int)
would be passed to params
.
Returns:
+ String representing the new class where params
are passed to cls
as type variables.
Raises: + TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Args:
+ force: Whether to force the rebuilding of the model schema, defaults to False
.
+ raise_errors: Whether to raise errors, defaults to True
.
+ _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
+ _types_namespace: The types namespace, defaults to None
.
Returns:
+ Returns None
if the schema is already "complete" and rebuilding was not required.
+ If rebuilding was required, returns True
if rebuilding was successful, otherwise False
.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object to validate. + strict: Whether to enforce types strictly. + from_attributes: Whether to extract data from object attributes. + context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+Raises:
+ ValueError: If json_data
is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object contains string data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
def validate_frequency(
+ value
+)
+
Args: + value (type): description
+Raises: + ValueError: description
+Returns: + type: description
+model_extra
+
Returns:
+ A dictionary of extra fields, or None
if config.extra
is not set to "allow"
.
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
!!! warning "Deprecated"
+ This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True
to make a deep copy of the model.
Returns: + New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json
method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
+class DynamicCategoriesModel(
+ /,
+ **data: 'Any'
+)
+
Model for dynamic categories.
+Fields: +- iso3 (Optional[str]): ISO3 Country Code. +- dataset (Optional[DatasetConfig]): Dataset Configurations for HDX Upload. +- meta (bool): Dumps Meta db in parquet format & HDX config JSON to S3. +- hdx_upload (bool): Enable/Disable uploading the dataset to HDX. +- categories (List[Dict[str, CategoryModel]]): List of dynamic categories. +- geometry (Optional[Union[Polygon, MultiPolygon]]): Custom polygon geometry.
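+A hedged sketch combining the models above into one request payload; category and dataset refer to the objects sketched earlier, and the category key name is illustrative:
+payload = DynamicCategoriesModel(
+    iso3="NPL",                           # either iso3 or geometry must be supplied (see set_geometry_or_iso3 below)
+    dataset=dataset,
+    meta=False,
+    hdx_upload=False,
+    categories=[{"Buildings": category}],
+)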
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
Creates a new instance of the Model class with validated data.
+Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
+Behaves as if Config.extra = 'allow'
was set since it adds all passed values
Args: + _fields_set: The set of field names accepted for the Model instance. + values: Trusted or pre-validated data dictionary.
+Returns:
+ A new instance of the Model
class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Args:
+ by_alias: Whether to use attribute aliases or not.
+ ref_template: The reference template.
+ schema_generator: To override the logic used to generate the JSON schema, as a subclass of
+ GenerateJsonSchema
with your desired modifications
+ mode: The mode in which to generate the schema.
Returns: + The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Args:
+ params: Tuple of types of the class. Given a generic class
+ Model
with 2 type variables and a concrete model Model[str, int]
,
+ the value (str, int)
would be passed to params
.
Returns:
+ String representing the new class where params
are passed to cls
as type variables.
Raises: + TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Args:
+ force: Whether to force the rebuilding of the model schema, defaults to False
.
+ raise_errors: Whether to raise errors, defaults to True
.
+ _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
+ _types_namespace: The types namespace, defaults to None
.
Returns:
+ Returns None
if the schema is already "complete" and rebuilding was not required.
+ If rebuilding was required, returns True
if rebuilding was successful, otherwise False
.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object to validate. + strict: Whether to enforce types strictly. + from_attributes: Whether to extract data from object attributes. + context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+Raises:
+ ValueError: If json_data
is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object contains string data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def set_geometry_or_iso3(
+ value,
+ values
+)
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
def validate_geometry(
+ value
+)
+
model_extra
+
Returns:
+ A dictionary of extra fields, or None
if config.extra
is not set to "allow"
.
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
+!!! warning "Deprecated"
+ This method is now deprecated; use model_copy instead.
+If you need include or exclude, use:
+data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+Args:
+ include: Optional set or mapping specifying which fields to include in the copied model.
+ exclude: Optional set or mapping specifying which fields to exclude in the copied model.
+ update: Optional dictionary of field-value pairs to override field values in the copied model.
+ deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns:
+ A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True to make a deep copy of the model.
+Returns:
+ New model instance.
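+A small sketch of model_copy with a hypothetical Settings model; note that values passed via update are not re-validated:
+```python
+from pydantic import BaseModel
+
+class Settings(BaseModel):  # hypothetical model, for illustration only
+    name: str = "export"
+    limit: int = 10
+
+base = Settings()
+tweaked = base.model_copy(update={"limit": 50})  # update is not validated: pass trusted data
+print(base.limit, tweaked.limit)  # 10 50
+```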
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
+Returns:
+ A dictionary representation of the model.
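+A brief illustration of the mode argument using a hypothetical Event model: 'python' keeps native objects, while 'json' restricts the output to JSON-serializable types.
+```python
+from datetime import date
+from pydantic import BaseModel
+
+class Event(BaseModel):  # hypothetical model, for illustration only
+    name: str
+    when: date
+
+event = Event(name="launch", when=date(2024, 1, 1))
+print(event.model_dump())             # {'name': 'launch', 'when': datetime.date(2024, 1, 1)}
+print(event.model_dump(mode="json"))  # {'name': 'launch', 'when': '2024-01-01'}
+print(event.model_dump(exclude={"when"}))
+```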
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
+Generates a JSON representation of the model using Pydantic's to_json method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
+Returns:
+ A JSON string representation of the model.
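+The JSON counterpart, again with a hypothetical model, showing indent and exclude_none:
+```python
+from typing import Optional
+from pydantic import BaseModel
+
+class Status(BaseModel):  # hypothetical model, for illustration only
+    state: str
+    detail: Optional[str] = None
+
+print(Status(state="ok").model_dump_json(exclude_none=True))   # {"state":"ok"}
+print(Status(state="ok", detail="done").model_dump_json(indent=2))
+```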
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
+Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
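+A minimal sketch of overriding model_post_init for a whole-model check, using a hypothetical BBox model:
+```python
+from typing import Any
+from pydantic import BaseModel
+
+class BBox(BaseModel):  # hypothetical model, for illustration only
+    min_x: float
+    min_y: float
+    max_x: float
+    max_y: float
+
+    def model_post_init(self, __context: Any) -> None:
+        # Runs after __init__/model_construct, once every field is populated.
+        if self.min_x > self.max_x or self.min_y > self.max_y:
+            raise ValueError("bounding box corners are inverted")
+
+BBox(min_x=0.0, min_y=0.0, max_x=1.0, max_y=1.0)
+```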
+class ExportTypeInfo(
+ suffix,
+ driver_name,
+ layer_creation_options,
+ format_option
+)
+
Class representing export type information.
+Fields:
+- suffix (str): File suffix for the export type.
+- driver_name (str): GDAL driver name.
+- layer_creation_options (List[str]): Layer creation options.
+- format_option (str): Format option for GDAL.
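+A hypothetical sketch of the shape this class describes; the stand-in NamedTuple below only mirrors the documented fields, and the example values (GeoJSON driver, empty creation options) are illustrative rather than the project's actual configuration.
+```python
+from typing import List, NamedTuple
+
+class ExportTypeInfo(NamedTuple):  # local stand-in mirroring the documented fields
+    suffix: str
+    driver_name: str
+    layer_creation_options: List[str]
+    format_option: str
+
+geojson_export = ExportTypeInfo(
+    suffix="geojson",
+    driver_name="GeoJSON",       # GDAL driver name
+    layer_creation_options=[],   # driver-specific creation options, if any
+    format_option="GDAL",        # illustrative value only
+)
+print(geojson_export.suffix)
+```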
+class Filters(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
+Attributes: + class_vars: The names of classvars defined on the model. + private_attributes: Metadata about the private attributes of the model. + signature: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
+Creates a new instance of the Model class with validated data.
+Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
+Behaves as if Config.extra = 'allow' was set since it adds all passed values.
+Args:
+ _fields_set: The set of field names accepted for the Model instance.
+ values: Trusted or pre-validated data dictionary.
+Returns:
+ A new instance of the Model class with validated data.
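+A quick, hypothetical illustration of model_construct skipping validation (only use it with data you already trust):
+```python
+from pydantic import BaseModel
+
+class User(BaseModel):  # hypothetical model, for illustration only
+    id: int
+    name: str = "anonymous"
+
+user = User.model_construct(id=1)          # no validation performed
+print(user.name)                           # "anonymous" (default respected)
+print(User.model_construct(id="oops").id)  # "oops" slips through unvalidated
+```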
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+Generates a JSON schema for a model class.
+Args:
+ by_alias: Whether to use attribute aliases or not.
+ ref_template: The reference template.
+ schema_generator: To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications.
+ mode: The mode in which to generate the schema.
+Returns:
+ The JSON schema for the given model class.
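+For example, with a hypothetical two-field model:
+```python
+import json
+from pydantic import BaseModel
+
+class Point(BaseModel):  # hypothetical model, for illustration only
+    x: float
+    y: float
+
+schema = Point.model_json_schema()
+print(json.dumps(schema, indent=2))  # {"properties": {"x": {...}, "y": {...}}, ...}
+```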
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Args:
+ params: Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.
+Returns:
+ String representing the new class where params are passed to cls as type variables.
+Raises:
+ TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.
+Args:
+ force: Whether to force the rebuilding of the model schema, defaults to False.
+ raise_errors: Whether to raise errors, defaults to True.
+ _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
+ _types_namespace: The types namespace, defaults to None.
+Returns:
+ Returns None if the schema is already "complete" and rebuilding was not required.
+ If rebuilding was required, returns True if rebuilding was successful, otherwise False.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+Validate a pydantic model instance.
+Args:
+ obj: The object to validate.
+ strict: Whether to enforce types strictly.
+ from_attributes: Whether to extract data from object attributes.
+ context: Additional context to pass to the validator.
+Raises:
+ ValidationError: If the object could not be validated.
+Returns:
+ The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args:
+ json_data: The JSON data to validate.
+ strict: Whether to enforce types strictly.
+ context: Extra variables to pass to the validator.
+Returns:
+ The validated Pydantic model.
+Raises:
+ ValueError: If json_data is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+Args:
+ obj: The object containing string data to validate.
+ strict: Whether to enforce types strictly.
+ context: Extra variables to pass to the validator.
+Returns:
+ The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
model_extra
+
+Returns:
+ A dictionary of extra fields, or None if config.extra is not set to "allow".
model_fields_set
+
+Returns:
+ A set of strings representing the fields that have been set, i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
+!!! warning "Deprecated"
+ This method is now deprecated; use model_copy instead.
+If you need include or exclude, use:
+data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+Args:
+ include: Optional set or mapping specifying which fields to include in the copied model.
+ exclude: Optional set or mapping specifying which fields to exclude in the copied model.
+ update: Optional dictionary of field-value pairs to override field values in the copied model.
+ deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns:
+ A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True to make a deep copy of the model.
+Returns:
+ New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
+Returns:
+ A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
+Generates a JSON representation of the model using Pydantic's to_json method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
+Returns:
+ A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
+Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
+class GeometryValidatorMixin(
+ /,
+ *args,
+ **kwargs
+)
+
def validate_geometry(
+ value
+)
+
class HDXModel(
+ /,
+ **data: 'Any'
+)
+
Model for HDX configuration settings.
+Fields:
+- tags (List[str]): List of tags for the HDX model.
+- caveats (str): Caveats/Warning for the Datasets.
+- notes (str): Extra notes to append in the notes section of HDX datasets.
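+A minimal sketch of the kind of payload HDXModel describes; the field names come from the list above, while the example values are assumptions and HDXModel itself would be imported from this project's models module.
+```python
+# Illustrative payload only; actual tag values are checked by validate_tags.
+hdx_section = {
+    "tags": ["geodata", "transportation"],
+    "caveats": "Extracted from OpenStreetMap; completeness varies by region.",
+    "notes": "Additional notes appended to the HDX dataset description.",
+}
+# hdx = HDXModel(**hdx_section)  # once HDXModel is imported from the models module
+```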
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
+Creates a new instance of the Model class with validated data.
+Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
+Behaves as if Config.extra = 'allow' was set since it adds all passed values.
+Args:
+ _fields_set: The set of field names accepted for the Model instance.
+ values: Trusted or pre-validated data dictionary.
+Returns:
+ A new instance of the Model class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+Generates a JSON schema for a model class.
+Args:
+ by_alias: Whether to use attribute aliases or not.
+ ref_template: The reference template.
+ schema_generator: To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications.
+ mode: The mode in which to generate the schema.
+Returns:
+ The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Args:
+ params: Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.
+Returns:
+ String representing the new class where params are passed to cls as type variables.
+Raises:
+ TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.
+Args:
+ force: Whether to force the rebuilding of the model schema, defaults to False.
+ raise_errors: Whether to raise errors, defaults to True.
+ _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
+ _types_namespace: The types namespace, defaults to None.
+Returns:
+ Returns None if the schema is already "complete" and rebuilding was not required.
+ If rebuilding was required, returns True if rebuilding was successful, otherwise False.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object to validate. + strict: Whether to enforce types strictly. + from_attributes: Whether to extract data from object attributes. + context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+Raises:
+ ValueError: If json_data is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object contains string data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
def validate_tags(
+ value
+)
+
+Args:
+ value (type): description
+Raises:
+ ValueError: description
+Returns:
+ type: description
+model_extra
+
+Returns:
+ A dictionary of extra fields, or None if config.extra is not set to "allow".
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
+!!! warning "Deprecated"
+ This method is now deprecated; use model_copy instead.
+If you need include or exclude, use:
+data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True to make a deep copy of the model.
+Returns:
+ New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json
method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
+Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
+class JoinFilterType(
+ /,
+ *args,
+ **kwargs
+)
+
An enumeration.
+AND
+
OR
+
name
+
value
+
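+A hypothetical sketch of how an AND/OR join between two attribute filters could be rendered into SQL; only the member names AND and OR come from the enumeration above, the helper itself is illustrative.
+```python
+def join_conditions(cond_a: str, cond_b: str, join_name: str) -> str:
+    # join_name would typically be JoinFilterType.AND.name or JoinFilterType.OR.name
+    return f"({cond_a}) {join_name} ({cond_b})"
+
+print(join_conditions("highway IS NOT NULL", "surface = 'asphalt'", "AND"))
+# (highway IS NOT NULL) AND (surface = 'asphalt')
+```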
class RawDataCurrentParams(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
+Attributes: + class_vars: The names of classvars defined on the model. + private_attributes: Metadata about the private attributes of the model. + signature: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def check_bind_option(
+ value,
+ values
+)
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
+Creates a new instance of the Model class with validated data.
+Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
+Behaves as if Config.extra = 'allow' was set since it adds all passed values.
+Args:
+ _fields_set: The set of field names accepted for the Model instance.
+ values: Trusted or pre-validated data dictionary.
+Returns:
+ A new instance of the Model class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+Generates a JSON schema for a model class.
+Args:
+ by_alias: Whether to use attribute aliases or not.
+ ref_template: The reference template.
+ schema_generator: To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications.
+ mode: The mode in which to generate the schema.
+Returns:
+ The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Args:
+ params: Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.
+Returns:
+ String representing the new class where params are passed to cls as type variables.
+Raises:
+ TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.
+Args:
+ force: Whether to force the rebuilding of the model schema, defaults to False.
+ raise_errors: Whether to raise errors, defaults to True.
+ _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
+ _types_namespace: The types namespace, defaults to None.
+Returns:
+ Returns None if the schema is already "complete" and rebuilding was not required.
+ If rebuilding was required, returns True if rebuilding was successful, otherwise False.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object to validate. + strict: Whether to enforce types strictly. + from_attributes: Whether to extract data from object attributes. + context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+Raises:
+ ValueError: If json_data is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object contains string data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def return_unique_value(
+ value
+)
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
def validate_geometry(
+ value
+)
+
model_extra
+
+Returns:
+ A dictionary of extra fields, or None if config.extra is not set to "allow".
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
+!!! warning "Deprecated"
+ This method is now deprecated; use model_copy instead.
+If you need include or exclude, use:
+data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True to make a deep copy of the model.
+Returns:
+ New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json
method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
+Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
+class RawDataCurrentParamsBase(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
+Attributes: + class_vars: The names of classvars defined on the model. + private_attributes: Metadata about the private attributes of the model. + signature: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
+Creates a new instance of the Model class with validated data.
+Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
+Behaves as if Config.extra = 'allow' was set since it adds all passed values.
+Args:
+ _fields_set: The set of field names accepted for the Model instance.
+ values: Trusted or pre-validated data dictionary.
+Returns:
+ A new instance of the Model class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+Generates a JSON schema for a model class.
+Args:
+ by_alias: Whether to use attribute aliases or not.
+ ref_template: The reference template.
+ schema_generator: To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications.
+ mode: The mode in which to generate the schema.
+Returns:
+ The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Args:
+ params: Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.
+Returns:
+ String representing the new class where params are passed to cls as type variables.
+Raises:
+ TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.
+Args:
+ force: Whether to force the rebuilding of the model schema, defaults to False.
+ raise_errors: Whether to raise errors, defaults to True.
+ _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
+ _types_namespace: The types namespace, defaults to None.
+Returns:
+ Returns None if the schema is already "complete" and rebuilding was not required.
+ If rebuilding was required, returns True if rebuilding was successful, otherwise False.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object to validate. + strict: Whether to enforce types strictly. + from_attributes: Whether to extract data from object attributes. + context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+Raises:
+ ValueError: If json_data is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object contains string data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def return_unique_value(
+ value
+)
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
def validate_geometry(
+ value
+)
+
model_extra
+
+Returns:
+ A dictionary of extra fields, or None if config.extra is not set to "allow".
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
+!!! warning "Deprecated"
+ This method is now deprecated; use model_copy instead.
+If you need include or exclude, use:
+data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True to make a deep copy of the model.
+Returns:
+ New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json
method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
+Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
+class RawDataOutputType(
+ /,
+ *args,
+ **kwargs
+)
+
An enumeration.
+CSV
+
FLATGEOBUF
+
GEOJSON
+
GEOPACKAGE
+
GEOPARQUET
+
KML
+
PGDUMP
+
SHAPEFILE
+
name
+
value
+
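+A hypothetical helper keyed on the member names listed above; the extension mapping is illustrative and the authoritative mapping lives in the API code.
+```python
+EXTENSIONS = {  # illustrative mapping, keyed by RawDataOutputType member name
+    "GEOJSON": ".geojson",
+    "SHAPEFILE": ".shp",
+    "GEOPACKAGE": ".gpkg",
+    "FLATGEOBUF": ".fgb",
+    "GEOPARQUET": ".parquet",
+    "CSV": ".csv",
+    "KML": ".kml",
+    "PGDUMP": ".sql",
+}
+
+def guess_extension(output_type_name: str) -> str:
+    return EXTENSIONS.get(output_type_name.upper(), "")
+
+print(guess_extension("geojson"))  # .geojson
+```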
class SQLFilter(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
+Attributes: + class_vars: The names of classvars defined on the model. + private_attributes: Metadata about the private attributes of the model. + signature: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
+Creates a new instance of the Model class with validated data.
+Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
+Behaves as if Config.extra = 'allow' was set since it adds all passed values.
+Args:
+ _fields_set: The set of field names accepted for the Model instance.
+ values: Trusted or pre-validated data dictionary.
+Returns:
+ A new instance of the Model class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+Generates a JSON schema for a model class.
+Args:
+ by_alias: Whether to use attribute aliases or not.
+ ref_template: The reference template.
+ schema_generator: To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications.
+ mode: The mode in which to generate the schema.
+Returns:
+ The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Args:
+ params: Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.
+Returns:
+ String representing the new class where params are passed to cls as type variables.
+Raises:
+ TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.
+Args:
+ force: Whether to force the rebuilding of the model schema, defaults to False.
+ raise_errors: Whether to raise errors, defaults to True.
+ _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
+ _types_namespace: The types namespace, defaults to None.
+Returns:
+ Returns None if the schema is already "complete" and rebuilding was not required.
+ If rebuilding was required, returns True if rebuilding was successful, otherwise False.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object to validate. + strict: Whether to enforce types strictly. + from_attributes: Whether to extract data from object attributes. + context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+Raises:
+ ValueError: If json_data is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Args: + obj: The object contains string data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
model_extra
+
+Returns:
+ A dictionary of extra fields, or None if config.extra is not set to "allow".
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
+!!! warning "Deprecated"
+ This method is now deprecated; use model_copy instead.
+If you need include or exclude, use:
+data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
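A hedged sketch of the non-deprecated alternatives: model_copy for update/deep copies, and the model_dump/model_validate recipe above when include or exclude is needed. ExampleStatus is a hypothetical stand-in model, not part of this API.
from pydantic import BaseModel

class ExampleStatus(BaseModel):  # hypothetical stand-in model
    status: str
    progress: int = 0

original = ExampleStatus(status="PENDING", progress=10)

# Copy with field overrides; note the update values are not re-validated.
updated = original.model_copy(update={"progress": 50})

# include/exclude still go through the dump-and-revalidate recipe shown above.
data = original.model_dump(include={"status"}, round_trip=True)
revalidated = ExampleStatus.model_validate({**data, "progress": 0})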
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True
to make a deep copy of the model.
Returns: + New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
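As a sketch of the mode switch, using a hypothetical ExampleTask model (not part of this API):
from datetime import datetime
from pydantic import BaseModel

class ExampleTask(BaseModel):  # hypothetical stand-in model
    task_id: str
    queued_at: datetime

task = ExampleTask(task_id="abc123", queued_at=datetime(2024, 1, 1, 12, 0))

# mode='python' keeps native objects (queued_at stays a datetime).
print(task.model_dump())
# mode='json' restricts the output to JSON-serializable types (datetime becomes an ISO string).
print(task.model_dump(mode="json"))
# include/exclude/exclude_none trim the output the same way in both modes.
print(task.model_dump(mode="json", include={"task_id"}))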
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
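A minimal sketch of model_dump_json, again with a hypothetical stand-in model:
from pydantic import BaseModel

class ExampleTask(BaseModel):  # hypothetical stand-in model
    task_id: str
    result_url: str | None = None

task = ExampleTask(task_id="abc123")

# Compact JSON string (indent=None is the default).
print(task.model_dump_json())
# Pretty-printed, with None-valued fields dropped.
print(task.model_dump_json(indent=2, exclude_none=True))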
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
+class SnapshotResponse(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
+Attributes: + class_vars: The names of classvars defined on the model. + private_attributes: Metadata about the private attributes of the model. + signature: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
Creates a new instance of the Model class with validated data.
Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
Default values are respected, but no other validation is performed.
Behaves as if Config.extra = 'allow' was set since it adds all passed values.
Args:
    _fields_set: The set of field names accepted for the Model instance.
    values: Trusted or pre-validated data dictionary.
Returns:
    A new instance of the Model class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
Args:
    by_alias: Whether to use attribute aliases or not.
    ref_template: The reference template.
    schema_generator: To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications.
    mode: The mode in which to generate the schema.
Returns:
    The JSON schema for the given model class.
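For example, the schema pydantic derives for SnapshotResponse can be inspected directly; the import path below is an assumption about this project's layout, not confirmed by the docs.
import json
from src.validation.models import SnapshotResponse  # import path is an assumption

# JSON Schema used when validating incoming data for this model.
schema = SnapshotResponse.model_json_schema(mode="validation")
print(json.dumps(schema, indent=2))

# by_alias=True (the default) keys properties by their serialization aliases, if any.
print(sorted(schema.get("properties", {})))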
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
This method can be overridden to achieve a custom naming scheme for generic BaseModels.
Args:
    params: Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.
Returns:
    String representing the new class where params are passed to cls as type variables.
Raises: + TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model. This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.
Args:
    force: Whether to force the rebuilding of the model schema, defaults to False.
    raise_errors: Whether to raise errors, defaults to True.
    _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
    _types_namespace: The types namespace, defaults to None.
Returns:
    Returns None if the schema is already "complete" and rebuilding was not required.
    If rebuilding was required, returns True if rebuilding was successful, otherwise False.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate a pydantic model instance.
Args:
    obj: The object to validate.
    strict: Whether to enforce types strictly.
    from_attributes: Whether to extract data from object attributes.
    context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
Raises:
    ValueError: If json_data is not a JSON string.
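A short sketch of model_validate_json; ExampleSnapshot and its fields are hypothetical stand-ins for the documented response model.
from pydantic import BaseModel, ValidationError

class ExampleSnapshot(BaseModel):  # hypothetical stand-in model
    task_id: str
    track_link: str

raw = '{"task_id": "abc123", "track_link": "/tasks/status/abc123/"}'

# Parse and validate directly from the JSON string -- no separate json.loads step.
snap = ExampleSnapshot.model_validate_json(raw)

try:
    ExampleSnapshot.model_validate_json("not valid json")
except ValidationError as err:  # malformed JSON surfaces as a validation error
    print(err.errors()[0]["type"])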
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given object with string data against the Pydantic model.
Args:
    obj: The object containing string data to validate.
    strict: Whether to enforce types strictly.
    context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
model_extra
+
Returns:
    A dictionary of extra fields, or None if config.extra is not set to "allow".
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
!!! warning "Deprecated"
    This method is now deprecated; use model_copy instead.
If you need include or exclude, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True
to make a deep copy of the model.
Returns: + New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
+class SnapshotTaskResponse(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
+Attributes: + class_vars: The names of classvars defined on the model. + private_attributes: Metadata about the private attributes of the model. + signature: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
Creates a new instance of the Model class with validated data.
Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
Default values are respected, but no other validation is performed.
Behaves as if Config.extra = 'allow' was set since it adds all passed values.
Args:
    _fields_set: The set of field names accepted for the Model instance.
    values: Trusted or pre-validated data dictionary.
Returns:
    A new instance of the Model class with validated data.
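A sketch contrasting model_construct with normal construction, on a hypothetical ExampleTask model:
from pydantic import BaseModel

class ExampleTask(BaseModel):  # hypothetical stand-in model
    task_id: str
    progress: int = 0

# model_construct skips validation entirely -- only for trusted, pre-validated data.
trusted = ExampleTask.model_construct(task_id="abc123", progress=42)

# No coercion or type checking happens, so bad data passes through silently.
unchecked = ExampleTask.model_construct(task_id=123, progress="oops")
print(unchecked.task_id, unchecked.progress)  # 123 oops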
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
Args:
    by_alias: Whether to use attribute aliases or not.
    ref_template: The reference template.
    schema_generator: To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications.
    mode: The mode in which to generate the schema.
Returns:
    The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
This method can be overridden to achieve a custom naming scheme for generic BaseModels.
Args:
    params: Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.
Returns:
    String representing the new class where params are passed to cls as type variables.
Raises: + TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model. This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.
Args:
    force: Whether to force the rebuilding of the model schema, defaults to False.
    raise_errors: Whether to raise errors, defaults to True.
    _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
    _types_namespace: The types namespace, defaults to None.
Returns:
    Returns None if the schema is already "complete" and rebuilding was not required.
    If rebuilding was required, returns True if rebuilding was successful, otherwise False.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate a pydantic model instance.
Args:
    obj: The object to validate.
    strict: Whether to enforce types strictly.
    from_attributes: Whether to extract data from object attributes.
    context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
Raises:
    ValueError: If json_data is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given object with string data against the Pydantic model.
Args:
    obj: The object containing string data to validate.
    strict: Whether to enforce types strictly.
    context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
model_extra
+
Returns:
    A dictionary of extra fields, or None if config.extra is not set to "allow".
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
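A sketch of how model_fields_set pairs with exclude_unset, using a hypothetical ExampleTask model:
from pydantic import BaseModel

class ExampleTask(BaseModel):  # hypothetical stand-in model
    task_id: str
    progress: int = 0

task = ExampleTask(task_id="abc123")

# Only explicitly supplied fields appear here.
print(task.model_fields_set)                 # {'task_id'}
# ...which is exactly what exclude_unset keys off when dumping.
print(task.model_dump(exclude_unset=True))   # {'task_id': 'abc123'}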
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
!!! warning "Deprecated"
    This method is now deprecated; use model_copy instead.
If you need include or exclude, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True
to make a deep copy of the model.
Returns: + New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
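A sketch of overriding model_post_init for a cross-field check in a hypothetical model (not something these response models are known to do):
from typing import Any
from pydantic import BaseModel

class ExampleWindow(BaseModel):  # hypothetical stand-in model
    start: int
    end: int

    def model_post_init(self, __context: Any) -> None:
        # Runs after __init__/model_construct, once every field is populated.
        if self.end < self.start:
            raise ValueError("end must not be earlier than start")

ExampleWindow(start=1, end=5)    # fine
# ExampleWindow(start=5, end=1)  # would raise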
+class SnapshotTaskResult(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
+Attributes: + class_vars: The names of classvars defined on the model. + private_attributes: Metadata about the private attributes of the model. + signature: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
Creates a new instance of the Model class with validated data.
Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
Default values are respected, but no other validation is performed.
Behaves as if Config.extra = 'allow' was set since it adds all passed values.
Args:
    _fields_set: The set of field names accepted for the Model instance.
    values: Trusted or pre-validated data dictionary.
Returns:
    A new instance of the Model class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
Args:
    by_alias: Whether to use attribute aliases or not.
    ref_template: The reference template.
    schema_generator: To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications.
    mode: The mode in which to generate the schema.
Returns:
    The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
This method can be overridden to achieve a custom naming scheme for generic BaseModels.
Args:
    params: Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.
Returns:
    String representing the new class where params are passed to cls as type variables.
Raises: + TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model. This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.
Args:
    force: Whether to force the rebuilding of the model schema, defaults to False.
    raise_errors: Whether to raise errors, defaults to True.
    _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
    _types_namespace: The types namespace, defaults to None.
Returns:
    Returns None if the schema is already "complete" and rebuilding was not required.
    If rebuilding was required, returns True if rebuilding was successful, otherwise False.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate a pydantic model instance.
Args:
    obj: The object to validate.
    strict: Whether to enforce types strictly.
    from_attributes: Whether to extract data from object attributes.
    context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
Raises:
    ValueError: If json_data is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given object with string data against the Pydantic model.
Args:
    obj: The object containing string data to validate.
    strict: Whether to enforce types strictly.
    context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
model_extra
+
Returns:
    A dictionary of extra fields, or None if config.extra is not set to "allow".
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
!!! warning "Deprecated"
    This method is now deprecated; use model_copy instead.
If you need include or exclude, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True
to make a deep copy of the model.
Returns: + New model instance.
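A sketch of model_copy's update and deep options, on a hypothetical ExampleTask model:
from pydantic import BaseModel

class ExampleTask(BaseModel):  # hypothetical stand-in model
    task_id: str
    tags: list[str] = []

task = ExampleTask(task_id="abc123", tags=["osm"])

# Shallow copy with an override; the update values are NOT re-validated.
renamed = task.model_copy(update={"task_id": "def456"})

# deep=True also copies nested containers, so the copies stop sharing state.
independent = task.model_copy(deep=True)
independent.tags.append("hdx")
print(task.tags, independent.tags)  # ['osm'] ['osm', 'hdx']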
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
+class StatsRequestParams(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
+Attributes: + class_vars: The names of classvars defined on the model. + private_attributes: Metadata about the private attributes of the model. + signature: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
Creates a new instance of the Model class with validated data.
Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
Default values are respected, but no other validation is performed.
Behaves as if Config.extra = 'allow' was set since it adds all passed values.
Args:
    _fields_set: The set of field names accepted for the Model instance.
    values: Trusted or pre-validated data dictionary.
Returns:
    A new instance of the Model class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
Args:
    by_alias: Whether to use attribute aliases or not.
    ref_template: The reference template.
    schema_generator: To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications.
    mode: The mode in which to generate the schema.
Returns:
    The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
This method can be overridden to achieve a custom naming scheme for generic BaseModels.
Args:
    params: Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.
Returns:
    String representing the new class where params are passed to cls as type variables.
Raises: + TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model. This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.
Args:
    force: Whether to force the rebuilding of the model schema, defaults to False.
    raise_errors: Whether to raise errors, defaults to True.
    _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
    _types_namespace: The types namespace, defaults to None.
Returns:
    Returns None if the schema is already "complete" and rebuilding was not required.
    If rebuilding was required, returns True if rebuilding was successful, otherwise False.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate a pydantic model instance.
Args:
    obj: The object to validate.
    strict: Whether to enforce types strictly.
    from_attributes: Whether to extract data from object attributes.
    context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
Raises:
    ValueError: If json_data is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given object with string data against the Pydantic model.
Args:
    obj: The object containing string data to validate.
    strict: Whether to enforce types strictly.
    context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def set_geometry_or_iso3(
+ value,
+ values
+)
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
def validate_geometry(
+ value
+)
+
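StatsRequestParams additionally wires in the set_geometry_or_iso3 and validate_geometry validators listed above. Their exact behaviour is not reproduced in this reference, so the following is only a hedged sketch of the general either/or pattern, on a hypothetical model, not the project's actual implementation.
from typing import Optional
from pydantic import BaseModel, model_validator

class ExampleStatsParams(BaseModel):  # hypothetical stand-in, not the real StatsRequestParams
    iso3: Optional[str] = None
    geometry: Optional[dict] = None

    @model_validator(mode="after")
    def check_geometry_or_iso3(self) -> "ExampleStatsParams":
        # Assumed behaviour: exactly one of the two selectors must be supplied.
        if (self.iso3 is None) == (self.geometry is None):
            raise ValueError("provide either iso3 or geometry, not both or neither")
        return self

ExampleStatsParams(iso3="NPL")   # ok
# ExampleStatsParams()           # would raise a ValidationError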
model_extra
+
Returns:
    A dictionary of extra fields, or None if config.extra is not set to "allow".
model_fields_set
+
Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
!!! warning "Deprecated"
    This method is now deprecated; use model_copy instead.
If you need include or exclude, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns: + A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True
to make a deep copy of the model.
Returns: + New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python
should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
Generates a JSON representation of the model using Pydantic's to_json method.
Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None
.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
Returns: + A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
+class StatusResponse(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
+Attributes: + class_vars: The names of classvars defined on the model. + private_attributes: Metadata about the private attributes of the model. + signature: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
Creates a new instance of the Model class with validated data.
Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
Default values are respected, but no other validation is performed.
Behaves as if Config.extra = 'allow' was set since it adds all passed values.
Args:
    _fields_set: The set of field names accepted for the Model instance.
    values: Trusted or pre-validated data dictionary.
Returns:
    A new instance of the Model class with validated data.
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
Args:
    by_alias: Whether to use attribute aliases or not.
    ref_template: The reference template.
    schema_generator: To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications.
    mode: The mode in which to generate the schema.
Returns:
    The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
This method can be overridden to achieve a custom naming scheme for generic BaseModels.
Args:
    params: Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.
Returns:
    String representing the new class where params are passed to cls as type variables.
Raises: + TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model. This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.
Args:
    force: Whether to force the rebuilding of the model schema, defaults to False.
    raise_errors: Whether to raise errors, defaults to True.
    _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
    _types_namespace: The types namespace, defaults to None.
Returns:
    Returns None if the schema is already "complete" and rebuilding was not required.
    If rebuilding was required, returns True if rebuilding was successful, otherwise False.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate a pydantic model instance.
Args:
    obj: The object to validate.
    strict: Whether to enforce types strictly.
    from_attributes: Whether to extract data from object attributes.
    context: Additional context to pass to the validator.
+Raises: + ValidationError: If the object could not be validated.
+Returns: + The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
Raises:
    ValueError: If json_data is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given object with string data against the Pydantic model.
Args:
    obj: The object containing string data to validate.
    strict: Whether to enforce types strictly.
    context: Extra variables to pass to the validator.
+Returns: + The validated Pydantic model.
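A sketch of model_validate_strings coercing all-string input (e.g. query parameters) into typed fields; ExampleStatus is a hypothetical stand-in model.
from pydantic import BaseModel

class ExampleStatus(BaseModel):  # hypothetical stand-in model
    queued: int
    running: bool

# Leaf values arrive as strings (query params, env vars, CSV cells, ...).
raw = {"queued": "4", "running": "true"}

status = ExampleStatus.model_validate_strings(raw)
print(status.queued + 1, status.running)  # 5 True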
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
model_extra
+
+Returns:
+ A dictionary of extra fields, or None if config.extra is not set to "allow".
model_fields_set
+
+Returns:
+ A set of strings representing the fields that have been set,
+ i.e. that were not filled from defaults.
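+A small sketch showing how model_fields_set separates explicitly supplied fields from defaults; the Settings model is hypothetical.
+from pydantic import BaseModel
+
+class Settings(BaseModel):  # hypothetical model for illustration
+    limit: int = 100
+    verbose: bool = False
+
+s = Settings(limit=10)
+print(s.model_fields_set)                # {'limit'} -- verbose came from its default
+print(s.model_dump(exclude_unset=True))  # {'limit': 10}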
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
+!!! warning "Deprecated"
+ This method is now deprecated; use model_copy instead.
+If you need include or exclude, use:
+data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+Args:
+ include: Optional set or mapping specifying which fields to include in the copied model.
+ exclude: Optional set or mapping specifying which fields to exclude in the copied model.
+ update: Optional dictionary of field-value pairs to override field values in the copied model.
+ deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns:
+ A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True to make a deep copy of the model.
+Returns:
+ New model instance.
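+A sketch of model_copy with update and deep; the Job model and its fields are hypothetical. As noted above, update values are not validated.
+from pydantic import BaseModel
+
+class Job(BaseModel):  # hypothetical model for illustration
+    name: str
+    tags: list[str] = []
+
+job = Job(name="export", tags=["osm"])
+renamed = job.model_copy(update={"name": "export-v2"})  # shallow copy, unvalidated update
+deep = job.model_copy(deep=True)
+
+deep.tags.append("hdx")
+print(job.tags, deep.tags)  # ['osm'] ['osm', 'hdx'] -- the deep copy has its own list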
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
+Returns:
+ A dictionary representation of the model.
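+A sketch contrasting mode='python' and mode='json'; the Export model and its datetime field are hypothetical.
+from datetime import datetime
+from pydantic import BaseModel
+
+class Export(BaseModel):  # hypothetical model for illustration
+    name: str
+    created_at: datetime
+
+e = Export(name="buildings", created_at=datetime(2024, 1, 1))
+print(e.model_dump())             # created_at stays a datetime object
+print(e.model_dump(mode="json"))  # created_at becomes an ISO 8601 string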
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
+Generates a JSON representation of the model using Pydantic's to_json method.
+Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
+Returns:
+ A JSON string representation of the model.
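+And a matching sketch for model_dump_json with indent and exclude_none, again on a hypothetical model.
+from pydantic import BaseModel
+
+class Export(BaseModel):  # hypothetical model for illustration
+    name: str
+    description: str | None = None
+
+e = Export(name="buildings")
+print(e.model_dump_json(indent=2, exclude_none=True))
+# {
+#   "name": "buildings"
+# }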
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
+Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
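+A sketch of overriding model_post_init for a whole-model check; the Range model is hypothetical.
+from pydantic import BaseModel
+
+class Range(BaseModel):  # hypothetical model for illustration
+    low: int
+    high: int
+
+    def model_post_init(self, __context) -> None:
+        # Runs after __init__ / model_construct, once every field is populated.
+        if self.low > self.high:
+            raise ValueError("low must not exceed high")
+
+Range(low=1, high=5)    # fine
+# Range(low=5, high=1)  # would raise ValueError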
+class SupportedFilters(
+ /,
+ *args,
+ **kwargs
+)
+
An enumeration.
+ATTRIBUTES
+
TAGS
+
name
+
value
+
class SupportedGeometryFilters(
+ /,
+ *args,
+ **kwargs
+)
+
An enumeration.
+ALLGEOM
+
LINE
+
POINT
+
POLYGON
+
name
+
value
+
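+The two enumerations above define the filter keys the API accepts. A rough sketch of how they are likely declared and checked follows; the exact string values shown are assumptions, not confirmed by this page.
+from enum import Enum
+
+class SupportedFilters(Enum):  # member values are assumed, for illustration only
+    TAGS = "tags"
+    ATTRIBUTES = "attributes"
+
+class SupportedGeometryFilters(Enum):  # member values are assumed, for illustration only
+    POINT = "point"
+    LINE = "line"
+    POLYGON = "polygon"
+    ALLGEOM = "all_geometry"
+
+def is_valid_geometry_key(key: str) -> bool:
+    # hypothetical helper: check an incoming filter key against the enum values
+    return key in {g.value for g in SupportedGeometryFilters}
+
+print(is_valid_geometry_key("point"), is_valid_geometry_key("multipolygon"))  # True False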
class TagsFilter(
+ /,
+ **data: 'Any'
+)
+
Usage docs: https://docs.pydantic.dev/2.6/concepts/models/
+A base class for creating Pydantic models.
+Attributes:
+ class_vars: The names of classvars defined on the model.
+ private_attributes: Metadata about the private attributes of the model.
+ signature: The signature for instantiating the model.
+__pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
+__pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
+__pydantic_custom_init__: Whether the model has a custom `__init__` function.
+__pydantic_decorators__: Metadata containing the decorators defined on the model.
+ This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
+__pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
+ __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
+__pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
+__pydantic_post_init__: The name of the post-init method for the model, if defined.
+__pydantic_root_model__: Whether the model is a `RootModel`.
+__pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
+__pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
+
+__pydantic_extra__: An instance attribute with the values of extra fields from validation when
+ `model_config['extra'] == 'allow'`.
+__pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
+__pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
+
+Config
+
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
def from_orm(
+ obj: 'Any'
+) -> 'Model'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Model'
+
+Creates a new instance of the Model class with validated data.
+Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
+Behaves as if Config.extra = 'allow' was set since it adds all passed values.
+Args:
+ _fields_set: The set of field names accepted for the Model instance.
+ values: Trusted or pre-validated data dictionary.
+Returns:
+ A new instance of the Model class with validated data.
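+A sketch of model_construct skipping validation; since the TagsFilter fields are not listed on this page, the ExampleFilter stand-in below is hypothetical.
+from pydantic import BaseModel
+
+class ExampleFilter(BaseModel):  # hypothetical stand-in for illustration
+    point: dict = {}
+    line: dict = {}
+
+# No validation is performed: the dict below is trusted as-is.
+f = ExampleFilter.model_construct(point={"amenity": ["school"]})
+print(f.point, f.model_fields_set)  # {'amenity': ['school']} {'point'}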
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Args:
+ by_alias: Whether to use attribute aliases or not.
+ ref_template: The reference template.
+ schema_generator: To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications.
+ mode: The mode in which to generate the schema.
+Returns:
+ The JSON schema for the given model class.
+def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Args:
+ params: Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.
+Returns:
+ String representing the new class where params are passed to cls as type variables.
+Raises:
+ TypeError: Raised when trying to generate concrete names for non-generic models.
+def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during
+the initial attempt to build the schema, and automatic rebuilding fails.
+Args:
+ force: Whether to force the rebuilding of the model schema, defaults to False.
+ raise_errors: Whether to raise errors, defaults to True.
+ _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
+ _types_namespace: The types namespace, defaults to None.
+Returns:
+ Returns None if the schema is already "complete" and rebuilding was not required.
+ If rebuilding was required, returns True if rebuilding was successful, otherwise False.
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+Validate a pydantic model instance.
+Args:
+ obj: The object to validate.
+ strict: Whether to enforce types strictly.
+ from_attributes: Whether to extract data from object attributes.
+ context: Additional context to pass to the validator.
+Raises:
+ ValidationError: If the object could not be validated.
+Returns:
+ The validated model instance.
+def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
Validate the given JSON data against the Pydantic model.
+Args:
+ json_data: The JSON data to validate.
+ strict: Whether to enforce types strictly.
+ context: Extra variables to pass to the validator.
+Returns:
+ The validated Pydantic model.
+Raises:
+ ValueError: If json_data is not a JSON string.
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+Validate the given object containing string data against the Pydantic model.
+Args:
+ obj: The object containing string data to validate.
+ strict: Whether to enforce types strictly.
+ context: Extra variables to pass to the validator.
+Returns:
+ The validated Pydantic model.
+def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Model'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Model'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Model'
+
model_extra
+
+Returns:
+ A dictionary of extra fields, or None if config.extra is not set to "allow".
model_fields_set
+
+Returns:
+ A set of strings representing the fields that have been set,
+ i.e. that were not filled from defaults.
+def copy(
+ self: 'Model',
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'typing.Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
+!!! warning "Deprecated"
+ This method is now deprecated; use model_copy instead.
+If you need include or exclude, use:
+data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+Args:
+ include: Optional set or mapping specifying which fields to include in the copied model.
+ exclude: Optional set or mapping specifying which fields to exclude in the copied model.
+ update: Optional dictionary of field-value pairs to override field values in the copied model.
+ deep: If True, the values of fields that are Pydantic models will be deep-copied.
+Returns:
+ A copy of the model with included, excluded and updated fields as specified.
+def dict(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self: 'Model',
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Model'
+
Returns a copy of the model.
+Args:
+ update: Values to change/add in the new model. Note: the data is not validated
+ before creating the new model. You should trust this data.
+ deep: Set to True to make a deep copy of the model.
+Returns:
+ New model instance.
+def model_dump(
+ self,
+ *,
+ mode: "Literal['json', 'python'] | str" = 'python',
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'dict[str, Any]'
+
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Args:
+ mode: The mode in which to_python should run.
+ If mode is 'json', the output will only contain JSON serializable types.
+ If mode is 'python', the output may contain non-JSON-serializable Python objects.
+ include: A list of fields to include in the output.
+ exclude: A list of fields to exclude from the output.
+ by_alias: Whether to use the field's alias in the dictionary key if defined.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
+Returns:
+ A dictionary representation of the model.
+def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx' = None,
+ exclude: 'IncEx' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: 'bool' = True
+) -> 'str'
+
+Generates a JSON representation of the model using Pydantic's to_json method.
+Args:
+ indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
+ include: Field(s) to include in the JSON output.
+ exclude: Field(s) to exclude from the JSON output.
+ by_alias: Whether to serialize using field aliases.
+ exclude_unset: Whether to exclude fields that have not been explicitly set.
+ exclude_defaults: Whether to exclude fields that are set to their default value.
+ exclude_none: Whether to exclude fields that have a value of None.
+ round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
+ warnings: Whether to log warnings when invalid fields are encountered.
+Returns:
+ A JSON string representation of the model.
+def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
+Override this method to perform additional initialization after __init__ and model_construct.
+This is useful if you want to do some validation that requires the entire model to be initialized.
+
+
+
+
+
+