stac-fastapi-elasticsearch-opensearch supports the STAC API Aggregation Extension. This enables aggregation of points and geometries, as well as frequency distribution aggregation of any other property, including dates. Aggregations can be defined at the root Catalog level (/aggregations) and at the Collection level (/<collection_id>/aggregations). The Filter Extension is also fully supported, enabling aggregated returns of search queries. Any query made with /search may also be executed with /aggregate, provided that the relevant aggregation fields are available.
A field named aggregations
should be added to the Collection object for the collection for which the aggregations are available, for example:
The available aggregations are those listed in the example below. Support for additional fields and new aggregations can be added in the OpenSearch database_logic.py and ElasticSearch database_logic.py files.
+json
+"aggregations": [
+ {
+ "name": "total_count",
+ "data_type": "integer"
+ },
+ {
+ "name": "datetime_max",
+ "data_type": "datetime"
+ },
+ {
+ "name": "datetime_min",
+ "data_type": "datetime"
+ },
+ {
+ "name": "datetime_frequency",
+ "data_type": "frequency_distribution",
+ "frequency_distribution_data_type": "datetime"
+ },
+ {
+ "name": "sun_elevation_frequency",
+ "data_type": "frequency_distribution",
+ "frequency_distribution_data_type": "numeric"
+ },
+ {
+ "name": "platform_frequency",
+ "data_type": "frequency_distribution",
+ "frequency_distribution_data_type": "string"
+ },
+ {
+ "name": "sun_azimuth_frequency",
+ "data_type": "frequency_distribution",
+ "frequency_distribution_data_type": "numeric"
+ },
+ {
+ "name": "off_nadir_frequency",
+ "data_type": "frequency_distribution",
+ "frequency_distribution_data_type": "numeric"
+ },
+ {
+ "name": "cloud_cover_frequency",
+ "data_type": "frequency_distribution",
+ "frequency_distribution_data_type": "numeric"
+ },
+ {
+ "name": "grid_code_frequency",
+ "data_type": "frequency_distribution",
+ "frequency_distribution_data_type": "string"
+ },
+ {
+ "name": "centroid_geohash_grid_frequency",
+ "data_type": "frequency_distribution",
+ "frequency_distribution_data_type": "string"
+ },
+ {
+ "name": "centroid_geohex_grid_frequency",
+ "data_type": "frequency_distribution",
+ "frequency_distribution_data_type": "string"
+ },
+ {
+ "name": "centroid_geotile_grid_frequency",
+ "data_type": "frequency_distribution",
+ "frequency_distribution_data_type": "string"
+ },
+ {
+ "name": "geometry_geohash_grid_frequency",
+ "data_type": "frequency_distribution",
+ "frequency_distribution_data_type": "numeric"
+ },
+ {
+ "name": "geometry_geotile_grid_frequency",
+ "data_type": "frequency_distribution",
+ "frequency_distribution_data_type": "string"
+ }
+]
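As a rough illustration of how these aggregations can be requested, the sketch below issues a GET request to the /aggregate endpoint with two of the aggregation names from the example above. The base URL, collection id, and bbox are placeholders, httpx is used purely for demonstration, and the response handling assumes the Aggregation Extension's convention of returning the computed values under an aggregations key.

```python
import httpx

# Placeholder deployment URL; point this at your own
# stac-fastapi-elasticsearch-opensearch instance.
BASE_URL = "http://localhost:8080"

# Request two of the aggregations advertised above, restricted to one
# (hypothetical) collection and a bounding box, mirroring the /search parameters.
response = httpx.get(
    f"{BASE_URL}/aggregate",
    params={
        "collections": "sentinel-2-l2a",
        "bbox": "-10.0,35.0,5.0,45.0",
        "aggregations": "total_count,datetime_frequency",
    },
)
response.raise_for_status()

# Each computed aggregation is expected under the "aggregations" key of the body.
for agg in response.json().get("aggregations", []):
    print(agg["name"])
```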
Fastapi app creation.
+DEFAULT_STATUS_CODES
+
STAC_API_VERSION
+
class StacApi(
+ settings: stac_fastapi.types.config.ApiSettings,
+ client: Union[stac_fastapi.types.core.AsyncBaseCoreClient, stac_fastapi.types.core.BaseCoreClient],
+ extensions: List[stac_fastapi.types.extension.ApiExtension] = NOTHING,
+ exceptions: Dict[Type[Exception], int] = NOTHING,
+ app: fastapi.applications.FastAPI = NOTHING,
+ router: fastapi.routing.APIRouter = NOTHING,
+ title: str = NOTHING,
+ api_version: str = NOTHING,
+ stac_version: str = '1.0.0',
+ description: str = NOTHING,
+ search_get_request_model: Type[stac_fastapi.types.search.BaseSearchGetRequest] = <class 'stac_fastapi.types.search.BaseSearchGetRequest'>,
+ search_post_request_model: Type[stac_fastapi.types.search.BaseSearchPostRequest] = <class 'stac_fastapi.types.search.BaseSearchPostRequest'>,
+ collections_get_request_model: Type[stac_fastapi.types.search.APIRequest] = <class 'stac_fastapi.api.models.EmptyRequest'>,
+ collection_get_request_model: Type[stac_fastapi.types.search.APIRequest] = <class 'stac_fastapi.api.models.CollectionUri'>,
+ items_get_request_model: Type[stac_fastapi.types.search.APIRequest] = <class 'stac_fastapi.api.models.ItemCollectionUri'>,
+ item_get_request_model: Type[stac_fastapi.types.search.APIRequest] = <class 'stac_fastapi.api.models.ItemUri'>,
+ response_class: Type[starlette.responses.Response] = <class 'starlette.responses.JSONResponse'>,
+ middlewares: List[starlette.middleware.Middleware] = NOTHING,
+ route_dependencies: List[Tuple[List[stac_fastapi.api.routes.Scope], List[fastapi.params.Depends]]] = []
+)
+
StacApi factory.
Factory for creating a STAC-compliant FastAPI application. After instantiation, the application is accessible from the StacApi.app attribute. A minimal construction sketch follows the parameter table below.
Name | +Type | +Description | +Default | +
---|---|---|---|
settings | +None | +API settings and configuration, potentially using environment variables. See pydantic-docs.helpmanual.io/usage/settings/. |
+None | +
client | +None | +A subclass of stac_api.clients.BaseCoreClient. Defines the application logic which is injected into the API. |
+None | +
extensions | +None | +API extensions to include with the application. This may include official STAC extensions as well as third-party add ons. |
+None | +
exceptions | +None | +Defines a global mapping between exceptions and status codes, allowing configuration of response behavior on certain exceptions (fastapi.tiangolo.com/tutorial/handling-errors/#install-custom-exception-handlers). |
+None | +
app | +None | +The FastAPI application, defaults to a fresh application. | +None | +
route_dependencies | +None | +List of tuples of route scope dicts (e.g. {'path': '/collections', 'method': 'POST'}) and lists of dependencies (e.g. [Depends(oauth2_scheme)]). Applies specified dependencies to specified routes. This is useful for applying custom auth requirements to routes defined elsewhere in the application. |
+None | +
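A minimal construction sketch, assuming a concrete client class (MyCoreClient below is a stand-in for a real BaseCoreClient implementation and is not defined here); the imports match the classes documented on this page.

```python
from stac_fastapi.api.app import StacApi
from stac_fastapi.api.models import create_get_request_model, create_post_request_model
from stac_fastapi.types.config import ApiSettings

extensions = []  # e.g. FieldsExtension(), SortExtension(), ...

api = StacApi(
    settings=ApiSettings(),       # reads configuration from environment variables
    client=MyCoreClient(),        # placeholder: your BaseCoreClient subclass
    extensions=extensions,
    search_get_request_model=create_get_request_model(extensions),
    search_post_request_model=create_post_request_model(extensions),
)

# The underlying FastAPI application is exposed on .app and can be served by
# any ASGI server, e.g. `uvicorn module:app`.
app = api.app
```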
def add_health_check(
+ self
+)
+
Add a health check.
+def add_route_dependencies(
+ self,
+ scopes: List[stac_fastapi.api.routes.Scope],
+ dependencies=typing.List[fastapi.params.Depends]
+) -> None
+
Add custom dependencies to routes.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
scopes | +None | +list of scopes. Each scope should be a dict with a path and method property. |
+None | +
dependencies | +None | +list of FastAPI dependencies to apply to each scope. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
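A usage sketch, reusing the api object from the construction example above; the OAuth2 scheme is a placeholder for whatever authentication dependency a deployment actually uses.

```python
from fastapi import Depends
from fastapi.security import OAuth2PasswordBearer

# Placeholder security dependency.
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")

# Require authentication on the transaction endpoints; each scope is a dict
# with `path` and `method` keys, and "*" may be used to match all routes.
api.add_route_dependencies(
    scopes=[
        {"path": "/collections", "method": "POST"},
        {"path": "/collections/{collection_id}/items", "method": "POST"},
    ],
    dependencies=[Depends(oauth2_scheme)],
)
```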
def customize_openapi(
+ self
+) -> Union[Dict[str, Any], NoneType]
+
Customize openapi schema.
+def get_extension(
+ self,
+ extension: Type[stac_fastapi.types.extension.ApiExtension]
+) -> Union[stac_fastapi.types.extension.ApiExtension, NoneType]
+
Get an extension.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
extension | +None | +extension to check for. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The extension instance, if it exists. | +
def register_conformance_classes(
+ self
+)
+
Register conformance classes (GET /conformance).
+Returns:
+Type | +Description | +
---|---|
None | +None | +
def register_core(
+ self
+)
+
Register core STAC endpoints.
GET / + GET /conformance + GET /collections + GET /collections/{collection_id} + GET /collections/{collection_id}/items + GET /collections/{collection_id}/items/{item_id} + GET /search + POST /search
+Injects application logic (StacApi.client) into the API layer.
+Returns:
+Type | +Description | +
---|---|
None | +None | +
def register_get_collection(
+ self
+)
+
Register get collection endpoint (GET /collections/{collection_id}).
+Returns:
+Type | +Description | +
---|---|
None | +None | +
def register_get_collections(
+ self
+)
+
Register get collections endpoint (GET /collections).
+Returns:
+Type | +Description | +
---|---|
None | +None | +
def register_get_item(
+ self
+)
+
Register get item endpoint (GET /collections/{collection_id}/items/{item_id}).
+Returns:
+Type | +Description | +
---|---|
None | +None | +
def register_get_item_collection(
+ self
+)
+
Register get item collection endpoint (GET /collections/{collection_id}/items).
+Returns:
+Type | +Description | +
---|---|
None | +None | +
def register_get_search(
+ self
+)
+
Register search endpoint (GET /search).
+Returns:
+Type | +Description | +
---|---|
None | +None | +
def register_landing_page(
+ self
+)
+
Register landing page (GET /).
+Returns:
+Type | +Description | +
---|---|
None | +None | +
def register_post_search(
+ self
+)
+
Register search endpoint (POST /search).
+Returns:
+Type | +Description | +
---|---|
None | +None | +
Application settings.
+class AddOns(
+ /,
+ *args,
+ **kwargs
+)
+
Enumeration of available third party add ons.
+bulk_transaction
+
name
+
value
+
class ApiExtensions(
+ /,
+ *args,
+ **kwargs
+)
+
Enumeration of available stac api extensions.
+Ref: @stac-api-extensions
+aggregation
+
collection_search
+
fields
+
filter
+
free_text
+
name
+
query
+
sort
+
transaction
+
value
+
Error handling.
+DEFAULT_STATUS_CODES
+
logger
+
def add_exception_handlers(
+ app: fastapi.applications.FastAPI,
+ status_codes: Dict[Type[Exception], int]
+) -> None
+
Add exception handlers to the FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +the FastAPI application. | +None | +
status_codes | +None | +mapping between exceptions and status codes. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
def exception_handler_factory(
+ status_code: int
+) -> Callable
+
Create a FastAPI exception handler for a particular status code.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
status_code | +None | +HTTP status code. | +None | +
Returns:
+Type | +Description | +
---|---|
callable | +an exception handler. | +
class ErrorResponse(
+ /,
+ *args,
+ **kwargs
+)
+
A JSON error response returned by the API.
+The STAC API spec expects that code
and description
are both present in
+the payload.
Name | +Type | +Description | +Default | +
---|---|---|---|
code | +None | +A code representing the error, semantics are up to implementor. | +None | +
description | +None | +A description of the error. | +None | +
def clear(
+ ...
+)
+
D.clear() -> None. Remove all items from D.
+def copy(
+ ...
+)
+
D.copy() -> a shallow copy of D
+def fromkeys(
+ iterable,
+ value=None,
+ /
+)
+
Create a new dictionary with keys from iterable and values set to value.
+def get(
+ self,
+ key,
+ default=None,
+ /
+)
+
Return the value for key if key is in the dictionary, else default.
+def items(
+ ...
+)
+
D.items() -> a set-like object providing a view on D's items
+def keys(
+ ...
+)
+
D.keys() -> a set-like object providing a view on D's keys
+def pop(
+ ...
+)
+
D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+If key is not found, d is returned if given, otherwise KeyError is raised
+def popitem(
+ self,
+ /
+)
+
Remove and return a (key, value) pair as a 2-tuple.
+Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.
+def setdefault(
+ self,
+ key,
+ default=None,
+ /
+)
+
Insert key with a value of default if key is not in the dictionary.
+Return the value for key if key is in the dictionary, else default.
+def update(
+ ...
+)
+
D.update([E, ]**F) -> None. Update D from dict/iterable E and F.
+If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]
+def values(
+ ...
+)
+
D.values() -> an object providing a view on D's values
Api middleware.
+HTTPS_PORT
+
HTTP_PORT
+
class CORSMiddleware(
+ app: Callable[[MutableMapping[str, Any], Callable[[], Awaitable[MutableMapping[str, Any]]], Callable[[MutableMapping[str, Any]], Awaitable[NoneType]]], Awaitable[NoneType]],
+ allow_origins: Sequence[str] = ('*',),
+ allow_methods: Sequence[str] = ('OPTIONS', 'POST', 'GET'),
+ allow_headers: Sequence[str] = ('Content-Type',),
+ allow_credentials: bool = False,
+ allow_origin_regex: Union[str, NoneType] = None,
+ expose_headers: Sequence[str] = (),
+ max_age: int = 600
+)
+
Subclass of Starlette's standard CORS middleware with default values set to those
+recommended by the STAC API spec.
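A short sketch of overriding the CORS defaults when building the application; the origin shown is illustrative, and the middleware list would be passed to StacApi via its middlewares parameter.

```python
from starlette.middleware import Middleware

from stac_fastapi.api.middleware import CORSMiddleware, ProxyHeaderMiddleware

# Narrow the allowed origins from the permissive default ("*") to a single site.
middlewares = [
    Middleware(CORSMiddleware, allow_origins=["https://stac-browser.example.com"]),
    Middleware(ProxyHeaderMiddleware),
]

# Then: StacApi(..., middlewares=middlewares)
```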
+ +def allow_explicit_origin(
+ headers: 'MutableHeaders',
+ origin: 'str'
+) -> 'None'
+
def is_allowed_origin(
+ self,
+ origin: 'str'
+) -> 'bool'
+
def preflight_response(
+ self,
+ request_headers: 'Headers'
+) -> 'Response'
+
def send(
+ self,
+ message: 'Message',
+ send: 'Send',
+ request_headers: 'Headers'
+) -> 'None'
+
def simple_response(
+ self,
+ scope: 'Scope',
+ receive: 'Receive',
+ send: 'Send',
+ request_headers: 'Headers'
+) -> 'None'
+
class ProxyHeaderMiddleware(
+ app: Callable[[MutableMapping[str, Any], Callable[[], Awaitable[MutableMapping[str, Any]]], Callable[[MutableMapping[str, Any]], Awaitable[NoneType]]], Awaitable[NoneType]]
+)
+
Account for forwarding headers when deriving base URL.
+Prioritise standard Forwarded header, look for non-standard X-Forwarded-* if missing. +Default to what can be derived from the URL if no headers provided. Middleware updates +the host header that is interpreted by starlette when deriving Request.base_url.
Api request/response models.
+def create_get_request_model(
+ extensions: Union[List[stac_fastapi.types.extension.ApiExtension], NoneType],
+ base_model: stac_fastapi.types.search.BaseSearchGetRequest = <class 'stac_fastapi.types.search.BaseSearchGetRequest'>
+) -> stac_fastapi.types.search.APIRequest
+
Wrap create_request_model to create the GET request model.
+def create_post_request_model(
+ extensions: Union[List[stac_fastapi.types.extension.ApiExtension], NoneType],
+ base_model: stac_fastapi.types.search.BaseSearchPostRequest = <class 'stac_fastapi.types.search.BaseSearchPostRequest'>
+) -> Type[pydantic.main.BaseModel]
+
Wrap create_request_model to create the POST request model.
+def create_request_model(
+ model_name='SearchGetRequest',
+ base_model: Union[Type[pydantic.main.BaseModel], stac_fastapi.types.search.APIRequest] = <class 'stac_fastapi.types.search.BaseSearchGetRequest'>,
+ extensions: Union[List[stac_fastapi.types.extension.ApiExtension], NoneType] = None,
+ mixins: Union[List[pydantic.main.BaseModel], List[stac_fastapi.types.search.APIRequest], NoneType] = None,
+ request_type: Union[str, NoneType] = 'GET'
+) -> Union[Type[pydantic.main.BaseModel], stac_fastapi.types.search.APIRequest]
+
Create a pydantic model for validating request bodies.
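A brief sketch of combining these factories with extensions; the extension choice and collection id are illustrative, and the extensions are imported from the upstream stac_fastapi.extensions package.

```python
from stac_fastapi.api.models import create_post_request_model
from stac_fastapi.extensions.core import FieldsExtension, SortExtension

# Build a POST /search body model that picks up the extra parameters
# (fields, sortby) contributed by the enabled extensions.
extensions = [FieldsExtension(), SortExtension()]
post_request_model = create_post_request_model(extensions)

# The resulting pydantic model validates incoming search bodies.
search = post_request_model(collections=["sentinel-2-l2a"], limit=5)
print(search.limit)  # 5
```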
+class CollectionUri(
+ collection_id: typing_extensions.Annotated[str, Path(PydanticUndefined)]
+)
+
Get or delete collection.
+def kwargs(
+ self
+) -> Dict
+
Transform api request params into format which matches the signature of the
+endpoint.
+class EmptyRequest(
+
+)
+
Empty request.
+def kwargs(
+ self
+) -> Dict
+
Transform api request params into format which matches the signature of the
+endpoint.
+class GeoJSONResponse(
+ content: 'typing.Any',
+ status_code: 'int' = 200,
+ headers: 'typing.Mapping[str, str] | None' = None,
+ media_type: 'str | None' = None,
+ background: 'BackgroundTask | None' = None
+)
+
JSON with custom, vendor content-type.
+charset
+
media_type
+
headers
+
def delete_cookie(
+ self,
+ key: 'str',
+ path: 'str' = '/',
+ domain: 'str | None' = None,
+ secure: 'bool' = False,
+ httponly: 'bool' = False,
+ samesite: "typing.Literal[('lax', 'strict', 'none')] | None" = 'lax'
+) -> 'None'
+
def init_headers(
+ self,
+ headers: 'typing.Mapping[str, str] | None' = None
+) -> 'None'
+
def render(
+ self,
+ content: Any
+) -> bytes
+
def set_cookie(
+ self,
+ key: 'str',
+ value: 'str' = '',
+ max_age: 'int | None' = None,
+ expires: 'datetime | str | int | None' = None,
+ path: 'str | None' = '/',
+ domain: 'str | None' = None,
+ secure: 'bool' = False,
+ httponly: 'bool' = False,
+ samesite: "typing.Literal[('lax', 'strict', 'none')] | None" = 'lax'
+) -> 'None'
+
class ItemCollectionUri(
+ collection_id: typing_extensions.Annotated[str, Path(PydanticUndefined)],
+ limit: typing_extensions.Annotated[Union[typing_extensions.Annotated[int, Gt(gt=0), AfterValidator(func=<function crop at 0x7f6e23133040>)], NoneType], Query(PydanticUndefined)] = 10,
+ bbox: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ datetime: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None
+)
+
Get item collection.
+def kwargs(
+ self
+) -> Dict
+
Transform api request params into format which matches the signature of the
+endpoint.
+class ItemUri(
+ collection_id: typing_extensions.Annotated[str, Path(PydanticUndefined)],
+ item_id: typing_extensions.Annotated[str, Path(PydanticUndefined)]
+)
+
Get or delete item.
+def kwargs(
+ self
+) -> Dict
+
Transform api request params into format which matches the signature of the
+endpoint.
+class JSONSchemaResponse(
+ content: 'typing.Any',
+ status_code: 'int' = 200,
+ headers: 'typing.Mapping[str, str] | None' = None,
+ media_type: 'str | None' = None,
+ background: 'BackgroundTask | None' = None
+)
+
JSON with custom, vendor content-type.
+charset
+
media_type
+
headers
+
def delete_cookie(
+ self,
+ key: 'str',
+ path: 'str' = '/',
+ domain: 'str | None' = None,
+ secure: 'bool' = False,
+ httponly: 'bool' = False,
+ samesite: "typing.Literal[('lax', 'strict', 'none')] | None" = 'lax'
+) -> 'None'
+
def init_headers(
+ self,
+ headers: 'typing.Mapping[str, str] | None' = None
+) -> 'None'
+
def render(
+ self,
+ content: Any
+) -> bytes
+
def set_cookie(
+ self,
+ key: 'str',
+ value: 'str' = '',
+ max_age: 'int | None' = None,
+ expires: 'datetime | str | int | None' = None,
+ path: 'str | None' = '/',
+ domain: 'str | None' = None,
+ secure: 'bool' = False,
+ httponly: 'bool' = False,
+ samesite: "typing.Literal[('lax', 'strict', 'none')] | None" = 'lax'
+) -> 'None'
+
openapi.
+def update_openapi(
+ app: fastapi.applications.FastAPI
+) -> fastapi.applications.FastAPI
+
Update OpenAPI response content-type.
+This function modifies the openapi route to comply with the STAC API spec's required +content-type response header.
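A minimal usage sketch applying the helper to a plain FastAPI application:

```python
from fastapi import FastAPI

from stac_fastapi.api.openapi import update_openapi

app = FastAPI()
# Adjust the OpenAPI JSON route so its response declares the content type
# required by the STAC API spec.
app = update_openapi(app)
```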
Route factories.
+HTTP_204_NO_CONTENT
+
def add_route_dependencies(
+ routes: List[starlette.routing.BaseRoute],
+ scopes: List[stac_fastapi.api.routes.Scope],
+ dependencies=typing.List[fastapi.params.Depends]
+) -> None
+
Add dependencies to routes.
+Allows a developer to add dependencies to a route after the route has been +defined.
+"*" can be used for path or method to match all allowed routes.
+Returns:
+Type | +Description | +
---|---|
None | +None | +
def create_async_endpoint(
+ func: Callable,
+ request_model: Union[Type[stac_fastapi.types.search.APIRequest], Type[pydantic.main.BaseModel], Dict]
+)
+
Wrap a function in a coroutine which may be used to create a FastAPI endpoint.
+Synchronous functions are executed asynchronously using a background thread.
+def sync_to_async(
+ func
+)
+
Run synchronous function asynchronously in a background thread.
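A small sketch of the pattern, assuming sync_to_async returns an awaitable wrapper (as its docstring implies); the slow_lookup function is a stand-in for a blocking call.

```python
import asyncio
import time

from stac_fastapi.api.routes import sync_to_async


def slow_lookup(item_id: str) -> dict:
    # Stand-in for a blocking database or network call.
    time.sleep(0.1)
    return {"id": item_id}


async def main() -> None:
    # The wrapped call runs in a background thread, keeping the event loop free.
    result = await sync_to_async(slow_lookup)(item_id="example-item")
    print(result)


asyncio.run(main())
```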
+class Scope(
+ /,
+ *args,
+ **kwargs
+)
+
More strict version of Starlette's Scope.
+def clear(
+ ...
+)
+
D.clear() -> None. Remove all items from D.
+def copy(
+ ...
+)
+
D.copy() -> a shallow copy of D
+def fromkeys(
+ iterable,
+ value=None,
+ /
+)
+
Create a new dictionary with keys from iterable and values set to value.
+def get(
+ self,
+ key,
+ default=None,
+ /
+)
+
Return the value for key if key is in the dictionary, else default.
+def items(
+ ...
+)
+
D.items() -> a set-like object providing a view on D's items
+def keys(
+ ...
+)
+
D.keys() -> a set-like object providing a view on D's keys
+def pop(
+ ...
+)
+
D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+If key is not found, d is returned if given, otherwise KeyError is raised
+def popitem(
+ self,
+ /
+)
+
Remove and return a (key, value) pair as a 2-tuple.
+Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.
+def setdefault(
+ self,
+ key,
+ default=None,
+ /
+)
+
Insert key with a value of default if key is not in the dictionary.
+Return the value for key if key is in the dictionary, else default.
+def update(
+ ...
+)
+
D.update([E, ]**F) -> None. Update D from dict/iterable E and F.
+If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]
+def values(
+ ...
+)
+
D.values() -> an object providing a view on D's values
Library version.
Base database logic.
+class BaseDatabaseLogic(
+ /,
+ *args,
+ **kwargs
+)
+
Abstract base class for database logic.
+This class defines the basic structure and operations for database interactions. +Subclasses must provide implementations for these methods.
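A toy in-memory sketch of what a custom backend could look like, assuming the abstract methods are async coroutines (they are awaited by the core clients); a real implementation would talk to Elasticsearch or OpenSearch rather than dictionaries.

```python
from typing import Any, Dict, Iterable, Optional, Tuple

from stac_fastapi.core.base_database_logic import BaseDatabaseLogic


class InMemoryDatabaseLogic(BaseDatabaseLogic):
    """Toy backend keeping collections and items in plain dictionaries."""

    def __init__(self) -> None:
        self.collections: Dict[str, Dict] = {}
        self.items: Dict[Tuple[str, str], Dict] = {}

    async def create_collection(self, collection: Dict, refresh: bool = False) -> None:
        self.collections[collection["id"]] = collection

    async def find_collection(self, collection_id: str) -> Dict:
        return self.collections[collection_id]

    async def delete_collection(self, collection_id: str, refresh: bool = False) -> None:
        self.collections.pop(collection_id, None)

    async def get_all_collections(
        self, token: Optional[str], limit: int
    ) -> Iterable[Dict[str, Any]]:
        return list(self.collections.values())[:limit]

    async def create_item(self, item: Dict, refresh: bool = False) -> None:
        self.items[(item["collection"], item["id"])] = item

    async def get_one_item(self, collection_id: str, item_id: str) -> Dict:
        return self.items[(collection_id, item_id)]

    async def delete_item(
        self, item_id: str, collection_id: str, refresh: bool = False
    ) -> None:
        self.items.pop((collection_id, item_id), None)
```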
+def create_collection(
+ self,
+ collection: Dict,
+ refresh: bool = False
+) -> None
+
Create a collection in the database.
+def create_item(
+ self,
+ item: Dict,
+ refresh: bool = False
+) -> None
+
Create an item in the database.
+def delete_collection(
+ self,
+ collection_id: str,
+ refresh: bool = False
+) -> None
+
Delete a collection from the database.
+def delete_item(
+ self,
+ item_id: str,
+ collection_id: str,
+ refresh: bool = False
+) -> None
+
Delete an item from the database.
+def find_collection(
+ self,
+ collection_id: str
+) -> Dict
+
Find a collection in the database.
+def get_all_collections(
+ self,
+ token: Union[str, NoneType],
+ limit: int
+) -> Iterable[Dict[str, Any]]
+
Retrieve a list of all collections from the database.
+def get_one_item(
+ self,
+ collection_id: str,
+ item_id: str
+) -> Dict
+
Retrieve a single item from the database.
Base settings.
+class ApiBaseSettings(
+ /,
+ *args,
+ **kwargs
+)
+
Abstract base class for API settings.
+def create_client(
+ self
+)
+
Create a database client.
Basic Authentication Module.
+class BasicAuth(
+ credentials: list
+)
+
Apply basic authentication to the provided FastAPI application based on environment variables for username, password, and endpoints.
Core client.
+BASE_CONFORMANCE_CLASSES
+
NumType
+
STAC_VERSION
+
logger
+
class BulkTransactionsClient(
+ database: stac_fastapi.core.base_database_logic.BaseDatabaseLogic,
+ settings: stac_fastapi.core.base_settings.ApiBaseSettings,
+ session: stac_fastapi.core.session.Session = NOTHING
+)
+
A client for posting bulk transactions to an Elasticsearch or OpenSearch database.
+Name | +Type | +Description | +Default | +
---|---|---|---|
session | +None | +An instance of Session to use for database connection. |
+None | +
database | +None | +An instance of DatabaseLogic to perform database operations. |
+None | +
def bulk_item_insert(
+ self,
+ items: stac_fastapi.extensions.third_party.bulk_transactions.Items,
+ chunk_size: Union[int, NoneType] = None,
+ **kwargs
+) -> str
+
Perform a bulk insertion of items into the database using Elasticsearch.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
items | +None | +The items to insert. | +None | +
chunk_size | +None | +The size of each chunk for bulk processing. | +None | +
**kwargs | +None | +Additional keyword arguments, such as request and refresh . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +A string indicating the number of items successfully added. | +
def preprocess_item(
+ self,
+ item: stac_fastapi.types.stac.Item,
+ base_url,
+ method: stac_fastapi.extensions.third_party.bulk_transactions.BulkTransactionMethod
+) -> stac_fastapi.types.stac.Item
+
Preprocess an item to match the data model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item | +None | +The item to preprocess. | +None | +
base_url | +None | +The base URL of the request. | +None | +
method | +None | +The bulk transaction method. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The preprocessed item. | +
class CoreClient(
+ database: stac_fastapi.core.base_database_logic.BaseDatabaseLogic,
+ base_conformance_classes: List[str] = NOTHING,
+ extensions: List[stac_fastapi.types.extension.ApiExtension] = NOTHING,
+ session: stac_fastapi.core.session.Session = NOTHING,
+ item_serializer: Type[stac_fastapi.core.serializers.ItemSerializer] = <class 'stac_fastapi.core.serializers.ItemSerializer'>,
+ collection_serializer: Type[stac_fastapi.core.serializers.CollectionSerializer] = <class 'stac_fastapi.core.serializers.CollectionSerializer'>,
+ post_request_model=<class 'stac_fastapi.types.search.BaseSearchPostRequest'>,
+ stac_version: str = '1.0.0',
+ landing_page_id: str = 'stac-fastapi',
+ title: str = 'stac-fastapi',
+ description: str = 'stac-fastapi'
+)
+
Client for core endpoints defined by the STAC specification.
This class is an implementation of AsyncBaseCoreClient
that implements the core endpoints
+defined by the STAC specification. It uses the DatabaseLogic
class to interact with the
+database, and ItemSerializer
and CollectionSerializer
to convert between STAC objects and
+database records.
Name | +Type | +Description | +Default | +
---|---|---|---|
session | +Session | +A requests session instance to be used for all HTTP requests. | +None | +
item_serializer | +Type[serializers.ItemSerializer] | +A serializer class to be used to convert between STAC items and database records. |
+None | +
collection_serializer | +Type[serializers.CollectionSerializer] | +A serializer class to be used to convert between STAC collections and database records. |
+None | +
database | +DatabaseLogic | +An instance of the DatabaseLogic class that is used to interact with the database. |
+None | +
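A rough wiring sketch; the database object stands in for the backend's DatabaseLogic instance (from the Elasticsearch or OpenSearch package), the post_request_model comes from create_post_request_model as documented above, and the import path for CoreClient is assumed from this module's layout.

```python
from stac_fastapi.core.core import CoreClient

# `database` is assumed to be a DatabaseLogic instance from the Elasticsearch
# or OpenSearch backend package; `post_request_model` is the model produced by
# create_post_request_model(extensions).
client = CoreClient(
    database=database,
    post_request_model=post_request_model,
)
```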
post_request_model
+
def all_collections(
+ self,
+ **kwargs
+) -> stac_fastapi.types.stac.Collections
+
Read all collections from the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
**kwargs | +None | +Keyword arguments from the request. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A Collections object containing all the collections in the database and links to various resources. | +
def conformance(
+ self,
+ **kwargs
+) -> stac_fastapi.types.stac.Conformance
+
Conformance classes.
+Called with GET /conformance
.
Returns:
+Type | +Description | +
---|---|
None | +Conformance classes which the server conforms to. | +
def conformance_classes(
+ self
+) -> List[str]
+
Generate conformance classes by adding extension conformance to base
+conformance classes.
+def extension_is_enabled(
+ self,
+ extension: str
+) -> bool
+
Check if an api extension is enabled.
+def get_collection(
+ self,
+ collection_id: str,
+ **kwargs
+) -> stac_fastapi.types.stac.Collection
+
Get a collection from the database by its id.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The id of the collection to retrieve. | +None | +
kwargs | +None | +Additional keyword arguments passed to the API call. | +None | +
Returns:
+Type | +Description | +
---|---|
Collection | +A Collection object representing the requested collection. |
+
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the collection with the given id cannot be found in the database. | +
def get_item(
+ self,
+ item_id: str,
+ collection_id: str,
+ **kwargs
+) -> stac_fastapi.types.stac.Item
+
Get an item from the database based on its id and collection id.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The ID of the collection the item belongs to. | +None | +
item_id | +str | +The ID of the item to be retrieved. | +None | +
Returns:
+Type | +Description | +
---|---|
Item | +An Item object representing the requested item. |
+
Raises:
+Type | +Description | +
---|---|
Exception | +If any error occurs while getting the item from the database. | +
NotFoundError | +If the item does not exist in the specified collection. | +
def get_search(
+ self,
+ request: starlette.requests.Request,
+ collections: Union[List[str], NoneType] = None,
+ ids: Union[List[str], NoneType] = None,
+ bbox: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]], NoneType] = None,
+ datetime: Union[datetime.datetime, Tuple[datetime.datetime, datetime.datetime], Tuple[datetime.datetime, NoneType], Tuple[NoneType, datetime.datetime], NoneType] = None,
+ limit: Union[int, NoneType] = 10,
+ query: Union[str, NoneType] = None,
+ token: Union[str, NoneType] = None,
+ fields: Union[List[str], NoneType] = None,
+ sortby: Union[str, NoneType] = None,
+ q: Union[List[str], NoneType] = None,
+ intersects: Union[str, NoneType] = None,
+ filter: Union[str, NoneType] = None,
+ filter_lang: Union[str, NoneType] = None,
+ **kwargs
+) -> stac_fastapi.types.stac.ItemCollection
+
Get search results from the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collections | +Optional[List[str]] | +List of collection IDs to search in. | +None | +
ids | +Optional[List[str]] | +List of item IDs to search for. | +None | +
bbox | +Optional[BBox] | +Bounding box to search in. | +None | +
datetime | +Optional[DateTimeType] | +Filter items based on the datetime field. | +None | +
limit | +Optional[int] | +Maximum number of results to return. | +None | +
query | +Optional[str] | +Query string to filter the results. | +None | +
token | +Optional[str] | +Access token to use when searching the catalog. | +None | +
fields | +Optional[List[str]] | +Fields to include or exclude from the results. | +None | +
sortby | +Optional[str] | +Sorting options for the results. | +None | +
q | +Optional[List[str]] | +Free text query to filter the results. | +None | +
intersects | +Optional[str] | +GeoJSON geometry to search in. | +None | +
kwargs | +None | +Additional parameters to be passed to the API. | +None | +
Returns:
+Type | +Description | +
---|---|
ItemCollection | +Collection of Item objects representing the search results. |
+
Raises:
+Type | +Description | +
---|---|
HTTPException | +If any error occurs while searching the catalog. | +
def item_collection(
+ self,
+ collection_id: str,
+ bbox: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]], NoneType] = None,
+ datetime: Union[datetime.datetime, Tuple[datetime.datetime, datetime.datetime], Tuple[datetime.datetime, NoneType], Tuple[NoneType, datetime.datetime], NoneType] = None,
+ limit: Union[int, NoneType] = 10,
+ token: Union[str, NoneType] = None,
+ **kwargs
+) -> stac_fastapi.types.stac.ItemCollection
+
Read items from a specific collection in the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The identifier of the collection to read items from. | +None | +
bbox | +Optional[BBox] | +The bounding box to filter items by. | +None | +
datetime | +Optional[DateTimeType] | +The datetime range to filter items by. | +None | +
limit | +int | +The maximum number of items to return. The default value is 10. | +None | +
token | +str | +A token used for pagination. | +None | +
request | +Request | +The incoming request. | +None | +
Returns:
+Type | +Description | +
---|---|
ItemCollection | +An ItemCollection object containing the items from the specified collection that meet the filter criteria, and links to various resources. |
+
Raises:
+Type | +Description | +
---|---|
HTTPException | +If the specified collection is not found. | +
Exception | +If any error occurs while reading the items from the database. | +
def landing_page(
+ self,
+ **kwargs
+) -> stac_fastapi.types.stac.LandingPage
+
Landing page.
+Called with GET /
.
Returns:
+Type | +Description | +
---|---|
None | +API landing page, serving as an entry point to the API. | +
def post_search(
+ self,
+ search_request: stac_fastapi.types.search.BaseSearchPostRequest,
+ request: starlette.requests.Request
+) -> stac_fastapi.types.stac.ItemCollection
+
Perform a POST search on the catalog.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search_request | +BaseSearchPostRequest | +Request object that includes the parameters for the search. | +None | +
kwargs | +None | +Keyword arguments passed to the function. | +None | +
Returns:
+Type | +Description | +
---|---|
ItemCollection | +A collection of items matching the search criteria. | +
Raises:
+Type | +Description | +
---|---|
HTTPException | +If there is an error with the cql2_json filter. | +
class EsAsyncBaseFiltersClient(
+
+)
+
Defines a pattern for implementing the STAC filter extension.
+def get_queryables(
+ self,
+ collection_id: Union[str, NoneType] = None,
+ **kwargs
+) -> Dict[str, Any]
+
Get the queryables available for the given collection_id.
+If collection_id is None, returns the intersection of all +queryables over all collections.
+This base implementation returns a blank queryable schema. This is not allowed +under OGC CQL but it is allowed by the STAC API Filter Extension
+github.com/radiantearth/stac-api-spec/tree/master/fragments/filter#queryables
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The id of the collection to get queryables for. | +None | +
**kwargs | +None | +additional keyword arguments | +None | +
Returns:
+Type | +Description | +
---|---|
Dict[str, Any] | +A dictionary containing the queryables for the given collection. | +
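A small usage sketch, assuming the no-argument constructor shown above and that the base implementation returns its blank queryables schema without touching the database; the import path and collection id are illustrative.

```python
import asyncio

from stac_fastapi.core.core import EsAsyncBaseFiltersClient


async def main() -> None:
    filters_client = EsAsyncBaseFiltersClient()
    # Returns a JSON Schema document describing the filterable properties.
    queryables = await filters_client.get_queryables(collection_id="sentinel-2-l2a")
    print(queryables)


asyncio.run(main())
```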
class TransactionsClient(
+ database: stac_fastapi.core.base_database_logic.BaseDatabaseLogic,
+ settings: stac_fastapi.core.base_settings.ApiBaseSettings,
+ session: stac_fastapi.core.session.Session = NOTHING
+)
+
Transactions extension specific CRUD operations.
+def create_collection(
+ self,
+ collection: stac_pydantic.collection.Collection,
+ **kwargs
+) -> stac_fastapi.types.stac.Collection
+
Create a new collection in the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection | +stac_types.Collection | +The collection to be created. | +None | +
kwargs | +None | +Additional keyword arguments. | +None | +
Returns:
+Type | +Description | +
---|---|
stac_types.Collection | +The created collection object. | +
Raises:
+Type | +Description | +
---|---|
ConflictError | +If the collection already exists. | +
def create_item(
+ self,
+ collection_id: str,
+ item: Union[stac_pydantic.item.Item, stac_pydantic.item_collection.ItemCollection],
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Item, NoneType]
+
Create an item in the collection.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The id of the collection to add the item to. | +None | +
item | +stac_types.Item | +The item to be added to the collection. | +None | +
kwargs | +None | +Additional keyword arguments. | +None | +
Returns:
+Type | +Description | +
---|---|
stac_types.Item | +The created item. | +
Raises:
+Type | +Description | +
---|---|
NotFound | +If the specified collection is not found in the database. | +
ConflictError | +If the item in the specified collection already exists. | +
def delete_collection(
+ self,
+ collection_id: str,
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Collection, NoneType]
+
Delete a collection.
+This method deletes an existing collection in the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The identifier of the collection that contains the item. | +None | +
kwargs | +None | +Additional keyword arguments. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None. | +
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the collection doesn't exist. | +
def delete_item(
+ self,
+ item_id: str,
+ collection_id: str,
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Item, NoneType]
+
Delete an item from a collection.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item_id | +str | +The identifier of the item to delete. | +None | +
collection_id | +str | +The identifier of the collection that contains the item. | +None | +
Returns:
+Type | +Description | +
---|---|
Optional[stac_types.Item] | +The deleted item, or None if the item was successfully deleted. |
+
def update_collection(
+ self,
+ collection_id: str,
+ collection: stac_pydantic.collection.Collection,
+ **kwargs
+) -> stac_fastapi.types.stac.Collection
+
Update a collection.
This method updates an existing collection in the database by first finding the collection by the id given in the keyword argument collection_id. If no collection_id is given, the id of the given collection object is used. If the object and keyword collection ids don't match, the collection id of the sub-items is updated; otherwise the items are left unchanged. The updated collection is then returned.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +None | +id of the existing collection to be updated | +None | +
collection | +None | +A STAC collection that needs to be updated. | +None | +
kwargs | +None | +Additional keyword arguments. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A STAC collection that has been updated in the database. | +
def update_item(
+ self,
+ collection_id: str,
+ item_id: str,
+ item: stac_pydantic.item.Item,
+ **kwargs
+) -> stac_fastapi.types.stac.Item
+
Update an item in the collection.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The ID of the collection the item belongs to. | +None | +
item_id | +str | +The ID of the item to be updated. | +None | +
item | +stac_types.Item | +The new item data. | +None | +
kwargs | +None | +Other optional arguments, including the request object. | +None | +
Returns:
+Type | +Description | +
---|---|
stac_types.Item | +The updated item object. | +
Raises:
+Type | +Description | +
---|---|
NotFound | +If the specified collection is not found in the database. | +
A few datetime methods.
+def datetime_to_str(
+ dt: datetime.datetime,
+ timespec: str = 'auto'
+) -> str
+
Convert a datetime.datetime instance to an ISO 8601 string in the RFC 3339, section 5.6 format (datatracker.ietf.org/doc/html/rfc3339#section-5.6) required by the STAC Spec.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
dt | +None | +The datetime to convert. | +None | +
timespec | +None | +An optional argument that specifies the number of additional terms of the time to include. Valid options are 'auto', 'hours', 'minutes', 'seconds', 'milliseconds' and 'microseconds'. The default value is 'auto'. |
+None | +
Returns:
+Type | +Description | +
---|---|
str | +The ISO8601 (RFC 3339) formatted string representing the datetime. | +
def now_in_utc(
+
+) -> datetime.datetime
+
Return a datetime value of now with the UTC timezone applied.
+def now_to_rfc3339_str(
+
+) -> str
+
Return an RFC 3339 string representing now.
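A quick sketch of these helpers in use; the import path is assumed from this module's layout and may differ between releases.

```python
from datetime import datetime, timezone

from stac_fastapi.core.datetime_utils import datetime_to_str, now_to_rfc3339_str

# Format an explicit timestamp and "now" as RFC 3339 strings for STAC metadata.
dt = datetime(2024, 1, 31, 12, 30, 0, tzinfo=timezone.utc)
print(datetime_to_str(dt))      # RFC 3339 / ISO 8601 string
print(now_to_rfc3339_str())
```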
Request model for the Aggregation extension.
+FilterLang
+
class EsAggregationExtensionGetRequest(
+ collections: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ ids: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ bbox: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ intersects: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ datetime: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ limit: typing_extensions.Annotated[Union[typing_extensions.Annotated[int, Gt(gt=0), AfterValidator(func=<function crop at 0x7f6e23133040>)], NoneType], Query(PydanticUndefined)] = 10,
+ aggregations: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ filter: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ filter_crs: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ filter_lang: typing_extensions.Annotated[Union[Literal['cql-json', 'cql2-json', 'cql2-text'], NoneType], Query(PydanticUndefined)] = 'cql2-text',
+ collection_id: Union[typing_extensions.Annotated[str, Path(PydanticUndefined)], NoneType] = None,
+ centroid_geohash_grid_frequency_precision: Union[int, NoneType] = None,
+ centroid_geohex_grid_frequency_precision: Union[int, NoneType] = None,
+ centroid_geotile_grid_frequency_precision: Union[int, NoneType] = None,
+ geometry_geohash_grid_frequency_precision: Union[int, NoneType] = None,
+ geometry_geotile_grid_frequency_precision: Union[int, NoneType] = None,
+ datetime_frequency_interval: Union[str, NoneType] = None
+)
+
Implementation specific query parameters for aggregation precision.
+def kwargs(
+ self
+) -> Dict
+
Transform api request params into format which matches the signature of the
+endpoint.
+class EsAggregationExtensionPostRequest(
+ /,
+ **data: 'Any'
+)
+
Implementation specific query parameters for aggregation precision.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
def validate_bbox(
+ v: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]]]
+) -> Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]]]
+
def validate_datetime(
+ value: str
+) -> str
+
def validate_spatial(
+ values: Dict[str, Any]
+) -> Dict[str, Any]
+
end_date
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+spatial_filter
+
Return a geojson-pydantic object representing the spatial filter for the search request.
+Check for both because the bbox
and intersects
parameters are mutually exclusive.
start_date
+
def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self: 'BaseModel',
+ context: 'Any',
+ /
+) -> 'None'
+
We need to both initialize private attributes and call the user-defined model_post_init
+method.
+class EsAsyncAggregationClient(
+ database: stac_fastapi.core.base_database_logic.BaseDatabaseLogic,
+ settings: stac_fastapi.core.base_settings.ApiBaseSettings,
+ session: stac_fastapi.core.session.Session = NOTHING
+)
+
Defines a pattern for implementing the STAC aggregation extension.
+DEFAULT_AGGREGATIONS
+
DEFAULT_DATETIME_INTERVAL
+
GEO_POINT_AGGREGATIONS
+
MAX_GEOHASH_PRECISION
+
MAX_GEOHEX_PRECISION
+
MAX_GEOTILE_PRECISION
+
SUPPORTED_DATETIME_INTERVAL
+
def aggregate(
+ self,
+ aggregate_request: Union[stac_fastapi.core.extensions.aggregation.EsAggregationExtensionPostRequest, NoneType] = None,
+ collection_id: Union[typing_extensions.Annotated[str, Path(PydanticUndefined)], NoneType] = None,
+ collections: Union[List[str], NoneType] = [],
+ datetime: Union[datetime.datetime, Tuple[datetime.datetime, datetime.datetime], Tuple[datetime.datetime, NoneType], Tuple[NoneType, datetime.datetime], NoneType] = None,
+ intersects: Union[str, NoneType] = None,
+ filter_lang: Union[str, NoneType] = None,
+ filter: Union[str, NoneType] = None,
+ aggregations: Union[str, NoneType] = None,
+ ids: Union[List[str], NoneType] = None,
+ bbox: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]], NoneType] = None,
+ centroid_geohash_grid_frequency_precision: Union[int, NoneType] = None,
+ centroid_geohex_grid_frequency_precision: Union[int, NoneType] = None,
+ centroid_geotile_grid_frequency_precision: Union[int, NoneType] = None,
+ geometry_geohash_grid_frequency_precision: Union[int, NoneType] = None,
+ geometry_geotile_grid_frequency_precision: Union[int, NoneType] = None,
+ datetime_frequency_interval: Union[str, NoneType] = None,
+ **kwargs
+) -> Union[Dict, Exception]
+
Get aggregations from the database.
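For orientation, a hedged example of what an aggregation request might look like. The host, collection id, and exact body keys below are illustrative assumptions, not values taken from this documentation; check your deployment's OpenAPI document for the authoritative parameter names.

```python
# Hypothetical request against a local deployment; URL, collection id, and the
# exact body keys are assumptions for illustration only.
import httpx

body = {
    "collections": ["sentinel-2-l2a"],
    "aggregations": ["total_count", "datetime_frequency"],
    "datetime_frequency_interval": "month",
    "filter-lang": "cql2-json",
    "filter": {"op": "<", "args": [{"property": "eo:cloud_cover"}, 10]},
}

response = httpx.post("http://localhost:8080/aggregate", json=body)
response.raise_for_status()
print(response.json())  # response structure depends on the requested aggregations
```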
+def extract_date_histogram_interval(
+ self,
+ value: Union[str, NoneType]
+) -> str
+
Ensure that the interval for the date histogram is valid. If no value is provided, the default will be returned.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
value | +None | +value entered by the user | +None | +
Returns:
+Type | +Description | +
---|---|
None | +string containing the date histogram interval to use. | +
Raises:
+Type | +Description | +
---|---|
HTTPException | +if the supplied value is not in the supported intervals | +
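A minimal sketch of the validation pattern described above, assuming `SUPPORTED_DATETIME_INTERVAL` is a collection of allowed interval strings and `DEFAULT_DATETIME_INTERVAL` is the fallback; the placeholder values below are not the library's actual constants.

```python
from typing import Optional

from fastapi import HTTPException

# Placeholder values; the real constants are defined on EsAsyncAggregationClient.
SUPPORTED_DATETIME_INTERVAL = {"day", "month", "year"}
DEFAULT_DATETIME_INTERVAL = "month"


def extract_date_histogram_interval(value: Optional[str]) -> str:
    """Return a valid date-histogram interval, falling back to the default."""
    if value is None:
        return DEFAULT_DATETIME_INTERVAL
    if value not in SUPPORTED_DATETIME_INTERVAL:
        raise HTTPException(status_code=400, detail=f"Unsupported interval: {value!r}")
    return value
```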
def extract_precision(
+ self,
+ precision: Union[int, NoneType],
+ min_value: int,
+ max_value: int
+) -> Union[int, NoneType]
+
Ensure that the aggregation precision value is within a valid range; otherwise, return the minimum value.
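A minimal sketch of the clamping behaviour described above, assuming a missing or out-of-range precision falls back to the minimum value.

```python
from typing import Optional


def extract_precision(precision: Optional[int], min_value: int, max_value: int) -> int:
    """Sketch: keep precision inside [min_value, max_value], else use the minimum."""
    if precision is None or not (min_value <= precision <= max_value):
        return min_value
    return precision
```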
+def frequency_agg(
+ self,
+ es_aggs,
+ name,
+ data_type
+)
+
Format an aggregation for a frequency distribution aggregation.
+def get_aggregations(
+ self,
+ collection_id: Union[str, NoneType] = None,
+ **kwargs
+)
+
Get the available aggregations for a catalog or collection defined in the STAC JSON. If no aggregations, default aggregations are used.
+def get_filter(
+ self,
+ filter,
+ filter_lang
+)
+
Format the filter parameter in cql2-json or cql2-text.
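The two accepted filter languages express the same constraint differently; the snippet below shows a typical pair, using an example property name.

```python
# The same constraint in both filter languages accepted by get_filter.
# "eo:cloud_cover" is only an example property.
filter_text = "eo:cloud_cover < 10"                                      # cql2-text

filter_json = {"op": "<", "args": [{"property": "eo:cloud_cover"}, 10]}  # cql2-json
```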
+def metric_agg(
+ self,
+ es_aggs,
+ name,
+ data_type
+)
+
Format an aggregation for a metric aggregation.
Fields extension.
+class FieldsExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Override the POST model.
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on HTTP verb
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +fastapi.FastAPI | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class FieldsExtensionPostRequest(
+ /,
+ **data: 'Any'
+)
+
Additional fields and schema for the POST request.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
+class PostFieldsExtension(
+ /,
+ **data: 'Any'
+)
+
PostFieldsExtension.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
Filter extension logic for es conversion.
+queryables_mapping
+
def cql2_like_to_es(
+ string: str
+) -> str
+
Convert CQL2 "LIKE" characters to Elasticsearch "wildcard" characters.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
string | +str | +The string containing CQL2 wildcard characters. | +None | +
Returns:
+Type | +Description | +
---|---|
str | +The converted string with Elasticsearch compatible wildcards. | +
Raises:
+Type | +Description | +
---|---|
ValueError | +If an invalid escape sequence is encountered. | +
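To make the conversion concrete, a toy sketch of the wildcard mapping: CQL2 `LIKE` uses `%` (any sequence) and `_` (single character), while Elasticsearch wildcard queries use `*` and `?`. The real function also handles escape sequences, which this sketch ignores.

```python
def naive_like_to_wildcard(pattern: str) -> str:
    """Toy conversion; the escape handling done by cql2_like_to_es is omitted."""
    return pattern.replace("%", "*").replace("_", "?")


assert naive_like_to_wildcard("sentinel-2%") == "sentinel-2*"
assert naive_like_to_wildcard("S2A_MSIL2A_2024%") == "S2A?MSIL2A?2024*"
```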
def to_es(
+ query: Dict[str, Any]
+) -> Dict[str, Any]
+
Transform a simplified CQL2 query structure to an Elasticsearch compatible query DSL.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
query | +Dict[str, Any] | +The query dictionary containing 'op' and 'args'. | +None | +
Returns:
+Type | +Description | +
---|---|
Dict[str, Any] | +The corresponding Elasticsearch query in the form of a dictionary. | +
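For orientation, a hedged before/after of the kind of translation involved; the exact field prefixes and DSL emitted by `to_es` may differ.

```python
# A simplified CQL2 comparison ...
cql2_query = {"op": "<=", "args": [{"property": "eo:cloud_cover"}, 20]}

# ... typically maps to an Elasticsearch range query of roughly this shape.
# The field prefix ("properties.") is an assumption about the index mapping.
es_query = {"range": {"properties.eo:cloud_cover": {"lte": 20}}}
```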
def to_es_field(
+ field: str
+) -> str
+
Map a given field to its corresponding Elasticsearch field according to a predefined mapping.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
field | +str | +The field name from a user query or filter. | +None | +
Returns:
+Type | +Description | +
---|---|
str | +The mapped field name suitable for Elasticsearch queries. | +
class AdvancedComparisonOp(
+ /,
+ *args,
+ **kwargs
+)
+
Enumeration for advanced comparison operators like 'like', 'between', and 'in'.
+BETWEEN
+
IN
+
LIKE
+
name
+
value
+
class ComparisonOp(
+ /,
+ *args,
+ **kwargs
+)
+
Enumeration for comparison operators used in filtering queries according to CQL2 standards.
+EQ
+
GT
+
GTE
+
IS_NULL
+
LT
+
LTE
+
NEQ
+
name
+
value
+
class LogicalOp(
+ /,
+ *args,
+ **kwargs
+)
+
Enumeration for logical operators used in constructing Elasticsearch queries.
+AND
+
NOT
+
OR
+
name
+
value
+
class SpatialIntersectsOp(
+ /,
+ *args,
+ **kwargs
+)
+
Enumeration for spatial intersection operator as per CQL2 standards.
+S_INTERSECTS
+
name
+
value
+
elasticsearch extensions modifications.
+class Operator(
+ /,
+ *args,
+ **kwargs
+)
+
Defines the set of operators supported by the API.
+eq
+
gt
+
gte
+
lt
+
lte
+
ne
+
class QueryExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Query Extension.
+Override the POST request model to add validation against supported fields.
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on HTTP verb
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class QueryableTypes(
+
+)
+
Defines a set of queryable fields.
STAC SQLAlchemy specific query search model.
+NumType
+
logger
+
class Operator(
+ /,
+ *args,
+ **kwargs
+)
+
Defines the set of operators supported by the API.
+eq
+
gt
+
gte
+
lt
+
lte
+
ne
+
class QueryExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Query Extension.
+Override the POST request model to add validation against supported fields.
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on HTTP verb
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class QueryExtensionPostRequest(
+ /,
+ **data: 'Any'
+)
+
Queryable validation.
+Add queryables validation to the POST request to raise errors for unsupported queries.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
def validate_query_fields(
+ values: Dict
+) -> Dict
+
Validate query fields.
+model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
+class QueryableTypes(
+
+)
+
Defines a set of queryable fields.
+class Queryables(
+ /,
+ *args,
+ **kwargs
+)
+
Queryable fields.
+Core library.
+link helpers.
+INFERRED_LINK_RELS
+
def merge_params(
+ url: str,
+ newparams: Dict
+) -> str
+
Merge url parameters.
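A minimal standard-library sketch of the merge described above, assuming new parameter values replace existing ones with the same name.

```python
from urllib.parse import parse_qs, urlencode, urlparse, urlunparse


def merge_params_sketch(url: str, newparams: dict) -> str:
    """Sketch: merge newparams into the query string; new values win on conflict."""
    parts = urlparse(url)
    query = parse_qs(parts.query)
    query.update({key: [str(value)] for key, value in newparams.items()})
    return urlunparse(parts._replace(query=urlencode(query, doseq=True)))


print(merge_params_sketch("https://example.com/search?limit=10", {"token": "next:abc"}))
# https://example.com/search?limit=10&token=next%3Aabc
```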
+class BaseLinks(
+ request: starlette.requests.Request
+)
+
Create inferred links common to collections and items.
+base_url
+
Get the base url.
+url
+
Get the current request url.
+def create_links(
+ self
+) -> List[Dict[str, Any]]
+
Return all inferred links.
+def get_links(
+ self,
+ extra_links: Union[List[Dict[str, Any]], NoneType] = None
+) -> List[Dict[str, Any]]
+
Generate all the links.
+Get the links object for a stac resource by iterating through +available methods on this class that start with link_.
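The `link_*` naming convention mentioned above can be sketched as follows; the real class additionally resolves relative hrefs against the request URL and merges any `extra_links`.

```python
class LinksSketch:
    """Illustration of the link_* convention only, not the library's class."""

    def link_root(self) -> dict:
        return {"rel": "root", "type": "application/json", "href": "https://example.com/"}

    def link_self(self) -> dict:
        return {"rel": "self", "type": "application/json", "href": "https://example.com/collections/demo"}

    def create_links(self) -> list:
        # Every method whose name starts with "link_" contributes one link object.
        return [getattr(self, name)() for name in dir(self) if name.startswith("link_")]


print(LinksSketch().create_links())
```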
+def link_root(
+ self
+) -> Dict
+
Return the catalog root.
+def link_self(
+ self
+) -> Dict
+
Return the self link.
+def resolve(
+ self,
+ url
+)
+
Resolve url to the current request url.
+class CollectionLinks(
+ request: starlette.requests.Request,
+ collection_id: str,
+ extensions: List[str] = NOTHING
+)
+
Create inferred links specific to collections.
+base_url
+
Get the base url.
+url
+
Get the current request url.
+def create_links(
+ self
+) -> List[Dict[str, Any]]
+
Return all inferred links.
+def get_links(
+ self,
+ extra_links: Union[List[Dict[str, Any]], NoneType] = None
+) -> List[Dict[str, Any]]
+
Generate all the links.
+Get the links object for a stac resource by iterating through +available methods on this class that start with link_.
+def link_aggregate(
+ self
+) -> Dict[str, Any]
+
Create the aggregate
link.
def link_aggregations(
+ self
+) -> Dict[str, Any]
+
Create the aggregations
link.
def link_items(
+ self
+) -> Dict[str, Any]
+
Create the items
link.
def link_parent(
+ self
+) -> Dict[str, Any]
+
Create the parent
link.
def link_queryables(
+ self
+) -> Dict[str, Any]
+
Create the queryables
link.
def link_root(
+ self
+) -> Dict
+
Return the catalog root.
+def link_self(
+ self
+) -> Dict
+
Return the self link.
+def resolve(
+ self,
+ url
+)
+
Resolve url to the current request url.
+class PagingLinks(
+ request: starlette.requests.Request,
+ *,
+ next: Union[str, NoneType] = None
+)
+
Create links for paging.
+base_url
+
Get the base url.
+url
+
Get the current request url.
+def create_links(
+ self
+) -> List[Dict[str, Any]]
+
Return all inferred links.
+def get_links(
+ self,
+ extra_links: Union[List[Dict[str, Any]], NoneType] = None
+) -> List[Dict[str, Any]]
+
Generate all the links.
+Get the links object for a stac resource by iterating through +available methods on this class that start with link_.
+def link_next(
+ self
+) -> Union[Dict[str, Any], NoneType]
+
Create link for next page.
+def link_root(
+ self
+) -> Dict
+
Return the catalog root.
+def link_self(
+ self
+) -> Dict
+
Return the self link.
+def resolve(
+ self,
+ url
+)
+
Resolve url to the current request url.
Unused search model.
Rate limiting middleware.
+logger
+
def get_limiter(
+ key_func=get_remote_address
+)
+
Create and return a Limiter instance for rate limiting.
+def setup_rate_limit(
+ app: fastapi.applications.FastAPI,
+ rate_limit: Union[str, NoneType] = None,
+ key_func=get_remote_address
+)
+
Set up rate limiting middleware.
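A hedged wiring example, assuming the helper accepts a slowapi-style limit string such as `"200/minute"`; the import path below is an assumption and should be checked against the installed package.

```python
from fastapi import FastAPI

# Assumed import path; verify against the installed stac-fastapi core package.
from stac_fastapi.core.rate_limit import setup_rate_limit

app = FastAPI()

# "200/minute" is an example slowapi-style limit string, not a recommended value.
setup_rate_limit(app, rate_limit="200/minute")
```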
Route Dependencies Module.
+route_dependencies_schema
+
def get_dependencies(
+ route_dependency_conf: dict
+) -> list
+
Get dependencies from route dependency configuration.
+def get_route_dependencies(
+ route_dependencies_env: str = ''
+) -> list
+
Route dependencies generator.
+Generate a set of route dependencies for authentication to the +provided FastAPI application.
+def get_route_dependencies_conf(
+ route_dependencies_env: str
+) -> list
+
Get Route dependencies configuration from file or environment variable.
+def get_routes(
+ route_dependency_conf: dict
+) -> list
+
Get routes from route dependency configuration.
Serializers.
+class CollectionSerializer(
+
+)
+
Serialization methods for STAC collections.
+def db_to_stac(
+ collection: dict,
+ request: starlette.requests.Request,
+ extensions: Union[List[str], NoneType] = []
+) -> stac_fastapi.types.stac.Collection
+
Transform database model to STAC collection.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection | +dict | +The collection data in dictionary form, extracted from the database. | +None | +
request | +starlette.requests.Request | +The API request. | +None |
extensions | +None | +A list of the extension class names (ext.__name__ ) or all enabled STAC API extensions. |
+None | +
Returns:
+Type | +Description | +
---|---|
stac_types.Collection | +The STAC collection object. | +
def stac_to_db(
+ collection: stac_fastapi.types.stac.Collection,
+ request: starlette.requests.Request
+) -> stac_fastapi.types.stac.Collection
+
Transform STAC Collection to database-ready STAC collection.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection | +stac_types.Collection | +The STAC Collection object to be transformed. | +None |
request | +starlette.requests.Request | +The API request. | +None |
Returns:
+Type | +Description | +
---|---|
stac_types.Collection | +The database-ready STAC Collection object. | +
class ItemSerializer(
+
+)
+
Serialization methods for STAC items.
+def db_to_stac(
+ item: dict,
+ base_url: str
+) -> stac_fastapi.types.stac.Item
+
Transform database-ready STAC item to STAC item.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item | +dict | +The database-ready STAC item to be transformed. | +None | +
base_url | +str | +The base URL for the STAC API. | +None | +
Returns:
+Type | +Description | +
---|---|
stac_types.Item | +The STAC item object. | +
def stac_to_db(
+ stac_data: stac_fastapi.types.stac.Item,
+ base_url: str
+) -> stac_fastapi.types.stac.Item
+
Transform STAC item to database-ready STAC item.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
stac_data | +stac_types.Item | +The STAC item object to be transformed. | +None | +
base_url | +str | +The base URL for the STAC API. | +None | +
Returns:
+Type | +Description | +
---|---|
stac_types.Item | +The database-ready STAC item object. | +
class Serializer(
+
+)
+
Defines serialization methods between the API and the data model.
+This class is meant to be subclassed and implemented by specific serializers for different STAC objects (e.g. Item, Collection).
+def db_to_stac(
+ item: dict,
+ base_url: str
+) -> Any
+
Transform database model to STAC object.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item | +dict | +A dictionary representing the database model. | +None | +
base_url | +str | +The base URL of the STAC API. | +None | +
Returns:
+Type | +Description | +
---|---|
Any | +A STAC object, e.g. an Item or Collection , representing the input item . |
+
def stac_to_db(
+ stac_object: Any,
+ base_url: str
+) -> dict
+
Transform STAC object to database model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
stac_object | +Any | +A STAC object, e.g. an Item or Collection . |
+None | +
base_url | +str | +The base URL of the STAC API. | +None | +
Returns:
+Type | +Description | +
---|---|
dict | +A dictionary representing the database model. | +
database session management.
+logger
+
class Session(
+
+)
+
Database session management.
+def create_from_env(
+
+)
+
Create from environment.
+def create_from_settings(
+ settings
+)
+
Create a Session object from settings.
Module for geospatial processing functions.
+This module contains functions for transforming geospatial coordinates, +such as converting bounding boxes to polygon representations.
+MAX_LIMIT
+
def bbox2polygon(
+ b0: float,
+ b1: float,
+ b2: float,
+ b3: float
+) -> List[List[List[float]]]
+
Transform a bounding box represented by its four coordinates b0, b1, b2, and b3 into a polygon.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
b0 | +float | +The x-coordinate of the lower-left corner of the bounding box. | +None | +
b1 | +float | +The y-coordinate of the lower-left corner of the bounding box. | +None | +
b2 | +float | +The x-coordinate of the upper-right corner of the bounding box. | +None | +
b3 | +float | +The y-coordinate of the upper-right corner of the bounding box. | +None | +
Returns:
+Type | +Description | +
---|---|
List[List[List[float]]] | +A polygon represented as a list of lists of coordinates. | +
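A small, hedged usage sketch (the import path is an assumption; the function may live in a different utilities module within the package):

```python
# Hedged sketch: convert a bbox (minx, miny, maxx, maxy) into polygon coordinates.
from stac_fastapi.core.utilities import bbox2polygon  # import path assumed

coordinates = bbox2polygon(-10.0, -10.0, 10.0, 10.0)

# The nesting matches the documented List[List[List[float]]] return type:
# ring(s) of [x, y] coordinate pairs.
print(coordinates)
```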
def dict_deep_update(
+ merge_to: Dict[str, Any],
+ merge_from: Dict[str, Any]
+) -> None
+
Perform a deep update of two dicts.
+merge_to is updated in-place with the values from merge_from. +merge_from values take precedence over existing values in merge_to.
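A hedged sketch of the documented merge semantics (import path assumed):

```python
# Hedged sketch: merge_to is updated in place; merge_from wins on conflicts.
from stac_fastapi.core.utilities import dict_deep_update  # import path assumed

merge_to = {"properties": {"datetime": "2024-01-01T00:00:00Z", "gsd": 10}}
merge_from = {"properties": {"gsd": 30}}

dict_deep_update(merge_to, merge_from)

# merge_to["properties"] now contains both keys, with "gsd" taken from merge_from.
```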
+def filter_fields(
+ item: Union[stac_fastapi.types.stac.Item, Dict[str, Any]],
+ include: Union[Set[str], NoneType] = None,
+ exclude: Union[Set[str], NoneType] = None
+) -> stac_fastapi.types.stac.Item
+
Preserve and remove fields as indicated by the fields extension include/exclude sets.
+Returns a shallow copy of the Item with the fields filtered.
+This will not perform a deep copy; values of the original item will be referenced +in the return item.
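A hedged usage sketch (the import path and field names are illustrative):

```python
# Hedged sketch: apply the Fields Extension include/exclude sets to an item.
from stac_fastapi.core.utilities import filter_fields  # import path assumed

item = {
    "id": "example-item",
    "properties": {"datetime": "2024-01-01T00:00:00Z", "eo:cloud_cover": 5},
    "assets": {"thumbnail": {"href": "https://example.com/thumb.png"}},
}

slim = filter_fields(item, include={"id", "properties"}, exclude=None)

# The copy is shallow: nested values in `slim` still reference objects in `item`.
```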
Library version.
FastAPI application.
+aggregation_extension
+
api
+
app
+
database_logic
+
extensions
+
filter_extension
+
handler
+
search_extensions
+
session
+
settings
+
def create_handler(
+ app
+)
+
Create a handler to use with AWS Lambda if mangum available.
+def run(
+
+) -> None
+
Run app from command line using uvicorn if available.
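A hedged sketch of wiring these entry points together (the module path is an assumption; the OpenSearch backend ships an equivalent app module):

```python
# Hedged sketch: reuse the documented create_handler() and run() helpers.
from stac_fastapi.elasticsearch.app import app, create_handler, run  # path assumed

# AWS Lambda entry point; returns a Mangum handler when mangum is installed.
handler = create_handler(app)

if __name__ == "__main__":
    # Serve locally with uvicorn when it is available.
    run()
```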
+class post_request_model(
+ /,
+ **data: 'Any'
+)
+
Base arguments for POST Request.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
def validate_bbox(
+ v: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]]]
+) -> Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]]]
+
def validate_datetime(
+ value: str
+) -> str
+
def validate_spatial(
+ values: Dict[str, Any]
+) -> Dict[str, Any]
+
end_date
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+spatial_filter
+
Return a geojson-pydantic object representing the spatial filter for the search request.
+Check for both because the bbox
and intersects
parameters are mutually exclusive.
start_date
+
def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self: 'BaseModel',
+ context: 'Any',
+ /
+) -> 'None'
+
We need to both initialize private attributes and call the user-defined model_post_init
+method.
API configuration.
+class AsyncElasticsearchSettings(
+ __pydantic_self__,
+ _case_sensitive: 'bool | None' = None,
+ _nested_model_default_partial_update: 'bool | None' = None,
+ _env_prefix: 'str | None' = None,
+ _env_file: 'DotenvType | None' = PosixPath('.'),
+ _env_file_encoding: 'str | None' = None,
+ _env_ignore_empty: 'bool | None' = None,
+ _env_nested_delimiter: 'str | None' = None,
+ _env_parse_none_str: 'str | None' = None,
+ _env_parse_enums: 'bool | None' = None,
+ _cli_prog_name: 'str | None' = None,
+ _cli_parse_args: 'bool | list[str] | tuple[str, ...] | None' = None,
+ _cli_settings_source: 'CliSettingsSource[Any] | None' = None,
+ _cli_parse_none_str: 'str | None' = None,
+ _cli_hide_none_type: 'bool | None' = None,
+ _cli_avoid_json: 'bool | None' = None,
+ _cli_enforce_required: 'bool | None' = None,
+ _cli_use_class_docs_for_groups: 'bool | None' = None,
+ _cli_exit_on_error: 'bool | None' = None,
+ _cli_prefix: 'str | None' = None,
+ _cli_flag_prefix_char: 'str | None' = None,
+ _cli_implicit_flags: 'bool | None' = None,
+ _cli_ignore_unknown_args: 'bool | None' = None,
+ _secrets_dir: 'PathType | None' = None,
+ **values: 'Any'
+)
+
API settings.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def settings_customise_sources(
+ settings_cls: 'type[BaseSettings]',
+ init_settings: 'PydanticBaseSettingsSource',
+ env_settings: 'PydanticBaseSettingsSource',
+ dotenv_settings: 'PydanticBaseSettingsSource',
+ file_secret_settings: 'PydanticBaseSettingsSource'
+) -> 'tuple[PydanticBaseSettingsSource, ...]'
+
Define the sources and their order for loading the settings values.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
settings_cls | +None | +The Settings class. | +None | +
init_settings | +None | +The InitSettingsSource instance. |
+None | +
env_settings | +None | +The EnvSettingsSource instance. |
+None | +
dotenv_settings | +None | +The DotEnvSettingsSource instance. |
+None | +
file_secret_settings | +None | +The SecretsSettingsSource instance. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +A tuple containing the sources and their order for loading the settings values. | +
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
create_client
+
Create async elasticsearch client.
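A hedged sketch of obtaining a client from these settings (the import path is an assumption, and create_client is accessed as a property here because it is documented without parameters):

```python
# Hedged sketch: settings values are read from environment variables by pydantic.
from stac_fastapi.elasticsearch.config import AsyncElasticsearchSettings  # path assumed

settings = AsyncElasticsearchSettings()
client = settings.create_client  # async Elasticsearch client, per the description above
```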
+model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
+class ElasticsearchSettings(
+ __pydantic_self__,
+ _case_sensitive: 'bool | None' = None,
+ _nested_model_default_partial_update: 'bool | None' = None,
+ _env_prefix: 'str | None' = None,
+ _env_file: 'DotenvType | None' = PosixPath('.'),
+ _env_file_encoding: 'str | None' = None,
+ _env_ignore_empty: 'bool | None' = None,
+ _env_nested_delimiter: 'str | None' = None,
+ _env_parse_none_str: 'str | None' = None,
+ _env_parse_enums: 'bool | None' = None,
+ _cli_prog_name: 'str | None' = None,
+ _cli_parse_args: 'bool | list[str] | tuple[str, ...] | None' = None,
+ _cli_settings_source: 'CliSettingsSource[Any] | None' = None,
+ _cli_parse_none_str: 'str | None' = None,
+ _cli_hide_none_type: 'bool | None' = None,
+ _cli_avoid_json: 'bool | None' = None,
+ _cli_enforce_required: 'bool | None' = None,
+ _cli_use_class_docs_for_groups: 'bool | None' = None,
+ _cli_exit_on_error: 'bool | None' = None,
+ _cli_prefix: 'str | None' = None,
+ _cli_flag_prefix_char: 'str | None' = None,
+ _cli_implicit_flags: 'bool | None' = None,
+ _cli_ignore_unknown_args: 'bool | None' = None,
+ _secrets_dir: 'PathType | None' = None,
+ **values: 'Any'
+)
+
API settings.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def settings_customise_sources(
+ settings_cls: 'type[BaseSettings]',
+ init_settings: 'PydanticBaseSettingsSource',
+ env_settings: 'PydanticBaseSettingsSource',
+ dotenv_settings: 'PydanticBaseSettingsSource',
+ file_secret_settings: 'PydanticBaseSettingsSource'
+) -> 'tuple[PydanticBaseSettingsSource, ...]'
+
Define the sources and their order for loading the settings values.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
settings_cls | +None | +The Settings class. | +None | +
init_settings | +None | +The InitSettingsSource instance. |
+None | +
env_settings | +None | +The EnvSettingsSource instance. |
+None | +
dotenv_settings | +None | +The DotEnvSettingsSource instance. |
+None | +
file_secret_settings | +None | +The SecretsSettingsSource instance. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +A tuple containing the sources and their order for loading the settings values. | +
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
create_client
+
Create es client.
+model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
Database logic.
+COLLECTIONS_INDEX
+
DEFAULT_SORT
+
ES_COLLECTIONS_MAPPINGS
+
ES_INDEX_NAME_UNSUPPORTED_CHARS
+
ES_ITEMS_MAPPINGS
+
ES_ITEMS_SETTINGS
+
ES_MAPPINGS_DYNAMIC_TEMPLATES
+
ITEMS_INDEX_PREFIX
+
ITEM_INDICES
+
MAX_LIMIT
+
NumType
+
logger
+
def create_collection_index(
+
+) -> None
+
Create the index for a Collection. The settings of the index template will be used implicitly.
+Returns:
+Type | +Description | +
---|---|
None | +None | +
def create_index_templates(
+
+) -> None
+
Create index templates for the Collection and Item indices.
+Returns:
+Type | +Description | +
---|---|
None | +None | +
def create_item_index(
+ collection_id: str
+)
+
Create the index for Items. The settings of the index template will be used implicitly.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +Collection identifier. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
def delete_item_index(
+ collection_id: str
+)
+
Delete the index for items in a collection.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The ID of the collection whose items index will be deleted. | +None | +
def index_by_collection_id(
+ collection_id: str
+) -> str
+
Translate a collection id into an Elasticsearch index name.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The collection id to translate into an index name. | +None | +
Returns:
+Type | +Description | +
---|---|
str | +The index name derived from the collection id. | +
def indices(
+ collection_ids: Union[List[str], NoneType]
+) -> str
+
Get a comma-separated string of index names for a given list of collection ids.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_ids | +None | +A list of collection ids. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A string of comma-separated index names. If collection_ids is None, returns the default indices. |
+
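A hedged sketch of the index-name helpers (collection ids below are illustrative):

```python
# Hedged sketch: map collection ids to Elasticsearch index names.
from stac_fastapi.elasticsearch.database_logic import index_by_collection_id, indices

item_index = index_by_collection_id("sentinel-2-l2a")

# Comma-separated index list for several collections, as used when searching.
index_list = indices(["sentinel-2-l2a", "landsat-c2-l2"])
```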
def mk_actions(
+ collection_id: str,
+ processed_items: List[stac_fastapi.types.stac.Item]
+)
+
Create Elasticsearch bulk actions for a list of processed items.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The identifier for the collection the items belong to. | +None | +
processed_items | +List[Item] | +The list of processed items to be bulk indexed. | +None | +
Returns:
+Type | +Description | +
---|---|
List[Dict[str, Union[str, Dict]]] | +The list of bulk actions to be executed, each action being a dictionary with the following keys: - _index : the index to store the document in.- _id : the document's identifier.- _source : the source of the document. |
+
def mk_item_id(
+ item_id: str,
+ collection_id: str
+)
+
Create the document id for an Item in Elasticsearch.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item_id | +str | +The id of the Item. | +None | +
collection_id | +str | +The id of the Collection that the Item belongs to. | +None | +
Returns:
+Type | +Description | +
---|---|
str | +The document id for the Item, combining the Item id and the Collection id, separated by a | character. |
+
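A hedged sketch (the ids are illustrative):

```python
# Hedged sketch: build the Elasticsearch document id for an Item.
from stac_fastapi.elasticsearch.database_logic import mk_item_id

doc_id = mk_item_id(item_id="example-item", collection_id="example-collection")
# Per the table above, the result joins the two ids with a "|" separator.
```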
class DatabaseLogic(
+ item_serializer: Type[stac_fastapi.core.serializers.ItemSerializer] = <class 'stac_fastapi.core.serializers.ItemSerializer'>,
+ collection_serializer: Type[stac_fastapi.core.serializers.CollectionSerializer] = <class 'stac_fastapi.core.serializers.CollectionSerializer'>,
+ extensions: List[str] = NOTHING
+)
+
Database logic.
+aggregation_mapping
+
client
+
sync_client
+
def apply_bbox_filter(
+ search: elasticsearch_dsl.search.Search,
+ bbox: List
+)
+
Filter search results based on bounding box.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search | +Search | +The search object to apply the filter to. | +None | +
bbox | +List | +The bounding box coordinates, represented as a list of four values [minx, miny, maxx, maxy]. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +search (Search): The search object with the bounding box filter applied. | +
def apply_collections_filter(
+ search: elasticsearch_dsl.search.Search,
+ collection_ids: List[str]
+)
+
Database logic to search a list of STAC collection ids.
+def apply_cql2_filter(
+ search: elasticsearch_dsl.search.Search,
+ _filter: Union[Dict[str, Any], NoneType]
+)
+
Apply a CQL2 filter to an Elasticsearch Search object.
+This method transforms a dictionary representing a CQL2 filter into an Elasticsearch query +and applies it to the provided Search object. If the filter is None, the original Search +object is returned unmodified.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search | +Search | +The Elasticsearch Search object to which the filter will be applied. | +None | +
_filter | +Optional[Dict[str, Any]] | +The filter in dictionary form that needs to be applied to the search. The dictionary should follow the structure required by the to_es function which converts itto an Elasticsearch query. |
+None | +
Returns:
+Type | +Description | +
---|---|
Search | +The modified Search object with the filter applied if a filter is provided, otherwise the original Search object. |
+
def apply_datetime_filter(
+ search: elasticsearch_dsl.search.Search,
+ datetime_search
+)
+
Apply a filter to search based on datetime field.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search | +Search | +The search object to filter. | +None | +
datetime_search | +dict | +The datetime filter criteria. | +None | +
Returns:
+Type | +Description | +
---|---|
Search | +The filtered search object. | +
def apply_free_text_filter(
+ search: elasticsearch_dsl.search.Search,
+ free_text_queries: Union[List[str], NoneType]
+)
+
Database logic to perform query for search endpoint.
+def apply_ids_filter(
+ search: elasticsearch_dsl.search.Search,
+ item_ids: List[str]
+)
+
Database logic to search a list of STAC item ids.
+def apply_intersects_filter(
+ search: elasticsearch_dsl.search.Search,
+ intersects: stac_fastapi.elasticsearch.database_logic.Geometry
+)
+
Filter search results based on intersecting geometry.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search | +Search | +The search object to apply the filter to. | +None | +
intersects | +Geometry | +The intersecting geometry, represented as a GeoJSON-like object. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +search (Search): The search object with the intersecting geometry filter applied. | +
def apply_stacql_filter(
+ search: elasticsearch_dsl.search.Search,
+ op: str,
+ field: str,
+ value: float
+)
+
Filter search results based on a comparison between a field and a value.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search | +Search | +The search object to apply the filter to. | +None | +
op | +str | +The comparison operator to use. Can be 'eq' (equal), 'gt' (greater than), 'gte' (greater than or equal), 'lt' (less than), or 'lte' (less than or equal). |
+None | +
field | +str | +The field to perform the comparison on. | +None | +
value | +float | +The value to compare the field against. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +search (Search): The search object with the specified filter applied. | +
def make_search(
+
+)
+
Database logic to create a Search instance.
+def populate_sort(
+ sortby: List
+) -> Union[Dict[str, Dict[str, str]], NoneType]
+
Database logic to sort search instance.
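Putting a few of the search helpers documented above together, a hedged sketch (ids and bbox values are illustrative; the helpers are assumed to be callable on a DatabaseLogic instance since their signatures take no self):

```python
# Hedged sketch: build a Search and narrow it with the documented filter helpers.
from stac_fastapi.elasticsearch.database_logic import DatabaseLogic

db = DatabaseLogic()
search = db.make_search()
search = db.apply_collections_filter(search=search, collection_ids=["example-collection"])
search = db.apply_bbox_filter(search=search, bbox=[-10.0, -10.0, 10.0, 10.0])
```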
+def aggregate(
+ self,
+ collection_ids: Union[List[str], NoneType],
+ aggregations: List[str],
+ search: elasticsearch_dsl.search.Search,
+ centroid_geohash_grid_precision: int,
+ centroid_geohex_grid_precision: int,
+ centroid_geotile_grid_precision: int,
+ geometry_geohash_grid_precision: int,
+ geometry_geotile_grid_precision: int,
+ datetime_frequency_interval: str,
+ ignore_unavailable: Union[bool, NoneType] = True
+)
+
Return aggregations of STAC Items.
+def bulk_async(
+ self,
+ collection_id: str,
+ processed_items: List[stac_fastapi.types.stac.Item],
+ refresh: bool = False
+) -> None
+
Perform a bulk insert of items into the database asynchronously.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
self | +None | +The instance of the object calling this function. | +None | +
collection_id | +str | +The ID of the collection to which the items belong. | +None | +
processed_items | +List[Item] | +A list of Item objects to be inserted into the database. |
+None | +
refresh | +bool | +Whether to refresh the index after the bulk insert (default: False). | +None | +
def bulk_sync(
+ self,
+ collection_id: str,
+ processed_items: List[stac_fastapi.types.stac.Item],
+ refresh: bool = False
+) -> None
+
Perform a bulk insert of items into the database synchronously.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
self | +None | +The instance of the object calling this function. | +None | +
collection_id | +str | +The ID of the collection to which the items belong. | +None | +
processed_items | +List[Item] | +A list of Item objects to be inserted into the database. |
+None | +
refresh | +bool | +Whether to refresh the index after the bulk insert (default: False). | +None | +
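+A hedged sketch of a bulk ingest, where db is assumed to be an instance of the database-logic class documented here and items a list of already validated STAC Item dictionaries:
python
+# Sketch only: prep each item (links, collection existence check), then insert in one bulk call.
+processed = [
+    db.sync_prep_create_item(item, base_url="http://localhost:8080")  # illustrative base URL
+    for item in items
+]
+db.bulk_sync("my-collection", processed, refresh=True)  # illustrative collection id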
def check_collection_exists(
+ self,
+ collection_id: str
+)
+
Database logic to check if a collection exists.
+def create_collection(
+ self,
+ collection: stac_fastapi.types.stac.Collection,
+ refresh: bool = False
+)
+
Create a single collection in the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection | +Collection | +The Collection object to be created. | +None | +
refresh | +bool | +Whether to refresh the index after the creation. Default is False. | +None | +
Raises:
+Type | +Description | +
---|---|
ConflictError | +If a Collection with the same id already exists in the database. | +
def create_item(
+ self,
+ item: stac_fastapi.types.stac.Item,
+ refresh: bool = False
+)
+
Database logic for creating one item.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item | +Item | +The item to be created. | +None | +
refresh | +bool | +Refresh the index after performing the operation. Defaults to False. | +False | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Raises:
+Type | +Description | +
---|---|
ConflictError | +If the item already exists in the database. | +
def delete_collection(
+ self,
+ collection_id: str,
+ refresh: bool = False
+)
+
Delete a collection from the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
self | +None | +The instance of the object calling this function. | +None | +
collection_id | +str | +The ID of the collection to be deleted. | +None | +
refresh | +bool | +Whether to refresh the index after the deletion (default: False). | +None | +
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the collection with the given collection_id is not found in the database. |
+
def delete_collections(
+ self
+) -> None
+
Danger: this is only for tests.
+def delete_item(
+ self,
+ item_id: str,
+ collection_id: str,
+ refresh: bool = False
+)
+
Delete a single item from the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item_id | +str | +The id of the Item to be deleted. | +None | +
collection_id | +str | +The id of the Collection that the Item belongs to. | +None | +
refresh | +bool | +Whether to refresh the index after the deletion. Default is False. | +None | +
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the Item does not exist in the database. | +
def delete_items(
+ self
+) -> None
+
Danger: this is only for tests.
+def execute_search(
+ self,
+ search: elasticsearch_dsl.search.Search,
+ limit: int,
+ token: Union[str, NoneType],
+ sort: Union[Dict[str, Dict[str, str]], NoneType],
+ collection_ids: Union[List[str], NoneType],
+ ignore_unavailable: bool = True
+) -> Tuple[Iterable[Dict[str, Any]], Union[int, NoneType], Union[str, NoneType]]
+
Execute a search query with limit and other optional parameters.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search | +Search | +The search query to be executed. | +None | +
limit | +int | +The maximum number of results to be returned. | +None | +
token | +Optional[str] | +The token used to return the next set of results. | +None | +
sort | +Optional[Dict[str, Dict[str, str]]] | +Specifies how the results should be sorted. | +None | +
collection_ids | +Optional[List[str]] | +The collection ids to search. | +None | +
ignore_unavailable | +bool | +Whether to ignore unavailable collections. Defaults to True. | +True | +
Returns:
+Type | +Description | +
---|---|
Tuple[Iterable[Dict[str, Any]], Optional[int], Optional[str]] | +A tuple containing: - An iterable of search results, where each result is a dictionary with keys and values representing the fields and values of each document. - The total number of results (if the count could be computed), or None if the count could not be computed. - The token to be used to retrieve the next set of results, or None if there are no more results. |
+
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the collections specified in collection_ids do not exist. |
+
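+For orientation only, a paged query could be driven as below; db is assumed to be an instance of the asynchronous database-logic class and search a Search prepared with the filter helpers above (drop the await if the implementation in use is synchronous).
python
+# Sketch only: fetch one page and keep the token for the next request.
+items, matched, next_token = await db.execute_search(
+    search=search,
+    limit=100,
+    token=None,  # pass the previously returned next_token to fetch the following page
+    sort=None,
+    collection_ids=["sentinel-2-l2a"],  # illustrative collection id
+)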
def find_collection(
+ self,
+ collection_id: str
+) -> stac_fastapi.types.stac.Collection
+
Find and return a collection from the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
self | +None | +The instance of the object calling this function. | +None | +
collection_id | +str | +The ID of the collection to be found. | +None | +
Returns:
+Type | +Description | +
---|---|
Collection | +The found collection, represented as a Collection object. |
+
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the collection with the given collection_id is not found in the database. |
+
def get_all_collections(
+ self,
+ token: Union[str, NoneType],
+ limit: int,
+ request: starlette.requests.Request
+) -> Tuple[List[Dict[str, Any]], Union[str, NoneType]]
+
Retrieve a list of all collections from Elasticsearch, supporting pagination.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
token | +Optional[str] | +The pagination token. | +None | +
limit | +int | +The number of results to return. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A tuple of (collections, next pagination token if any). | +
def get_one_item(
+ self,
+ collection_id: str,
+ item_id: str
+) -> Dict
+
Retrieve a single item from the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The id of the Collection that the Item belongs to. | +None | +
item_id | +str | +The id of the Item. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +item (Dict): A dictionary containing the source data for the Item. | +
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the specified Item does not exist in the Collection. | +
def prep_create_item(
+ self,
+ item: stac_fastapi.types.stac.Item,
+ base_url: str,
+ exist_ok: bool = False
+) -> stac_fastapi.types.stac.Item
+
Preps an item for insertion into the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item | +Item | +The item to be prepped for insertion. | +None | +
base_url | +str | +The base URL used to create the item's self URL. | +None | +
exist_ok | +bool | +Indicates whether the item can exist already. | +None | +
Returns:
+Type | +Description | +
---|---|
Item | +The prepped item. | +
Raises:
+Type | +Description | +
---|---|
ConflictError | +If the item already exists in the database. | +
def sync_prep_create_item(
+ self,
+ item: stac_fastapi.types.stac.Item,
+ base_url: str,
+ exist_ok: bool = False
+) -> stac_fastapi.types.stac.Item
+
Prepare an item for insertion into the database.
+This method performs pre-insertion preparation on the given item,
+such as checking if the collection the item belongs to exists,
+and optionally verifying that an item with the same ID does not already exist in the database.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item | +Item | +The item to be inserted into the database. | +None | +
base_url | +str | +The base URL used for constructing URLs for the item. | +None | +
exist_ok | +bool | +Indicates whether the item can exist already. | +None | +
Returns:
+Type | +Description | +
---|---|
Item | +The item after preparation is done. | +
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the collection that the item belongs to does not exist in the database. | +
ConflictError | +If an item with the same ID already exists in the collection. | +
def update_collection(
+ self,
+ collection_id: str,
+ collection: stac_fastapi.types.stac.Collection,
+ refresh: bool = False
+)
+
Update a collection from the database.
+Args: + self: The instance of the object calling this function. + collection_id (str): The ID of the collection to be updated. + collection (Collection): The Collection object to be used for the update.
+Raises:
+ NotFoundError: If the collection with the given collection_id is not found in the database.
Notes:
+ This function updates the collection in the database using the specified collection_id and with the collection specified in the Collection object. If the collection is not found, a NotFoundError is raised.
class Geometry(
+ *args,
+ **kwargs
+)
+
Base class for protocol classes.
+Protocol classes are defined as::
+class Proto(Protocol):
+ def meth(self) -> int:
+ ...
+
Such classes are primarily used with static type checkers that recognize +structural subtyping (static duck-typing), for example::
+class C:
+ def meth(self) -> int:
+ return 0
+
+def func(x: Proto) -> int:
+ return x.meth()
+
+func(C()) # Passes static type check
+
See PEP 544 for details. Protocol classes decorated with runtime_checkable() act as simple-minded runtime protocols that check only the presence of given attributes, ignoring their type signatures.
+library version.
+Aggregation Extension.
+class AggregationConformanceClasses(
+ /,
+ *args,
+ **kwargs
+)
+
Conformance classes for the Aggregation extension.
+See +stac-api-extensions/aggregation
+AGGREGATION
+
name
+
value
+
class AggregationExtension(
+ schema_href: Union[str, NoneType] = None,
+ client: Union[stac_fastapi.extensions.core.aggregation.client.AsyncBaseAggregationClient, stac_fastapi.extensions.core.aggregation.client.BaseAggregationClient] = NOTHING,
+ conformance_classes: List[str] = [<AggregationConformanceClasses.AGGREGATION: 'https://api.stacspec.org/v0.3.0/aggregation'>],
+ router: fastapi.routing.APIRouter = NOTHING
+)
+
Aggregation Extension.
+The purpose of the Aggregation Extension is to provide an endpoint similar to +the Search endpoint (/search), but which will provide aggregated information +on matching Items rather than the Items themselves. This is highly influenced +by the Elasticsearch and OpenSearch aggregation endpoint, but with a more +regular structure for responses.
+The Aggregation extension adds several endpoints which allow the retrieval of
+available aggregation fields and aggregation buckets based on a search query:
+ GET /aggregations
+ POST /aggregations
+ GET /collections/{collection_id}/aggregations
+ POST /collections/{collection_id}/aggregations
+ GET /aggregate
+ POST /aggregate
+ GET /collections/{collection_id}/aggregate
+ POST /collections/{collection_id}/aggregate
+github.com/stac-api-extensions/aggregation/blob/main/README.md
+Name | +Type | +Description | +Default | +
---|---|---|---|
conformance_classes | +None | +Conformance classes provided by the extension | +None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
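+To tie the endpoints above to an application, a minimal registration sketch; the import paths follow the module layout shown on these pages, and a real deployment would pass a concrete aggregation client backed by its database rather than the no-op base client.
python
+# Sketch only: mount the aggregation routes on a FastAPI app.
+from fastapi import FastAPI
+from stac_fastapi.extensions.core.aggregation import AggregationExtension
+from stac_fastapi.extensions.core.aggregation.client import BaseAggregationClient
+
+app = FastAPI()
+aggregation = AggregationExtension(client=BaseAggregationClient())
+aggregation.register(app)  # adds the /aggregations and /aggregate routes listed above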
Aggregation extensions clients.
+class AsyncBaseAggregationClient(
+
+)
+
Defines an async pattern for implementing the STAC aggregation extension.
+def aggregate(
+ self,
+ collection_id: Union[str, NoneType] = None,
+ aggregations: Union[str, List[str], NoneType] = None,
+ collections: Union[List[str], NoneType] = None,
+ ids: Union[List[str], NoneType] = None,
+ bbox: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]], NoneType] = None,
+ intersects: Union[typing_extensions.Annotated[Union[geojson_pydantic.geometries.Point, geojson_pydantic.geometries.MultiPoint, geojson_pydantic.geometries.LineString, geojson_pydantic.geometries.MultiLineString, geojson_pydantic.geometries.Polygon, geojson_pydantic.geometries.MultiPolygon, geojson_pydantic.geometries.GeometryCollection], FieldInfo(annotation=NoneType, required=True, discriminator='type')], NoneType] = None,
+ datetime: Union[datetime.datetime, Tuple[datetime.datetime, datetime.datetime], Tuple[datetime.datetime, NoneType], Tuple[NoneType, datetime.datetime], NoneType] = None,
+ limit: Union[int, NoneType] = 10,
+ **kwargs
+) -> stac_fastapi.extensions.core.aggregation.types.AggregationCollection
+
Return the aggregation buckets for a given search result
+def get_aggregations(
+ self,
+ collection_id: Union[str, NoneType] = None,
+ **kwargs
+) -> stac_fastapi.extensions.core.aggregation.types.AggregationCollection
+
Get the aggregations available for the given collection_id.
+If collection_id is None, returns the available aggregations over all +collections.
+class BaseAggregationClient(
+
+)
+
Defines a pattern for implementing the STAC aggregation extension.
+def aggregate(
+ self,
+ collection_id: Union[str, NoneType] = None,
+ **kwargs
+) -> stac_fastapi.extensions.core.aggregation.types.AggregationCollection
+
Return the aggregation buckets for a given search result
+def get_aggregations(
+ self,
+ collection_id: Union[str, NoneType] = None,
+ **kwargs
+) -> stac_fastapi.extensions.core.aggregation.types.AggregationCollection
+
Get the aggregations available for the given collection_id.
+If collection_id is None, returns the available aggregations over all +collections.
+Aggregation extension module.
+class AggregationExtension(
+ schema_href: Union[str, NoneType] = None,
+ client: Union[stac_fastapi.extensions.core.aggregation.client.AsyncBaseAggregationClient, stac_fastapi.extensions.core.aggregation.client.BaseAggregationClient] = NOTHING,
+ conformance_classes: List[str] = [<AggregationConformanceClasses.AGGREGATION: 'https://api.stacspec.org/v0.3.0/aggregation'>],
+ router: fastapi.routing.APIRouter = NOTHING
+)
+
Aggregation Extension.
+The purpose of the Aggregation Extension is to provide an endpoint similar to +the Search endpoint (/search), but which will provide aggregated information +on matching Items rather than the Items themselves. This is highly influenced +by the Elasticsearch and OpenSearch aggregation endpoint, but with a more +regular structure for responses.
+The Aggregation extension adds several endpoints which allow the retrieval of
+available aggregation fields and aggregation buckets based on a search query:
+ GET /aggregations
+ POST /aggregations
+ GET /collections/{collection_id}/aggregations
+ POST /collections/{collection_id}/aggregations
+ GET /aggregate
+ POST /aggregate
+ GET /collections/{collection_id}/aggregate
+ POST /collections/{collection_id}/aggregate
+github.com/stac-api-extensions/aggregation/blob/main/README.md
+Name | +Type | +Description | +Default | +
---|---|---|---|
conformance_classes | +None | +Conformance classes provided by the extension | +None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Request model for the Aggregation extension.
+class AggregationExtensionGetRequest(
+ collections: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ ids: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ bbox: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ intersects: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ datetime: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ limit: typing_extensions.Annotated[Union[typing_extensions.Annotated[int, Gt(gt=0), AfterValidator(func=<function crop at 0x7f6e23133040>)], NoneType], Query(PydanticUndefined)] = 10,
+ aggregations: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None
+)
+
Aggregation Extension GET request model.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into the format that matches the signature of the
+endpoint.
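+As an illustration (collection id and aggregation names are placeholders), the GET model collects the raw query-string values and kwargs() reshapes them to match the aggregate endpoint's signature.
python
+# Sketch only: query-string values arrive as comma-separated strings.
+req = AggregationExtensionGetRequest(
+    collections="sentinel-2-l2a",
+    aggregations="total_count,datetime_frequency",
+    limit=10,
+)
+params = req.kwargs()  # dict keyed to the endpoint's parameters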
+class AggregationExtensionPostRequest(
+ /,
+ **data: 'Any'
+)
+
Aggregation Extension POST request model.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model class with validated data.
+Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct() generally respects the model_config.extra setting on the provided model.
+That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
def validate_bbox(
+ v: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]]]
+) -> Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]]]
+
def validate_datetime(
+ value: str
+) -> str
+
def validate_spatial(
+ values: Dict[str, Any]
+) -> Dict[str, Any]
+
end_date
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+spatial_filter
+
Return a geojson-pydantic object representing the spatial filter for the search request.
+Check for both because the bbox and intersects parameters are mutually exclusive.
start_date
+
def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy instead. If you need include or exclude, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self: 'BaseModel',
+ context: 'Any',
+ /
+) -> 'None'
+
We need to both initialize private attributes and call the user-defined model_post_init
+method.
+Aggregation Extension types.
+class Aggregation(
+ /,
+ *args,
+ **kwargs
+)
+
A STAC aggregation.
+buckets
+
overflow
+
value
+
def clear(
+ ...
+)
+
D.clear() -> None. Remove all items from D.
+def copy(
+ ...
+)
+
D.copy() -> a shallow copy of D
+def fromkeys(
+ iterable,
+ value=None,
+ /
+)
+
Create a new dictionary with keys from iterable and values set to value.
+def get(
+ self,
+ key,
+ default=None,
+ /
+)
+
Return the value for key if key is in the dictionary, else default.
+def items(
+ ...
+)
+
D.items() -> a set-like object providing a view on D's items
+def keys(
+ ...
+)
+
D.keys() -> a set-like object providing a view on D's keys
+def pop(
+ ...
+)
+
D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+If key is not found, d is returned if given, otherwise KeyError is raised
+def popitem(
+ self,
+ /
+)
+
Remove and return a (key, value) pair as a 2-tuple.
+Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.
+def setdefault(
+ self,
+ key,
+ default=None,
+ /
+)
+
Insert key with a value of default if key is not in the dictionary.
+Return the value for key if key is in the dictionary, else default.
+def update(
+ ...
+)
+
D.update([E, ]**F) -> None. Update D from dict/iterable E and F.
+If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]
+def values(
+ ...
+)
+
D.values() -> an object providing a view on D's values
+class AggregationCollection(
+ /,
+ *args,
+ **kwargs
+)
+
STAC Item Aggregation Collection.
+def clear(
+ ...
+)
+
D.clear() -> None. Remove all items from D.
+def copy(
+ ...
+)
+
D.copy() -> a shallow copy of D
+def fromkeys(
+ iterable,
+ value=None,
+ /
+)
+
Create a new dictionary with keys from iterable and values set to value.
+def get(
+ self,
+ key,
+ default=None,
+ /
+)
+
Return the value for key if key is in the dictionary, else default.
+def items(
+ ...
+)
+
D.items() -> a set-like object providing a view on D's items
+def keys(
+ ...
+)
+
D.keys() -> a set-like object providing a view on D's keys
+def pop(
+ ...
+)
+
D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+If key is not found, d is returned if given, otherwise KeyError is raised
+def popitem(
+ self,
+ /
+)
+
Remove and return a (key, value) pair as a 2-tuple.
+Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.
+def setdefault(
+ self,
+ key,
+ default=None,
+ /
+)
+
Insert key with a value of default if key is not in the dictionary.
+Return the value for key if key is in the dictionary, else default.
+def update(
+ ...
+)
+
D.update([E, ]**F) -> None. Update D from dict/iterable E and F.
+If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]
+def values(
+ ...
+)
+
D.values() -> an object providing a view on D's values
+class Bucket(
+ /,
+ *args,
+ **kwargs
+)
+
A STAC aggregation bucket.
+frequency
+
to
+
def clear(
+ ...
+)
+
D.clear() -> None. Remove all items from D.
+def copy(
+ ...
+)
+
D.copy() -> a shallow copy of D
+def fromkeys(
+ iterable,
+ value=None,
+ /
+)
+
Create a new dictionary with keys from iterable and values set to value.
+def get(
+ self,
+ key,
+ default=None,
+ /
+)
+
Return the value for key if key is in the dictionary, else default.
+def items(
+ ...
+)
+
D.items() -> a set-like object providing a view on D's items
+def keys(
+ ...
+)
+
D.keys() -> a set-like object providing a view on D's keys
+def pop(
+ ...
+)
+
D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+If key is not found, d is returned if given, otherwise KeyError is raised
+def popitem(
+ self,
+ /
+)
+
Remove and return a (key, value) pair as a 2-tuple.
+Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.
+def setdefault(
+ self,
+ key,
+ default=None,
+ /
+)
+
Insert key with a value of default if key is not in the dictionary.
+Return the value for key if key is in the dictionary, else default.
+def update(
+ ...
+)
+
D.update([E, ]**F) -> None. Update D from dict/iterable E and F.
+If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]
+def values(
+ ...
+)
+
D.values() -> an object providing a view on D's values
+collection-search extensions clients.
+class AsyncBaseCollectionSearchClient(
+
+)
+
Defines a pattern for implementing the STAC collection-search POST extension.
+def post_all_collections(
+ self,
+ search_request: stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchPostRequest,
+ **kwargs
+) -> stac_fastapi.types.stac.ItemCollection
+
Get all available collections.
+Called with POST /collections.
Returns:
+Type | +Description | +
---|---|
None | +A list of collections. | +
class BaseCollectionSearchClient(
+
+)
+
Defines a pattern for implementing the STAC collection-search POST extension.
+def post_all_collections(
+ self,
+ search_request: stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchPostRequest,
+ **kwargs
+) -> stac_fastapi.types.stac.ItemCollection
+
Get all available collections.
+Called with POST /collections.
Returns:
+Type | +Description | +
---|---|
None | +A list of collections. | +
Collection-Search extension.
+class CollectionSearchExtension(
+ GET: stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchGetRequest = <class 'stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchGetRequest'>,
+ conformance_classes: List[str] = [<ConformanceClasses.COLLECTIONSEARCH: 'https://api.stacspec.org/v1.0.0-rc.1/collection-search'>, <ConformanceClasses.BASIS: 'http://www.opengis.net/spec/ogcapi-common-2/1.0/conf/simple-query'>],
+ schema_href: Union[str, NoneType] = None
+)
+
Collection-Search Extension.
+The Collection-Search extension adds functionality to the GET - /collections
+endpoint which allows the caller to include or exclude specific fields from the API
+response.
+Registering this extension with the application has the added effect of
+removing the ItemCollection response model from the /search endpoint, as
+the Fields extension allows the API to return potentially invalid responses
+by excluding fields which are required by the STAC spec, such as geometry.
stac-api-extensions/collection-search
+Name | +Type | +Description | +Default | +
---|---|---|---|
conformance_classes | +list | +Defines the list of conformance classes for the extension |
+None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +fastapi.FastAPI | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
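+A small usage sketch; the import path mirrors the module layout shown here, and the GET /collections handler that would consume the request model is out of scope for this page.
python
+# Sketch only: instantiate the extension and grab its GET request model.
+from stac_fastapi.extensions.core.collection_search import CollectionSearchExtension
+
+collection_search = CollectionSearchExtension()
+get_model = collection_search.get_request_model("GET")  # -> BaseCollectionSearchGetRequest
+print(collection_search.conformance_classes)  # advertised via the landing page conformance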
class CollectionSearchPostExtension(
+ client: Union[stac_fastapi.extensions.core.collection_search.client.AsyncBaseCollectionSearchClient, stac_fastapi.extensions.core.collection_search.client.BaseCollectionSearchClient],
+ settings: stac_fastapi.types.config.ApiSettings,
+ conformance_classes: List[str] = [<ConformanceClasses.COLLECTIONSEARCH: 'https://api.stacspec.org/v1.0.0-rc.1/collection-search'>, <ConformanceClasses.BASIS: 'http://www.opengis.net/spec/ogcapi-common-2/1.0/conf/simple-query'>],
+ schema_href: Union[str, NoneType] = None,
+ router: fastapi.routing.APIRouter = NOTHING,
+ GET: stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchGetRequest = <class 'stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchGetRequest'>,
+ POST: stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchPostRequest = <class 'stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchPostRequest'>
+)
+
Collection-Search Extension.
Extends the collection-search extension with an additional +POST - /collections endpoint
+NOTE: the POST - /collections endpoint can be conflicting with the +POST /collections endpoint registered for the Transaction extension.
+stac-api-extensions/collection-search
+Name | +Type | +Description | +Default | +
---|---|---|---|
conformance_classes | +list | +Defines the list of conformance classes for the extension |
+None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class ConformanceClasses(
+ /,
+ *args,
+ **kwargs
+)
+
Conformance classes for the Collection-Search extension.
+See +stac-api-extensions/collection-search
+BASIS
+
COLLECTIONSEARCH
+
FIELDS
+
FILTER
+
FREETEXT
+
QUERY
+
SORT
+
name
+
value
+
Collection-Search extension module.
+class CollectionSearchExtension(
+ GET: stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchGetRequest = <class 'stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchGetRequest'>,
+ conformance_classes: List[str] = [<ConformanceClasses.COLLECTIONSEARCH: 'https://api.stacspec.org/v1.0.0-rc.1/collection-search'>, <ConformanceClasses.BASIS: 'http://www.opengis.net/spec/ogcapi-common-2/1.0/conf/simple-query'>],
+ schema_href: Union[str, NoneType] = None
+)
+
Collection-Search Extension.
+The Collection-Search extension adds functionality to the GET - /collections
+endpoint which allows the caller to include or exclude specific fields from the API
+response.
+Registering this extension with the application has the added effect of
+removing the ItemCollection response model from the /search endpoint, as
+the Fields extension allows the API to return potentially invalid responses
+by excluding fields which are required by the STAC spec, such as geometry.
stac-api-extensions/collection-search
+Name | +Type | +Description | +Default | +
---|---|---|---|
conformance_classes | +list | +Defines the list of conformance classes for the extension |
+None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +fastapi.FastAPI | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class CollectionSearchPostExtension(
+ client: Union[stac_fastapi.extensions.core.collection_search.client.AsyncBaseCollectionSearchClient, stac_fastapi.extensions.core.collection_search.client.BaseCollectionSearchClient],
+ settings: stac_fastapi.types.config.ApiSettings,
+ conformance_classes: List[str] = [<ConformanceClasses.COLLECTIONSEARCH: 'https://api.stacspec.org/v1.0.0-rc.1/collection-search'>, <ConformanceClasses.BASIS: 'http://www.opengis.net/spec/ogcapi-common-2/1.0/conf/simple-query'>],
+ schema_href: Union[str, NoneType] = None,
+ router: fastapi.routing.APIRouter = NOTHING,
+ GET: stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchGetRequest = <class 'stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchGetRequest'>,
+ POST: stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchPostRequest = <class 'stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchPostRequest'>
+)
+
Collection-Search Extension.
Extends the collection-search extension with an additional +POST - /collections endpoint
+NOTE: the POST - /collections endpoint can be conflicting with the +POST /collections endpoint registered for the Transaction extension.
+stac-api-extensions/collection-search
+Name | +Type | +Description | +Default | +
---|---|---|---|
conformance_classes | +list | +Defines the list of conformance classes for the extension |
+None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class ConformanceClasses(
+ /,
+ *args,
+ **kwargs
+)
+
Conformance classes for the Collection-Search extension.
+See +stac-api-extensions/collection-search
+BASIS
+
COLLECTIONSEARCH
+
FIELDS
+
FILTER
+
FREETEXT
+
QUERY
+
SORT
+
name
+
value
+
Request models for the Collection-Search extension.
+class BaseCollectionSearchGetRequest(
+ bbox: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ datetime: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ limit: typing_extensions.Annotated[Union[typing_extensions.Annotated[int, Gt(gt=0), AfterValidator(func=<function crop at 0x7f6e23133040>)], NoneType], Query(PydanticUndefined)] = 10
+)
+
Basic additional Collection-Search parameters for the GET request.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into the format that matches the signature of the
+endpoint.
+class BaseCollectionSearchPostRequest(
+ /,
+ **data: 'Any'
+)
+
Collection-Search POST model.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model class with validated data.
+Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct() generally respects the model_config.extra setting on the provided model.
+That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
def validate_bbox(
+ v: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]]]
+) -> Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]]]
+
validate bbox.
+def validate_datetime(
+ value: str
+) -> str
+
validate datetime.
+end_date
+
end date.
+model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+start_date
+
start date.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy instead. If you need include or exclude, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self: 'BaseModel',
+ context: 'Any',
+ /
+) -> 'None'
+
This function is meant to behave like a BaseModel method to initialise private attributes.
+It takes context as an argument since that's what pydantic-core passes when calling it.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
self | +None | +The BaseModel instance. | +None | +
context | +None | +The context. | +None | +
Fields extension.
+class FieldsExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Fields Extension.
+The Fields extension adds functionality to the /search
endpoint which
+allows the caller to include or exclude specific fields from the API response.
+Registering this extension with the application has the added effect of
+removing the ItemCollection
response model from the /search
endpoint, as
+the Fields extension allows the API to return potentially invalid responses
+by excluding fields which are required by the STAC spec, such as geometry.
Name | +Type | +Description | +Default | +
---|---|---|---|
default_includes | +set | +defines the default set of included fields. | +None | +
conformance_classes | +list | +Defines the list of conformance classes for the extension |
+None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +fastapi.FastAPI | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
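As a concrete illustration of the include/exclude behaviour described above, here is a minimal client-side sketch showing the same field selection through the GET and POST request styles. The base URL, property names, and limit are placeholders, not values defined by this package.

```python
# Minimal sketch, assuming a STAC API with the Fields extension enabled is
# reachable at the placeholder URL below.
import httpx

BASE = "http://localhost:8080"  # placeholder deployment URL

# GET style: comma-separated field names; a leading "-" excludes a field.
get_resp = httpx.get(
    f"{BASE}/search",
    params={"fields": "properties.datetime,-assets", "limit": 1},
)

# POST style: explicit include/exclude lists in the request body.
post_resp = httpx.post(
    f"{BASE}/search",
    json={
        "fields": {"include": ["properties.datetime"], "exclude": ["assets"]},
        "limit": 1,
    },
)

for resp in (get_resp, post_resp):
    resp.raise_for_status()
    print(resp.json()["features"])
```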
Fields extension module.
+class FieldsExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Fields Extension.
+The Fields extension adds functionality to the /search
endpoint which
+allows the caller to include or exclude specific fields from the API response.
+Registering this extension with the application has the added effect of
+removing the ItemCollection
response model from the /search
endpoint, as
+the Fields extension allows the API to return potentially invalid responses
+by excluding fields which are required by the STAC spec, such as geometry.
Name | +Type | +Description | +Default | +
---|---|---|---|
default_includes | +set | +defines the default set of included fields. | +None | +
conformance_classes | +list | +Defines the list of conformance classes for the extension |
+None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +fastapi.FastAPI | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Request models for the fields extension.
+class FieldsExtensionGetRequest(
+ fields: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None
+)
+
Additional fields for the GET request.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into a format which matches the signature of the
+endpoint.
+class FieldsExtensionPostRequest(
+ /,
+ **data: 'Any'
+)
+
Additional fields and schema for the POST request.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
+class PostFieldsExtension(
+ /,
+ **data: 'Any'
+)
+
FieldsExtension.
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +set of fields to include. | +None | +
exclude | +None | +set of fields to exclude. | +None | +
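To make the include and exclude attributes above concrete, here is a minimal sketch that constructs the model directly and serialises it with the model_dump helper documented below. The import path is assumed from the package layout, and the property names are illustrative.

```python
# Minimal sketch; the import path is an assumption based on the package layout,
# and the property names are illustrative only.
from stac_fastapi.extensions.core.fields.request import PostFieldsExtension

fields = PostFieldsExtension(
    include={"id", "geometry", "properties.datetime"},
    exclude={"assets"},
)

# Serialise with the Pydantic helper documented on this page.
print(fields.model_dump(exclude_none=True))
```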
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
Filter extensions clients.
+class AsyncBaseFiltersClient(
+
+)
+
Defines a pattern for implementing the STAC filter extension.
+def get_queryables(
+ self,
+ collection_id: Union[str, NoneType] = None,
+ **kwargs
+) -> Dict[str, Any]
+
Get the queryables available for the given collection_id.
+If collection_id is None, returns the intersection of all queryables over all +collections.
+This base implementation returns a blank queryable schema. This is not allowed +under OGC CQL but it is allowed by the STAC API Filter Extension +github.com/radiantearth/stac-api-spec/tree/master/fragments/filter#queryables
+class BaseFiltersClient(
+
+)
+
Defines a pattern for implementing the STAC filter extension.
+def get_queryables(
+ self,
+ collection_id: Union[str, NoneType] = None,
+ **kwargs
+) -> Dict[str, Any]
+
Get the queryables available for the given collection_id.
+If collection_id is None, returns the intersection of all queryables over all +collections.
+This base implementation returns a blank queryable schema. This is not allowed +under OGC CQL but it is allowed by the STAC API Filter Extension +stac-api-extensions/filter#queryables
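Because the base implementation deliberately returns a blank schema, a backend will usually override get_queryables. Below is a minimal sketch of such an override; the class name, $id, and the listed properties are illustrative and not part of this package.

```python
# Minimal sketch of a custom filters client; the returned schema ($id and the
# listed properties) is illustrative only.
from typing import Any, Dict, Optional

from stac_fastapi.extensions.core.filter.client import BaseFiltersClient


class StaticFiltersClient(BaseFiltersClient):
    """Serve a fixed queryables document instead of the blank default."""

    def get_queryables(
        self, collection_id: Optional[str] = None, **kwargs
    ) -> Dict[str, Any]:
        # A JSON Schema document, as expected by the queryables endpoint.
        return {
            "$schema": "https://json-schema.org/draft/2019-09/schema",
            "$id": "https://example.com/queryables",
            "type": "object",
            "title": "Queryables",
            "properties": {
                "id": {"type": "string"},
                "datetime": {"type": "string", "format": "date-time"},
                "eo:cloud_cover": {"type": "number", "minimum": 0, "maximum": 100},
            },
        }
```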
Filter Extension.
+class FilterConformanceClasses(
+ /,
+ *args,
+ **kwargs
+)
+
Conformance classes for the Filter extension.
+See +stac-api-extensions/filter
+ACCENT_CASE_INSENSITIVE_COMPARISON
+
ADVANCED_COMPARISON_OPERATORS
+
ARITHMETIC
+
ARRAYS
+
BASIC_CQL2
+
BASIC_SPATIAL_OPERATORS
+
CQL2_JSON
+
CQL2_TEXT
+
FEATURES_FILTER
+
FILTER
+
FUNCTIONS
+
ITEM_SEARCH_FILTER
+
PROPERTY_PROPERTY
+
SPATIAL_OPERATORS
+
TEMPORAL_OPERATORS
+
name
+
value
+
class FilterExtension(
+ schema_href: Union[str, NoneType] = None,
+ client: Union[stac_fastapi.extensions.core.filter.client.AsyncBaseFiltersClient, stac_fastapi.extensions.core.filter.client.BaseFiltersClient] = NOTHING,
+ conformance_classes: List[str] = [<FilterConformanceClasses.FILTER: 'http://www.opengis.net/spec/ogcapi-features-3/1.0/conf/filter'>, <FilterConformanceClasses.FEATURES_FILTER: 'http://www.opengis.net/spec/ogcapi-features-3/1.0/conf/features-filter'>, <FilterConformanceClasses.ITEM_SEARCH_FILTER: 'https://api.stacspec.org/v1.0.0-rc.2/item-search#filter'>, <FilterConformanceClasses.BASIC_CQL2: 'http://www.opengis.net/spec/cql2/1.0/conf/basic-cql2'>, <FilterConformanceClasses.CQL2_JSON: 'http://www.opengis.net/spec/cql2/1.0/conf/cql2-json'>, <FilterConformanceClasses.CQL2_TEXT: 'http://www.opengis.net/spec/cql2/1.0/conf/cql2-text'>, 'http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators', 'http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators'],
+ router: fastapi.routing.APIRouter = NOTHING,
+ response_class: Type[starlette.responses.Response] = <class 'stac_fastapi.api.models.JSONSchemaResponse'>
+)
+
Filter Extension.
The filter extension adds several endpoints which allow the retrieval of +queryables and provides an expressive mechanism for searching based on Item properties.
+Name | +Type | +Description | +Default | +
---|---|---|---|
client | +None | +Queryables endpoint logic | +None | +
conformance_classes | +None | +Conformance classes provided by the extension | +None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
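A minimal wiring sketch for the register() method documented above. With no arguments the extension falls back to the default BaseFiltersClient, so the queryables endpoint serves the blank schema described earlier; in a full deployment the extension is normally passed to the application factory together with a real client.

```python
# Minimal sketch: attach the filter extension to a bare FastAPI app.
from fastapi import FastAPI

from stac_fastapi.extensions.core import FilterExtension

app = FastAPI()
FilterExtension().register(app)  # uses the default BaseFiltersClient
```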
Filter extension module.
+class FilterExtension(
+ schema_href: Union[str, NoneType] = None,
+ client: Union[stac_fastapi.extensions.core.filter.client.AsyncBaseFiltersClient, stac_fastapi.extensions.core.filter.client.BaseFiltersClient] = NOTHING,
+ conformance_classes: List[str] = [<FilterConformanceClasses.FILTER: 'http://www.opengis.net/spec/ogcapi-features-3/1.0/conf/filter'>, <FilterConformanceClasses.FEATURES_FILTER: 'http://www.opengis.net/spec/ogcapi-features-3/1.0/conf/features-filter'>, <FilterConformanceClasses.ITEM_SEARCH_FILTER: 'https://api.stacspec.org/v1.0.0-rc.2/item-search#filter'>, <FilterConformanceClasses.BASIC_CQL2: 'http://www.opengis.net/spec/cql2/1.0/conf/basic-cql2'>, <FilterConformanceClasses.CQL2_JSON: 'http://www.opengis.net/spec/cql2/1.0/conf/cql2-json'>, <FilterConformanceClasses.CQL2_TEXT: 'http://www.opengis.net/spec/cql2/1.0/conf/cql2-text'>, 'http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators', 'http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators'],
+ router: fastapi.routing.APIRouter = NOTHING,
+ response_class: Type[starlette.responses.Response] = <class 'stac_fastapi.api.models.JSONSchemaResponse'>
+)
+
Filter Extension.
The filter extension adds several endpoints which allow the retrieval of +queryables and provides an expressive mechanism for searching based on Item properties.
+Name | +Type | +Description | +Default | +
---|---|---|---|
client | +None | +Queryables endpoint logic | +None | +
conformance_classes | +None | +Conformance classes provided by the extension | +None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Filter extension request models.
+FilterLang
+
class FilterExtensionGetRequest(
+ filter: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ filter_crs: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ filter_lang: typing_extensions.Annotated[Union[Literal['cql-json', 'cql2-json', 'cql2-text'], NoneType], Query(PydanticUndefined)] = 'cql2-text'
+)
+
Filter extension GET request model.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into a format which matches the signature of the
+endpoint.
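For orientation, here is the same predicate expressed in both filter languages accepted by these request models; the property name and threshold are illustrative. The GET parameters map onto the model above, while the cql2-json form is what the POST model below typically carries.

```python
# Minimal sketch of equivalent filter payloads; property name and value are
# illustrative only.
get_params = {
    "filter-lang": "cql2-text",
    "filter": "eo:cloud_cover < 10",
}

post_body = {
    "filter-lang": "cql2-json",
    "filter": {"op": "<", "args": [{"property": "eo:cloud_cover"}, 10]},
}
```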
+class FilterExtensionPostRequest(
+ /,
+ **data: 'Any'
+)
+
Filter extension POST request model.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
Free-text extension.
+class FreeTextAdvancedExtension(
+ conformance_classes: List[str] = [<FreeTextConformanceClasses.SEARCH_ADVANCED: 'https://api.stacspec.org/v1.0.0-rc.1/item-search#advanced-free-text'>, <FreeTextConformanceClasses.COLLECTIONS_ADVANCED: 'https://api.stacspec.org/v1.0.0-rc.1/collection-search#advanced-free-text'>, <FreeTextConformanceClasses.ITEMS_ADVANCED: 'https://api.stacspec.org/v1.0.0-rc.1/ogcapi-features#advanced-free-text'>],
+ schema_href: Union[str, NoneType] = None
+)
+
Free-text Extension.
+The Free-text extension adds an additional q
parameter to /search
requests which
+allows the caller to perform free-text queries against STAC metadata.
stac-api-extensions/freetext-search#advanced
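A minimal sketch of what an advanced free-text search could look like from a client, assuming a deployment at http://localhost:8080 and the httpx library; only the q parameter itself comes from the extension, and the boolean expression syntax follows the advanced conformance class linked above.
import httpx

STAC_API = "http://localhost:8080"  # hypothetical deployment URL

# Advanced free-text allows boolean-style expressions in the `q` parameter.
resp = httpx.get(f"{STAC_API}/search", params={"q": '"climate model" AND temperature'})
resp.raise_for_status()
print(len(resp.json()["features"]))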
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class FreeTextConformanceClasses(
+ /,
+ *args,
+ **kwargs
+)
+
Conformance classes for the Free-Text extension.
+See stac-api-extensions/freetext-search
+COLLECTIONS
+
COLLECTIONS_ADVANCED
+
ITEMS
+
ITEMS_ADVANCED
+
SEARCH
+
SEARCH_ADVANCED
+
name
+
value
+
class FreeTextExtension(
+ conformance_classes: List[str] = [<FreeTextConformanceClasses.SEARCH: 'https://api.stacspec.org/v1.0.0-rc.1/item-search#free-text'>, <FreeTextConformanceClasses.COLLECTIONS: 'https://api.stacspec.org/v1.0.0-rc.1/collection-search#free-text'>, <FreeTextConformanceClasses.ITEMS: 'https://api.stacspec.org/v1.0.0-rc.1/ogcapi-features#free-text'>],
+ schema_href: Union[str, NoneType] = None
+)
+
Free-text Extension.
+The Free-text extension adds an additional q
parameter to /search
requests which
+allows the caller to perform free-text queries against STAC metadata.
stac-api-extensions/freetext-search?tab=readme-ov-file#basic
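A minimal sketch of instantiating the basic extension and calling the documented register() method; the bare FastAPI app and the import path are assumptions for illustration.
from fastapi import FastAPI
from stac_fastapi.extensions.core import FreeTextExtension  # assumed import path

app = FastAPI()
free_text = FreeTextExtension()  # default conformance classes as shown in the signature above
free_text.register(app)          # wires the extension into the application
print(free_text.conformance_classes)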
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Free-text extension module.
+class FreeTextAdvancedExtension(
+ conformance_classes: List[str] = [<FreeTextConformanceClasses.SEARCH_ADVANCED: 'https://api.stacspec.org/v1.0.0-rc.1/item-search#advanced-free-text'>, <FreeTextConformanceClasses.COLLECTIONS_ADVANCED: 'https://api.stacspec.org/v1.0.0-rc.1/collection-search#advanced-free-text'>, <FreeTextConformanceClasses.ITEMS_ADVANCED: 'https://api.stacspec.org/v1.0.0-rc.1/ogcapi-features#advanced-free-text'>],
+ schema_href: Union[str, NoneType] = None
+)
+
Free-text Extension.
+The Free-text extension adds an additional q
parameter to /search
requests which
+allows the caller to perform free-text queries against STAC metadata.
stac-api-extensions/freetext-search?tab=readme-ov-file#advanced
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class FreeTextConformanceClasses(
+ /,
+ *args,
+ **kwargs
+)
+
Conformance classes for the Free-Text extension.
+See stac-api-extensions/freetext-search
+COLLECTIONS
+
COLLECTIONS_ADVANCED
+
ITEMS
+
ITEMS_ADVANCED
+
SEARCH
+
SEARCH_ADVANCED
+
name
+
value
+
class FreeTextExtension(
+ conformance_classes: List[str] = [<FreeTextConformanceClasses.SEARCH: 'https://api.stacspec.org/v1.0.0-rc.1/item-search#free-text'>, <FreeTextConformanceClasses.COLLECTIONS: 'https://api.stacspec.org/v1.0.0-rc.1/collection-search#free-text'>, <FreeTextConformanceClasses.ITEMS: 'https://api.stacspec.org/v1.0.0-rc.1/ogcapi-features#free-text'>],
+ schema_href: Union[str, NoneType] = None
+)
+
Free-text Extension.
+The Free-text extension adds an additional q
parameter to /search
requests which
+allows the caller to perform free-text queries against STAC metadata.
stac-api-extensions/freetext-search?tab=readme-ov-file#basic
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Request model for the Free-text extension.
+class FreeTextAdvancedExtensionGetRequest(
+ q: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None
+)
+
Free-text Extension GET request model.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into a format which matches the signature of the
+endpoint.
+class FreeTextAdvancedExtensionPostRequest(
+ /,
+ **data: 'Any'
+)
+
Free-text Extension POST request model.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
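A small, self-contained illustration (plain Pydantic, not specific to this project) of the behaviour described in the note: model_construct() performs no validation, so even data of the wrong type is accepted as-is.
from pydantic import BaseModel

class Point(BaseModel):
    x: int
    y: int = 0

p = Point.model_construct(x="not-an-int")  # no validation is run, no error raised
print(p.x)                 # 'not-an-int'
print(p.model_fields_set)  # {'x'}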
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
+class FreeTextExtensionGetRequest(
+ q: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None
+)
+
Free-text Extension GET request model.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into a format which matches the signature of the
+endpoint.
+class FreeTextExtensionPostRequest(
+ /,
+ **data: 'Any'
+)
+
Free-text Extension POST request model.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
stac_api.extensions.core module.
+class AggregationExtension(
+ schema_href: Union[str, NoneType] = None,
+ client: Union[stac_fastapi.extensions.core.aggregation.client.AsyncBaseAggregationClient, stac_fastapi.extensions.core.aggregation.client.BaseAggregationClient] = NOTHING,
+ conformance_classes: List[str] = [<AggregationConformanceClasses.AGGREGATION: 'https://api.stacspec.org/v0.3.0/aggregation'>],
+ router: fastapi.routing.APIRouter = NOTHING
+)
+
Aggregation Extension.
+The purpose of the Aggregation Extension is to provide an endpoint similar to +the Search endpoint (/search), but which will provide aggregated information +on matching Items rather than the Items themselves. This is highly influenced +by the Elasticsearch and OpenSearch aggregation endpoint, but with a more +regular structure for responses.
The Aggregation extension adds several endpoints which allow the retrieval of +available aggregation fields and aggregation buckets based on a search query: + GET /aggregations + POST /aggregations + GET /collections/{collection_id}/aggregations + POST /collections/{collection_id}/aggregations + GET /aggregate + POST /aggregate + GET /collections/{collection_id}/aggregate + POST /collections/{collection_id}/aggregate
+github.com/stac-api-extensions/aggregation/blob/main/README.md
+Name | +Type | +Description | +Default | +
---|---|---|---|
conformance_classes | +None | +Conformance classes provided by the extension | +None | +
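A minimal sketch of exercising the endpoints listed above from a client, assuming a deployment at http://localhost:8080 and the httpx library; the aggregation names and the comma-separated aggregations parameter follow the Aggregation extension README and are illustrative only.
import httpx

STAC_API = "http://localhost:8080"  # hypothetical deployment URL

# Discover which aggregations the API advertises.
available = httpx.get(f"{STAC_API}/aggregations").json()
print([a["name"] for a in available.get("aggregations", [])])

# Request aggregated buckets instead of Items, filtered like a normal search.
resp = httpx.get(
    f"{STAC_API}/aggregate",
    params={"collections": "example-collection", "aggregations": "total_count,datetime_frequency"},
)
print(resp.json())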
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class CollectionSearchExtension(
+ GET: stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchGetRequest = <class 'stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchGetRequest'>,
+ conformance_classes: List[str] = [<ConformanceClasses.COLLECTIONSEARCH: 'https://api.stacspec.org/v1.0.0-rc.1/collection-search'>, <ConformanceClasses.BASIS: 'http://www.opengis.net/spec/ogcapi-common-2/1.0/conf/simple-query'>],
+ schema_href: Union[str, NoneType] = None
+)
+
Collection-Search Extension.
+The Collection-Search extension adds functionality to the GET - /collections
+endpoint which allows the caller to include or exclude specific from the API
+response.
+Registering this extension with the application has the added effect of
+removing the ItemCollection
response model from the /search
endpoint, as
+the Fields extension allows the API to return potentially invalid responses
+by excluding fields which are required by the STAC spec, such as geometry.
stac-api-extensions/collection-search
+Name | +Type | +Description | +Default | +
---|---|---|---|
conformance_classes | +list | +Defines the list of conformance classes for the extension |
+None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +fastapi.FastAPI | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class CollectionSearchPostExtension(
+ client: Union[stac_fastapi.extensions.core.collection_search.client.AsyncBaseCollectionSearchClient, stac_fastapi.extensions.core.collection_search.client.BaseCollectionSearchClient],
+ settings: stac_fastapi.types.config.ApiSettings,
+ conformance_classes: List[str] = [<ConformanceClasses.COLLECTIONSEARCH: 'https://api.stacspec.org/v1.0.0-rc.1/collection-search'>, <ConformanceClasses.BASIS: 'http://www.opengis.net/spec/ogcapi-common-2/1.0/conf/simple-query'>],
+ schema_href: Union[str, NoneType] = None,
+ router: fastapi.routing.APIRouter = NOTHING,
+ GET: stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchGetRequest = <class 'stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchGetRequest'>,
+ POST: stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchPostRequest = <class 'stac_fastapi.extensions.core.collection_search.request.BaseCollectionSearchPostRequest'>
+)
+
Collection-Search Extension.
Extends the collection-search extension with an additional +POST - /collections endpoint.
NOTE: the POST - /collections endpoint can conflict with the +POST /collections endpoint registered for the Transaction extension.
+stac-api-extensions/collection-search
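A minimal sketch of the POST variant, keeping in mind the route clash noted above: with this extension the body of POST /collections is a search payload, not a new Collection. URL and values are illustrative.
import httpx

STAC_API = "http://localhost:8080"  # hypothetical deployment URL
body = {"bbox": [-10, 35, 30, 60], "limit": 5}
resp = httpx.post(f"{STAC_API}/collections", json=body)
print(resp.status_code)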
+Name | +Type | +Description | +Default | +
---|---|---|---|
conformance_classes | +list | +Defines the list of conformance classes for the extension |
+None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class FieldsExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Fields Extension.
+The Fields extension adds functionality to the /search
endpoint which
+allows the caller to include or exclude specific fields from the API response.
+Registering this extension with the application has the added effect of
+removing the ItemCollection
response model from the /search
endpoint, as
+the Fields extension allows the API to return potentially invalid responses
+by excluding fields which are required by the STAC spec, such as geometry.
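A minimal sketch of the include/exclude behaviour via the fields search parameter; the URL and field names are illustrative, and as the warning above notes, excluding spec-required fields such as geometry can produce non-compliant Items.
import httpx

STAC_API = "http://localhost:8080"  # hypothetical deployment URL

# GET form: '-' prefix excludes a field, no prefix (or '+') includes it.
httpx.get(f"{STAC_API}/search", params={"fields": "+properties.datetime,-geometry"})

# POST form: explicit include/exclude lists.
body = {"fields": {"include": ["properties.datetime"], "exclude": ["geometry"]}}
resp = httpx.post(f"{STAC_API}/search", json=body)
print(resp.json()["features"][0].keys())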
Name | +Type | +Description | +Default | +
---|---|---|---|
default_includes | +set | +defines the default set of included fields. | +None | +
conformance_classes | +list | +Defines the list of conformance classes for the extension |
+None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +fastapi.FastAPI | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class FilterExtension(
+ schema_href: Union[str, NoneType] = None,
+ client: Union[stac_fastapi.extensions.core.filter.client.AsyncBaseFiltersClient, stac_fastapi.extensions.core.filter.client.BaseFiltersClient] = NOTHING,
+ conformance_classes: List[str] = [<FilterConformanceClasses.FILTER: 'http://www.opengis.net/spec/ogcapi-features-3/1.0/conf/filter'>, <FilterConformanceClasses.FEATURES_FILTER: 'http://www.opengis.net/spec/ogcapi-features-3/1.0/conf/features-filter'>, <FilterConformanceClasses.ITEM_SEARCH_FILTER: 'https://api.stacspec.org/v1.0.0-rc.2/item-search#filter'>, <FilterConformanceClasses.BASIC_CQL2: 'http://www.opengis.net/spec/cql2/1.0/conf/basic-cql2'>, <FilterConformanceClasses.CQL2_JSON: 'http://www.opengis.net/spec/cql2/1.0/conf/cql2-json'>, <FilterConformanceClasses.CQL2_TEXT: 'http://www.opengis.net/spec/cql2/1.0/conf/cql2-text'>, 'http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators', 'http://www.opengis.net/spec/cql2/1.0/conf/advanced-comparison-operators'],
+ router: fastapi.routing.APIRouter = NOTHING,
+ response_class: Type[starlette.responses.Response] = <class 'stac_fastapi.api.models.JSONSchemaResponse'>
+)
+
Filter Extension.
The filter extension adds several endpoints which allow the retrieval of +queryables and provides an expressive mechanism for searching based on Item properties.
+Name | +Type | +Description | +Default | +
---|---|---|---|
client | +None | +Queryables endpoint logic | +None | +
conformance_classes | +None | +Conformance classes provided by the extension | +None | +
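A minimal sketch of a filtered search using the cql2-text and cql2-json conformance classes listed above; the base URL, property name and threshold are illustrative.
import httpx

STAC_API = "http://localhost:8080"  # hypothetical deployment URL

# GET form with cql2-text.
httpx.get(
    f"{STAC_API}/search",
    params={"filter": "eo:cloud_cover < 15", "filter-lang": "cql2-text"},
)

# POST form with cql2-json.
body = {
    "filter-lang": "cql2-json",
    "filter": {"op": "<", "args": [{"property": "eo:cloud_cover"}, 15]},
}
resp = httpx.post(f"{STAC_API}/search", json=body)
print(len(resp.json()["features"]))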
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class FreeTextAdvancedExtension(
+ conformance_classes: List[str] = [<FreeTextConformanceClasses.SEARCH_ADVANCED: 'https://api.stacspec.org/v1.0.0-rc.1/item-search#advanced-free-text'>, <FreeTextConformanceClasses.COLLECTIONS_ADVANCED: 'https://api.stacspec.org/v1.0.0-rc.1/collection-search#advanced-free-text'>, <FreeTextConformanceClasses.ITEMS_ADVANCED: 'https://api.stacspec.org/v1.0.0-rc.1/ogcapi-features#advanced-free-text'>],
+ schema_href: Union[str, NoneType] = None
+)
+
Free-text Extension.
+The Free-text extension adds an additional q
parameter to /search
requests which
+allows the caller to perform free-text queries against STAC metadata.
stac-api-extensions/freetext-search?tab=readme-ov-file#advanced
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class FreeTextExtension(
+ conformance_classes: List[str] = [<FreeTextConformanceClasses.SEARCH: 'https://api.stacspec.org/v1.0.0-rc.1/item-search#free-text'>, <FreeTextConformanceClasses.COLLECTIONS: 'https://api.stacspec.org/v1.0.0-rc.1/collection-search#free-text'>, <FreeTextConformanceClasses.ITEMS: 'https://api.stacspec.org/v1.0.0-rc.1/ogcapi-features#free-text'>],
+ schema_href: Union[str, NoneType] = None
+)
+
Free-text Extension.
+The Free-text extension adds an additional q
parameter to /search
requests which
+allows the caller to perform free-text queries against STAC metadata.
stac-api-extensions/freetext-search?tab=readme-ov-file#basic
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class PaginationExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Token Pagination.
+Though not strictly an extension, the chosen pagination will modify the form of the +request object. By making pagination an extension class, we can use +create_request_model to dynamically add the correct pagination parameter to the +request model for OpenAPI generation.
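A minimal sketch of how the two pagination flavours surface in requests: this extension adds a page query parameter to the generated request model (see GETPagination below), while TokenPaginationExtension adds an opaque token parameter usually taken from the next link of the previous response. URL and values are illustrative.
import httpx

STAC_API = "http://localhost:8080"  # hypothetical deployment URL

# Page-based pagination: explicit page number.
first = httpx.get(f"{STAC_API}/search", params={"limit": 10, "page": "1"}).json()

# Token-based pagination: follow the opaque token from the `next` link instead.
next_links = [link for link in first.get("links", []) if link.get("rel") == "next"]
print(next_links[0]["href"] if next_links else "no next page")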
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class QueryExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Query Extension.
+The Query extension adds an additional query
parameter to /search
requests which
+allows the caller to perform queries against item metadata (ex. find all images with
+cloud cover less than 15%).
+stac-api-extensions/query
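A minimal sketch of the cloud-cover example mentioned above, expressed with the Query extension's POST syntax; the URL and values are illustrative.
import httpx

STAC_API = "http://localhost:8080"  # hypothetical deployment URL
body = {"query": {"eo:cloud_cover": {"lt": 15}}}  # items with cloud cover below 15%
resp = httpx.post(f"{STAC_API}/search", json=body)
print(len(resp.json()["features"]))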
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class SortExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Sort Extension.
+The Sort extension adds the sortby
parameter to the /search
endpoint, allowing the
+caller to specify the sort order of the returned items.
+stac-api-extensions/sort
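A minimal sketch of the sortby parameter in both request styles; the URL and field names are illustrative.
import httpx

STAC_API = "http://localhost:8080"  # hypothetical deployment URL

# GET form: '-' prefix sorts in descending order.
httpx.get(f"{STAC_API}/search", params={"sortby": "-properties.datetime"})

# POST form: list of {field, direction} objects.
body = {"sortby": [{"field": "properties.datetime", "direction": "desc"}]}
resp = httpx.post(f"{STAC_API}/search", json=body)
print(resp.status_code)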
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class TokenPaginationExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Token Pagination.
+Though not strictly an extension, the chosen pagination will modify the form of the +request object. By making pagination an extension class, we can use +create_request_model to dynamically add the correct pagination parameter to the +request model for OpenAPI generation.
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class TransactionExtension(
+ client: Union[stac_fastapi.types.core.AsyncBaseTransactionsClient, stac_fastapi.types.core.BaseTransactionsClient],
+ settings: stac_fastapi.types.config.ApiSettings,
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None,
+ router: fastapi.routing.APIRouter = NOTHING,
+ response_class: Type[starlette.responses.Response] = <class 'starlette.responses.JSONResponse'>
+)
+
Transaction Extension.
The transaction extension adds several endpoints which allow the creation, +deletion, and updating of items and collections: + POST /collections + PUT /collections/{collection_id} + DELETE /collections/{collection_id} + POST /collections/{collection_id}/items + PUT /collections/{collection_id}/items/{item_id} + DELETE /collections/{collection_id}/items/{item_id}
+stac-api-extensions/transaction +stac-api-extensions/collection-transaction
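A minimal sketch of creating and deleting an Item through the routes listed above; the base URL, collection id and Item payload are illustrative placeholders.
import httpx

STAC_API = "http://localhost:8080"  # hypothetical deployment URL
item = {
    "type": "Feature",
    "stac_version": "1.0.0",
    "id": "example-item",
    "collection": "example-collection",
    "geometry": {"type": "Point", "coordinates": [0.0, 0.0]},
    "bbox": [0.0, 0.0, 0.0, 0.0],
    "properties": {"datetime": "2020-01-01T00:00:00Z"},
    "assets": {},
    "links": [],
}
created = httpx.post(f"{STAC_API}/collections/example-collection/items", json=item)
deleted = httpx.delete(f"{STAC_API}/collections/example-collection/items/example-item")
print(created.status_code, deleted.status_code)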
+Name | +Type | +Description | +Default | +
---|---|---|---|
client | +None | +CRUD application logic | +None | +
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
def register_create_collection(
+ self
+)
+
Register create collection endpoint (POST /collections).
+def register_create_item(
+ self
+)
+
Register create item endpoint (POST /collections/{collection_id}/items).
+def register_delete_collection(
+ self
+)
+
Register delete collection endpoint (DELETE /collections/{collection_id}).
+def register_delete_item(
+ self
+)
+
Register delete item endpoint (DELETE
+/collections/{collection_id}/items/{item_id}).
+def register_patch_collection(
+ self
+)
+
Register patch collection endpoint (PATCH /collections/{collection_id}).
+def register_patch_item(
+ self
+)
+
Register patch item endpoint (PATCH
+/collections/{collection_id}/items/{item_id}).
+def register_update_collection(
+ self
+)
+
Register update collection endpoint (PUT /collections/{collection_id}).
+def register_update_item(
+ self
+)
+
Register update item endpoint (PUT
+/collections/{collection_id}/items/{item_id}).
Pagination classes as extensions.
+class PaginationExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Token Pagination.
+Though not strictly an extension, the chosen pagination will modify the form of the +request object. By making pagination an extension class, we can use +create_request_model to dynamically add the correct pagination parameter to the +request model for OpenAPI generation.
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class TokenPaginationExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Token Pagination.
+Though not strictly an extension, the chosen pagination will modify the form of the +request object. By making pagination an extension class, we can use +create_request_model to dynamically add the correct pagination parameter to the +request model for OpenAPI generation.
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Pagination API extension.
+class PaginationExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Token Pagination.
+Though not strictly an extension, the chosen pagination will modify the form of the +request object. By making pagination an extension class, we can use +create_request_model to dynamically add the correct pagination parameter to the +request model for OpenAPI generation.
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Pagination extension request models.
+class GETPagination(
+ page: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None
+)
+
Page based pagination for GET requests.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into a format which matches the signature of the
+endpoint.
+class GETTokenPagination(
+ token: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None
+)
+
Token pagination for GET requests.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into a format which matches the signature of the
+endpoint.
+class POSTPagination(
+ /,
+ **data: 'Any'
+)
+
Page based pagination for POST requests.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications. |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False. |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
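For example (illustrative model, not part of this package):

```python
# Illustrative sketch only; Point is a hypothetical model.
from pydantic import BaseModel, ValidationError

class Point(BaseModel):
    x: int
    y: int

print(Point.model_validate_json('{"x": 1, "y": 2}'))   # -> x=1 y=2

try:
    Point.model_validate_json('{"x": "oops"}')
except ValidationError as exc:
    print(exc.error_count())  # 2 errors: x is not an int, y is missing
```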
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
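As a quick illustration of the mode parameter (hypothetical model, not part of this package):

```python
# Illustrative sketch only; Event is a hypothetical model.
from datetime import datetime
from pydantic import BaseModel

class Event(BaseModel):
    name: str
    when: datetime

e = Event(name="overpass", when=datetime(2024, 1, 1, 12, 0))
print(e.model_dump())             # 'when' stays a datetime object (mode='python')
print(e.model_dump(mode="json"))  # 'when' becomes an ISO 8601 string
```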
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
+class POSTTokenPagination(
+ /,
+ **data: 'Any'
+)
+
Token pagination model for POST requests.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [model_fields_set][pydantic.BaseModel.model_fields_set] attribute. Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications. |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False. |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
+Token pagination API extension.
+class TokenPaginationExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Token Pagination.
+Though not strictly an extension, the chosen pagination will modify the form of the +request object. By making pagination an extension class, we can use +create_request_model to dynamically add the correct pagination parameter to the +request model for OpenAPI generation.
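In practice a client should treat the token as opaque and take it from the rel="next" link of a previous response rather than constructing it by hand; a hedged usage sketch (the base URL is an assumption):

```python
# Hedged usage sketch; the base URL is an assumption about your deployment.
import requests

base = "http://localhost:8080"
page1 = requests.get(f"{base}/search", params={"limit": 10}).json()

# The server advertises the next page via a rel="next" link that carries the token.
next_link = next((l for l in page1.get("links", []) if l.get("rel") == "next"), None)
if next_link:
    page2 = requests.get(next_link["href"]).json()
```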
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Query extension module.
+class QueryExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Query Extension.
+The Query extension adds an additional query
parameter to /search
requests which
+allows the caller to perform queries against item metadata (ex. find all images with
+cloud cover less than 15%).
+stac-api-extensions/query
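A hedged example of such a query against /search (the host URL, collection id, and the eo:cloud_cover property are assumptions about the deployed catalog):

```python
# Hedged sketch; host, collection id, and queryable property are assumptions.
import requests

body = {
    "collections": ["my-collection"],
    "query": {"eo:cloud_cover": {"lt": 15}},  # items with cloud cover below 15%
}
resp = requests.post("http://localhost:8080/search", json=body)
resp.raise_for_status()
print(len(resp.json()["features"]))
```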
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Query extension.
+class QueryExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Query Extension.
+The Query extension adds an additional query
parameter to /search
requests which
+allows the caller to perform queries against item metadata (ex. find all images with
+cloud cover less than 15%).
+stac-api-extensions/query
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Request model for the Query extension.
+class QueryExtensionGetRequest(
+ query: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None
+)
+
Query Extension GET request model.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into the format which matches the signature of the endpoint.
+class QueryExtensionPostRequest(
+ /,
+ **data: 'Any'
+)
+
Query Extension POST request model.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [model_fields_set][pydantic.BaseModel.model_fields_set] attribute. Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications. |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False. |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
+Sort extension module.
+class SortExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Sort Extension.
+The Sort extension adds the sortby
parameter to the /search
endpoint, allowing the
+caller to specify the sort order of the returned items.
+stac-api-extensions/sort
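A hedged example of both request forms (the base URL is an assumption; a "-" prefix in the GET form requests descending order):

```python
# Hedged sketch; the base URL is an assumption about your deployment.
import requests

base = "http://localhost:8080"

# GET form: "-" prefix means descending.
newest_first = requests.get(f"{base}/search", params={"sortby": "-properties.datetime"})

# POST form: explicit field/direction objects.
body = {"sortby": [{"field": "properties.datetime", "direction": "desc"}]}
newest_first_post = requests.post(f"{base}/search", json=body)
```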
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Request model for the Sort Extension.
+class SortExtensionGetRequest(
+ sortby: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None
+)
+
Sortby Parameter for GET requests.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into the format which matches the signature of the endpoint.
+class SortExtensionPostRequest(
+ /,
+ **data: 'Any'
+)
+
Sortby parameter for POST requests.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [model_fields_set][pydantic.BaseModel.model_fields_set] attribute. Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications. |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False. |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
Sort extension.
+class SortExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Sort Extension.
+The Sort extension adds the sortby
parameter to the /search
endpoint, allowing the
+caller to specify the sort order of the returned items.
+stac-api-extensions/sort
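A minimal usage sketch follows; the collection id is a placeholder and the field syntax follows the Sort Extension conventions (object form for POST, `+`/`-` prefixes for GET).
```python
# The collection id is a placeholder; field syntax follows the Sort Extension spec.
from stac_fastapi.extensions.core import SortExtension

sort_extension = SortExtension()  # include this in the StacApi `extensions` list

# POST /search body sorted by newest acquisition first:
post_body = {
    "collections": ["sentinel-2-l2a"],
    "sortby": [{"field": "properties.datetime", "direction": "desc"}],
}

# Equivalent GET form:
#   /search?collections=sentinel-2-l2a&sortby=-properties.datetime
```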
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Transaction extension.
+class PostItem(
+ collection_id: typing_extensions.Annotated[str, Path(PydanticUndefined)],
+ item: typing_extensions.Annotated[Union[stac_pydantic.item.Item, stac_pydantic.item_collection.ItemCollection], Body(PydanticUndefined)] = None
+)
+
Create Item.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into the format that matches the signature of the endpoint.
+class PutCollection(
+ collection_id: typing_extensions.Annotated[str, Path(PydanticUndefined)],
+ collection: typing_extensions.Annotated[stac_pydantic.collection.Collection, Body(PydanticUndefined)] = None
+)
+
Update Collection.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into the format that matches the signature of the endpoint.
+class PutItem(
+ collection_id: typing_extensions.Annotated[str, Path(PydanticUndefined)],
+ item_id: typing_extensions.Annotated[str, Path(PydanticUndefined)],
+ item: typing_extensions.Annotated[stac_pydantic.item.Item, Body(PydanticUndefined)] = None
+)
+
Update Item.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into the format that matches the signature of the endpoint.
+class TransactionExtension(
+ client: Union[stac_fastapi.types.core.AsyncBaseTransactionsClient, stac_fastapi.types.core.BaseTransactionsClient],
+ settings: stac_fastapi.types.config.ApiSettings,
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None,
+ router: fastapi.routing.APIRouter = NOTHING,
+ response_class: Type[starlette.responses.Response] = <class 'starlette.responses.JSONResponse'>
+)
+
Transaction Extension.
+The transaction extension adds several endpoints which allow the creation,
+deletion, and updating of items and collections:
+ POST /collections
+ PUT /collections/{collection_id}
+ DELETE /collections/{collection_id}
+ POST /collections/{collection_id}/items
+ PUT /collections/{collection_id}/items/{item_id}
+ DELETE /collections/{collection_id}/items/{item_id}
+stac-api-extensions/transaction
+stac-api-extensions/collection-transaction
+Name | +Type | +Description | +Default | +
---|---|---|---|
client | +None | +CRUD application logic | +None | +
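As a rough usage sketch, the endpoints above map onto plain HTTP calls; the host, collection id, and item payload here are placeholders rather than values defined by this project.
```python
# Host, collection id, and item payload are placeholders.
import httpx

BASE = "http://localhost:8080"
item = {
    "type": "Feature",
    "stac_version": "1.0.0",
    "id": "item-1",
    "collection": "my-collection",
    "geometry": {"type": "Point", "coordinates": [5.5, 45.5]},
    "bbox": [5.5, 45.5, 5.5, 45.5],
    "properties": {"datetime": "2024-01-01T00:00:00Z"},
    "assets": {},
    "links": [],
}

# POST /collections/{collection_id}/items -> create the item
httpx.post(f"{BASE}/collections/my-collection/items", json=item)

# PUT /collections/{collection_id}/items/{item_id} -> replace it
httpx.put(f"{BASE}/collections/my-collection/items/item-1", json=item)

# DELETE /collections/{collection_id}/items/{item_id} -> remove it
httpx.delete(f"{BASE}/collections/my-collection/items/item-1")
```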
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
def register_create_collection(
+ self
+)
+
Register create collection endpoint (POST /collections).
+def register_create_item(
+ self
+)
+
Register create item endpoint (POST /collections/{collection_id}/items).
+def register_delete_collection(
+ self
+)
+
Register delete collection endpoint (DELETE /collections/{collection_id}).
+def register_delete_item(
+ self
+)
+
Register delete item endpoint (DELETE
+/collections/{collection_id}/items/{item_id}).
+def register_patch_collection(
+ self
+)
+
Register patch collection endpoint (PATCH /collections/{collection_id}).
+def register_patch_item(
+ self
+)
+
Register patch item endpoint (PATCH
+/collections/{collection_id}/items/{item_id}).
+def register_update_collection(
+ self
+)
+
Register update collection endpoint (PUT /collections/{collection_id}).
+def register_update_item(
+ self
+)
+
Register update item endpoint (PUT
+/collections/{collection_id}/items/{item_id}).
Bulk transactions extension.
+class AsyncBaseBulkTransactionsClient(
+
+)
+
BulkTransactionsClient.
+def bulk_item_insert(
+ self,
+ items: stac_fastapi.extensions.third_party.bulk_transactions.Items,
+ **kwargs
+) -> str
+
Bulk creation of items.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
items | +None | +list of items. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +Message indicating the status of the insert. | +
class BaseBulkTransactionsClient(
+
+)
+
BulkTransactionsClient.
+def bulk_item_insert(
+ self,
+ items: stac_fastapi.extensions.third_party.bulk_transactions.Items,
+ chunk_size: Union[int, NoneType] = None,
+ **kwargs
+) -> str
+
Bulk creation of items.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
items | +None | +list of items. | +None | +
chunk_size | +None | +number of items processed at a time. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +Message indicating the status of the insert. | +
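A hypothetical sketch of a concrete subclass is shown below; the storage call is a placeholder, not part of this project.
```python
from typing import Optional

from stac_fastapi.extensions.third_party.bulk_transactions import (
    BaseBulkTransactionsClient,
    Items,
)


class MyBulkClient(BaseBulkTransactionsClient):
    """Hypothetical client that counts the items it would write."""

    def bulk_item_insert(
        self, items: Items, chunk_size: Optional[int] = None, **kwargs
    ) -> str:
        for item_id, item in items.items.items():
            pass  # write_to_backend(item_id, item) -- placeholder for real storage logic
        return f"Successfully added {len(items.items)} Items."
```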
class BulkTransactionExtension(
+ client: Union[stac_fastapi.extensions.third_party.bulk_transactions.AsyncBaseBulkTransactionsClient, stac_fastapi.extensions.third_party.bulk_transactions.BaseBulkTransactionsClient],
+ conformance_classes: List[str] = [],
+ schema_href: Union[str, NoneType] = None
+)
+
Bulk Transaction Extension.
+Bulk Transaction extension adds the POST /collections/{collection_id}/bulk_items endpoint to the
+application for efficient bulk insertion of items. The input is an object with an "items" attribute
+whose value is an object mapping each Item id to the corresponding Item entity.
Optionally, clients can specify a "method" attribute that is either "insert" +or "upsert". If "insert", then the items will be inserted if they do not +exist, and an error will be returned if they do. If "upsert", then the items +will be inserted if they do not exist, and updated if they do. This defaults +to "insert".
+{
+ "items": {
+ "id1": { "type": "Feature", ... },
+ "id2": { "type": "Feature", ... },
+ "id3": { "type": "Feature", ... }
+ },
+ "method": "insert"
+}
+
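The same payload can be posted over HTTP; as a hedged sketch, the host, collection id, and item body below are placeholders.
```python
# Host, collection id, and the item body are placeholders; `item_body` stands
# in for a full STAC Item dictionary.
import httpx

item_body = {"type": "Feature", "id": "id1", "properties": {"datetime": "2024-01-01T00:00:00Z"}}

payload = {"items": {"id1": item_body}, "method": "upsert"}  # "insert" is the default
httpx.post("http://localhost:8080/collections/my-collection/bulk_items", json=payload)
```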
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
class BulkTransactionMethod(
+ /,
+ *args,
+ **kwargs
+)
+
Bulk Transaction Methods.
+INSERT
+
UPSERT
+
name
+
value
+
class Items(
+ /,
+ **data: 'Any'
+)
+
A group of STAC Item objects, in the form of a dictionary from Item.id -> Item.
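A short construction sketch, assuming the model mirrors the bulk payload shown for the extension above (an "items" mapping plus an optional "method"); the item body is truncated for illustration.
```python
from stac_fastapi.extensions.third_party.bulk_transactions import Items

# Assumes the model mirrors the bulk payload: an "items" mapping keyed by Item id,
# plus an optional "method" of "insert" or "upsert".
payload = Items(
    items={"id1": {"type": "Feature", "id": "id1", "properties": {}}},  # truncated Item body
    method="insert",
)
```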
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
stac_api.extensions.third_party module.
+class BulkTransactionExtension(
+ client: Union[stac_fastapi.extensions.third_party.bulk_transactions.AsyncBaseBulkTransactionsClient, stac_fastapi.extensions.third_party.bulk_transactions.BaseBulkTransactionsClient],
+ conformance_classes: List[str] = [],
+ schema_href: Union[str, NoneType] = None
+)
+
Bulk Transaction Extension.
+Bulk Transaction extension adds the POST /collections/{collection_id}/bulk_items endpoint to the
+application for efficient bulk insertion of items. The input is an object with an "items" attribute
+whose value is an object mapping each Item id to the corresponding Item entity.
Optionally, clients can specify a "method" attribute that is either "insert" +or "upsert". If "insert", then the items will be inserted if they do not +exist, and an error will be returned if they do. If "upsert", then the items +will be inserted if they do not exist, and updated if they do. This defaults +to "insert".
+{
+ "items": {
+ "id1": { "type": "Feature", ... },
+ "id2": { "type": "Feature", ... },
+ "id3": { "type": "Feature", ... }
+ },
+ "method": "insert"
+}
+
GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Library version.
FastAPI application.
+aggregation_extension
+
api
+
app
+
database_logic
+
extensions
+
filter_extension
+
handler
+
search_extensions
+
session
+
settings
+
def create_handler(
+ app
+)
+
Create a handler to use with AWS Lambda if mangum is available.
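A small usage sketch, assuming the OpenSearch backend's module path (the ElasticSearch backend mirrors it) and that the function returns None when mangum is not installed:
```python
# Module path is an assumption; create_handler is expected to return None
# when mangum is not installed.
from stac_fastapi.opensearch.app import app, create_handler

handler = create_handler(app)  # point the AWS Lambda entry point at "handler"
```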
+def run(
+
+) -> None
+
Run the app from the command line using uvicorn, if available.
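Equivalently, the application can be served directly with uvicorn; a minimal sketch, where the module path and port are assumptions rather than values fixed by this page:
```python
# Module path and port are assumptions, not values fixed by this page.
import uvicorn

from stac_fastapi.opensearch.app import app

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8080)
```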
+class post_request_model(
+ /,
+ **data: 'Any'
+)
+
Base arguments for POST Request.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
def validate_bbox(
+ v: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]]]
+) -> Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]]]
+
def validate_datetime(
+ value: str
+) -> str
+
def validate_spatial(
+ values: Dict[str, Any]
+) -> Dict[str, Any]
+
end_date
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+spatial_filter
+
Return a geojson-pydantic object representing the spatial filter for the search request.
+Check for both because the bbox
and intersects
parameters are mutually exclusive.
start_date
+
def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self: 'BaseModel',
+ context: 'Any',
+ /
+) -> 'None'
+
We need to both initialize private attributes and call the user-defined model_post_init
+method.
API configuration.
+class AsyncOpensearchSettings(
+ __pydantic_self__,
+ _case_sensitive: 'bool | None' = None,
+ _nested_model_default_partial_update: 'bool | None' = None,
+ _env_prefix: 'str | None' = None,
+ _env_file: 'DotenvType | None' = PosixPath('.'),
+ _env_file_encoding: 'str | None' = None,
+ _env_ignore_empty: 'bool | None' = None,
+ _env_nested_delimiter: 'str | None' = None,
+ _env_parse_none_str: 'str | None' = None,
+ _env_parse_enums: 'bool | None' = None,
+ _cli_prog_name: 'str | None' = None,
+ _cli_parse_args: 'bool | list[str] | tuple[str, ...] | None' = None,
+ _cli_settings_source: 'CliSettingsSource[Any] | None' = None,
+ _cli_parse_none_str: 'str | None' = None,
+ _cli_hide_none_type: 'bool | None' = None,
+ _cli_avoid_json: 'bool | None' = None,
+ _cli_enforce_required: 'bool | None' = None,
+ _cli_use_class_docs_for_groups: 'bool | None' = None,
+ _cli_exit_on_error: 'bool | None' = None,
+ _cli_prefix: 'str | None' = None,
+ _cli_flag_prefix_char: 'str | None' = None,
+ _cli_implicit_flags: 'bool | None' = None,
+ _cli_ignore_unknown_args: 'bool | None' = None,
+ _secrets_dir: 'PathType | None' = None,
+ **values: 'Any'
+)
+
API settings.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def settings_customise_sources(
+ settings_cls: 'type[BaseSettings]',
+ init_settings: 'PydanticBaseSettingsSource',
+ env_settings: 'PydanticBaseSettingsSource',
+ dotenv_settings: 'PydanticBaseSettingsSource',
+ file_secret_settings: 'PydanticBaseSettingsSource'
+) -> 'tuple[PydanticBaseSettingsSource, ...]'
+
Define the sources and their order for loading the settings values.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
settings_cls | +None | +The Settings class. | +None | +
init_settings | +None | +The InitSettingsSource instance. |
+None | +
env_settings | +None | +The EnvSettingsSource instance. |
+None | +
dotenv_settings | +None | +The DotEnvSettingsSource instance. |
+None | +
file_secret_settings | +None | +The SecretsSettingsSource instance. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +A tuple containing the sources and their order for loading the settings values. | +
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
create_client
+
Create async OpenSearch client.
+model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
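A hedged configuration sketch for the create_client property above: the environment variable names follow the project's README (ES_*) and the module path is an assumption; adjust both to your deployment.
```python
# Environment variable names and module path are assumptions taken from the
# project README; adjust them to your deployment.
import os

os.environ.setdefault("ES_HOST", "localhost")
os.environ.setdefault("ES_PORT", "9202")
os.environ.setdefault("ES_USE_SSL", "false")
os.environ.setdefault("ES_VERIFY_CERTS", "false")

from stac_fastapi.opensearch.config import AsyncOpensearchSettings

settings = AsyncOpensearchSettings()
client = settings.create_client  # async OpenSearch client built from these settings
```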
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
+class OpensearchSettings(
+ __pydantic_self__,
+ _case_sensitive: 'bool | None' = None,
+ _nested_model_default_partial_update: 'bool | None' = None,
+ _env_prefix: 'str | None' = None,
+ _env_file: 'DotenvType | None' = PosixPath('.'),
+ _env_file_encoding: 'str | None' = None,
+ _env_ignore_empty: 'bool | None' = None,
+ _env_nested_delimiter: 'str | None' = None,
+ _env_parse_none_str: 'str | None' = None,
+ _env_parse_enums: 'bool | None' = None,
+ _cli_prog_name: 'str | None' = None,
+ _cli_parse_args: 'bool | list[str] | tuple[str, ...] | None' = None,
+ _cli_settings_source: 'CliSettingsSource[Any] | None' = None,
+ _cli_parse_none_str: 'str | None' = None,
+ _cli_hide_none_type: 'bool | None' = None,
+ _cli_avoid_json: 'bool | None' = None,
+ _cli_enforce_required: 'bool | None' = None,
+ _cli_use_class_docs_for_groups: 'bool | None' = None,
+ _cli_exit_on_error: 'bool | None' = None,
+ _cli_prefix: 'str | None' = None,
+ _cli_flag_prefix_char: 'str | None' = None,
+ _cli_implicit_flags: 'bool | None' = None,
+ _cli_ignore_unknown_args: 'bool | None' = None,
+ _secrets_dir: 'PathType | None' = None,
+ **values: 'Any'
+)
+
API settings.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
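The behaviour described above can be illustrated with a small, self-contained sketch (the model is hypothetical); model_construct skips validation entirely while still applying defaults:

```python
from pydantic import BaseModel, ValidationError


class Endpoint(BaseModel):
    # Hypothetical model, used only to illustrate model_construct.
    host: str
    port: int = 9200


# Normal construction validates the input and raises on bad data.
try:
    Endpoint(host="localhost", port="not-a-number")
except ValidationError as exc:
    print(f"{exc.error_count()} validation error")

# model_construct trusts the data: no validation, defaults still applied.
trusted = Endpoint.model_construct(host="localhost")
print(trusted.port)  # 9200 (default respected)

unchecked = Endpoint.model_construct(host="localhost", port="not-a-number")
print(unchecked.port)  # 'not-a-number' is accepted as-is, no validation
```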
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def settings_customise_sources(
+ settings_cls: 'type[BaseSettings]',
+ init_settings: 'PydanticBaseSettingsSource',
+ env_settings: 'PydanticBaseSettingsSource',
+ dotenv_settings: 'PydanticBaseSettingsSource',
+ file_secret_settings: 'PydanticBaseSettingsSource'
+) -> 'tuple[PydanticBaseSettingsSource, ...]'
+
Define the sources and their order for loading the settings values.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
settings_cls | +None | +The Settings class. | +None | +
init_settings | +None | +The InitSettingsSource instance. |
+None | +
env_settings | +None | +The EnvSettingsSource instance. |
+None | +
dotenv_settings | +None | +The DotEnvSettingsSource instance. |
+None | +
file_secret_settings | +None | +The SecretsSettingsSource instance. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +A tuple containing the sources and their order for loading the settings values. | +
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
create_client
+
Create es client.
+model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
Database logic.
+COLLECTIONS_INDEX
+
DEFAULT_SORT
+
ES_COLLECTIONS_MAPPINGS
+
ES_INDEX_NAME_UNSUPPORTED_CHARS
+
ES_ITEMS_MAPPINGS
+
ES_ITEMS_SETTINGS
+
ES_MAPPINGS_DYNAMIC_TEMPLATES
+
ITEMS_INDEX_PREFIX
+
ITEM_INDICES
+
MAX_LIMIT
+
NumType
+
logger
+
def create_collection_index(
+
+) -> None
+
Create the index for a Collection. The settings of the index template will be used implicitly.
+Returns:
+Type | +Description | +
---|---|
None | +None | +
def create_index_templates(
+
+) -> None
+
Create index templates for the Collection and Item indices.
+Returns:
+Type | +Description | +
---|---|
None | +None | +
def create_item_index(
+ collection_id: str
+)
+
Create the index for Items. The settings of the index template will be used implicitly.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +Collection identifier. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
def delete_item_index(
+ collection_id: str
+)
+
Delete the index for items in a collection.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The ID of the collection whose items index will be deleted. | +None | +
def index_by_collection_id(
+ collection_id: str
+) -> str
+
Translate a collection id into an Elasticsearch index name.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The collection id to translate into an index name. | +None | +
Returns:
+Type | +Description | +
---|---|
str | +The index name derived from the collection id. | +
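A minimal sketch of what such a translation might look like, assuming a prefix and unsupported-character set along the lines of the ITEMS_INDEX_PREFIX and ES_INDEX_NAME_UNSUPPORTED_CHARS constants listed above (the exact values here are assumptions, not the library's):

```python
# Assumed values, for illustration only; the real constants live in
# stac_fastapi.opensearch.database_logic.
ITEMS_INDEX_PREFIX = "items_"
ES_INDEX_NAME_UNSUPPORTED_CHARS = {"\\", "/", "*", "?", '"', "<", ">", "|", " ", ","}


def index_by_collection_id_sketch(collection_id: str) -> str:
    """Lower-case the id and drop unsupported characters before prefixing."""
    cleaned = "".join(
        char for char in collection_id.lower()
        if char not in ES_INDEX_NAME_UNSUPPORTED_CHARS
    )
    return f"{ITEMS_INDEX_PREFIX}{cleaned}"


print(index_by_collection_id_sketch("Sentinel-2 L2A"))  # items_sentinel-2l2a
```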
def indices(
+ collection_ids: Union[List[str], NoneType]
+) -> str
+
Get a comma-separated string of index names for a given list of collection ids.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_ids | +None | +A list of collection ids. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A string of comma-separated index names. If collection_ids is None, returns the default indices. |
+
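A self-contained sketch of the comma-separated behaviour, with an assumed default pattern standing in for ITEM_INDICES:

```python
from typing import List, Optional

# Assumed default pattern and prefix, for illustration only.
ITEM_INDICES = "items_*"
ITEMS_INDEX_PREFIX = "items_"


def indices_sketch(collection_ids: Optional[List[str]]) -> str:
    """Join per-collection index names, or fall back to the default pattern."""
    if not collection_ids:
        return ITEM_INDICES
    return ",".join(f"{ITEMS_INDEX_PREFIX}{cid}" for cid in collection_ids)


print(indices_sketch(None))                   # items_*
print(indices_sketch(["landsat-8", "naip"]))  # items_landsat-8,items_naip
```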
def mk_actions(
+ collection_id: str,
+ processed_items: List[stac_fastapi.types.stac.Item]
+)
+
Create Elasticsearch bulk actions for a list of processed items.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The identifier for the collection the items belong to. | +None | +
processed_items | +List[Item] | +The list of processed items to be bulk indexed. | +None | +
Returns:
+Type | +Description | +
---|---|
List[Dict[str, Union[str, Dict]]] | +The list of bulk actions to be executed, each action being a dictionary with the following keys: - _index : the index to store the document in.- _id : the document's identifier.- _source : the source of the document. |
+
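A sketch of the action shape described above (_index, _id, _source); the index naming and the '|'-joined document id mirror the helpers documented nearby, but the code itself is illustrative, not the library's implementation:

```python
from typing import Dict, List, Union


def mk_actions_sketch(
    collection_id: str, processed_items: List[Dict]
) -> List[Dict[str, Union[str, Dict]]]:
    """Build one bulk action per item, matching the keys described above."""
    return [
        {
            "_index": f"items_{collection_id}",      # assumed index naming
            "_id": f"{item['id']}|{collection_id}",  # item id and collection id joined by '|'
            "_source": item,                         # the document body itself
        }
        for item in processed_items
    ]


actions = mk_actions_sketch("landsat-8", [{"id": "scene-001", "collection": "landsat-8"}])
print(actions[0]["_id"])  # scene-001|landsat-8
```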
def mk_item_id(
+ item_id: str,
+ collection_id: str
+)
+
Create the document id for an Item in Elasticsearch.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item_id | +str | +The id of the Item. | +None | +
collection_id | +str | +The id of the Collection that the Item belongs to. | +None | +
Returns:
+Type | +Description | +
---|---|
str | +The document id for the Item, combining the Item id and the Collection id, separated by a | character. |
+
class DatabaseLogic(
+ item_serializer: Type[stac_fastapi.core.serializers.ItemSerializer] = <class 'stac_fastapi.core.serializers.ItemSerializer'>,
+ collection_serializer: Type[stac_fastapi.core.serializers.CollectionSerializer] = <class 'stac_fastapi.core.serializers.CollectionSerializer'>,
+ extensions: List[str] = NOTHING
+)
+
Database logic.
+aggregation_mapping
+
client
+
sync_client
+
def apply_bbox_filter(
+ search: opensearchpy.helpers.search.Search,
+ bbox: List
+)
+
Filter search results based on bounding box.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search | +Search | +The search object to apply the filter to. | +None | +
bbox | +List | +The bounding box coordinates, represented as a list of four values [minx, miny, maxx, maxy]. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +search (Search): The search object with the bounding box filter applied. | +
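A sketch of how a bounding box might become a geo_shape envelope filter on the opensearchpy Search object (the geometry field name and the filter details are assumptions, not the library's exact query):

```python
from opensearchpy.helpers.search import Search


def apply_bbox_filter_sketch(search: Search, bbox: list) -> Search:
    """Filter on an envelope built from [minx, miny, maxx, maxy] (top-left, bottom-right)."""
    minx, miny, maxx, maxy = bbox
    return search.filter(
        "geo_shape",
        geometry={
            "shape": {"type": "envelope", "coordinates": [[minx, maxy], [maxx, miny]]},
            "relation": "intersects",
        },
    )


search = apply_bbox_filter_sketch(Search(), [-10.0, 35.0, 5.0, 45.0])
print(search.to_dict())
```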
def apply_collections_filter(
+ search: opensearchpy.helpers.search.Search,
+ collection_ids: List[str]
+)
+
Database logic to search a list of STAC collection ids.
+def apply_cql2_filter(
+ search: opensearchpy.helpers.search.Search,
+ _filter: Union[Dict[str, Any], NoneType]
+)
+
Apply a CQL2 filter to an Opensearch Search object.
+This method transforms a dictionary representing a CQL2 filter into an Opensearch query +and applies it to the provided Search object. If the filter is None, the original Search +object is returned unmodified.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search | +Search | +The Opensearch Search object to which the filter will be applied. | +None | +
_filter | +Optional[Dict[str, Any]] | +The filter in dictionary form that needs to be applied to the search. The dictionary should follow the structure required by the to_es function which converts itto an Opensearch query. |
+None | +
Returns:
+Type | +Description | +
---|---|
Search | +The modified Search object with the filter applied if a filter is provided, otherwise the original Search object. |
+
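For orientation, here is the kind of CQL2 JSON dictionary that could be passed as _filter (the property names are examples only):

```python
# A hypothetical CQL2 JSON filter: cloud cover below 10 AND platform equals "landsat-8".
cql2_filter = {
    "op": "and",
    "args": [
        {"op": "<", "args": [{"property": "eo:cloud_cover"}, 10]},
        {"op": "=", "args": [{"property": "platform"}, "landsat-8"]},
    ],
}

# The filter would then be applied to an existing Search object, e.g.:
# search = DatabaseLogic.apply_cql2_filter(search, cql2_filter)
```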
def apply_datetime_filter(
+ search: opensearchpy.helpers.search.Search,
+ datetime_search
+)
+
Apply a filter to search based on datetime field.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search | +Search | +The search object to filter. | +None | +
datetime_search | +dict | +The datetime filter criteria. | +None | +
Returns:
+Type | +Description | +
---|---|
Search | +The filtered search object. | +
def apply_free_text_filter(
+ search: opensearchpy.helpers.search.Search,
+ free_text_queries: Union[List[str], NoneType]
+)
+
Database logic to perform query for search endpoint.
+def apply_ids_filter(
+ search: opensearchpy.helpers.search.Search,
+ item_ids: List[str]
+)
+
Database logic to search a list of STAC item ids.
+def apply_intersects_filter(
+ search: opensearchpy.helpers.search.Search,
+ intersects: stac_fastapi.opensearch.database_logic.Geometry
+)
+
Filter search results based on intersecting geometry.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search | +Search | +The search object to apply the filter to. | +None | +
intersects | +Geometry | +The intersecting geometry, represented as a GeoJSON-like object. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +search (Search): The search object with the intersecting geometry filter applied. | +
def apply_stacql_filter(
+ search: opensearchpy.helpers.search.Search,
+ op: str,
+ field: str,
+ value: float
+)
+
Filter search results based on a comparison between a field and a value.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search | +Search | +The search object to apply the filter to. | +None | +
op | +str | +The comparison operator to use. Can be 'eq' (equal), 'gt' (greater than), 'gte' (greater than or equal), 'lt' (less than), or 'lte' (less than or equal). |
+None | +
field | +str | +The field to perform the comparison on. | +None | +
value | +float | +The value to compare the field against. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +search (Search): The search object with the specified filter applied. | +
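A minimal sketch of the operator mapping described above ('eq' as an exact match, the rest as a range query); this is illustrative, not the library's exact query construction:

```python
from opensearchpy.helpers.search import Search


def apply_stacql_filter_sketch(search: Search, op: str, field: str, value: float) -> Search:
    """'eq' becomes a term filter; gt/gte/lt/lte map straight onto a range query."""
    if op == "eq":
        return search.filter("term", **{field: value})
    return search.filter("range", **{field: {op: value}})


s = apply_stacql_filter_sketch(Search(), "lte", "properties.eo:cloud_cover", 20)
print(s.to_dict())
```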
def make_search(
+
+)
+
Database logic to create a Search instance.
+def populate_sort(
+ sortby: List
+) -> Union[Dict[str, Dict[str, str]], NoneType]
+
Database logic to sort search instance.
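A sketch of the sort mapping, assuming each clause exposes field and direction attributes as in the STAC sort extension request models (an assumption, not confirmed by this page):

```python
from typing import Dict, List, Optional


def populate_sort_sketch(sortby: List) -> Optional[Dict[str, Dict[str, str]]]:
    """Map sort clauses onto the {field: {"order": direction}} shape the search backend expects."""
    if not sortby:
        return None
    return {clause.field: {"order": clause.direction} for clause in sortby}
```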
+def aggregate(
+ self,
+ collection_ids: Union[List[str], NoneType],
+ aggregations: List[str],
+ search: opensearchpy.helpers.search.Search,
+ centroid_geohash_grid_precision: int,
+ centroid_geohex_grid_precision: int,
+ centroid_geotile_grid_precision: int,
+ geometry_geohash_grid_precision: int,
+ geometry_geotile_grid_precision: int,
+ datetime_frequency_interval: str,
+ ignore_unavailable: Union[bool, NoneType] = True
+)
+
Return aggregations of STAC Items.
+def bulk_async(
+ self,
+ collection_id: str,
+ processed_items: List[stac_fastapi.types.stac.Item],
+ refresh: bool = False
+) -> None
+
Perform a bulk insert of items into the database asynchronously.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
self | +None | +The instance of the object calling this function. | +None | +
collection_id | +str | +The ID of the collection to which the items belong. | +None | +
processed_items | +List[Item] | +A list of Item objects to be inserted into the database. |
+None | +
refresh | +bool | +Whether to refresh the index after the bulk insert (default: False). | +None | +
def bulk_sync(
+ self,
+ collection_id: str,
+ processed_items: List[stac_fastapi.types.stac.Item],
+ refresh: bool = False
+) -> None
+
Perform a bulk insert of items into the database synchronously.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
self | +None | +The instance of the object calling this function. | +None | +
collection_id | +str | +The ID of the collection to which the items belong. | +None | +
processed_items | +List[Item] | +A list of Item objects to be inserted into the database. |
+None | +
refresh | +bool | +Whether to refresh the index after the bulk insert (default: False). | +None | +
def check_collection_exists(
+ self,
+ collection_id: str
+)
+
Database logic to check if a collection exists.
+def create_collection(
+ self,
+ collection: stac_fastapi.types.stac.Collection,
+ refresh: bool = False
+)
+
Create a single collection in the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection | +Collection | +The Collection object to be created. | +None | +
refresh | +bool | +Whether to refresh the index after the creation. Default is False. | +None | +
Raises:
+Type | +Description | +
---|---|
ConflictError | +If a Collection with the same id already exists in the database. | +
def create_item(
+ self,
+ item: stac_fastapi.types.stac.Item,
+ refresh: bool = False
+)
+
Database logic for creating one item.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item | +Item | +The item to be created. | +None | +
refresh | +bool | +Refresh the index after performing the operation. Defaults to False. | +False | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
Raises:
+Type | +Description | +
---|---|
ConflictError | +If the item already exists in the database. | +
def delete_collection(
+ self,
+ collection_id: str,
+ refresh: bool = False
+)
+
Delete a collection from the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
self | +None | +The instance of the object calling this function. | +None | +
collection_id | +str | +The ID of the collection to be deleted. | +None | +
refresh | +bool | +Whether to refresh the index after the deletion (default: False). | +None | +
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the collection with the given collection_id is not found in the database. |
+
def delete_collections(
+ self
+) -> None
+
Danger: this is only for tests.
+def delete_item(
+ self,
+ item_id: str,
+ collection_id: str,
+ refresh: bool = False
+)
+
Delete a single item from the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item_id | +str | +The id of the Item to be deleted. | +None | +
collection_id | +str | +The id of the Collection that the Item belongs to. | +None | +
refresh | +bool | +Whether to refresh the index after the deletion. Default is False. | +None | +
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the Item does not exist in the database. | +
def delete_items(
+ self
+) -> None
+
Danger: this is only for tests.
+def execute_search(
+ self,
+ search: opensearchpy.helpers.search.Search,
+ limit: int,
+ token: Union[str, NoneType],
+ sort: Union[Dict[str, Dict[str, str]], NoneType],
+ collection_ids: Union[List[str], NoneType],
+ ignore_unavailable: bool = True
+) -> Tuple[Iterable[Dict[str, Any]], Union[int, NoneType], Union[str, NoneType]]
+
Execute a search query with limit and other optional parameters.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search | +Search | +The search query to be executed. | +None | +
limit | +int | +The maximum number of results to be returned. | +None | +
token | +Optional[str] | +The token used to return the next set of results. | +None | +
sort | +Optional[Dict[str, Dict[str, str]]] | +Specifies how the results should be sorted. | +None | +
collection_ids | +Optional[List[str]] | +The collection ids to search. | +None | +
ignore_unavailable | +bool | +Whether to ignore unavailable collections. Defaults to True. | +True | +
Returns:
+Type | +Description | +
---|---|
Tuple[Iterable[Dict[str, Any]], Optional[int], Optional[str]] | +A tuple containing: - An iterable of search results, where each result is a dictionary with keys and values representing the fields and values of each document. - The total number of results (if the count could be computed), or None if the count could not be computed. - The token to be used to retrieve the next set of results, or None if there are no more results. |
+
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the collections specified in collection_ids do not exist. |
+
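A hedged usage sketch of the pagination loop that the returned (items, count, token) tuple enables. It assumes a DatabaseLogic instance backed by a reachable OpenSearch cluster and that the method is awaitable, consistent with the asynchronous methods elsewhere in this class; neither assumption is confirmed by this page:

```python
async def fetch_all_pages(db, search, limit: int = 100):
    """Walk every page of results by feeding the returned token back in."""
    token = None
    items = []
    while True:
        page, matched, token = await db.execute_search(
            search=search, limit=limit, token=token, sort=None, collection_ids=None
        )
        items.extend(page)
        if token is None:  # no further pages
            break
    return items
```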
def find_collection(
+ self,
+ collection_id: str
+) -> stac_fastapi.types.stac.Collection
+
Find and return a collection from the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
self | +None | +The instance of the object calling this function. | +None | +
collection_id | +str | +The ID of the collection to be found. | +None | +
Returns:
+Type | +Description | +
---|---|
Collection | +The found collection, represented as a Collection object. |
+
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the collection with the given collection_id is not found in the database. |
+
def get_all_collections(
+ self,
+ token: Union[str, NoneType],
+ limit: int,
+ request: starlette.requests.Request
+) -> Tuple[List[Dict[str, Any]], Union[str, NoneType]]
+
Retrieve a list of all collections from Opensearch, supporting pagination.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
token | +Optional[str] | +The pagination token. | +None | +
limit | +int | +The number of results to return. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A tuple of (collections, next pagination token if any). | +
def get_one_item(
+ self,
+ collection_id: str,
+ item_id: str
+) -> Dict
+
Retrieve a single item from the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +str | +The id of the Collection that the Item belongs to. | +None | +
item_id | +str | +The id of the Item. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +item (Dict): A dictionary containing the source data for the Item. | +
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the specified Item does not exist in the Collection. | +
def prep_create_item(
+ self,
+ item: stac_fastapi.types.stac.Item,
+ base_url: str,
+ exist_ok: bool = False
+) -> stac_fastapi.types.stac.Item
+
Preps an item for insertion into the database.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item | +Item | +The item to be prepped for insertion. | +None | +
base_url | +str | +The base URL used to create the item's self URL. | +None | +
exist_ok | +bool | +Indicates whether the item can exist already. | +None | +
Returns:
+Type | +Description | +
---|---|
Item | +The prepped item. | +
Raises:
+Type | +Description | +
---|---|
ConflictError | +If the item already exists in the database. | +
def sync_prep_create_item(
+ self,
+ item: stac_fastapi.types.stac.Item,
+ base_url: str,
+ exist_ok: bool = False
+) -> stac_fastapi.types.stac.Item
+
Prepare an item for insertion into the database.
+This method performs pre-insertion preparation on the given item
,
+such as checking if the collection the item belongs to exists,
+and optionally verifying that an item with the same ID does not already exist in the database.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item | +Item | +The item to be inserted into the database. | +None | +
base_url | +str | +The base URL used for constructing URLs for the item. | +None | +
exist_ok | +bool | +Indicates whether the item can exist already. | +None | +
Returns:
+Type | +Description | +
---|---|
Item | +The item after preparation is done. | +
Raises:
+Type | +Description | +
---|---|
NotFoundError | +If the collection that the item belongs to does not exist in the database. | +
ConflictError | +If an item with the same ID already exists in the collection. | +
def update_collection(
+ self,
+ collection_id: str,
+ collection: stac_fastapi.types.stac.Collection,
+ refresh: bool = False
+)
+
Update a collection from the database.
Args:
    self: The instance of the object calling this function.
    collection_id (str): The ID of the collection to be updated.
    collection (Collection): The Collection object to be used for the update.

Raises:
    NotFoundError: If the collection with the given collection_id is not found in the database.

Notes:
    This function updates the collection in the database using the specified collection_id and the Collection object provided. If the collection is not found, a NotFoundError is raised.
class Geometry(
+ *args,
+ **kwargs
+)
+
Base class for protocol classes.
+Protocol classes are defined as::
+class Proto(Protocol):
+ def meth(self) -> int:
+ ...
+
Such classes are primarily used with static type checkers that recognize +structural subtyping (static duck-typing), for example::
+class C:
+ def meth(self) -> int:
+ return 0
+
+def func(x: Proto) -> int:
+ return x.meth()
+
+func(C()) # Passes static type check
+
See PEP 544 for details. Protocol classes decorated with @runtime_checkable act as simple-minded runtime protocols that check only the presence of given attributes, ignoring their type signatures.
+library version.
stac_fastapi.types.config module.
+class ApiSettings(
+ __pydantic_self__,
+ _case_sensitive: 'bool | None' = None,
+ _nested_model_default_partial_update: 'bool | None' = None,
+ _env_prefix: 'str | None' = None,
+ _env_file: 'DotenvType | None' = PosixPath('.'),
+ _env_file_encoding: 'str | None' = None,
+ _env_ignore_empty: 'bool | None' = None,
+ _env_nested_delimiter: 'str | None' = None,
+ _env_parse_none_str: 'str | None' = None,
+ _env_parse_enums: 'bool | None' = None,
+ _cli_prog_name: 'str | None' = None,
+ _cli_parse_args: 'bool | list[str] | tuple[str, ...] | None' = None,
+ _cli_settings_source: 'CliSettingsSource[Any] | None' = None,
+ _cli_parse_none_str: 'str | None' = None,
+ _cli_hide_none_type: 'bool | None' = None,
+ _cli_avoid_json: 'bool | None' = None,
+ _cli_enforce_required: 'bool | None' = None,
+ _cli_use_class_docs_for_groups: 'bool | None' = None,
+ _cli_exit_on_error: 'bool | None' = None,
+ _cli_prefix: 'str | None' = None,
+ _cli_flag_prefix_char: 'str | None' = None,
+ _cli_implicit_flags: 'bool | None' = None,
+ _cli_ignore_unknown_args: 'bool | None' = None,
+ _secrets_dir: 'PathType | None' = None,
+ **values: 'Any'
+)
+
ApiSettings.
+Defines api configuration, potentially through environment variables. +See pydantic-docs.helpmanual.io/usage/settings/.
+Name | +Type | +Description | +Default | +
---|---|---|---|
environment | +None | +name of the environment (ex. dev/prod). | +None | +
debug | +None | +toggles debug mode. | +None | +
forbidden_fields | +None | +set of fields defined by STAC but not included in the database. | +None | +
indexed_fields | +None | +set of fields which are usually in item.properties but are indexedas distinct columns in the database. |
+None | +
model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the [ model_fields_set ][pydantic.BaseModel.model_fields_set] attribute.Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
+This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def settings_customise_sources(
+ settings_cls: 'type[BaseSettings]',
+ init_settings: 'PydanticBaseSettingsSource',
+ env_settings: 'PydanticBaseSettingsSource',
+ dotenv_settings: 'PydanticBaseSettingsSource',
+ file_secret_settings: 'PydanticBaseSettingsSource'
+) -> 'tuple[PydanticBaseSettingsSource, ...]'
+
Define the sources and their order for loading the settings values.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
settings_cls | +None | +The Settings class. | +None | +
init_settings | +None | +The InitSettingsSource instance. |
+None | +
env_settings | +None | +The EnvSettingsSource instance. |
+None | +
dotenv_settings | +None | +The DotEnvSettingsSource instance. |
+None | +
file_secret_settings | +None | +The SecretsSettingsSource instance. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +A tuple containing the sources and their order for loading the settings values. | +
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [ PydanticSerializationError ][pydantic_core.PydanticSerializationError]. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self,
+ _BaseModel__context: 'Any'
+) -> 'None'
+
Override this method to perform additional initialization after __init__
and model_construct
.
This is useful if you want to do some validation that requires the entire model to be initialized.
+class Settings(
+ /,
+ *args,
+ **kwargs
+)
+
Holds the global instance of settings.
+def get(
+
+) -> stac_fastapi.types.config.ApiSettings
+
Get the settings.
+If they have not yet been set, throws an exception.
+def set(
+ base_settings: stac_fastapi.types.config.ApiSettings
+)
+
Set the global settings.
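A small usage sketch, assuming the packages are installed, the OpensearchSettings class documented above is importable from stac_fastapi.opensearch.config (an assumed module path), and any required environment variables are set:

```python
from stac_fastapi.types.config import Settings
from stac_fastapi.opensearch.config import OpensearchSettings  # assumed import path

# Register the settings instance once, typically at application startup...
Settings.set(OpensearchSettings())

# ...then retrieve the same instance anywhere else in the application.
settings = Settings.get()
print(type(settings).__name__)
```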
Conformance Classes.
+BASE_CONFORMANCE_CLASSES
+
class OAFConformanceClasses(
+ /,
+ *args,
+ **kwargs
+)
+
Conformance classes for OGC API - Features.
+CORE
+
GEOJSON
+
OPEN_API
+
name
+
value
+
class STACConformanceClasses(
+ /,
+ *args,
+ **kwargs
+)
+
Conformance classes for the STAC API spec.
+COLLECTIONS
+
CORE
+
ITEM_SEARCH
+
OGC_API_FEAT
+
name
+
value
+
Base clients.
+NumType
+
StacType
+
class AsyncBaseCoreClient(
+ stac_version: str = '1.0.0',
+ landing_page_id: str = 'stac-fastapi',
+ title: str = 'stac-fastapi',
+ description: str = 'stac-fastapi',
+ base_conformance_classes: List[str] = NOTHING,
+ extensions: List[stac_fastapi.types.extension.ApiExtension] = NOTHING,
+ post_request_model=<class 'stac_fastapi.types.search.BaseSearchPostRequest'>
+)
+
Defines a pattern for implementing STAC api core endpoints.
+Name | +Type | +Description | +Default | +
---|---|---|---|
extensions | +None | +list of registered api extensions. | +None | +
post_request_model
+
def all_collections(
+ self,
+ **kwargs
+) -> stac_fastapi.types.stac.Collections
+
Get all available collections.
+Called with GET /collections
.
Returns:
+Type | +Description | +
---|---|
None | +A list of collections. | +
def conformance(
+ self,
+ **kwargs
+) -> stac_fastapi.types.stac.Conformance
+
Conformance classes.
+Called with GET /conformance
.
Returns:
+Type | +Description | +
---|---|
None | +Conformance classes which the server conforms to. | +
def conformance_classes(
+ self
+) -> List[str]
+
Generate conformance classes by adding extension conformance to base
+conformance classes.
+def extension_is_enabled(
+ self,
+ extension: str
+) -> bool
+
Check if an api extension is enabled.
+def get_collection(
+ self,
+ collection_id: str,
+ **kwargs
+) -> stac_fastapi.types.stac.Collection
+
Get collection by id.
+Called with GET /collections/{collection_id}
.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +None | +Id of the collection. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +Collection. | +
def get_item(
+ self,
+ item_id: str,
+ collection_id: str,
+ **kwargs
+) -> stac_fastapi.types.stac.Item
+
Get item by id.
+Called with GET /collections/{collection_id}/items/{item_id}
.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item_id | +None | +Id of the item. | +None | +
collection_id | +None | +Id of the collection. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +Item. | +
def get_search(
+ self,
+ collections: Union[List[str], NoneType] = None,
+ ids: Union[List[str], NoneType] = None,
+ bbox: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]], NoneType] = None,
+ intersects: Union[typing_extensions.Annotated[Union[geojson_pydantic.geometries.Point, geojson_pydantic.geometries.MultiPoint, geojson_pydantic.geometries.LineString, geojson_pydantic.geometries.MultiLineString, geojson_pydantic.geometries.Polygon, geojson_pydantic.geometries.MultiPolygon, geojson_pydantic.geometries.GeometryCollection], FieldInfo(annotation=NoneType, required=True, discriminator='type')], NoneType] = None,
+ datetime: Union[datetime.datetime, Tuple[datetime.datetime, datetime.datetime], Tuple[datetime.datetime, NoneType], Tuple[NoneType, datetime.datetime], NoneType] = None,
+ limit: Union[int, NoneType] = 10,
+ **kwargs
+) -> stac_fastapi.types.stac.ItemCollection
+
Cross catalog search (GET).
+Called with GET /search
.
Returns:
+Type | +Description | +
---|---|
None | +ItemCollection containing items which match the search criteria. | +
def item_collection(
+ self,
+ collection_id: str,
+ bbox: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]], NoneType] = None,
+ datetime: Union[datetime.datetime, Tuple[datetime.datetime, datetime.datetime], Tuple[datetime.datetime, NoneType], Tuple[NoneType, datetime.datetime], NoneType] = None,
+ limit: int = 10,
+ token: str = None,
+ **kwargs
+) -> stac_fastapi.types.stac.ItemCollection
+
Get all items from a specific collection.
+Called with GET /collections/{collection_id}/items
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +None | +id of the collection. | +None | +
limit | +None | +number of items to return. | +None | +
token | +None | +pagination token. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +An ItemCollection. | +
def landing_page(
+ self,
+ **kwargs
+) -> stac_fastapi.types.stac.LandingPage
+
Landing page.
+Called with GET /
.
Returns:
+Type | +Description | +
---|---|
None | +API landing page, serving as an entry point to the API. | +
def post_search(
+ self,
+ search_request: stac_fastapi.types.search.BaseSearchPostRequest,
+ **kwargs
+) -> stac_fastapi.types.stac.ItemCollection
+
Cross catalog search (POST).
+Called with POST /search
.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search_request | +None | +search request parameters. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +ItemCollection containing items which match the search criteria. | +
class AsyncBaseTransactionsClient(
+
+)
+
Defines a pattern for implementing the STAC transaction extension.
+def create_collection(
+ self,
+ collection: stac_pydantic.collection.Collection,
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Collection, starlette.responses.Response, NoneType]
+
Create a new collection.
+Called with POST /collections
.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection | +None | +the collection | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The collection that was created. | +
def create_item(
+ self,
+ collection_id: str,
+ item: Union[stac_pydantic.item.Item, stac_pydantic.item_collection.ItemCollection],
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Item, starlette.responses.Response, NoneType]
+
Create a new item.
+Called with POST /collections/{collection_id}/items
.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item | +None | +the item or item collection | +None | +
collection_id | +None | +the id of the collection from the resource path | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The item that was created or None if item collection. | +
def delete_collection(
+ self,
+ collection_id: str,
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Collection, starlette.responses.Response, NoneType]
+
Delete a collection.
+Called with DELETE /collections/{collection_id}
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +None | +id of the collection. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The deleted collection. | +
def delete_item(
+ self,
+ item_id: str,
+ collection_id: str,
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Item, starlette.responses.Response, NoneType]
+
Delete an item from a collection.
+Called with DELETE /collections/{collection_id}/items/{item_id}
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item_id | +None | +id of the item. | +None | +
collection_id | +None | +id of the collection. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The deleted item. | +
def update_collection(
+ self,
+ collection_id: str,
+ collection: stac_pydantic.collection.Collection,
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Collection, starlette.responses.Response, NoneType]
+
Perform a complete update on an existing collection.
+Called with PUT /collections/{collection_id}
. It is expected that this collection
+already exists. The update should do a diff against the saved collection and
+perform any necessary updates. Partial updates are not supported by the
+transactions extension.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +None | +id of the existing collection to be updated | +None | +
collection | +None | +the updated collection (must be complete) | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The updated collection. | +
def update_item(
+ self,
+ collection_id: str,
+ item_id: str,
+ item: stac_pydantic.item.Item,
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Item, starlette.responses.Response, NoneType]
+
Perform a complete update on an existing item.
+Called with PUT /collections/{collection_id}/items
. It is expected
+that this item already exists. The update should do a diff against the
+saved item and perform any necessary updates. Partial updates are not
+supported by the transactions extension.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item | +None | +the item (must be complete) | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The updated item. | +
class BaseCoreClient(
+ stac_version: str = '1.0.0',
+ landing_page_id: str = 'stac-fastapi',
+ title: str = 'stac-fastapi',
+ description: str = 'stac-fastapi',
+ base_conformance_classes: List[str] = NOTHING,
+ extensions: List[stac_fastapi.types.extension.ApiExtension] = NOTHING,
+ post_request_model=<class 'stac_fastapi.types.search.BaseSearchPostRequest'>
+)
+
Defines a pattern for implementing STAC api core endpoints.
+Name | +Type | +Description | +Default | +
---|---|---|---|
extensions | +None | +list of registered api extensions. | +None | +
post_request_model
+
def all_collections(
+ self,
+ **kwargs
+) -> stac_fastapi.types.stac.Collections
+
Get all available collections.
+Called with GET /collections
.
Returns:
+Type | +Description | +
---|---|
None | +A list of collections. | +
def conformance(
+ self,
+ **kwargs
+) -> stac_fastapi.types.stac.Conformance
+
Conformance classes.
+Called with GET /conformance
.
Returns:
+Type | +Description | +
---|---|
None | +Conformance classes which the server conforms to. | +
def conformance_classes(
+ self
+) -> List[str]
+
Generate conformance classes by adding extension conformance to base
+conformance classes.
+def extension_is_enabled(
+ self,
+ extension: str
+) -> bool
+
Check if an api extension is enabled.
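For example, a concrete subclass can branch on extension_is_enabled when building responses. The sketch below is hypothetical: the _run_query and _apply_field_filtering helpers stand in for backend-specific logic, and the remaining abstract methods are omitted.

from stac_fastapi.types.core import BaseCoreClient


class CoreClient(BaseCoreClient):
    """Hypothetical core client showing an extension check."""

    def post_search(self, search_request, **kwargs):
        results = self._run_query(search_request)  # hypothetical backend call
        # extension_is_enabled() takes the extension class name as a string.
        if self.extension_is_enabled("FieldsExtension"):
            results = self._apply_field_filtering(results, search_request)  # hypothetical helper
        return results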
+def get_collection(
+ self,
+ collection_id: str,
+ **kwargs
+) -> stac_fastapi.types.stac.Collection
+
Get collection by id.
+Called with GET /collections/{collection_id}
.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +None | +Id of the collection. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +Collection. | +
def get_item(
+ self,
+ item_id: str,
+ collection_id: str,
+ **kwargs
+) -> stac_fastapi.types.stac.Item
+
Get item by id.
+Called with GET /collections/{collection_id}/items/{item_id}
.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item_id | +None | +Id of the item. | +None | +
collection_id | +None | +Id of the collection. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +Item. | +
def get_search(
+ self,
+ collections: Union[List[str], NoneType] = None,
+ ids: Union[List[str], NoneType] = None,
+ bbox: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]], NoneType] = None,
+ intersects: Union[typing_extensions.Annotated[Union[geojson_pydantic.geometries.Point, geojson_pydantic.geometries.MultiPoint, geojson_pydantic.geometries.LineString, geojson_pydantic.geometries.MultiLineString, geojson_pydantic.geometries.Polygon, geojson_pydantic.geometries.MultiPolygon, geojson_pydantic.geometries.GeometryCollection], FieldInfo(annotation=NoneType, required=True, discriminator='type')], NoneType] = None,
+ datetime: Union[datetime.datetime, Tuple[datetime.datetime, datetime.datetime], Tuple[datetime.datetime, NoneType], Tuple[NoneType, datetime.datetime], NoneType] = None,
+ limit: Union[int, NoneType] = 10,
+ **kwargs
+) -> stac_fastapi.types.stac.ItemCollection
+
Cross catalog search (GET).
+Called with GET /search
.
Returns:
+Type | +Description | +
---|---|
None | +ItemCollection containing items which match the search criteria. | +
def item_collection(
+ self,
+ collection_id: str,
+ bbox: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]], NoneType] = None,
+ datetime: Union[datetime.datetime, Tuple[datetime.datetime, datetime.datetime], Tuple[datetime.datetime, NoneType], Tuple[NoneType, datetime.datetime], NoneType] = None,
+ limit: int = 10,
+ token: str = None,
+ **kwargs
+) -> stac_fastapi.types.stac.ItemCollection
+
Get all items from a specific collection.
+Called with GET /collections/{collection_id}/items
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +None | +id of the collection. | +None | +
limit | +None | +number of items to return. | +None | +
token | +None | +pagination token. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +An ItemCollection. | +
def landing_page(
+ self,
+ **kwargs
+) -> stac_fastapi.types.stac.LandingPage
+
Landing page.
+Called with GET /
.
Returns:
+Type | +Description | +
---|---|
None | +API landing page, serving as an entry point to the API. | +
def list_conformance_classes(
+ self
+)
+
Return a list of conformance classes, including implemented extensions.
+def post_search(
+ self,
+ search_request: stac_fastapi.types.search.BaseSearchPostRequest,
+ **kwargs
+) -> stac_fastapi.types.stac.ItemCollection
+
Cross catalog search (POST).
+Called with POST /search
.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
search_request | +None | +search request parameters. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +ItemCollection containing items which match the search criteria. | +
class BaseTransactionsClient(
+
+)
+
Defines a pattern for implementing the STAC API Transaction Extension.
+def create_collection(
+ self,
+ collection: stac_pydantic.collection.Collection,
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Collection, starlette.responses.Response, NoneType]
+
Create a new collection.
+Called with POST /collections
.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection | +None | +the collection | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The collection that was created. | +
def create_item(
+ self,
+ collection_id: str,
+ item: Union[stac_pydantic.item.Item, stac_pydantic.item_collection.ItemCollection],
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Item, starlette.responses.Response, NoneType]
+
Create a new item.
+Called with POST /collections/{collection_id}/items
.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item | +None | +the item or item collection | +None | +
collection_id | +None | +the id of the collection from the resource path | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The item that was created or None if item collection. | +
def delete_collection(
+ self,
+ collection_id: str,
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Collection, starlette.responses.Response, NoneType]
+
Delete a collection.
+Called with DELETE /collections/{collection_id}
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +None | +id of the collection. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The deleted collection. | +
def delete_item(
+ self,
+ item_id: str,
+ collection_id: str,
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Item, starlette.responses.Response, NoneType]
+
Delete an item from a collection.
+Called with DELETE /collections/{collection_id}/items/{item_id}
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item_id | +None | +id of the item. | +None | +
collection_id | +None | +id of the collection. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The deleted item. | +
def update_collection(
+ self,
+ collection_id: str,
+ collection: stac_pydantic.collection.Collection,
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Collection, starlette.responses.Response, NoneType]
+
Perform a complete update on an existing collection.
+Called with PUT /collections/{collection_id}
. It is expected that this
+collection already exists. The update should do a diff against the saved
+collection and perform any necessary updates. Partial updates are not
+supported by the transactions extension.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
collection_id | +None | +id of the existing collection to be updated | +None | +
collection | +None | +the updated collection (must be complete) | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The updated collection. | +
def update_item(
+ self,
+ collection_id: str,
+ item_id: str,
+ item: stac_pydantic.item.Item,
+ **kwargs
+) -> Union[stac_fastapi.types.stac.Item, starlette.responses.Response, NoneType]
+
Perform a complete update on an existing item.
+Called with PUT /collections/{collection_id}/items
. It is expected
+that this item already exists. The update should do a diff against the
+saved item and perform any necessary updates. Partial updates are not
+supported by the transactions extension.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
item | +None | +the item (must be complete) | +None | +
collection_id | +None | +the id of the collection from the resource path | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The updated item. | +
class LandingPageMixin(
+ stac_version: str = '1.0.0',
+ landing_page_id: str = 'stac-fastapi',
+ title: str = 'stac-fastapi',
+ description: str = 'stac-fastapi'
+)
+
Create a STAC landing page (GET /).
+stac_fastapi.types.errors module.
+class ConflictError(
+ /,
+ *args,
+ **kwargs
+)
+
Database conflict.
+args
+
def with_traceback(
+ ...
+)
+
Exception.with_traceback(tb) --
+set self.__traceback__ to tb and return self.
+class DatabaseError(
+ /,
+ *args,
+ **kwargs
+)
+
Generic database errors.
+args
+
def with_traceback(
+ ...
+)
+
Exception.with_traceback(tb) --
+set self.__traceback__ to tb and return self.
+class ForeignKeyError(
+ /,
+ *args,
+ **kwargs
+)
+
Foreign key error (collection does not exist).
+args
+
def with_traceback(
+ ...
+)
+
Exception.with_traceback(tb) --
+set self.__traceback__ to tb and return self.
+class InvalidQueryParameter(
+ /,
+ *args,
+ **kwargs
+)
+
Error for unknown or invalid query parameters.
Used to capture errors that should respond according to docs.opengeospatial.org/is/17-069r3/17-069r3.html#query_parameters
+args
+
def with_traceback(
+ ...
+)
+
Exception.with_traceback(tb) --
+set self.__traceback__ to tb and return self.
+class NotFoundError(
+ /,
+ *args,
+ **kwargs
+)
+
Resource not found.
+args
+
def with_traceback(
+ ...
+)
+
Exception.with_traceback(tb) --
+set self.__traceback__ to tb and return self.
+class StacApiError(
+ /,
+ *args,
+ **kwargs
+)
+
Generic API error.
+args
+
def with_traceback(
+ ...
+)
+
Exception.with_traceback(tb) --
+set self.__traceback__ to tb and return self.
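Backends raise these exceptions from client methods, and the application's exception handlers translate them into HTTP error responses (NotFoundError is conventionally mapped to 404 and ConflictError to 409, though the mapping is configurable). A minimal, hypothetical illustration, where database stands in for whatever persistence layer the backend uses:

from stac_fastapi.types.errors import ConflictError, NotFoundError


async def get_item(database, collection_id: str, item_id: str):
    # Hypothetical lookup; returns None when the item is missing.
    item = await database.find_item(collection_id, item_id)
    if item is None:
        # The registered exception handler turns this into an HTTP error response.
        raise NotFoundError(f"Item {item_id} does not exist in collection {collection_id}")
    return item


async def create_collection(database, collection):
    # Hypothetical existence check before inserting.
    if await database.collection_exists(collection.id):
        raise ConflictError(f"Collection {collection.id} already exists")
    return await database.insert_collection(collection)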
+Base api extension.
+class ApiExtension(
+ conformance_classes: List[str] = NOTHING,
+ schema_href: Union[str, NoneType] = None
+)
+
Abstract base class for defining API extensions.
+GET
+
POST
+
def get_request_model(
+ self,
+ verb: Union[str, NoneType] = 'GET'
+) -> Union[pydantic.main.BaseModel, NoneType]
+
Return the request model for the extension method.
+The model can differ based on the HTTP verb.
+def register(
+ self,
+ app: fastapi.applications.FastAPI
+) -> None
+
Register the extension with a FastAPI application.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
app | +None | +target FastAPI application. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +None | +
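As a sketch of the pattern (the HelloExtension class, its conformance URI and its /hello route are made up for illustration), an extension bundles conformance classes with a register hook that mounts routes on the application:

from typing import List

import attr
from fastapi import FastAPI

from stac_fastapi.types.extension import ApiExtension


@attr.s
class HelloExtension(ApiExtension):
    """Hypothetical extension adding one informational endpoint."""

    conformance_classes: List[str] = attr.ib(factory=lambda: ["https://example.com/hello/v1"])

    def register(self, app: FastAPI) -> None:
        # Called once by the application factory for each configured extension.
        @app.get("/hello")
        def hello() -> dict:
            return {"hello": "stac"}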
Backend submodule.
+Link helpers.
+INFERRED_LINK_RELS
+
def filter_links(
+ links: List[Dict]
+) -> List[Dict]
+
Remove inferred links.
+def resolve_links(
+ links: list,
+ base_url: str
+) -> List[Dict]
+
Convert relative links to absolute links.
+class BaseLinks(
+ collection_id: str,
+ base_url: str
+)
+
Create inferred links common to collections and items.
+def root(
+ self
+) -> Dict[str, Any]
+
Return the catalog root.
+class CollectionLinks(
+ collection_id: str,
+ base_url: str
+)
+
Create inferred links specific to collections.
+def create_links(
+ self
+) -> List[Dict[str, Any]]
+
Return all inferred links.
+def items(
+ self
+) -> Dict[str, Any]
+
Create the items
link.
def parent(
+ self
+) -> Dict[str, Any]
+
Create the parent
link.
def root(
+ self
+) -> Dict[str, Any]
+
Return the catalog root.
+def self(
+ self
+) -> Dict[str, Any]
+
Create the self
link.
class ItemLinks(
+ collection_id: str,
+ base_url: str,
+ item_id: str
+)
+
Create inferred links specific to items.
+def collection(
+ self
+) -> Dict[str, Any]
+
Create the collection
link.
def create_links(
+ self
+) -> List[Dict[str, Any]]
+
Return all inferred links.
+def parent(
+ self
+) -> Dict[str, Any]
+
Create the parent
link.
def root(
+ self
+) -> Dict[str, Any]
+
Return the catalog root.
+def self(
+ self
+) -> Dict[str, Any]
+
Create the self
link.
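Taken together, these helpers build the inferred self/parent/root (and items/collection) links that backends attach to returned objects. A small illustration with arbitrary ids, assuming the module lives at stac_fastapi.types.links (adjust the import to wherever this Link helpers module resides in your installation):

from stac_fastapi.types.links import CollectionLinks, ItemLinks  # import path assumed

base_url = "https://stac.example.com/"

# Inferred links for a collection: self, parent, root and items.
collection_links = CollectionLinks(collection_id="sentinel-2-l2a", base_url=base_url)
print(collection_links.create_links())

# Inferred links for an item: self, parent, collection and root.
item_links = ItemLinks(
    collection_id="sentinel-2-l2a",
    base_url=base_url,
    item_id="example-item-001",
)
print(item_links.create_links())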
rfc3339.
+DateTimeType
+
RFC33339_PATTERN
+
def datetime_to_str(
+ dt: datetime.datetime,
+ timespec: str = 'auto'
+) -> str
+
Converts a datetime.datetime instance to an ISO 8601 string in the
+RFC 3339, section 5.6 format (https://datatracker.ietf.org/doc/html/rfc3339#section-5.6)
+required by the STAC Spec (master/item-spec/common-metadata.md#date-and-time).
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
dt | +None | +The datetime to convert. | +None | +
timespec | +None | +An optional argument that specifies the number of additional terms of the time to include. Valid options are 'auto', 'hours', 'minutes', 'seconds', 'milliseconds' and 'microseconds'. The default value is 'auto'. |
+None | +
Returns:
+Type | +Description | +
---|---|
str | +The ISO8601 (RFC 3339) formatted string representing the datetime. | +
def now_in_utc(
+
+) -> datetime.datetime
+
Return a datetime value of now with the UTC timezone applied.
+def now_to_rfc3339_str(
+
+) -> str
+
Return an RFC 3339 string representing now.
+def parse_single_date(
+ date_str: str
+) -> datetime.datetime
+
Parse a single RFC3339 date string into a datetime object.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
date_str | +str | +A string representing the date in RFC3339 format. | +None | +
Returns:
+Type | +Description | +
---|---|
datetime | +A datetime object parsed from the date_str. | +
Raises:
+Type | +Description | +
---|---|
ValueError | +If the date_str is empty or contains the placeholder '..'. | +
def rfc3339_str_to_datetime(
+ s: str
+) -> datetime.datetime
+
Convert a string conforming to RFC 3339 to a datetime.datetime.
+Uses iso8601.parse_date under the hood.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
s | +str | +The string to convert to datetime.datetime. |
+None | +
Returns:
+Type | +Description | +
---|---|
datetime | +The datetime represented by the ISO8601 (RFC 3339) formatted string. |
Raises:
+Type | +Description | +
---|---|
ValueError | +If the string is not a valid RFC 3339 string. | +
def str_to_interval(
+ interval: Union[str, NoneType]
+) -> Union[datetime.datetime, Tuple[datetime.datetime, datetime.datetime], Tuple[datetime.datetime, NoneType], Tuple[NoneType, datetime.datetime], NoneType]
+
Extract a single datetime object or a tuple of datetime objects from an
+interval string defined by the OGC API. The interval can either be a single datetime or a range with start and end datetimes.
+Args: interval (Optional[str]): The interval string to convert to datetime objects, or None if no datetime is specified.
+Returns: Optional[DateTimeType]: A single datetime.datetime object, a tuple of datetime.datetime objects, or None if the input is None.
+Raises: HTTPException: If the string is not valid for various reasons such as being empty, having more than one slash, or if date formats are invalid.
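To make the round trip concrete, a short sketch using these helpers (import path assumed to be stac_fastapi.types.rfc3339; example values are arbitrary):

from datetime import datetime, timezone

from stac_fastapi.types.rfc3339 import (  # import path assumed
    datetime_to_str,
    rfc3339_str_to_datetime,
    str_to_interval,
)

# datetime -> RFC 3339 string, e.g. "2024-01-01T12:30:00Z".
dt = datetime(2024, 1, 1, 12, 30, tzinfo=timezone.utc)
print(datetime_to_str(dt))

# RFC 3339 string -> timezone-aware datetime.
print(rfc3339_str_to_datetime("2024-01-01T12:30:00Z"))

# Interval string -> tuple of datetimes; ".." marks an open end.
print(str_to_interval("2024-01-01T00:00:00Z/2024-06-30T23:59:59Z"))
print(str_to_interval("2024-01-01T00:00:00Z/.."))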
+stac_fastapi.types.search module.
+Limit
+
NumType
+
def crop(
+ v: typing_extensions.Annotated[int, Gt(gt=0)]
+) -> typing_extensions.Annotated[int, Gt(gt=0)]
+
Crop value to 10,000.
+def str2bbox(
+ x: str
+) -> Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]], NoneType]
+
Convert a string to a BBox based on the ',' delimiter.
+def str2list(
+ x: str
+) -> Union[List[str], NoneType]
+
Convert a string to a list based on the ',' delimiter.
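For illustration, these converters are what turn raw query-string values from a GET search into typed values:

from stac_fastapi.types.search import str2bbox, str2list

# Comma-delimited bbox string -> numeric 4-tuple (a 6-value string yields a 6-tuple).
print(str2bbox("-180.0,-90.0,180.0,90.0"))

# Comma-delimited string -> list of strings.
print(str2list("sentinel-2-l2a,landsat-c2-l2"))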
+class APIRequest(
+
+)
+
Generic API Request base class.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into a format that matches the signature of the
+endpoint.
+class BaseSearchGetRequest(
+ collections: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ ids: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ bbox: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ intersects: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ datetime: typing_extensions.Annotated[Union[str, NoneType], Query(PydanticUndefined)] = None,
+ limit: typing_extensions.Annotated[Union[typing_extensions.Annotated[int, Gt(gt=0), AfterValidator(func=crop)], NoneType], Query(PydanticUndefined)] = 10
+)
+
Base arguments for GET Request.
+def kwargs(
+ self
+) -> Dict
+
Transform API request params into a format that matches the signature of the
+endpoint.
+class BaseSearchPostRequest(
+ /,
+ **data: 'Any'
+)
+
Base arguments for POST Request.
+model_computed_fields
+
model_config
+
model_fields
+
def construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
def from_orm(
+ obj: 'Any'
+) -> 'Self'
+
def model_construct(
+ _fields_set: 'set[str] | None' = None,
+ **values: 'Any'
+) -> 'Self'
+
Creates a new instance of the Model
class with validated data.
Creates a new model setting __dict__
and __pydantic_fields_set__
from trusted or pre-validated data.
+Default values are respected, but no other validation is performed.
Note
+model_construct()
generally respects the model_config.extra
setting on the provided model.
+That is, if model_config.extra == 'allow'
, then all extra passed values are added to the model instance's __dict__
+and __pydantic_extra__
fields. If model_config.extra == 'ignore'
(the default), then all extra passed values are ignored.
+Because no validation is performed with a call to model_construct()
, having model_config.extra == 'forbid'
does not result in
+an error if extra values are passed, but they will be ignored.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
_fields_set | +None | +A set of field names that were originally explicitly set during instantiation. If provided, this is directly used for the model_fields_set attribute. Otherwise, the field names from the values argument will be used. |
+None | +
values | +None | +Trusted or pre-validated data dictionary. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A new instance of the Model class with validated data. |
+
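For instance, a minimal sketch of the difference between validated construction and model_construct, using BaseSearchPostRequest (field values are arbitrary):

from stac_fastapi.types.search import BaseSearchPostRequest

# Normal construction runs validation and coercion on the input values.
validated = BaseSearchPostRequest(collections=["sentinel-2-l2a"], limit=5)

# model_construct() trusts the data as-is: no validation or coercion happens,
# so it should only be used with values that are already known to be valid.
trusted = BaseSearchPostRequest.model_construct(collections=["sentinel-2-l2a"], limit=5)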
def model_json_schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+ mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
Generates a JSON schema for a model class.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
by_alias | +None | +Whether to use attribute aliases or not. | +None | +
ref_template | +None | +The reference template. | +None | +
schema_generator | +None | +To override the logic used to generate the JSON schema, as a subclass ofGenerateJsonSchema with your desired modifications |
+None | +
mode | +None | +The mode in which to generate the schema. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The JSON schema for the given model class. | +
def model_parametrized_name(
+ params: 'tuple[type[Any], ...]'
+) -> 'str'
+
Compute the class name for parametrizations of generic classes.
+This method can be overridden to achieve a custom naming scheme for generic BaseModels.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
params | +None | +Tuple of types of the class. Given a generic classModel with 2 type variables and a concrete model Model[str, int] ,the value (str, int) would be passed to params . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +String representing the new class where params are passed to cls as type variables. |
+
Raises:
+Type | +Description | +
---|---|
TypeError | +Raised when trying to generate concrete names for non-generic models. | +
def model_rebuild(
+ *,
+ force: 'bool' = False,
+ raise_errors: 'bool' = True,
+ _parent_namespace_depth: 'int' = 2,
+ _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
Try to rebuild the pydantic-core schema for the model.
This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
force | +None | +Whether to force the rebuilding of the model schema, defaults to False . |
+None | +
raise_errors | +None | +Whether to raise errors, defaults to True . |
+None | +
_parent_namespace_depth | +None | +The depth level of the parent namespace, defaults to 2. | +None | +
_types_namespace | +None | +The types namespace, defaults to None . |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +Returns None if the schema is already "complete" and rebuilding was not required.If rebuilding was required, returns True if rebuilding was successful, otherwise False . |
+
def model_validate(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ from_attributes: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate a pydantic model instance.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
from_attributes | +None | +Whether to extract data from object attributes. | +None | +
context | +None | +Additional context to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated model instance. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If the object could not be validated. | +
def model_validate_json(
+ json_data: 'str | bytes | bytearray',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/json/#json-parsing
+Validate the given JSON data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
json_data | +None | +The JSON data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
Raises:
+Type | +Description | +
---|---|
ValidationError | +If json_data is not a JSON string or the object could not be validated. |
+
def model_validate_strings(
+ obj: 'Any',
+ *,
+ strict: 'bool | None' = None,
+ context: 'Any | None' = None
+) -> 'Self'
+
Validate the given object with string data against the Pydantic model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
obj | +None | +The object containing string data to validate. | +None | +
strict | +None | +Whether to enforce types strictly. | +None | +
context | +None | +Extra variables to pass to the validator. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +The validated Pydantic model. | +
def parse_file(
+ path: 'str | Path',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def parse_obj(
+ obj: 'Any'
+) -> 'Self'
+
def parse_raw(
+ b: 'str | bytes',
+ *,
+ content_type: 'str | None' = None,
+ encoding: 'str' = 'utf8',
+ proto: 'DeprecatedParseProtocol | None' = None,
+ allow_pickle: 'bool' = False
+) -> 'Self'
+
def schema(
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}'
+) -> 'Dict[str, Any]'
+
def schema_json(
+ *,
+ by_alias: 'bool' = True,
+ ref_template: 'str' = '#/$defs/{model}',
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def update_forward_refs(
+ **localns: 'Any'
+) -> 'None'
+
def validate(
+ value: 'Any'
+) -> 'Self'
+
def validate_bbox(
+ v: Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]]]
+) -> Union[Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int]], Tuple[Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int], Union[float, int]]]
+
def validate_datetime(
+ value: str
+) -> str
+
def validate_spatial(
+ values: Dict[str, Any]
+) -> Dict[str, Any]
+
end_date
+
model_extra
+
Get extra fields set during validation.
+model_fields_set
+
Returns the set of fields that have been explicitly set on this model instance.
+spatial_filter
+
Return a geojson-pydantic object representing the spatial filter for the search request.
+Check for both because the bbox
and intersects
parameters are mutually exclusive.
start_date
+
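A short sketch of how a POST search request exposes the properties listed above (field values are arbitrary; bbox and intersects cannot be supplied together):

from stac_fastapi.types.search import BaseSearchPostRequest

search = BaseSearchPostRequest(
    collections=["sentinel-2-l2a"],
    bbox=[-10.0, 35.0, 5.0, 45.0],
    datetime="2024-01-01T00:00:00Z/2024-06-30T23:59:59Z",
    limit=50,
)

# spatial_filter returns a geojson-pydantic geometry: the bbox expressed as a
# Polygon here, or the intersects geometry if that had been supplied instead.
print(search.spatial_filter)

# start_date / end_date give the parsed ends of the datetime interval.
print(search.start_date, search.end_date)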
def copy(
+ self,
+ *,
+ include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+ update: 'Dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Returns a copy of the model.
+Deprecated
+This method is now deprecated; use model_copy
instead.
If you need include
or exclude
, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
include | +None | +Optional set or mapping specifying which fields to include in the copied model. | +None | +
exclude | +None | +Optional set or mapping specifying which fields to exclude in the copied model. | +None | +
update | +None | +Optional dictionary of field-value pairs to override field values in the copied model. | +None | +
deep | +None | +If True, the values of fields that are Pydantic models will be deep-copied. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A copy of the model with included, excluded and updated fields as specified. | +
def dict(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False
+) -> 'Dict[str, Any]'
+
def json(
+ self,
+ *,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ encoder: 'Callable[[Any], Any] | None' = PydanticUndefined,
+ models_as_dict: 'bool' = PydanticUndefined,
+ **dumps_kwargs: 'Any'
+) -> 'str'
+
def model_copy(
+ self,
+ *,
+ update: 'dict[str, Any] | None' = None,
+ deep: 'bool' = False
+) -> 'Self'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#model_copy
+Returns a copy of the model.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
update | +None | +Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. |
+None | +
deep | +None | +Set to True to make a deep copy of the model. |
+None | +
Returns:
+Type | +Description | +
---|---|
None | +New model instance. | +
def model_dump(
+ self,
+ *,
+ mode: "Literal[('json', 'python')] | str" = 'python',
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump
+Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
+Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
mode | +None | +The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. |
+None | +
include | +None | +A set of fields to include in the output. | +None | +
exclude | +None | +A set of fields to exclude from the output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to use the field's alias in the dictionary key if defined. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a PydanticSerializationError. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A dictionary representation of the model. | +
def model_dump_json(
+ self,
+ *,
+ indent: 'int | None' = None,
+ include: 'IncEx | None' = None,
+ exclude: 'IncEx | None' = None,
+ context: 'Any | None' = None,
+ by_alias: 'bool' = False,
+ exclude_unset: 'bool' = False,
+ exclude_defaults: 'bool' = False,
+ exclude_none: 'bool' = False,
+ round_trip: 'bool' = False,
+ warnings: "bool | Literal[('none', 'warn', 'error')]" = True,
+ serialize_as_any: 'bool' = False
+) -> 'str'
+
Usage docs: docs.pydantic.dev/2.9/concepts/serialization/#modelmodel_dump_json
+Generates a JSON representation of the model using Pydantic's to_json
method.
Parameters:
+Name | +Type | +Description | +Default | +
---|---|---|---|
indent | +None | +Indentation to use in the JSON output. If None is passed, the output will be compact. | +None | +
include | +None | +Field(s) to include in the JSON output. | +None | +
exclude | +None | +Field(s) to exclude from the JSON output. | +None | +
context | +None | +Additional context to pass to the serializer. | +None | +
by_alias | +None | +Whether to serialize using field aliases. | +None | +
exclude_unset | +None | +Whether to exclude fields that have not been explicitly set. | +None | +
exclude_defaults | +None | +Whether to exclude fields that are set to their default value. | +None | +
exclude_none | +None | +Whether to exclude fields that have a value of None . |
+None | +
round_trip | +None | +If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | +None | +
warnings | +None | +How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a PydanticSerializationError. |
+None | +
serialize_as_any | +None | +Whether to serialize fields with duck-typing serialization behavior. | +None | +
Returns:
+Type | +Description | +
---|---|
None | +A JSON string representation of the model. | +
def model_post_init(
+ self: 'BaseModel',
+ context: 'Any',
+ /
+) -> 'None'
+
We need to both initialize private attributes and call the user-defined model_post_init
+method.
+STAC types.
+NumType
+
class Catalog(
+ /,
+ *args,
+ **kwargs
+)
+
STAC Catalog.
+def clear(
+ ...
+)
+
D.clear() -> None. Remove all items from D.
+def copy(
+ ...
+)
+
D.copy() -> a shallow copy of D
+def fromkeys(
+ iterable,
+ value=None,
+ /
+)
+
Create a new dictionary with keys from iterable and values set to value.
+def get(
+ self,
+ key,
+ default=None,
+ /
+)
+
Return the value for key if key is in the dictionary, else default.
+def items(
+ ...
+)
+
D.items() -> a set-like object providing a view on D's items
+def keys(
+ ...
+)
+
D.keys() -> a set-like object providing a view on D's keys
+def pop(
+ ...
+)
+
D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+If key is not found, d is returned if given, otherwise KeyError is raised
+def popitem(
+ self,
+ /
+)
+
Remove and return a (key, value) pair as a 2-tuple.
+Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.
+def setdefault(
+ self,
+ key,
+ default=None,
+ /
+)
+
Insert key with a value of default if key is not in the dictionary.
+Return the value for key if key is in the dictionary, else default.
+def update(
+ ...
+)
+
D.update([E, ]**F) -> None. Update D from dict/iterable E and F.
+If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]
+def values(
+ ...
+)
+
D.values() -> an object providing a view on D's values
+class Collection(
+ /,
+ *args,
+ **kwargs
+)
+
STAC Collection.
+def clear(
+ ...
+)
+
D.clear() -> None. Remove all items from D.
+def copy(
+ ...
+)
+
D.copy() -> a shallow copy of D
+def fromkeys(
+ iterable,
+ value=None,
+ /
+)
+
Create a new dictionary with keys from iterable and values set to value.
+def get(
+ self,
+ key,
+ default=None,
+ /
+)
+
Return the value for key if key is in the dictionary, else default.
+def items(
+ ...
+)
+
D.items() -> a set-like object providing a view on D's items
+def keys(
+ ...
+)
+
D.keys() -> a set-like object providing a view on D's keys
+def pop(
+ ...
+)
+
D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+If key is not found, d is returned if given, otherwise KeyError is raised
+def popitem(
+ self,
+ /
+)
+
Remove and return a (key, value) pair as a 2-tuple.
+Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.
+def setdefault(
+ self,
+ key,
+ default=None,
+ /
+)
+
Insert key with a value of default if key is not in the dictionary.
+Return the value for key if key is in the dictionary, else default.
+def update(
+ ...
+)
+
D.update([E, ]**F) -> None. Update D from dict/iterable E and F.
+If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]
+def values(
+ ...
+)
+
D.values() -> an object providing a view on D's values
+class Collections(
+ /,
+ *args,
+ **kwargs
+)
+
All collections endpoint.
+github.com/radiantearth/stac-api-spec/tree/master/collections
+def clear(
+ ...
+)
+
D.clear() -> None. Remove all items from D.
+def copy(
+ ...
+)
+
D.copy() -> a shallow copy of D
+def fromkeys(
+ iterable,
+ value=None,
+ /
+)
+
Create a new dictionary with keys from iterable and values set to value.
+def get(
+ self,
+ key,
+ default=None,
+ /
+)
+
Return the value for key if key is in the dictionary, else default.
+def items(
+ ...
+)
+
D.items() -> a set-like object providing a view on D's items
+def keys(
+ ...
+)
+
D.keys() -> a set-like object providing a view on D's keys
+def pop(
+ ...
+)
+
D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+If key is not found, d is returned if given, otherwise KeyError is raised
+def popitem(
+ self,
+ /
+)
+
Remove and return a (key, value) pair as a 2-tuple.
+Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.
+def setdefault(
+ self,
+ key,
+ default=None,
+ /
+)
+
Insert key with a value of default if key is not in the dictionary.
+Return the value for key if key is in the dictionary, else default.
+def update(
+ ...
+)
+
D.update([E, ]**F) -> None. Update D from dict/iterable E and F.
+If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]
+def values(
+ ...
+)
+
D.values() -> an object providing a view on D's values
+class Conformance(
+ /,
+ *args,
+ **kwargs
+)
+
STAC Conformance Classes.
+def clear(
+ ...
+)
+
D.clear() -> None. Remove all items from D.
+def copy(
+ ...
+)
+
D.copy() -> a shallow copy of D
+def fromkeys(
+ iterable,
+ value=None,
+ /
+)
+
Create a new dictionary with keys from iterable and values set to value.
+def get(
+ self,
+ key,
+ default=None,
+ /
+)
+
Return the value for key if key is in the dictionary, else default.
+def items(
+ ...
+)
+
D.items() -> a set-like object providing a view on D's items
+def keys(
+ ...
+)
+
D.keys() -> a set-like object providing a view on D's keys
+def pop(
+ ...
+)
+
D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+If key is not found, d is returned if given, otherwise KeyError is raised
+def popitem(
+ self,
+ /
+)
+
Remove and return a (key, value) pair as a 2-tuple.
+Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.
+def setdefault(
+ self,
+ key,
+ default=None,
+ /
+)
+
Insert key with a value of default if key is not in the dictionary.
+Return the value for key if key is in the dictionary, else default.
+def update(
+ ...
+)
+
D.update([E, ]**F) -> None. Update D from dict/iterable E and F.
+If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]
+def values(
+ ...
+)
+
D.values() -> an object providing a view on D's values
+class Item(
+ /,
+ *args,
+ **kwargs
+)
+
STAC Item.
+def clear(
+ ...
+)
+
D.clear() -> None. Remove all items from D.
+def copy(
+ ...
+)
+
D.copy() -> a shallow copy of D
+def fromkeys(
+ iterable,
+ value=None,
+ /
+)
+
Create a new dictionary with keys from iterable and values set to value.
+def get(
+ self,
+ key,
+ default=None,
+ /
+)
+
Return the value for key if key is in the dictionary, else default.
+def items(
+ ...
+)
+
D.items() -> a set-like object providing a view on D's items
+def keys(
+ ...
+)
+
D.keys() -> a set-like object providing a view on D's keys
+def pop(
+ ...
+)
+
D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+If key is not found, d is returned if given, otherwise KeyError is raised
+def popitem(
+ self,
+ /
+)
+
Remove and return a (key, value) pair as a 2-tuple.
+Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.
+def setdefault(
+ self,
+ key,
+ default=None,
+ /
+)
+
Insert key with a value of default if key is not in the dictionary.
+Return the value for key if key is in the dictionary, else default.
+def update(
+ ...
+)
+
D.update([E, ]**F) -> None. Update D from dict/iterable E and F.
+If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]
+def values(
+ ...
+)
+
D.values() -> an object providing a view on D's values
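These classes are dictionary-based containers (TypedDict-style), so responses are assembled as plain dicts with the usual dict methods shown above; a minimal, partial Item purely for illustration:

from stac_fastapi.types import stac

item = stac.Item(
    type="Feature",
    stac_version="1.0.0",
    id="example-item-001",
    collection="sentinel-2-l2a",
    geometry={"type": "Point", "coordinates": [0.0, 0.0]},
    bbox=[0.0, 0.0, 0.0, 0.0],
    properties={"datetime": "2024-01-01T00:00:00Z"},
    links=[],
    assets={},
)

# Item behaves like a dict, so the standard mapping methods apply.
print(item.get("collection"))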
+class ItemCollection(
+ /,
+ *args,
+ **kwargs
+)
+
STAC Item Collection.
+def clear(
+ ...
+)
+
D.clear() -> None. Remove all items from D.
+def copy(
+ ...
+)
+
D.copy() -> a shallow copy of D
+def fromkeys(
+ iterable,
+ value=None,
+ /
+)
+
Create a new dictionary with keys from iterable and values set to value.
+def get(
+ self,
+ key,
+ default=None,
+ /
+)
+
Return the value for key if key is in the dictionary, else default.
+def items(
+ ...
+)
+
D.items() -> a set-like object providing a view on D's items
+def keys(
+ ...
+)
+
D.keys() -> a set-like object providing a view on D's keys
+def pop(
+ ...
+)
+
D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+If key is not found, d is returned if given, otherwise KeyError is raised
+def popitem(
+ self,
+ /
+)
+
Remove and return a (key, value) pair as a 2-tuple.
+Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.
+def setdefault(
+ self,
+ key,
+ default=None,
+ /
+)
+
Insert key with a value of default if key is not in the dictionary.
+Return the value for key if key is in the dictionary, else default.
+def update(
+ ...
+)
+
D.update([E, ]**F) -> None. Update D from dict/iterable E and F.
+If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]
+def values(
+ ...
+)
+
D.values() -> an object providing a view on D's values
+class LandingPage(
+ /,
+ *args,
+ **kwargs
+)
+
STAC Landing Page.
+def clear(
+ ...
+)
+
D.clear() -> None. Remove all items from D.
+def copy(
+ ...
+)
+
D.copy() -> a shallow copy of D
+def fromkeys(
+ iterable,
+ value=None,
+ /
+)
+
Create a new dictionary with keys from iterable and values set to value.
+def get(
+ self,
+ key,
+ default=None,
+ /
+)
+
Return the value for key if key is in the dictionary, else default.
+def items(
+ ...
+)
+
D.items() -> a set-like object providing a view on D's items
+def keys(
+ ...
+)
+
D.keys() -> a set-like object providing a view on D's keys
+def pop(
+ ...
+)
+
D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+If key is not found, d is returned if given, otherwise KeyError is raised
+def popitem(
+ self,
+ /
+)
+
Remove and return a (key, value) pair as a 2-tuple.
+Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.
+def setdefault(
+ self,
+ key,
+ default=None,
+ /
+)
+
Insert key with a value of default if key is not in the dictionary.
+Return the value for key if key is in the dictionary, else default.
+def update(
+ ...
+)
+
D.update([E, ]**F) -> None. Update D from dict/iterable E and F.
+If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]
+def values(
+ ...
+)
+
D.values() -> an object providing a view on D's values
+Library version.